From 5eb3fc72c4d0764fb4645d70cd6ec2d71b04aa14 Mon Sep 17 00:00:00 2001
From: Azure SDK for Python bot
Date: Fri, 7 Jun 2019 08:58:31 -0700
Subject: [PATCH 01/30] [AutoPR datafactory/resource-manager] [Datafactory]
 ADLS Gen 2 support for HDI BYOC and vNet support for HDI on demand (#5663)

* Generated from e4bd3471cedb625a2d65c1045f8d13f532f3f945

ADLS Gen 2 support for HDI BYOC and vNet support for HDI on demand

* Packaging update of azure-mgmt-datafactory
---
 .../azure-mgmt-datafactory/MANIFEST.in        |   1 +
 .../azure/mgmt/datafactory/models/__init__.py | 137 ++++++++++++
 .../models/amazon_mws_linked_service.py       |   2 +-
 .../models/amazon_mws_linked_service_py3.py   |   2 +-
 .../datafactory/models/amazon_mws_source.py   |   5 +
 .../models/amazon_mws_source_py3.py           |   9 +-
 .../models/amazon_redshift_linked_service.py  |   2 +-
 .../amazon_redshift_linked_service_py3.py     |   2 +-
 .../models/amazon_redshift_source.py          |   5 +
 .../models/amazon_redshift_source_py3.py      |   9 +-
 .../datafactory/models/amazon_s3_dataset.py   |  10 ++
 .../models/amazon_s3_dataset_py3.py           |  12 +-
 .../models/amazon_s3_linked_service.py        |   9 +-
 .../models/amazon_s3_linked_service_py3.py    |  11 +-
 .../models/azure_batch_linked_service.py      |   2 +-
 .../models/azure_batch_linked_service_py3.py  |   2 +-
 .../datafactory/models/azure_blob_dataset.py  |  10 ++
 .../models/azure_blob_dataset_py3.py          |  12 +-
 .../models/azure_blob_fs_dataset.py           |  85 +++++++++++
 .../models/azure_blob_fs_dataset_py3.py       |  85 +++++++++++
 .../models/azure_blob_fs_linked_service.py    |  86 +++++++++++
 .../azure_blob_fs_linked_service_py3.py       |  86 +++++++++++
 .../datafactory/models/azure_blob_fs_sink.py  |  67 +++++++++
 .../models/azure_blob_fs_sink_py3.py          |  67 +++++++++
 .../models/azure_blob_fs_source.py            |  68 +++++++++
 .../models/azure_blob_fs_source_py3.py        |  68 +++++++++
 .../azure_blob_storage_linked_service.py      |   2 +-
 .../azure_blob_storage_linked_service_py3.py  |   2 +-
 .../azure_data_explorer_linked_service.py     |  86 +++++++++++
 .../azure_data_explorer_linked_service_py3.py |  86 +++++++++++
 .../models/azure_data_explorer_sink.py        |  76 ++++++++++
 .../models/azure_data_explorer_sink_py3.py    |  76 ++++++++++
 .../models/azure_data_explorer_source.py      |  70 +++++++++
 .../models/azure_data_explorer_source_py3.py  |  70 +++++++++
 .../azure_data_explorer_table_dataset.py      |  72 +++++++++
 .../azure_data_explorer_table_dataset_py3.py  |  72 +++++++++
 ...zure_data_lake_analytics_linked_service.py |   2 +-
 ..._data_lake_analytics_linked_service_py3.py |   2 +-
 .../models/azure_data_lake_store_dataset.py   |   5 +-
 .../azure_data_lake_store_dataset_py3.py      |   7 +-
 .../azure_data_lake_store_linked_service.py   |   2 +-
 ...zure_data_lake_store_linked_service_py3.py |   2 +-
 .../models/azure_data_lake_store_sink.py      |   5 +
 .../models/azure_data_lake_store_sink_py3.py  |   9 +-
 .../models/azure_data_lake_store_source.py    |   5 +
 .../azure_data_lake_store_source_py3.py       |   9 +-
 .../models/azure_databricks_linked_service.py |  18 ++-
 .../azure_databricks_linked_service_py3.py    |  20 ++-
 .../models/azure_function_linked_service.py   |   2 +-
 .../azure_function_linked_service_py3.py      |   2 +-
 .../models/azure_key_vault_linked_service.py  |   2 +-
 .../azure_key_vault_linked_service_py3.py     |   2 +-
 .../models/azure_ml_linked_service.py         |   2 +-
 .../models/azure_ml_linked_service_py3.py     |   2 +-
 .../models/azure_my_sql_linked_service.py     |   2 +-
 .../models/azure_my_sql_linked_service_py3.py |   2 +-
 .../datafactory/models/azure_my_sql_source.py |   5 +
 .../models/azure_my_sql_source_py3.py         |   9 +-
 .../azure_postgre_sql_linked_service.py       |   2 +-
 .../azure_postgre_sql_linked_service_py3.py   |   2 +-
 .../models/azure_postgre_sql_source.py        |   5 +
 .../models/azure_postgre_sql_source_py3.py    |   9 +-
 .../datafactory/models/azure_queue_sink.py    |   5 +
 .../models/azure_queue_sink_py3.py            |   9 +-
 .../models/azure_search_index_sink.py         |   5 +
 .../models/azure_search_index_sink_py3.py     |   9 +-
 .../models/azure_search_linked_service.py     |   2 +-
 .../models/azure_search_linked_service_py3.py |   2 +-
 .../azure_sql_database_linked_service.py      |   2 +-
 .../azure_sql_database_linked_service_py3.py  |   2 +-
 .../models/azure_sql_dw_linked_service.py     |   2 +-
 .../models/azure_sql_dw_linked_service_py3.py |   2 +-
 .../models/azure_storage_linked_service.py    |   2 +-
 .../azure_storage_linked_service_py3.py       |   2 +-
 .../datafactory/models/azure_table_sink.py    |   5 +
 .../models/azure_table_sink_py3.py            |   9 +-
 .../datafactory/models/azure_table_source.py  |   5 +
 .../models/azure_table_source_py3.py          |   9 +-
 .../azure_table_storage_linked_service.py     |   2 +-
 .../azure_table_storage_linked_service_py3.py |   2 +-
 .../datafactory/models/blob_events_trigger.py |   4 +
 .../models/blob_events_trigger_py3.py         |   8 +-
 .../mgmt/datafactory/models/blob_sink.py      |   5 +
 .../mgmt/datafactory/models/blob_sink_py3.py  |   9 +-
 .../mgmt/datafactory/models/blob_source.py    |   5 +
 .../datafactory/models/blob_source_py3.py     |   9 +-
 .../mgmt/datafactory/models/blob_trigger.py   |   4 +
 .../datafactory/models/blob_trigger_py3.py    |   8 +-
 .../models/cassandra_linked_service.py        |   2 +-
 .../models/cassandra_linked_service_py3.py    |   2 +-
 .../datafactory/models/cassandra_source.py    |   5 +
 .../models/cassandra_source_py3.py            |   9 +-
 .../models/concur_linked_service.py           |   2 +-
 .../models/concur_linked_service_py3.py       |   2 +-
 .../mgmt/datafactory/models/concur_source.py  |   5 +
 .../datafactory/models/concur_source_py3.py   |   9 +-
 .../datafactory/models/control_activity.py    |   8 +-
 .../models/control_activity_py3.py            |   8 +-
 .../mgmt/datafactory/models/copy_activity.py  |   4 +
 .../datafactory/models/copy_activity_py3.py   |   6 +-
 .../mgmt/datafactory/models/copy_sink.py      |  17 ++-
 .../mgmt/datafactory/models/copy_sink_py3.py  |  19 ++-
 .../mgmt/datafactory/models/copy_source.py    |  24 ++-
 .../datafactory/models/copy_source_py3.py     |  26 ++--
 .../models/cosmos_db_linked_service.py        |   2 +-
 .../models/cosmos_db_linked_service_py3.py    |   2 +-
 ...smos_db_mongo_db_api_collection_dataset.py |  73 ++++++++++
 ..._db_mongo_db_api_collection_dataset_py3.py |  73 ++++++++++
 .../cosmos_db_mongo_db_api_linked_service.py  |  67 +++++++++
 ...smos_db_mongo_db_api_linked_service_py3.py |  67 +++++++++
 .../models/cosmos_db_mongo_db_api_sink.py     |  68 +++++++++
 .../models/cosmos_db_mongo_db_api_sink_py3.py |  68 +++++++++
 .../models/cosmos_db_mongo_db_api_source.py   |  71 +++++++++
 .../cosmos_db_mongo_db_api_source_py3.py      |  71 +++++++++
 .../models/couchbase_linked_service.py        |   2 +-
 .../models/couchbase_linked_service_py3.py    |   2 +-
 .../datafactory/models/couchbase_source.py    |   5 +
 .../models/couchbase_source_py3.py            |   9 +-
 .../datafactory/models/custom_activity.py     |   5 +
 .../datafactory/models/custom_activity_py3.py |   7 +-
 .../custom_data_source_linked_service.py      |   2 +-
 .../custom_data_source_linked_service_py3.py  |   2 +-
 .../mgmt/datafactory/models/custom_dataset.py |   3 +-
 .../datafactory/models/custom_dataset_py3.py  |   5 +-
 .../data_factory_management_client_enums.py   |  30 ++++
 .../azure/mgmt/datafactory/models/dataset.py  |  41 +++---
 .../mgmt/datafactory/models/dataset_py3.py    |  41 +++---
 .../datafactory/models/db2_linked_service.py  |   2 +-
 .../models/db2_linked_service_py3.py          |   2 +-
 .../models/document_db_collection_sink.py     |   5 +
 .../models/document_db_collection_sink_py3.py |   9 +-
 .../models/document_db_collection_source.py   |   5 +
 .../document_db_collection_source_py3.py      |   9 +-
 .../models/drill_linked_service.py            |   2 +-
 .../models/drill_linked_service_py3.py        |   2 +-
 .../mgmt/datafactory/models/drill_source.py   |   5 +
 .../datafactory/models/drill_source_py3.py    |   9 +-
 .../models/dynamics_ax_linked_service.py      |  93 ++++++++++++
 .../models/dynamics_ax_linked_service_py3.py  |  93 ++++++++++++
 .../models/dynamics_ax_resource_dataset.py    |  73 ++++++++++
 .../dynamics_ax_resource_dataset_py3.py       |  73 ++++++++++
 .../datafactory/models/dynamics_ax_source.py  |  57 ++++++++
 .../models/dynamics_ax_source_py3.py          |  57 ++++++++
 .../models/dynamics_linked_service.py         |   2 +-
 .../models/dynamics_linked_service_py3.py     |   2 +-
 .../mgmt/datafactory/models/dynamics_sink.py  |   5 +
 .../datafactory/models/dynamics_sink_py3.py   |   9 +-
 .../datafactory/models/dynamics_source.py     |   5 +
 .../datafactory/models/dynamics_source_py3.py |   9 +-
 .../models/eloqua_linked_service.py           |   2 +-
 .../models/eloqua_linked_service_py3.py       |   2 +-
 .../mgmt/datafactory/models/eloqua_source.py  |   5 +
 .../datafactory/models/eloqua_source_py3.py   |   9 +-
 .../models/file_server_linked_service.py      |   2 +-
 .../models/file_server_linked_service_py3.py  |   2 +-
 .../datafactory/models/file_share_dataset.py  |  10 ++
 .../models/file_share_dataset_py3.py          |  12 +-
 .../datafactory/models/file_system_sink.py    |   5 +
 .../models/file_system_sink_py3.py            |   9 +-
 .../datafactory/models/file_system_source.py  |   5 +
 .../models/file_system_source_py3.py          |   9 +-
 .../models/ftp_server_linked_service.py       |   2 +-
 .../models/ftp_server_linked_service_py3.py   |   2 +-
 .../models/google_ad_words_linked_service.py  | 119 +++++++++++++++
 .../google_ad_words_linked_service_py3.py     | 119 +++++++++++++++
 .../models/google_ad_words_object_dataset.py  |  72 +++++++++
 .../google_ad_words_object_dataset_py3.py     |  72 +++++++++
 .../models/google_ad_words_source.py          |  57 ++++++++
 .../models/google_ad_words_source_py3.py      |  57 ++++++++
 .../models/google_big_query_linked_service.py |   2 +-
 .../google_big_query_linked_service_py3.py    |   2 +-
 .../models/google_big_query_source.py         |   5 +
 .../models/google_big_query_source_py3.py     |   9 +-
 .../models/greenplum_linked_service.py        |   2 +-
 .../models/greenplum_linked_service_py3.py    |   2 +-
 .../datafactory/models/greenplum_source.py    |   5 +
 .../models/greenplum_source_py3.py            |   9 +-
 .../models/hbase_linked_service.py            |   2 +-
 .../models/hbase_linked_service_py3.py        |   2 +-
 .../mgmt/datafactory/models/hbase_source.py   |   5 +
 .../datafactory/models/hbase_source_py3.py    |   9 +-
 .../models/hd_insight_linked_service.py       |   8 +-
 .../models/hd_insight_linked_service_py3.py   |  10 +-
 .../hd_insight_on_demand_linked_service.py    |  14 +-
 ...hd_insight_on_demand_linked_service_py3.py |  16 +-
 .../datafactory/models/hdfs_linked_service.py |   2 +-
 .../models/hdfs_linked_service_py3.py         |   2 +-
 .../mgmt/datafactory/models/hdfs_source.py    |   5 +
 .../datafactory/models/hdfs_source_py3.py     |   9 +-
 .../datafactory/models/hive_linked_service.py |   2 +-
 .../models/hive_linked_service_py3.py         |   2 +-
 .../mgmt/datafactory/models/hive_source.py    |   5 +
 .../datafactory/models/hive_source_py3.py     |   9 +-
 .../datafactory/models/http_linked_service.py |   2 +-
 .../models/http_linked_service_py3.py         |   2 +-
 .../mgmt/datafactory/models/http_source.py    |   5 +
 .../datafactory/models/http_source_py3.py     |   9 +-
 .../models/hubspot_linked_service.py          |   2 +-
 .../models/hubspot_linked_service_py3.py      |   2 +-
 .../mgmt/datafactory/models/hubspot_source.py |   5 +
 .../datafactory/models/hubspot_source_py3.py  |   9 +-
 .../models/impala_linked_service.py           |   2 +-
 .../models/impala_linked_service_py3.py       |   2 +-
 .../mgmt/datafactory/models/impala_source.py  |   5 +
 .../datafactory/models/impala_source_py3.py   |   9 +-
 .../datafactory/models/jira_linked_service.py |   2 +-
 .../models/jira_linked_service_py3.py         |   2 +-
 .../mgmt/datafactory/models/jira_source.py    |   5 +
 .../datafactory/models/jira_source_py3.py     |   9 +-
 .../mgmt/datafactory/models/linked_service.py |  64 ++++----
 .../datafactory/models/linked_service_py3.py  |  64 ++++----
 .../models/magento_linked_service.py          |   2 +-
 .../models/magento_linked_service_py3.py      |   2 +-
 .../mgmt/datafactory/models/magento_source.py |   5 +
 .../datafactory/models/magento_source_py3.py  |   9 +-
 .../models/maria_db_linked_service.py         |   2 +-
 .../models/maria_db_linked_service_py3.py     |   2 +-
 .../datafactory/models/maria_db_source.py     |   5 +
 .../datafactory/models/maria_db_source_py3.py |   9 +-
 .../models/marketo_linked_service.py          |   2 +-
 .../models/marketo_linked_service_py3.py      |   2 +-
 .../mgmt/datafactory/models/marketo_source.py |   5 +
 .../datafactory/models/marketo_source_py3.py  |   9 +-
 .../mongo_db_cursor_methods_properties.py     |  53 +++++++
 .../mongo_db_cursor_methods_properties_py3.py |  53 +++++++
 .../models/mongo_db_linked_service.py         |   2 +-
 .../models/mongo_db_linked_service_py3.py     |   2 +-
 .../datafactory/models/mongo_db_source.py     |   5 +
 .../datafactory/models/mongo_db_source_py3.py |   9 +-
 .../models/mongo_db_v2_collection_dataset.py  |  73 ++++++++++
 .../mongo_db_v2_collection_dataset_py3.py     |  73 ++++++++++
 .../models/mongo_db_v2_linked_service.py      |  66 +++++++++
 .../models/mongo_db_v2_linked_service_py3.py  |  66 +++++++++
 .../datafactory/models/mongo_db_v2_source.py  |  71 +++++++++
 .../models/mongo_db_v2_source_py3.py          |  71 +++++++++
 .../models/multiple_pipeline_trigger.py       |   4 +
 .../models/multiple_pipeline_trigger_py3.py   |   8 +-
 .../models/my_sql_linked_service.py           |   2 +-
 .../models/my_sql_linked_service_py3.py       |   2 +-
 .../models/netezza_linked_service.py          |   2 +-
 .../models/netezza_linked_service_py3.py      |   2 +-
 .../mgmt/datafactory/models/netezza_source.py |   5 +
 .../datafactory/models/netezza_source_py3.py  |   9 +-
 .../models/odata_linked_service.py            |  51 ++++++-
 .../models/odata_linked_service_py3.py        |  53 ++++++-
 .../datafactory/models/odbc_linked_service.py |   2 +-
 .../models/odbc_linked_service_py3.py         |   2 +-
 .../mgmt/datafactory/models/odbc_sink.py      |   5 +
 .../mgmt/datafactory/models/odbc_sink_py3.py  |   9 +-
 .../datafactory/models/office365_dataset.py   |  79 ++++++++++
 .../models/office365_dataset_py3.py           |  79 ++++++++++
 .../models/office365_linked_service.py        |  83 +++++++++++
 .../models/office365_linked_service_py3.py    |  83 +++++++++++
 .../datafactory/models/office365_source.py    |  52 +++++++
 .../models/office365_source_py3.py            |  52 +++++++
 .../models/oracle_linked_service.py           |   2 +-
 .../models/oracle_linked_service_py3.py       |   2 +-
 .../oracle_service_cloud_linked_service.py    |  95 ++++++++++++
 ...oracle_service_cloud_linked_service_py3.py |  95 ++++++++++++
 .../oracle_service_cloud_object_dataset.py    |  72 +++++++++
 ...oracle_service_cloud_object_dataset_py3.py |  72 +++++++++
 .../models/oracle_service_cloud_source.py     |  57 ++++++++
 .../models/oracle_service_cloud_source_py3.py |  57 ++++++++
 .../mgmt/datafactory/models/oracle_sink.py    |   5 +
 .../datafactory/models/oracle_sink_py3.py     |   9 +-
 .../mgmt/datafactory/models/oracle_source.py  |   5 +
 .../datafactory/models/oracle_source_py3.py   |   9 +-
 .../models/oracle_table_dataset.py            |   5 +-
 .../models/oracle_table_dataset_py3.py        |   7 +-
 .../models/paypal_linked_service.py           |   2 +-
 .../models/paypal_linked_service_py3.py       |   2 +-
 .../mgmt/datafactory/models/paypal_source.py  |   5 +
 .../datafactory/models/paypal_source_py3.py   |   9 +-
 .../models/phoenix_linked_service.py          |   2 +-
 .../models/phoenix_linked_service_py3.py      |   2 +-
 .../mgmt/datafactory/models/phoenix_source.py |   5 +
 .../datafactory/models/phoenix_source_py3.py  |   9 +-
 .../mgmt/datafactory/models/pipeline_run.py   |  12 ++
 .../datafactory/models/pipeline_run_py3.py    |  12 ++
 .../models/postgre_sql_linked_service.py      |   2 +-
 .../models/postgre_sql_linked_service_py3.py  |   2 +-
 .../models/presto_linked_service.py           |   2 +-
 .../models/presto_linked_service_py3.py       |   2 +-
 .../mgmt/datafactory/models/presto_source.py  |   5 +
 .../datafactory/models/presto_source_py3.py   |   9 +-
 .../models/quick_books_linked_service.py      |   2 +-
 .../models/quick_books_linked_service_py3.py  |   2 +-
 .../datafactory/models/quick_books_source.py  |   5 +
 .../models/quick_books_source_py3.py          |   9 +-
 .../datafactory/models/relational_source.py   |   5 +
 .../models/relational_source_py3.py           |   9 +-
 .../models/rerun_tumbling_window_trigger.py   |   4 +
 .../rerun_tumbling_window_trigger_py3.py      |   8 +-
 .../models/responsys_linked_service.py        |   2 +-
 .../models/responsys_linked_service_py3.py    |   2 +-
 .../datafactory/models/responsys_source.py    |   5 +
 .../models/responsys_source_py3.py            |   9 +-
 .../models/rest_resource_dataset.py           |  93 ++++++++++++
 .../models/rest_resource_dataset_py3.py       |  93 ++++++++++++
 .../models/rest_service_linked_service.py     | 107 ++++++++++++++
 .../models/rest_service_linked_service_py3.py | 107 ++++++++++++++
 .../mgmt/datafactory/models/rest_source.py    |  65 +++++++++
 .../datafactory/models/rest_source_py3.py     |  65 +++++++++
 .../datafactory/models/run_query_filter.py    |   2 +-
 .../models/run_query_filter_py3.py            |   2 +-
 .../models/salesforce_linked_service.py       |   2 +-
 .../models/salesforce_linked_service_py3.py   |   2 +-
 ...lesforce_marketing_cloud_linked_service.py |   2 +-
 ...orce_marketing_cloud_linked_service_py3.py |   2 +-
 .../salesforce_marketing_cloud_source.py      |   5 +
 .../salesforce_marketing_cloud_source_py3.py  |   9 +-
 .../datafactory/models/salesforce_sink.py     |   5 +
 .../datafactory/models/salesforce_sink_py3.py |   9 +-
 .../datafactory/models/salesforce_source.py   |   5 +
 .../models/salesforce_source_py3.py           |   9 +-
 .../models/sap_bw_linked_service.py           |   2 +-
 .../models/sap_bw_linked_service_py3.py       |   2 +-
 .../sap_cloud_for_customer_linked_service.py  |   2 +-
 ...p_cloud_for_customer_linked_service_py3.py |   2 +-
 .../models/sap_cloud_for_customer_sink.py     |   5 +
 .../models/sap_cloud_for_customer_sink_py3.py |   9 +-
 .../models/sap_cloud_for_customer_source.py   |   5 +
 .../sap_cloud_for_customer_source_py3.py      |   9 +-
 .../models/sap_ecc_linked_service.py          |   2 +-
 .../models/sap_ecc_linked_service_py3.py      |   2 +-
 .../mgmt/datafactory/models/sap_ecc_source.py |   5 +
 .../datafactory/models/sap_ecc_source_py3.py  |   9 +-
 .../models/sap_hana_linked_service.py         |   2 +-
 .../models/sap_hana_linked_service_py3.py     |   2 +-
 .../models/sap_open_hub_linked_service.py     |  99 +++++++++++++
 .../models/sap_open_hub_linked_service_py3.py |  99 +++++++++++++
 .../datafactory/models/sap_open_hub_source.py |  53 +++++++
 .../models/sap_open_hub_source_py3.py         |  53 +++++++
 .../models/sap_open_hub_table_dataset.py      |  87 +++++++++++
 .../models/sap_open_hub_table_dataset_py3.py  |  87 +++++++++++
 .../datafactory/models/schedule_trigger.py    |   4 +
 .../models/schedule_trigger_py3.py            |   8 +-
 .../models/service_now_linked_service.py      |   2 +-
 .../models/service_now_linked_service_py3.py  |   2 +-
 .../datafactory/models/service_now_source.py  |   5 +
 .../models/service_now_source_py3.py          |   9 +-
 .../models/sftp_server_linked_service.py      |   2 +-
 .../models/sftp_server_linked_service_py3.py  |   2 +-
 .../models/shopify_linked_service.py          |   2 +-
 .../models/shopify_linked_service_py3.py      |   2 +-
 .../mgmt/datafactory/models/shopify_source.py |   5 +
 .../datafactory/models/shopify_source_py3.py  |   9 +-
 .../models/spark_linked_service.py            |   2 +-
 .../models/spark_linked_service_py3.py        |   2 +-
 .../mgmt/datafactory/models/spark_source.py   |   5 +
 .../datafactory/models/spark_source_py3.py    |   9 +-
 .../mgmt/datafactory/models/sql_dw_sink.py    |   5 +
 .../datafactory/models/sql_dw_sink_py3.py     |   9 +-
 .../mgmt/datafactory/models/sql_dw_source.py  |   5 +
 .../datafactory/models/sql_dw_source_py3.py   |   9 +-
 .../models/sql_server_linked_service.py       |   2 +-
 .../models/sql_server_linked_service_py3.py   |   2 +-
 .../azure/mgmt/datafactory/models/sql_sink.py |   5 +
 .../mgmt/datafactory/models/sql_sink_py3.py   |   9 +-
 .../mgmt/datafactory/models/sql_source.py     |   5 +
 .../mgmt/datafactory/models/sql_source_py3.py |   9 +-
 .../models/square_linked_service.py           |   2 +-
 .../models/square_linked_service_py3.py       |   2 +-
 .../mgmt/datafactory/models/square_source.py  |   5 +
 .../datafactory/models/square_source_py3.py   |   9 +-
 .../datafactory/models/ssis_environment.py    |  51 +++++++
 .../models/ssis_environment_py3.py            |  51 +++++++
 .../models/ssis_environment_reference.py      |  40 +++++
 .../models/ssis_environment_reference_py3.py  |  40 +++++
 .../mgmt/datafactory/models/ssis_folder.py    |  43 ++++++
 .../datafactory/models/ssis_folder_py3.py     |  43 ++++++
 .../models/ssis_object_metadata.py            |   7 +
 .../models/ssis_object_metadata_py3.py        |   7 +
 .../mgmt/datafactory/models/ssis_package.py   |  59 ++++++++
 .../datafactory/models/ssis_package_py3.py    |  59 ++++++++
 .../mgmt/datafactory/models/ssis_parameter.py |  72 +++++++++
 .../datafactory/models/ssis_parameter_py3.py  |  72 +++++++++
 .../mgmt/datafactory/models/ssis_project.py   |  60 ++++++++
 .../datafactory/models/ssis_project_py3.py    |  60 ++++++++
 .../mgmt/datafactory/models/ssis_variable.py  |  52 +++++++
 .../datafactory/models/ssis_variable_py3.py   |  52 +++++++
 .../models/sybase_linked_service.py           |   2 +-
 .../models/sybase_linked_service_py3.py       |   2 +-
 .../datafactory/models/tabular_translator.py  |   6 +
 .../models/tabular_translator_py3.py          |   8 +-
 .../models/teradata_linked_service.py         |   2 +-
 .../models/teradata_linked_service_py3.py     |   2 +-
 .../azure/mgmt/datafactory/models/trigger.py  |   5 +
 .../mgmt/datafactory/models/trigger_py3.py    |   7 +-
 .../models/tumbling_window_trigger.py         |   4 +
 .../models/tumbling_window_trigger_py3.py     |   8 +-
 .../datafactory/models/validation_activity.py |  81 +++++++++++
 .../models/validation_activity_py3.py         |  81 +++++++++++
 .../models/vertica_linked_service.py          |   2 +-
 .../models/vertica_linked_service_py3.py      |   2 +-
 .../mgmt/datafactory/models/vertica_source.py |   5 +
 .../datafactory/models/vertica_source_py3.py  |   9 +-
 .../datafactory/models/web_hook_activity.py   |  92 ++++++++++++
 .../models/web_hook_activity_py3.py           |  92 ++++++++++++
 .../datafactory/models/web_linked_service.py  |   2 +-
 .../models/web_linked_service_py3.py          |   2 +-
 .../mgmt/datafactory/models/web_source.py     |   5 +
 .../mgmt/datafactory/models/web_source_py3.py |   9 +-
 .../datafactory/models/xero_linked_service.py |   2 +-
 .../models/xero_linked_service_py3.py         |   2 +-
 .../mgmt/datafactory/models/xero_source.py    |   5 +
 .../datafactory/models/xero_source_py3.py     |   9 +-
 .../datafactory/models/zoho_linked_service.py |   2 +-
 .../models/zoho_linked_service_py3.py         |   2 +-
 .../mgmt/datafactory/models/zoho_source.py    |   5 +
 .../datafactory/models/zoho_source_py3.py     |   9 +-
 .../operations/exposure_control_operations.py |  72 +++++++
 .../operations/pipelines_operations.py        |  13 +-
 .../azure/mgmt/datafactory/version.py         |   2 +-
 .../azure-mgmt-datafactory/setup.py           |   1 +
 415 files changed, 7979 insertions(+), 457 deletions(-)
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity_py3.py

diff --git a/sdk/datafactory/azure-mgmt-datafactory/MANIFEST.in b/sdk/datafactory/azure-mgmt-datafactory/MANIFEST.in
index 6ceb27f7a96e..e4884efef41b 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/MANIFEST.in
+++ b/sdk/datafactory/azure-mgmt-datafactory/MANIFEST.in
@@ -1,3 +1,4 @@
+recursive-include tests *.py *.yaml
 include *.rst
 include azure/__init__.py
 include azure/mgmt/__init__.py
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
index 46e9bf12bf1a..f8279c1a99bd 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
@@ -95,6 +95,10 @@
     from .schedule_trigger_py3 import ScheduleTrigger
     from .multiple_pipeline_trigger_py3 import MultiplePipelineTrigger
     from .azure_function_linked_service_py3 import AzureFunctionLinkedService
+    from .azure_data_explorer_linked_service_py3 import AzureDataExplorerLinkedService
+    from .google_ad_words_linked_service_py3 import GoogleAdWordsLinkedService
+    from .oracle_service_cloud_linked_service_py3 import OracleServiceCloudLinkedService
+    from .dynamics_ax_linked_service_py3 import DynamicsAXLinkedService
     from .responsys_linked_service_py3 import ResponsysLinkedService
     from .azure_databricks_linked_service_py3 import AzureDatabricksLinkedService
     from .azure_data_lake_analytics_linked_service_py3 import AzureDataLakeAnalyticsLinkedService
@@ -138,10 +142,16 @@
     from .custom_data_source_linked_service_py3 import CustomDataSourceLinkedService
     from .amazon_redshift_linked_service_py3 import AmazonRedshiftLinkedService
     from .amazon_s3_linked_service_py3 import AmazonS3LinkedService
+    from .rest_service_linked_service_py3 import RestServiceLinkedService
+    from .sap_open_hub_linked_service_py3 import SapOpenHubLinkedService
     from .sap_ecc_linked_service_py3 import SapEccLinkedService
     from .sap_cloud_for_customer_linked_service_py3 import SapCloudForCustomerLinkedService
     from .salesforce_linked_service_py3 import SalesforceLinkedService
+    from .office365_linked_service_py3 import Office365LinkedService
+    from .azure_blob_fs_linked_service_py3 import AzureBlobFSLinkedService
     from .azure_data_lake_store_linked_service_py3 import AzureDataLakeStoreLinkedService
+    from .cosmos_db_mongo_db_api_linked_service_py3 import CosmosDbMongoDbApiLinkedService
+    from .mongo_db_v2_linked_service_py3 import MongoDbV2LinkedService
     from .mongo_db_linked_service_py3 import MongoDbLinkedService
     from .cassandra_linked_service_py3 import CassandraLinkedService
     from .web_client_certificate_authentication_py3 import WebClientCertificateAuthentication
@@ -172,6 +182,10 @@
     from .azure_table_storage_linked_service_py3 import AzureTableStorageLinkedService
     from .azure_blob_storage_linked_service_py3 import AzureBlobStorageLinkedService
     from .azure_storage_linked_service_py3 import AzureStorageLinkedService
+    from .google_ad_words_object_dataset_py3 import GoogleAdWordsObjectDataset
+    from .azure_data_explorer_table_dataset_py3 import AzureDataExplorerTableDataset
+    from .oracle_service_cloud_object_dataset_py3 import OracleServiceCloudObjectDataset
+    from .dynamics_ax_resource_dataset_py3 import DynamicsAXResourceDataset
     from .responsys_object_dataset_py3 import ResponsysObjectDataset
     from .salesforce_marketing_cloud_object_dataset_py3 import SalesforceMarketingCloudObjectDataset
     from .vertica_table_dataset_py3 import VerticaTableDataset
@@ -216,7 +230,9 @@
     from .http_dataset_py3 import HttpDataset
     from .azure_search_index_dataset_py3 import AzureSearchIndexDataset
     from .web_table_dataset_py3 import WebTableDataset
+    from .rest_resource_dataset_py3 import RestResourceDataset
     from .sql_server_table_dataset_py3 import SqlServerTableDataset
+    from .sap_open_hub_table_dataset_py3 import SapOpenHubTableDataset
     from .sap_ecc_resource_dataset_py3 import SapEccResourceDataset
     from .sap_cloud_for_customer_resource_dataset_py3 import SapCloudForCustomerResourceDataset
     from .salesforce_object_dataset_py3 import SalesforceObjectDataset
@@ -224,8 +240,12 @@
     from .azure_my_sql_table_dataset_py3 import AzureMySqlTableDataset
     from .oracle_table_dataset_py3 import OracleTableDataset
     from .odata_resource_dataset_py3 import ODataResourceDataset
+    from .cosmos_db_mongo_db_api_collection_dataset_py3 import CosmosDbMongoDbApiCollectionDataset
+    from .mongo_db_v2_collection_dataset_py3 import MongoDbV2CollectionDataset
     from .mongo_db_collection_dataset_py3 import MongoDbCollectionDataset
     from .file_share_dataset_py3 import FileShareDataset
+    from .office365_dataset_py3 import Office365Dataset
+    from .azure_blob_fs_dataset_py3 import AzureBlobFSDataset
     from .azure_data_lake_store_dataset_py3 import AzureDataLakeStoreDataset
     from .dynamics_entity_dataset_py3 import DynamicsEntityDataset
     from .document_db_collection_dataset_py3 import DocumentDbCollectionDataset
@@ -250,6 +270,9 @@
     from .web_activity_py3 import WebActivity
     from .redshift_unload_settings_py3 import RedshiftUnloadSettings
     from .amazon_redshift_source_py3 import AmazonRedshiftSource
+    from .google_ad_words_source_py3 import GoogleAdWordsSource
+    from .oracle_service_cloud_source_py3 import OracleServiceCloudSource
+    from .dynamics_ax_source_py3 import DynamicsAXSource
     from .responsys_source_py3 import ResponsysSource
     from .salesforce_marketing_cloud_source_py3 import SalesforceMarketingCloudSource
     from .vertica_source_py3 import VerticaSource
@@ -281,11 +304,17 @@
     from .azure_postgre_sql_source_py3 import AzurePostgreSqlSource
     from .amazon_mws_source_py3 import AmazonMWSSource
     from .http_source_py3 import HttpSource
+    from .azure_blob_fs_source_py3 import AzureBlobFSSource
     from .azure_data_lake_store_source_py3 import AzureDataLakeStoreSource
+    from .office365_source_py3 import Office365Source
+    from .mongo_db_cursor_methods_properties_py3 import MongoDbCursorMethodsProperties
+    from .cosmos_db_mongo_db_api_source_py3 import CosmosDbMongoDbApiSource
+    from .mongo_db_v2_source_py3 import MongoDbV2Source
     from .mongo_db_source_py3 import MongoDbSource
     from .cassandra_source_py3 import CassandraSource
     from .web_source_py3 import WebSource
     from .oracle_source_py3 import OracleSource
+    from .azure_data_explorer_source_py3 import AzureDataExplorerSource
     from .azure_my_sql_source_py3 import AzureMySqlSource
     from .distcp_settings_py3 import DistcpSettings
     from .hdfs_source_py3 import HdfsSource
@@ -293,6 +322,8 @@
     from .sql_dw_source_py3 import SqlDWSource
     from .stored_procedure_parameter_py3 import StoredProcedureParameter
     from .sql_source_py3 import SqlSource
+    from .rest_source_py3 import RestSource
+    from .sap_open_hub_source_py3 import SapOpenHubSource
     from .sap_ecc_source_py3 import SapEccSource
     from .sap_cloud_for_customer_source_py3 import SapCloudForCustomerSource
     from .salesforce_source_py3 import SalesforceSource
@@ -322,10 +353,13 @@
     from .staging_settings_py3 import StagingSettings
     from .tabular_translator_py3 import TabularTranslator
     from .copy_translator_py3 import CopyTranslator
+    from .cosmos_db_mongo_db_api_sink_py3 import CosmosDbMongoDbApiSink
     from .salesforce_sink_py3 import SalesforceSink
+    from .azure_data_explorer_sink_py3 import AzureDataExplorerSink
     from .dynamics_sink_py3 import DynamicsSink
     from .odbc_sink_py3 import OdbcSink
     from .azure_search_index_sink_py3 import AzureSearchIndexSink
+    from .azure_blob_fs_sink_py3 import AzureBlobFSSink
     from .azure_data_lake_store_sink_py3 import AzureDataLakeStoreSink
     from .oracle_sink_py3 import OracleSink
     from .polybase_settings_py3 import PolybaseSettings
@@ -340,9 +374,11 @@
     from .copy_sink_py3 import CopySink
     from .copy_activity_py3 import CopyActivity
     from .execution_activity_py3 import ExecutionActivity
+    from .web_hook_activity_py3 import WebHookActivity
     from .append_variable_activity_py3 import AppendVariableActivity
     from .set_variable_activity_py3 import SetVariableActivity
     from .filter_activity_py3 import FilterActivity
+    from .validation_activity_py3 import ValidationActivity
     from .until_activity_py3 import UntilActivity
     from .wait_activity_py3 import WaitActivity
     from .for_each_activity_py3 import ForEachActivity
@@ -367,6 +403,13 @@
     from .integration_runtime_compute_properties_py3 import IntegrationRuntimeComputeProperties
     from .managed_integration_runtime_py3 import ManagedIntegrationRuntime
     from .integration_runtime_node_ip_address_py3 import IntegrationRuntimeNodeIpAddress
+    from .ssis_variable_py3 import SsisVariable
+    from .ssis_environment_py3 import SsisEnvironment
+    from .ssis_parameter_py3 import SsisParameter
+    from .ssis_package_py3 import SsisPackage
+    from .ssis_environment_reference_py3 import SsisEnvironmentReference
+    from .ssis_project_py3 import SsisProject
+    from .ssis_folder_py3 import SsisFolder
     from .ssis_object_metadata_py3 import SsisObjectMetadata
     from .ssis_object_metadata_list_response_py3 import SsisObjectMetadataListResponse
     from .integration_runtime_node_monitoring_data_py3 import IntegrationRuntimeNodeMonitoringData
@@ -460,6 +503,10 @@
     from .schedule_trigger import ScheduleTrigger
     from .multiple_pipeline_trigger import MultiplePipelineTrigger
     from .azure_function_linked_service import AzureFunctionLinkedService
+    from .azure_data_explorer_linked_service import AzureDataExplorerLinkedService
+    from .google_ad_words_linked_service import GoogleAdWordsLinkedService
+    from .oracle_service_cloud_linked_service import OracleServiceCloudLinkedService
+    from .dynamics_ax_linked_service import DynamicsAXLinkedService
     from .responsys_linked_service import ResponsysLinkedService
     from .azure_databricks_linked_service import AzureDatabricksLinkedService
     from .azure_data_lake_analytics_linked_service import AzureDataLakeAnalyticsLinkedService
@@ -503,10 +550,16 @@
     from .custom_data_source_linked_service import CustomDataSourceLinkedService
     from .amazon_redshift_linked_service import AmazonRedshiftLinkedService
     from .amazon_s3_linked_service import AmazonS3LinkedService
+    from .rest_service_linked_service import RestServiceLinkedService
+    from .sap_open_hub_linked_service import SapOpenHubLinkedService
     from .sap_ecc_linked_service import SapEccLinkedService
     from .sap_cloud_for_customer_linked_service import SapCloudForCustomerLinkedService
     from .salesforce_linked_service import SalesforceLinkedService
+    from .office365_linked_service import Office365LinkedService
+    from .azure_blob_fs_linked_service import AzureBlobFSLinkedService
     from .azure_data_lake_store_linked_service import AzureDataLakeStoreLinkedService
+    from .cosmos_db_mongo_db_api_linked_service import CosmosDbMongoDbApiLinkedService
+    from .mongo_db_v2_linked_service import MongoDbV2LinkedService
     from .mongo_db_linked_service import MongoDbLinkedService
     from .cassandra_linked_service import CassandraLinkedService
     from .web_client_certificate_authentication import WebClientCertificateAuthentication
@@ -537,6 +590,10 @@
     from .azure_table_storage_linked_service import AzureTableStorageLinkedService
     from .azure_blob_storage_linked_service import AzureBlobStorageLinkedService
     from .azure_storage_linked_service import AzureStorageLinkedService
+    from .google_ad_words_object_dataset import GoogleAdWordsObjectDataset
+    from .azure_data_explorer_table_dataset import AzureDataExplorerTableDataset
+    from .oracle_service_cloud_object_dataset import OracleServiceCloudObjectDataset
+    from .dynamics_ax_resource_dataset import DynamicsAXResourceDataset
     from .responsys_object_dataset import ResponsysObjectDataset
     from .salesforce_marketing_cloud_object_dataset import SalesforceMarketingCloudObjectDataset
     from .vertica_table_dataset import VerticaTableDataset
@@ -581,7 +638,9 @@
     from .http_dataset import HttpDataset
     from .azure_search_index_dataset import AzureSearchIndexDataset
     from .web_table_dataset import WebTableDataset
+    from .rest_resource_dataset import RestResourceDataset
     from .sql_server_table_dataset import SqlServerTableDataset
+    from .sap_open_hub_table_dataset import SapOpenHubTableDataset
     from .sap_ecc_resource_dataset import SapEccResourceDataset
     from .sap_cloud_for_customer_resource_dataset import SapCloudForCustomerResourceDataset
     from .salesforce_object_dataset import SalesforceObjectDataset
@@ -589,8 +648,12 @@
     from .azure_my_sql_table_dataset import AzureMySqlTableDataset
     from .oracle_table_dataset import OracleTableDataset
     from .odata_resource_dataset import ODataResourceDataset
+    from .cosmos_db_mongo_db_api_collection_dataset import CosmosDbMongoDbApiCollectionDataset
+    from .mongo_db_v2_collection_dataset import MongoDbV2CollectionDataset
     from .mongo_db_collection_dataset import MongoDbCollectionDataset
     from .file_share_dataset import FileShareDataset
+    from .office365_dataset import Office365Dataset
+    from .azure_blob_fs_dataset import AzureBlobFSDataset
     from .azure_data_lake_store_dataset import AzureDataLakeStoreDataset
     from .dynamics_entity_dataset import DynamicsEntityDataset
     from .document_db_collection_dataset import DocumentDbCollectionDataset
@@ -615,6 +678,9 @@
     from .web_activity import WebActivity
     from .redshift_unload_settings import RedshiftUnloadSettings
     from .amazon_redshift_source import AmazonRedshiftSource
+    from .google_ad_words_source import GoogleAdWordsSource
+    from .oracle_service_cloud_source import OracleServiceCloudSource
+    from .dynamics_ax_source import DynamicsAXSource
     from .responsys_source import ResponsysSource
     from .salesforce_marketing_cloud_source import SalesforceMarketingCloudSource
     from .vertica_source import VerticaSource
@@ -646,11 +712,17 @@
     from .azure_postgre_sql_source import AzurePostgreSqlSource
     from .amazon_mws_source import AmazonMWSSource
     from .http_source import HttpSource
+    from .azure_blob_fs_source import AzureBlobFSSource
     from .azure_data_lake_store_source import AzureDataLakeStoreSource
+    from .office365_source import Office365Source
+    from .mongo_db_cursor_methods_properties import MongoDbCursorMethodsProperties
+    from .cosmos_db_mongo_db_api_source import CosmosDbMongoDbApiSource
+    from .mongo_db_v2_source import MongoDbV2Source
     from .mongo_db_source import MongoDbSource
     from .cassandra_source import CassandraSource
     from .web_source import WebSource
     from .oracle_source import OracleSource
+    from .azure_data_explorer_source import AzureDataExplorerSource
     from .azure_my_sql_source import AzureMySqlSource
     from .distcp_settings import DistcpSettings
     from .hdfs_source import HdfsSource
@@ -658,6 +730,8 @@
     from .sql_dw_source import SqlDWSource
     from .stored_procedure_parameter import StoredProcedureParameter
     from .sql_source import SqlSource
+    from .rest_source import RestSource
+    from .sap_open_hub_source import SapOpenHubSource
     from .sap_ecc_source import SapEccSource
     from .sap_cloud_for_customer_source import SapCloudForCustomerSource
     from .salesforce_source import SalesforceSource
@@ -687,10 +761,13 @@
     from .staging_settings import StagingSettings
     from .tabular_translator import TabularTranslator
     from .copy_translator import CopyTranslator
+    from .cosmos_db_mongo_db_api_sink import CosmosDbMongoDbApiSink
     from .salesforce_sink import SalesforceSink
+    from .azure_data_explorer_sink import AzureDataExplorerSink
     from .dynamics_sink import DynamicsSink
     from .odbc_sink import OdbcSink
     from .azure_search_index_sink import AzureSearchIndexSink
+    from .azure_blob_fs_sink import AzureBlobFSSink
     from .azure_data_lake_store_sink import AzureDataLakeStoreSink
     from .oracle_sink import OracleSink
     from .polybase_settings import PolybaseSettings
@@ -705,9 +782,11 @@
     from .copy_sink import CopySink
     from .copy_activity import CopyActivity
     from .execution_activity import ExecutionActivity
+    from .web_hook_activity import WebHookActivity
     from .append_variable_activity import AppendVariableActivity
     from .set_variable_activity import SetVariableActivity
     from .filter_activity import FilterActivity
+    from .validation_activity import ValidationActivity
     from .until_activity import UntilActivity
     from .wait_activity import WaitActivity
     from .for_each_activity import ForEachActivity
@@ -732,6 +811,13 @@
     from .integration_runtime_compute_properties import IntegrationRuntimeComputeProperties
     from .managed_integration_runtime import ManagedIntegrationRuntime
     from .integration_runtime_node_ip_address import IntegrationRuntimeNodeIpAddress
+    from .ssis_variable import SsisVariable
+    from .ssis_environment import SsisEnvironment
+    from .ssis_parameter import SsisParameter
+    from .ssis_package import SsisPackage
+    from .ssis_environment_reference import SsisEnvironmentReference
+    from .ssis_project import SsisProject
+    from .ssis_folder import SsisFolder
     from .ssis_object_metadata import SsisObjectMetadata
     from .ssis_object_metadata_list_response import SsisObjectMetadataListResponse
     from .integration_runtime_node_monitoring_data import IntegrationRuntimeNodeMonitoringData
@@ -764,6 +850,7 @@
     DayOfWeek,
     DaysOfWeek,
     RecurrenceFrequency,
+    GoogleAdWordsAuthenticationType,
     SparkServerType,
     SparkThriftTransportProtocol,
     SparkAuthenticationType,
@@ -780,8 +867,10 @@
     SftpAuthenticationType,
     FtpAuthenticationType,
     HttpAuthenticationType,
+    RestServiceAuthenticationType,
     MongoDbAuthenticationType,
     ODataAuthenticationType,
+    ODataAadServicePrincipalCredentialType,
     TeradataAuthenticationType,
     Db2AuthenticationType,
     SybaseAuthenticationType,
@@ -798,6 +887,7 @@
     CopyBehaviorType,
     PolybaseSettingsRejectType,
     SapCloudForCustomerSinkWriteBehavior,
+    WebHookActivityMethod,
     IntegrationRuntimeType,
     SelfHostedIntegrationRuntimeNodeStatus,
     IntegrationRuntimeUpdateResult,
@@ -896,6 +986,10 @@
     'ScheduleTrigger',
     'MultiplePipelineTrigger',
     'AzureFunctionLinkedService',
+    'AzureDataExplorerLinkedService',
+    'GoogleAdWordsLinkedService',
+    'OracleServiceCloudLinkedService',
+    'DynamicsAXLinkedService',
     'ResponsysLinkedService',
     'AzureDatabricksLinkedService',
     'AzureDataLakeAnalyticsLinkedService',
@@ -939,10 +1033,16 @@
     'CustomDataSourceLinkedService',
     'AmazonRedshiftLinkedService',
     'AmazonS3LinkedService',
+    'RestServiceLinkedService',
+    'SapOpenHubLinkedService',
     'SapEccLinkedService',
     'SapCloudForCustomerLinkedService',
     'SalesforceLinkedService',
+    'Office365LinkedService',
+    'AzureBlobFSLinkedService',
     'AzureDataLakeStoreLinkedService',
+    'CosmosDbMongoDbApiLinkedService',
+    'MongoDbV2LinkedService',
     'MongoDbLinkedService',
     'CassandraLinkedService',
     'WebClientCertificateAuthentication',
@@ -973,6 +1073,10 @@
     'AzureTableStorageLinkedService',
     'AzureBlobStorageLinkedService',
     'AzureStorageLinkedService',
+    'GoogleAdWordsObjectDataset',
+    'AzureDataExplorerTableDataset',
+    'OracleServiceCloudObjectDataset',
+    'DynamicsAXResourceDataset',
     'ResponsysObjectDataset',
     'SalesforceMarketingCloudObjectDataset',
     'VerticaTableDataset',
@@ -1017,7 +1121,9 @@
     'HttpDataset',
     'AzureSearchIndexDataset',
     'WebTableDataset',
+    'RestResourceDataset',
     'SqlServerTableDataset',
+    'SapOpenHubTableDataset',
     'SapEccResourceDataset',
     'SapCloudForCustomerResourceDataset',
     'SalesforceObjectDataset',
@@ -1025,8 +1131,12 @@
     'AzureMySqlTableDataset',
     'OracleTableDataset',
     'ODataResourceDataset',
+    'CosmosDbMongoDbApiCollectionDataset',
+    'MongoDbV2CollectionDataset',
     'MongoDbCollectionDataset',
     'FileShareDataset',
+    'Office365Dataset',
+    'AzureBlobFSDataset',
     'AzureDataLakeStoreDataset',
     'DynamicsEntityDataset',
     'DocumentDbCollectionDataset',
@@ -1051,6 +1161,9 @@
     'WebActivity',
     'RedshiftUnloadSettings',
     'AmazonRedshiftSource',
+    'GoogleAdWordsSource',
+    'OracleServiceCloudSource',
+    'DynamicsAXSource',
     'ResponsysSource',
     'SalesforceMarketingCloudSource',
     'VerticaSource',
@@ -1082,11 +1195,17 @@
     'AzurePostgreSqlSource',
     'AmazonMWSSource',
     'HttpSource',
+    'AzureBlobFSSource',
     'AzureDataLakeStoreSource',
+    'Office365Source',
+    'MongoDbCursorMethodsProperties',
+    'CosmosDbMongoDbApiSource',
+    'MongoDbV2Source',
     'MongoDbSource',
     'CassandraSource',
     'WebSource',
     'OracleSource',
+    'AzureDataExplorerSource',
     'AzureMySqlSource',
     'DistcpSettings',
     'HdfsSource',
@@ -1094,6 +1213,8 @@
     'SqlDWSource',
     'StoredProcedureParameter',
     'SqlSource',
+    'RestSource',
+    'SapOpenHubSource',
     'SapEccSource',
     'SapCloudForCustomerSource',
     'SalesforceSource',
@@ -1123,10 +1244,13 @@
     'StagingSettings',
     'TabularTranslator',
     'CopyTranslator',
+    'CosmosDbMongoDbApiSink',
     'SalesforceSink',
+    'AzureDataExplorerSink',
     'DynamicsSink',
     'OdbcSink',
     'AzureSearchIndexSink',
+    'AzureBlobFSSink',
     'AzureDataLakeStoreSink',
     'OracleSink',
     'PolybaseSettings',
@@ -1141,9 +1265,11 @@
     'CopySink',
     'CopyActivity',
     'ExecutionActivity',
+    'WebHookActivity',
     'AppendVariableActivity',
     'SetVariableActivity',
     'FilterActivity',
+    'ValidationActivity',
     'UntilActivity',
     'WaitActivity',
     'ForEachActivity',
@@ -1168,6 +1294,13 @@
     'IntegrationRuntimeComputeProperties',
     'ManagedIntegrationRuntime',
     'IntegrationRuntimeNodeIpAddress',
+    'SsisVariable',
+    'SsisEnvironment',
+    'SsisParameter',
+    'SsisPackage',
+    'SsisEnvironmentReference',
+    'SsisProject',
+    'SsisFolder',
     'SsisObjectMetadata',
     'SsisObjectMetadataListResponse',
     'IntegrationRuntimeNodeMonitoringData',
@@ -1199,6 +1332,7 @@
     'DayOfWeek',
     'DaysOfWeek',
     'RecurrenceFrequency',
+    'GoogleAdWordsAuthenticationType',
     'SparkServerType',
     'SparkThriftTransportProtocol',
     'SparkAuthenticationType',
@@ -1215,8 +1349,10 @@
     'SftpAuthenticationType',
     'FtpAuthenticationType',
     'HttpAuthenticationType',
+    'RestServiceAuthenticationType',
     'MongoDbAuthenticationType',
     'ODataAuthenticationType',
+    'ODataAadServicePrincipalCredentialType',
     'TeradataAuthenticationType',
     'Db2AuthenticationType',
     'SybaseAuthenticationType',
@@ -1233,6 +1369,7 @@
     'CopyBehaviorType',
     'PolybaseSettingsRejectType',
     'SapCloudForCustomerSinkWriteBehavior',
+    'WebHookActivityMethod',
     'IntegrationRuntimeType',
     'SelfHostedIntegrationRuntimeNodeStatus',
     'IntegrationRuntimeUpdateResult',
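
[Illustration, not part of the patch] Once this __init__.py change ships, the new model types are importable from the package root like any other model. A minimal Python sketch, assuming this build of azure-mgmt-datafactory is installed; the endpoint and credential values are placeholders:

    from azure.mgmt.datafactory.models import (
        AzureBlobFSLinkedService,   # ADLS Gen 2 linked service added by this PR
        MongoDbV2LinkedService,
        WebHookActivity,
    )

    # Hypothetical ADLS Gen 2 (Blob FS) linked service definition; every
    # value below is a placeholder, not something this patch prescribes.
    adls_gen2 = AzureBlobFSLinkedService(
        url='https://<account>.dfs.core.windows.net',
        service_principal_id='<application-id>',
        tenant='<tenant-id>',
    )
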
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service.py
index 4531b28777c6..b1e5ed533bba 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service.py
@@ -29,7 +29,7 @@ class AmazonMWSLinkedService(LinkedService):
     :type parameters: dict[str,
      ~azure.mgmt.datafactory.models.ParameterSpecification]
     :param annotations: List of tags that can be used for describing the
-     Dataset.
+     linked service.
     :type annotations: list[object]
     :param type: Required. Constant filled by server.
     :type type: str
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service_py3.py
index 421c20dc2d4a..a8db63933154 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service_py3.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service_py3.py
@@ -29,7 +29,7 @@ class AmazonMWSLinkedService(LinkedService):
     :type parameters: dict[str,
      ~azure.mgmt.datafactory.models.ParameterSpecification]
     :param annotations: List of tags that can be used for describing the
-     Dataset.
+     linked service.
     :type annotations: list[object]
     :param type: Required. Constant filled by server.
     :type type: str
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source.py
index 1cabba2201c7..f9d034e610d4 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source.py
@@ -27,6 +27,10 @@ class AmazonMWSSource(CopySource):
      with resultType string), pattern:
      ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
     :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param query: A query to retrieve data from source. Type: string (or
@@ -42,6 +46,7 @@ class AmazonMWSSource(CopySource):
         'additional_properties': {'key': '', 'type': '{object}'},
         'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
         'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'query': {'key': 'query', 'type': 'object'},
     }
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source_py3.py
index 895281f9af51..9ef7f5b30244 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source_py3.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source_py3.py
@@ -27,6 +27,10 @@ class AmazonMWSSource(CopySource):
      with resultType string), pattern:
      ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
     :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param query: A query to retrieve data from source. Type: string (or
@@ -42,11 +46,12 @@ class AmazonMWSSource(CopySource):
         'additional_properties': {'key': '', 'type': '{object}'},
         'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
         'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'query': {'key': 'query', 'type': 'object'},
     }
 
-    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None:
-        super(AmazonMWSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs)
+    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
+        super(AmazonMWSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
         self.query = query
         self.type = 'AmazonMWSSource'
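
[Illustration, not part of the patch] The hunks above are representative of a change this patch applies to every copy source: CopySource and all of its subclasses gain a max_concurrent_connections setting alongside the existing retry knobs. A minimal Python sketch of how a caller would use it; the query text and values are illustrative only:

    from azure.mgmt.datafactory.models import AmazonMWSSource

    # Cap the number of parallel connections the copy activity opens
    # against the source store, next to the existing retry settings.
    source = AmazonMWSSource(
        query='SELECT * FROM Orders',   # hypothetical query
        source_retry_count=3,
        source_retry_wait='00:00:30',
        max_concurrent_connections=4,
    )
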
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service_py3.py index 7912ad040946..3b84583c6c86 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service_py3.py @@ -29,7 +29,7 @@ class AmazonRedshiftLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source.py index 0fa9a82ff9db..d4fdfa4aa2ba 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source.py @@ -27,6 +27,10 @@ class AmazonRedshiftSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType @@ -48,6 +52,7 @@ class AmazonRedshiftSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source_py3.py index 9542e56e4850..9b34b2ef5b97 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source_py3.py @@ -27,6 +27,10 @@ class AmazonRedshiftSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. 
Type: string (or Expression with resultType @@ -48,13 +52,14 @@ class AmazonRedshiftSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, redshift_unload_settings=None, **kwargs) -> None: - super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, redshift_unload_settings=None, **kwargs) -> None: + super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.redshift_unload_settings = redshift_unload_settings self.type = 'AmazonRedshiftSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset.py index d6262a013b0d..e91a5ba26131 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset.py @@ -55,6 +55,12 @@ class AmazonS3Dataset(Dataset): :param version: The version for the S3 object. Type: string (or Expression with resultType string). :type version: object + :param modified_datetime_start: The start of S3 object's modified + datetime. Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of S3 object's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_end: object :param format: The format of files. 
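[Editor's note, illustrative only: the new modified-datetime window on AmazonS3Dataset restricts a copy to objects changed inside a time range. The linked service name "MyS3" and paths are hypothetical:]

    from azure.mgmt.datafactory.models import (
        AmazonS3Dataset,
        LinkedServiceReference,
    )

    # Both bounds are optional; plain ISO-8601 strings are shown, but any
    # ADF expression object is accepted since the type is 'object'.
    dataset = AmazonS3Dataset(
        linked_service_name=LinkedServiceReference(reference_name="MyS3"),
        bucket_name="my-bucket",
        key="raw/events/",
        modified_datetime_start="2019-05-01T00:00:00Z",
        modified_datetime_end="2019-06-01T00:00:00Z",
    )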
:type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used for the Amazon S3 @@ -82,6 +88,8 @@ class AmazonS3Dataset(Dataset): 'key': {'key': 'typeProperties.key', 'type': 'object'}, 'prefix': {'key': 'typeProperties.prefix', 'type': 'object'}, 'version': {'key': 'typeProperties.version', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } @@ -92,6 +100,8 @@ def __init__(self, **kwargs): self.key = kwargs.get('key', None) self.prefix = kwargs.get('prefix', None) self.version = kwargs.get('version', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) self.format = kwargs.get('format', None) self.compression = kwargs.get('compression', None) self.type = 'AmazonS3Object' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset_py3.py index 3936e9646a09..d84ae48b2a46 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset_py3.py @@ -55,6 +55,12 @@ class AmazonS3Dataset(Dataset): :param version: The version for the S3 object. Type: string (or Expression with resultType string). :type version: object + :param modified_datetime_start: The start of S3 object's modified + datetime. Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of S3 object's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_end: object :param format: The format of files. 
:type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used for the Amazon S3 @@ -82,16 +88,20 @@ class AmazonS3Dataset(Dataset): 'key': {'key': 'typeProperties.key', 'type': 'object'}, 'prefix': {'key': 'typeProperties.prefix', 'type': 'object'}, 'version': {'key': 'typeProperties.version', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } - def __init__(self, *, linked_service_name, bucket_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, key=None, prefix=None, version=None, format=None, compression=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, bucket_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, key=None, prefix=None, version=None, modified_datetime_start=None, modified_datetime_end=None, format=None, compression=None, **kwargs) -> None: super(AmazonS3Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.bucket_name = bucket_name self.key = key self.prefix = prefix self.version = version + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end self.format = format self.compression = compression self.type = 'AmazonS3Object' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service.py index c9ff7261d915..250518c1a7ec 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service.py @@ -29,7 +29,7 @@ class AmazonS3LinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str @@ -40,6 +40,11 @@ class AmazonS3LinkedService(LinkedService): :param secret_access_key: The secret access key of the Amazon S3 Identity and Access Management (IAM) user. :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase + :param service_url: This value specifies the endpoint to access with the + S3 Connector. This is an optional property; change it only if you want to + try a different service endpoint or want to switch between https and http. + Type: string (or Expression with resultType string). + :type service_url: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
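[Editor's note: a hedged sketch of the new service_url override, e.g. for a regional or S3-compatible endpoint. The URL and credentials are placeholders; SecureString is the stock SecretBase implementation already in this package:]

    from azure.mgmt.datafactory.models import (
        AmazonS3LinkedService,
        SecureString,
    )

    s3_ls = AmazonS3LinkedService(
        access_key_id="AKIAEXAMPLE",
        secret_access_key=SecureString(value="<secret>"),
        # Optional; omit to keep the connector's default service endpoint.
        service_url="https://s3.eu-west-1.amazonaws.com",
    )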
@@ -59,6 +64,7 @@ class AmazonS3LinkedService(LinkedService): 'type': {'key': 'type', 'type': 'str'}, 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, + 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -66,5 +72,6 @@ def __init__(self, **kwargs): super(AmazonS3LinkedService, self).__init__(**kwargs) self.access_key_id = kwargs.get('access_key_id', None) self.secret_access_key = kwargs.get('secret_access_key', None) + self.service_url = kwargs.get('service_url', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'AmazonS3' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service_py3.py index 044e8bc299cf..8d136bb71fc0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service_py3.py @@ -29,7 +29,7 @@ class AmazonS3LinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str @@ -40,6 +40,11 @@ class AmazonS3LinkedService(LinkedService): :param secret_access_key: The secret access key of the Amazon S3 Identity and Access Management (IAM) user. :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase + :param service_url: This value specifies the endpoint to access with the + S3 Connector. This is an optional property; change it only if you want to + try a different service endpoint or want to switch between https and http. + Type: string (or Expression with resultType string). + :type service_url: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
@@ -59,12 +64,14 @@ class AmazonS3LinkedService(LinkedService): 'type': {'key': 'type', 'type': 'str'}, 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, + 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_key_id=None, secret_access_key=None, encrypted_credential=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_key_id=None, secret_access_key=None, service_url=None, encrypted_credential=None, **kwargs) -> None: super(AmazonS3LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.access_key_id = access_key_id self.secret_access_key = secret_access_key + self.service_url = service_url self.encrypted_credential = encrypted_credential self.type = 'AmazonS3' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service.py index 2fcf33e8d0c8..986023308e23 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service.py @@ -29,7 +29,7 @@ class AzureBatchLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service_py3.py index 63724f76f13f..e7d33dfb342a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureBatchLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset.py index c3f4ffc118ba..01814cf8f9a9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset.py @@ -52,6 +52,12 @@ class AzureBlobDataset(Dataset): :param file_name: The name of the Azure Blob. Type: string (or Expression with resultType string). 
:type file_name: object + :param modified_datetime_start: The start of Azure Blob's modified + datetime. Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of Azure Blob's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_end: object :param format: The format of the Azure Blob storage. :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used for the blob storage. @@ -76,6 +82,8 @@ class AzureBlobDataset(Dataset): 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, 'table_root_location': {'key': 'typeProperties.tableRootLocation', 'type': 'object'}, 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } @@ -85,6 +93,8 @@ def __init__(self, **kwargs): self.folder_path = kwargs.get('folder_path', None) self.table_root_location = kwargs.get('table_root_location', None) self.file_name = kwargs.get('file_name', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) self.format = kwargs.get('format', None) self.compression = kwargs.get('compression', None) self.type = 'AzureBlob' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset_py3.py index 7567e1fba9fb..706c39deb289 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset_py3.py @@ -52,6 +52,12 @@ class AzureBlobDataset(Dataset): :param file_name: The name of the Azure Blob. Type: string (or Expression with resultType string). :type file_name: object + :param modified_datetime_start: The start of Azure Blob's modified + datetime. Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of Azure Blob's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_end: object :param format: The format of the Azure Blob storage. :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used for the blob storage. 
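[Editor's note: AzureBlobDataset gains the same window as the S3 dataset; a typical use is an expression-driven incremental load. The linked service and pipeline parameters referenced here are invented:]

    from azure.mgmt.datafactory.models import (
        AzureBlobDataset,
        LinkedServiceReference,
    )

    blobs = AzureBlobDataset(
        linked_service_name=LinkedServiceReference(reference_name="MyBlobStore"),
        folder_path="container/landing",
        # Expression objects in the usual ADF JSON shape:
        modified_datetime_start={"value": "@pipeline().parameters.windowStart",
                                 "type": "Expression"},
        modified_datetime_end={"value": "@pipeline().parameters.windowEnd",
                               "type": "Expression"},
    )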
@@ -76,15 +82,19 @@ class AzureBlobDataset(Dataset): 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, 'table_root_location': {'key': 'typeProperties.tableRootLocation', 'type': 'object'}, 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, table_root_location=None, file_name=None, format=None, compression=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, table_root_location=None, file_name=None, modified_datetime_start=None, modified_datetime_end=None, format=None, compression=None, **kwargs) -> None: super(AzureBlobDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.folder_path = folder_path self.table_root_location = table_root_location self.file_name = file_name + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end self.format = format self.compression = compression self.type = 'AzureBlob' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset.py new file mode 100644 index 000000000000..0ef62ff7122f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AzureBlobFSDataset(Dataset): + """The Azure Data Lake Storage Gen2 storage. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param folder_path: The path of the Azure Data Lake Storage Gen2 storage. + Type: string (or Expression with resultType string). + :type folder_path: object + :param file_name: The name of the Azure Data Lake Storage Gen2. Type: + string (or Expression with resultType string). + :type file_name: object + :param format: The format of the Azure Data Lake Storage Gen2 storage. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param compression: The data compression method used for the blob storage. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSDataset, self).__init__(**kwargs) + self.folder_path = kwargs.get('folder_path', None) + self.file_name = kwargs.get('file_name', None) + self.format = kwargs.get('format', None) + self.compression = kwargs.get('compression', None) + self.type = 'AzureBlobFSFile' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset_py3.py new file mode 100644 index 000000000000..82136a683fd3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset_py3.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureBlobFSDataset(Dataset): + """The Azure Data Lake Storage Gen2 storage. + + All required parameters must be populated in order to send to Azure. 
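[Editor's note: for the new ADLS Gen2 dataset (discriminator 'AzureBlobFSFile'), instantiation mirrors the blob dataset. Linked service name and paths below are made up; TextFormat is an existing DatasetStorageFormat in this package:]

    from azure.mgmt.datafactory.models import (
        AzureBlobFSDataset,
        LinkedServiceReference,
        TextFormat,
    )

    adls2_ds = AzureBlobFSDataset(
        linked_service_name=LinkedServiceReference(reference_name="MyAdlsGen2"),
        folder_path="myfilesystem/curated",  # file-system-qualified path
        file_name="part-0000.csv",           # omit to address the whole folder
        format=TextFormat(column_delimiter=","),
    )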
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param folder_path: The path of the Azure Data Lake Storage Gen2 storage. + Type: string (or Expression with resultType string). + :type folder_path: object + :param file_name: The name of the Azure Data Lake Storage Gen2. Type: + string (or Expression with resultType string). + :type file_name: object + :param format: The format of the Azure Data Lake Storage Gen2 storage. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param compression: The data compression method used for the blob storage. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, format=None, compression=None, **kwargs) -> None: + super(AzureBlobFSDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.folder_path = folder_path + self.file_name = file_name + self.format = format + self.compression = compression + self.type = 'AzureBlobFSFile' diff --git 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service.py new file mode 100644 index 000000000000..262ce976227b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureBlobFSLinkedService(LinkedService): + """Azure Data Lake Storage Gen2 linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. Endpoint for the Azure Data Lake Storage Gen2 + service. Type: string (or Expression with resultType string). + :type url: object + :param account_key: Account key for the Azure Data Lake Storage Gen2 + service. Type: string (or Expression with resultType string). + :type account_key: object + :param service_principal_id: The ID of the application used to + authenticate against the Azure Data Lake Storage Gen2 account. Type: + string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The Key of the application used to + authenticate against the Azure Data Lake Storage Gen2 account. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.account_key = kwargs.get('account_key', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureBlobFS' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service_py3.py new file mode 100644 index 000000000000..f0d555078bf7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureBlobFSLinkedService(LinkedService): + """Azure Data Lake Storage Gen2 linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. Endpoint for the Azure Data Lake Storage Gen2 + service. Type: string (or Expression with resultType string). + :type url: object + :param account_key: Account key for the Azure Data Lake Storage Gen2 + service. 
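[Editor's note: a sketch of the two authentication routes this new linked service supports — an account key, or the service-principal trio shown here. All identifiers are placeholders:]

    from azure.mgmt.datafactory.models import (
        AzureBlobFSLinkedService,
        SecureString,
    )

    adls2_ls = AzureBlobFSLinkedService(
        url="https://myaccount.dfs.core.windows.net",  # the only required field
        service_principal_id="00000000-0000-0000-0000-000000000000",
        service_principal_key=SecureString(value="<app-secret>"),
        tenant="contoso.onmicrosoft.com",
        # Alternatively: account_key=..., leaving the three fields above unset.
    )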
Type: string (or Expression with resultType string). + :type account_key: object + :param service_principal_id: The ID of the application used to + authenticate against the Azure Data Lake Storage Gen2 account. Type: + string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The Key of the application used to + authenticate against the Azure Data Lake Storage Gen2 account. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, account_key=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: + super(AzureBlobFSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.account_key = account_key + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential + self.type = 'AzureBlobFS' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py new file mode 100644 index 000000000000..59e070c64fe8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class AzureBlobFSSink(CopySink): + """A copy activity Azure Data Lake Storage Gen2 sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param copy_behavior: The type of copy behavior for copy sink. Possible + values include: 'PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles' + :type copy_behavior: str or + ~azure.mgmt.datafactory.models.CopyBehaviorType + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSSink, self).__init__(**kwargs) + self.copy_behavior = kwargs.get('copy_behavior', None) + self.type = 'AzureBlobFSSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py new file mode 100644 index 000000000000..35ad6a97dbfe --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzureBlobFSSink(CopySink): + """A copy activity Azure Data Lake Storage Gen2 sink. + + All required parameters must be populated in order to send to Azure. 
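[Editor's note, illustrative only: copy_behavior on the new sink takes the CopyBehaviorType values named in the docstring, alongside the settings inherited from CopySink:]

    from azure.mgmt.datafactory.models import AzureBlobFSSink

    sink = AzureBlobFSSink(
        copy_behavior="MergeFiles",      # concatenate source files into one target
        write_batch_timeout="00:05:00",  # inherited CopySink setting
        max_concurrent_connections=8,
    )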
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param copy_behavior: The type of copy behavior for copy sink. Possible + values include: 'PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles' + :type copy_behavior: str or + ~azure.mgmt.datafactory.models.CopyBehaviorType + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(AzureBlobFSSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.copy_behavior = copy_behavior + self.type = 'AzureBlobFSSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source.py new file mode 100644 index 000000000000..0252ffd5ba8f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class AzureBlobFSSource(CopySource): + """A copy activity Azure BlobFS source. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param treat_empty_as_null: Treat empty as null. Type: boolean (or + Expression with resultType boolean). + :type treat_empty_as_null: object + :param skip_header_line_count: Number of header lines to skip from each + blob. Type: integer (or Expression with resultType integer). + :type skip_header_line_count: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, + 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSSource, self).__init__(**kwargs) + self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None) + self.skip_header_line_count = kwargs.get('skip_header_line_count', None) + self.recursive = kwargs.get('recursive', None) + self.type = 'AzureBlobFSSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source_py3.py new file mode 100644 index 000000000000..5b512c1f334f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source_py3.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AzureBlobFSSource(CopySource): + """A copy activity Azure BlobFS source. + + All required parameters must be populated in order to send to Azure. 
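[Editor's note: and the matching source, again a sketch using only fields defined in this patch:]

    from azure.mgmt.datafactory.models import AzureBlobFSSource

    source = AzureBlobFSSource(
        recursive=True,             # walk the folder tree (also the default)
        skip_header_line_count=1,   # drop one header line per file
        treat_empty_as_null=True,
    )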
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param treat_empty_as_null: Treat empty as null. Type: boolean (or + Expression with resultType boolean). + :type treat_empty_as_null: object + :param skip_header_line_count: Number of header lines to skip from each + blob. Type: integer (or Expression with resultType integer). + :type skip_header_line_count: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, + 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, treat_empty_as_null=None, skip_header_line_count=None, recursive=None, **kwargs) -> None: + super(AzureBlobFSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.treat_empty_as_null = treat_empty_as_null + self.skip_header_line_count = skip_header_line_count + self.recursive = recursive + self.type = 'AzureBlobFSSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service.py index e4466c4ce9c9..5246e02ab9b4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service.py @@ -29,7 +29,7 @@ class AzureBlobStorageLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service_py3.py index 4587e0c95dad..ba0a511532b4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureBlobStorageLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service.py new file mode 100644 index 000000000000..5e5a9f7560c6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureDataExplorerLinkedService(LinkedService): + """Azure Data Explorer (Kusto) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of Azure Data Explorer (the + engine's endpoint). URL will be in the format + https://<clusterName>.<regionName>.kusto.windows.net. Type: string (or + Expression with resultType string) + :type endpoint: object + :param service_principal_id: Required. The ID of the service principal + used to authenticate against Azure Data Explorer. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. The key of the service principal + used to authenticate against Kusto. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param database: Required. Database name for connection. Type: string (or + Expression with resultType string). + :type database: object + :param tenant: Required.
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service_py3.py
new file mode 100644
index 000000000000..3cd8ab9c3c19
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service_py3.py
@@ -0,0 +1,86 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class AzureDataExplorerLinkedService(LinkedService):
+    """Azure Data Explorer (Kusto) linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param endpoint: Required. The endpoint of Azure Data Explorer (the
+     engine's endpoint). URL will be in the format
+     https://<clusterName>.<regionName>.kusto.windows.net. Type: string (or
+     Expression with resultType string).
+    :type endpoint: object
+    :param service_principal_id: Required. The ID of the service principal
+     used to authenticate against Azure Data Explorer. Type: string (or
+     Expression with resultType string).
+    :type service_principal_id: object
+    :param service_principal_key: Required. The key of the service principal
+     used to authenticate against Kusto.
+    :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
+    :param database: Required. Database name for connection. Type: string (or
+     Expression with resultType string).
+    :type database: object
+    :param tenant: Required. The name or ID of the tenant to which the service
+     principal belongs. Type: string (or Expression with resultType string).
+    :type tenant: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'endpoint': {'required': True},
+        'service_principal_id': {'required': True},
+        'service_principal_key': {'required': True},
+        'database': {'required': True},
+        'tenant': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'},
+        'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
+        'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
+        'database': {'key': 'typeProperties.database', 'type': 'object'},
+        'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
+    }
+
+    def __init__(self, *, endpoint, service_principal_id, service_principal_key, database, tenant, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None:
+        super(AzureDataExplorerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+        self.endpoint = endpoint
+        self.service_principal_id = service_principal_id
+        self.service_principal_key = service_principal_key
+        self.database = database
+        self.tenant = tenant
+        self.type = 'AzureDataExplorer'
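A sketch of wiring up the new Kusto linked service. The endpoint, identifiers and secret below are placeholders, not values from this patch; SecureString is the SDK's in-line SecretBase implementation.

from azure.mgmt.datafactory.models import (
    AzureDataExplorerLinkedService, SecureString)

# Service-principal authentication against a Kusto cluster (all values fake).
kusto_ls = AzureDataExplorerLinkedService(
    endpoint='https://mycluster.westus2.kusto.windows.net',
    service_principal_id='00000000-0000-0000-0000-000000000000',
    service_principal_key=SecureString(value='<service-principal-key>'),
    database='telemetry',
    tenant='mytenant.onmicrosoft.com',
)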
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink.py
new file mode 100644
index 000000000000..5c204ab769e4
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink.py
@@ -0,0 +1,76 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_sink import CopySink
+
+
+class AzureDataExplorerSink(CopySink):
+    """A copy activity Azure Data Explorer sink.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param write_batch_size: Write batch size. Type: integer (or Expression
+     with resultType integer), minimum: 0.
+    :type write_batch_size: object
+    :param write_batch_timeout: Write batch timeout. Type: string (or
+     Expression with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type write_batch_timeout: object
+    :param sink_retry_count: Sink retry count. Type: integer (or Expression
+     with resultType integer).
+    :type sink_retry_count: object
+    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+     resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type sink_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the sink data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param ingestion_mapping_name: A name of a pre-created csv mapping that
+     was defined on the target Kusto table. Type: string.
+    :type ingestion_mapping_name: object
+    :param ingestion_mapping_as_json: An explicit column mapping description
+     provided in a json format. Type: string.
+    :type ingestion_mapping_as_json: object
+    :param flush_immediately: If set to true, any aggregation will be skipped.
+     Default is false. Type: boolean.
+    :type flush_immediately: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'},
+        'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'},
+        'flush_immediately': {'key': 'flushImmediately', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureDataExplorerSink, self).__init__(**kwargs)
+        self.ingestion_mapping_name = kwargs.get('ingestion_mapping_name', None)
+        self.ingestion_mapping_as_json = kwargs.get('ingestion_mapping_as_json', None)
+        self.flush_immediately = kwargs.get('flush_immediately', None)
+        self.type = 'AzureDataExplorerSink'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink_py3.py
new file mode 100644
index 000000000000..e5cb67bc79b8
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink_py3.py
@@ -0,0 +1,76 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_sink_py3 import CopySink
+
+
+class AzureDataExplorerSink(CopySink):
+    """A copy activity Azure Data Explorer sink.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param write_batch_size: Write batch size. Type: integer (or Expression
+     with resultType integer), minimum: 0.
+    :type write_batch_size: object
+    :param write_batch_timeout: Write batch timeout. Type: string (or
+     Expression with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type write_batch_timeout: object
+    :param sink_retry_count: Sink retry count. Type: integer (or Expression
+     with resultType integer).
+    :type sink_retry_count: object
+    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+     resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type sink_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the sink data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param ingestion_mapping_name: A name of a pre-created csv mapping that
+     was defined on the target Kusto table. Type: string.
+    :type ingestion_mapping_name: object
+    :param ingestion_mapping_as_json: An explicit column mapping description
+     provided in a json format. Type: string.
+    :type ingestion_mapping_as_json: object
+    :param flush_immediately: If set to true, any aggregation will be skipped.
+     Default is false. Type: boolean.
+    :type flush_immediately: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'},
+        'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'},
+        'flush_immediately': {'key': 'flushImmediately', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ingestion_mapping_name=None, ingestion_mapping_as_json=None, flush_immediately=None, **kwargs) -> None:
+        super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.ingestion_mapping_name = ingestion_mapping_name
+        self.ingestion_mapping_as_json = ingestion_mapping_as_json
+        self.flush_immediately = flush_immediately
+        self.type = 'AzureDataExplorerSink'
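Continuing the sketch, the matching sink. 'csv_mapping_1' is a made-up name for a CSV mapping pre-created on the target Kusto table, not something defined by this patch.

from azure.mgmt.datafactory.models import AzureDataExplorerSink

adx_sink = AzureDataExplorerSink(
    ingestion_mapping_name='csv_mapping_1',
    flush_immediately=True,  # skip aggregation; handy for small test loads
)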
Should be a Kusto Query Language + (KQL) query. Type: string (or Expression with resultType string). + :type query: object + :param no_truncation: The name of the Boolean option that controls whether + truncation is applied to result-sets that go beyond a certain row-count + limit. + :type no_truncation: object + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. + :type query_timeout: object + """ + + _validation = { + 'type': {'required': True}, + 'query': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'no_truncation': {'key': 'noTruncation', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataExplorerSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.no_truncation = kwargs.get('no_truncation', None) + self.query_timeout = kwargs.get('query_timeout', None) + self.type = 'AzureDataExplorerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source_py3.py new file mode 100644 index 000000000000..55a6bc78ee04 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AzureDataExplorerSource(CopySource): + """A copy activity Azure Data Explorer (Kusto) source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Required. Database query. Should be a Kusto Query Language + (KQL) query. Type: string (or Expression with resultType string). 
+
+    _validation = {
+        'type': {'required': True},
+        'query': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+        'no_truncation': {'key': 'noTruncation', 'type': 'object'},
+        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
+    }
+
+    def __init__(self, *, query, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, no_truncation=None, query_timeout=None, **kwargs) -> None:
+        super(AzureDataExplorerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.query = query
+        self.no_truncation = no_truncation
+        self.query_timeout = query_timeout
+        self.type = 'AzureDataExplorerSource'
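And the source side of the sketch: query is the only required property, no_truncation lifts Kusto's default row-count cap on large result sets, and query_timeout follows the timespan pattern documented above. The KQL text is illustrative.

from azure.mgmt.datafactory.models import AzureDataExplorerSource

adx_source = AzureDataExplorerSource(
    query='Events | where ingestion_time() > ago(1d)',
    no_truncation=True,
    query_timeout='00:10:00',
)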
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset.py
new file mode 100644
index 000000000000..594d22171f48
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset import Dataset
+
+
+class AzureDataExplorerTableDataset(Dataset):
+    """The Azure Data Explorer (Kusto) dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param table: The table name of the Azure Data Explorer database. Type:
+     string (or Expression with resultType string).
+    :type table: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'table': {'key': 'typeProperties.table', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureDataExplorerTableDataset, self).__init__(**kwargs)
+        self.table = kwargs.get('table', None)
+        self.type = 'AzureDataExplorerTable'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset_py3.py
new file mode 100644
index 000000000000..d36b0f39c2fe
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset_py3.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_py3 import Dataset
+
+
+class AzureDataExplorerTableDataset(Dataset):
+    """The Azure Data Explorer (Kusto) dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table: The table name of the Azure Data Explorer database. Type: + string (or Expression with resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table=None, **kwargs) -> None: + super(AzureDataExplorerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table = table + self.type = 'AzureDataExplorerTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service.py index 73ec2b6f9de9..0381e1b1de65 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service.py @@ -29,7 +29,7 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service_py3.py index b6c4b993cae7..93250e2cef76 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. 
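A sketch of the dataset just added; the linked service reference name and table are placeholders, and the linked service itself would be the Kusto one defined earlier in this patch.

from azure.mgmt.datafactory.models import (
    AzureDataExplorerTableDataset, LinkedServiceReference)

adx_dataset = AzureDataExplorerTableDataset(
    linked_service_name=LinkedServiceReference(
        reference_name='AzureDataExplorer1'),
    table='Events',
)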
:type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset.py index e0299ba2bcad..de15057f78ed 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset.py @@ -43,8 +43,8 @@ class AzureDataLakeStoreDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param folder_path: Required. Path to the folder in the Azure Data Lake - Store. Type: string (or Expression with resultType string). + :param folder_path: Path to the folder in the Azure Data Lake Store. Type: + string (or Expression with resultType string). :type folder_path: object :param file_name: The name of the file in the Azure Data Lake Store. Type: string (or Expression with resultType string). @@ -59,7 +59,6 @@ class AzureDataLakeStoreDataset(Dataset): _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'folder_path': {'required': True}, } _attribute_map = { diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset_py3.py index 62e761dc9695..d2df0ffebe7e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset_py3.py @@ -43,8 +43,8 @@ class AzureDataLakeStoreDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param folder_path: Required. Path to the folder in the Azure Data Lake - Store. Type: string (or Expression with resultType string). + :param folder_path: Path to the folder in the Azure Data Lake Store. Type: + string (or Expression with resultType string). :type folder_path: object :param file_name: The name of the file in the Azure Data Lake Store. Type: string (or Expression with resultType string). 
@@ -59,7 +59,6 @@ class AzureDataLakeStoreDataset(Dataset): _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'folder_path': {'required': True}, } _attribute_map = { @@ -78,7 +77,7 @@ class AzureDataLakeStoreDataset(Dataset): 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } - def __init__(self, *, linked_service_name, folder_path, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, file_name=None, format=None, compression=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, format=None, compression=None, **kwargs) -> None: super(AzureDataLakeStoreDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.folder_path = folder_path self.file_name = file_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service.py index 0c39866887ef..f08e086cb500 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service.py @@ -29,7 +29,7 @@ class AzureDataLakeStoreLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service_py3.py index 10e3b72e654e..7b8ab293c0cf 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureDataLakeStoreLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py index ceaabf438097..145c7c61358a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py @@ -34,6 +34,10 @@ class AzureDataLakeStoreSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
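With folder_path demoted from required to optional by this hunk, a dataset can now be declared from the linked service reference alone, e.g. when the path is resolved at runtime. A sketch with a placeholder reference name:

from azure.mgmt.datafactory.models import (
    AzureDataLakeStoreDataset, LinkedServiceReference)

adls_dataset = AzureDataLakeStoreDataset(
    linked_service_name=LinkedServiceReference(reference_name='AdlsStore1'))
assert adls_dataset.folder_path is None  # no longer enforced by _validation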
:type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param copy_behavior: The type of copy behavior for copy sink. Possible @@ -52,6 +56,7 @@ class AzureDataLakeStoreSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py index 449c7b0a2a3e..d3e16339fef2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py @@ -34,6 +34,10 @@ class AzureDataLakeStoreSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param copy_behavior: The type of copy behavior for copy sink. 
Possible @@ -52,11 +56,12 @@ class AzureDataLakeStoreSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, copy_behavior=None, **kwargs) -> None: - super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.copy_behavior = copy_behavior self.type = 'AzureDataLakeStoreSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source.py index 60a6599c8fbb..9d2046049a30 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source.py @@ -27,6 +27,10 @@ class AzureDataLakeStoreSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param recursive: If true, files under the folder path will be read @@ -43,6 +47,7 @@ class AzureDataLakeStoreSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'recursive': {'key': 'recursive', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source_py3.py index d228d787bff4..e1d883972220 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source_py3.py @@ -27,6 +27,10 @@ class AzureDataLakeStoreSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
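The widened sink constructor in use; the values are illustrative, not defaults.

from azure.mgmt.datafactory.models import AzureDataLakeStoreSink

adls_sink = AzureDataLakeStoreSink(
    copy_behavior='PreserveHierarchy',
    max_concurrent_connections=8,  # new pass-through to CopySink
)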
:type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param recursive: If true, files under the folder path will be read @@ -43,11 +47,12 @@ class AzureDataLakeStoreSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'recursive': {'key': 'recursive', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, recursive=None, **kwargs) -> None: - super(AzureDataLakeStoreSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, **kwargs) -> None: + super(AzureDataLakeStoreSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.type = 'AzureDataLakeStoreSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py index c036b299fff0..6cc4c12674cb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py @@ -29,7 +29,7 @@ class AzureDatabricksLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str @@ -64,6 +64,16 @@ class AzureDatabricksLinkedService(LinkedService): :type new_cluster_spark_env_vars: dict[str, object] :param new_cluster_custom_tags: Additional tags for cluster resources. :type new_cluster_custom_tags: dict[str, object] + :param new_cluster_driver_node_type: The driver node type for the new + cluster. Type: string (or Expression with resultType string). + :type new_cluster_driver_node_type: object + :param new_cluster_init_scripts: User-defined initialization scripts for + the new cluster. Type: array of strings (or Expression with resultType + array of strings). + :type new_cluster_init_scripts: object + :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new + cluster. Type: boolean (or Expression with resultType boolean). + :type new_cluster_enable_elastic_disk: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
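The same addition lands on the source side; again a sketch with illustrative values.

from azure.mgmt.datafactory.models import AzureDataLakeStoreSource

adls_source = AzureDataLakeStoreSource(
    recursive=True,
    max_concurrent_connections=8,
)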
@@ -92,6 +102,9 @@ class AzureDatabricksLinkedService(LinkedService): 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, + 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, + 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, + 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -106,5 +119,8 @@ def __init__(self, **kwargs): self.new_cluster_spark_conf = kwargs.get('new_cluster_spark_conf', None) self.new_cluster_spark_env_vars = kwargs.get('new_cluster_spark_env_vars', None) self.new_cluster_custom_tags = kwargs.get('new_cluster_custom_tags', None) + self.new_cluster_driver_node_type = kwargs.get('new_cluster_driver_node_type', None) + self.new_cluster_init_scripts = kwargs.get('new_cluster_init_scripts', None) + self.new_cluster_enable_elastic_disk = kwargs.get('new_cluster_enable_elastic_disk', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'AzureDatabricks' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service_py3.py index 8060311a4e0d..6299dac1e3f2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureDatabricksLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str @@ -64,6 +64,16 @@ class AzureDatabricksLinkedService(LinkedService): :type new_cluster_spark_env_vars: dict[str, object] :param new_cluster_custom_tags: Additional tags for cluster resources. :type new_cluster_custom_tags: dict[str, object] + :param new_cluster_driver_node_type: The driver node type for the new + cluster. Type: string (or Expression with resultType string). + :type new_cluster_driver_node_type: object + :param new_cluster_init_scripts: User-defined initialization scripts for + the new cluster. Type: array of strings (or Expression with resultType + array of strings). + :type new_cluster_init_scripts: object + :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new + cluster. Type: boolean (or Expression with resultType boolean). + :type new_cluster_enable_elastic_disk: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
@@ -92,10 +102,13 @@ class AzureDatabricksLinkedService(LinkedService): 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, + 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, + 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, + 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, *, domain, access_token, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, existing_cluster_id=None, new_cluster_version=None, new_cluster_num_of_worker=None, new_cluster_node_type=None, new_cluster_spark_conf=None, new_cluster_spark_env_vars=None, new_cluster_custom_tags=None, encrypted_credential=None, **kwargs) -> None: + def __init__(self, *, domain, access_token, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, existing_cluster_id=None, new_cluster_version=None, new_cluster_num_of_worker=None, new_cluster_node_type=None, new_cluster_spark_conf=None, new_cluster_spark_env_vars=None, new_cluster_custom_tags=None, new_cluster_driver_node_type=None, new_cluster_init_scripts=None, new_cluster_enable_elastic_disk=None, encrypted_credential=None, **kwargs) -> None: super(AzureDatabricksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.domain = domain self.access_token = access_token @@ -106,5 +119,8 @@ def __init__(self, *, domain, access_token, additional_properties=None, connect_ self.new_cluster_spark_conf = new_cluster_spark_conf self.new_cluster_spark_env_vars = new_cluster_spark_env_vars self.new_cluster_custom_tags = new_cluster_custom_tags + self.new_cluster_driver_node_type = new_cluster_driver_node_type + self.new_cluster_init_scripts = new_cluster_init_scripts + self.new_cluster_enable_elastic_disk = new_cluster_enable_elastic_disk self.encrypted_credential = encrypted_credential self.type = 'AzureDatabricks' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service.py index 44917d8d23b9..2ed5b870a778 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service.py @@ -29,7 +29,7 @@ class AzureFunctionLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
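The three new cluster properties in context, on a job-cluster Databricks linked service. Everything below is a placeholder sketch: the domain, token, node types and DBFS script path are not values from this patch.

from azure.mgmt.datafactory.models import (
    AzureDatabricksLinkedService, SecureString)

dbx_ls = AzureDatabricksLinkedService(
    domain='https://westus2.azuredatabricks.net',
    access_token=SecureString(value='<personal-access-token>'),
    new_cluster_version='5.3.x-scala2.11',
    new_cluster_num_of_worker='2:8',  # 'min:max' string enables autoscale
    new_cluster_node_type='Standard_DS3_v2',
    new_cluster_driver_node_type='Standard_DS4_v2',      # new in this patch
    new_cluster_init_scripts=['dbfs:/init/install.sh'],  # new in this patch
    new_cluster_enable_elastic_disk=True,                # new in this patch
)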
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service_py3.py index b6b0f9600da1..a1bfdbe8b6c1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureFunctionLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service.py index c7ad622591ee..768f0d83ae93 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service.py @@ -29,7 +29,7 @@ class AzureKeyVaultLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service_py3.py index e13cf7fb527a..50f4a58a5a1b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureKeyVaultLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service.py index a6a19be4069b..08dfec98a6bf 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service.py @@ -29,7 +29,7 @@ class AzureMLLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service_py3.py index 0fff3cea9b8a..c77a692adc03 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureMLLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service.py index 64a072f1f38b..aedbdbb73eb5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service.py @@ -29,7 +29,7 @@ class AzureMySqlLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service_py3.py index dcf4861da573..57692275f564 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureMySqlLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source.py index 7409be73bd09..823336432567 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source.py @@ -27,6 +27,10 @@ class AzureMySqlSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. 
Type: string (or Expression with resultType @@ -42,6 +46,7 @@ class AzureMySqlSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source_py3.py index 4e1d35981f78..7030738d2615 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source_py3.py @@ -27,6 +27,10 @@ class AzureMySqlSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType @@ -42,11 +46,12 @@ class AzureMySqlSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'AzureMySqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service.py index 89c9b29cdcde..92359d6d6a10 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service.py @@ -29,7 +29,7 @@ class AzurePostgreSqlLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
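# Note: each copy source in this patch gets the same three-part change: a
# docstring entry for the new optional property, a `maxConcurrentConnections`
# row in `_attribute_map`, and (in the *_py3 variants) a keyword-only
# parameter forwarded to `CopySource.__init__`. A usage sketch with
# illustrative values:

from azure.mgmt.datafactory.models import AzureMySqlSource

source = AzureMySqlSource(
    query="SELECT * FROM sales",   # existing property
    max_concurrent_connections=4,  # new optional property from this patch
)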
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service_py3.py index e885498530ed..47f8f17980f8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service_py3.py @@ -29,7 +29,7 @@ class AzurePostgreSqlLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source.py index 816e066ecebb..e0cd62fd8028 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source.py @@ -27,6 +27,10 @@ class AzurePostgreSqlSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class AzurePostgreSqlSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source_py3.py index 2af53cf91da2..0362b0dca390 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source_py3.py @@ -27,6 +27,10 @@ class AzurePostgreSqlSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class AzurePostgreSqlSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'AzurePostgreSqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink.py index 5ecb911fb94a..9f3a63db4978 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink.py @@ -34,6 +34,10 @@ class AzureQueueSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str """ @@ -48,6 +52,7 @@ class AzureQueueSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink_py3.py index debc14c0c7e1..db2fb60ddb1e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink_py3.py @@ -34,6 +34,10 @@ class AzureQueueSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. 
:type type: str """ @@ -48,9 +52,10 @@ class AzureQueueSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, **kwargs) -> None: - super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: + super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'AzureQueueSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py index c09cd94bfb51..9aae64af8da0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py @@ -34,6 +34,10 @@ class AzureSearchIndexSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: Specify the write behavior when upserting documents @@ -52,6 +56,7 @@ class AzureSearchIndexSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py index 9ed48b36a588..3cd887a2512c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py @@ -34,6 +34,10 @@ class AzureSearchIndexSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). 
+ :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: Specify the write behavior when upserting documents @@ -52,11 +56,12 @@ class AzureSearchIndexSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, write_behavior=None, **kwargs) -> None: - super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: + super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.write_behavior = write_behavior self.type = 'AzureSearchIndexSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service.py index 18979ed87ca0..782799cd5b28 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service.py @@ -29,7 +29,7 @@ class AzureSearchLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service_py3.py index 6cc3cdc98b89..8589c3aead91 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureSearchLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
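# Note: sinks receive the mirror-image change: `max_concurrent_connections`
# is slotted between `sink_retry_wait` and `type` in `_attribute_map` and
# forwarded to `CopySink.__init__`. Because the new parameter is keyword-only
# and defaults to None, existing call sites keep working unchanged. Sketch:

from azure.mgmt.datafactory.models import AzureSearchIndexSink

sink = AzureSearchIndexSink(
    write_behavior="Upload",       # existing writeBehavior property
    max_concurrent_connections=2,  # new optional property from this patch
)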
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service.py index 68ad549ed733..0da66637a04f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service.py @@ -29,7 +29,7 @@ class AzureSqlDatabaseLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service_py3.py index afd58ae43354..dbcf6c88b134 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureSqlDatabaseLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service.py index d4aa961cb424..cc7c9d58d19f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service.py @@ -29,7 +29,7 @@ class AzureSqlDWLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service_py3.py index a78551dff273..5c75f3904b37 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureSqlDWLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service.py index 711b09a80004..202dd7229b90 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service.py @@ -29,7 +29,7 @@ class AzureStorageLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service_py3.py index 428fb82e871a..4fac19b70849 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureStorageLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink.py index faba497cc734..3459c9ad3ba1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink.py @@ -34,6 +34,10 @@ class AzureTableSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. 
:type type: str :param azure_table_default_partition_key_value: Azure Table default @@ -60,6 +64,7 @@ class AzureTableSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'}, 'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink_py3.py index 630df4f1f606..a15247544879 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink_py3.py @@ -34,6 +34,10 @@ class AzureTableSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param azure_table_default_partition_key_value: Azure Table default @@ -60,6 +64,7 @@ class AzureTableSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'}, 'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'}, @@ -67,8 +72,8 @@ class AzureTableSink(CopySink): 'azure_table_insert_type': {'key': 'azureTableInsertType', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, azure_table_default_partition_key_value=None, azure_table_partition_key_name=None, azure_table_row_key_name=None, azure_table_insert_type=None, **kwargs) -> None: - super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, azure_table_default_partition_key_value=None, azure_table_partition_key_name=None, azure_table_row_key_name=None, azure_table_insert_type=None, **kwargs) -> None: + super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.azure_table_default_partition_key_value = 
azure_table_default_partition_key_value self.azure_table_partition_key_name = azure_table_partition_key_name self.azure_table_row_key_name = azure_table_row_key_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source.py index f4046c989f4e..fa7ead73eaa9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source.py @@ -27,6 +27,10 @@ class AzureTableSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param azure_table_source_query: Azure Table source query. Type: string @@ -46,6 +50,7 @@ class AzureTableSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'}, 'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source_py3.py index 30ca05775f27..efbac5613219 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source_py3.py @@ -27,6 +27,10 @@ class AzureTableSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param azure_table_source_query: Azure Table source query. 
Type: string @@ -46,13 +50,14 @@ class AzureTableSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'}, 'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, azure_table_source_query=None, azure_table_source_ignore_table_not_found=None, **kwargs) -> None: - super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, azure_table_source_query=None, azure_table_source_ignore_table_not_found=None, **kwargs) -> None: + super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.azure_table_source_query = azure_table_source_query self.azure_table_source_ignore_table_not_found = azure_table_source_ignore_table_not_found self.type = 'AzureTableSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service.py index 152fae6368a6..c2a8c2498ea6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service.py @@ -29,7 +29,7 @@ class AzureTableStorageLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service_py3.py index 533ad3509483..8d4e62c4f3e6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service_py3.py @@ -29,7 +29,7 @@ class AzureTableStorageLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger.py index 681cc44d278b..673d34167fed 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger.py @@ -30,6 +30,9 @@ class BlobEventsTrigger(MultiplePipelineTrigger): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param pipelines: Pipelines that need to be started. @@ -64,6 +67,7 @@ class BlobEventsTrigger(MultiplePipelineTrigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, 'blob_path_begins_with': {'key': 'typeProperties.blobPathBeginsWith', 'type': 'str'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger_py3.py index 08d9c542f4af..fb65a420a2cd 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger_py3.py @@ -30,6 +30,9 @@ class BlobEventsTrigger(MultiplePipelineTrigger): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param pipelines: Pipelines that need to be started. 
@@ -64,6 +67,7 @@ class BlobEventsTrigger(MultiplePipelineTrigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, 'blob_path_begins_with': {'key': 'typeProperties.blobPathBeginsWith', 'type': 'str'}, @@ -72,8 +76,8 @@ class BlobEventsTrigger(MultiplePipelineTrigger): 'scope': {'key': 'typeProperties.scope', 'type': 'str'}, } - def __init__(self, *, events, scope: str, additional_properties=None, description: str=None, pipelines=None, blob_path_begins_with: str=None, blob_path_ends_with: str=None, **kwargs) -> None: - super(BlobEventsTrigger, self).__init__(additional_properties=additional_properties, description=description, pipelines=pipelines, **kwargs) + def __init__(self, *, events, scope: str, additional_properties=None, description: str=None, annotations=None, pipelines=None, blob_path_begins_with: str=None, blob_path_ends_with: str=None, **kwargs) -> None: + super(BlobEventsTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) self.blob_path_begins_with = blob_path_begins_with self.blob_path_ends_with = blob_path_ends_with self.events = events diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py index fe90f5836faf..8a050cf9cc64 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py @@ -34,6 +34,10 @@ class BlobSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param blob_writer_overwrite_files: Blob writer overwrite files. Type: @@ -61,6 +65,7 @@ class BlobSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py index 1d6ac96aff6e..8fca0ac5cacc 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py @@ -34,6 +34,10 @@ class BlobSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
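# Note: the trigger models now accept `annotations` too, so a trigger can
# carry descriptive tags next to its event filters. A minimal sketch for
# BlobEventsTrigger as changed above (scope and names are hypothetical):

from azure.mgmt.datafactory.models import (
    BlobEventsTrigger,
    PipelineReference,
    TriggerPipelineReference,
)

trigger = BlobEventsTrigger(
    events=["Microsoft.Storage.BlobCreated"],
    scope="/subscriptions/00000000-0000-0000-0000-000000000000"
          "/resourceGroups/rg/providers/Microsoft.Storage"
          "/storageAccounts/examplestorage",  # hypothetical scope
    blob_path_begins_with="/landing/blobs/",
    annotations=["zone:landing"],  # new optional keyword from this patch
    pipelines=[TriggerPipelineReference(
        pipeline_reference=PipelineReference(reference_name="IngestPipeline"))],
)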
:type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param blob_writer_overwrite_files: Blob writer overwrite files. Type: @@ -61,6 +65,7 @@ class BlobSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, @@ -68,8 +73,8 @@ class BlobSink(CopySink): 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, blob_writer_overwrite_files=None, blob_writer_date_time_format=None, blob_writer_add_header=None, copy_behavior=None, **kwargs) -> None: - super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, blob_writer_overwrite_files=None, blob_writer_date_time_format=None, blob_writer_add_header=None, copy_behavior=None, **kwargs) -> None: + super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.blob_writer_overwrite_files = blob_writer_overwrite_files self.blob_writer_date_time_format = blob_writer_date_time_format self.blob_writer_add_header = blob_writer_add_header diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source.py index f563d0af1e2d..ab4313a890cb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source.py @@ -27,6 +27,10 @@ class BlobSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param treat_empty_as_null: Treat empty as null. 
Type: boolean (or @@ -49,6 +53,7 @@ class BlobSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source_py3.py index 5b9dc775f069..78d90cc61e13 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source_py3.py @@ -27,6 +27,10 @@ class BlobSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param treat_empty_as_null: Treat empty as null. Type: boolean (or @@ -49,14 +53,15 @@ class BlobSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, treat_empty_as_null=None, skip_header_line_count=None, recursive=None, **kwargs) -> None: - super(BlobSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, treat_empty_as_null=None, skip_header_line_count=None, recursive=None, **kwargs) -> None: + super(BlobSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.treat_empty_as_null = treat_empty_as_null self.skip_header_line_count = skip_header_line_count self.recursive = recursive diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger.py index 6abdece68966..4fd5b5c17100 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger.py @@ -30,6 +30,9 @@ class BlobTrigger(MultiplePipelineTrigger): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + 
:param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param pipelines: Pipelines that need to be started. @@ -59,6 +62,7 @@ class BlobTrigger(MultiplePipelineTrigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'str'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger_py3.py index 2c80ac605368..cccffd881bfb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger_py3.py @@ -30,6 +30,9 @@ class BlobTrigger(MultiplePipelineTrigger): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param pipelines: Pipelines that need to be started. @@ -59,6 +62,7 @@ class BlobTrigger(MultiplePipelineTrigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'str'}, @@ -66,8 +70,8 @@ class BlobTrigger(MultiplePipelineTrigger): 'linked_service': {'key': 'typeProperties.linkedService', 'type': 'LinkedServiceReference'}, } - def __init__(self, *, folder_path: str, max_concurrency: int, linked_service, additional_properties=None, description: str=None, pipelines=None, **kwargs) -> None: - super(BlobTrigger, self).__init__(additional_properties=additional_properties, description=description, pipelines=pipelines, **kwargs) + def __init__(self, *, folder_path: str, max_concurrency: int, linked_service, additional_properties=None, description: str=None, annotations=None, pipelines=None, **kwargs) -> None: + super(BlobTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) self.folder_path = folder_path self.max_concurrency = max_concurrency self.linked_service = linked_service diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service.py index 974ce49a1c62..ebba2be42028 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service.py @@ -29,7 +29,7 @@ class CassandraLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List 
of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service_py3.py index dbc74f10002f..f22f303cc61d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service_py3.py @@ -29,7 +29,7 @@ class CassandraLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py index fdd0a228d001..e7ba96c18682 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py @@ -27,6 +27,10 @@ class CassandraSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. Should be a SQL-92 query expression or @@ -53,6 +57,7 @@ class CassandraSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py index 323d85d1e742..bd95d158b868 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py @@ -27,6 +27,10 @@ class CassandraSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. 
Should be a SQL-92 query expression or @@ -53,13 +57,14 @@ class CassandraSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, consistency_level=None, **kwargs) -> None: - super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, consistency_level=None, **kwargs) -> None: + super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.consistency_level = consistency_level self.type = 'CassandraSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service.py index 7b85f1400ff6..04179d0d1f53 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service.py @@ -29,7 +29,7 @@ class ConcurLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service_py3.py index 6e17a2c9cc8e..4411db6d2856 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service_py3.py @@ -29,7 +29,7 @@ class ConcurLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source.py index f8053415520c..11ae557c0cda 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source.py @@ -27,6 +27,10 @@ class ConcurSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class ConcurSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source_py3.py index db9104869417..ac8ae8fb5a91 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source_py3.py @@ -27,6 +27,10 @@ class ConcurSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class ConcurSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'ConcurSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity.py index 16581581786b..2242bc36beb2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity.py @@ -16,9 +16,9 @@ class ControlActivity(Activity): """Base class for all control activities like IfCondition, ForEach , Until. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AppendVariableActivity, SetVariableActivity, - FilterActivity, UntilActivity, WaitActivity, ForEachActivity, - IfConditionActivity, ExecutePipelineActivity + sub-classes are: WebHookActivity, AppendVariableActivity, + SetVariableActivity, FilterActivity, ValidationActivity, UntilActivity, + WaitActivity, ForEachActivity, IfConditionActivity, ExecutePipelineActivity All required parameters must be populated in order to send to Azure. @@ -52,7 +52,7 @@ class ControlActivity(Activity): } _subtype_map = { - 'type': {'AppendVariable': 'AppendVariableActivity', 'SetVariable': 'SetVariableActivity', 'Filter': 'FilterActivity', 'Until': 'UntilActivity', 'Wait': 'WaitActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'ExecutePipeline': 'ExecutePipelineActivity'} + 'type': {'WebHook': 'WebHookActivity', 'AppendVariable': 'AppendVariableActivity', 'SetVariable': 'SetVariableActivity', 'Filter': 'FilterActivity', 'Validation': 'ValidationActivity', 'Until': 'UntilActivity', 'Wait': 'WaitActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'ExecutePipeline': 'ExecutePipelineActivity'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity_py3.py index 739d8b9c311b..0aabd99d741f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity_py3.py @@ -16,9 +16,9 @@ class ControlActivity(Activity): """Base class for all control activities like IfCondition, ForEach, Until. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AppendVariableActivity, SetVariableActivity, - FilterActivity, UntilActivity, WaitActivity, ForEachActivity, - IfConditionActivity, ExecutePipelineActivity + sub-classes are: WebHookActivity, AppendVariableActivity, + SetVariableActivity, FilterActivity, ValidationActivity, UntilActivity, + WaitActivity, ForEachActivity, IfConditionActivity, ExecutePipelineActivity All required parameters must be populated in order to send to Azure.
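The _subtype_map change in the hunk below is what makes the two new control activities deserializable: msrest resolves the wire value of 'type' through this map to pick the concrete model class. A standalone sketch of that dispatch mechanism (the keys mirror the patch; the resolve function is ours, not the SDK's actual deserializer):

    # Illustration of msrest-style discriminator dispatch.
    _subtype_map = {
        'WebHook': 'WebHookActivity',        # new in this patch
        'Validation': 'ValidationActivity',  # new in this patch
        'Wait': 'WaitActivity',
        'ExecutePipeline': 'ExecutePipelineActivity',
    }

    def resolve_activity(payload):
        # msrest looks up payload['type'] in the map registered on the base
        # class and instantiates the mapped subclass; unmapped values fall
        # back to the base class.
        return _subtype_map.get(payload.get('type'), 'ControlActivity')

    print(resolve_activity({'type': 'Validation'}))  # -> ValidationActivity
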
@@ -52,7 +52,7 @@ class ControlActivity(Activity): } _subtype_map = { - 'type': {'AppendVariable': 'AppendVariableActivity', 'SetVariable': 'SetVariableActivity', 'Filter': 'FilterActivity', 'Until': 'UntilActivity', 'Wait': 'WaitActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'ExecutePipeline': 'ExecutePipelineActivity'} + 'type': {'WebHook': 'WebHookActivity', 'AppendVariable': 'AppendVariableActivity', 'SetVariable': 'SetVariableActivity', 'Filter': 'FilterActivity', 'Validation': 'ValidationActivity', 'Until': 'UntilActivity', 'Wait': 'WaitActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'ExecutePipeline': 'ExecutePipelineActivity'} } def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py index 9182efe2469a..f73d34fcb3ce 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py @@ -65,6 +65,8 @@ class CopyActivity(ExecutionActivity): settings when EnableSkipIncompatibleRow is true. :type redirect_incompatible_row_settings: ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings + :param preserve_rules: Preserve Rules. + :type preserve_rules: list[object] :param inputs: List of inputs for the activity. :type inputs: list[~azure.mgmt.datafactory.models.DatasetReference] :param outputs: List of outputs for the activity. @@ -96,6 +98,7 @@ class CopyActivity(ExecutionActivity): 'data_integration_units': {'key': 'typeProperties.dataIntegrationUnits', 'type': 'object'}, 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, + 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, } @@ -111,6 +114,7 @@ def __init__(self, **kwargs): self.data_integration_units = kwargs.get('data_integration_units', None) self.enable_skip_incompatible_row = kwargs.get('enable_skip_incompatible_row', None) self.redirect_incompatible_row_settings = kwargs.get('redirect_incompatible_row_settings', None) + self.preserve_rules = kwargs.get('preserve_rules', None) self.inputs = kwargs.get('inputs', None) self.outputs = kwargs.get('outputs', None) self.type = 'Copy' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py index fd663bd71dc6..a02cd5d89e10 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py @@ -65,6 +65,8 @@ class CopyActivity(ExecutionActivity): settings when EnableSkipIncompatibleRow is true. :type redirect_incompatible_row_settings: ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings + :param preserve_rules: Preserve Rules. + :type preserve_rules: list[object] :param inputs: List of inputs for the activity. 
:type inputs: list[~azure.mgmt.datafactory.models.DatasetReference] :param outputs: List of outputs for the activity. @@ -96,11 +98,12 @@ class CopyActivity(ExecutionActivity): 'data_integration_units': {'key': 'typeProperties.dataIntegrationUnits', 'type': 'object'}, 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, + 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, } - def __init__(self, *, name: str, source, sink, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, translator=None, enable_staging=None, staging_settings=None, parallel_copies=None, data_integration_units=None, enable_skip_incompatible_row=None, redirect_incompatible_row_settings=None, inputs=None, outputs=None, **kwargs) -> None: + def __init__(self, *, name: str, source, sink, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, translator=None, enable_staging=None, staging_settings=None, parallel_copies=None, data_integration_units=None, enable_skip_incompatible_row=None, redirect_incompatible_row_settings=None, preserve_rules=None, inputs=None, outputs=None, **kwargs) -> None: super(CopyActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.source = source self.sink = sink @@ -111,6 +114,7 @@ def __init__(self, *, name: str, source, sink, additional_properties=None, descr self.data_integration_units = data_integration_units self.enable_skip_incompatible_row = enable_skip_incompatible_row self.redirect_incompatible_row_settings = redirect_incompatible_row_settings + self.preserve_rules = preserve_rules self.inputs = inputs self.outputs = outputs self.type = 'Copy' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py index 58b55bf39bbc..6f714f7947d1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py @@ -16,10 +16,11 @@ class CopySink(Model): """A copy activity sink. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SalesforceSink, DynamicsSink, OdbcSink, - AzureSearchIndexSink, AzureDataLakeStoreSink, OracleSink, SqlDWSink, - SqlSink, DocumentDbCollectionSink, FileSystemSink, BlobSink, - AzureTableSink, AzureQueueSink, SapCloudForCustomerSink + sub-classes are: CosmosDbMongoDbApiSink, SalesforceSink, + AzureDataExplorerSink, DynamicsSink, OdbcSink, AzureSearchIndexSink, + AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, SqlDWSink, SqlSink, + DocumentDbCollectionSink, FileSystemSink, BlobSink, AzureTableSink, + AzureQueueSink, SapCloudForCustomerSink All required parameters must be populated in order to send to Azure. @@ -40,6 +41,10 @@ class CopySink(Model): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str """ @@ -54,11 +59,12 @@ class CopySink(Model): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } _subtype_map = { - 'type': {'SalesforceSink': 'SalesforceSink', 'DynamicsSink': 'DynamicsSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink'} + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'DynamicsSink': 'DynamicsSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink'} } def __init__(self, **kwargs): @@ -68,4 +74,5 @@ def __init__(self, **kwargs): self.write_batch_timeout = kwargs.get('write_batch_timeout', None) self.sink_retry_count = kwargs.get('sink_retry_count', None) self.sink_retry_wait = kwargs.get('sink_retry_wait', None) + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py index 02dfd30c931e..ea4f320e9bc6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py @@ -16,10 +16,11 @@ class CopySink(Model): """A copy activity sink. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SalesforceSink, DynamicsSink, OdbcSink, - AzureSearchIndexSink, AzureDataLakeStoreSink, OracleSink, SqlDWSink, - SqlSink, DocumentDbCollectionSink, FileSystemSink, BlobSink, - AzureTableSink, AzureQueueSink, SapCloudForCustomerSink + sub-classes are: CosmosDbMongoDbApiSink, SalesforceSink, + AzureDataExplorerSink, DynamicsSink, OdbcSink, AzureSearchIndexSink, + AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, SqlDWSink, SqlSink, + DocumentDbCollectionSink, FileSystemSink, BlobSink, AzureTableSink, + AzureQueueSink, SapCloudForCustomerSink All required parameters must be populated in order to send to Azure. 
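Every retry-wait and batch-timeout property in these hunks cites the same timespan pattern (optional day count, then hh:mm:ss). A quick standalone check of candidate values against that pattern, using plain re rather than anything from the SDK:

    import re

    # The docstring pattern with the docstring's double backslashes unescaped.
    TIMESPAN = re.compile(r'^((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))$')

    for value in ('00:00:30', '1.02:00:00', '99:99'):
        print(value, bool(TIMESPAN.match(value)))  # True, True, False
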
@@ -40,6 +41,10 @@ class CopySink(Model): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str """ @@ -54,18 +59,20 @@ class CopySink(Model): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } _subtype_map = { - 'type': {'SalesforceSink': 'SalesforceSink', 'DynamicsSink': 'DynamicsSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink'} + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'DynamicsSink': 'DynamicsSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink'} } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: super(CopySink, self).__init__(**kwargs) self.additional_properties = additional_properties self.write_batch_size = write_batch_size self.write_batch_timeout = write_batch_timeout self.sink_retry_count = sink_retry_count self.sink_retry_wait = sink_retry_wait + self.max_concurrent_connections = max_concurrent_connections self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py index 9a11107fc8e8..4f3da1e8cf85 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py @@ -16,19 +16,21 @@ class CopySource(Model): """A copy activity source. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AmazonRedshiftSource, ResponsysSource, + sub-classes are: AmazonRedshiftSource, GoogleAdWordsSource, + OracleServiceCloudSource, DynamicsAXSource, ResponsysSource, SalesforceMarketingCloudSource, VerticaSource, NetezzaSource, ZohoSource, XeroSource, SquareSource, SparkSource, ShopifySource, ServiceNowSource, QuickBooksSource, PrestoSource, PhoenixSource, PaypalSource, MarketoSource, MariaDBSource, MagentoSource, JiraSource, ImpalaSource, HubspotSource, HiveSource, HBaseSource, GreenplumSource, GoogleBigQuerySource, EloquaSource, DrillSource, CouchbaseSource, ConcurSource, - AzurePostgreSqlSource, AmazonMWSSource, HttpSource, - AzureDataLakeStoreSource, MongoDbSource, CassandraSource, WebSource, - OracleSource, AzureMySqlSource, HdfsSource, FileSystemSource, SqlDWSource, - SqlSource, SapEccSource, SapCloudForCustomerSource, SalesforceSource, - RelationalSource, DynamicsSource, DocumentDbCollectionSource, BlobSource, - AzureTableSource + AzurePostgreSqlSource, AmazonMWSSource, HttpSource, AzureBlobFSSource, + AzureDataLakeStoreSource, Office365Source, CosmosDbMongoDbApiSource, + MongoDbV2Source, MongoDbSource, CassandraSource, WebSource, OracleSource, + AzureDataExplorerSource, AzureMySqlSource, HdfsSource, FileSystemSource, + SqlDWSource, SqlSource, RestSource, SapOpenHubSource, SapEccSource, + SapCloudForCustomerSource, SalesforceSource, RelationalSource, + DynamicsSource, DocumentDbCollectionSource, BlobSource, AzureTableSource All required parameters must be populated in order to send to Azure. @@ -42,6 +44,10 @@ class CopySource(Model): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. 
:type type: str """ @@ -54,11 +60,12 @@ class CopySource(Model): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlSource': 'SqlSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource'} + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 
'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource'} } def __init__(self, **kwargs): @@ -66,4 +73,5 @@ def __init__(self, **kwargs): self.additional_properties = kwargs.get('additional_properties', None) self.source_retry_count = kwargs.get('source_retry_count', None) self.source_retry_wait = kwargs.get('source_retry_wait', None) + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py index 7c1a96b2897a..eb439548481a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py @@ -16,19 +16,21 @@ class CopySource(Model): """A copy activity source. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonRedshiftSource, ResponsysSource, + sub-classes are: AmazonRedshiftSource, GoogleAdWordsSource, + OracleServiceCloudSource, DynamicsAXSource, ResponsysSource, SalesforceMarketingCloudSource, VerticaSource, NetezzaSource, ZohoSource, XeroSource, SquareSource, SparkSource, ShopifySource, ServiceNowSource, QuickBooksSource, PrestoSource, PhoenixSource, PaypalSource, MarketoSource, MariaDBSource, MagentoSource, JiraSource, ImpalaSource, HubspotSource, HiveSource, HBaseSource, GreenplumSource, GoogleBigQuerySource, EloquaSource, DrillSource, CouchbaseSource, ConcurSource, - AzurePostgreSqlSource, AmazonMWSSource, HttpSource, - AzureDataLakeStoreSource, MongoDbSource, CassandraSource, WebSource, - OracleSource, AzureMySqlSource, HdfsSource, FileSystemSource, SqlDWSource, - SqlSource, SapEccSource, SapCloudForCustomerSource, SalesforceSource, - RelationalSource, DynamicsSource, DocumentDbCollectionSource, BlobSource, - AzureTableSource + AzurePostgreSqlSource, AmazonMWSSource, HttpSource, AzureBlobFSSource, + AzureDataLakeStoreSource, Office365Source, CosmosDbMongoDbApiSource, + MongoDbV2Source, MongoDbSource, CassandraSource, WebSource, OracleSource, + AzureDataExplorerSource, AzureMySqlSource, HdfsSource, FileSystemSource, + SqlDWSource, SqlSource, RestSource, SapOpenHubSource, SapEccSource, + SapCloudForCustomerSource, SalesforceSource, RelationalSource, + DynamicsSource, DocumentDbCollectionSource, BlobSource, AzureTableSource All required parameters must be populated in order to send to Azure. 
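Because the new keyword is defined once on CopySource and CopySink and threaded through every subclass __init__, a copy activity can now cap connection fan-out on both ends. A hedged sketch using the Blob models from this same package (values illustrative):

    from azure.mgmt.datafactory.models import BlobSink, BlobSource, CopyActivity

    copy = CopyActivity(
        name='CopyBlobToBlob',
        source=BlobSource(max_concurrent_connections=8),  # cap reads
        sink=BlobSink(max_concurrent_connections=4),      # cap writes
    )
    print(copy.source.max_concurrent_connections,
          copy.sink.max_concurrent_connections)           # 8 4
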
@@ -42,6 +44,10 @@ class CopySource(Model): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str """ @@ -54,16 +60,18 @@ class CopySource(Model): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlSource': 'SqlSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource'} + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 
'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource'} } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: super(CopySource, self).__init__(**kwargs) self.additional_properties = additional_properties self.source_retry_count = source_retry_count self.source_retry_wait = source_retry_wait + self.max_concurrent_connections = max_concurrent_connections self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service.py index ed9136eee5fe..6a8a462f6f46 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service.py @@ -29,7 +29,7 @@ class CosmosDbLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service_py3.py index 3b951a68a65a..57dab80e06b9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service_py3.py @@ -29,7 +29,7 @@ class CosmosDbLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset.py new file mode 100644 index 000000000000..d86648eb5eee --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class CosmosDbMongoDbApiCollectionDataset(Dataset): + """The CosmosDB (MongoDB API) database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection: Required. The collection name of the CosmosDB (MongoDB + API) database. Type: string (or Expression with resultType string).
+ :type collection: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'collection': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CosmosDbMongoDbApiCollectionDataset, self).__init__(**kwargs) + self.collection = kwargs.get('collection', None) + self.type = 'CosmosDbMongoDbApiCollection' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset_py3.py new file mode 100644 index 000000000000..de2650fa14b4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class CosmosDbMongoDbApiCollectionDataset(Dataset): + """The CosmosDB (MongoDB API) database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection: Required. The collection name of the CosmosDB (MongoDB + API) database. Type: string (or Expression with resultType string).
+ :type collection: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'collection': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, collection, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(CosmosDbMongoDbApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.collection = collection + self.type = 'CosmosDbMongoDbApiCollection' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service.py new file mode 100644 index 000000000000..f76e7c5f5a41 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class CosmosDbMongoDbApiLinkedService(LinkedService): + """Linked service for CosmosDB (MongoDB API) data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The CosmosDB (MongoDB API) connection + string. Type: string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param database: Required.
The name of the CosmosDB (MongoDB API) database + that you want to access. Type: string (or Expression with resultType + string). + :type database: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CosmosDbMongoDbApiLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.database = kwargs.get('database', None) + self.type = 'CosmosDbMongoDbApi' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service_py3.py new file mode 100644 index 000000000000..2a72bfce4ff2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service_py3.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class CosmosDbMongoDbApiLinkedService(LinkedService): + """Linked service for CosmosDB (MongoDB API) data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The CosmosDB (MongoDB API) connection + string. Type: string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param database: Required. The name of the CosmosDB (MongoDB API) database + that you want to access. Type: string (or Expression with resultType + string).
+ :type database: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + } + + def __init__(self, *, connection_string, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: + super(CosmosDbMongoDbApiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.database = database + self.type = 'CosmosDbMongoDbApi' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink.py new file mode 100644 index 000000000000..0d40b52add80 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class CosmosDbMongoDbApiSink(CopySink): + """A copy activity sink for a CosmosDB (MongoDB API) database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server.
+ :type type: str + :param write_behavior: Specifies whether the document with the same key + should be overwritten (upsert) rather than raising an exception (insert). + The default value is "insert". Type: string (or Expression with resultType + string). + :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CosmosDbMongoDbApiSink, self).__init__(**kwargs) + self.write_behavior = kwargs.get('write_behavior', None) + self.type = 'CosmosDbMongoDbApiSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink_py3.py new file mode 100644 index 000000000000..5db1ee5c9d36 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink_py3.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class CosmosDbMongoDbApiSink(CopySink): + """A copy activity sink for a CosmosDB (MongoDB API) database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param write_behavior: Specifies whether the document with the same key + should be overwritten (upsert) rather than raising an exception (insert). + The default value is "insert".
Type: string (or Expression with resultType string). + :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: + super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.write_behavior = write_behavior + self.type = 'CosmosDbMongoDbApiSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py new file mode 100644 index 000000000000..dae49c1d45eb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class CosmosDbMongoDbApiSource(CopySource): + """A copy activity source for a CosmosDB (MongoDB API) database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param filter: Specifies selection filter using query operators. To return + all documents in a collection, omit this parameter or pass an empty + document ({}). Type: string (or Expression with resultType string). + :type filter: object + :param cursor_methods: Cursor methods for MongoDB query.
+ :type cursor_methods: + ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties + :param batch_size: Specifies the number of documents to return in each + batch of the response from the MongoDB instance. In most cases, modifying + the batch size will not affect the user or the application. This property's + main purpose is to avoid hitting the limit on response size. Type: + integer (or Expression with resultType integer). + :type batch_size: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'filter': {'key': 'filter', 'type': 'object'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CosmosDbMongoDbApiSource, self).__init__(**kwargs) + self.filter = kwargs.get('filter', None) + self.cursor_methods = kwargs.get('cursor_methods', None) + self.batch_size = kwargs.get('batch_size', None) + self.type = 'CosmosDbMongoDbApiSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source_py3.py new file mode 100644 index 000000000000..a4c869645973 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source_py3.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class CosmosDbMongoDbApiSource(CopySource): + """A copy activity source for a CosmosDB (MongoDB API) database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param filter: Specifies selection filter using query operators. To return + all documents in a collection, omit this parameter or pass an empty + document ({}). Type: string (or Expression with resultType string).
+ :type filter: object + :param cursor_methods: Cursor methods for Mongodb query. + :type cursor_methods: + ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties + :param batch_size: Specifies the number of documents to return in each + batch of the response from MongoDB instance. In most cases, modifying the + batch size will not affect the user or the application. This property's + main purpose is to avoid hitting the limitation of response size. Type: + integer (or Expression with resultType integer). + :type batch_size: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'filter': {'key': 'filter', 'type': 'object'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None, **kwargs) -> None: + super(CosmosDbMongoDbApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.filter = filter + self.cursor_methods = cursor_methods + self.batch_size = batch_size + self.type = 'CosmosDbMongoDbApiSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service.py index f5c02a071718..76e45648f941 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service.py @@ -29,7 +29,7 @@ class CouchbaseLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service_py3.py index 1507d6ab7b32..afe336f666de 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service_py3.py @@ -29,7 +29,7 @@ class CouchbaseLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
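The two files above define the same CosmosDbMongoDbApiSource model for the Python 2 (kwargs-based) and Python 3 (keyword-only) runtimes. A hedged construction sketch, using the MongoDbCursorMethodsProperties type that the docstring references and assuming its sort/limit keyword arguments; the filter and sort documents are illustrative:

from azure.mgmt.datafactory.models import (
    CosmosDbMongoDbApiSource,
    MongoDbCursorMethodsProperties,
)

source = CosmosDbMongoDbApiSource(
    filter='{"status": "active"}',         # MongoDB query document, illustrative
    cursor_methods=MongoDbCursorMethodsProperties(
        sort='{"_id": -1}',                # assumed keyword arguments
        limit=100,
    ),
    batch_size=500,
    max_concurrent_connections=2,
)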
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source.py index bfab638594a3..b355605417d1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source.py @@ -27,6 +27,10 @@ class CouchbaseSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class CouchbaseSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source_py3.py index cc661253a13d..1358fc20feba 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source_py3.py @@ -27,6 +27,10 @@ class CouchbaseSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class CouchbaseSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'CouchbaseSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity.py index f7eceb72ff3b..01cfb7335d37 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity.py @@ -52,6 +52,9 @@ class CustomActivity(ExecutionActivity): custom activity has the full responsibility to consume and interpret the content defined. :type extended_properties: dict[str, object] + :param retention_time_in_days: The retention time for the files submitted + for custom activity. Type: double (or Expression with resultType double). + :type retention_time_in_days: object """ _validation = { @@ -74,6 +77,7 @@ class CustomActivity(ExecutionActivity): 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'}, 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'}, + 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'}, } def __init__(self, **kwargs): @@ -83,4 +87,5 @@ def __init__(self, **kwargs): self.folder_path = kwargs.get('folder_path', None) self.reference_objects = kwargs.get('reference_objects', None) self.extended_properties = kwargs.get('extended_properties', None) + self.retention_time_in_days = kwargs.get('retention_time_in_days', None) self.type = 'Custom' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_py3.py index b82ac57bca4d..bf8326f053f2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_py3.py @@ -52,6 +52,9 @@ class CustomActivity(ExecutionActivity): custom activity has the full responsibility to consume and interpret the content defined. :type extended_properties: dict[str, object] + :param retention_time_in_days: The retention time for the files submitted + for custom activity. 
Type: double (or Expression with resultType double). + :type retention_time_in_days: object """ _validation = { @@ -74,13 +77,15 @@ class CustomActivity(ExecutionActivity): 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'}, 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'}, + 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'}, } - def __init__(self, *, name: str, command, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, resource_linked_service=None, folder_path=None, reference_objects=None, extended_properties=None, **kwargs) -> None: + def __init__(self, *, name: str, command, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, resource_linked_service=None, folder_path=None, reference_objects=None, extended_properties=None, retention_time_in_days=None, **kwargs) -> None: super(CustomActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.command = command self.resource_linked_service = resource_linked_service self.folder_path = folder_path self.reference_objects = reference_objects self.extended_properties = extended_properties + self.retention_time_in_days = retention_time_in_days self.type = 'Custom' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service.py index 4bc3a2863fc3..db14a05e7ad1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service.py @@ -29,7 +29,7 @@ class CustomDataSourceLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service_py3.py index 2ec05f7a32d9..f7633ee28cbd 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service_py3.py @@ -29,7 +29,7 @@ class CustomDataSourceLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
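The new retention_time_in_days property rides under typeProperties alongside the other custom-activity settings, as the constructor change above shows. A minimal sketch, assuming an existing Azure Batch linked service named 'AzureBatchLS' (the name and command are illustrative):

from azure.mgmt.datafactory.models import CustomActivity, LinkedServiceReference

activity = CustomActivity(
    name='RunCleanupJob',                                 # illustrative
    command='python cleanup.py',
    linked_service_name=LinkedServiceReference(reference_name='AzureBatchLS'),
    retention_time_in_days=30,                            # double, or an ADF expression
)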
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset.py index 8a6a8ac662a8..a242309c3fd1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset.py @@ -43,14 +43,13 @@ class CustomDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param type_properties: Required. Custom dataset properties. + :param type_properties: Custom dataset properties. :type type_properties: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'type_properties': {'required': True}, } _attribute_map = { diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset_py3.py index da681e8360b8..c00dae2b2c56 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset_py3.py @@ -43,14 +43,13 @@ class CustomDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param type_properties: Required. Custom dataset properties. + :param type_properties: Custom dataset properties. :type type_properties: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'type_properties': {'required': True}, } _attribute_map = { @@ -66,7 +65,7 @@ class CustomDataset(Dataset): 'type_properties': {'key': 'typeProperties', 'type': 'object'}, } - def __init__(self, *, linked_service_name, type_properties, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, type_properties=None, **kwargs) -> None: super(CustomDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type_properties = type_properties self.type = 'CustomDataset' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py index 2992964b1799..b372cb91d8ef 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py @@ -77,6 +77,8 @@ class RunQueryFilterOperand(str, Enum): activity_type = "ActivityType" trigger_name = "TriggerName" trigger_run_timestamp = "TriggerRunTimestamp" + run_group_id = "RunGroupId" + latest_only = "LatestOnly" class RunQueryFilterOperator(str, Enum): @@ -158,6 +160,12 @@ class RecurrenceFrequency(str, Enum): year = "Year" +class GoogleAdWordsAuthenticationType(str, 
Enum): + + service_authentication = "ServiceAuthentication" + user_authentication = "UserAuthentication" + + class SparkServerType(str, Enum): shark_server = "SharkServer" @@ -267,6 +275,14 @@ class HttpAuthenticationType(str, Enum): client_certificate = "ClientCertificate" +class RestServiceAuthenticationType(str, Enum): + + anonymous = "Anonymous" + basic = "Basic" + aad_service_principal = "AadServicePrincipal" + managed_service_identity = "ManagedServiceIdentity" + + class MongoDbAuthenticationType(str, Enum): basic = "Basic" @@ -277,6 +293,15 @@ class ODataAuthenticationType(str, Enum): basic = "Basic" anonymous = "Anonymous" + windows = "Windows" + aad_service_principal = "AadServicePrincipal" + managed_service_identity = "ManagedServiceIdentity" + + +class ODataAadServicePrincipalCredentialType(str, Enum): + + service_principal_key = "ServicePrincipalKey" + service_principal_cert = "ServicePrincipalCert" class TeradataAuthenticationType(str, Enum): @@ -395,6 +420,11 @@ class SapCloudForCustomerSinkWriteBehavior(str, Enum): update = "Update" +class WebHookActivityMethod(str, Enum): + + post = "POST" + + class IntegrationRuntimeType(str, Enum): managed = "Managed" diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py index de812815bd26..af540b1e6429 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py @@ -17,26 +17,29 @@ class Dataset(Model): data stores, such as tables, files, folders, and documents. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ResponsysObjectDataset, - SalesforceMarketingCloudObjectDataset, VerticaTableDataset, - NetezzaTableDataset, ZohoObjectDataset, XeroObjectDataset, - SquareObjectDataset, SparkObjectDataset, ShopifyObjectDataset, - ServiceNowObjectDataset, QuickBooksObjectDataset, PrestoObjectDataset, - PhoenixObjectDataset, PaypalObjectDataset, MarketoObjectDataset, - MariaDBTableDataset, MagentoObjectDataset, JiraObjectDataset, - ImpalaObjectDataset, HubspotObjectDataset, HiveObjectDataset, - HBaseObjectDataset, GreenplumTableDataset, GoogleBigQueryObjectDataset, - EloquaObjectDataset, DrillTableDataset, CouchbaseTableDataset, - ConcurObjectDataset, AzurePostgreSqlTableDataset, AmazonMWSObjectDataset, - HttpDataset, AzureSearchIndexDataset, WebTableDataset, - SqlServerTableDataset, SapEccResourceDataset, + sub-classes are: GoogleAdWordsObjectDataset, AzureDataExplorerTableDataset, + OracleServiceCloudObjectDataset, DynamicsAXResourceDataset, + ResponsysObjectDataset, SalesforceMarketingCloudObjectDataset, + VerticaTableDataset, NetezzaTableDataset, ZohoObjectDataset, + XeroObjectDataset, SquareObjectDataset, SparkObjectDataset, + ShopifyObjectDataset, ServiceNowObjectDataset, QuickBooksObjectDataset, + PrestoObjectDataset, PhoenixObjectDataset, PaypalObjectDataset, + MarketoObjectDataset, MariaDBTableDataset, MagentoObjectDataset, + JiraObjectDataset, ImpalaObjectDataset, HubspotObjectDataset, + HiveObjectDataset, HBaseObjectDataset, GreenplumTableDataset, + GoogleBigQueryObjectDataset, EloquaObjectDataset, DrillTableDataset, + CouchbaseTableDataset, ConcurObjectDataset, AzurePostgreSqlTableDataset, + AmazonMWSObjectDataset, HttpDataset, AzureSearchIndexDataset, + WebTableDataset, RestResourceDataset, SqlServerTableDataset, + SapOpenHubTableDataset, 
SapEccResourceDataset, SapCloudForCustomerResourceDataset, SalesforceObjectDataset, RelationalTableDataset, AzureMySqlTableDataset, OracleTableDataset, - ODataResourceDataset, MongoDbCollectionDataset, FileShareDataset, - AzureDataLakeStoreDataset, DynamicsEntityDataset, - DocumentDbCollectionDataset, CustomDataset, CassandraTableDataset, - AzureSqlDWTableDataset, AzureSqlTableDataset, AzureTableDataset, - AzureBlobDataset, AmazonS3Dataset + ODataResourceDataset, CosmosDbMongoDbApiCollectionDataset, + MongoDbV2CollectionDataset, MongoDbCollectionDataset, FileShareDataset, + Office365Dataset, AzureBlobFSDataset, AzureDataLakeStoreDataset, + DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, + CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlTableDataset, + AzureTableDataset, AzureBlobDataset, AmazonS3Dataset All required parameters must be populated in order to send to Azure. @@ -86,7 +89,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 
'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py index 9538e6105a8f..adc64b228236 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py @@ -17,26 +17,29 @@ class Dataset(Model): data stores, such as tables, files, folders, and documents. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: ResponsysObjectDataset, - SalesforceMarketingCloudObjectDataset, VerticaTableDataset, - NetezzaTableDataset, ZohoObjectDataset, XeroObjectDataset, - SquareObjectDataset, SparkObjectDataset, ShopifyObjectDataset, - ServiceNowObjectDataset, QuickBooksObjectDataset, PrestoObjectDataset, - PhoenixObjectDataset, PaypalObjectDataset, MarketoObjectDataset, - MariaDBTableDataset, MagentoObjectDataset, JiraObjectDataset, - ImpalaObjectDataset, HubspotObjectDataset, HiveObjectDataset, - HBaseObjectDataset, GreenplumTableDataset, GoogleBigQueryObjectDataset, - EloquaObjectDataset, DrillTableDataset, CouchbaseTableDataset, - ConcurObjectDataset, AzurePostgreSqlTableDataset, AmazonMWSObjectDataset, - HttpDataset, AzureSearchIndexDataset, WebTableDataset, - SqlServerTableDataset, SapEccResourceDataset, + sub-classes are: GoogleAdWordsObjectDataset, AzureDataExplorerTableDataset, + OracleServiceCloudObjectDataset, DynamicsAXResourceDataset, + ResponsysObjectDataset, SalesforceMarketingCloudObjectDataset, + VerticaTableDataset, NetezzaTableDataset, ZohoObjectDataset, + XeroObjectDataset, SquareObjectDataset, SparkObjectDataset, + ShopifyObjectDataset, ServiceNowObjectDataset, QuickBooksObjectDataset, + PrestoObjectDataset, PhoenixObjectDataset, PaypalObjectDataset, + MarketoObjectDataset, MariaDBTableDataset, MagentoObjectDataset, + JiraObjectDataset, ImpalaObjectDataset, HubspotObjectDataset, + HiveObjectDataset, HBaseObjectDataset, GreenplumTableDataset, + GoogleBigQueryObjectDataset, EloquaObjectDataset, DrillTableDataset, + CouchbaseTableDataset, ConcurObjectDataset, AzurePostgreSqlTableDataset, + AmazonMWSObjectDataset, HttpDataset, AzureSearchIndexDataset, + WebTableDataset, RestResourceDataset, SqlServerTableDataset, + SapOpenHubTableDataset, SapEccResourceDataset, SapCloudForCustomerResourceDataset, SalesforceObjectDataset, RelationalTableDataset, AzureMySqlTableDataset, OracleTableDataset, - ODataResourceDataset, MongoDbCollectionDataset, FileShareDataset, - AzureDataLakeStoreDataset, DynamicsEntityDataset, - DocumentDbCollectionDataset, CustomDataset, CassandraTableDataset, - AzureSqlDWTableDataset, AzureSqlTableDataset, AzureTableDataset, - AzureBlobDataset, AmazonS3Dataset + ODataResourceDataset, CosmosDbMongoDbApiCollectionDataset, + MongoDbV2CollectionDataset, MongoDbCollectionDataset, FileShareDataset, + Office365Dataset, AzureBlobFSDataset, AzureDataLakeStoreDataset, + DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, + CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlTableDataset, + AzureTableDataset, AzureBlobDataset, AmazonS3Dataset All required parameters must be populated in order to send to Azure. 
@@ -86,7 +89,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 
'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service.py index 9349bbcba5e0..d163d2b93c18 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service.py @@ -29,7 +29,7 @@ class Db2LinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service_py3.py index d339860c3229..44d784fa9bde 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service_py3.py @@ -29,7 +29,7 @@ class Db2LinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
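The _subtype_map above is what lets the serializer pick the right Dataset subclass from the 'type' discriminator in a REST payload. A rough sketch of that mechanism, driving msrest's Deserializer directly; the generated client normally does this internally, and the payload values here are illustrative:

from msrest import Deserializer
import azure.mgmt.datafactory.models as models

# Collect the model classes so the deserializer can resolve subtype names.
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
deserialize = Deserializer(client_models)

payload = {
    'type': 'AzureBlobFSFile',   # maps to AzureBlobFSDataset via _subtype_map
    'linkedServiceName': {'referenceName': 'AdlsGen2LS',
                          'type': 'LinkedServiceReference'},
}
dataset = deserialize('Dataset', payload)
assert isinstance(dataset, models.AzureBlobFSDataset)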
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py index 43253aff51d0..25e80ee45466 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py @@ -34,6 +34,10 @@ class DocumentDbCollectionSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param nesting_separator: Nested properties separator. Default is . (dot). @@ -51,6 +55,7 @@ class DocumentDbCollectionSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py index 5377d4ed5aa5..111897036215 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py @@ -34,6 +34,10 @@ class DocumentDbCollectionSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param nesting_separator: Nested properties separator. Default is . (dot). 
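The same optional max_concurrent_connections cap now appears on the Cosmos DB (SQL API) sink. A short sketch of the resulting constructor surface; the values are illustrative:

from azure.mgmt.datafactory.models import DocumentDbCollectionSink

sink = DocumentDbCollectionSink(
    nesting_separator='.',            # default separator for nested properties
    write_batch_size=100,
    max_concurrent_connections=8,
)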
@@ -51,11 +55,12 @@ class DocumentDbCollectionSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, nesting_separator=None, **kwargs) -> None: - super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, nesting_separator=None, **kwargs) -> None: + super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.nesting_separator = nesting_separator self.type = 'DocumentDbCollectionSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source.py index ac6bd77955c8..9fdd23f2795f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source.py @@ -27,6 +27,10 @@ class DocumentDbCollectionSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Documents query. 
Type: string (or Expression with resultType @@ -45,6 +49,7 @@ class DocumentDbCollectionSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source_py3.py index 9c20bfbfa132..9e0bf6382b04 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source_py3.py @@ -27,6 +27,10 @@ class DocumentDbCollectionSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Documents query. Type: string (or Expression with resultType @@ -45,13 +49,14 @@ class DocumentDbCollectionSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, nesting_separator=None, **kwargs) -> None: - super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, nesting_separator=None, **kwargs) -> None: + super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.nesting_separator = nesting_separator self.type = 'DocumentDbCollectionSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service.py index 52ad5888f5f2..c5428ace02a2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service.py @@ -29,7 +29,7 @@ class DrillLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags 
that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service_py3.py index b556d7e92be3..5fb0cb25ecdb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service_py3.py @@ -29,7 +29,7 @@ class DrillLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source.py index c2e390308b81..9a3391f27786 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source.py @@ -27,6 +27,10 @@ class DrillSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class DrillSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source_py3.py index ea67bbef64fb..313183abab83 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source_py3.py @@ -27,6 +27,10 @@ class DrillSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class DrillSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'DrillSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service.py new file mode 100644 index 000000000000..5ff0b150718b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class DynamicsAXLinkedService(LinkedService): + """Dynamics AX linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The Dynamics AX (or Dynamics 365 Finance and + Operations) instance OData endpoint. + :type url: object + :param service_principal_id: Required. Specify the application's client + ID. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. Specify the application's key. + Mark this field as a SecureString to store it securely in Data Factory, or + reference a secret stored in Azure Key Vault. Type: string (or Expression + with resultType string). 
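DrillSource picks up the same optional connection cap as the other CopySource models in this patch. A sketch with illustrative values; note the retry-wait string must match the timespan pattern quoted in the docstring:

from azure.mgmt.datafactory.models import DrillSource

source = DrillSource(
    query='SELECT * FROM dfs.tmp.`orders` LIMIT 100',   # illustrative query
    source_retry_count=3,
    source_retry_wait='00:00:30',                       # matches the documented pattern
    max_concurrent_connections=4,
)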
+ :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: Required. Specify the tenant information (domain name or + tenant ID) under which your application resides. Retrieve it by hovering + the mouse in the top-right corner of the Azure portal. Type: string (or + Expression with resultType string). + :type tenant: object + :param aad_resource_id: Required. Specify the resource you are requesting + authorization. Type: string (or Expression with resultType string). + :type aad_resource_id: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + 'tenant': {'required': True}, + 'aad_resource_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DynamicsAXLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.aad_resource_id = kwargs.get('aad_resource_id', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'DynamicsAX' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service_py3.py new file mode 100644 index 000000000000..79d3a34ba313 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service_py3.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class DynamicsAXLinkedService(LinkedService): + """Dynamics AX linked service. + + All required parameters must be populated in order to send to Azure. 
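All five typeProperties of this linked service are marked required, so a construction sketch needs each of them. The endpoint, tenant, IDs, and key below are placeholders; SecureString is the wrapper the docstring suggests for the application key:

from azure.mgmt.datafactory.models import DynamicsAXLinkedService, SecureString

linked_service = DynamicsAXLinkedService(
    url='https://contoso.operations.dynamics.com/data',       # placeholder endpoint
    service_principal_id='<application-client-id>',
    service_principal_key=SecureString(value='<application-key>'),
    tenant='contoso.onmicrosoft.com',                          # placeholder tenant
    aad_resource_id='https://contoso.operations.dynamics.com',
)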
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The Dynamics AX (or Dynamics 365 Finance and + Operations) instance OData endpoint. + :type url: object + :param service_principal_id: Required. Specify the application's client + ID. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. Specify the application's key. + Mark this field as a SecureString to store it securely in Data Factory, or + reference a secret stored in Azure Key Vault. Type: string (or Expression + with resultType string). + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: Required. Specify the tenant information (domain name or + tenant ID) under which your application resides. Retrieve it by hovering + the mouse in the top-right corner of the Azure portal. Type: string (or + Expression with resultType string). + :type tenant: object + :param aad_resource_id: Required. Specify the resource you are requesting + authorization. Type: string (or Expression with resultType string). + :type aad_resource_id: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + 'tenant': {'required': True}, + 'aad_resource_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, url, service_principal_id, service_principal_key, tenant, aad_resource_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, encrypted_credential=None, **kwargs) -> None: + super(DynamicsAXLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.aad_resource_id = aad_resource_id + self.encrypted_credential = encrypted_credential + self.type = 'DynamicsAX' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py new file mode 100644 index 000000000000..233c4c99d4df --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class DynamicsAXResourceDataset(Dataset): + """The path of the Dynamics AX OData entity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. 
+ :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param path: Required. The path of the Dynamics AX OData entity. Type: + string (or Expression with resultType string). + :type path: str + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(DynamicsAXResourceDataset, self).__init__(**kwargs) + self.path = kwargs.get('path', None) + self.type = 'DynamicsAXResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py new file mode 100644 index 000000000000..788c9084ee9b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class DynamicsAXResourceDataset(Dataset): + """The path of the Dynamics AX OData entity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param path: Required. The path of the Dynamics AX OData entity. Type: + string (or Expression with resultType string). + :type path: str + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'str'}, + } + + def __init__(self, *, linked_service_name, path: str, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(DynamicsAXResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.path = path + self.type = 'DynamicsAXResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source.py new file mode 100644 index 000000000000..619bad0f75c9 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class DynamicsAXSource(CopySource): + """A copy activity Dynamics AX source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DynamicsAXSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'DynamicsAXSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source_py3.py new file mode 100644 index 000000000000..7679e68bae7b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class DynamicsAXSource(CopySource): + """A copy activity Dynamics AX source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(DynamicsAXSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'DynamicsAXSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service.py index 50ec75f79523..c925033d1240 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service.py @@ -29,7 +29,7 @@ class DynamicsLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service_py3.py index 4971dabfba16..07c028ff2477 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service_py3.py @@ -29,7 +29,7 @@ class DynamicsLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py index 7eb8be963583..45bac7b52064 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py @@ -37,6 +37,10 @@ class DynamicsSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :ivar write_behavior: Required. The write behavior for the operation. 
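As an aside before the sink-side changes below: the three new DynamicsAX models added above compose in the usual Data Factory fashion. A minimal sketch, assuming these classes are exported from azure.mgmt.datafactory.models as generated in this patch; the endpoint, tenant, entity and reference names are placeholders:

    from azure.mgmt.datafactory.models import (
        DynamicsAXLinkedService, DynamicsAXResourceDataset, DynamicsAXSource,
        LinkedServiceReference, SecureString)

    # All five typeProperties are required by the _validation map above.
    linked_service = DynamicsAXLinkedService(
        url='https://contoso.operations.dynamics.com/data',
        service_principal_id='<application-client-id>',
        service_principal_key=SecureString(value='<application-key>'),
        tenant='contoso.onmicrosoft.com',
        aad_resource_id='https://contoso.operations.dynamics.com')

    # The dataset pairs a reference to that linked service with the
    # required OData entity path.
    dataset = DynamicsAXResourceDataset(
        linked_service_name=LinkedServiceReference(reference_name='DynamicsAXLS'),
        path='Customers')

    # The copy source adds only an optional query on top of CopySource.
    source = DynamicsAXSource(query='$top=100')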
@@ -59,6 +63,7 @@ class DynamicsSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py index 2e2a64169797..5f736f9cf658 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py @@ -37,6 +37,10 @@ class DynamicsSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :ivar write_behavior: Required. The write behavior for the operation. @@ -59,6 +63,7 @@ class DynamicsSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, @@ -66,7 +71,7 @@ class DynamicsSink(CopySink): write_behavior = "Upsert" - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, ignore_null_values=None, **kwargs) -> None: - super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None: + super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.ignore_null_values = ignore_null_values self.type = 'DynamicsSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source.py index 09c04a8d09a6..d38f96fee911 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source.py @@ -27,6 +27,10 @@ class DynamicsSource(CopySource): with resultType string), pattern: 
((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: FetchXML is a proprietary query language that is used in @@ -43,6 +47,7 @@ class DynamicsSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source_py3.py index 9c921cf40f3a..12d83625bc6a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source_py3.py @@ -27,6 +27,10 @@ class DynamicsSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: FetchXML is a proprietary query language that is used in @@ -43,11 +47,12 @@ class DynamicsSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'DynamicsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service.py index ae14064ae523..6249c2e2334b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service.py @@ -29,7 +29,7 @@ class EloquaLinkedService(LinkedService): :type parameters: dict[str, 
~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service_py3.py index 1c6bd97ecf9d..623d798036a3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service_py3.py @@ -29,7 +29,7 @@ class EloquaLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source.py index 694282ebcd8a..f016140189f1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source.py @@ -27,6 +27,10 @@ class EloquaSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class EloquaSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source_py3.py index c9d96711743f..d200ff32fd9d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source_py3.py @@ -27,6 +27,10 @@ class EloquaSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class EloquaSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'EloquaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service.py index c3c90f30935d..ffced5c2e689 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service.py @@ -29,7 +29,7 @@ class FileServerLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service_py3.py index a9793d5b44fc..ec6fe58bb3a3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service_py3.py @@ -29,7 +29,7 @@ class FileServerLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset.py index a851956ea319..6874f4c08929 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset.py @@ -49,6 +49,12 @@ class FileShareDataset(Dataset): :param file_name: The name of the on-premises file system. Type: string (or Expression with resultType string). :type file_name: object + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). 
+ :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object :param format: The format of the files. :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param file_filter: Specify a filter to be used to select a subset of @@ -76,6 +82,8 @@ class FileShareDataset(Dataset): 'type': {'key': 'type', 'type': 'str'}, 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, 'file_filter': {'key': 'typeProperties.fileFilter', 'type': 'object'}, 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, @@ -85,6 +93,8 @@ def __init__(self, **kwargs): super(FileShareDataset, self).__init__(**kwargs) self.folder_path = kwargs.get('folder_path', None) self.file_name = kwargs.get('file_name', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) self.format = kwargs.get('format', None) self.file_filter = kwargs.get('file_filter', None) self.compression = kwargs.get('compression', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset_py3.py index 675583ae2f2c..19e88a264e12 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset_py3.py @@ -49,6 +49,12 @@ class FileShareDataset(Dataset): :param file_name: The name of the on-premises file system. Type: string (or Expression with resultType string). :type file_name: object + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object :param format: The format of the files. 
:type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param file_filter: Specify a filter to be used to select a subset of @@ -76,15 +82,19 @@ class FileShareDataset(Dataset): 'type': {'key': 'type', 'type': 'str'}, 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, 'file_filter': {'key': 'typeProperties.fileFilter', 'type': 'object'}, 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, format=None, file_filter=None, compression=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, modified_datetime_start=None, modified_datetime_end=None, format=None, file_filter=None, compression=None, **kwargs) -> None: super(FileShareDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.folder_path = folder_path self.file_name = file_name + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end self.format = format self.file_filter = file_filter self.compression = compression diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py index 9f33cea7a261..75baab87456e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py @@ -34,6 +34,10 @@ class FileSystemSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param copy_behavior: The type of copy behavior for copy sink. 
Possible @@ -52,6 +56,7 @@ class FileSystemSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py index a940e39878f8..92388128726e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py @@ -34,6 +34,10 @@ class FileSystemSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param copy_behavior: The type of copy behavior for copy sink. Possible @@ -52,11 +56,12 @@ class FileSystemSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, copy_behavior=None, **kwargs) -> None: - super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.copy_behavior = copy_behavior self.type = 'FileSystemSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source.py index 1bbf97f1b31d..2986b1848153 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source.py @@ -27,6 +27,10 @@ class FileSystemSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. 
Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param recursive: If true, files under the folder path will be read @@ -43,6 +47,7 @@ class FileSystemSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'recursive': {'key': 'recursive', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source_py3.py index 6db0072329d4..0598490ca51c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source_py3.py @@ -27,6 +27,10 @@ class FileSystemSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param recursive: If true, files under the folder path will be read @@ -43,11 +47,12 @@ class FileSystemSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'recursive': {'key': 'recursive', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, recursive=None, **kwargs) -> None: - super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, **kwargs) -> None: + super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.type = 'FileSystemSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service.py index 03a09f89c13e..e649ca56e37c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service.py @@ -29,7 +29,7 @@ class FtpServerLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. 
:type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service_py3.py index 21fd1168165f..b38ad1c03f46 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service_py3.py @@ -29,7 +29,7 @@ class FtpServerLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service.py new file mode 100644 index 000000000000..c460dd95c380 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service.py @@ -0,0 +1,119 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class GoogleAdWordsLinkedService(LinkedService): + """Google AdWords service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param client_customer_id: Required. The Client customer ID of the AdWords + account that you want to fetch report data for. + :type client_customer_id: object + :param developer_token: Required. The developer token associated with the + manager account that you use to grant access to the AdWords API. + :type developer_token: ~azure.mgmt.datafactory.models.SecretBase + :param authentication_type: Required. The OAuth 2.0 authentication + mechanism used for authentication. ServiceAuthentication can only be used + on self-hosted IR. 
Possible values include: 'ServiceAuthentication', + 'UserAuthentication' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.GoogleAdWordsAuthenticationType + :param refresh_token: The refresh token obtained from Google for + authorizing access to AdWords for UserAuthentication. + :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase + :param client_id: The client id of the google application used to acquire + the refresh token. + :type client_id: ~azure.mgmt.datafactory.models.SecretBase + :param client_secret: The client secret of the google application used to + acquire the refresh token. + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param email: The service account email ID that is used for + ServiceAuthentication and can only be used on self-hosted IR. + :type email: object + :param key_file_path: The full path to the .p12 key file that is used to + authenticate the service account email address and can only be used on + self-hosted IR. + :type key_file_path: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_customer_id': {'required': True}, + 'developer_token': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'client_customer_id': {'key': 'typeProperties.clientCustomerID', 'type': 'object'}, + 'developer_token': {'key': 'typeProperties.developerToken', 'type': 'SecretBase'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'SecretBase'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'email': {'key': 'typeProperties.email', 'type': 'object'}, + 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(GoogleAdWordsLinkedService, self).__init__(**kwargs) + self.client_customer_id = kwargs.get('client_customer_id', None) + self.developer_token = kwargs.get('developer_token', None) + self.authentication_type = 
kwargs.get('authentication_type', None) + self.refresh_token = kwargs.get('refresh_token', None) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.email = kwargs.get('email', None) + self.key_file_path = kwargs.get('key_file_path', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'GoogleAdWords' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service_py3.py new file mode 100644 index 000000000000..dfb3bc07e69f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service_py3.py @@ -0,0 +1,119 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class GoogleAdWordsLinkedService(LinkedService): + """Google AdWords service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param client_customer_id: Required. The Client customer ID of the AdWords + account that you want to fetch report data for. + :type client_customer_id: object + :param developer_token: Required. The developer token associated with the + manager account that you use to grant access to the AdWords API. + :type developer_token: ~azure.mgmt.datafactory.models.SecretBase + :param authentication_type: Required. The OAuth 2.0 authentication + mechanism used for authentication. ServiceAuthentication can only be used + on self-hosted IR. Possible values include: 'ServiceAuthentication', + 'UserAuthentication' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.GoogleAdWordsAuthenticationType + :param refresh_token: The refresh token obtained from Google for + authorizing access to AdWords for UserAuthentication. + :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase + :param client_id: The client id of the google application used to acquire + the refresh token. + :type client_id: ~azure.mgmt.datafactory.models.SecretBase + :param client_secret: The client secret of the google application used to + acquire the refresh token. 
+ :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param email: The service account email ID that is used for + ServiceAuthentication and can only be used on self-hosted IR. + :type email: object + :param key_file_path: The full path to the .p12 key file that is used to + authenticate the service account email address and can only be used on + self-hosted IR. + :type key_file_path: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_customer_id': {'required': True}, + 'developer_token': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'client_customer_id': {'key': 'typeProperties.clientCustomerID', 'type': 'object'}, + 'developer_token': {'key': 'typeProperties.developerToken', 'type': 'SecretBase'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'SecretBase'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'email': {'key': 'typeProperties.email', 'type': 'object'}, + 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, client_customer_id, developer_token, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, refresh_token=None, client_id=None, client_secret=None, email=None, key_file_path=None, trusted_cert_path=None, use_system_trust_store=None, encrypted_credential=None, **kwargs) -> None: + super(GoogleAdWordsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.client_customer_id = client_customer_id + self.developer_token = developer_token + self.authentication_type = authentication_type + self.refresh_token = refresh_token + self.client_id = client_id + self.client_secret = client_secret + self.email = email + self.key_file_path = key_file_path + 
self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.encrypted_credential = encrypted_credential + self.type = 'GoogleAdWords' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset.py new file mode 100644 index 000000000000..92b901b774ed --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class GoogleAdWordsObjectDataset(Dataset): + """Google AdWords service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(GoogleAdWordsObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'GoogleAdWordsObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset_py3.py new file mode 100644 index 000000000000..e1272f978b8e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class GoogleAdWordsObjectDataset(Dataset): + """Google AdWords service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(GoogleAdWordsObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'GoogleAdWordsObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source.py new file mode 100644 index 000000000000..8699057abe09 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class GoogleAdWordsSource(CopySource): + """A copy activity Google AdWords service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string).
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(GoogleAdWordsSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'GoogleAdWordsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source_py3.py new file mode 100644 index 000000000000..995d5324670b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class GoogleAdWordsSource(CopySource): + """A copy activity Google AdWords service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string).
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'GoogleAdWordsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py index c9fa8239b452..45a535b95d43 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py @@ -29,7 +29,7 @@ class GoogleBigQueryLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service_py3.py index a8582aca98b5..146674a85531 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service_py3.py @@ -29,7 +29,7 @@ class GoogleBigQueryLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source.py index c0598d88a6ed..3a28d2563a8b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source.py @@ -27,6 +27,10 @@ class GoogleBigQuerySource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. 
:type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class GoogleBigQuerySource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source_py3.py index eb5727bd43a5..49364b4d0e3f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source_py3.py @@ -27,6 +27,10 @@ class GoogleBigQuerySource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class GoogleBigQuerySource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'GoogleBigQuerySource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service.py index d3de7ccab502..57913f779ca1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service.py @@ -29,7 +29,7 @@ class GreenplumLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service_py3.py index 886d38718ecd..bd707a5e85c9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service_py3.py @@ -29,7 +29,7 @@ class GreenplumLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source.py index a463ff2c3482..086f12419f4a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source.py @@ -27,6 +27,10 @@ class GreenplumSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class GreenplumSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source_py3.py index 6a373bf9d6ae..8b789deb43da 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source_py3.py @@ -27,6 +27,10 @@ class GreenplumSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class GreenplumSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'GreenplumSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service.py index 4d7f3bf5ccb6..b6affd5caa0d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service.py @@ -29,7 +29,7 @@ class HBaseLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service_py3.py index 7963b3fc643c..a8823e2e8937 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service_py3.py @@ -29,7 +29,7 @@ class HBaseLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source.py index cc2c4fd1a843..eb6e3f1789bb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source.py @@ -27,6 +27,10 @@ class HBaseSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). 
+ :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class HBaseSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source_py3.py index c17d8cf07003..b2680e95c212 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source_py3.py @@ -27,6 +27,10 @@ class HBaseSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class HBaseSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'HBaseSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py index b18a138a855e..810525342d82 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py @@ -29,7 +29,7 @@ class HDInsightLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str @@ -55,6 +55,10 @@ class HDInsightLinkedService(LinkedService): :param is_esp_enabled: Specify if the HDInsight is created with ESP (Enterprise Security Package). Type: Boolean. :type is_esp_enabled: object + :param file_system: Specify the FileSystem if the main storage for the + HDInsight is ADLS Gen2. Type: string (or Expression with resultType + string). + :type file_system: object """ _validation = { @@ -76,6 +80,7 @@ class HDInsightLinkedService(LinkedService): 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, 'is_esp_enabled': {'key': 'typeProperties.isEspEnabled', 'type': 'object'}, + 'file_system': {'key': 'typeProperties.fileSystem', 'type': 'object'}, } def __init__(self, **kwargs): @@ -87,4 +92,5 @@ def __init__(self, **kwargs): self.hcatalog_linked_service_name = kwargs.get('hcatalog_linked_service_name', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) self.is_esp_enabled = kwargs.get('is_esp_enabled', None) + self.file_system = kwargs.get('file_system', None) self.type = 'HDInsight' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py index 769cf031a403..5c384f7d6288 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py @@ -29,7 +29,7 @@ class HDInsightLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str @@ -55,6 +55,10 @@ class HDInsightLinkedService(LinkedService): :param is_esp_enabled: Specify if the HDInsight is created with ESP (Enterprise Security Package). Type: Boolean. :type is_esp_enabled: object + :param file_system: Specify the FileSystem if the main storage for the + HDInsight is ADLS Gen2. Type: string (or Expression with resultType + string). 
+ :type file_system: object """ _validation = { @@ -76,9 +80,10 @@ class HDInsightLinkedService(LinkedService): 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, 'is_esp_enabled': {'key': 'typeProperties.isEspEnabled', 'type': 'object'}, + 'file_system': {'key': 'typeProperties.fileSystem', 'type': 'object'}, } - def __init__(self, *, cluster_uri, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_name=None, password=None, linked_service_name=None, hcatalog_linked_service_name=None, encrypted_credential=None, is_esp_enabled=None, **kwargs) -> None: + def __init__(self, *, cluster_uri, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_name=None, password=None, linked_service_name=None, hcatalog_linked_service_name=None, encrypted_credential=None, is_esp_enabled=None, file_system=None, **kwargs) -> None: super(HDInsightLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.cluster_uri = cluster_uri self.user_name = user_name @@ -87,4 +92,5 @@ def __init__(self, *, cluster_uri, additional_properties=None, connect_via=None, self.hcatalog_linked_service_name = hcatalog_linked_service_name self.encrypted_credential = encrypted_credential self.is_esp_enabled = is_esp_enabled + self.file_system = file_system self.type = 'HDInsight' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py index bd84aabc5012..d386aac9d9aa 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py @@ -29,7 +29,7 @@ class HDInsightOnDemandLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str @@ -136,6 +136,14 @@ class HDInsightOnDemandLinkedService(LinkedService): cluster once it's up. Please refer to https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. :type script_actions: list[~azure.mgmt.datafactory.models.ScriptAction] + :param virtual_network_id: The ARM resource ID for the vNet to which the + cluster should be joined after creation. Type: string (or Expression with + resultType string). + :type virtual_network_id: object + :param subnet_name: The ARM resource ID for the subnet in the vNet. If + virtualNetworkId was specified, then this property is required. Type: + string (or Expression with resultType string). 
+ :type subnet_name: object """ _validation = { @@ -187,6 +195,8 @@ class HDInsightOnDemandLinkedService(LinkedService): 'data_node_size': {'key': 'typeProperties.dataNodeSize', 'type': 'object'}, 'zookeeper_node_size': {'key': 'typeProperties.zookeeperNodeSize', 'type': 'object'}, 'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'}, + 'virtual_network_id': {'key': 'typeProperties.virtualNetworkId', 'type': 'object'}, + 'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'}, } def __init__(self, **kwargs): @@ -222,4 +232,6 @@ def __init__(self, **kwargs): self.data_node_size = kwargs.get('data_node_size', None) self.zookeeper_node_size = kwargs.get('zookeeper_node_size', None) self.script_actions = kwargs.get('script_actions', None) + self.virtual_network_id = kwargs.get('virtual_network_id', None) + self.subnet_name = kwargs.get('subnet_name', None) self.type = 'HDInsightOnDemand' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service_py3.py index 5566a022bda2..178585c9b51d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service_py3.py @@ -29,7 +29,7 @@ class HDInsightOnDemandLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str @@ -136,6 +136,14 @@ class HDInsightOnDemandLinkedService(LinkedService): cluster once it's up. Please refer to https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. :type script_actions: list[~azure.mgmt.datafactory.models.ScriptAction] + :param virtual_network_id: The ARM resource ID for the vNet to which the + cluster should be joined after creation. Type: string (or Expression with + resultType string). + :type virtual_network_id: object + :param subnet_name: The ARM resource ID for the subnet in the vNet. If + virtualNetworkId was specified, then this property is required. Type: + string (or Expression with resultType string). 
+ :type subnet_name: object """ _validation = { @@ -187,9 +195,11 @@ class HDInsightOnDemandLinkedService(LinkedService): 'data_node_size': {'key': 'typeProperties.dataNodeSize', 'type': 'object'}, 'zookeeper_node_size': {'key': 'typeProperties.zookeeperNodeSize', 'type': 'object'}, 'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'}, + 'virtual_network_id': {'key': 'typeProperties.virtualNetworkId', 'type': 'object'}, + 'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'}, } - def __init__(self, *, cluster_size, time_to_live, version, linked_service_name, host_subscription_id, tenant, cluster_resource_group, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, cluster_name_prefix=None, cluster_user_name=None, cluster_password=None, cluster_ssh_user_name=None, cluster_ssh_password=None, additional_linked_service_names=None, hcatalog_linked_service_name=None, cluster_type=None, spark_version=None, core_configuration=None, h_base_configuration=None, hdfs_configuration=None, hive_configuration=None, map_reduce_configuration=None, oozie_configuration=None, storm_configuration=None, yarn_configuration=None, encrypted_credential=None, head_node_size=None, data_node_size=None, zookeeper_node_size=None, script_actions=None, **kwargs) -> None: + def __init__(self, *, cluster_size, time_to_live, version, linked_service_name, host_subscription_id, tenant, cluster_resource_group, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, cluster_name_prefix=None, cluster_user_name=None, cluster_password=None, cluster_ssh_user_name=None, cluster_ssh_password=None, additional_linked_service_names=None, hcatalog_linked_service_name=None, cluster_type=None, spark_version=None, core_configuration=None, h_base_configuration=None, hdfs_configuration=None, hive_configuration=None, map_reduce_configuration=None, oozie_configuration=None, storm_configuration=None, yarn_configuration=None, encrypted_credential=None, head_node_size=None, data_node_size=None, zookeeper_node_size=None, script_actions=None, virtual_network_id=None, subnet_name=None, **kwargs) -> None: super(HDInsightOnDemandLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.cluster_size = cluster_size self.time_to_live = time_to_live @@ -222,4 +232,6 @@ def __init__(self, *, cluster_size, time_to_live, version, linked_service_name, self.data_node_size = data_node_size self.zookeeper_node_size = zookeeper_node_size self.script_actions = script_actions + self.virtual_network_id = virtual_network_id + self.subnet_name = subnet_name self.type = 'HDInsightOnDemand' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service.py index ab26ae10fe8c..b527f05a7e2f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service.py @@ -29,7 +29,7 @@ class HdfsLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that 
can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service_py3.py index 3b854d945e27..e004701e1da0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service_py3.py @@ -29,7 +29,7 @@ class HdfsLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source.py index 1322a0e68cea..be50590f6c32 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source.py @@ -27,6 +27,10 @@ class HdfsSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param recursive: If true, files under the folder path will be read @@ -45,6 +49,7 @@ class HdfsSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source_py3.py index 34b194f92d64..3c60cab46289 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source_py3.py @@ -27,6 +27,10 @@ class HdfsSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. 
:type type: str :param recursive: If true, files under the folder path will be read @@ -45,13 +49,14 @@ class HdfsSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, recursive=None, distcp_settings=None, **kwargs) -> None: - super(HdfsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, distcp_settings=None, **kwargs) -> None: + super(HdfsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.distcp_settings = distcp_settings self.type = 'HdfsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service.py index 57b40c30304a..c54c1393d56e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service.py @@ -29,7 +29,7 @@ class HiveLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service_py3.py index 2f742d72594c..611d30ecb781 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service_py3.py @@ -29,7 +29,7 @@ class HiveLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source.py index ad7cd5dc5a8a..3af88c3280e3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source.py @@ -27,6 +27,10 @@ class HiveSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class HiveSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source_py3.py index 7dc54994b25a..6c09191b8c1b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source_py3.py @@ -27,6 +27,10 @@ class HiveSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class HiveSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'HiveSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service.py index 86d6a072925e..6232bc45fee4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service.py @@ -29,7 +29,7 @@ class HttpLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. 
:type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service_py3.py index bd4f03006513..7f70adb08425 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service_py3.py @@ -29,7 +29,7 @@ class HttpLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source.py index 8c4a6ef6b8d7..ae131aa16c8c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source.py @@ -27,6 +27,10 @@ class HttpSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param http_request_timeout: Specifies the timeout for a HTTP client to @@ -45,6 +49,7 @@ class HttpSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source_py3.py index 78bfe7216da6..df339fc3aef7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source_py3.py @@ -27,6 +27,10 @@ class HttpSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. 
:type type: str :param http_request_timeout: Specifies the timeout for a HTTP client to @@ -45,11 +49,12 @@ class HttpSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, http_request_timeout=None, **kwargs) -> None: - super(HttpSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, http_request_timeout=None, **kwargs) -> None: + super(HttpSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.http_request_timeout = http_request_timeout self.type = 'HttpSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service.py index 08af04633c12..3d0d6cb3a6f4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service.py @@ -29,7 +29,7 @@ class HubspotLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service_py3.py index 93f66cd8e17b..272d613e9cd1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service_py3.py @@ -29,7 +29,7 @@ class HubspotLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source.py index bca6b525860c..b4b4c618c33e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source.py @@ -27,6 +27,10 @@ class HubspotSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class HubspotSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source_py3.py index cfc2d2d815b5..a29811342ce0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source_py3.py @@ -27,6 +27,10 @@ class HubspotSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class HubspotSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'HubspotSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service.py index fdc471ea225f..a704852652db 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service.py @@ -29,7 +29,7 @@ class ImpalaLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - 
Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service_py3.py index 9d79f13b9708..55b2e0c861d7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service_py3.py @@ -29,7 +29,7 @@ class ImpalaLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source.py index dec8e843d0c6..9e27dbdb6266 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source.py @@ -27,6 +27,10 @@ class ImpalaSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class ImpalaSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source_py3.py index 5bdb3391c2fc..f7dc4016d020 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source_py3.py @@ -27,6 +27,10 @@ class ImpalaSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class ImpalaSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'ImpalaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service.py index d8b9a62fc878..517cdd63caa5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service.py @@ -29,7 +29,7 @@ class JiraLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service_py3.py index 69606ee7cfcf..82dc8d578da3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service_py3.py @@ -29,7 +29,7 @@ class JiraLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source.py index 7bb6a8649b8f..709da0ce1205 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source.py @@ -27,6 +27,10 @@ class JiraSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. 
Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class JiraSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source_py3.py index 1a19ed99c55a..c958c8351bb3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source_py3.py @@ -27,6 +27,10 @@ class JiraSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class JiraSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'JiraSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py index 62f172fded76..3d4660d72e89 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py @@ -18,34 +18,38 @@ class LinkedService(Model): resource. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AzureFunctionLinkedService, ResponsysLinkedService, - AzureDatabricksLinkedService, AzureDataLakeAnalyticsLinkedService, - HDInsightOnDemandLinkedService, SalesforceMarketingCloudLinkedService, - NetezzaLinkedService, VerticaLinkedService, ZohoLinkedService, - XeroLinkedService, SquareLinkedService, SparkLinkedService, - ShopifyLinkedService, ServiceNowLinkedService, QuickBooksLinkedService, - PrestoLinkedService, PhoenixLinkedService, PaypalLinkedService, - MarketoLinkedService, MariaDBLinkedService, MagentoLinkedService, - JiraLinkedService, ImpalaLinkedService, HubspotLinkedService, - HiveLinkedService, HBaseLinkedService, GreenplumLinkedService, - GoogleBigQueryLinkedService, EloquaLinkedService, DrillLinkedService, - CouchbaseLinkedService, ConcurLinkedService, AzurePostgreSqlLinkedService, - AmazonMWSLinkedService, SapHanaLinkedService, SapBWLinkedService, - SftpServerLinkedService, FtpServerLinkedService, HttpLinkedService, - AzureSearchLinkedService, CustomDataSourceLinkedService, - AmazonRedshiftLinkedService, AmazonS3LinkedService, SapEccLinkedService, - SapCloudForCustomerLinkedService, SalesforceLinkedService, - AzureDataLakeStoreLinkedService, MongoDbLinkedService, - CassandraLinkedService, WebLinkedService, ODataLinkedService, - HdfsLinkedService, OdbcLinkedService, AzureMLLinkedService, - TeradataLinkedService, Db2LinkedService, SybaseLinkedService, - PostgreSqlLinkedService, MySqlLinkedService, AzureMySqlLinkedService, - OracleLinkedService, FileServerLinkedService, HDInsightLinkedService, - DynamicsLinkedService, CosmosDbLinkedService, AzureKeyVaultLinkedService, - AzureBatchLinkedService, AzureSqlDatabaseLinkedService, - SqlServerLinkedService, AzureSqlDWLinkedService, - AzureTableStorageLinkedService, AzureBlobStorageLinkedService, - AzureStorageLinkedService + sub-classes are: AzureFunctionLinkedService, + AzureDataExplorerLinkedService, GoogleAdWordsLinkedService, + OracleServiceCloudLinkedService, DynamicsAXLinkedService, + ResponsysLinkedService, AzureDatabricksLinkedService, + AzureDataLakeAnalyticsLinkedService, HDInsightOnDemandLinkedService, + SalesforceMarketingCloudLinkedService, NetezzaLinkedService, + VerticaLinkedService, ZohoLinkedService, XeroLinkedService, + SquareLinkedService, SparkLinkedService, ShopifyLinkedService, + ServiceNowLinkedService, QuickBooksLinkedService, PrestoLinkedService, + PhoenixLinkedService, PaypalLinkedService, MarketoLinkedService, + MariaDBLinkedService, MagentoLinkedService, JiraLinkedService, + ImpalaLinkedService, HubspotLinkedService, HiveLinkedService, + HBaseLinkedService, GreenplumLinkedService, GoogleBigQueryLinkedService, + EloquaLinkedService, DrillLinkedService, CouchbaseLinkedService, + ConcurLinkedService, AzurePostgreSqlLinkedService, AmazonMWSLinkedService, + SapHanaLinkedService, SapBWLinkedService, SftpServerLinkedService, + FtpServerLinkedService, HttpLinkedService, AzureSearchLinkedService, + CustomDataSourceLinkedService, AmazonRedshiftLinkedService, + AmazonS3LinkedService, RestServiceLinkedService, SapOpenHubLinkedService, + SapEccLinkedService, SapCloudForCustomerLinkedService, + SalesforceLinkedService, Office365LinkedService, AzureBlobFSLinkedService, + AzureDataLakeStoreLinkedService, CosmosDbMongoDbApiLinkedService, + MongoDbV2LinkedService, MongoDbLinkedService, CassandraLinkedService, + WebLinkedService, ODataLinkedService, HdfsLinkedService, OdbcLinkedService, + AzureMLLinkedService, TeradataLinkedService, Db2LinkedService, + SybaseLinkedService, PostgreSqlLinkedService, 
MySqlLinkedService, + AzureMySqlLinkedService, OracleLinkedService, FileServerLinkedService, + HDInsightLinkedService, DynamicsLinkedService, CosmosDbLinkedService, + AzureKeyVaultLinkedService, AzureBatchLinkedService, + AzureSqlDatabaseLinkedService, SqlServerLinkedService, + AzureSqlDWLinkedService, AzureTableStorageLinkedService, + AzureBlobStorageLinkedService, AzureStorageLinkedService All required parameters must be populated in order to send to Azure. @@ -61,7 +65,7 @@ class LinkedService(Model): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str @@ -81,7 +85,7 @@ class LinkedService(Model): } _subtype_map = { - 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'Salesforce': 'SalesforceLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 
'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} } def __init__(self, **kwargs): diff --git 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py index ff4bb8c7605d..eadf4030e132 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py @@ -18,34 +18,38 @@ class LinkedService(Model): resource. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureFunctionLinkedService, ResponsysLinkedService, - AzureDatabricksLinkedService, AzureDataLakeAnalyticsLinkedService, - HDInsightOnDemandLinkedService, SalesforceMarketingCloudLinkedService, - NetezzaLinkedService, VerticaLinkedService, ZohoLinkedService, - XeroLinkedService, SquareLinkedService, SparkLinkedService, - ShopifyLinkedService, ServiceNowLinkedService, QuickBooksLinkedService, - PrestoLinkedService, PhoenixLinkedService, PaypalLinkedService, - MarketoLinkedService, MariaDBLinkedService, MagentoLinkedService, - JiraLinkedService, ImpalaLinkedService, HubspotLinkedService, - HiveLinkedService, HBaseLinkedService, GreenplumLinkedService, - GoogleBigQueryLinkedService, EloquaLinkedService, DrillLinkedService, - CouchbaseLinkedService, ConcurLinkedService, AzurePostgreSqlLinkedService, - AmazonMWSLinkedService, SapHanaLinkedService, SapBWLinkedService, - SftpServerLinkedService, FtpServerLinkedService, HttpLinkedService, - AzureSearchLinkedService, CustomDataSourceLinkedService, - AmazonRedshiftLinkedService, AmazonS3LinkedService, SapEccLinkedService, - SapCloudForCustomerLinkedService, SalesforceLinkedService, - AzureDataLakeStoreLinkedService, MongoDbLinkedService, - CassandraLinkedService, WebLinkedService, ODataLinkedService, - HdfsLinkedService, OdbcLinkedService, AzureMLLinkedService, - TeradataLinkedService, Db2LinkedService, SybaseLinkedService, - PostgreSqlLinkedService, MySqlLinkedService, AzureMySqlLinkedService, - OracleLinkedService, FileServerLinkedService, HDInsightLinkedService, - DynamicsLinkedService, CosmosDbLinkedService, AzureKeyVaultLinkedService, - AzureBatchLinkedService, AzureSqlDatabaseLinkedService, - SqlServerLinkedService, AzureSqlDWLinkedService, - AzureTableStorageLinkedService, AzureBlobStorageLinkedService, - AzureStorageLinkedService + sub-classes are: AzureFunctionLinkedService, + AzureDataExplorerLinkedService, GoogleAdWordsLinkedService, + OracleServiceCloudLinkedService, DynamicsAXLinkedService, + ResponsysLinkedService, AzureDatabricksLinkedService, + AzureDataLakeAnalyticsLinkedService, HDInsightOnDemandLinkedService, + SalesforceMarketingCloudLinkedService, NetezzaLinkedService, + VerticaLinkedService, ZohoLinkedService, XeroLinkedService, + SquareLinkedService, SparkLinkedService, ShopifyLinkedService, + ServiceNowLinkedService, QuickBooksLinkedService, PrestoLinkedService, + PhoenixLinkedService, PaypalLinkedService, MarketoLinkedService, + MariaDBLinkedService, MagentoLinkedService, JiraLinkedService, + ImpalaLinkedService, HubspotLinkedService, HiveLinkedService, + HBaseLinkedService, GreenplumLinkedService, GoogleBigQueryLinkedService, + EloquaLinkedService, DrillLinkedService, CouchbaseLinkedService, + ConcurLinkedService, AzurePostgreSqlLinkedService, AmazonMWSLinkedService, + SapHanaLinkedService, SapBWLinkedService, SftpServerLinkedService, + FtpServerLinkedService, HttpLinkedService, AzureSearchLinkedService, + CustomDataSourceLinkedService, 
AmazonRedshiftLinkedService, + AmazonS3LinkedService, RestServiceLinkedService, SapOpenHubLinkedService, + SapEccLinkedService, SapCloudForCustomerLinkedService, + SalesforceLinkedService, Office365LinkedService, AzureBlobFSLinkedService, + AzureDataLakeStoreLinkedService, CosmosDbMongoDbApiLinkedService, + MongoDbV2LinkedService, MongoDbLinkedService, CassandraLinkedService, + WebLinkedService, ODataLinkedService, HdfsLinkedService, OdbcLinkedService, + AzureMLLinkedService, TeradataLinkedService, Db2LinkedService, + SybaseLinkedService, PostgreSqlLinkedService, MySqlLinkedService, + AzureMySqlLinkedService, OracleLinkedService, FileServerLinkedService, + HDInsightLinkedService, DynamicsLinkedService, CosmosDbLinkedService, + AzureKeyVaultLinkedService, AzureBatchLinkedService, + AzureSqlDatabaseLinkedService, SqlServerLinkedService, + AzureSqlDWLinkedService, AzureTableStorageLinkedService, + AzureBlobStorageLinkedService, AzureStorageLinkedService All required parameters must be populated in order to send to Azure. @@ -61,7 +65,7 @@ class LinkedService(Model): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str @@ -81,7 +85,7 @@ class LinkedService(Model): } _subtype_map = { - 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'Salesforce': 'SalesforceLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 
'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 
'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} } def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service.py index 5fb8974f28db..9d65437b5daa 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service.py @@ -29,7 +29,7 @@ class MagentoLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service_py3.py index 420656103983..74de1573118b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service_py3.py @@ -29,7 +29,7 @@ class MagentoLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source.py index 679ba2a0669e..df49fe63a544 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source.py @@ -27,6 +27,10 @@ class MagentoSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
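The expanded Known sub-classes list and _subtype_map above are not just documentation: msrest reads the type discriminator out of a payload and dispatches to whichever model class is registered for it. A hedged sketch of that dispatch (the payload is invented; Model.deserialize is the stock msrest classmethod):

    from azure.mgmt.datafactory.models import LinkedService

    payload = {
        "type": "AzureDataExplorer",  # discriminator value from _subtype_map
        "typeProperties": {
            "endpoint": "https://myadx.westus2.kusto.windows.net",
            "database": "mydb",
        },
    }

    # msrest consults _subtype_map on the base class and builds the subclass.
    linked_service = LinkedService.deserialize(payload)
    print(type(linked_service).__name__)  # AzureDataExplorerLinkedService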
Type: string (or @@ -42,6 +46,7 @@ class MagentoSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source_py3.py index a01cf80a969a..15efcc12a054 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source_py3.py @@ -27,6 +27,10 @@ class MagentoSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class MagentoSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'MagentoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service.py index 0a98a04138dc..3bbe048d4877 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service.py @@ -29,7 +29,7 @@ class MariaDBLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service_py3.py index ef1114660ad7..475284d56038 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service_py3.py @@ -29,7 +29,7 @@ class MariaDBLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source.py index 96b7116cd3ac..a744c1c5ff8f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source.py @@ -27,6 +27,10 @@ class MariaDBSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class MariaDBSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source_py3.py index 1dbb6f327d04..472877b8f0bb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source_py3.py @@ -27,6 +27,10 @@ class MariaDBSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
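The attribute maps pair each Python name with its wire name, which is where the camelCase maxConcurrentConnections key reappears on serialization. A small sketch using msrest's Model.serialize (expected output shown as a comment; None-valued fields are omitted from the result):

    from azure.mgmt.datafactory.models import MariaDBSource

    src = MariaDBSource(query="SELECT 1", max_concurrent_connections=8)
    print(src.serialize())
    # {'maxConcurrentConnections': 8, 'type': 'MariaDBSource', 'query': 'SELECT 1'}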
Type: string (or @@ -42,11 +46,12 @@ class MariaDBSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(MariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'MariaDBSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service.py index 432676824a75..2a9e76446122 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service.py @@ -29,7 +29,7 @@ class MarketoLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service_py3.py index b4e360931809..dc326f24acd5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service_py3.py @@ -29,7 +29,7 @@ class MarketoLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source.py index 4867951baae7..6d2061ef0dee 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source.py @@ -27,6 +27,10 @@ class MarketoSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). 
+ :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class MarketoSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source_py3.py index 52c16eae0437..573dc0439754 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source_py3.py @@ -27,6 +27,10 @@ class MarketoSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class MarketoSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'MarketoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties.py new file mode 100644 index 000000000000..a2d2127d1397 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class MongoDbCursorMethodsProperties(Model): + """Cursor methods for MongoDB query. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param project: Specifies the fields to return in the documents that match + the query filter. To return all fields in the matching documents, omit + this parameter. Type: string (or Expression with resultType string). + :type project: object + :param sort: Specifies the order in which the query returns matching + documents. Type: string (or Expression with resultType string). + :type sort: object + :param skip: Specifies how many documents are skipped and where MongoDB + begins returning results. This approach may be useful in implementing + paginated results. Type: integer (or Expression with resultType integer). + :type skip: object + :param limit: Specifies the maximum number of documents the server + returns. limit() is analogous to the LIMIT statement in a SQL database. + Type: integer (or Expression with resultType integer). + :type limit: object + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'project': {'key': 'project', 'type': 'object'}, + 'sort': {'key': 'sort', 'type': 'object'}, + 'skip': {'key': 'skip', 'type': 'object'}, + 'limit': {'key': 'limit', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MongoDbCursorMethodsProperties, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.project = kwargs.get('project', None) + self.sort = kwargs.get('sort', None) + self.skip = kwargs.get('skip', None) + self.limit = kwargs.get('limit', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties_py3.py new file mode 100644 index 000000000000..e1e3f50d1539 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties_py3.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class MongoDbCursorMethodsProperties(Model): + """Cursor methods for MongoDB query. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param project: Specifies the fields to return in the documents that match + the query filter. To return all fields in the matching documents, omit + this parameter. Type: string (or Expression with resultType string). + :type project: object + :param sort: Specifies the order in which the query returns matching + documents. Type: string (or Expression with resultType string).
+ :type sort: object + :param skip: Specifies how many documents are skipped and where MongoDB + begins returning results. This approach may be useful in implementing + paginated results. Type: integer (or Expression with resultType integer). + :type skip: object + :param limit: Specifies the maximum number of documents the server + returns. limit() is analogous to the LIMIT statement in a SQL database. + Type: integer (or Expression with resultType integer). + :type limit: object + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'project': {'key': 'project', 'type': 'object'}, + 'sort': {'key': 'sort', 'type': 'object'}, + 'skip': {'key': 'skip', 'type': 'object'}, + 'limit': {'key': 'limit', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, project=None, sort=None, skip=None, limit=None, **kwargs) -> None: + super(MongoDbCursorMethodsProperties, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.project = project + self.sort = sort + self.skip = skip + self.limit = limit diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service.py index 49d53510f7fd..76d162b0ff70 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service.py @@ -29,7 +29,7 @@ class MongoDbLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service_py3.py index c1d96a5465b9..95308b6ea8f0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service_py3.py @@ -29,7 +29,7 @@ class MongoDbLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source.py index b9f0be6b97d3..3da4b931f5e5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source.py @@ -27,6 +27,10 @@ class MongoDbSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server.
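MongoDbCursorMethodsProperties, added above, maps one-to-one onto MongoDB's cursor modifiers, so server-side pagination becomes declarative. A sketch with placeholder values (judging by the file list in this patch, the object is meant to hang off the new MongoDB v2 and Cosmos DB Mongo API source models):

    from azure.mgmt.datafactory.models import MongoDbCursorMethodsProperties

    # Page 3 of a feed: skip two pages of 100, return the next 100, newest first.
    cursor_methods = MongoDbCursorMethodsProperties(
        project="{ _id: 0, name: 1, updated: 1 }",  # projection document as a string
        sort="{ updated: -1 }",
        skip=200,
        limit=100,
    )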
:type type: str :param query: Database query. Should be a SQL-92 query expression. Type: @@ -42,6 +46,7 @@ class MongoDbSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source_py3.py index b4f01d8d7ffb..ab3e5b6e0cc9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source_py3.py @@ -27,6 +27,10 @@ class MongoDbSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. Should be a SQL-92 query expression. Type: @@ -42,11 +46,12 @@ class MongoDbSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'MongoDbSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset.py new file mode 100644 index 000000000000..17089373d4c5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class MongoDbV2CollectionDataset(Dataset): + """The MongoDB database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection: Required. The collection name of the MongoDB database. + Type: string (or Expression with resultType string). + :type collection: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'collection': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MongoDbV2CollectionDataset, self).__init__(**kwargs) + self.collection = kwargs.get('collection', None) + self.type = 'MongoDbV2Collection' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset_py3.py new file mode 100644 index 000000000000..ad1e5c538645 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated.
+# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class MongoDbV2CollectionDataset(Dataset): + """The MongoDB database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection: Required. The collection name of the MongoDB database. + Type: string (or Expression with resultType string). + :type collection: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'collection': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, collection, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(MongoDbV2CollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.collection = collection + self.type = 'MongoDbV2Collection' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service.py new file mode 100644 index 000000000000..bb29fc767420 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class MongoDbV2LinkedService(LinkedService):
+    """Linked service for MongoDB data source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param connection_string: Required. The MongoDB connection string.
+     Type: string, SecureString or
+     AzureKeyVaultSecretReference.
+    :type connection_string: object
+    :param database: Required. The name of the MongoDB database that you want
+     to access. Type: string (or Expression with resultType string).
+    :type database: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'connection_string': {'required': True},
+        'database': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+        'database': {'key': 'typeProperties.database', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(MongoDbV2LinkedService, self).__init__(**kwargs)
+        self.connection_string = kwargs.get('connection_string', None)
+        self.database = kwargs.get('database', None)
+        self.type = 'MongoDbV2'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service_py3.py
new file mode 100644
index 000000000000..d1388ce797a5
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service_py3.py
@@ -0,0 +1,66 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
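# --------------------------------------------------------------------------
# [usage sketch] Illustrative only, not part of the generated patch: a
# MongoDbV2LinkedService as defined above. The connection string value is a
# placeholder and could also be supplied as an AzureKeyVaultSecretReference.
from azure.mgmt.datafactory.models import MongoDbV2LinkedService, SecureString

mongo_ls = MongoDbV2LinkedService(
    connection_string=SecureString(value='mongodb://<host>:27017'),
    database='contosodb',  # required: the MongoDB database to access
)
# --------------------------------------------------------------------------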
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class MongoDbV2LinkedService(LinkedService):
+    """Linked service for MongoDB data source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param connection_string: Required. The MongoDB connection string.
+     Type: string, SecureString or
+     AzureKeyVaultSecretReference.
+    :type connection_string: object
+    :param database: Required. The name of the MongoDB database that you want
+     to access. Type: string (or Expression with resultType string).
+    :type database: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'connection_string': {'required': True},
+        'database': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+        'database': {'key': 'typeProperties.database', 'type': 'object'},
+    }
+
+    def __init__(self, *, connection_string, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None:
+        super(MongoDbV2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+        self.connection_string = connection_string
+        self.database = database
+        self.type = 'MongoDbV2'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source.py
new file mode 100644
index 000000000000..295b74228b9a
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source.py
@@ -0,0 +1,71 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source import CopySource
+
+
+class MongoDbV2Source(CopySource):
+    """A copy activity source for a MongoDB database.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param filter: Specifies selection filter using query operators. To return
+     all documents in a collection, omit this parameter or pass an empty
+     document ({}). Type: string (or Expression with resultType string).
+    :type filter: object
+    :param cursor_methods: Cursor methods for MongoDB query.
+    :type cursor_methods:
+     ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties
+    :param batch_size: Specifies the number of documents to return in each
+     batch of the response from the MongoDB instance. In most cases, modifying
+     the batch size will not affect the user or the application. This
+     property's main purpose is to avoid hitting the limit on response size.
+     Type: integer (or Expression with resultType integer).
+    :type batch_size: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'filter': {'key': 'filter', 'type': 'object'},
+        'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'},
+        'batch_size': {'key': 'batchSize', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(MongoDbV2Source, self).__init__(**kwargs)
+        self.filter = kwargs.get('filter', None)
+        self.cursor_methods = kwargs.get('cursor_methods', None)
+        self.batch_size = kwargs.get('batch_size', None)
+        self.type = 'MongoDbV2Source'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py
new file mode 100644
index 000000000000..872b060a49bb
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py
@@ -0,0 +1,71 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source_py3 import CopySource
+
+
+class MongoDbV2Source(CopySource):
+    """A copy activity source for a MongoDB database.
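# --------------------------------------------------------------------------
# [usage sketch] Illustrative only, not part of the generated patch: a
# MongoDbV2Source (Python 2 variant above) with a filter and cursor methods.
# MongoDbCursorMethodsProperties is assumed to take project/sort/skip/limit
# keywords, matching its docstring elsewhere; the filter document is invented.
from azure.mgmt.datafactory.models import (
    MongoDbCursorMethodsProperties, MongoDbV2Source)

mongo_v2_source = MongoDbV2Source(
    filter='{"status": "active"}',  # omit or pass {} to return all documents
    cursor_methods=MongoDbCursorMethodsProperties(
        sort='{"_id": 1}', limit=100),
    batch_size=1000,  # documents per response batch
)
# --------------------------------------------------------------------------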
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param filter: Specifies selection filter using query operators. To return
+     all documents in a collection, omit this parameter or pass an empty
+     document ({}). Type: string (or Expression with resultType string).
+    :type filter: object
+    :param cursor_methods: Cursor methods for MongoDB query.
+    :type cursor_methods:
+     ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties
+    :param batch_size: Specifies the number of documents to return in each
+     batch of the response from the MongoDB instance. In most cases, modifying
+     the batch size will not affect the user or the application. This
+     property's main purpose is to avoid hitting the limit on response size.
+     Type: integer (or Expression with resultType integer).
+    :type batch_size: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'filter': {'key': 'filter', 'type': 'object'},
+        'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'},
+        'batch_size': {'key': 'batchSize', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None, **kwargs) -> None:
+        super(MongoDbV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.filter = filter
+        self.cursor_methods = cursor_methods
+        self.batch_size = batch_size
+        self.type = 'MongoDbV2Source'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger.py
index dd279ab6baa3..1be28aa1b6ab 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger.py
@@ -34,6 +34,9 @@ class MultiplePipelineTrigger(Trigger):
     'Started', 'Stopped', 'Disabled'
     :vartype runtime_state: str or
      ~azure.mgmt.datafactory.models.TriggerRuntimeState
+    :param annotations: List of tags that can be used for describing the
+     trigger.
+    :type annotations: list[object]
     :param type: Required.
Constant filled by server. :type type: str :param pipelines: Pipelines that need to be started. @@ -50,6 +53,7 @@ class MultiplePipelineTrigger(Trigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger_py3.py index 3400431e49e2..206ab74ef419 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger_py3.py @@ -34,6 +34,9 @@ class MultiplePipelineTrigger(Trigger): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param pipelines: Pipelines that need to be started. @@ -50,6 +53,7 @@ class MultiplePipelineTrigger(Trigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, } @@ -58,7 +62,7 @@ class MultiplePipelineTrigger(Trigger): 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} } - def __init__(self, *, additional_properties=None, description: str=None, pipelines=None, **kwargs) -> None: - super(MultiplePipelineTrigger, self).__init__(additional_properties=additional_properties, description=description, **kwargs) + def __init__(self, *, additional_properties=None, description: str=None, annotations=None, pipelines=None, **kwargs) -> None: + super(MultiplePipelineTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) self.pipelines = pipelines self.type = 'MultiplePipelineTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service.py index 542fb13b7a37..ec85b0136714 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service.py @@ -29,7 +29,7 @@ class MySqlLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service_py3.py index cd87d5e7e3b5..b8038df22fd6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service_py3.py @@ -29,7 +29,7 @@ class MySqlLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service.py index 319a68efddc5..5d94bdecaf62 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service.py @@ -29,7 +29,7 @@ class NetezzaLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service_py3.py index 6c3b607d60dc..2fcc288fd5b7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service_py3.py @@ -29,7 +29,7 @@ class NetezzaLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py index 0c08b1440614..caf73f9ef81d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py @@ -27,6 +27,10 @@ class NetezzaSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,6 +46,7 @@ class NetezzaSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py index 2b4c38f708ee..101a1f26a74d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py @@ -27,6 +27,10 @@ class NetezzaSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class NetezzaSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'NetezzaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service.py index 9a7edca9ddb1..01db8d71e924 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service.py @@ -29,7 +29,7 @@ class ODataLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str @@ -37,7 +37,8 @@ class ODataLinkedService(LinkedService): (or Expression with resultType string). :type url: object :param authentication_type: Type of authentication used to connect to the - OData service. 
Possible values include: 'Basic', 'Anonymous'
+     OData service. Possible values include: 'Basic', 'Anonymous', 'Windows',
+     'AadServicePrincipal', 'ManagedServiceIdentity'
     :type authentication_type: str or
      ~azure.mgmt.datafactory.models.ODataAuthenticationType
     :param user_name: User name of the OData service. Type: string (or
@@ -45,6 +46,38 @@ class ODataLinkedService(LinkedService):
     :type user_name: object
     :param password: Password of the OData service.
     :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param tenant: Specify the tenant information (domain name or tenant ID)
+     under which your application resides. Type: string (or Expression with
+     resultType string).
+    :type tenant: object
+    :param service_principal_id: Specify the application ID of your
+     application registered in Azure Active Directory. Type: string (or
+     Expression with resultType string).
+    :type service_principal_id: object
+    :param aad_resource_id: Specify the resource you are requesting
+     authorization to use. Type: string (or Expression with
+     resultType string).
+    :type aad_resource_id: object
+    :param aad_service_principal_credential_type: Specify the credential type
+     (key or cert) that is used for the service principal. Possible values
+     include: 'ServicePrincipalKey', 'ServicePrincipalCert'
+    :type aad_service_principal_credential_type: str or
+     ~azure.mgmt.datafactory.models.ODataAadServicePrincipalCredentialType
+    :param service_principal_key: Specify the secret of your application
+     registered in Azure Active Directory. Type: string (or Expression with
+     resultType string).
+    :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
+    :param service_principal_embedded_cert: Specify the base64 encoded
+     certificate of your application registered in Azure Active Directory.
+     Type: string (or Expression with resultType string).
+    :type service_principal_embedded_cert:
+     ~azure.mgmt.datafactory.models.SecretBase
+    :param service_principal_embedded_cert_password: Specify the password of
+     your certificate if your certificate has a password and you are using
+     AadServicePrincipal authentication. Type: string (or Expression with
+     resultType string).
+    :type service_principal_embedded_cert_password:
+     ~azure.mgmt.datafactory.models.SecretBase
     :param encrypted_credential: The encrypted credential used for
      authentication. Credentials are encrypted using the integration runtime
      credential manager. Type: string (or Expression with resultType string).
@@ -67,6 +100,13 @@ class ODataLinkedService(LinkedService): 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'aad_service_principal_credential_type': {'key': 'typeProperties.aadServicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'service_principal_embedded_cert': {'key': 'typeProperties.servicePrincipalEmbeddedCert', 'type': 'SecretBase'}, + 'service_principal_embedded_cert_password': {'key': 'typeProperties.servicePrincipalEmbeddedCertPassword', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -76,5 +116,12 @@ def __init__(self, **kwargs): self.authentication_type = kwargs.get('authentication_type', None) self.user_name = kwargs.get('user_name', None) self.password = kwargs.get('password', None) + self.tenant = kwargs.get('tenant', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.aad_resource_id = kwargs.get('aad_resource_id', None) + self.aad_service_principal_credential_type = kwargs.get('aad_service_principal_credential_type', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.service_principal_embedded_cert = kwargs.get('service_principal_embedded_cert', None) + self.service_principal_embedded_cert_password = kwargs.get('service_principal_embedded_cert_password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'OData' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service_py3.py index 688bb4e4ffda..fcf2d8bb9819 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service_py3.py @@ -29,7 +29,7 @@ class ODataLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str @@ -37,7 +37,8 @@ class ODataLinkedService(LinkedService): (or Expression with resultType string). :type url: object :param authentication_type: Type of authentication used to connect to the - OData service. Possible values include: 'Basic', 'Anonymous' + OData service. Possible values include: 'Basic', 'Anonymous', 'Windows', + 'AadServicePrincipal', 'ManagedServiceIdentity' :type authentication_type: str or ~azure.mgmt.datafactory.models.ODataAuthenticationType :param user_name: User name of the OData service. Type: string (or @@ -45,6 +46,38 @@ class ODataLinkedService(LinkedService): :type user_name: object :param password: Password of the OData service. 
:type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param tenant: Specify the tenant information (domain name or tenant ID)
+     under which your application resides. Type: string (or Expression with
+     resultType string).
+    :type tenant: object
+    :param service_principal_id: Specify the application ID of your
+     application registered in Azure Active Directory. Type: string (or
+     Expression with resultType string).
+    :type service_principal_id: object
+    :param aad_resource_id: Specify the resource you are requesting
+     authorization to use. Type: string (or Expression with
+     resultType string).
+    :type aad_resource_id: object
+    :param aad_service_principal_credential_type: Specify the credential type
+     (key or cert) that is used for the service principal. Possible values
+     include: 'ServicePrincipalKey', 'ServicePrincipalCert'
+    :type aad_service_principal_credential_type: str or
+     ~azure.mgmt.datafactory.models.ODataAadServicePrincipalCredentialType
+    :param service_principal_key: Specify the secret of your application
+     registered in Azure Active Directory. Type: string (or Expression with
+     resultType string).
+    :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
+    :param service_principal_embedded_cert: Specify the base64 encoded
+     certificate of your application registered in Azure Active Directory.
+     Type: string (or Expression with resultType string).
+    :type service_principal_embedded_cert:
+     ~azure.mgmt.datafactory.models.SecretBase
+    :param service_principal_embedded_cert_password: Specify the password of
+     your certificate if your certificate has a password and you are using
+     AadServicePrincipal authentication. Type: string (or Expression with
+     resultType string).
+    :type service_principal_embedded_cert_password:
+     ~azure.mgmt.datafactory.models.SecretBase
     :param encrypted_credential: The encrypted credential used for
      authentication. Credentials are encrypted using the integration runtime
      credential manager. Type: string (or Expression with resultType string).
@@ -67,14 +100,28 @@ class ODataLinkedService(LinkedService): 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'aad_service_principal_credential_type': {'key': 'typeProperties.aadServicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'service_principal_embedded_cert': {'key': 'typeProperties.servicePrincipalEmbeddedCert', 'type': 'SecretBase'}, + 'service_principal_embedded_cert_password': {'key': 'typeProperties.servicePrincipalEmbeddedCertPassword', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, user_name=None, password=None, tenant=None, service_principal_id=None, aad_resource_id=None, aad_service_principal_credential_type=None, service_principal_key=None, service_principal_embedded_cert=None, service_principal_embedded_cert_password=None, encrypted_credential=None, **kwargs) -> None: super(ODataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.url = url self.authentication_type = authentication_type self.user_name = user_name self.password = password + self.tenant = tenant + self.service_principal_id = service_principal_id + self.aad_resource_id = aad_resource_id + self.aad_service_principal_credential_type = aad_service_principal_credential_type + self.service_principal_key = service_principal_key + self.service_principal_embedded_cert = service_principal_embedded_cert + self.service_principal_embedded_cert_password = service_principal_embedded_cert_password self.encrypted_credential = encrypted_credential self.type = 'OData' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service.py index 43559b76e0e0..53d21dee2def 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service.py @@ -29,7 +29,7 @@ class OdbcLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
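# --------------------------------------------------------------------------
# [usage sketch] Illustrative only, not part of the generated patch: the new
# AadServicePrincipal authentication options on ODataLinkedService. All
# identifier and secret values below are placeholders.
from azure.mgmt.datafactory.models import ODataLinkedService, SecureString

odata_ls = ODataLinkedService(
    url='https://<service>/odata/',
    authentication_type='AadServicePrincipal',
    tenant='<tenant-id-or-domain>',
    service_principal_id='<application-id>',
    aad_service_principal_credential_type='ServicePrincipalKey',
    service_principal_key=SecureString(value='<application-key>'),
)
# --------------------------------------------------------------------------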
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service_py3.py index e0147881f3d0..2e376d23c67a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service_py3.py @@ -29,7 +29,7 @@ class OdbcLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink.py index 4598952cb21b..ced7e1dbd9e4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink.py @@ -34,6 +34,10 @@ class OdbcSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. Type: @@ -51,6 +55,7 @@ class OdbcSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink_py3.py index 430329bdf2b9..9a181f8df7e9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink_py3.py @@ -34,6 +34,10 @@ class OdbcSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. 
Type: @@ -51,11 +55,12 @@ class OdbcSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, pre_copy_script=None, **kwargs) -> None: - super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.pre_copy_script = pre_copy_script self.type = 'OdbcSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset.py new file mode 100644 index 000000000000..baa90666d669 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset.py @@ -0,0 +1,79 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class Office365Dataset(Dataset): + """The Office365 account. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. 
If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: Required. Name of the dataset to extract from Office + 365. Type: string (or Expression with resultType string). + :type table_name: object + :param predicate: A predicate expression that can be used to filter the + specific rows to extract from Office 365. Type: string (or Expression with + resultType string). + :type predicate: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'table_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'predicate': {'key': 'typeProperties.predicate', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(Office365Dataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.predicate = kwargs.get('predicate', None) + self.type = 'Office365Table' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset_py3.py new file mode 100644 index 000000000000..5517f7daf9e3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset_py3.py @@ -0,0 +1,79 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class Office365Dataset(Dataset): + """The Office365 account. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: Required. Name of the dataset to extract from Office + 365. Type: string (or Expression with resultType string). + :type table_name: object + :param predicate: A predicate expression that can be used to filter the + specific rows to extract from Office 365. Type: string (or Expression with + resultType string). + :type predicate: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'table_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'predicate': {'key': 'typeProperties.predicate', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, predicate=None, **kwargs) -> None: + super(Office365Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.predicate = predicate + self.type = 'Office365Table' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service.py new file mode 100644 index 000000000000..2dc98897482a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service.py @@ -0,0 +1,83 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class Office365LinkedService(LinkedService): + """Office365 linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param office365_tenant_id: Required. Azure tenant ID to which the Office + 365 account belongs. Type: string (or Expression with resultType string). + :type office365_tenant_id: object + :param service_principal_tenant_id: Required. Specify the tenant + information under which your Azure AD web application resides. Type: + string (or Expression with resultType string). + :type service_principal_tenant_id: object + :param service_principal_id: Required. Specify the application's client + ID. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. Specify the application's key. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'office365_tenant_id': {'required': True}, + 'service_principal_tenant_id': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'office365_tenant_id': {'key': 'typeProperties.office365TenantId', 'type': 'object'}, + 'service_principal_tenant_id': {'key': 'typeProperties.servicePrincipalTenantId', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(Office365LinkedService, self).__init__(**kwargs) + self.office365_tenant_id = kwargs.get('office365_tenant_id', None) + self.service_principal_tenant_id = kwargs.get('service_principal_tenant_id', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Office365' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service_py3.py new file mode 100644 index 000000000000..5a69c0d895fa --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service_py3.py @@ -0,0 +1,83 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class Office365LinkedService(LinkedService): + """Office365 linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param office365_tenant_id: Required. Azure tenant ID to which the Office + 365 account belongs. Type: string (or Expression with resultType string). + :type office365_tenant_id: object + :param service_principal_tenant_id: Required. Specify the tenant + information under which your Azure AD web application resides. Type: + string (or Expression with resultType string). + :type service_principal_tenant_id: object + :param service_principal_id: Required. Specify the application's client + ID. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. Specify the application's key. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'office365_tenant_id': {'required': True}, + 'service_principal_tenant_id': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'office365_tenant_id': {'key': 'typeProperties.office365TenantId', 'type': 'object'}, + 'service_principal_tenant_id': {'key': 'typeProperties.servicePrincipalTenantId', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, office365_tenant_id, service_principal_tenant_id, service_principal_id, service_principal_key, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, encrypted_credential=None, **kwargs) -> None: + super(Office365LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.office365_tenant_id = office365_tenant_id + self.service_principal_tenant_id = service_principal_tenant_id + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.encrypted_credential = encrypted_credential + self.type = 'Office365' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source.py new file mode 100644 index 000000000000..8dff7a01ccaa --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source.py @@ -0,0 +1,52 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class Office365Source(CopySource): + """A copy activity source for an Office365 service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
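# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch only, not part of the generated patch.
# A minimal construction of the Office365LinkedService defined above, using
# the required parameters from its own signature; the tenant and application
# identifiers and the key value are hypothetical placeholders.
from azure.mgmt.datafactory.models import Office365LinkedService, SecureString

office365_ls = Office365LinkedService(
    office365_tenant_id='00000000-0000-0000-0000-000000000000',        # AAD tenant of the Office 365 account
    service_principal_tenant_id='00000000-0000-0000-0000-000000000000',
    service_principal_id='my-app-client-id',
    service_principal_key=SecureString(value='my-app-key'),            # any SecretBase subtype works here
)
# ---------------------------------------------------------------------------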
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(Office365Source, self).__init__(**kwargs) + self.type = 'Office365Source' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source_py3.py new file mode 100644 index 000000000000..25ae6340ae01 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source_py3.py @@ -0,0 +1,52 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class Office365Source(CopySource): + """A copy activity source for an Office365 service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: + super(Office365Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'Office365Source' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service.py index 5485151adb1f..19f715dfd9e2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service.py @@ -29,7 +29,7 @@ class OracleLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service_py3.py index 80b0ed1176ff..a46f0463afb5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service_py3.py @@ -29,7 +29,7 @@ class OracleLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service.py new file mode 100644 index 000000000000..44ce000868b7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service.py @@ -0,0 +1,95 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class OracleServiceCloudLinkedService(LinkedService): + """Oracle Service Cloud linked service. 
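# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch only, not part of the generated patch.
# Wiring the Office365Source defined above into a copy activity; the dataset
# reference names and the blob sink are hypothetical stand-ins.
from azure.mgmt.datafactory.models import (
    BlobSink, CopyActivity, DatasetReference, Office365Source)

copy_activity = CopyActivity(
    name='CopyFromOffice365',
    inputs=[DatasetReference(reference_name='Office365InputDataset')],
    outputs=[DatasetReference(reference_name='BlobOutputDataset')],
    source=Office365Source(max_concurrent_connections=4),  # new optional knob
    sink=BlobSink(),
)
# ---------------------------------------------------------------------------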
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The URL of the Oracle Service Cloud instance. + :type host: object + :param username: Required. The user name that you use to access Oracle + Service Cloud server. + :type username: object + :param password: Required. The password corresponding to the user name + that you provided in the username key. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. Type: boolean (or + Expression with resultType boolean). + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'username': {'required': True}, + 'password': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OracleServiceCloudLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'OracleServiceCloud' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service_py3.py new file mode 100644 index 000000000000..8732e2e82ca0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service_py3.py @@ -0,0 +1,95 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class OracleServiceCloudLinkedService(LinkedService): + """Oracle Service Cloud linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. 
Constant filled by server. + :type type: str + :param host: Required. The URL of the Oracle Service Cloud instance. + :type host: object + :param username: Required. The user name that you use to access Oracle + Service Cloud server. + :type username: object + :param password: Required. The password corresponding to the user name + that you provided in the username key. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. Type: boolean (or + Expression with resultType boolean). + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'username': {'required': True}, + 'password': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, username, password, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(OracleServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.username = username + self.password = password + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'OracleServiceCloud' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset.py new file mode 100644 index 000000000000..35ce3439d8a0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class OracleServiceCloudObjectDataset(Dataset): + """Oracle Service Cloud dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
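# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch only, not part of the generated patch.
# Creating the OracleServiceCloudLinkedService defined above; the host, user
# name, and password values are hypothetical, and the SSL-related flags keep
# their documented defaults.
from azure.mgmt.datafactory.models import (
    OracleServiceCloudLinkedService, SecureString)

osc_ls = OracleServiceCloudLinkedService(
    host='https://mysite.example.com',               # hypothetical instance URL
    username='integration_user',
    password=SecureString(value='placeholder-password'),
    use_encrypted_endpoints=True,                    # documented default is true
)
# ---------------------------------------------------------------------------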
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OracleServiceCloudObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'OracleServiceCloudObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset_py3.py new file mode 100644 index 000000000000..a478e1abc828 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class OracleServiceCloudObjectDataset(Dataset): + """Oracle Service Cloud dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(OracleServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'OracleServiceCloudObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source.py new file mode 100644 index 000000000000..f42291941393 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class OracleServiceCloudSource(CopySource): + """A copy activity Oracle Service Cloud source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
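# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch only, not part of the generated patch.
# A dataset pointing at one Oracle Service Cloud object through the linked
# service above; the reference name and table name are hypothetical.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, OracleServiceCloudObjectDataset)

osc_ds = OracleServiceCloudObjectDataset(
    linked_service_name=LinkedServiceReference(
        reference_name='OracleServiceCloudLinkedService'),
    table_name='Contacts',   # optional; a string or an Expression
)
# ---------------------------------------------------------------------------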
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OracleServiceCloudSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'OracleServiceCloudSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source_py3.py new file mode 100644 index 000000000000..1fa5d6eb3748 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class OracleServiceCloudSource(CopySource): + """A copy activity Oracle Service Cloud source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'OracleServiceCloudSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink.py index fa0e11f57553..1f6c747c49db 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink.py @@ -34,6 +34,10 @@ class OracleSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: SQL pre-copy script. Type: string (or Expression @@ -51,6 +55,7 @@ class OracleSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink_py3.py index a6b666d31ed7..3a571c66732a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink_py3.py @@ -34,6 +34,10 @@ class OracleSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: SQL pre-copy script. 
Type: string (or Expression @@ -51,11 +55,12 @@ class OracleSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, pre_copy_script=None, **kwargs) -> None: - super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.pre_copy_script = pre_copy_script self.type = 'OracleSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py index 3f74cf83ee7a..12b3aa31353f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py @@ -27,6 +27,10 @@ class OracleSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param oracle_reader_query: Oracle reader query. Type: string (or @@ -46,6 +50,7 @@ class OracleSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py index 89252615e6e5..43afe27fda2f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py @@ -27,6 +27,10 @@ class OracleSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param oracle_reader_query: Oracle reader query. Type: string (or @@ -46,13 +50,14 @@ class OracleSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, oracle_reader_query=None, query_timeout=None, **kwargs) -> None: - super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, oracle_reader_query=None, query_timeout=None, **kwargs) -> None: + super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.oracle_reader_query = oracle_reader_query self.query_timeout = query_timeout self.type = 'OracleSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py index 4af8faaca8db..af51100cd88e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py @@ -43,15 +43,14 @@ class OracleTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: Required. The table name of the on-premises Oracle - database. Type: string (or Expression with resultType string). + :param table_name: The table name of the on-premises Oracle database. + Type: string (or Expression with resultType string). :type table_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'table_name': {'required': True}, } _attribute_map = { diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py index aaa1291c8f76..563371653de8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py @@ -43,15 +43,14 @@ class OracleTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: Required. The table name of the on-premises Oracle - database. 
Type: string (or Expression with resultType string). + :param table_name: The table name of the on-premises Oracle database. + Type: string (or Expression with resultType string). :type table_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'table_name': {'required': True}, } _attribute_map = { @@ -67,7 +66,7 @@ class OracleTableDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: super(OracleTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name self.type = 'OracleTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service.py index 190fc45985d3..d7ae0bc075e7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service.py @@ -29,7 +29,7 @@ class PaypalLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service_py3.py index 832b0dff257b..c11cda7a52f3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service_py3.py @@ -29,7 +29,7 @@ class PaypalLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source.py index 5bb73029d10c..94cdbccae6ee 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source.py @@ -27,6 +27,10 @@ class PaypalSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). 
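# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch only, not part of the generated patch.
# The maxConcurrentConnections property this patch surfaces on OracleSink and
# OracleSource above; the script and query texts are hypothetical.
from azure.mgmt.datafactory.models import OracleSink, OracleSource

oracle_sink = OracleSink(
    pre_copy_script='TRUNCATE TABLE staging.orders',
    max_concurrent_connections=2,    # cap parallel connections to the sink store
)
oracle_source = OracleSource(
    oracle_reader_query='SELECT * FROM orders',
    max_concurrent_connections=4,    # cap parallel connections to the source store
)
# ---------------------------------------------------------------------------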
+ :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class PaypalSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source_py3.py index 6a9dcce16a2d..05730d0ae067 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source_py3.py @@ -27,6 +27,10 @@ class PaypalSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class PaypalSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'PaypalSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service.py index b9d16bc32c56..308a8e4cf592 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service.py @@ -29,7 +29,7 @@ class PhoenixLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
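# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch only, not part of the generated patch.
# Because the diff above drops table_name from OracleTableDataset's required
# list, the dataset can now be created with just a linked service reference;
# the reference name is hypothetical.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, OracleTableDataset)

oracle_ds = OracleTableDataset(
    linked_service_name=LinkedServiceReference(
        reference_name='OracleLinkedService'),
    # table_name may now be omitted and supplied later, e.g. via an Expression
)
# ---------------------------------------------------------------------------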
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service_py3.py index aeb89e4fdd4a..de8210c2cc89 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service_py3.py @@ -29,7 +29,7 @@ class PhoenixLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source.py index daad6ec41c31..30171c6177ff 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source.py @@ -27,6 +27,10 @@ class PhoenixSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class PhoenixSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source_py3.py index 619e7220dd09..1384f59e1aa4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source_py3.py @@ -27,6 +27,10 @@ class PhoenixSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class PhoenixSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'PhoenixSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run.py index 3ae4beb48ff1..a2407bd9835f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run.py @@ -23,6 +23,12 @@ class PipelineRun(Model): :type additional_properties: dict[str, object] :ivar run_id: Identifier of a run. :vartype run_id: str + :ivar run_group_id: Identifier that correlates all the recovery runs of a + pipeline run. + :vartype run_group_id: str + :ivar is_latest: Indicates if the recovered pipeline run is the latest in + its group. + :vartype is_latest: bool :ivar pipeline_name: The pipeline name. 
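# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch only, not part of the generated patch.
# The same optional max_concurrent_connections keyword is added uniformly
# across the copy sources in this patch (PaypalSource and PhoenixSource
# above, PrestoSource and others below); the query texts are hypothetical.
from azure.mgmt.datafactory.models import PaypalSource, PhoenixSource

paypal_src = PaypalSource(query='SELECT * FROM transactions',
                          max_concurrent_connections=4)
phoenix_src = PhoenixSource(query='SELECT * FROM events',
                            max_concurrent_connections=4)
# ---------------------------------------------------------------------------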
:vartype pipeline_name: str :ivar parameters: The full or partial list of parameter name, value pair @@ -47,6 +53,8 @@ class PipelineRun(Model): _validation = { 'run_id': {'readonly': True}, + 'run_group_id': {'readonly': True}, + 'is_latest': {'readonly': True}, 'pipeline_name': {'readonly': True}, 'parameters': {'readonly': True}, 'invoked_by': {'readonly': True}, @@ -61,6 +69,8 @@ class PipelineRun(Model): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'run_id': {'key': 'runId', 'type': 'str'}, + 'run_group_id': {'key': 'runGroupId', 'type': 'str'}, + 'is_latest': {'key': 'isLatest', 'type': 'bool'}, 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{str}'}, 'invoked_by': {'key': 'invokedBy', 'type': 'PipelineRunInvokedBy'}, @@ -76,6 +86,8 @@ def __init__(self, **kwargs): super(PipelineRun, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.run_id = None + self.run_group_id = None + self.is_latest = None self.pipeline_name = None self.parameters = None self.invoked_by = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_py3.py index aed5dd0466d2..33e0f23f24ac 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_py3.py @@ -23,6 +23,12 @@ class PipelineRun(Model): :type additional_properties: dict[str, object] :ivar run_id: Identifier of a run. :vartype run_id: str + :ivar run_group_id: Identifier that correlates all the recovery runs of a + pipeline run. + :vartype run_group_id: str + :ivar is_latest: Indicates if the recovered pipeline run is the latest in + its group. + :vartype is_latest: bool :ivar pipeline_name: The pipeline name. 
:vartype pipeline_name: str :ivar parameters: The full or partial list of parameter name, value pair @@ -47,6 +53,8 @@ class PipelineRun(Model): _validation = { 'run_id': {'readonly': True}, + 'run_group_id': {'readonly': True}, + 'is_latest': {'readonly': True}, 'pipeline_name': {'readonly': True}, 'parameters': {'readonly': True}, 'invoked_by': {'readonly': True}, @@ -61,6 +69,8 @@ class PipelineRun(Model): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'run_id': {'key': 'runId', 'type': 'str'}, + 'run_group_id': {'key': 'runGroupId', 'type': 'str'}, + 'is_latest': {'key': 'isLatest', 'type': 'bool'}, 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{str}'}, 'invoked_by': {'key': 'invokedBy', 'type': 'PipelineRunInvokedBy'}, @@ -76,6 +86,8 @@ def __init__(self, *, additional_properties=None, **kwargs) -> None: super(PipelineRun, self).__init__(**kwargs) self.additional_properties = additional_properties self.run_id = None + self.run_group_id = None + self.is_latest = None self.pipeline_name = None self.parameters = None self.invoked_by = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service.py index af16c6c89cd2..f8ce5bd0803e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service.py @@ -29,7 +29,7 @@ class PostgreSqlLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service_py3.py index 5e7e674a2447..0221aa620064 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service_py3.py @@ -29,7 +29,7 @@ class PostgreSqlLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service.py index abf4adde8515..21f18f07b262 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service.py @@ -29,7 +29,7 @@ class PrestoLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
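# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch only, not part of the generated patch.
# Reading the new read-only rerun metadata on PipelineRun; the credential
# values, subscription, resource group, factory, and run id are all
# hypothetical placeholders.
from azure.common.credentials import ServicePrincipalCredentials
from azure.mgmt.datafactory import DataFactoryManagementClient

credentials = ServicePrincipalCredentials(
    client_id='my-app-client-id', secret='my-app-key', tenant='my-tenant-id')
client = DataFactoryManagementClient(credentials, 'my-subscription-id')

run = client.pipeline_runs.get('my-rg', 'my-factory', 'my-run-id')
print(run.run_group_id)   # correlates all recovery runs of one pipeline run
print(run.is_latest)      # True when this run is the latest in its group
# ---------------------------------------------------------------------------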
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service_py3.py index fe178f62df4f..75ab99d5a58f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service_py3.py @@ -29,7 +29,7 @@ class PrestoLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source.py index 333a4e6dca9e..9b7274011265 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source.py @@ -27,6 +27,10 @@ class PrestoSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class PrestoSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source_py3.py index ad16115ef8f3..47fe3eb5f790 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source_py3.py @@ -27,6 +27,10 @@ class PrestoSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class PrestoSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'PrestoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service.py index c2ca123e5409..6353c1cda96a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service.py @@ -29,7 +29,7 @@ class QuickBooksLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service_py3.py index 7ba9f145c26e..be12fc5cfba5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service_py3.py @@ -29,7 +29,7 @@ class QuickBooksLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source.py index b8567cd772ed..cce0a026ae5a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source.py @@ -27,6 +27,10 @@ class QuickBooksSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. 
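Since maxConcurrentConnections is surfaced as a plain keyword argument on every source model this patch touches, callers can set it directly. A small sketch against PrestoSource; the query and values are illustrative only:

    from azure.mgmt.datafactory.models import PrestoSource

    source = PrestoSource(
        query='SELECT * FROM example_table',  # hypothetical query
        source_retry_count=3,
        max_concurrent_connections=4,         # new in this patch
    )
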
Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class QuickBooksSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source_py3.py index b6bb7a260d1d..a00f35d4e1c1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source_py3.py @@ -27,6 +27,10 @@ class QuickBooksSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class QuickBooksSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'QuickBooksSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source.py index 1dc8ff198eb8..2450f31222df 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source.py @@ -27,6 +27,10 @@ class RelationalSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. 
Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType @@ -42,6 +46,7 @@ class RelationalSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source_py3.py index 9e7a75043b8c..f88383cbd729 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source_py3.py @@ -27,6 +27,10 @@ class RelationalSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType @@ -42,11 +46,12 @@ class RelationalSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'RelationalSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger.py index e66cf2feebbc..8c5ca2d67f3c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger.py @@ -31,6 +31,9 @@ class RerunTumblingWindowTrigger(Trigger): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. 
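Because the attribute map binds the new field to the maxConcurrentConnections REST key, it round-trips through the msrest serializer like any other property. A quick sanity sketch; the serializer setup mirrors the pattern the generated operations classes use and is illustrative, not part of the patch:

    from msrest import Serializer
    from azure.mgmt.datafactory import models

    client_models = {k: v for k, v in models.__dict__.items()
                     if isinstance(v, type)}
    serializer = Serializer(client_models)
    payload = serializer.body(
        models.RelationalSource(max_concurrent_connections=8),
        'RelationalSource')
    assert payload['maxConcurrentConnections'] == 8
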
+ :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param parent_trigger: The parent trigger reference. @@ -58,6 +61,7 @@ class RerunTumblingWindowTrigger(Trigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_py3.py index eafc3b5743a0..4a7a20759c1b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_py3.py @@ -31,6 +31,9 @@ class RerunTumblingWindowTrigger(Trigger): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param parent_trigger: The parent trigger reference. @@ -58,6 +61,7 @@ class RerunTumblingWindowTrigger(Trigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, @@ -65,8 +69,8 @@ class RerunTumblingWindowTrigger(Trigger): 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, } - def __init__(self, *, requested_start_time, requested_end_time, max_concurrency: int, additional_properties=None, description: str=None, parent_trigger=None, **kwargs) -> None: - super(RerunTumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, **kwargs) + def __init__(self, *, requested_start_time, requested_end_time, max_concurrency: int, additional_properties=None, description: str=None, annotations=None, parent_trigger=None, **kwargs) -> None: + super(RerunTumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) self.parent_trigger = parent_trigger self.requested_start_time = requested_start_time self.requested_end_time = requested_end_time diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service.py index 9c1b8e4c3cbd..16d1af502787 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service.py @@ -29,7 +29,7 @@ class ResponsysLinkedService(LinkedService): :type parameters: dict[str, 
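With annotations now threaded through the rerun trigger's constructor, tagging a rerun looks like any other annotated resource. A sketch with placeholder window bounds:

    from datetime import datetime
    from azure.mgmt.datafactory.models import RerunTumblingWindowTrigger

    trigger = RerunTumblingWindowTrigger(
        requested_start_time=datetime(2019, 6, 1),
        requested_end_time=datetime(2019, 6, 2),
        max_concurrency=2,
        annotations=['backfill', 'example'],  # newly supported on this trigger
    )
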
~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service_py3.py index 4c1997e6ab26..6d8a74a0a34b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service_py3.py @@ -29,7 +29,7 @@ class ResponsysLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source.py index 1e1a9397a6ba..fd25b8e71377 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source.py @@ -27,6 +27,10 @@ class ResponsysSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class ResponsysSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source_py3.py index 3bfb9c19a2a7..8d5e4ac091f7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source_py3.py @@ -27,6 +27,10 @@ class ResponsysSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class ResponsysSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'ResponsysSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset.py new file mode 100644 index 000000000000..9a5d41858e54 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class RestResourceDataset(Dataset): + """A Rest service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param relative_url: The relative URL to the resource that the RESTful API + provides. 
Type: string (or Expression with resultType string). + :type relative_url: object + :param request_method: The HTTP method used to call the RESTful API. The + default is GET. Type: string (or Expression with resultType string). + :type request_method: object + :param request_body: The HTTP request body to the RESTful API if + requestMethod is POST. Type: string (or Expression with resultType + string). + :type request_body: object + :param additional_headers: The additional HTTP headers in the request to + the RESTful API. Type: string (or Expression with resultType string). + :type additional_headers: object + :param pagination_rules: The pagination rules to compose next page + requests. Type: string (or Expression with resultType string). + :type pagination_rules: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, + 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, + 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, + 'pagination_rules': {'key': 'typeProperties.paginationRules', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(RestResourceDataset, self).__init__(**kwargs) + self.relative_url = kwargs.get('relative_url', None) + self.request_method = kwargs.get('request_method', None) + self.request_body = kwargs.get('request_body', None) + self.additional_headers = kwargs.get('additional_headers', None) + self.pagination_rules = kwargs.get('pagination_rules', None) + self.type = 'RestResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset_py3.py new file mode 100644 index 000000000000..99f39c97f373 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset_py3.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class RestResourceDataset(Dataset): + """A Rest service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. 
+ :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param relative_url: The relative URL to the resource that the RESTful API + provides. Type: string (or Expression with resultType string). + :type relative_url: object + :param request_method: The HTTP method used to call the RESTful API. The + default is GET. Type: string (or Expression with resultType string). + :type request_method: object + :param request_body: The HTTP request body to the RESTful API if + requestMethod is POST. Type: string (or Expression with resultType + string). + :type request_body: object + :param additional_headers: The additional HTTP headers in the request to + the RESTful API. Type: string (or Expression with resultType string). + :type additional_headers: object + :param pagination_rules: The pagination rules to compose next page + requests. Type: string (or Expression with resultType string). 
+ :type pagination_rules: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, + 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, + 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, + 'pagination_rules': {'key': 'typeProperties.paginationRules', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, relative_url=None, request_method=None, request_body=None, additional_headers=None, pagination_rules=None, **kwargs) -> None: + super(RestResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.relative_url = relative_url + self.request_method = request_method + self.request_body = request_body + self.additional_headers = additional_headers + self.pagination_rules = pagination_rules + self.type = 'RestResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service.py new file mode 100644 index 000000000000..0fbb15654438 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service.py @@ -0,0 +1,107 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class RestServiceLinkedService(LinkedService): + """Rest Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
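Putting the new dataset model together, a minimal sketch. The linked service name and relative URL are hypothetical, and the dict-shaped pagination rule is an assumption about how this opaque object property is typically populated:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, RestResourceDataset)

    dataset = RestResourceDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='ExampleRestService'),  # hypothetical linked service
        relative_url='items',                      # hypothetical resource path
        request_method='GET',
        pagination_rules={'AbsoluteUrl': '$.nextLink'},  # assumed rule shape
    )
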
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The base URL of the REST service. + :type url: object + :param enable_server_certificate_validation: Whether to validate server + side SSL certificate when connecting to the endpoint. The default value is + true. Type: boolean (or Expression with resultType boolean). + :type enable_server_certificate_validation: object + :param authentication_type: Required. Type of authentication used to + connect to the REST service. Possible values include: 'Anonymous', + 'Basic', 'AadServicePrincipal', 'ManagedServiceIdentity' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.RestServiceAuthenticationType + :param user_name: The user name used in Basic authentication type. + :type user_name: object + :param password: The password used in Basic authentication type. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param service_principal_id: The application's client ID used in + AadServicePrincipal authentication type. + :type service_principal_id: object + :param service_principal_key: The application's key used in + AadServicePrincipal authentication type. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The tenant information (domain name or tenant ID) used in + AadServicePrincipal authentication type under which your application + resides. + :type tenant: object + :param aad_resource_id: The resource you are requesting authorization to + use. + :type aad_resource_id: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(RestServiceLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.aad_resource_id = kwargs.get('aad_resource_id', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'RestService' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service_py3.py new file mode 100644 index 000000000000..9af9f609e52b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service_py3.py @@ -0,0 +1,107 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class RestServiceLinkedService(LinkedService): + """Rest Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The base URL of the REST service. + :type url: object + :param enable_server_certificate_validation: Whether to validate server + side SSL certificate when connecting to the endpoint. The default value is + true. Type: boolean (or Expression with resultType boolean). + :type enable_server_certificate_validation: object + :param authentication_type: Required. Type of authentication used to + connect to the REST service. Possible values include: 'Anonymous', + 'Basic', 'AadServicePrincipal', 'ManagedServiceIdentity' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.RestServiceAuthenticationType + :param user_name: The user name used in Basic authentication type. + :type user_name: object + :param password: The password used in Basic authentication type. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param service_principal_id: The application's client ID used in + AadServicePrincipal authentication type. + :type service_principal_id: object + :param service_principal_key: The application's key used in + AadServicePrincipal authentication type. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The tenant information (domain name or tenant ID) used in + AadServicePrincipal authentication type under which your application + resides. + :type tenant: object + :param aad_resource_id: The resource you are requesting authorization to + use. + :type aad_resource_id: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, url, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, enable_server_certificate_validation=None, user_name=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, aad_resource_id=None, encrypted_credential=None, **kwargs) -> None: + super(RestServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.enable_server_certificate_validation = enable_server_certificate_validation + self.authentication_type = authentication_type + self.user_name = user_name + self.password = password + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.aad_resource_id = aad_resource_id + self.encrypted_credential = encrypted_credential + self.type = 'RestService' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source.py new file mode 100644 index 000000000000..a8c7efca21e3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class RestSource(CopySource): + """A copy activity Rest service source. + + All required parameters must be populated in order to send to Azure. 
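A sketch of the AadServicePrincipal path through the new linked service; every endpoint, ID, and secret below is a placeholder:

    from azure.mgmt.datafactory.models import (
        RestServiceLinkedService, SecureString)

    linked_service = RestServiceLinkedService(
        url='https://api.example.com',  # hypothetical base URL
        authentication_type='AadServicePrincipal',
        service_principal_id='<client-id>',
        service_principal_key=SecureString(value='<client-secret>'),
        tenant='example.onmicrosoft.com',
        aad_resource_id='https://api.example.com',
    )
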
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP + response. It is the timeout to get a response, not the timeout to read + response data. Default value: 00:01:40. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object + :param request_interval: The time to await before sending next page + request. + :type request_interval: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + 'request_interval': {'key': 'requestInterval', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(RestSource, self).__init__(**kwargs) + self.http_request_timeout = kwargs.get('http_request_timeout', None) + self.request_interval = kwargs.get('request_interval', None) + self.type = 'RestSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source_py3.py new file mode 100644 index 000000000000..cf0878e050e0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source_py3.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class RestSource(CopySource): + """A copy activity Rest service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP + response. It is the timeout to get a response, not the timeout to read + response data. Default value: 00:01:40. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object + :param request_interval: The time to await before sending next page + request. + :type request_interval: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + 'request_interval': {'key': 'requestInterval', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, http_request_timeout=None, request_interval=None, **kwargs) -> None: + super(RestSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.http_request_timeout = http_request_timeout + self.request_interval = request_interval + self.type = 'RestSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter.py index 63a4cddc063d..7d54150a6815 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter.py @@ -24,7 +24,7 @@ class RunQueryFilter(Model): TriggerName, TriggerRunTimestamp and Status. Possible values include: 'PipelineName', 'Status', 'RunStart', 'RunEnd', 'ActivityName', 'ActivityRunStart', 'ActivityRunEnd', 'ActivityType', 'TriggerName', - 'TriggerRunTimestamp' + 'TriggerRunTimestamp', 'RunGroupId', 'LatestOnly' :type operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand :param operator: Required. Operator to be used for filter. Possible values include: 'Equals', 'NotEquals', 'In', 'NotIn' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter_py3.py index fc95591801bd..814e7a4b499b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter_py3.py @@ -24,7 +24,7 @@ class RunQueryFilter(Model): TriggerName, TriggerRunTimestamp and Status. 
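The two REST-specific knobs on the new source are opaque expressions. A sketch with assumed literal values; the timeout follows the timespan pattern documented above, and the request_interval value shape is an assumption:

    from azure.mgmt.datafactory.models import RestSource

    source = RestSource(
        http_request_timeout='00:02:00',  # time to get a response, not read it
        request_interval='00:00:01',      # assumed pause between page requests
        max_concurrent_connections=2,
    )
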
Possible values include: 'PipelineName', 'Status', 'RunStart', 'RunEnd', 'ActivityName', 'ActivityRunStart', 'ActivityRunEnd', 'ActivityType', 'TriggerName', - 'TriggerRunTimestamp' + 'TriggerRunTimestamp', 'RunGroupId', 'LatestOnly' :type operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand :param operator: Required. Operator to be used for filter. Possible values include: 'Equals', 'NotEquals', 'In', 'NotIn' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service.py index 5804e779d1ef..c644ac664831 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service.py @@ -29,7 +29,7 @@ class SalesforceLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service_py3.py index 9fa5287aa3b4..05fcea7a3990 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service_py3.py @@ -29,7 +29,7 @@ class SalesforceLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service.py index f3d2861576e4..93b4fcdb3d1f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service.py @@ -29,7 +29,7 @@ class SalesforceMarketingCloudLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
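With the two new operands, filtering a factory's runs down to the latest attempt in each recovery group might look like the following sketch. 'client' is assumed from the earlier sketch, the run group id is a placeholder, and passing the LatestOnly flag as the string 'true' is an assumption about how the service expects the boolean:

    from datetime import datetime, timedelta
    from azure.mgmt.datafactory.models import (
        RunFilterParameters, RunQueryFilter)

    filters = RunFilterParameters(
        last_updated_after=datetime.utcnow() - timedelta(days=1),
        last_updated_before=datetime.utcnow(),
        filters=[
            RunQueryFilter(operand='RunGroupId', operator='Equals',
                           values=['<run-group-id>']),
            RunQueryFilter(operand='LatestOnly', operator='Equals',
                           values=['true']),
        ],
    )
    runs = client.pipeline_runs.query_by_factory(
        'example-rg', 'example-factory', filters)
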
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service_py3.py index 863b679398e1..d7e09e27a43f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service_py3.py @@ -29,7 +29,7 @@ class SalesforceMarketingCloudLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source.py index bf08fdaa88bf..09a0eca1758e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source.py @@ -27,6 +27,10 @@ class SalesforceMarketingCloudSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class SalesforceMarketingCloudSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source_py3.py index 0a3d26cfb43b..9b898af0c3a1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source_py3.py @@ -27,6 +27,10 @@ class SalesforceMarketingCloudSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class SalesforceMarketingCloudSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'SalesforceMarketingCloudSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py index 525aaccd49be..9a1291bd4bfe 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py @@ -34,6 +34,10 @@ class SalesforceSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. Default is @@ -65,6 +69,7 @@ class SalesforceSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py index 6db44ebb4228..54a56618d01e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py @@ -34,6 +34,10 @@ class SalesforceSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. 
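A brief sketch of the new max_concurrent_connections setting on the Salesforce copy models; the query text, write behavior, and connection counts are illustrative values only:

from azure.mgmt.datafactory.models import SalesforceMarketingCloudSource, SalesforceSink

# Cap how many parallel connections the copy activity may open per store.
source = SalesforceMarketingCloudSource(
    query='SELECT ...',            # hypothetical source query
    max_concurrent_connections=4,
)
sink = SalesforceSink(write_behavior='Insert', max_concurrent_connections=2)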
:type type: str :param write_behavior: The write behavior for the operation. Default is @@ -65,14 +69,15 @@ class SalesforceSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None: - super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None: + super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.write_behavior = write_behavior self.external_id_field_name = external_id_field_name self.ignore_null_values = ignore_null_values diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py index 8442a716c842..4f2590c3ab9d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py @@ -27,6 +27,10 @@ class SalesforceSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. 
Type: string (or Expression with resultType @@ -46,6 +50,7 @@ class SalesforceSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py index 9ebc65ddeec8..4441e92eaff3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py @@ -27,6 +27,10 @@ class SalesforceSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType @@ -46,13 +50,14 @@ class SalesforceSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, read_behavior=None, **kwargs) -> None: - super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, read_behavior=None, **kwargs) -> None: + super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.read_behavior = read_behavior self.type = 'SalesforceSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service.py index 2fbb906559bc..a57164c7215d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service.py @@ -29,7 +29,7 @@ class SapBWLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. 
Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service_py3.py index a1f6133e558d..92aef25dc215 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service_py3.py @@ -29,7 +29,7 @@ class SapBWLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service.py index 5c9a6c2deb00..53d47ab8ae41 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service.py @@ -29,7 +29,7 @@ class SapCloudForCustomerLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service_py3.py index 85c1100d01eb..9e47fd696503 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service_py3.py @@ -29,7 +29,7 @@ class SapCloudForCustomerLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py index 05d98ec70eaa..e5a37858abb5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py @@ -34,6 +34,10 @@ class SapCloudForCustomerSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. 
Default is @@ -52,6 +56,7 @@ class SapCloudForCustomerSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py index f3cd45263f3e..29f01fdd6891 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py @@ -34,6 +34,10 @@ class SapCloudForCustomerSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. Default is @@ -52,11 +56,12 @@ class SapCloudForCustomerSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, write_behavior=None, **kwargs) -> None: - super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: + super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.write_behavior = write_behavior self.type = 'SapCloudForCustomerSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source.py index c8dedf91e188..561c1b342f93 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source.py @@ -27,6 +27,10 @@ class SapCloudForCustomerSource(CopySource): with resultType string), pattern: 
((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: SAP Cloud for Customer OData query. For example, "$top=1". @@ -42,6 +46,7 @@ class SapCloudForCustomerSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source_py3.py index ab5bddf21be3..e9dab6ad1899 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source_py3.py @@ -27,6 +27,10 @@ class SapCloudForCustomerSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: SAP Cloud for Customer OData query. For example, "$top=1". 
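The same setting on the SAP Cloud for Customer source, sketched with the docstring's own OData query example and an assumed connection cap:

from azure.mgmt.datafactory.models import SapCloudForCustomerSource

source = SapCloudForCustomerSource(
    query='$top=1',                # OData query, as in the docstring above
    max_concurrent_connections=8,  # illustrative cap, new in this change
)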
@@ -42,11 +46,12 @@ class SapCloudForCustomerSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'SapCloudForCustomerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service.py index 4303b2f9cbca..0ca69242055f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service.py @@ -29,7 +29,7 @@ class SapEccLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service_py3.py index 24490fb39a9a..7afd76b8fe09 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service_py3.py @@ -29,7 +29,7 @@ class SapEccLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py index 84aa047e6d8a..6a2d17862d6b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py @@ -27,6 +27,10 @@ class SapEccSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). 
+ :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: SAP ECC OData query. For example, "$top=1". Type: string (or @@ -42,6 +46,7 @@ class SapEccSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'str'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py index f8993720428c..95a11500bd24 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py @@ -27,6 +27,10 @@ class SapEccSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: SAP ECC OData query. For example, "$top=1". Type: string (or @@ -42,11 +46,12 @@ class SapEccSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query: str=None, **kwargs) -> None: - super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query: str=None, **kwargs) -> None: + super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'SapEccSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py index 0c2dbec28558..391bd79f8c28 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py @@ -29,7 +29,7 @@ class SapHanaLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py index c906d74d0c2b..bbf307d1bede 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py @@ -29,7 +29,7 @@ class SapHanaLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service.py new file mode 100644 index 000000000000..bfe9c323d302 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class SapOpenHubLinkedService(LinkedService): + """SAP Business Warehouse Open Hub Destination Linked Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. Host name of the SAP BW instance where the open + hub destination is located. Type: string (or Expression with resultType + string). + :type server: object + :param system_number: Required. System number of the BW system where the + open hub destination is located. (Usually a two-digit decimal number + represented as a string.) Type: string (or Expression with resultType + string). + :type system_number: object + :param client_id: Required. Client ID of the client on the BW system where + the open hub destination is located. (Usually a three-digit decimal number + represented as a string) Type: string (or Expression with resultType + string). + :type client_id: object + :param language: Language of the BW system where the open hub destination + is located. The default value is EN. Type: string (or Expression with + resultType string).
+ :type language: object + :param user_name: Username to access the SAP BW server where the open hub + destination is located. Type: string (or Expression with resultType + string). + :type user_name: object + :param password: Password to access the SAP BW server where the open hub + destination is located. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'system_number': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'language': {'key': 'typeProperties.language', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapOpenHubLinkedService, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.system_number = kwargs.get('system_number', None) + self.client_id = kwargs.get('client_id', None) + self.language = kwargs.get('language', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'SapOpenHub' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service_py3.py new file mode 100644 index 000000000000..eddc50b0f1c5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service_py3.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SapOpenHubLinkedService(LinkedService): + """SAP Business Warehouse Open Hub Destination Linked Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference.
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. Host name of the SAP BW instance where the open + hub destination is located. Type: string (or Expression with resultType + string). + :type server: object + :param system_number: Required. System number of the BW system where the + open hub destination is located. (Usually a two-digit decimal number + represented as a string.) Type: string (or Expression with resultType + string). + :type system_number: object + :param client_id: Required. Client ID of the client on the BW system where + the open hub destination is located. (Usually a three-digit decimal number + represented as a string) Type: string (or Expression with resultType + string). + :type client_id: object + :param language: Language of the BW system where the open hub destination + is located. The default value is EN. Type: string (or Expression with + resultType string). + :type language: object + :param user_name: Username to access the SAP BW server where the open hub + destination is located. Type: string (or Expression with resultType + string). + :type user_name: object + :param password: Password to access the SAP BW server where the open hub + destination is located. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
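A minimal construction sketch for the new SapOpenHubLinkedService; the host, system number, client, and credentials below are placeholders, with SecureString standing in for any SecretBase implementation:

from azure.mgmt.datafactory.models import SapOpenHubLinkedService, SecureString

open_hub_ls = SapOpenHubLinkedService(
    server='sapbw.contoso.example',   # hypothetical SAP BW host
    system_number='00',               # two-digit system number, as a string
    client_id='100',                  # three-digit client, as a string
    language='EN',                    # default shown explicitly
    user_name='bwuser',
    password=SecureString(value='<placeholder>'),
)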
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'system_number': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'language': {'key': 'typeProperties.language', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, server, system_number, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, language=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(SapOpenHubLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.server = server + self.system_number = system_number + self.client_id = client_id + self.language = language + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'SapOpenHub' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source.py new file mode 100644 index 000000000000..ea98207a18cf --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SapOpenHubSource(CopySource): + """A copy activity source for SAP Business Warehouse Open Hub Destination + source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store.
Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SapOpenHubSource, self).__init__(**kwargs) + self.type = 'SapOpenHubSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source_py3.py new file mode 100644 index 000000000000..9cfa4e5243b6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source_py3.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SapOpenHubSource(CopySource): + """A copy activity source for SAP Business Warehouse Open Hub Destination + source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server.
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: + super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'SapOpenHubSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset.py new file mode 100644 index 000000000000..2682969c5016 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class SapOpenHubTableDataset(Dataset): + """SAP Business Warehouse Open Hub Destination Table properties. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param open_hub_destination_name: Required. The name of the Open Hub + Destination with destination type as Database Table. Type: string (or + Expression with resultType string). + :type open_hub_destination_name: object + :param exclude_last_request: Whether to exclude the records of the last + request. The default value is true.
Type: boolean (or Expression with + resultType boolean). + :type exclude_last_request: object + :param base_request_id: The ID of request for delta loading. Once it is + set, only data with requestId larger than the value of this property will + be retrieved. The default value is 0. Type: integer (or Expression with + resultType integer). + :type base_request_id: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'open_hub_destination_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'open_hub_destination_name': {'key': 'typeProperties.openHubDestinationName', 'type': 'object'}, + 'exclude_last_request': {'key': 'typeProperties.excludeLastRequest', 'type': 'object'}, + 'base_request_id': {'key': 'typeProperties.baseRequestId', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapOpenHubTableDataset, self).__init__(**kwargs) + self.open_hub_destination_name = kwargs.get('open_hub_destination_name', None) + self.exclude_last_request = kwargs.get('exclude_last_request', None) + self.base_request_id = kwargs.get('base_request_id', None) + self.type = 'SapOpenHubTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset_py3.py new file mode 100644 index 000000000000..b06a53c10db3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset_py3.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SapOpenHubTableDataset(Dataset): + """SAP Business Warehouse Open Hub Destination Table properties. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference.
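A construction sketch for the new SapOpenHubTableDataset, assuming a linked service registered under a hypothetical name:

from azure.mgmt.datafactory.models import LinkedServiceReference, SapOpenHubTableDataset

dataset = SapOpenHubTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='SapOpenHubLS'),
    open_hub_destination_name='MY_OHD',  # hypothetical Open Hub Destination
    exclude_last_request=True,           # matches the documented default
    base_request_id=0,                   # only requestIds above this are read
)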
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param open_hub_destination_name: Required. The name of the Open Hub + Destination with destination type as Database Table. Type: string (or + Expression with resultType string). + :type open_hub_destination_name: object + :param exclude_last_request: Whether to exclude the records of the last + request. The default value is true. Type: boolean (or Expression with + resultType boolean). + :type exclude_last_request: object + :param base_request_id: The ID of request for delta loading. Once it is + set, only data with requestId larger than the value of this property will + be retrieved. The default value is 0. Type: integer (or Expression with + resultType integer). + :type base_request_id: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'open_hub_destination_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'open_hub_destination_name': {'key': 'typeProperties.openHubDestinationName', 'type': 'object'}, + 'exclude_last_request': {'key': 'typeProperties.excludeLastRequest', 'type': 'object'}, + 'base_request_id': {'key': 'typeProperties.baseRequestId', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, open_hub_destination_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, exclude_last_request=None, base_request_id=None, **kwargs) -> None: + super(SapOpenHubTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.open_hub_destination_name = open_hub_destination_name + self.exclude_last_request = exclude_last_request + self.base_request_id = base_request_id + self.type = 'SapOpenHubTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger.py index eaebfb4c2553..b9ea331b8c6e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger.py @@ -30,6 +30,9 @@ class ScheduleTrigger(MultiplePipelineTrigger): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or
~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param pipelines: Pipelines that need to be started. @@ -49,6 +52,7 @@ class ScheduleTrigger(MultiplePipelineTrigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, 'recurrence': {'key': 'typeProperties.recurrence', 'type': 'ScheduleTriggerRecurrence'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_py3.py index 1fc148a81b29..f13f01c7fa13 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_py3.py @@ -30,6 +30,9 @@ class ScheduleTrigger(MultiplePipelineTrigger): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param pipelines: Pipelines that need to be started. @@ -49,12 +52,13 @@ class ScheduleTrigger(MultiplePipelineTrigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, 'recurrence': {'key': 'typeProperties.recurrence', 'type': 'ScheduleTriggerRecurrence'}, } - def __init__(self, *, recurrence, additional_properties=None, description: str=None, pipelines=None, **kwargs) -> None: - super(ScheduleTrigger, self).__init__(additional_properties=additional_properties, description=description, pipelines=pipelines, **kwargs) + def __init__(self, *, recurrence, additional_properties=None, description: str=None, annotations=None, pipelines=None, **kwargs) -> None: + super(ScheduleTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) self.recurrence = recurrence self.type = 'ScheduleTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service.py index c433366826b8..4d42f575e769 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service.py @@ -29,7 +29,7 @@ class ServiceNowLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. 
Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service_py3.py index cdd9e8ebb718..b9d166f241d6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service_py3.py @@ -29,7 +29,7 @@ class ServiceNowLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source.py index 00068f5e5d32..16b10bb8de5e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source.py @@ -27,6 +27,10 @@ class ServiceNowSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class ServiceNowSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source_py3.py index ffe72cb426e7..20d1a64d04d3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source_py3.py @@ -27,6 +27,10 @@ class ServiceNowSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class ServiceNowSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'ServiceNowSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service.py index 31a9d5524f36..aa4c535fc514 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service.py @@ -29,7 +29,7 @@ class SftpServerLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service_py3.py index 581e8f2a0f8e..7decd7781348 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service_py3.py @@ -29,7 +29,7 @@ class SftpServerLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service.py index b57922620ef8..ee5311dceb7a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service.py @@ -29,7 +29,7 @@ class ShopifyLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. 
:type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service_py3.py index 714de7f0ddf6..ea6189277552 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service_py3.py @@ -29,7 +29,7 @@ class ShopifyLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source.py index 3006ede4633d..d4596976d459 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source.py @@ -27,6 +27,10 @@ class ShopifySource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class ShopifySource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source_py3.py index ec17bdce3e35..6b56edd62904 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source_py3.py @@ -27,6 +27,10 @@ class ShopifySource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class ShopifySource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'ShopifySource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service.py index 006311c492bb..4f9ab49a7bba 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service.py @@ -29,7 +29,7 @@ class SparkLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service_py3.py index c5e20deef8e8..f6433b6ab187 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service_py3.py @@ -29,7 +29,7 @@ class SparkLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source.py index 643a71610930..6d670c1c6b2a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source.py @@ -27,6 +27,10 @@ class SparkSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. 
Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class SparkSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source_py3.py index ede7f9ed5e2b..8da01b0cd823 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source_py3.py @@ -27,6 +27,10 @@ class SparkSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class SparkSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'SparkSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink.py index ac12b6e55e59..6b4785b91ab4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink.py @@ -34,6 +34,10 @@ class SqlDWSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: SQL pre-copy script. 
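
# Illustrative usage sketch (hedged, not part of the applied patch): SqlDWSink
# with the new `max_concurrent_connections` parameter alongside the existing
# PolyBase options. The PolybaseSettings field values and the staging table
# name are assumptions for illustration only.
from azure.mgmt.datafactory.models import PolybaseSettings, SqlDWSink

sink = SqlDWSink(
    allow_poly_base=True,
    poly_base_settings=PolybaseSettings(reject_type='value', reject_value=0),
    pre_copy_script='TRUNCATE TABLE dbo.Staging',   # hypothetical table
    max_concurrent_connections=8)                   # new in this patch
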
Type: string (or Expression @@ -58,6 +62,7 @@ class SqlDWSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink_py3.py index 2b2d44cf16c6..efe63dcf788a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink_py3.py @@ -34,6 +34,10 @@ class SqlDWSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: SQL pre-copy script. Type: string (or Expression @@ -58,14 +62,15 @@ class SqlDWSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, pre_copy_script=None, allow_poly_base=None, poly_base_settings=None, **kwargs) -> None: - super(SqlDWSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, allow_poly_base=None, poly_base_settings=None, **kwargs) -> None: + super(SqlDWSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.pre_copy_script = pre_copy_script self.allow_poly_base = allow_poly_base self.poly_base_settings = poly_base_settings diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source.py index aa3f88a75938..1a020672f7c2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source.py +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source.py @@ -27,6 +27,10 @@ class SqlDWSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or @@ -51,6 +55,7 @@ class SqlDWSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source_py3.py index b74c004141d1..ae8fe605024f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source_py3.py @@ -27,6 +27,10 @@ class SqlDWSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param sql_reader_query: SQL Data Warehouse reader query. 
Type: string (or @@ -51,14 +55,15 @@ class SqlDWSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: - super(SqlDWSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: + super(SqlDWSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service.py index 36230c046278..45d342212ea4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service.py @@ -29,7 +29,7 @@ class SqlServerLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service_py3.py index fb446a12f601..3eb8c5063dc1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service_py3.py @@ -29,7 +29,7 @@ class SqlServerLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py index 77692817100d..9c2ebd2b389f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py @@ -34,6 +34,10 @@ class SqlSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param sql_writer_stored_procedure_name: SQL writer stored procedure name. @@ -60,6 +64,7 @@ class SqlSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py index 5aa68f696f16..115cc3a899e9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py @@ -34,6 +34,10 @@ class SqlSink(CopySink): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param sql_writer_stored_procedure_name: SQL writer stored procedure name. 
@@ -60,6 +64,7 @@ class SqlSink(CopySink): 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, @@ -67,8 +72,8 @@ class SqlSink(CopySink): 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, **kwargs) -> None: - super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, **kwargs) -> None: + super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type self.pre_copy_script = pre_copy_script diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source.py index 3f374b19f072..bb31474b1f7c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source.py @@ -27,6 +27,10 @@ class SqlSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param sql_reader_query: SQL reader query. 
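
# Illustrative usage sketch (hedged, not part of the applied patch): SqlSource
# combining the new `max_concurrent_connections` parameter with a
# stored-procedure read. The procedure name and parameter are hypothetical;
# the parameter dict mirrors the '{StoredProcedureParameter}' entry seen in
# the SqlSink attribute map above.
from azure.mgmt.datafactory.models import SqlSource, StoredProcedureParameter

source = SqlSource(
    sql_reader_stored_procedure_name='dbo.GetChangedRows',
    stored_procedure_parameters={
        'since': StoredProcedureParameter(value='2019-06-01', type='String')},
    max_concurrent_connections=2)
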
Type: string (or Expression @@ -50,6 +54,7 @@ class SqlSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source_py3.py index ff39b6768a9f..dcad458fd4a6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source_py3.py @@ -27,6 +27,10 @@ class SqlSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param sql_reader_query: SQL reader query. Type: string (or Expression @@ -50,14 +54,15 @@ class SqlSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: - super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: + super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service.py index 4e9df2b68e62..4edfc8b211f7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service.py +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service.py @@ -29,7 +29,7 @@ class SquareLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service_py3.py index 0b9218efba97..40719f600a18 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service_py3.py @@ -29,7 +29,7 @@ class SquareLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source.py index 919abc0b19fa..f083df43f13a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source.py @@ -27,6 +27,10 @@ class SquareSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class SquareSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source_py3.py index f7ba625398af..ec8a741d564c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source_py3.py @@ -27,6 +27,10 @@ class SquareSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class SquareSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'SquareSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment.py new file mode 100644 index 000000000000..5dff9764e2a2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata import SsisObjectMetadata + + +class SsisEnvironment(SsisObjectMetadata): + """Ssis environment. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains environment. 
+ :type folder_id: long + :param variables: Variable in environment + :type variables: list[~azure.mgmt.datafactory.models.SsisVariable] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'variables': {'key': 'variables', 'type': '[SsisVariable]'}, + } + + def __init__(self, **kwargs): + super(SsisEnvironment, self).__init__(**kwargs) + self.folder_id = kwargs.get('folder_id', None) + self.variables = kwargs.get('variables', None) + self.type = 'Environment' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_py3.py new file mode 100644 index 000000000000..43697ba62146 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_py3.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata_py3 import SsisObjectMetadata + + +class SsisEnvironment(SsisObjectMetadata): + """Ssis environment. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains environment. + :type folder_id: long + :param variables: Variable in environment + :type variables: list[~azure.mgmt.datafactory.models.SsisVariable] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'variables': {'key': 'variables', 'type': '[SsisVariable]'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, folder_id: int=None, variables=None, **kwargs) -> None: + super(SsisEnvironment, self).__init__(id=id, name=name, description=description, **kwargs) + self.folder_id = folder_id + self.variables = variables + self.type = 'Environment' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference.py new file mode 100644 index 000000000000..e7d31d369392 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisEnvironmentReference(Model): + """Ssis environment reference. + + :param id: Environment reference id. + :type id: long + :param environment_folder_name: Environment folder name. + :type environment_folder_name: str + :param environment_name: Environment name. + :type environment_name: str + :param reference_type: Reference type + :type reference_type: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'environment_folder_name': {'key': 'environmentFolderName', 'type': 'str'}, + 'environment_name': {'key': 'environmentName', 'type': 'str'}, + 'reference_type': {'key': 'referenceType', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SsisEnvironmentReference, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.environment_folder_name = kwargs.get('environment_folder_name', None) + self.environment_name = kwargs.get('environment_name', None) + self.reference_type = kwargs.get('reference_type', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference_py3.py new file mode 100644 index 000000000000..14cbfca99d4f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference_py3.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisEnvironmentReference(Model): + """Ssis environment reference. + + :param id: Environment reference id. + :type id: long + :param environment_folder_name: Environment folder name. + :type environment_folder_name: str + :param environment_name: Environment name. 
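
# Illustrative usage sketch (hedged, not part of the applied patch): what the
# _attribute_map in SsisEnvironmentReference buys you. msrest serializes the
# snake_case model attributes to the camelCase wire names; msrest is already
# a dependency of this package, and the field values here are made up.
from msrest import Serializer
from azure.mgmt.datafactory import models

client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
ref = models.SsisEnvironmentReference(
    id=12, environment_folder_name='folderA', environment_name='dev')
body = Serializer(client_models).body(ref, 'SsisEnvironmentReference')
# body == {'id': 12, 'environmentFolderName': 'folderA', 'environmentName': 'dev'}
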
+ :type environment_name: str + :param reference_type: Reference type + :type reference_type: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'environment_folder_name': {'key': 'environmentFolderName', 'type': 'str'}, + 'environment_name': {'key': 'environmentName', 'type': 'str'}, + 'reference_type': {'key': 'referenceType', 'type': 'str'}, + } + + def __init__(self, *, id: int=None, environment_folder_name: str=None, environment_name: str=None, reference_type: str=None, **kwargs) -> None: + super(SsisEnvironmentReference, self).__init__(**kwargs) + self.id = id + self.environment_folder_name = environment_folder_name + self.environment_name = environment_name + self.reference_type = reference_type diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder.py new file mode 100644 index 000000000000..350b0d92852b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata import SsisObjectMetadata + + +class SsisFolder(SsisObjectMetadata): + """Ssis folder. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SsisFolder, self).__init__(**kwargs) + self.type = 'Folder' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder_py3.py new file mode 100644 index 000000000000..d6483fda2c08 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder_py3.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata_py3 import SsisObjectMetadata + + +class SsisFolder(SsisObjectMetadata): + """Ssis folder. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. 
+ :type description: str + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, **kwargs) -> None: + super(SsisFolder, self).__init__(id=id, name=name, description=description, **kwargs) + self.type = 'Folder' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata.py index ed7940124645..811075137f41 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata.py @@ -15,6 +15,9 @@ class SsisObjectMetadata(Model): """SSIS object metadata. + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SsisEnvironment, SsisPackage, SsisProject, SsisFolder + All required parameters must be populated in order to send to Azure. :param id: Metadata id. @@ -38,6 +41,10 @@ class SsisObjectMetadata(Model): 'type': {'key': 'type', 'type': 'str'}, } + _subtype_map = { + 'type': {'Environment': 'SsisEnvironment', 'Package': 'SsisPackage', 'Project': 'SsisProject', 'Folder': 'SsisFolder'} + } + def __init__(self, **kwargs): super(SsisObjectMetadata, self).__init__(**kwargs) self.id = kwargs.get('id', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_py3.py index b7373e36523c..45f7e15af4fa 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_py3.py @@ -15,6 +15,9 @@ class SsisObjectMetadata(Model): """SSIS object metadata. + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SsisEnvironment, SsisPackage, SsisProject, SsisFolder + All required parameters must be populated in order to send to Azure. :param id: Metadata id. @@ -38,6 +41,10 @@ class SsisObjectMetadata(Model): 'type': {'key': 'type', 'type': 'str'}, } + _subtype_map = { + 'type': {'Environment': 'SsisEnvironment', 'Package': 'SsisPackage', 'Project': 'SsisProject', 'Folder': 'SsisFolder'} + } + def __init__(self, *, id: int=None, name: str=None, description: str=None, **kwargs) -> None: super(SsisObjectMetadata, self).__init__(**kwargs) self.id = id diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package.py new file mode 100644 index 000000000000..b04fc1138797 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package.py @@ -0,0 +1,59 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
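
# Illustrative usage sketch (hedged, not part of the applied patch): the
# _subtype_map added above makes deserialization of the base
# SsisObjectMetadata type dispatch on the 'type' discriminator and return the
# matching subclass. Calling the Deserializer directly on a dict like this is
# an assumption for demonstration; in the SDK it normally runs against HTTP
# responses.
from msrest import Deserializer
from azure.mgmt.datafactory import models

client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
deserialize = Deserializer(client_models)

payload = {'id': 7, 'name': 'Prod', 'type': 'Environment', 'folderId': 3}
obj = deserialize('SsisObjectMetadata', payload)
assert isinstance(obj, models.SsisEnvironment)   # picked via _subtype_map
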
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata import SsisObjectMetadata + + +class SsisPackage(SsisObjectMetadata): + """Ssis Package. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains package. + :type folder_id: long + :param project_version: Project version which contains package. + :type project_version: long + :param project_id: Project id which contains package. + :type project_id: long + :param parameters: Parameters in package + :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'project_version': {'key': 'projectVersion', 'type': 'long'}, + 'project_id': {'key': 'projectId', 'type': 'long'}, + 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, + } + + def __init__(self, **kwargs): + super(SsisPackage, self).__init__(**kwargs) + self.folder_id = kwargs.get('folder_id', None) + self.project_version = kwargs.get('project_version', None) + self.project_id = kwargs.get('project_id', None) + self.parameters = kwargs.get('parameters', None) + self.type = 'Package' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_py3.py new file mode 100644 index 000000000000..e1e932e97ae6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_py3.py @@ -0,0 +1,59 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata_py3 import SsisObjectMetadata + + +class SsisPackage(SsisObjectMetadata): + """Ssis Package. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains package. + :type folder_id: long + :param project_version: Project version which contains package. + :type project_version: long + :param project_id: Project id which contains package. 
+ :type project_id: long + :param parameters: Parameters in package + :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'project_version': {'key': 'projectVersion', 'type': 'long'}, + 'project_id': {'key': 'projectId', 'type': 'long'}, + 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, folder_id: int=None, project_version: int=None, project_id: int=None, parameters=None, **kwargs) -> None: + super(SsisPackage, self).__init__(id=id, name=name, description=description, **kwargs) + self.folder_id = folder_id + self.project_version = project_version + self.project_id = project_id + self.parameters = parameters + self.type = 'Package' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter.py new file mode 100644 index 000000000000..c456af0bab48 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisParameter(Model): + """Ssis parameter. + + :param id: Parameter id. + :type id: long + :param name: Parameter name. + :type name: str + :param description: Parameter description. + :type description: str + :param data_type: Parameter type. + :type data_type: str + :param required: Whether parameter is required. + :type required: bool + :param sensitive: Whether parameter is sensitive. + :type sensitive: bool + :param design_default_value: Design default value of parameter. + :type design_default_value: str + :param default_value: Default value of parameter. + :type default_value: str + :param sensitive_default_value: Default sensitive value of parameter. + :type sensitive_default_value: str + :param value_type: Parameter value type. + :type value_type: str + :param value_set: Parameter value set. + :type value_set: bool + :param variable: Parameter reference variable. 
+ :type variable: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'required': {'key': 'required', 'type': 'bool'}, + 'sensitive': {'key': 'sensitive', 'type': 'bool'}, + 'design_default_value': {'key': 'designDefaultValue', 'type': 'str'}, + 'default_value': {'key': 'defaultValue', 'type': 'str'}, + 'sensitive_default_value': {'key': 'sensitiveDefaultValue', 'type': 'str'}, + 'value_type': {'key': 'valueType', 'type': 'str'}, + 'value_set': {'key': 'valueSet', 'type': 'bool'}, + 'variable': {'key': 'variable', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SsisParameter, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.name = kwargs.get('name', None) + self.description = kwargs.get('description', None) + self.data_type = kwargs.get('data_type', None) + self.required = kwargs.get('required', None) + self.sensitive = kwargs.get('sensitive', None) + self.design_default_value = kwargs.get('design_default_value', None) + self.default_value = kwargs.get('default_value', None) + self.sensitive_default_value = kwargs.get('sensitive_default_value', None) + self.value_type = kwargs.get('value_type', None) + self.value_set = kwargs.get('value_set', None) + self.variable = kwargs.get('variable', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter_py3.py new file mode 100644 index 000000000000..6a4ff73768f0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisParameter(Model): + """Ssis parameter. + + :param id: Parameter id. + :type id: long + :param name: Parameter name. + :type name: str + :param description: Parameter description. + :type description: str + :param data_type: Parameter type. + :type data_type: str + :param required: Whether parameter is required. + :type required: bool + :param sensitive: Whether parameter is sensitive. + :type sensitive: bool + :param design_default_value: Design default value of parameter. + :type design_default_value: str + :param default_value: Default value of parameter. + :type default_value: str + :param sensitive_default_value: Default sensitive value of parameter. + :type sensitive_default_value: str + :param value_type: Parameter value type. + :type value_type: str + :param value_set: Parameter value set. + :type value_set: bool + :param variable: Parameter reference variable. 
+ :type variable: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'required': {'key': 'required', 'type': 'bool'}, + 'sensitive': {'key': 'sensitive', 'type': 'bool'}, + 'design_default_value': {'key': 'designDefaultValue', 'type': 'str'}, + 'default_value': {'key': 'defaultValue', 'type': 'str'}, + 'sensitive_default_value': {'key': 'sensitiveDefaultValue', 'type': 'str'}, + 'value_type': {'key': 'valueType', 'type': 'str'}, + 'value_set': {'key': 'valueSet', 'type': 'bool'}, + 'variable': {'key': 'variable', 'type': 'str'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, data_type: str=None, required: bool=None, sensitive: bool=None, design_default_value: str=None, default_value: str=None, sensitive_default_value: str=None, value_type: str=None, value_set: bool=None, variable: str=None, **kwargs) -> None: + super(SsisParameter, self).__init__(**kwargs) + self.id = id + self.name = name + self.description = description + self.data_type = data_type + self.required = required + self.sensitive = sensitive + self.design_default_value = design_default_value + self.default_value = default_value + self.sensitive_default_value = sensitive_default_value + self.value_type = value_type + self.value_set = value_set + self.variable = variable diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project.py new file mode 100644 index 000000000000..c29a36fb628e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata import SsisObjectMetadata + + +class SsisProject(SsisObjectMetadata): + """Ssis project. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains project. + :type folder_id: long + :param version: Project version. 
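The SSIS metadata models above (SsisPackage and the two SsisParameter variants) are plain msrest models that the service populates when returning SSIS catalog metadata. A minimal sketch of the shape they take, assuming both classes are re-exported from azure.mgmt.datafactory.models as the __init__ changes in this patch suggest; every value below is illustrative, not taken from this patch:

    from azure.mgmt.datafactory.models import SsisPackage, SsisParameter

    # Normally deserialized from a service response; built by hand here
    # only to show the field shapes.
    param = SsisParameter(
        id=1,
        name='ServerName',
        data_type='String',
        required=True,
        sensitive=False,
        default_value='localhost',
    )
    package = SsisPackage(
        id=10,
        name='ExtractOrders.dtsx',
        folder_id=2,
        project_id=5,
        project_version=3,
        parameters=[param],
    )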
+ :type version: long + :param environment_refs: Environment reference in project + :type environment_refs: + list[~azure.mgmt.datafactory.models.SsisEnvironmentReference] + :param parameters: Parameters in project + :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'version': {'key': 'version', 'type': 'long'}, + 'environment_refs': {'key': 'environmentRefs', 'type': '[SsisEnvironmentReference]'}, + 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, + } + + def __init__(self, **kwargs): + super(SsisProject, self).__init__(**kwargs) + self.folder_id = kwargs.get('folder_id', None) + self.version = kwargs.get('version', None) + self.environment_refs = kwargs.get('environment_refs', None) + self.parameters = kwargs.get('parameters', None) + self.type = 'Project' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project_py3.py new file mode 100644 index 000000000000..11b95a644e2f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project_py3.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata_py3 import SsisObjectMetadata + + +class SsisProject(SsisObjectMetadata): + """Ssis project. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains project. + :type folder_id: long + :param version: Project version. 
+ :type version: long + :param environment_refs: Environment reference in project + :type environment_refs: + list[~azure.mgmt.datafactory.models.SsisEnvironmentReference] + :param parameters: Parameters in project + :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'version': {'key': 'version', 'type': 'long'}, + 'environment_refs': {'key': 'environmentRefs', 'type': '[SsisEnvironmentReference]'}, + 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, folder_id: int=None, version: int=None, environment_refs=None, parameters=None, **kwargs) -> None: + super(SsisProject, self).__init__(id=id, name=name, description=description, **kwargs) + self.folder_id = folder_id + self.version = version + self.environment_refs = environment_refs + self.parameters = parameters + self.type = 'Project' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable.py new file mode 100644 index 000000000000..73fda3b27967 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable.py @@ -0,0 +1,52 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisVariable(Model): + """Ssis variable. + + :param id: Variable id. + :type id: long + :param name: Variable name. + :type name: str + :param description: Variable description. + :type description: str + :param data_type: Variable type. + :type data_type: str + :param sensitive: Whether variable is sensitive. + :type sensitive: bool + :param value: Variable value. + :type value: str + :param sensitive_value: Variable sensitive value. 
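SsisProject follows the same pattern; a hedged sketch with made-up values:

    from azure.mgmt.datafactory.models import SsisProject

    # Illustrative only: a project entry as it might come back from the
    # SSIS catalog, with no environment references or parameters attached.
    project = SsisProject(
        id=5,
        name='SalesETL',
        folder_id=2,
        version=3,
        environment_refs=None,
        parameters=None,
    )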
+ :type sensitive_value: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'sensitive': {'key': 'sensitive', 'type': 'bool'}, + 'value': {'key': 'value', 'type': 'str'}, + 'sensitive_value': {'key': 'sensitiveValue', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SsisVariable, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.name = kwargs.get('name', None) + self.description = kwargs.get('description', None) + self.data_type = kwargs.get('data_type', None) + self.sensitive = kwargs.get('sensitive', None) + self.value = kwargs.get('value', None) + self.sensitive_value = kwargs.get('sensitive_value', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable_py3.py new file mode 100644 index 000000000000..e709842ff465 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable_py3.py @@ -0,0 +1,52 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisVariable(Model): + """Ssis variable. + + :param id: Variable id. + :type id: long + :param name: Variable name. + :type name: str + :param description: Variable description. + :type description: str + :param data_type: Variable type. + :type data_type: str + :param sensitive: Whether variable is sensitive. + :type sensitive: bool + :param value: Variable value. + :type value: str + :param sensitive_value: Variable sensitive value. 
+ :type sensitive_value: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'sensitive': {'key': 'sensitive', 'type': 'bool'}, + 'value': {'key': 'value', 'type': 'str'}, + 'sensitive_value': {'key': 'sensitiveValue', 'type': 'str'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, data_type: str=None, sensitive: bool=None, value: str=None, sensitive_value: str=None, **kwargs) -> None: + super(SsisVariable, self).__init__(**kwargs) + self.id = id + self.name = name + self.description = description + self.data_type = data_type + self.sensitive = sensitive + self.value = value + self.sensitive_value = sensitive_value diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service.py index 634b4268bdb5..83de0e6f61f2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service.py @@ -29,7 +29,7 @@ class SybaseLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service_py3.py index 59b20a5f73cd..5b6cc0ce6ded 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service_py3.py @@ -29,7 +29,7 @@ class SybaseLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator.py index fdd098ae9659..043c537ad860 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator.py @@ -31,6 +31,10 @@ class TabularTranslator(CopyTranslator): "$.Column2.Property1", "Column3": "$.Column2.Property2"}. Type: object (or Expression with resultType object). :type schema_mapping: object + :param collection_reference: The JSON Path of the Nested Array that is + going to do cross-apply. Type: object (or Expression with resultType + object). 
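For completeness, a sketch of SsisVariable with invented values; a sensitive variable would carry sensitive_value instead of value:

    from azure.mgmt.datafactory.models import SsisVariable

    # Illustrative only: a non-sensitive variable from environment metadata.
    var = SsisVariable(
        id=7,
        name='BatchSize',
        data_type='Int32',
        sensitive=False,
        value='500',
    )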
+ :type collection_reference: object """ _validation = { @@ -42,10 +46,12 @@ class TabularTranslator(CopyTranslator): 'type': {'key': 'type', 'type': 'str'}, 'column_mappings': {'key': 'columnMappings', 'type': 'object'}, 'schema_mapping': {'key': 'schemaMapping', 'type': 'object'}, + 'collection_reference': {'key': 'collectionReference', 'type': 'object'}, } def __init__(self, **kwargs): super(TabularTranslator, self).__init__(**kwargs) self.column_mappings = kwargs.get('column_mappings', None) self.schema_mapping = kwargs.get('schema_mapping', None) + self.collection_reference = kwargs.get('collection_reference', None) self.type = 'TabularTranslator' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator_py3.py index 0bd2ce51a0f0..cb1c11e5bb53 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator_py3.py @@ -31,6 +31,10 @@ class TabularTranslator(CopyTranslator): "$.Column2.Property1", "Column3": "$.Column2.Property2"}. Type: object (or Expression with resultType object). :type schema_mapping: object + :param collection_reference: The JSON Path of the Nested Array that is + going to do cross-apply. Type: object (or Expression with resultType + object). + :type collection_reference: object """ _validation = { @@ -42,10 +46,12 @@ class TabularTranslator(CopyTranslator): 'type': {'key': 'type', 'type': 'str'}, 'column_mappings': {'key': 'columnMappings', 'type': 'object'}, 'schema_mapping': {'key': 'schemaMapping', 'type': 'object'}, + 'collection_reference': {'key': 'collectionReference', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, column_mappings=None, schema_mapping=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, column_mappings=None, schema_mapping=None, collection_reference=None, **kwargs) -> None: super(TabularTranslator, self).__init__(additional_properties=additional_properties, **kwargs) self.column_mappings = column_mappings self.schema_mapping = schema_mapping + self.collection_reference = collection_reference self.type = 'TabularTranslator' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py index b3847d7dd9f4..78b89638b359 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py @@ -29,7 +29,7 @@ class TeradataLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
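The collection_reference property added to TabularTranslator above is the notable behavioral change here. A minimal sketch of how it might be used to cross-apply a nested JSON array during a copy; the JSON paths and column names are invented:

    from azure.mgmt.datafactory.models import TabularTranslator

    # Each element of the hypothetical '$.orders' array becomes a row;
    # the paths in schema_mapping are resolved relative to that array.
    translator = TabularTranslator(
        schema_mapping={'OrderId': '$.orderId', 'Amount': '$.amount'},
        collection_reference='$.orders',
    )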
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py index 236741422023..e80b776454c0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py @@ -29,7 +29,7 @@ class TeradataLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py index 398402178ae4..728ffc32bcb5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py @@ -35,6 +35,9 @@ class Trigger(Model): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str """ @@ -48,6 +51,7 @@ class Trigger(Model): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, } @@ -60,4 +64,5 @@ def __init__(self, **kwargs): self.additional_properties = kwargs.get('additional_properties', None) self.description = kwargs.get('description', None) self.runtime_state = None + self.annotations = kwargs.get('annotations', None) self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py index 09fb39534be1..862973544ab4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py @@ -35,6 +35,9 @@ class Trigger(Model): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str """ @@ -48,6 +51,7 @@ class Trigger(Model): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, } @@ -55,9 +59,10 @@ class Trigger(Model): 'type': {'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} } - def __init__(self, *, additional_properties=None, description: str=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, description: str=None, annotations=None, **kwargs) -> None: super(Trigger, self).__init__(**kwargs) self.additional_properties = additional_properties self.description = description self.runtime_state = None + self.annotations = annotations self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger.py index ce46a4aac7e2..939624ae5042 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger.py @@ -32,6 +32,9 @@ class TumblingWindowTrigger(Trigger): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param pipeline: Required. Pipeline for which runs are created when an @@ -82,6 +85,7 @@ class TumblingWindowTrigger(Trigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, 'frequency': {'key': 'typeProperties.frequency', 'type': 'str'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_py3.py index bc3114f08edd..6856629c8b91 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_py3.py @@ -32,6 +32,9 @@ class TumblingWindowTrigger(Trigger): 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param pipeline: Required. 
Pipeline for which runs are created when an @@ -82,6 +85,7 @@ class TumblingWindowTrigger(Trigger): 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, 'frequency': {'key': 'typeProperties.frequency', 'type': 'str'}, @@ -94,8 +98,8 @@ class TumblingWindowTrigger(Trigger): 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[DependencyReference]'}, } - def __init__(self, *, pipeline, frequency, interval: int, start_time, max_concurrency: int, additional_properties=None, description: str=None, end_time=None, delay=None, retry_policy=None, depends_on=None, **kwargs) -> None: - super(TumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, **kwargs) + def __init__(self, *, pipeline, frequency, interval: int, start_time, max_concurrency: int, additional_properties=None, description: str=None, annotations=None, end_time=None, delay=None, retry_policy=None, depends_on=None, **kwargs) -> None: + super(TumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) self.pipeline = pipeline self.frequency = frequency self.interval = interval diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity.py new file mode 100644 index 000000000000..0d92d32c12b0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity.py @@ -0,0 +1,81 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity import ControlActivity + + +class ValidationActivity(ControlActivity): + """This activity verifies that an external resource exists. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param timeout: Specifies the timeout for the activity to run. If there is + no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 + week as default. Type: string (or Expression with resultType string), + pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
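The annotations parameter added to Trigger is threaded through each derived trigger's constructor, as the TumblingWindowTrigger change above shows. A hedged sketch with placeholder pipeline and schedule values:

    from datetime import datetime
    from azure.mgmt.datafactory.models import (
        PipelineReference, TriggerPipelineReference, TumblingWindowTrigger)

    # All names, times, and annotation tags here are illustrative.
    trigger = TumblingWindowTrigger(
        pipeline=TriggerPipelineReference(
            pipeline_reference=PipelineReference(reference_name='CopyPipeline')),
        frequency='Hour',
        interval=1,
        start_time=datetime(2019, 6, 1),
        max_concurrency=5,
        annotations=['hourly', 'production'],
    )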
+ :type timeout: object + :param sleep: A delay in seconds between validation attempts. If no value + is specified, 10 seconds will be used as the default. Type: integer (or + Expression with resultType integer). + :type sleep: object + :param minimum_size: Can be used if dataset points to a file. The file + must be greater than or equal in size to the value specified. Type: + integer (or Expression with resultType integer). + :type minimum_size: object + :param child_items: Can be used if dataset points to a folder. If set to + true, the folder must have at least one file. If set to false, the folder + must be empty. Type: boolean (or Expression with resultType boolean). + :type child_items: object + :param dataset: Required. Validation activity dataset reference. + :type dataset: ~azure.mgmt.datafactory.models.DatasetReference + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'dataset': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'timeout': {'key': 'typeProperties.timeout', 'type': 'object'}, + 'sleep': {'key': 'typeProperties.sleep', 'type': 'object'}, + 'minimum_size': {'key': 'typeProperties.minimumSize', 'type': 'object'}, + 'child_items': {'key': 'typeProperties.childItems', 'type': 'object'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + } + + def __init__(self, **kwargs): + super(ValidationActivity, self).__init__(**kwargs) + self.timeout = kwargs.get('timeout', None) + self.sleep = kwargs.get('sleep', None) + self.minimum_size = kwargs.get('minimum_size', None) + self.child_items = kwargs.get('child_items', None) + self.dataset = kwargs.get('dataset', None) + self.type = 'Validation' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity_py3.py new file mode 100644 index 000000000000..f4680400b447 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity_py3.py @@ -0,0 +1,81 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity_py3 import ControlActivity + + +class ValidationActivity(ControlActivity): + """This activity verifies that an external resource exists. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param timeout: Specifies the timeout for the activity to run. If there is + no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 + week as default. Type: string (or Expression with resultType string), + pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type timeout: object + :param sleep: A delay in seconds between validation attempts. If no value + is specified, 10 seconds will be used as the default. Type: integer (or + Expression with resultType integer). + :type sleep: object + :param minimum_size: Can be used if dataset points to a file. The file + must be greater than or equal in size to the value specified. Type: + integer (or Expression with resultType integer). + :type minimum_size: object + :param child_items: Can be used if dataset points to a folder. If set to + true, the folder must have at least one file. If set to false, the folder + must be empty. Type: boolean (or Expression with resultType boolean). + :type child_items: object + :param dataset: Required. Validation activity dataset reference. + :type dataset: ~azure.mgmt.datafactory.models.DatasetReference + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'dataset': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'timeout': {'key': 'typeProperties.timeout', 'type': 'object'}, + 'sleep': {'key': 'typeProperties.sleep', 'type': 'object'}, + 'minimum_size': {'key': 'typeProperties.minimumSize', 'type': 'object'}, + 'child_items': {'key': 'typeProperties.childItems', 'type': 'object'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + } + + def __init__(self, *, name: str, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, timeout=None, sleep=None, minimum_size=None, child_items=None, **kwargs) -> None: + super(ValidationActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.timeout = timeout + self.sleep = sleep + self.minimum_size = minimum_size + self.child_items = child_items + self.dataset = dataset + self.type = 'Validation' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service.py index fafba164a752..6b5e8d0103f5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service.py @@ -29,7 +29,7 @@ class VerticaLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. 
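A minimal sketch of the new ValidationActivity, assuming a dataset named 'InputFolderDataset' exists in the target factory; it waits until the folder behind that dataset contains at least one file:

    from azure.mgmt.datafactory.models import DatasetReference, ValidationActivity

    # Poll every 30 seconds, give up after one hour.
    validate_step = ValidationActivity(
        name='WaitForInputFolder',
        dataset=DatasetReference(reference_name='InputFolderDataset'),
        timeout='01:00:00',
        sleep=30,
        child_items=True,
    )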
:type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service_py3.py index 77caf915eaab..3aee3a5ae0f6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service_py3.py @@ -29,7 +29,7 @@ class VerticaLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source.py index 1670c0e9fc49..d0b642f15d38 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source.py @@ -27,6 +27,10 @@ class VerticaSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class VerticaSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source_py3.py index 6be2edd35218..a1c4d755f2b4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source_py3.py @@ -27,6 +27,10 @@ class VerticaSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
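max_concurrent_connections is added uniformly across the copy sources in this patch (VerticaSource here, and WebSource, XeroSource, and ZohoSource below), so one sketch covers the pattern; the query text is invented:

    from azure.mgmt.datafactory.models import VerticaSource

    # Cap the source side of the copy at 4 parallel connections.
    source = VerticaSource(
        query='SELECT * FROM public.orders',
        max_concurrent_connections=4,
    )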
Type: string (or @@ -42,11 +46,12 @@ class VerticaSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'VerticaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity.py new file mode 100644 index 000000000000..1c648c42c3e2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity.py @@ -0,0 +1,92 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity import ControlActivity + + +class WebHookActivity(ControlActivity): + """WebHook activity. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :ivar method: Required. REST API method for target endpoint. Default + value: "POST". + :vartype method: str + :param url: Required. WebHook activity target endpoint and path. Type: + string (or Expression with resultType string). + :type url: object + :param timeout: The timeout within which the webhook should be called + back. If there is no value specified, it defaults to 10 minutes. Type: + string. Pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type timeout: str + :param headers: Represents the headers that will be sent to the request.
+ For example, to set the language and type on a request: "headers" : { + "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: + string (or Expression with resultType string). + :type headers: object + :param body: Represents the payload that will be sent to the endpoint. + Required for POST/PUT method, not allowed for GET method. Type: string (or + Expression with resultType string). + :type body: object + :param authentication: Authentication method used for calling the + endpoint. + :type authentication: + ~azure.mgmt.datafactory.models.WebActivityAuthentication + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'method': {'required': True, 'constant': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'method': {'key': 'typeProperties.method', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'timeout': {'key': 'typeProperties.timeout', 'type': 'str'}, + 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, + 'body': {'key': 'typeProperties.body', 'type': 'object'}, + 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'}, + } + + method = "POST" + + def __init__(self, **kwargs): + super(WebHookActivity, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.timeout = kwargs.get('timeout', None) + self.headers = kwargs.get('headers', None) + self.body = kwargs.get('body', None) + self.authentication = kwargs.get('authentication', None) + self.type = 'WebHook' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity_py3.py new file mode 100644 index 000000000000..40cdc6f732da --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity_py3.py @@ -0,0 +1,92 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity_py3 import ControlActivity + + +class WebHookActivity(ControlActivity): + """WebHook activity. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties.
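A hedged sketch of the new WebHookActivity; the endpoint URL and payload are placeholders. Note that method is a class-level constant pinned to "POST", so callers only supply the url and the optional callback settings:

    from azure.mgmt.datafactory.models import WebHookActivity

    # The target endpoint must call back within 'timeout' (here 10 minutes).
    webhook = WebHookActivity(
        name='NotifyApproval',
        url='https://contoso.example/hooks/approval',
        timeout='00:10:00',
        headers={'Content-Type': 'application/json'},
        body={'runId': '@pipeline().RunId'},
    )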
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :ivar method: Required. REST API method for target endpoint. Default + value: "POST". + :vartype method: str + :param url: Required. WebHook activity target endpoint and path. Type: + string (or Expression with resultType string). + :type url: object + :param timeout: The timeout within which the webhook should be called + back. If there is no value specified, it defaults to 10 minutes. Type: + string. Pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type timeout: str + :param headers: Represents the headers that will be sent to the request. + For example, to set the language and type on a request: "headers" : { + "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: + string (or Expression with resultType string). + :type headers: object + :param body: Represents the payload that will be sent to the endpoint. + Required for POST/PUT method, not allowed for GET method. Type: string (or + Expression with resultType string). + :type body: object + :param authentication: Authentication method used for calling the + endpoint. + :type authentication: + ~azure.mgmt.datafactory.models.WebActivityAuthentication + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'method': {'required': True, 'constant': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'method': {'key': 'typeProperties.method', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'timeout': {'key': 'typeProperties.timeout', 'type': 'str'}, + 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, + 'body': {'key': 'typeProperties.body', 'type': 'object'}, + 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'}, + } + + method = "POST" + + def __init__(self, *, name: str, url, additional_properties=None, description: str=None, depends_on=None, user_properties=None, timeout: str=None, headers=None, body=None, authentication=None, **kwargs) -> None: + super(WebHookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.url = url + self.timeout = timeout + self.headers = headers + self.body = body + self.authentication = authentication + self.type = 'WebHook' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service.py index cee3bd37409c..18fadba3f3ee 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service.py @@ -29,7 +29,7 @@ class WebLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required.
Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_py3.py index 3afa3a1bcb05..3e491b0fac4d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_py3.py @@ -29,7 +29,7 @@ class WebLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source.py index 13bcbfbb62d7..c5d3a2a8f00a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source.py @@ -27,6 +27,10 @@ class WebSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str """ @@ -39,6 +43,7 @@ class WebSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source_py3.py index 7c5ce29d3d26..684e1d4233cc 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source_py3.py @@ -27,6 +27,10 @@ class WebSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. 
:type type: str """ @@ -39,9 +43,10 @@ class WebSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, **kwargs) -> None: - super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: + super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'WebSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service.py index e9daa4ff7d2a..24973f577133 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service.py @@ -29,7 +29,7 @@ class XeroLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service_py3.py index eb665519f4ea..433c65ade739 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service_py3.py @@ -29,7 +29,7 @@ class XeroLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source.py index 4695780bf41b..a37852a5b419 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source.py @@ -27,6 +27,10 @@ class XeroSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,6 +46,7 @@ class XeroSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source_py3.py index 8de950856bae..bbee6c6fa1f0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source_py3.py @@ -27,6 +27,10 @@ class XeroSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,11 +46,12 @@ class XeroSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'XeroSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service.py index 997efb5fc242..fe34dff77ea9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service.py @@ -29,7 +29,7 @@ class ZohoLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. 
:type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service_py3.py index c05d018146d6..f82f6221592b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service_py3.py @@ -29,7 +29,7 @@ class ZohoLinkedService(LinkedService): :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - Dataset. + linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source.py index 248d50d55297..274c6fc09f19 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source.py @@ -27,6 +27,10 @@ class ZohoSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or @@ -42,6 +46,7 @@ class ZohoSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source_py3.py index 5f0547d9465a..6d7dc29bdf8a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source_py3.py @@ -27,6 +27,10 @@ class ZohoSource(CopySource): with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or @@ -42,11 +46,12 @@ class ZohoSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None: - super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'ZohoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/exposure_control_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/exposure_control_operations.py index 080e8c87ba18..4a648d96586c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/exposure_control_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/exposure_control_operations.py @@ -105,3 +105,75 @@ def get_feature_value( return deserialized get_feature_value.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/getFeatureValue'} + + def get_feature_value_by_factory( + self, resource_group_name, factory_name, feature_name=None, feature_type=None, custom_headers=None, raw=False, **operation_config): + """Get exposure control feature for specific factory. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param feature_name: The feature name. + :type feature_name: str + :param feature_type: The feature type. + :type feature_type: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: ExposureControlResponse or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.ExposureControlResponse or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + exposure_control_request = models.ExposureControlRequest(feature_name=feature_name, feature_type=feature_type) + + # Construct URL + url = self.get_feature_value_by_factory.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('ExposureControlResponse', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get_feature_value_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getFeatureValue'} diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipelines_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipelines_operations.py index 8a01ce6a8408..343396e705ac 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipelines_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipelines_operations.py @@ -312,7 +312,7 @@ def delete( delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} def create_run( - self, resource_group_name, factory_name, pipeline_name, reference_pipeline_run_id=None, parameters=None, custom_headers=None, raw=False, **operation_config): + self, resource_group_name, factory_name, pipeline_name, reference_pipeline_run_id=None, is_recovery=None, start_activity_name=None, parameters=None, custom_headers=None, raw=False, **operation_config): """Creates a run of a 
pipeline. :param resource_group_name: The resource group name. @@ -325,6 +325,13 @@ def create_run( ID is specified the parameters of the specified run will be used to create a new run. :type reference_pipeline_run_id: str + :param is_recovery: Recovery mode flag. If recovery mode is set to + true, the specified referenced pipeline run and the new run will be + grouped under the same groupId. + :type is_recovery: bool + :param start_activity_name: In recovery mode, the rerun will start + from this activity. If not specified, all activities will run. + :type start_activity_name: str :param parameters: Parameters of the pipeline run. These parameters will be used only if the runId is not specified. :type parameters: dict[str, object] @@ -353,6 +360,10 @@ def create_run( query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') if reference_pipeline_run_id is not None: query_parameters['referencePipelineRunId'] = self._serialize.query("reference_pipeline_run_id", reference_pipeline_run_id, 'str') + if is_recovery is not None: + query_parameters['isRecovery'] = self._serialize.query("is_recovery", is_recovery, 'bool') + if start_activity_name is not None: + query_parameters['startActivityName'] = self._serialize.query("start_activity_name", start_activity_name, 'str') # Construct headers header_parameters = {} diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py index 981739e4ff95..a39916c162ce 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py @@ -9,5 +9,5 @@ # regenerated. # -------------------------------------------------------------------------- -VERSION = "0.7.0" +VERSION = "1.0.0" diff --git a/sdk/datafactory/azure-mgmt-datafactory/setup.py b/sdk/datafactory/azure-mgmt-datafactory/setup.py index 4b3ca4777aca..bc211c4c2c5f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/setup.py +++ b/sdk/datafactory/azure-mgmt-datafactory/setup.py @@ -53,6 +53,7 @@ version=version, description='Microsoft Azure {} Client Library for Python'.format(PACKAGE_PPRINT_NAME), long_description=readme + '\n\n' + history, + long_description_content_type='text/x-rst', license='MIT License', author='Microsoft Corporation', author_email='azpysdkhelp@microsoft.com', From 205aa235ac2e3ba464141e115d37d1755fa8cdef Mon Sep 17 00:00:00 2001 From: Azure SDK for Python bot Date: Thu, 13 Jun 2019 13:09:10 -0700 Subject: [PATCH 02/30] [AutoPR datafactory/resource-manager] Add Dataset and CopySource for SAP HANA (#5835) * Generated from 5f85e81e98e9fea4da62b1d4eed0a9bfc4b2bf5e Update Pipeline.json * Generated from 5f85e81e98e9fea4da62b1d4eed0a9bfc4b2bf5e Update Pipeline.json --- .../azure/mgmt/datafactory/models/__init__.py | 168 +++++++++++++++--- .../datafactory/models/amazon_s3_location.py | 55 ++++++ .../models/amazon_s3_location_py3.py | 55 ++++++ .../models/amazon_s3_read_setting.py | 78 ++++++++ .../models/amazon_s3_read_setting_py3.py | 78 ++++++++ .../models/azure_blob_fs_location.py | 50 ++++++ .../models/azure_blob_fs_location_py3.py | 50 ++++++ .../models/azure_blob_fs_read_setting.py | 73 ++++++++ .../models/azure_blob_fs_read_setting_py3.py | 73 ++++++++ .../datafactory/models/azure_blob_fs_sink.py | 8 +- .../models/azure_blob_fs_sink_py3.py | 8 +- .../models/azure_blob_fs_write_setting.py | 45 +++++ .../models/azure_blob_fs_write_setting_py3.py | 45 
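[Editor's note] The two operation changes above are easiest to read together from the caller's side. A minimal sketch, assuming `client` is an already-authenticated DataFactoryManagementClient and all resource and feature names are placeholders:

from azure.mgmt.datafactory import DataFactoryManagementClient

def rerun_and_check(client: DataFactoryManagementClient, failed_run_id: str):
    # Rerun a failed pipeline in recovery mode: the new run is grouped under
    # the same groupId as the referenced run and resumes at a named activity.
    run = client.pipelines.create_run(
        'my-rg', 'myfactory', 'CopyPipeline',   # placeholder names
        reference_pipeline_run_id=failed_run_id,
        is_recovery=True,
        start_activity_name='CopyStep',         # placeholder; omit to rerun all activities
    )
    # The new factory-scoped exposure-control lookup added above follows the
    # same shape as the existing location-scoped get_feature_value.
    feature = client.exposure_control.get_feature_value_by_factory(
        'my-rg', 'myfactory',
        feature_name='SomeFeature',             # placeholder feature
        feature_type='Feature')
    return run, feature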
+++++ .../models/azure_blob_storage_location.py | 50 ++++++ .../models/azure_blob_storage_location_py3.py | 50 ++++++ .../models/azure_blob_storage_read_setting.py | 73 ++++++++ .../azure_blob_storage_read_setting_py3.py | 73 ++++++++ .../azure_blob_storage_write_setting.py | 45 +++++ .../azure_blob_storage_write_setting_py3.py | 45 +++++ .../models/azure_data_lake_store_location.py | 45 +++++ .../azure_data_lake_store_location_py3.py | 45 +++++ .../azure_data_lake_store_read_setting.py | 73 ++++++++ .../azure_data_lake_store_read_setting_py3.py | 73 ++++++++ .../models/azure_data_lake_store_sink.py | 12 +- .../models/azure_data_lake_store_sink_py3.py | 14 +- .../azure_data_lake_store_write_setting.py | 45 +++++ ...azure_data_lake_store_write_setting_py3.py | 45 +++++ .../models/azure_search_index_sink.py | 7 +- .../models/azure_search_index_sink_py3.py | 7 +- .../models/azure_sql_dw_table_dataset.py | 5 +- .../models/azure_sql_dw_table_dataset_py3.py | 7 +- .../mgmt/datafactory/models/azure_sql_sink.py | 87 +++++++++ .../datafactory/models/azure_sql_sink_py3.py | 87 +++++++++ .../datafactory/models/azure_sql_source.py | 73 ++++++++ .../models/azure_sql_source_py3.py | 73 ++++++++ .../models/azure_sql_table_dataset.py | 5 +- .../models/azure_sql_table_dataset_py3.py | 7 +- .../mgmt/datafactory/models/blob_sink.py | 8 +- .../mgmt/datafactory/models/blob_sink_py3.py | 8 +- .../datafactory/models/cassandra_source.py | 9 +- .../models/cassandra_source_py3.py | 9 +- .../models/connector_read_setting.py | 45 +++++ .../models/connector_read_setting_py3.py | 45 +++++ .../models/connector_write_setting.py | 49 +++++ .../models/connector_write_setting_py3.py | 49 +++++ .../mgmt/datafactory/models/copy_activity.py | 8 +- .../datafactory/models/copy_activity_py3.py | 10 +- .../mgmt/datafactory/models/copy_sink.py | 9 +- .../mgmt/datafactory/models/copy_sink_py3.py | 9 +- .../mgmt/datafactory/models/copy_source.py | 8 +- .../datafactory/models/copy_source_py3.py | 8 +- .../data_factory_management_client_enums.py | 64 +------ .../azure/mgmt/datafactory/models/dataset.py | 23 +-- .../models/dataset_deflate_compression.py | 7 +- .../models/dataset_deflate_compression_py3.py | 7 +- .../models/dataset_gzip_compression.py | 7 +- .../models/dataset_gzip_compression_py3.py | 7 +- .../datafactory/models/dataset_location.py | 49 +++++ .../models/dataset_location_py3.py | 49 +++++ .../mgmt/datafactory/models/dataset_py3.py | 23 +-- .../models/dataset_zip_deflate_compression.py | 7 +- .../dataset_zip_deflate_compression_py3.py | 7 +- .../models/delimited_text_dataset.py | 122 +++++++++++++ .../models/delimited_text_dataset_py3.py | 122 +++++++++++++ .../models/delimited_text_read_setting.py | 43 +++++ .../models/delimited_text_read_setting_py3.py | 43 +++++ .../datafactory/models/delimited_text_sink.py | 70 ++++++++ .../models/delimited_text_sink_py3.py | 70 ++++++++ .../models/delimited_text_source.py | 61 +++++++ .../models/delimited_text_source_py3.py | 61 +++++++ .../models/delimited_text_write_setting.py | 49 +++++ .../delimited_text_write_setting_py3.py | 49 +++++ .../models/document_db_collection_sink.py | 5 + .../models/document_db_collection_sink_py3.py | 7 +- .../models/dynamics_ax_resource_dataset.py | 4 +- .../dynamics_ax_resource_dataset_py3.py | 6 +- .../mgmt/datafactory/models/dynamics_sink.py | 7 +- .../datafactory/models/dynamics_sink_py3.py | 7 +- .../datafactory/models/entity_reference.py | 34 ++++ .../models/entity_reference_py3.py | 34 ++++ .../models/file_server_location.py | 45 +++++ 
.../models/file_server_location_py3.py | 45 +++++ .../models/file_server_read_setting.py | 73 ++++++++ .../models/file_server_read_setting_py3.py | 73 ++++++++ .../models/file_server_write_setting.py | 45 +++++ .../models/file_server_write_setting_py3.py | 45 +++++ .../datafactory/models/file_system_sink.py | 8 +- .../models/file_system_sink_py3.py | 8 +- ...y_translator.py => format_read_setting.py} | 17 +- ...ator_py3.py => format_read_setting_py3.py} | 19 +- .../models/format_write_setting.py | 39 ++++ .../models/format_write_setting_py3.py | 39 ++++ .../datafactory/models/ftp_read_setting.py | 63 +++++++ .../models/ftp_read_setting_py3.py | 63 +++++++ .../datafactory/models/ftp_server_location.py | 45 +++++ .../models/ftp_server_location_py3.py | 45 +++++ .../mgmt/datafactory/models/hdfs_location.py | 45 +++++ .../datafactory/models/hdfs_location_py3.py | 45 +++++ .../datafactory/models/hdfs_read_setting.py | 77 ++++++++ .../models/hdfs_read_setting_py3.py | 77 ++++++++ .../datafactory/models/http_read_setting.py | 63 +++++++ .../models/http_read_setting_py3.py | 63 +++++++ .../models/http_server_location.py | 50 ++++++ .../models/http_server_location_py3.py | 50 ++++++ ...tegration_runtime_data_proxy_properties.py | 37 ++++ ...ation_runtime_data_proxy_properties_py3.py | 37 ++++ .../integration_runtime_ssis_properties.py | 6 + ...integration_runtime_ssis_properties_py3.py | 8 +- .../mgmt/datafactory/models/json_format.py | 8 +- .../datafactory/models/json_format_py3.py | 8 +- .../mgmt/datafactory/models/linked_service.py | 43 ++--- .../datafactory/models/linked_service_py3.py | 43 ++--- .../models/oracle_partition_settings.py | 46 +++++ .../models/oracle_partition_settings_py3.py | 46 +++++ .../mgmt/datafactory/models/oracle_source.py | 11 ++ .../datafactory/models/oracle_source_py3.py | 13 +- .../datafactory/models/parquet_dataset.py | 76 ++++++++ .../datafactory/models/parquet_dataset_py3.py | 76 ++++++++ .../mgmt/datafactory/models/parquet_sink.py | 65 +++++++ .../datafactory/models/parquet_sink_py3.py | 65 +++++++ .../mgmt/datafactory/models/parquet_source.py | 56 ++++++ .../datafactory/models/parquet_source_py3.py | 56 ++++++ .../datafactory/models/salesforce_sink.py | 7 +- .../datafactory/models/salesforce_sink_py3.py | 7 +- .../datafactory/models/salesforce_source.py | 7 +- .../models/salesforce_source_py3.py | 7 +- .../models/sap_cloud_for_customer_sink.py | 7 +- .../models/sap_cloud_for_customer_sink_py3.py | 7 +- .../models/sap_ecc_resource_dataset.py | 4 +- .../models/sap_ecc_resource_dataset_py3.py | 6 +- .../mgmt/datafactory/models/sap_ecc_source.py | 4 +- .../datafactory/models/sap_ecc_source_py3.py | 6 +- .../models/sap_hana_linked_service.py | 5 + .../models/sap_hana_linked_service_py3.py | 7 +- .../datafactory/models/sap_hana_source.py | 62 +++++++ .../datafactory/models/sap_hana_source_py3.py | 62 +++++++ .../models/sap_hana_table_dataset.py | 77 ++++++++ .../models/sap_hana_table_dataset_py3.py | 77 ++++++++ .../models/sap_table_linked_service.py | 140 +++++++++++++++ .../models/sap_table_linked_service_py3.py | 140 +++++++++++++++ .../models/sap_table_partition_settings.py | 47 +++++ .../sap_table_partition_settings_py3.py | 47 +++++ .../models/sap_table_resource_dataset.py | 73 ++++++++ .../models/sap_table_resource_dataset_py3.py | 73 ++++++++ .../datafactory/models/sap_table_source.py | 97 ++++++++++ .../models/sap_table_source_py3.py | 97 ++++++++++ .../mgmt/datafactory/models/sftp_location.py | 45 +++++ .../datafactory/models/sftp_location_py3.py | 45 +++++ 
.../datafactory/models/sftp_read_setting.py | 68 +++++++ .../models/sftp_read_setting_py3.py | 68 +++++++ .../datafactory/models/sql_server_sink.py | 87 +++++++++ .../datafactory/models/sql_server_sink_py3.py | 87 +++++++++ .../datafactory/models/sql_server_source.py | 73 ++++++++ .../models/sql_server_source_py3.py | 73 ++++++++ .../models/sql_server_table_dataset.py | 5 +- .../models/sql_server_table_dataset_py3.py | 7 +- .../azure/mgmt/datafactory/models/sql_sink.py | 6 + .../mgmt/datafactory/models/sql_sink_py3.py | 8 +- .../models/stored_procedure_parameter.py | 2 +- .../models/stored_procedure_parameter_py3.py | 2 +- .../datafactory/models/tabular_translator.py | 57 ------ .../models/tabular_translator_py3.py | 57 ------ 162 files changed, 6277 insertions(+), 471 deletions(-) create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting_py3.py create mode 100644 
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting_py3.py rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{copy_translator.py => format_read_setting.py} (73%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{copy_translator_py3.py => format_read_setting_py3.py} (68%) create mode 100644 
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings.py create mode 100644 
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator_py3.py diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index f8279c1a99bd..f08086a1e9f9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -96,6 +96,7 @@ from .multiple_pipeline_trigger_py3 import MultiplePipelineTrigger from .azure_function_linked_service_py3 import AzureFunctionLinkedService from .azure_data_explorer_linked_service_py3 import AzureDataExplorerLinkedService + from .sap_table_linked_service_py3 import SapTableLinkedService from .google_ad_words_linked_service_py3 import GoogleAdWordsLinkedService from .oracle_service_cloud_linked_service_py3 import OracleServiceCloudLinkedService from .dynamics_ax_linked_service_py3 import DynamicsAXLinkedService @@ -230,9 +231,11 @@ from .http_dataset_py3 import HttpDataset from .azure_search_index_dataset_py3 import AzureSearchIndexDataset from .web_table_dataset_py3 import WebTableDataset + from .sap_table_resource_dataset_py3 import SapTableResourceDataset from .rest_resource_dataset_py3 import RestResourceDataset from .sql_server_table_dataset_py3 import SqlServerTableDataset from .sap_open_hub_table_dataset_py3 import SapOpenHubTableDataset + from .sap_hana_table_dataset_py3 import SapHanaTableDataset from .sap_ecc_resource_dataset_py3 import SapEccResourceDataset from .sap_cloud_for_customer_resource_dataset_py3 import SapCloudForCustomerResourceDataset from .salesforce_object_dataset_py3 import SalesforceObjectDataset @@ -255,6 +258,18 @@ from .azure_sql_table_dataset_py3 import AzureSqlTableDataset from .azure_table_dataset_py3 import AzureTableDataset from .azure_blob_dataset_py3 import AzureBlobDataset + from .hdfs_location_py3 import 
HdfsLocation + from .http_server_location_py3 import HttpServerLocation + from .sftp_location_py3 import SftpLocation + from .ftp_server_location_py3 import FtpServerLocation + from .file_server_location_py3 import FileServerLocation + from .amazon_s3_location_py3 import AmazonS3Location + from .azure_data_lake_store_location_py3 import AzureDataLakeStoreLocation + from .azure_blob_fs_location_py3 import AzureBlobFSLocation + from .azure_blob_storage_location_py3 import AzureBlobStorageLocation + from .dataset_location_py3 import DatasetLocation + from .delimited_text_dataset_py3 import DelimitedTextDataset + from .parquet_dataset_py3 import ParquetDataset from .amazon_s3_dataset_py3 import AmazonS3Dataset from .activity_policy_py3 import ActivityPolicy from .azure_function_activity_py3 import AzureFunctionActivity @@ -313,6 +328,7 @@ from .mongo_db_source_py3 import MongoDbSource from .cassandra_source_py3 import CassandraSource from .web_source_py3 import WebSource + from .oracle_partition_settings_py3 import OraclePartitionSettings from .oracle_source_py3 import OracleSource from .azure_data_explorer_source_py3 import AzureDataExplorerSource from .azure_my_sql_source_py3 import AzureMySqlSource @@ -321,9 +337,14 @@ from .file_system_source_py3 import FileSystemSource from .sql_dw_source_py3 import SqlDWSource from .stored_procedure_parameter_py3 import StoredProcedureParameter + from .azure_sql_source_py3 import AzureSqlSource + from .sql_server_source_py3 import SqlServerSource from .sql_source_py3 import SqlSource from .rest_source_py3 import RestSource + from .sap_table_partition_settings_py3 import SapTablePartitionSettings + from .sap_table_source_py3 import SapTableSource from .sap_open_hub_source_py3 import SapOpenHubSource + from .sap_hana_source_py3 import SapHanaSource from .sap_ecc_source_py3 import SapEccSource from .sap_cloud_for_customer_source_py3 import SapCloudForCustomerSource from .salesforce_source_py3 import SalesforceSource @@ -332,6 +353,20 @@ from .document_db_collection_source_py3 import DocumentDbCollectionSource from .blob_source_py3 import BlobSource from .azure_table_source_py3 import AzureTableSource + from .format_read_setting_py3 import FormatReadSetting + from .delimited_text_read_setting_py3 import DelimitedTextReadSetting + from .hdfs_read_setting_py3 import HdfsReadSetting + from .http_read_setting_py3 import HttpReadSetting + from .sftp_read_setting_py3 import SftpReadSetting + from .ftp_read_setting_py3 import FtpReadSetting + from .file_server_read_setting_py3 import FileServerReadSetting + from .amazon_s3_read_setting_py3 import AmazonS3ReadSetting + from .azure_data_lake_store_read_setting_py3 import AzureDataLakeStoreReadSetting + from .azure_blob_fs_read_setting_py3 import AzureBlobFSReadSetting + from .azure_blob_storage_read_setting_py3 import AzureBlobStorageReadSetting + from .connector_read_setting_py3 import ConnectorReadSetting + from .delimited_text_source_py3 import DelimitedTextSource + from .parquet_source_py3 import ParquetSource from .copy_source_py3 import CopySource from .lookup_activity_py3 import LookupActivity from .log_storage_settings_py3 import LogStorageSettings @@ -351,8 +386,6 @@ from .hd_insight_hive_activity_py3 import HDInsightHiveActivity from .redirect_incompatible_row_settings_py3 import RedirectIncompatibleRowSettings from .staging_settings_py3 import StagingSettings - from .tabular_translator_py3 import TabularTranslator - from .copy_translator_py3 import CopyTranslator from .cosmos_db_mongo_db_api_sink_py3 
import CosmosDbMongoDbApiSink from .salesforce_sink_py3 import SalesforceSink from .azure_data_explorer_sink_py3 import AzureDataExplorerSink @@ -364,13 +397,24 @@ from .oracle_sink_py3 import OracleSink from .polybase_settings_py3 import PolybaseSettings from .sql_dw_sink_py3 import SqlDWSink + from .azure_sql_sink_py3 import AzureSqlSink + from .sql_server_sink_py3 import SqlServerSink from .sql_sink_py3 import SqlSink from .document_db_collection_sink_py3 import DocumentDbCollectionSink from .file_system_sink_py3 import FileSystemSink from .blob_sink_py3 import BlobSink + from .file_server_write_setting_py3 import FileServerWriteSetting + from .azure_data_lake_store_write_setting_py3 import AzureDataLakeStoreWriteSetting + from .azure_blob_fs_write_setting_py3 import AzureBlobFSWriteSetting + from .azure_blob_storage_write_setting_py3 import AzureBlobStorageWriteSetting + from .connector_write_setting_py3 import ConnectorWriteSetting + from .parquet_sink_py3 import ParquetSink from .azure_table_sink_py3 import AzureTableSink from .azure_queue_sink_py3 import AzureQueueSink from .sap_cloud_for_customer_sink_py3 import SapCloudForCustomerSink + from .format_write_setting_py3 import FormatWriteSetting + from .delimited_text_write_setting_py3 import DelimitedTextWriteSetting + from .delimited_text_sink_py3 import DelimitedTextSink from .copy_sink_py3 import CopySink from .copy_activity_py3 import CopyActivity from .execution_activity_py3 import ExecutionActivity @@ -396,6 +440,8 @@ from .linked_integration_runtime_key_authorization_py3 import LinkedIntegrationRuntimeKeyAuthorization from .linked_integration_runtime_type_py3 import LinkedIntegrationRuntimeType from .self_hosted_integration_runtime_py3 import SelfHostedIntegrationRuntime + from .entity_reference_py3 import EntityReference + from .integration_runtime_data_proxy_properties_py3 import IntegrationRuntimeDataProxyProperties from .integration_runtime_custom_setup_script_properties_py3 import IntegrationRuntimeCustomSetupScriptProperties from .integration_runtime_ssis_catalog_info_py3 import IntegrationRuntimeSsisCatalogInfo from .integration_runtime_ssis_properties_py3 import IntegrationRuntimeSsisProperties @@ -504,6 +550,7 @@ from .multiple_pipeline_trigger import MultiplePipelineTrigger from .azure_function_linked_service import AzureFunctionLinkedService from .azure_data_explorer_linked_service import AzureDataExplorerLinkedService + from .sap_table_linked_service import SapTableLinkedService from .google_ad_words_linked_service import GoogleAdWordsLinkedService from .oracle_service_cloud_linked_service import OracleServiceCloudLinkedService from .dynamics_ax_linked_service import DynamicsAXLinkedService @@ -638,9 +685,11 @@ from .http_dataset import HttpDataset from .azure_search_index_dataset import AzureSearchIndexDataset from .web_table_dataset import WebTableDataset + from .sap_table_resource_dataset import SapTableResourceDataset from .rest_resource_dataset import RestResourceDataset from .sql_server_table_dataset import SqlServerTableDataset from .sap_open_hub_table_dataset import SapOpenHubTableDataset + from .sap_hana_table_dataset import SapHanaTableDataset from .sap_ecc_resource_dataset import SapEccResourceDataset from .sap_cloud_for_customer_resource_dataset import SapCloudForCustomerResourceDataset from .salesforce_object_dataset import SalesforceObjectDataset @@ -663,6 +712,18 @@ from .azure_sql_table_dataset import AzureSqlTableDataset from .azure_table_dataset import AzureTableDataset from .azure_blob_dataset 
import AzureBlobDataset + from .hdfs_location import HdfsLocation + from .http_server_location import HttpServerLocation + from .sftp_location import SftpLocation + from .ftp_server_location import FtpServerLocation + from .file_server_location import FileServerLocation + from .amazon_s3_location import AmazonS3Location + from .azure_data_lake_store_location import AzureDataLakeStoreLocation + from .azure_blob_fs_location import AzureBlobFSLocation + from .azure_blob_storage_location import AzureBlobStorageLocation + from .dataset_location import DatasetLocation + from .delimited_text_dataset import DelimitedTextDataset + from .parquet_dataset import ParquetDataset from .amazon_s3_dataset import AmazonS3Dataset from .activity_policy import ActivityPolicy from .azure_function_activity import AzureFunctionActivity @@ -721,6 +782,7 @@ from .mongo_db_source import MongoDbSource from .cassandra_source import CassandraSource from .web_source import WebSource + from .oracle_partition_settings import OraclePartitionSettings from .oracle_source import OracleSource from .azure_data_explorer_source import AzureDataExplorerSource from .azure_my_sql_source import AzureMySqlSource @@ -729,9 +791,14 @@ from .file_system_source import FileSystemSource from .sql_dw_source import SqlDWSource from .stored_procedure_parameter import StoredProcedureParameter + from .azure_sql_source import AzureSqlSource + from .sql_server_source import SqlServerSource from .sql_source import SqlSource from .rest_source import RestSource + from .sap_table_partition_settings import SapTablePartitionSettings + from .sap_table_source import SapTableSource from .sap_open_hub_source import SapOpenHubSource + from .sap_hana_source import SapHanaSource from .sap_ecc_source import SapEccSource from .sap_cloud_for_customer_source import SapCloudForCustomerSource from .salesforce_source import SalesforceSource @@ -740,6 +807,20 @@ from .document_db_collection_source import DocumentDbCollectionSource from .blob_source import BlobSource from .azure_table_source import AzureTableSource + from .format_read_setting import FormatReadSetting + from .delimited_text_read_setting import DelimitedTextReadSetting + from .hdfs_read_setting import HdfsReadSetting + from .http_read_setting import HttpReadSetting + from .sftp_read_setting import SftpReadSetting + from .ftp_read_setting import FtpReadSetting + from .file_server_read_setting import FileServerReadSetting + from .amazon_s3_read_setting import AmazonS3ReadSetting + from .azure_data_lake_store_read_setting import AzureDataLakeStoreReadSetting + from .azure_blob_fs_read_setting import AzureBlobFSReadSetting + from .azure_blob_storage_read_setting import AzureBlobStorageReadSetting + from .connector_read_setting import ConnectorReadSetting + from .delimited_text_source import DelimitedTextSource + from .parquet_source import ParquetSource from .copy_source import CopySource from .lookup_activity import LookupActivity from .log_storage_settings import LogStorageSettings @@ -759,8 +840,6 @@ from .hd_insight_hive_activity import HDInsightHiveActivity from .redirect_incompatible_row_settings import RedirectIncompatibleRowSettings from .staging_settings import StagingSettings - from .tabular_translator import TabularTranslator - from .copy_translator import CopyTranslator from .cosmos_db_mongo_db_api_sink import CosmosDbMongoDbApiSink from .salesforce_sink import SalesforceSink from .azure_data_explorer_sink import AzureDataExplorerSink @@ -772,13 +851,24 @@ from .oracle_sink import OracleSink from 
.polybase_settings import PolybaseSettings from .sql_dw_sink import SqlDWSink + from .azure_sql_sink import AzureSqlSink + from .sql_server_sink import SqlServerSink from .sql_sink import SqlSink from .document_db_collection_sink import DocumentDbCollectionSink from .file_system_sink import FileSystemSink from .blob_sink import BlobSink + from .file_server_write_setting import FileServerWriteSetting + from .azure_data_lake_store_write_setting import AzureDataLakeStoreWriteSetting + from .azure_blob_fs_write_setting import AzureBlobFSWriteSetting + from .azure_blob_storage_write_setting import AzureBlobStorageWriteSetting + from .connector_write_setting import ConnectorWriteSetting + from .parquet_sink import ParquetSink from .azure_table_sink import AzureTableSink from .azure_queue_sink import AzureQueueSink from .sap_cloud_for_customer_sink import SapCloudForCustomerSink + from .format_write_setting import FormatWriteSetting + from .delimited_text_write_setting import DelimitedTextWriteSetting + from .delimited_text_sink import DelimitedTextSink from .copy_sink import CopySink from .copy_activity import CopyActivity from .execution_activity import ExecutionActivity @@ -804,6 +894,8 @@ from .linked_integration_runtime_key_authorization import LinkedIntegrationRuntimeKeyAuthorization from .linked_integration_runtime_type import LinkedIntegrationRuntimeType from .self_hosted_integration_runtime import SelfHostedIntegrationRuntime + from .entity_reference import EntityReference + from .integration_runtime_data_proxy_properties import IntegrationRuntimeDataProxyProperties from .integration_runtime_custom_setup_script_properties import IntegrationRuntimeCustomSetupScriptProperties from .integration_runtime_ssis_catalog_info import IntegrationRuntimeSsisCatalogInfo from .integration_runtime_ssis_properties import IntegrationRuntimeSsisProperties @@ -874,25 +966,18 @@ TeradataAuthenticationType, Db2AuthenticationType, SybaseAuthenticationType, - DatasetCompressionLevel, - JsonFormatFilePattern, AzureFunctionActivityMethod, WebActivityMethod, - CassandraSourceReadConsistencyLevels, StoredProcedureParameterType, - SalesforceSourceReadBehavior, HDInsightActivityDebugInfoOption, - SalesforceSinkWriteBehavior, - AzureSearchIndexWriteBehaviorType, - CopyBehaviorType, PolybaseSettingsRejectType, - SapCloudForCustomerSinkWriteBehavior, WebHookActivityMethod, IntegrationRuntimeType, SelfHostedIntegrationRuntimeNodeStatus, IntegrationRuntimeUpdateResult, IntegrationRuntimeInternalChannelEncryptionMode, ManagedIntegrationRuntimeNodeStatus, + IntegrationRuntimeEntityReferenceType, IntegrationRuntimeSsisCatalogPricingTier, IntegrationRuntimeLicenseType, IntegrationRuntimeEdition, @@ -987,6 +1072,7 @@ 'MultiplePipelineTrigger', 'AzureFunctionLinkedService', 'AzureDataExplorerLinkedService', + 'SapTableLinkedService', 'GoogleAdWordsLinkedService', 'OracleServiceCloudLinkedService', 'DynamicsAXLinkedService', @@ -1121,9 +1207,11 @@ 'HttpDataset', 'AzureSearchIndexDataset', 'WebTableDataset', + 'SapTableResourceDataset', 'RestResourceDataset', 'SqlServerTableDataset', 'SapOpenHubTableDataset', + 'SapHanaTableDataset', 'SapEccResourceDataset', 'SapCloudForCustomerResourceDataset', 'SalesforceObjectDataset', @@ -1146,6 +1234,18 @@ 'AzureSqlTableDataset', 'AzureTableDataset', 'AzureBlobDataset', + 'HdfsLocation', + 'HttpServerLocation', + 'SftpLocation', + 'FtpServerLocation', + 'FileServerLocation', + 'AmazonS3Location', + 'AzureDataLakeStoreLocation', + 'AzureBlobFSLocation', + 'AzureBlobStorageLocation', + 
'DatasetLocation', + 'DelimitedTextDataset', + 'ParquetDataset', 'AmazonS3Dataset', 'ActivityPolicy', 'AzureFunctionActivity', @@ -1204,6 +1304,7 @@ 'MongoDbSource', 'CassandraSource', 'WebSource', + 'OraclePartitionSettings', 'OracleSource', 'AzureDataExplorerSource', 'AzureMySqlSource', @@ -1212,9 +1313,14 @@ 'FileSystemSource', 'SqlDWSource', 'StoredProcedureParameter', + 'AzureSqlSource', + 'SqlServerSource', 'SqlSource', 'RestSource', + 'SapTablePartitionSettings', + 'SapTableSource', 'SapOpenHubSource', + 'SapHanaSource', 'SapEccSource', 'SapCloudForCustomerSource', 'SalesforceSource', @@ -1223,6 +1329,20 @@ 'DocumentDbCollectionSource', 'BlobSource', 'AzureTableSource', + 'FormatReadSetting', + 'DelimitedTextReadSetting', + 'HdfsReadSetting', + 'HttpReadSetting', + 'SftpReadSetting', + 'FtpReadSetting', + 'FileServerReadSetting', + 'AmazonS3ReadSetting', + 'AzureDataLakeStoreReadSetting', + 'AzureBlobFSReadSetting', + 'AzureBlobStorageReadSetting', + 'ConnectorReadSetting', + 'DelimitedTextSource', + 'ParquetSource', 'CopySource', 'LookupActivity', 'LogStorageSettings', @@ -1242,8 +1362,6 @@ 'HDInsightHiveActivity', 'RedirectIncompatibleRowSettings', 'StagingSettings', - 'TabularTranslator', - 'CopyTranslator', 'CosmosDbMongoDbApiSink', 'SalesforceSink', 'AzureDataExplorerSink', @@ -1255,13 +1373,24 @@ 'OracleSink', 'PolybaseSettings', 'SqlDWSink', + 'AzureSqlSink', + 'SqlServerSink', 'SqlSink', 'DocumentDbCollectionSink', 'FileSystemSink', 'BlobSink', + 'FileServerWriteSetting', + 'AzureDataLakeStoreWriteSetting', + 'AzureBlobFSWriteSetting', + 'AzureBlobStorageWriteSetting', + 'ConnectorWriteSetting', + 'ParquetSink', 'AzureTableSink', 'AzureQueueSink', 'SapCloudForCustomerSink', + 'FormatWriteSetting', + 'DelimitedTextWriteSetting', + 'DelimitedTextSink', 'CopySink', 'CopyActivity', 'ExecutionActivity', @@ -1287,6 +1416,8 @@ 'LinkedIntegrationRuntimeKeyAuthorization', 'LinkedIntegrationRuntimeType', 'SelfHostedIntegrationRuntime', + 'EntityReference', + 'IntegrationRuntimeDataProxyProperties', 'IntegrationRuntimeCustomSetupScriptProperties', 'IntegrationRuntimeSsisCatalogInfo', 'IntegrationRuntimeSsisProperties', @@ -1356,25 +1487,18 @@ 'TeradataAuthenticationType', 'Db2AuthenticationType', 'SybaseAuthenticationType', - 'DatasetCompressionLevel', - 'JsonFormatFilePattern', 'AzureFunctionActivityMethod', 'WebActivityMethod', - 'CassandraSourceReadConsistencyLevels', 'StoredProcedureParameterType', - 'SalesforceSourceReadBehavior', 'HDInsightActivityDebugInfoOption', - 'SalesforceSinkWriteBehavior', - 'AzureSearchIndexWriteBehaviorType', - 'CopyBehaviorType', 'PolybaseSettingsRejectType', - 'SapCloudForCustomerSinkWriteBehavior', 'WebHookActivityMethod', 'IntegrationRuntimeType', 'SelfHostedIntegrationRuntimeNodeStatus', 'IntegrationRuntimeUpdateResult', 'IntegrationRuntimeInternalChannelEncryptionMode', 'ManagedIntegrationRuntimeNodeStatus', + 'IntegrationRuntimeEntityReferenceType', 'IntegrationRuntimeSsisCatalogPricingTier', 'IntegrationRuntimeLicenseType', 'IntegrationRuntimeEdition', diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location.py new file mode 100644 index 000000000000..74c77a16f0f2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location.py @@ -0,0 +1,55 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft 
Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_location import DatasetLocation
+
+
+class AmazonS3Location(DatasetLocation):
+    """The location of Amazon S3 dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of dataset storage location.
+    :type type: str
+    :param folder_path: Specify the folder path of dataset. Type: string (or
+     Expression with resultType string).
+    :type folder_path: object
+    :param file_name: Specify the file name of dataset. Type: string (or
+     Expression with resultType string).
+    :type file_name: object
+    :param bucket_name: Specify the bucketName of Amazon S3. Type: string (or
+     Expression with resultType string).
+    :type bucket_name: object
+    :param version: Specify the version of Amazon S3. Type: string (or
+     Expression with resultType string).
+    :type version: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'folder_path': {'key': 'folderPath', 'type': 'object'},
+        'file_name': {'key': 'fileName', 'type': 'object'},
+        'bucket_name': {'key': 'bucketName', 'type': 'object'},
+        'version': {'key': 'version', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AmazonS3Location, self).__init__(**kwargs)
+        self.bucket_name = kwargs.get('bucket_name', None)
+        self.version = kwargs.get('version', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location_py3.py
new file mode 100644
index 000000000000..36afce341ada
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location_py3.py
@@ -0,0 +1,55 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_location_py3 import DatasetLocation
+
+
+class AmazonS3Location(DatasetLocation):
+    """The location of Amazon S3 dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of dataset storage location.
+    :type type: str
+    :param folder_path: Specify the folder path of dataset. Type: string (or
+     Expression with resultType string).
+    :type folder_path: object
+    :param file_name: Specify the file name of dataset. Type: string (or
+     Expression with resultType string).
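[Editor's note] These location classes replace the loose folderPath/fileName pairs on datasets with a typed hierarchy rooted at DatasetLocation. A minimal construction sketch; note that `type` is required and not auto-filled for locations in this version, and the discriminator string 'AmazonS3Location' is an assumption mirroring the class name:

from azure.mgmt.datafactory.models import AmazonS3Location

location = AmazonS3Location(
    type='AmazonS3Location',    # assumed discriminator value
    bucket_name='my-bucket',    # placeholder
    folder_path='raw/2019/06',  # placeholder
    file_name='events.csv',     # placeholder
)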
+    :type file_name: object
+    :param bucket_name: Specify the bucketName of Amazon S3. Type: string (or
+     Expression with resultType string).
+    :type bucket_name: object
+    :param version: Specify the version of Amazon S3. Type: string (or
+     Expression with resultType string).
+    :type version: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'folder_path': {'key': 'folderPath', 'type': 'object'},
+        'file_name': {'key': 'fileName', 'type': 'object'},
+        'bucket_name': {'key': 'bucketName', 'type': 'object'},
+        'version': {'key': 'version', 'type': 'object'},
+    }
+
+    def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, bucket_name=None, version=None, **kwargs) -> None:
+        super(AmazonS3Location, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs)
+        self.bucket_name = bucket_name
+        self.version = version
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting.py
new file mode 100644
index 000000000000..4de7e0ebb7b9
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting.py
@@ -0,0 +1,78 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .connector_read_setting import ConnectorReadSetting
+
+
+class AmazonS3ReadSetting(ConnectorReadSetting):
+    """Amazon S3 read settings.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param type: Required. The read setting type.
+    :type type: str
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param recursive: If true, files under the folder path will be read
+     recursively. Default is true. Type: boolean (or Expression with resultType
+     boolean).
+    :type recursive: object
+    :param wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or
+     Expression with resultType string).
+    :type wildcard_folder_path: object
+    :param wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or
+     Expression with resultType string).
+    :type wildcard_file_name: object
+    :param prefix: The prefix filter for the S3 object name. Type: string (or
+     Expression with resultType string).
+    :type prefix: object
+    :param enable_partition_discovery: Indicates whether to enable partition
+     discovery.
+    :type enable_partition_discovery: bool
+    :param modified_datetime_start: The start of file's modified datetime.
+     Type: string (or Expression with resultType string).
+    :type modified_datetime_start: object
+    :param modified_datetime_end: The end of file's modified datetime. Type:
+     string (or Expression with resultType string).
+    :type modified_datetime_end: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'recursive': {'key': 'recursive', 'type': 'object'},
+        'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
+        'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
+        'prefix': {'key': 'prefix', 'type': 'object'},
+        'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'},
+        'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'},
+        'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AmazonS3ReadSetting, self).__init__(**kwargs)
+        self.recursive = kwargs.get('recursive', None)
+        self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None)
+        self.wildcard_file_name = kwargs.get('wildcard_file_name', None)
+        self.prefix = kwargs.get('prefix', None)
+        self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None)
+        self.modified_datetime_start = kwargs.get('modified_datetime_start', None)
+        self.modified_datetime_end = kwargs.get('modified_datetime_end', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting_py3.py
new file mode 100644
index 000000000000..deda331ea561
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting_py3.py
@@ -0,0 +1,78 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .connector_read_setting_py3 import ConnectorReadSetting
+
+
+class AmazonS3ReadSetting(ConnectorReadSetting):
+    """Amazon S3 read settings.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param type: Required. The read setting type.
+    :type type: str
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param recursive: If true, files under the folder path will be read
+     recursively. Default is true. Type: boolean (or Expression with resultType
+     boolean).
+    :type recursive: object
+    :param wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or
+     Expression with resultType string).
+    :type wildcard_folder_path: object
+    :param wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or
+     Expression with resultType string).
+ :type wildcard_file_name: object + :param prefix: The prefix filter for the S3 object name. Type: string (or + Expression with resultType string). + :type prefix: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'prefix': {'key': 'prefix', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(AmazonS3ReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.prefix = prefix + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location.py new file mode 100644 index 000000000000..c21525bbac4c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class AzureBlobFSLocation(DatasetLocation): + """The location of azure blobFS dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. 
+ :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param file_system: Specify the fileSystem of azure blobFS. Type: string + (or Expression with resultType string). + :type file_system: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'file_system': {'key': 'fileSystem', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSLocation, self).__init__(**kwargs) + self.file_system = kwargs.get('file_system', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location_py3.py new file mode 100644 index 000000000000..afbae52fdeb0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location_py3.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class AzureBlobFSLocation(DatasetLocation): + """The location of azure blobFS dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param file_system: Specify the fileSystem of azure blobFS. Type: string + (or Expression with resultType string). 
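+
+    For example, pointing a dataset at an ADLS Gen2 file system (a sketch;
+    the ``type`` discriminator value is assumed)::
+
+        location = AzureBlobFSLocation(
+            type='AzureBlobFSLocation',      # assumed discriminator value
+            file_system='myfilesystem',      # ADLS Gen2 file system name
+            folder_path='staging/output',
+            file_name='part-0000.parquet')
+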
+ :type file_system: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'file_system': {'key': 'fileSystem', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, file_system=None, **kwargs) -> None: + super(AzureBlobFSLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) + self.file_system = file_system diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting.py new file mode 100644 index 000000000000..11490a288417 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting import ConnectorReadSetting + + +class AzureBlobFSReadSetting(ConnectorReadSetting): + """Azure blobFS read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Azure blobFS wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). 
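+
+    Any of the ``object``-typed properties can carry a Data Factory
+    expression instead of a literal, e.g. (illustrative sketch; the
+    ``fileMask`` dataset parameter is hypothetical)::
+
+        setting = AzureBlobFSReadSetting(
+            type='AzureBlobFSReadSetting',   # assumed discriminator value
+            wildcard_file_name={
+                'value': "@dataset().fileMask",
+                'type': 'Expression'})
+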
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSReadSetting, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting_py3.py new file mode 100644 index 000000000000..28f3b4f7ceb4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting_py3 import ConnectorReadSetting + + +class AzureBlobFSReadSetting(ConnectorReadSetting): + """Azure blobFS read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Azure blobFS wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. 
+ Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(AzureBlobFSReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py index 59e070c64fe8..a47b173c6581 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py @@ -40,10 +40,8 @@ class AzureBlobFSSink(CopySink): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str - :param copy_behavior: The type of copy behavior for copy sink. Possible - values include: 'PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles' - :type copy_behavior: str or - ~azure.mgmt.datafactory.models.CopyBehaviorType + :param copy_behavior: The type of copy behavior for copy sink. 
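+
+    With the loosened ``object`` typing, callers can keep passing the old
+    enum-style strings, e.g. (illustrative sketch)::
+
+        sink = AzureBlobFSSink(copy_behavior='FlattenHierarchy')
+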
+ :type copy_behavior: object """ _validation = { @@ -58,7 +56,7 @@ class AzureBlobFSSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py index 35ad6a97dbfe..e2b28bf30a8c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py @@ -40,10 +40,8 @@ class AzureBlobFSSink(CopySink): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str - :param copy_behavior: The type of copy behavior for copy sink. Possible - values include: 'PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles' - :type copy_behavior: str or - ~azure.mgmt.datafactory.models.CopyBehaviorType + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object """ _validation = { @@ -58,7 +56,7 @@ class AzureBlobFSSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting.py new file mode 100644 index 000000000000..d5b2d850da58 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_write_setting import ConnectorWriteSetting + + +class AzureBlobFSWriteSetting(ConnectorWriteSetting): + """Azure blobFS write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). 
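+
+    A construction sketch (the ``type`` discriminator value is assumed)::
+
+        setting = AzureBlobFSWriteSetting(
+            type='AzureBlobFSWriteSetting',  # assumed discriminator value
+            max_concurrent_connections=4,
+            copy_behavior='PreserveHierarchy')
+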
+ :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSWriteSetting, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting_py3.py new file mode 100644 index 000000000000..62196ff73838 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_write_setting_py3 import ConnectorWriteSetting + + +class AzureBlobFSWriteSetting(ConnectorWriteSetting): + """Azure blobFS write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(AzureBlobFSWriteSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location.py new file mode 100644 index 000000000000..1efbbeaec352 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class AzureBlobStorageLocation(DatasetLocation): + """The location of azure blob dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param container: Specify the container of azure blob. Type: string (or + Expression with resultType string). + :type container: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'container': {'key': 'container', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobStorageLocation, self).__init__(**kwargs) + self.container = kwargs.get('container', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location_py3.py new file mode 100644 index 000000000000..63b122573039 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location_py3.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class AzureBlobStorageLocation(DatasetLocation): + """The location of azure blob dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param container: Specify the container of azure blob. Type: string (or + Expression with resultType string). 
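+
+    For example (a sketch; the ``type`` discriminator value is assumed)::
+
+        location = AzureBlobStorageLocation(
+            type='AzureBlobStorageLocation',  # assumed discriminator value
+            container='landing',
+            folder_path='incoming/orders',
+            file_name='orders.csv')
+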
+ :type container: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'container': {'key': 'container', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, container=None, **kwargs) -> None: + super(AzureBlobStorageLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) + self.container = container diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting.py new file mode 100644 index 000000000000..ee07a3576f29 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting import ConnectorReadSetting + + +class AzureBlobStorageReadSetting(ConnectorReadSetting): + """Azure blob read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Azure blob wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blob wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). 
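+
+    For instance, to pick up only blobs modified inside a window while
+    discovering partitions (a sketch; the ``type`` value is assumed)::
+
+        setting = AzureBlobStorageReadSetting(
+            type='AzureBlobStorageReadSetting',  # assumed discriminator value
+            enable_partition_discovery=True,
+            modified_datetime_start='2019-06-01T00:00:00Z',
+            modified_datetime_end='2019-06-02T00:00:00Z')
+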
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobStorageReadSetting, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting_py3.py new file mode 100644 index 000000000000..3e3d35774a46 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting_py3 import ConnectorReadSetting + + +class AzureBlobStorageReadSetting(ConnectorReadSetting): + """Azure blob read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Azure blob wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blob wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. 
+ Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(AzureBlobStorageReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting.py new file mode 100644 index 000000000000..a6499dfda798 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_write_setting import ConnectorWriteSetting + + +class AzureBlobStorageWriteSetting(ConnectorWriteSetting): + """Azure blob write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. 
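+
+    E.g. to merge many source files into a single blob on write (a sketch;
+    the ``type`` value is assumed)::
+
+        setting = AzureBlobStorageWriteSetting(
+            type='AzureBlobStorageWriteSetting',  # assumed discriminator value
+            copy_behavior='MergeFiles')
+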
+ :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobStorageWriteSetting, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting_py3.py new file mode 100644 index 000000000000..9abb68c06055 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_write_setting_py3 import ConnectorWriteSetting + + +class AzureBlobStorageWriteSetting(ConnectorWriteSetting): + """Azure blob write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(AzureBlobStorageWriteSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location.py new file mode 100644 index 000000000000..a4bf521a2005 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class AzureDataLakeStoreLocation(DatasetLocation): + """The location of azure data lake store dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataLakeStoreLocation, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location_py3.py new file mode 100644 index 000000000000..e7955731fc31 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class AzureDataLakeStoreLocation(DatasetLocation): + """The location of azure data lake store dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). 
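+
+    For example (a sketch; the ``type`` discriminator value is assumed)::
+
+        location = AzureDataLakeStoreLocation(
+            type='AzureDataLakeStoreLocation',  # assumed discriminator value
+            folder_path='clickstream/2019/06',
+            file_name='events.json')
+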
+ :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(AzureDataLakeStoreLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting.py new file mode 100644 index 000000000000..0f0dfe7f7c58 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting import ConnectorReadSetting + + +class AzureDataLakeStoreReadSetting(ConnectorReadSetting): + """Azure data lake store read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: ADLS wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: ADLS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). 
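+
+    For example, matching subfolders by wildcard instead of full recursion
+    (a sketch; the ``type`` value is assumed)::
+
+        setting = AzureDataLakeStoreReadSetting(
+            type='AzureDataLakeStoreReadSetting',  # assumed discriminator value
+            recursive=False,
+            wildcard_folder_path='2019/06/*',
+            wildcard_file_name='*.json')
+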
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataLakeStoreReadSetting, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting_py3.py new file mode 100644 index 000000000000..b9159463d681 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting_py3 import ConnectorReadSetting + + +class AzureDataLakeStoreReadSetting(ConnectorReadSetting): + """Azure data lake store read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: ADLS wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: ADLS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. 
+ Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(AzureDataLakeStoreReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py index 145c7c61358a..e882698c2ca6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py @@ -40,10 +40,10 @@ class AzureDataLakeStoreSink(CopySink): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str - :param copy_behavior: The type of copy behavior for copy sink. Possible - values include: 'PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles' - :type copy_behavior: str or - ~azure.mgmt.datafactory.models.CopyBehaviorType + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param enable_adls_single_file_parallel: Single File Parallel. 
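+
+    Both properties are surfaced as plain ``object`` values, so literals
+    work directly, e.g. (illustrative sketch)::
+
+        sink = AzureDataLakeStoreSink(
+            copy_behavior='PreserveHierarchy',
+            enable_adls_single_file_parallel=True)
+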
+ :type enable_adls_single_file_parallel: object """ _validation = { @@ -58,10 +58,12 @@ class AzureDataLakeStoreSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, } def __init__(self, **kwargs): super(AzureDataLakeStoreSink, self).__init__(**kwargs) self.copy_behavior = kwargs.get('copy_behavior', None) + self.enable_adls_single_file_parallel = kwargs.get('enable_adls_single_file_parallel', None) self.type = 'AzureDataLakeStoreSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py index d3e16339fef2..0f96cea725e2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py @@ -40,10 +40,10 @@ class AzureDataLakeStoreSink(CopySink): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str - :param copy_behavior: The type of copy behavior for copy sink. Possible - values include: 'PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles' - :type copy_behavior: str or - ~azure.mgmt.datafactory.models.CopyBehaviorType + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param enable_adls_single_file_parallel: Single File Parallel. 
+ :type enable_adls_single_file_parallel: object """ _validation = { @@ -58,10 +58,12 @@ class AzureDataLakeStoreSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, enable_adls_single_file_parallel=None, **kwargs) -> None: super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.copy_behavior = copy_behavior + self.enable_adls_single_file_parallel = enable_adls_single_file_parallel self.type = 'AzureDataLakeStoreSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting.py new file mode 100644 index 000000000000..d7875f545e77 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_write_setting import ConnectorWriteSetting + + +class AzureDataLakeStoreWriteSetting(ConnectorWriteSetting): + """Azure data lake store write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. 
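+
+    ``copy_behavior`` accepts an expression as well as a literal, e.g. (a
+    sketch; the ``type`` value and the ``behavior`` pipeline parameter are
+    assumptions)::
+
+        setting = AzureDataLakeStoreWriteSetting(
+            type='AzureDataLakeStoreWriteSetting',  # assumed discriminator value
+            copy_behavior={
+                'value': "@pipeline().parameters.behavior",  # hypothetical
+                'type': 'Expression'})
+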
+ :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataLakeStoreWriteSetting, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting_py3.py new file mode 100644 index 000000000000..e05ddcbaeaac --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_write_setting_py3 import ConnectorWriteSetting + + +class AzureDataLakeStoreWriteSetting(ConnectorWriteSetting): + """Azure data lake store write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(AzureDataLakeStoreWriteSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py index 9aae64af8da0..af2505be7a5c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py @@ -41,9 +41,8 @@ class AzureSearchIndexSink(CopySink): :param type: Required. Constant filled by server. :type type: str :param write_behavior: Specify the write behavior when upserting documents - into Azure Search Index. 
Possible values include: 'Merge', 'Upload' - :type write_behavior: str or - ~azure.mgmt.datafactory.models.AzureSearchIndexWriteBehaviorType + into Azure Search Index. + :type write_behavior: object """ _validation = { @@ -58,7 +57,7 @@ class AzureSearchIndexSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py index 3cd887a2512c..9e57f2f1feb3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py @@ -41,9 +41,8 @@ class AzureSearchIndexSink(CopySink): :param type: Required. Constant filled by server. :type type: str :param write_behavior: Specify the write behavior when upserting documents - into Azure Search Index. Possible values include: 'Merge', 'Upload' - :type write_behavior: str or - ~azure.mgmt.datafactory.models.AzureSearchIndexWriteBehaviorType + into Azure Search Index. + :type write_behavior: object """ _validation = { @@ -58,7 +57,7 @@ class AzureSearchIndexSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py index e12464efdd49..0921505515d5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py @@ -43,15 +43,14 @@ class AzureSqlDWTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: Required. The table name of the Azure SQL Data - Warehouse. Type: string (or Expression with resultType string). + :param table_name: The table name of the Azure SQL Data Warehouse. Type: + string (or Expression with resultType string). 
:type table_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'table_name': {'required': True}, } _attribute_map = { diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py index a06073f86c5b..0be72998fc64 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py @@ -43,15 +43,14 @@ class AzureSqlDWTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: Required. The table name of the Azure SQL Data - Warehouse. Type: string (or Expression with resultType string). + :param table_name: The table name of the Azure SQL Data Warehouse. Type: + string (or Expression with resultType string). :type table_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'table_name': {'required': True}, } _attribute_map = { @@ -67,7 +66,7 @@ class AzureSqlDWTableDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: super(AzureSqlDWTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name self.type = 'AzureSqlDWTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink.py new file mode 100644 index 000000000000..441bf0c4279f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class AzureSqlSink(CopySink): + """A copy activity Azure SQL sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout.
Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSqlSink, self).__init__(**kwargs) + self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) + self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.type = 'AzureSqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink_py3.py new file mode 100644 index 000000000000..6aa431ae57d6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink_py3.py @@ -0,0 
+1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzureSqlSink(CopySink): + """A copy activity Azure SQL sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string).
+ :type stored_procedure_table_type_parameter_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None: + super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name + self.sql_writer_table_type = sql_writer_table_type + self.pre_copy_script = pre_copy_script + self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.type = 'AzureSqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source.py new file mode 100644 index 000000000000..b6c62f9a3164 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class AzureSqlSource(CopySource): + """A copy activity Azure SQL source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait.
Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Database source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. + :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSqlSource, self).__init__(**kwargs) + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.produce_additional_types = kwargs.get('produce_additional_types', None) + self.type = 'AzureSqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source_py3.py new file mode 100644 index 000000000000..cb5c33d28bb2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AzureSqlSource(CopySource): + """A copy activity Azure SQL source. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Database source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. + :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: + super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.produce_additional_types = produce_additional_types + self.type = 'AzureSqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py index 9a078241d620..8d8d90d76e85 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py @@ -43,15
+43,14 @@ class AzureSqlTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: Required. The table name of the Azure SQL database. - Type: string (or Expression with resultType string). + :param table_name: The table name of the Azure SQL database. Type: string + (or Expression with resultType string). :type table_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'table_name': {'required': True}, } _attribute_map = { diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py index b3c388202f52..3d4f1eac3f58 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py @@ -43,15 +43,14 @@ class AzureSqlTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: Required. The table name of the Azure SQL database. - Type: string (or Expression with resultType string). + :param table_name: The table name of the Azure SQL database. Type: string + (or Expression with resultType string). :type table_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'table_name': {'required': True}, } _attribute_map = { @@ -67,7 +66,7 @@ class AzureSqlTableDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: super(AzureSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name self.type = 'AzureSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py index 8a050cf9cc64..284e0fcecde5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py @@ -49,10 +49,8 @@ class BlobSink(CopySink): :param blob_writer_add_header: Blob writer add header. Type: boolean (or Expression with resultType boolean). :type blob_writer_add_header: object - :param copy_behavior: The type of copy behavior for copy sink. Possible - values include: 'PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles' - :type copy_behavior: str or - ~azure.mgmt.datafactory.models.CopyBehaviorType + :param copy_behavior: The type of copy behavior for copy sink. 
+ :type copy_behavior: object """ _validation = { @@ -70,7 +68,7 @@ class BlobSink(CopySink): 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, 'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py index 8fca0ac5cacc..370acc72e017 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py @@ -49,10 +49,8 @@ class BlobSink(CopySink): :param blob_writer_add_header: Blob writer add header. Type: boolean (or Expression with resultType boolean). :type blob_writer_add_header: object - :param copy_behavior: The type of copy behavior for copy sink. Possible - values include: 'PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles' - :type copy_behavior: str or - ~azure.mgmt.datafactory.models.CopyBehaviorType + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object """ _validation = { @@ -70,7 +68,7 @@ class BlobSink(CopySink): 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, 'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, blob_writer_overwrite_files=None, blob_writer_date_time_format=None, blob_writer_add_header=None, copy_behavior=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py index e7ba96c18682..8a52f03cd5ba 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py @@ -42,11 +42,8 @@ class CassandraSource(CopySource): the client application. Cassandra checks the specified number of Cassandra servers for data to satisfy the read request. Must be one of cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is - case-insensitive. Possible values include: 'ALL', 'EACH_QUORUM', 'QUORUM', - 'LOCAL_QUORUM', 'ONE', 'TWO', 'THREE', 'LOCAL_ONE', 'SERIAL', - 'LOCAL_SERIAL' - :type consistency_level: str or - ~azure.mgmt.datafactory.models.CassandraSourceReadConsistencyLevels + case-insensitive. 
+ :type consistency_level: object """ _validation = { @@ -60,7 +57,7 @@ class CassandraSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, - 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, + 'consistency_level': {'key': 'consistencyLevel', 'type': 'object'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, consistency_level=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting.py new file mode 100644 index 000000000000..e0af269aaafd --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ConnectorReadSetting(Model): + """Connector read setting. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer).
+ :type max_concurrent_connections: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ConnectorReadSetting, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = kwargs.get('type', None) + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting_py3.py new file mode 100644 index 000000000000..f6403ade8f71 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ConnectorReadSetting(Model): + """Connector read setting. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, **kwargs) -> None: + super(ConnectorReadSetting, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type + self.max_concurrent_connections = max_concurrent_connections diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting.py new file mode 100644 index 000000000000..65daf9f07794 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated.
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ConnectorWriteSetting(Model): + """Connector write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ConnectorWriteSetting, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = kwargs.get('type', None) + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + self.copy_behavior = kwargs.get('copy_behavior', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting_py3.py new file mode 100644 index 000000000000..7f4ea65c916d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting_py3.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ConnectorWriteSetting(Model): + """Connector write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink.
+ :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(ConnectorWriteSetting, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type + self.max_concurrent_connections = max_concurrent_connections + self.copy_behavior = copy_behavior diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py index f73d34fcb3ce..2e7c00d551ba 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py @@ -41,7 +41,7 @@ class CopyActivity(ExecutionActivity): :type sink: ~azure.mgmt.datafactory.models.CopySink :param translator: Copy activity translator. If not specified, tabular translator is used. - :type translator: ~azure.mgmt.datafactory.models.CopyTranslator + :type translator: object :param enable_staging: Specifies whether to copy data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). @@ -67,6 +67,8 @@ class CopyActivity(ExecutionActivity): ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings :param preserve_rules: Preserve Rules. :type preserve_rules: list[object] + :param preserve: Preserve rules. + :type preserve: list[object] :param inputs: List of inputs for the activity. :type inputs: list[~azure.mgmt.datafactory.models.DatasetReference] :param outputs: List of outputs for the activity. 
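The hunk above loosens `translator` from the typed CopyTranslator model to a free-form 'object', so the translator payload can now be supplied as a plain dict that round-trips to JSON unchanged. A minimal sketch, assuming the service-default tabular translator; the mapping string is illustrative, not part of this patch:

    # Sketch only: with 'translator' typed as 'object', this dict is passed
    # through as-is instead of being coerced into a CopyTranslator model.
    translator = {
        "type": "TabularTranslator",                  # service-default translator
        "columnMappings": "SourceColumn: SinkColumn"  # illustrative mapping
    }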
@@ -91,7 +93,7 @@ class CopyActivity(ExecutionActivity): 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, 'sink': {'key': 'typeProperties.sink', 'type': 'CopySink'}, - 'translator': {'key': 'typeProperties.translator', 'type': 'CopyTranslator'}, + 'translator': {'key': 'typeProperties.translator', 'type': 'object'}, 'enable_staging': {'key': 'typeProperties.enableStaging', 'type': 'object'}, 'staging_settings': {'key': 'typeProperties.stagingSettings', 'type': 'StagingSettings'}, 'parallel_copies': {'key': 'typeProperties.parallelCopies', 'type': 'object'}, @@ -99,6 +101,7 @@ class CopyActivity(ExecutionActivity): 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, + 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, } @@ -115,6 +118,7 @@ def __init__(self, **kwargs): self.enable_skip_incompatible_row = kwargs.get('enable_skip_incompatible_row', None) self.redirect_incompatible_row_settings = kwargs.get('redirect_incompatible_row_settings', None) self.preserve_rules = kwargs.get('preserve_rules', None) + self.preserve = kwargs.get('preserve', None) self.inputs = kwargs.get('inputs', None) self.outputs = kwargs.get('outputs', None) self.type = 'Copy' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py index a02cd5d89e10..f8a1fee5625d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py @@ -41,7 +41,7 @@ class CopyActivity(ExecutionActivity): :type sink: ~azure.mgmt.datafactory.models.CopySink :param translator: Copy activity translator. If not specified, tabular translator is used. - :type translator: ~azure.mgmt.datafactory.models.CopyTranslator + :type translator: object :param enable_staging: Specifies whether to copy data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). @@ -67,6 +67,8 @@ class CopyActivity(ExecutionActivity): ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings :param preserve_rules: Preserve Rules. :type preserve_rules: list[object] + :param preserve: Preserve rules. + :type preserve: list[object] :param inputs: List of inputs for the activity. :type inputs: list[~azure.mgmt.datafactory.models.DatasetReference] :param outputs: List of outputs for the activity. 
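The keyword-only signature change below adds `preserve` alongside the existing `preserve_rules`. A minimal sketch of constructing a copy activity with it, assuming a blob-to-blob copy; the activity name, dataset names, and the "Attributes" entry are illustrative, not mandated by this patch:

    from azure.mgmt.datafactory import models

    copy = models.CopyActivity(
        name="CopyWithPreserve",  # illustrative activity name
        source=models.BlobSource(),
        sink=models.BlobSink(),
        inputs=[models.DatasetReference(reference_name="SrcDataset")],
        outputs=[models.DatasetReference(reference_name="DstDataset")],
        preserve=["Attributes"],  # the new pass-through list added in this patch
    )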
@@ -91,7 +93,7 @@ class CopyActivity(ExecutionActivity): 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, 'sink': {'key': 'typeProperties.sink', 'type': 'CopySink'}, - 'translator': {'key': 'typeProperties.translator', 'type': 'CopyTranslator'}, + 'translator': {'key': 'typeProperties.translator', 'type': 'object'}, 'enable_staging': {'key': 'typeProperties.enableStaging', 'type': 'object'}, 'staging_settings': {'key': 'typeProperties.stagingSettings', 'type': 'StagingSettings'}, 'parallel_copies': {'key': 'typeProperties.parallelCopies', 'type': 'object'}, @@ -99,11 +101,12 @@ class CopyActivity(ExecutionActivity): 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, + 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, } - def __init__(self, *, name: str, source, sink, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, translator=None, enable_staging=None, staging_settings=None, parallel_copies=None, data_integration_units=None, enable_skip_incompatible_row=None, redirect_incompatible_row_settings=None, preserve_rules=None, inputs=None, outputs=None, **kwargs) -> None: + def __init__(self, *, name: str, source, sink, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, translator=None, enable_staging=None, staging_settings=None, parallel_copies=None, data_integration_units=None, enable_skip_incompatible_row=None, redirect_incompatible_row_settings=None, preserve_rules=None, preserve=None, inputs=None, outputs=None, **kwargs) -> None: super(CopyActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.source = source self.sink = sink @@ -115,6 +118,7 @@ def __init__(self, *, name: str, source, sink, additional_properties=None, descr self.enable_skip_incompatible_row = enable_skip_incompatible_row self.redirect_incompatible_row_settings = redirect_incompatible_row_settings self.preserve_rules = preserve_rules + self.preserve = preserve self.inputs = inputs self.outputs = outputs self.type = 'Copy' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py index 6f714f7947d1..43117547e1ea 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py @@ -18,9 +18,10 @@ class CopySink(Model): You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: CosmosDbMongoDbApiSink, SalesforceSink, AzureDataExplorerSink, DynamicsSink, OdbcSink, AzureSearchIndexSink, - AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, SqlDWSink, SqlSink, - DocumentDbCollectionSink, FileSystemSink, BlobSink, AzureTableSink, - AzureQueueSink, SapCloudForCustomerSink + AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, SqlDWSink, + AzureSqlSink, SqlServerSink, SqlSink, DocumentDbCollectionSink, + FileSystemSink, BlobSink, ParquetSink, AzureTableSink, AzureQueueSink, + SapCloudForCustomerSink, DelimitedTextSink All required parameters must be populated in order to send to Azure. @@ -64,7 +65,7 @@ class CopySink(Model): } _subtype_map = { - 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'DynamicsSink': 'DynamicsSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink'} + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'DynamicsSink': 'DynamicsSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'ParquetSink': 'ParquetSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'DelimitedTextSink': 'DelimitedTextSink'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py index ea4f320e9bc6..4f9ebc84173c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py @@ -18,9 +18,10 @@ class CopySink(Model): You probably want to use the sub-classes and not this class directly. Known sub-classes are: CosmosDbMongoDbApiSink, SalesforceSink, AzureDataExplorerSink, DynamicsSink, OdbcSink, AzureSearchIndexSink, - AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, SqlDWSink, SqlSink, - DocumentDbCollectionSink, FileSystemSink, BlobSink, AzureTableSink, - AzureQueueSink, SapCloudForCustomerSink + AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, SqlDWSink, + AzureSqlSink, SqlServerSink, SqlSink, DocumentDbCollectionSink, + FileSystemSink, BlobSink, ParquetSink, AzureTableSink, AzureQueueSink, + SapCloudForCustomerSink, DelimitedTextSink All required parameters must be populated in order to send to Azure. 
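For context on how the expanded sub-class list is consumed: msrest resolves the 'type' discriminator against _subtype_map (updated in the hunk below) to pick the concrete sink class. A minimal sketch of that dispatch, assuming msrest's Deserializer API; the pre-copy script is illustrative:

    from msrest import Deserializer
    from azure.mgmt.datafactory import models

    client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
    deserialize = Deserializer(client_models)

    # The 'type' discriminator selects the newly registered AzureSqlSink
    # subclass rather than the CopySink base model.
    sink = deserialize('CopySink', {'type': 'AzureSqlSink', 'preCopyScript': 'TRUNCATE TABLE dbo.Staging'})
    assert isinstance(sink, models.AzureSqlSink)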
@@ -64,7 +65,7 @@ class CopySink(Model): } _subtype_map = { - 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'DynamicsSink': 'DynamicsSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink'} + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'DynamicsSink': 'DynamicsSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'ParquetSink': 'ParquetSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'DelimitedTextSink': 'DelimitedTextSink'} } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py index 4f3da1e8cf85..c261c385de8d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py @@ -28,9 +28,11 @@ class CopySource(Model): AzureDataLakeStoreSource, Office365Source, CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, CassandraSource, WebSource, OracleSource, AzureDataExplorerSource, AzureMySqlSource, HdfsSource, FileSystemSource, - SqlDWSource, SqlSource, RestSource, SapOpenHubSource, SapEccSource, + SqlDWSource, AzureSqlSource, SqlServerSource, SqlSource, RestSource, + SapTableSource, SapOpenHubSource, SapHanaSource, SapEccSource, SapCloudForCustomerSource, SalesforceSource, RelationalSource, - DynamicsSource, DocumentDbCollectionSource, BlobSource, AzureTableSource + DynamicsSource, DocumentDbCollectionSource, BlobSource, AzureTableSource, + DelimitedTextSource, ParquetSource All required parameters must be populated in order to send to Azure. 
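The AzureSqlSource registered below follows the docstring contract shown earlier in this patch: supply either sql_reader_query or a stored procedure, not both. A minimal sketch of the stored-procedure path; the procedure and parameter names are illustrative:

    from azure.mgmt.datafactory import models

    source = models.AzureSqlSource(
        sql_reader_stored_procedure_name="dbo.GetChangedRows",  # illustrative name
        stored_procedure_parameters={
            "Parameter1": models.StoredProcedureParameter(value="1", type="Int"),
        },
    )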
@@ -65,7 +67,7 @@ class CopySource(Model): } _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource'} + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 
'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py index eb439548481a..5a0b7d807b7f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py @@ -28,9 +28,11 @@ class CopySource(Model): AzureDataLakeStoreSource, Office365Source, CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, CassandraSource, WebSource, OracleSource, AzureDataExplorerSource, AzureMySqlSource, HdfsSource, FileSystemSource, - SqlDWSource, SqlSource, RestSource, SapOpenHubSource, SapEccSource, + SqlDWSource, AzureSqlSource, SqlServerSource, SqlSource, RestSource, + SapTableSource, SapOpenHubSource, SapHanaSource, SapEccSource, SapCloudForCustomerSource, SalesforceSource, RelationalSource, - DynamicsSource, DocumentDbCollectionSource, BlobSource, AzureTableSource + DynamicsSource, DocumentDbCollectionSource, BlobSource, AzureTableSource, + DelimitedTextSource, ParquetSource All required parameters must be populated in order to send to Azure. 
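The source-side map below mirrors this: deserializing against the CopySource base class returns whichever subclass the 'type' discriminator names. A hedged sketch using msrest's Model.deserialize, assuming the models package exposes the new classes as the __init__.py changes in this patch indicate:

    from azure.mgmt.datafactory.models import CopySource, DelimitedTextSource

    src = CopySource.deserialize({'type': 'DelimitedTextSource',
                                  'maxConcurrentConnections': 2})
    # The 'DelimitedTextSource' entry added to _subtype_map selects the subclass.
    assert isinstance(src, DelimitedTextSource)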
@@ -65,7 +67,7 @@ class CopySource(Model): } _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource'} + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 
'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py index b372cb91d8ef..ded527b2602a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py @@ -321,18 +321,6 @@ class SybaseAuthenticationType(str, Enum): windows = "Windows" -class DatasetCompressionLevel(str, Enum): - - optimal = "Optimal" - fastest = "Fastest" - - -class JsonFormatFilePattern(str, Enum): - - set_of_objects = "setOfObjects" - array_of_objects = "arrayOfObjects" - - class AzureFunctionActivityMethod(str, Enum): get = "GET" @@ -352,36 +340,17 @@ class WebActivityMethod(str, Enum): delete = "DELETE" -class CassandraSourceReadConsistencyLevels(str, Enum): - - all = "ALL" - each_quorum = "EACH_QUORUM" - quorum = "QUORUM" - local_quorum = "LOCAL_QUORUM" - one = "ONE" - two = "TWO" - three = "THREE" - local_one = "LOCAL_ONE" - serial = "SERIAL" - local_serial = "LOCAL_SERIAL" - - class StoredProcedureParameterType(str, Enum): string = "String" int_enum = "Int" + int64 = "Int64" decimal_enum = "Decimal" guid = "Guid" boolean = "Boolean" date_enum = "Date" -class SalesforceSourceReadBehavior(str, Enum): - - query = "Query" - query_all = "QueryAll" - - class HDInsightActivityDebugInfoOption(str, Enum): none = "None" @@ -389,37 +358,12 @@ class HDInsightActivityDebugInfoOption(str, Enum): failure = "Failure" -class SalesforceSinkWriteBehavior(str, Enum): - - insert = "Insert" - upsert = "Upsert" - - -class AzureSearchIndexWriteBehaviorType(str, Enum): - - merge = "Merge" - upload = "Upload" - - -class CopyBehaviorType(str, Enum): - - preserve_hierarchy = "PreserveHierarchy" - flatten_hierarchy = "FlattenHierarchy" - merge_files = "MergeFiles" - - class PolybaseSettingsRejectType(str, Enum): value = "value" percentage = "percentage" -class 
SapCloudForCustomerSinkWriteBehavior(str, Enum): - - insert = "Insert" - update = "Update" - - class WebHookActivityMethod(str, Enum): post = "POST" @@ -464,6 +408,12 @@ class ManagedIntegrationRuntimeNodeStatus(str, Enum): unavailable = "Unavailable" +class IntegrationRuntimeEntityReferenceType(str, Enum): + + integration_runtime_reference = "IntegrationRuntimeReference" + linked_service_reference = "LinkedServiceReference" + + class IntegrationRuntimeSsisCatalogPricingTier(str, Enum): basic = "Basic" diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py index af540b1e6429..76d8375b3da9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py @@ -30,16 +30,17 @@ class Dataset(Model): GoogleBigQueryObjectDataset, EloquaObjectDataset, DrillTableDataset, CouchbaseTableDataset, ConcurObjectDataset, AzurePostgreSqlTableDataset, AmazonMWSObjectDataset, HttpDataset, AzureSearchIndexDataset, - WebTableDataset, RestResourceDataset, SqlServerTableDataset, - SapOpenHubTableDataset, SapEccResourceDataset, - SapCloudForCustomerResourceDataset, SalesforceObjectDataset, - RelationalTableDataset, AzureMySqlTableDataset, OracleTableDataset, - ODataResourceDataset, CosmosDbMongoDbApiCollectionDataset, - MongoDbV2CollectionDataset, MongoDbCollectionDataset, FileShareDataset, - Office365Dataset, AzureBlobFSDataset, AzureDataLakeStoreDataset, - DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, - CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlTableDataset, - AzureTableDataset, AzureBlobDataset, AmazonS3Dataset + WebTableDataset, SapTableResourceDataset, RestResourceDataset, + SqlServerTableDataset, SapOpenHubTableDataset, SapHanaTableDataset, + SapEccResourceDataset, SapCloudForCustomerResourceDataset, + SalesforceObjectDataset, RelationalTableDataset, AzureMySqlTableDataset, + OracleTableDataset, ODataResourceDataset, + CosmosDbMongoDbApiCollectionDataset, MongoDbV2CollectionDataset, + MongoDbCollectionDataset, FileShareDataset, Office365Dataset, + AzureBlobFSDataset, AzureDataLakeStoreDataset, DynamicsEntityDataset, + DocumentDbCollectionDataset, CustomDataset, CassandraTableDataset, + AzureSqlDWTableDataset, AzureSqlTableDataset, AzureTableDataset, + AzureBlobDataset, DelimitedTextDataset, ParquetDataset, AmazonS3Dataset All required parameters must be populated in order to send to Azure. 
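Two threads run through the enum hunk above: new members are added (Int64 for stored procedure parameters, IntegrationRuntimeEntityReferenceType), while enums such as DatasetCompressionLevel, CassandraSourceReadConsistencyLevels, and SalesforceSinkWriteBehavior are removed because the fields they constrained are retyped as plain objects that may carry either a literal or a Data Factory expression. A sketch of the loosened contract; the expression dict shape follows ADF's usual Expression convention and is an assumption here, not something this patch defines:

    from azure.mgmt.datafactory.models import DatasetGZipCompression

    # The old literal values still work...
    gz = DatasetGZipCompression(level='Optimal')
    # ...and so does an expression resolved at pipeline run time.
    gz_expr = DatasetGZipCompression(
        level={'value': "@dataset().compressionLevel", 'type': 'Expression'})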
@@ -89,7 +90,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 
'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py index c16c0611b364..9c97e2bfa5e3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py @@ -22,9 +22,8 @@ class DatasetDeflateCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Constant filled by server. :type type: str - :param level: The Deflate compression level. Possible values include: - 'Optimal', 'Fastest' - :type level: str or ~azure.mgmt.datafactory.models.DatasetCompressionLevel + :param level: The Deflate compression level. 
+ :type level: object """ _validation = { @@ -34,7 +33,7 @@ class DatasetDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py index 715fe91a12a3..11d00081bc1c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py @@ -22,9 +22,8 @@ class DatasetDeflateCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Constant filled by server. :type type: str - :param level: The Deflate compression level. Possible values include: - 'Optimal', 'Fastest' - :type level: str or ~azure.mgmt.datafactory.models.DatasetCompressionLevel + :param level: The Deflate compression level. + :type level: object """ _validation = { @@ -34,7 +33,7 @@ class DatasetDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py index 48317d06f34e..4925127c7f0f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py @@ -22,9 +22,8 @@ class DatasetGZipCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Constant filled by server. :type type: str - :param level: The GZip compression level. Possible values include: - 'Optimal', 'Fastest' - :type level: str or ~azure.mgmt.datafactory.models.DatasetCompressionLevel + :param level: The GZip compression level. + :type level: object """ _validation = { @@ -34,7 +33,7 @@ class DatasetGZipCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py index 99b1081469f8..97346e06366d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py @@ -22,9 +22,8 @@ class DatasetGZipCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Constant filled by server. :type type: str - :param level: The GZip compression level. 
Possible values include: - 'Optimal', 'Fastest' - :type level: str or ~azure.mgmt.datafactory.models.DatasetCompressionLevel + :param level: The GZip compression level. + :type level: object """ _validation = { @@ -34,7 +33,7 @@ class DatasetGZipCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location.py new file mode 100644 index 000000000000..2c318a91cccb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DatasetLocation(Model): + """Dataset location. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DatasetLocation, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = kwargs.get('type', None) + self.folder_path = kwargs.get('folder_path', None) + self.file_name = kwargs.get('file_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location_py3.py new file mode 100644 index 000000000000..d4e32d753197 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location_py3.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DatasetLocation(Model): + """Dataset location. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(DatasetLocation, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type + self.folder_path = folder_path + self.file_name = file_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py index adc64b228236..c793e32f2251 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py @@ -30,16 +30,17 @@ class Dataset(Model): GoogleBigQueryObjectDataset, EloquaObjectDataset, DrillTableDataset, CouchbaseTableDataset, ConcurObjectDataset, AzurePostgreSqlTableDataset, AmazonMWSObjectDataset, HttpDataset, AzureSearchIndexDataset, - WebTableDataset, RestResourceDataset, SqlServerTableDataset, - SapOpenHubTableDataset, SapEccResourceDataset, - SapCloudForCustomerResourceDataset, SalesforceObjectDataset, - RelationalTableDataset, AzureMySqlTableDataset, OracleTableDataset, - ODataResourceDataset, CosmosDbMongoDbApiCollectionDataset, - MongoDbV2CollectionDataset, MongoDbCollectionDataset, FileShareDataset, - Office365Dataset, AzureBlobFSDataset, AzureDataLakeStoreDataset, - DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, - CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlTableDataset, - AzureTableDataset, AzureBlobDataset, AmazonS3Dataset + WebTableDataset, SapTableResourceDataset, RestResourceDataset, + SqlServerTableDataset, SapOpenHubTableDataset, SapHanaTableDataset, + SapEccResourceDataset, SapCloudForCustomerResourceDataset, + SalesforceObjectDataset, RelationalTableDataset, AzureMySqlTableDataset, + OracleTableDataset, ODataResourceDataset, + CosmosDbMongoDbApiCollectionDataset, MongoDbV2CollectionDataset, + MongoDbCollectionDataset, FileShareDataset, Office365Dataset, + AzureBlobFSDataset, AzureDataLakeStoreDataset, DynamicsEntityDataset, + DocumentDbCollectionDataset, CustomDataset, CassandraTableDataset, + AzureSqlDWTableDataset, AzureSqlTableDataset, AzureTableDataset, + AzureBlobDataset, DelimitedTextDataset, ParquetDataset, AmazonS3Dataset All required parameters must be populated in order to send to Azure. 
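With DatasetLocation in place, the new 'DelimitedText' subtype entry below lets a delimited text dataset be declared against any file store through a single location object. A usage sketch; the linked service name and paths are hypothetical, and the 'AzureBlobFSLocation' type string is an assumption consistent with the ADLS Gen 2 support this patch adds:

    from azure.mgmt.datafactory.models import (
        DatasetLocation, DelimitedTextDataset, LinkedServiceReference)

    ds = DelimitedTextDataset(
        linked_service_name=LinkedServiceReference(reference_name='ls_adls_gen2'),
        location=DatasetLocation(type='AzureBlobFSLocation',
                                 folder_path='raw/csv', file_name='input.csv'),
        column_delimiter=',',
        first_row_as_header=True)
    assert ds.type == 'DelimitedText'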
@@ -89,7 +90,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 
'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py index 9312098be5a3..ed80bf3cbcf2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py @@ -22,9 +22,8 @@ class DatasetZipDeflateCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Constant filled by server. :type type: str - :param level: The ZipDeflate compression level. Possible values include: - 'Optimal', 'Fastest' - :type level: str or ~azure.mgmt.datafactory.models.DatasetCompressionLevel + :param level: The ZipDeflate compression level. 
+ :type level: object """ _validation = { @@ -34,7 +33,7 @@ class DatasetZipDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py index 74fbb92ce1ab..20abd6fe1088 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py @@ -22,9 +22,8 @@ class DatasetZipDeflateCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Constant filled by server. :type type: str - :param level: The ZipDeflate compression level. Possible values include: - 'Optimal', 'Fastest' - :type level: str or ~azure.mgmt.datafactory.models.DatasetCompressionLevel + :param level: The ZipDeflate compression level. + :type level: object """ _validation = { @@ -34,7 +33,7 @@ class DatasetZipDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset.py new file mode 100644 index 000000000000..bfee26fcd12c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset.py @@ -0,0 +1,122 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class DelimitedTextDataset(Dataset): + """Delimited text dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the delimited text storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param column_delimiter: The column delimiter. Type: string (or Expression + with resultType string). + :type column_delimiter: object + :param row_delimiter: The row delimiter. Type: string (or Expression with + resultType string). + :type row_delimiter: object + :param encoding_name: The code page name of the preferred encoding. If + missing, the default value is UTF-8, unless BOM denotes another Unicode + encoding. Refer to the name column of the table in the following link to + set supported values: + https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string + (or Expression with resultType string). + :type encoding_name: object + :param compression_codec: + :type compression_codec: object + :param compression_level: The data compression method used for + DelimitedText. + :type compression_level: object + :param quote_char: The quote character. Type: string (or Expression with + resultType string). + :type quote_char: object + :param escape_char: The escape character. Type: string (or Expression with + resultType string). + :type escape_char: object + :param first_row_as_header: When used as input, treat the first row of + data as headers. When used as output, write the headers into the output as + the first row of data. The default value is false. Type: boolean (or + Expression with resultType boolean). + :type first_row_as_header: object + :param null_value: The null value string. Type: string (or Expression with + resultType string). 
+ :type null_value: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'}, + 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, + 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, + 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'}, + 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, + 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, + 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, + 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DelimitedTextDataset, self).__init__(**kwargs) + self.location = kwargs.get('location', None) + self.column_delimiter = kwargs.get('column_delimiter', None) + self.row_delimiter = kwargs.get('row_delimiter', None) + self.encoding_name = kwargs.get('encoding_name', None) + self.compression_codec = kwargs.get('compression_codec', None) + self.compression_level = kwargs.get('compression_level', None) + self.quote_char = kwargs.get('quote_char', None) + self.escape_char = kwargs.get('escape_char', None) + self.first_row_as_header = kwargs.get('first_row_as_header', None) + self.null_value = kwargs.get('null_value', None) + self.type = 'DelimitedText' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset_py3.py new file mode 100644 index 000000000000..c2597e6a022b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset_py3.py @@ -0,0 +1,122 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class DelimitedTextDataset(Dataset): + """Delimited text dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. 
+ :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the delimited text storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param column_delimiter: The column delimiter. Type: string (or Expression + with resultType string). + :type column_delimiter: object + :param row_delimiter: The row delimiter. Type: string (or Expression with + resultType string). + :type row_delimiter: object + :param encoding_name: The code page name of the preferred encoding. If + missing, the default value is UTF-8, unless BOM denotes another Unicode + encoding. Refer to the name column of the table in the following link to + set supported values: + https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string + (or Expression with resultType string). + :type encoding_name: object + :param compression_codec: + :type compression_codec: object + :param compression_level: The data compression method used for + DelimitedText. + :type compression_level: object + :param quote_char: The quote character. Type: string (or Expression with + resultType string). + :type quote_char: object + :param escape_char: The escape character. Type: string (or Expression with + resultType string). + :type escape_char: object + :param first_row_as_header: When used as input, treat the first row of + data as headers. When used as output, write the headers into the output as + the first row of data. The default value is false. Type: boolean (or + Expression with resultType boolean). + :type first_row_as_header: object + :param null_value: The null value string. Type: string (or Expression with + resultType string). 
+ :type null_value: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'}, + 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, + 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, + 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'}, + 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, + 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, + 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, + 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, column_delimiter=None, row_delimiter=None, encoding_name=None, compression_codec=None, compression_level=None, quote_char=None, escape_char=None, first_row_as_header=None, null_value=None, **kwargs) -> None: + super(DelimitedTextDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.location = location + self.column_delimiter = column_delimiter + self.row_delimiter = row_delimiter + self.encoding_name = encoding_name + self.compression_codec = compression_codec + self.compression_level = compression_level + self.quote_char = quote_char + self.escape_char = escape_char + self.first_row_as_header = first_row_as_header + self.null_value = null_value + self.type = 'DelimitedText' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting.py new file mode 100644 index 000000000000..004eb595a05e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .format_read_setting import FormatReadSetting + + +class DelimitedTextReadSetting(FormatReadSetting): + """Delimited text read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param skip_line_count: Indicates the number of non-empty rows to skip + when reading data from input files. Type: integer (or Expression with + resultType integer). + :type skip_line_count: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DelimitedTextReadSetting, self).__init__(**kwargs) + self.skip_line_count = kwargs.get('skip_line_count', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting_py3.py new file mode 100644 index 000000000000..87915fcb3db7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting_py3.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .format_read_setting_py3 import FormatReadSetting + + +class DelimitedTextReadSetting(FormatReadSetting): + """Delimited text read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param skip_line_count: Indicates the number of non-empty rows to skip + when reading data from input files. Type: integer (or Expression with + resultType integer). 
+ :type skip_line_count: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, skip_line_count=None, **kwargs) -> None: + super(DelimitedTextReadSetting, self).__init__(additional_properties=additional_properties, type=type, **kwargs) + self.skip_line_count = skip_line_count diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py new file mode 100644 index 000000000000..ae93f209c8b3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class DelimitedTextSink(CopySink): + """A copy activity DelimitedText sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.mgmt.datafactory.models.ConnectorWriteSetting + :param format_settings: DelimitedText format settings. 
+ :type format_settings: + ~azure.mgmt.datafactory.models.DelimitedTextWriteSetting + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorWriteSetting'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSetting'}, + } + + def __init__(self, **kwargs): + super(DelimitedTextSink, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) + self.type = 'DelimitedTextSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py new file mode 100644 index 000000000000..a1ba953a2662 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class DelimitedTextSink(CopySink): + """A copy activity DelimitedText sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.mgmt.datafactory.models.ConnectorWriteSetting + :param format_settings: DelimitedText format settings. 
+ :type format_settings: + ~azure.mgmt.datafactory.models.DelimitedTextWriteSetting + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorWriteSetting'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSetting'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: + super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.format_settings = format_settings + self.type = 'DelimitedTextSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py new file mode 100644 index 000000000000..9f2067d24b9c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py @@ -0,0 +1,61 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class DelimitedTextSource(CopySource): + """A copy activity DelimitedText source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.mgmt.datafactory.models.ConnectorReadSetting + :param format_settings: DelimitedText format settings. 
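As a usage illustration for the DelimitedTextSink model introduced above — a minimal sketch, not part of the patch: it assumes the new classes are exported from azure.mgmt.datafactory.models (the __init__.py changes in this patch suggest they are), and the store-setting type string and file values are invented for the example:

from azure.mgmt.datafactory.models import (
    ConnectorWriteSetting, DelimitedTextSink, DelimitedTextWriteSetting)

# store_settings say where to write; format_settings say how the text is rendered.
sink = DelimitedTextSink(
    store_settings=ConnectorWriteSetting(type='AzureBlobStorageWriteSetting'),
    format_settings=DelimitedTextWriteSetting(
        type='DelimitedTextWriteSetting',
        file_extension='.csv',   # required by the write-setting model
        quote_all_text=True,
    ),
)

msrest only enforces the _validation rules at (de)serialization time, so the object can be assembled incrementally like any other generated model.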
+ :type format_settings: + ~azure.mgmt.datafactory.models.DelimitedTextReadSetting + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorReadSetting'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSetting'}, + } + + def __init__(self, **kwargs): + super(DelimitedTextSource, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) + self.type = 'DelimitedTextSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py new file mode 100644 index 000000000000..b158f97bde81 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py @@ -0,0 +1,61 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class DelimitedTextSource(CopySource): + """A copy activity DelimitedText source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.mgmt.datafactory.models.ConnectorReadSetting + :param format_settings: DelimitedText format settings. 
+ :type format_settings: + ~azure.mgmt.datafactory.models.DelimitedTextReadSetting + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorReadSetting'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSetting'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: + super(DelimitedTextSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.format_settings = format_settings + self.type = 'DelimitedTextSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting.py new file mode 100644 index 000000000000..21fe168f1316 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .format_write_setting import FormatWriteSetting + + +class DelimitedTextWriteSetting(FormatWriteSetting): + """Delimited text write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param quote_all_text: Indicates whether string values should always be + enclosed with quotes. Type: boolean (or Expression with resultType + boolean). + :type quote_all_text: object + :param file_extension: Required. The file extension used to create the + files. Type: string (or Expression with resultType string). 
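The matching source model pairs the same store settings with the DelimitedTextReadSetting shown earlier in this patch. A brief sketch under the same export assumption as above; the type strings are invented:

from azure.mgmt.datafactory.models import (
    ConnectorReadSetting, DelimitedTextReadSetting, DelimitedTextSource)

source = DelimitedTextSource(
    store_settings=ConnectorReadSetting(type='AzureBlobStorageReadSetting'),
    # skip_line_count is typed 'object', so it may hold an int or an ADF expression.
    format_settings=DelimitedTextReadSetting(
        type='DelimitedTextReadSetting', skip_line_count=1),
)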
+ :type file_extension: object + """ + + _validation = { + 'type': {'required': True}, + 'file_extension': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, + 'file_extension': {'key': 'fileExtension', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DelimitedTextWriteSetting, self).__init__(**kwargs) + self.quote_all_text = kwargs.get('quote_all_text', None) + self.file_extension = kwargs.get('file_extension', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting_py3.py new file mode 100644 index 000000000000..ac0e3b2d00cc --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting_py3.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .format_write_setting_py3 import FormatWriteSetting + + +class DelimitedTextWriteSetting(FormatWriteSetting): + """Delimited text write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param quote_all_text: Indicates whether string values should always be + enclosed with quotes. Type: boolean (or Expression with resultType + boolean). + :type quote_all_text: object + :param file_extension: Required. The file extension used to create the + files. Type: string (or Expression with resultType string). 
+ :type file_extension: object + """ + + _validation = { + 'type': {'required': True}, + 'file_extension': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, + 'file_extension': {'key': 'fileExtension', 'type': 'object'}, + } + + def __init__(self, *, type: str, file_extension, additional_properties=None, quote_all_text=None, **kwargs) -> None: + super(DelimitedTextWriteSetting, self).__init__(additional_properties=additional_properties, type=type, **kwargs) + self.quote_all_text = quote_all_text + self.file_extension = file_extension diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py index 25e80ee45466..c2908dc1dd05 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py @@ -43,6 +43,9 @@ class DocumentDbCollectionSink(CopySink): :param nesting_separator: Nested properties separator. Default is . (dot). Type: string (or Expression with resultType string). :type nesting_separator: object + :param write_behavior: Describes how to write data to Azure Cosmos DB. + Allowed values: insert and upsert. + :type write_behavior: object """ _validation = { @@ -58,9 +61,11 @@ class DocumentDbCollectionSink(CopySink): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } def __init__(self, **kwargs): super(DocumentDbCollectionSink, self).__init__(**kwargs) self.nesting_separator = kwargs.get('nesting_separator', None) + self.write_behavior = kwargs.get('write_behavior', None) self.type = 'DocumentDbCollectionSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py index 111897036215..f1410cd211a4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py @@ -43,6 +43,9 @@ class DocumentDbCollectionSink(CopySink): :param nesting_separator: Nested properties separator. Default is . (dot). Type: string (or Expression with resultType string). :type nesting_separator: object + :param write_behavior: Describes how to write data to Azure Cosmos DB. + Allowed values: insert and upsert. 
+ :type write_behavior: object """ _validation = { @@ -58,9 +61,11 @@ class DocumentDbCollectionSink(CopySink): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, nesting_separator=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, nesting_separator=None, write_behavior=None, **kwargs) -> None: super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.nesting_separator = nesting_separator + self.write_behavior = write_behavior self.type = 'DocumentDbCollectionSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py index 233c4c99d4df..392b8ac7b971 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py @@ -45,7 +45,7 @@ class DynamicsAXResourceDataset(Dataset): :type type: str :param path: Required. The path of the Dynamics AX OData entity. Type: string (or Expression with resultType string). - :type path: str + :type path: object """ _validation = { @@ -64,7 +64,7 @@ class DynamicsAXResourceDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, - 'path': {'key': 'typeProperties.path', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py index 788c9084ee9b..6cade3e4aa59 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py @@ -45,7 +45,7 @@ class DynamicsAXResourceDataset(Dataset): :type type: str :param path: Required. The path of the Dynamics AX OData entity. Type: string (or Expression with resultType string). 
- :type path: str + :type path: object """ _validation = { @@ -64,10 +64,10 @@ class DynamicsAXResourceDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, - 'path': {'key': 'typeProperties.path', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, } - def __init__(self, *, linked_service_name, path: str, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, path, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: super(DynamicsAXResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.path = path self.type = 'DynamicsAXResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py index 45bac7b52064..5afce6ced25b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py @@ -44,8 +44,7 @@ class DynamicsSink(CopySink): :param type: Required. Constant filled by server. :type type: str :ivar write_behavior: Required. The write behavior for the operation. - Default value: "Upsert" . - :vartype write_behavior: str + :vartype write_behavior: object :param ignore_null_values: The flag indicating whether ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). @@ -65,11 +64,11 @@ class DynamicsSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, } - write_behavior = "Upsert" + write_behavior = None def __init__(self, **kwargs): super(DynamicsSink, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py index 5f736f9cf658..ffdb08363bfd 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py @@ -44,8 +44,7 @@ class DynamicsSink(CopySink): :param type: Required. Constant filled by server. :type type: str :ivar write_behavior: Required. The write behavior for the operation. - Default value: "Upsert" . - :vartype write_behavior: str + :vartype write_behavior: object :param ignore_null_values: The flag indicating whether ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). 
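The two DynamicsSink hunks here and the DocumentDbCollectionSink change earlier follow the same pattern: write_behavior moves from a fixed string to a free-form object so it can carry an ADF expression as well as a literal. A hypothetical illustration (the upsert literal and the expression payload are examples, not mandated values):

from azure.mgmt.datafactory.models import DocumentDbCollectionSink

sink = DocumentDbCollectionSink(
    nesting_separator='.',
    # 'object' typing accepts a plain literal...
    write_behavior='upsert',
)
# ...or an ADF expression resolved at run time:
sink.write_behavior = {'value': "@pipeline().parameters.writeMode",
                       'type': 'Expression'}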
@@ -65,11 +64,11 @@ class DynamicsSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, } - write_behavior = "Upsert" + write_behavior = None def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None: super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference.py new file mode 100644 index 000000000000..5db1448a5a55 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class EntityReference(Model): + """The entity reference. + + :param type: The type of this referenced entity. Possible values include: + 'IntegrationRuntimeReference', 'LinkedServiceReference' + :type type: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType + :param reference_name: The name of this referenced entity. + :type reference_name: str + """ + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(EntityReference, self).__init__(**kwargs) + self.type = kwargs.get('type', None) + self.reference_name = kwargs.get('reference_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference_py3.py new file mode 100644 index 000000000000..f87698b67a64 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference_py3.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class EntityReference(Model): + """The entity reference. + + :param type: The type of this referenced entity. Possible values include: + 'IntegrationRuntimeReference', 'LinkedServiceReference' + :type type: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType + :param reference_name: The name of this referenced entity. + :type reference_name: str + """ + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + def __init__(self, *, type=None, reference_name: str=None, **kwargs) -> None: + super(EntityReference, self).__init__(**kwargs) + self.type = type + self.reference_name = reference_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location.py new file mode 100644 index 000000000000..edce5fe68a65 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class FileServerLocation(DatasetLocation): + """The location of file server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(FileServerLocation, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location_py3.py new file mode 100644 index 000000000000..f7fb8354bcbc --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class FileServerLocation(DatasetLocation): + """The location of file server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(FileServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting.py new file mode 100644 index 000000000000..6ba2a5f56b79 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting import ConnectorReadSetting + + +class FileServerReadSetting(ConnectorReadSetting): + """File server read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: FileServer wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: FileServer wildcardFileName. Type: string (or + Expression with resultType string). 
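EntityReference, added above, is a small discriminated reference used elsewhere in this patch (for example by the vNet-related on-demand HDInsight properties). A one-line sketch; the reference name is hypothetical:

from azure.mgmt.datafactory.models import EntityReference

ir_ref = EntityReference(type='IntegrationRuntimeReference',
                         reference_name='my-shared-ir')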
+ :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(FileServerReadSetting, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting_py3.py new file mode 100644 index 000000000000..4393692d63f3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting_py3 import ConnectorReadSetting + + +class FileServerReadSetting(ConnectorReadSetting): + """File server read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: FileServer wildcardFolderPath. 
Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: FileServer wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(FileServerReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting.py new file mode 100644 index 000000000000..9342210abdfb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_write_setting import ConnectorWriteSetting + + +class FileServerWriteSetting(ConnectorWriteSetting): + """File server write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. 
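The read-setting models bundle the traversal knobs (recursion, wildcards, modified-datetime window) into one object. A sketch of FileServerReadSetting under the same export assumption as the earlier examples; the wildcard and datetime values are invented:

from azure.mgmt.datafactory.models import FileServerReadSetting

read = FileServerReadSetting(
    type='FileServerReadSetting',
    recursive=True,                  # walk sub-folders under folder_path
    wildcard_file_name='*.csv',      # filter files by name pattern
    modified_datetime_start='2019-06-01T00:00:00Z',
    modified_datetime_end='2019-06-07T00:00:00Z',
)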
+ :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(FileServerWriteSetting, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting_py3.py new file mode 100644 index 000000000000..1ed4bf220417 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_write_setting_py3 import ConnectorWriteSetting + + +class FileServerWriteSetting(ConnectorWriteSetting): + """File server write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. 
+ :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(FileServerWriteSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py index 75baab87456e..8b8f238c9534 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py @@ -40,10 +40,8 @@ class FileSystemSink(CopySink): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str - :param copy_behavior: The type of copy behavior for copy sink. Possible - values include: 'PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles' - :type copy_behavior: str or - ~azure.mgmt.datafactory.models.CopyBehaviorType + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object """ _validation = { @@ -58,7 +56,7 @@ class FileSystemSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py index 92388128726e..24f8623cbb02 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py @@ -40,10 +40,8 @@ class FileSystemSink(CopySink): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str - :param copy_behavior: The type of copy behavior for copy sink. Possible - values include: 'PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles' - :type copy_behavior: str or - ~azure.mgmt.datafactory.models.CopyBehaviorType + :param copy_behavior: The type of copy behavior for copy sink. 
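FileSystemSink's copy_behavior is loosened here from the CopyBehaviorType enum to object, mirroring the write-setting models above; the old enum literals should still be usable as plain strings. A minimal sketch:

from azure.mgmt.datafactory.models import FileSystemSink

# 'PreserveHierarchy' was a CopyBehaviorType member; it now travels as a literal.
sink = FileSystemSink(copy_behavior='PreserveHierarchy')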
+ :type copy_behavior: object """ _validation = { @@ -58,7 +56,7 @@ class FileSystemSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_translator.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_setting.py similarity index 73% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_translator.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_setting.py index 2b0242ef997c..730cec9f525f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_translator.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_setting.py @@ -12,18 +12,15 @@ from msrest.serialization import Model -class CopyTranslator(Model): - """A copy activity translator. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: TabularTranslator +class FormatReadSetting(Model): + """Format read settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. + :param type: Required. The read setting type. :type type: str """ @@ -36,11 +33,7 @@ class CopyTranslator(Model): 'type': {'key': 'type', 'type': 'str'}, } - _subtype_map = { - 'type': {'TabularTranslator': 'TabularTranslator'} - } - def __init__(self, **kwargs): - super(CopyTranslator, self).__init__(**kwargs) + super(FormatReadSetting, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) - self.type = None + self.type = kwargs.get('type', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_translator_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_setting_py3.py similarity index 68% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_translator_py3.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_setting_py3.py index 3fef58394fd0..ed68bf35f009 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_translator_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_setting_py3.py @@ -12,18 +12,15 @@ from msrest.serialization import Model -class CopyTranslator(Model): - """A copy activity translator. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: TabularTranslator +class FormatReadSetting(Model): + """Format read settings. All required parameters must be populated in order to send to Azure. 
:param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. + :param type: Required. The read setting type. :type type: str """ @@ -36,11 +33,7 @@ class CopyTranslator(Model): 'type': {'key': 'type', 'type': 'str'}, } - _subtype_map = { - 'type': {'TabularTranslator': 'TabularTranslator'} - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(CopyTranslator, self).__init__(**kwargs) + def __init__(self, *, type: str, additional_properties=None, **kwargs) -> None: + super(FormatReadSetting, self).__init__(**kwargs) self.additional_properties = additional_properties - self.type = None + self.type = type diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting.py new file mode 100644 index 000000000000..0fd6966859d5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FormatWriteSetting(Model): + """Format write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(FormatWriteSetting, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = kwargs.get('type', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting_py3.py new file mode 100644 index 000000000000..3e5609066208 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting_py3.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FormatWriteSetting(Model): + """Format write settings. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, type: str, additional_properties=None, **kwargs) -> None: + super(FormatWriteSetting, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting.py new file mode 100644 index 000000000000..137a56948deb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting import ConnectorReadSetting + + +class FtpReadSetting(ConnectorReadSetting): + """Ftp read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Ftp wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Ftp wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param use_binary_transfer: Specify whether to use binary transfer mode + for FTP stores. 
+ :type use_binary_transfer: bool + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'}, + } + + def __init__(self, **kwargs): + super(FtpReadSetting, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.use_binary_transfer = kwargs.get('use_binary_transfer', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting_py3.py new file mode 100644 index 000000000000..5294301e4fd8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting_py3.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting_py3 import ConnectorReadSetting + + +class FtpReadSetting(ConnectorReadSetting): + """Ftp read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Ftp wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Ftp wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param use_binary_transfer: Specify whether to use binary transfer mode + for FTP stores. 
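FtpReadSetting adds use_binary_transfer, which is a plain bool rather than an expression-capable object. A short sketch with invented values, under the same export assumption as the earlier examples:

from azure.mgmt.datafactory.models import FtpReadSetting

ftp = FtpReadSetting(
    type='FtpReadSetting',
    wildcard_file_name='*.zip',
    use_binary_transfer=True,  # binary mode avoids newline translation over FTP
)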
+ :type use_binary_transfer: bool + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, use_binary_transfer: bool=None, **kwargs) -> None: + super(FtpReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.use_binary_transfer = use_binary_transfer diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location.py new file mode 100644 index 000000000000..5d5e933036df --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class FtpServerLocation(DatasetLocation): + """The location of ftp server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). 
+ :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(FtpServerLocation, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location_py3.py new file mode 100644 index 000000000000..ac296bcfca31 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class FtpServerLocation(DatasetLocation): + """The location of ftp server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(FtpServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location.py new file mode 100644 index 000000000000..a8f5d1ba332c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class HdfsLocation(DatasetLocation): + """The location of HDFS. 
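FtpServerLocation pairs naturally with the read settings above: the location says where the files live, the read settings say how to enumerate them. A short sketch with illustrative path values; because folder_path and file_name are typed as object, either a literal string or a Data Factory expression object ({'value': '@dataset().path', 'type': 'Expression'}) can be supplied:

    from azure.mgmt.datafactory.models import FtpServerLocation

    ftp_location = FtpServerLocation(
        type='FtpServerLocation',  # assumed discriminator value
        folder_path='landing/2019/06',
        file_name='orders.csv',
    )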
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HdfsLocation, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location_py3.py new file mode 100644 index 000000000000..2e07575bef0f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class HdfsLocation(DatasetLocation): + """The location of HDFS. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). 
+ :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(HdfsLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting.py new file mode 100644 index 000000000000..4fdadbc2fcd0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting import ConnectorReadSetting + + +class HdfsReadSetting(ConnectorReadSetting): + """HDFS read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: HDFS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + :param distcp_settings: Specifies Distcp-related settings. 
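HdfsLocation, defined in the two files above, is structurally identical to the FTP location apart from its type string. A minimal sketch with assumed path values:

    from azure.mgmt.datafactory.models import HdfsLocation

    hdfs_location = HdfsLocation(
        type='HdfsLocation',  # assumed discriminator value
        folder_path='/data/raw/events',
        file_name='part-00000.avro',
    )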
+ :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, + } + + def __init__(self, **kwargs): + super(HdfsReadSetting, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + self.distcp_settings = kwargs.get('distcp_settings', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting_py3.py new file mode 100644 index 000000000000..164a6f497e52 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting_py3 import ConnectorReadSetting + + +class HdfsReadSetting(ConnectorReadSetting): + """HDFS read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: HDFS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. 
+ :type enable_partition_discovery: bool
+ :param modified_datetime_start: The start of file's modified datetime.
+ Type: string (or Expression with resultType string).
+ :type modified_datetime_start: object
+ :param modified_datetime_end: The end of file's modified datetime. Type:
+ string (or Expression with resultType string).
+ :type modified_datetime_end: object
+ :param distcp_settings: Specifies Distcp-related settings.
+ :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'recursive': {'key': 'recursive', 'type': 'object'},
+ 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
+ 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
+ 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'},
+ 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'},
+ 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'},
+ 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'},
+ }
+
+ def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, distcp_settings=None, **kwargs) -> None:
+ super(HdfsReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.recursive = recursive
+ self.wildcard_folder_path = wildcard_folder_path
+ self.wildcard_file_name = wildcard_file_name
+ self.enable_partition_discovery = enable_partition_discovery
+ self.modified_datetime_start = modified_datetime_start
+ self.modified_datetime_end = modified_datetime_end
+ self.distcp_settings = distcp_settings
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting.py
new file mode 100644
index 000000000000..696a9fdb3faf
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting.py
@@ -0,0 +1,63 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .connector_read_setting import ConnectorReadSetting
+
+
+class HttpReadSetting(ConnectorReadSetting):
+ """Http read settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. The read setting type.
+ :type type: str
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
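HdfsReadSetting, completed above, layers partition discovery, modified-time filtering, and optional DistCp staging over the common wildcard settings. A sketch under the assumption that DistcpSettings (referenced by the attribute map) exposes resource_manager_endpoint and temp_script_path fields; those names are not shown in this hunk:

    from azure.mgmt.datafactory.models import DistcpSettings, HdfsReadSetting

    hdfs_read = HdfsReadSetting(
        type='HdfsReadSetting',  # assumed discriminator value
        recursive=True,
        wildcard_file_name='*.avro',
        enable_partition_discovery=True,
        modified_datetime_start='2019-06-01T00:00:00Z',
        modified_datetime_end='2019-06-07T00:00:00Z',
        distcp_settings=DistcpSettings(
            resource_manager_endpoint='https://yarn.example.net:8088',  # assumed field name
            temp_script_path='/tmp/distcp-staging',                     # assumed field name
        ),
    )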
+ :type max_concurrent_connections: object
+ :param request_method: The HTTP method used to call the RESTful API. The
+ default is GET. Type: string (or Expression with resultType string).
+ :type request_method: object
+ :param request_body: The HTTP request body to the RESTful API if
+ requestMethod is POST. Type: string (or Expression with resultType
+ string).
+ :type request_body: object
+ :param additional_headers: The additional HTTP headers in the request to
+ the RESTful API. Type: string (or Expression with resultType string).
+ :type additional_headers: object
+ :param request_timeout: Specifies the timeout for an HTTP client to get
+ HTTP response from HTTP server.
+ :type request_timeout: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'request_method': {'key': 'requestMethod', 'type': 'object'},
+ 'request_body': {'key': 'requestBody', 'type': 'object'},
+ 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'},
+ 'request_timeout': {'key': 'requestTimeout', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(HttpReadSetting, self).__init__(**kwargs)
+ self.request_method = kwargs.get('request_method', None)
+ self.request_body = kwargs.get('request_body', None)
+ self.additional_headers = kwargs.get('additional_headers', None)
+ self.request_timeout = kwargs.get('request_timeout', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting_py3.py
new file mode 100644
index 000000000000..3d5d75a80785
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting_py3.py
@@ -0,0 +1,63 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .connector_read_setting_py3 import ConnectorReadSetting
+
+
+class HttpReadSetting(ConnectorReadSetting):
+ """Http read settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. The read setting type.
+ :type type: str
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param request_method: The HTTP method used to call the RESTful API. The
+ default is GET. Type: string (or Expression with resultType string).
+ :type request_method: object
+ :param request_body: The HTTP request body to the RESTful API if
+ requestMethod is POST. Type: string (or Expression with resultType
+ string).
+ :type request_body: object
+ :param additional_headers: The additional HTTP headers in the request to
+ the RESTful API. Type: string (or Expression with resultType string).
+ :type additional_headers: object
+ :param request_timeout: Specifies the timeout for an HTTP client to get
+ HTTP response from HTTP server.
+ :type request_timeout: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'request_method': {'key': 'requestMethod', 'type': 'object'},
+ 'request_body': {'key': 'requestBody', 'type': 'object'},
+ 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'},
+ 'request_timeout': {'key': 'requestTimeout', 'type': 'object'},
+ }
+
+ def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, request_method=None, request_body=None, additional_headers=None, request_timeout=None, **kwargs) -> None:
+ super(HttpReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.request_method = request_method
+ self.request_body = request_body
+ self.additional_headers = additional_headers
+ self.request_timeout = request_timeout
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location.py
new file mode 100644
index 000000000000..94106fae9d15
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location.py
@@ -0,0 +1,50 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_location import DatasetLocation
+
+
+class HttpServerLocation(DatasetLocation):
+ """The location of http server.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. Type of dataset storage location.
+ :type type: str
+ :param folder_path: Specify the folder path of dataset. Type: string (or
+ Expression with resultType string)
+ :type folder_path: object
+ :param file_name: Specify the file name of dataset. Type: string (or
+ Expression with resultType string).
+ :type file_name: object
+ :param relative_url: Specify the relativeUrl of http server.
Type: string + (or Expression with resultType string) + :type relative_url: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'relative_url': {'key': 'relativeUrl', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HttpServerLocation, self).__init__(**kwargs) + self.relative_url = kwargs.get('relative_url', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location_py3.py new file mode 100644 index 000000000000..c52c53dcf357 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location_py3.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class HttpServerLocation(DatasetLocation): + """The location of http server. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param relative_url: Specify the relativeUrl of http server. 
Type: string + (or Expression with resultType string) + :type relative_url: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'relative_url': {'key': 'relativeUrl', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, relative_url=None, **kwargs) -> None: + super(HttpServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) + self.relative_url = relative_url diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties.py new file mode 100644 index 000000000000..ebc0e9b38d6f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties.py @@ -0,0 +1,37 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeDataProxyProperties(Model): + """Data proxy properties for a managed dedicated integration runtime. + + :param connect_via: The self-hosted integration runtime reference. + :type connect_via: ~azure.mgmt.datafactory.models.EntityReference + :param staging_linked_service: The staging linked service reference. + :type staging_linked_service: + ~azure.mgmt.datafactory.models.EntityReference + :param path: The path to contain the staged data in the Blob storage. + :type path: str + """ + + _attribute_map = { + 'connect_via': {'key': 'connectVia', 'type': 'EntityReference'}, + 'staging_linked_service': {'key': 'stagingLinkedService', 'type': 'EntityReference'}, + 'path': {'key': 'path', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeDataProxyProperties, self).__init__(**kwargs) + self.connect_via = kwargs.get('connect_via', None) + self.staging_linked_service = kwargs.get('staging_linked_service', None) + self.path = kwargs.get('path', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties_py3.py new file mode 100644 index 000000000000..532b774cad3d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties_py3.py @@ -0,0 +1,37 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
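The HTTP pair mirrors the FTP pair: HttpServerLocation points at a path relative to the linked service's base URL, while HttpReadSetting shapes the request itself. A combined sketch; the discriminator strings, the timespan format for request_timeout, and the 'name: value' header format are assumptions, not spelled out in these hunks:

    from azure.mgmt.datafactory.models import HttpReadSetting, HttpServerLocation

    http_location = HttpServerLocation(
        type='HttpServerLocation',  # assumed discriminator value
        relative_url='feeds/daily.json',
    )

    http_read = HttpReadSetting(
        type='HttpReadSetting',  # assumed discriminator value
        request_method='POST',
        request_body='{"since": "2019-06-01"}',
        additional_headers='Content-Type: application/json',
        request_timeout='00:05:00',  # assumed timespan format
    )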
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeDataProxyProperties(Model): + """Data proxy properties for a managed dedicated integration runtime. + + :param connect_via: The self-hosted integration runtime reference. + :type connect_via: ~azure.mgmt.datafactory.models.EntityReference + :param staging_linked_service: The staging linked service reference. + :type staging_linked_service: + ~azure.mgmt.datafactory.models.EntityReference + :param path: The path to contain the staged data in the Blob storage. + :type path: str + """ + + _attribute_map = { + 'connect_via': {'key': 'connectVia', 'type': 'EntityReference'}, + 'staging_linked_service': {'key': 'stagingLinkedService', 'type': 'EntityReference'}, + 'path': {'key': 'path', 'type': 'str'}, + } + + def __init__(self, *, connect_via=None, staging_linked_service=None, path: str=None, **kwargs) -> None: + super(IntegrationRuntimeDataProxyProperties, self).__init__(**kwargs) + self.connect_via = connect_via + self.staging_linked_service = staging_linked_service + self.path = path diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py index e1a091166529..293f071aa0b3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py @@ -30,6 +30,10 @@ class IntegrationRuntimeSsisProperties(Model): a managed dedicated integration runtime. :type custom_setup_script_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeCustomSetupScriptProperties + :param data_proxy_properties: Data proxy properties for a managed + dedicated integration runtime. + :type data_proxy_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeDataProxyProperties :param edition: The edition for the SSIS Integration Runtime. 
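IntegrationRuntimeDataProxyProperties, defined in the two new files above, ties a self-hosted integration runtime and a staging linked service to an Azure-SSIS integration runtime; the hunks that follow wire it into IntegrationRuntimeSsisProperties as data_proxy_properties. A sketch of composing the two, assuming EntityReference takes a type and a reference_name and that the referenced resources already exist in the factory:

    from azure.mgmt.datafactory.models import (
        EntityReference,
        IntegrationRuntimeDataProxyProperties,
        IntegrationRuntimeSsisProperties,
    )

    data_proxy = IntegrationRuntimeDataProxyProperties(
        connect_via=EntityReference(
            type='IntegrationRuntimeReference',  # assumed reference type value
            reference_name='MySelfHostedIR',     # assumed, must exist in the factory
        ),
        staging_linked_service=EntityReference(
            type='LinkedServiceReference',       # assumed reference type value
            reference_name='MyStagingBlob',      # assumed, must exist in the factory
        ),
        path='ssis/staging',
    )

    ssis_props = IntegrationRuntimeSsisProperties(data_proxy_properties=data_proxy)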
Possible values include: 'Standard', 'Enterprise' :type edition: str or @@ -41,6 +45,7 @@ class IntegrationRuntimeSsisProperties(Model): 'catalog_info': {'key': 'catalogInfo', 'type': 'IntegrationRuntimeSsisCatalogInfo'}, 'license_type': {'key': 'licenseType', 'type': 'str'}, 'custom_setup_script_properties': {'key': 'customSetupScriptProperties', 'type': 'IntegrationRuntimeCustomSetupScriptProperties'}, + 'data_proxy_properties': {'key': 'dataProxyProperties', 'type': 'IntegrationRuntimeDataProxyProperties'}, 'edition': {'key': 'edition', 'type': 'str'}, } @@ -50,4 +55,5 @@ def __init__(self, **kwargs): self.catalog_info = kwargs.get('catalog_info', None) self.license_type = kwargs.get('license_type', None) self.custom_setup_script_properties = kwargs.get('custom_setup_script_properties', None) + self.data_proxy_properties = kwargs.get('data_proxy_properties', None) self.edition = kwargs.get('edition', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py index eb70dd23ddb7..f75775e29a7f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py @@ -30,6 +30,10 @@ class IntegrationRuntimeSsisProperties(Model): a managed dedicated integration runtime. :type custom_setup_script_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeCustomSetupScriptProperties + :param data_proxy_properties: Data proxy properties for a managed + dedicated integration runtime. + :type data_proxy_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeDataProxyProperties :param edition: The edition for the SSIS Integration Runtime. 
Possible values include: 'Standard', 'Enterprise' :type edition: str or @@ -41,13 +45,15 @@ class IntegrationRuntimeSsisProperties(Model): 'catalog_info': {'key': 'catalogInfo', 'type': 'IntegrationRuntimeSsisCatalogInfo'}, 'license_type': {'key': 'licenseType', 'type': 'str'}, 'custom_setup_script_properties': {'key': 'customSetupScriptProperties', 'type': 'IntegrationRuntimeCustomSetupScriptProperties'}, + 'data_proxy_properties': {'key': 'dataProxyProperties', 'type': 'IntegrationRuntimeDataProxyProperties'}, 'edition': {'key': 'edition', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, catalog_info=None, license_type=None, custom_setup_script_properties=None, edition=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, catalog_info=None, license_type=None, custom_setup_script_properties=None, data_proxy_properties=None, edition=None, **kwargs) -> None: super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs) self.additional_properties = additional_properties self.catalog_info = catalog_info self.license_type = license_type self.custom_setup_script_properties = custom_setup_script_properties + self.data_proxy_properties = data_proxy_properties self.edition = edition diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py index 736f9500018f..80f4ff0aaf8b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py @@ -30,10 +30,8 @@ class JsonFormat(DatasetStorageFormat): :type type: str :param file_pattern: File pattern of JSON. To be more specific, the way of separating a collection of JSON objects. The default value is - 'setOfObjects'. It is case-sensitive. Possible values include: - 'setOfObjects', 'arrayOfObjects' - :type file_pattern: str or - ~azure.mgmt.datafactory.models.JsonFormatFilePattern + 'setOfObjects'. It is case-sensitive. + :type file_pattern: object :param nesting_separator: The character used to separate nesting levels. Default value is '.' (dot). Type: string (or Expression with resultType string). @@ -67,7 +65,7 @@ class JsonFormat(DatasetStorageFormat): 'serializer': {'key': 'serializer', 'type': 'object'}, 'deserializer': {'key': 'deserializer', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'file_pattern': {'key': 'filePattern', 'type': 'str'}, + 'file_pattern': {'key': 'filePattern', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'encoding_name': {'key': 'encodingName', 'type': 'object'}, 'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py index a9a7f20ea103..2fdb44cc3b7f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py @@ -30,10 +30,8 @@ class JsonFormat(DatasetStorageFormat): :type type: str :param file_pattern: File pattern of JSON. To be more specific, the way of separating a collection of JSON objects. The default value is - 'setOfObjects'. It is case-sensitive. 
Possible values include: - 'setOfObjects', 'arrayOfObjects' - :type file_pattern: str or - ~azure.mgmt.datafactory.models.JsonFormatFilePattern + 'setOfObjects'. It is case-sensitive. + :type file_pattern: object :param nesting_separator: The character used to separate nesting levels. Default value is '.' (dot). Type: string (or Expression with resultType string). @@ -67,7 +65,7 @@ class JsonFormat(DatasetStorageFormat): 'serializer': {'key': 'serializer', 'type': 'object'}, 'deserializer': {'key': 'deserializer', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'file_pattern': {'key': 'filePattern', 'type': 'str'}, + 'file_pattern': {'key': 'filePattern', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'encoding_name': {'key': 'encodingName', 'type': 'object'}, 'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'}, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py index 3d4660d72e89..81ce26e5b657 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py @@ -19,26 +19,27 @@ class LinkedService(Model): You probably want to use the sub-classes and not this class directly. Known sub-classes are: AzureFunctionLinkedService, - AzureDataExplorerLinkedService, GoogleAdWordsLinkedService, - OracleServiceCloudLinkedService, DynamicsAXLinkedService, - ResponsysLinkedService, AzureDatabricksLinkedService, - AzureDataLakeAnalyticsLinkedService, HDInsightOnDemandLinkedService, - SalesforceMarketingCloudLinkedService, NetezzaLinkedService, - VerticaLinkedService, ZohoLinkedService, XeroLinkedService, - SquareLinkedService, SparkLinkedService, ShopifyLinkedService, - ServiceNowLinkedService, QuickBooksLinkedService, PrestoLinkedService, - PhoenixLinkedService, PaypalLinkedService, MarketoLinkedService, - MariaDBLinkedService, MagentoLinkedService, JiraLinkedService, - ImpalaLinkedService, HubspotLinkedService, HiveLinkedService, - HBaseLinkedService, GreenplumLinkedService, GoogleBigQueryLinkedService, - EloquaLinkedService, DrillLinkedService, CouchbaseLinkedService, - ConcurLinkedService, AzurePostgreSqlLinkedService, AmazonMWSLinkedService, - SapHanaLinkedService, SapBWLinkedService, SftpServerLinkedService, - FtpServerLinkedService, HttpLinkedService, AzureSearchLinkedService, - CustomDataSourceLinkedService, AmazonRedshiftLinkedService, - AmazonS3LinkedService, RestServiceLinkedService, SapOpenHubLinkedService, - SapEccLinkedService, SapCloudForCustomerLinkedService, - SalesforceLinkedService, Office365LinkedService, AzureBlobFSLinkedService, + AzureDataExplorerLinkedService, SapTableLinkedService, + GoogleAdWordsLinkedService, OracleServiceCloudLinkedService, + DynamicsAXLinkedService, ResponsysLinkedService, + AzureDatabricksLinkedService, AzureDataLakeAnalyticsLinkedService, + HDInsightOnDemandLinkedService, SalesforceMarketingCloudLinkedService, + NetezzaLinkedService, VerticaLinkedService, ZohoLinkedService, + XeroLinkedService, SquareLinkedService, SparkLinkedService, + ShopifyLinkedService, ServiceNowLinkedService, QuickBooksLinkedService, + PrestoLinkedService, PhoenixLinkedService, PaypalLinkedService, + MarketoLinkedService, MariaDBLinkedService, MagentoLinkedService, + JiraLinkedService, ImpalaLinkedService, HubspotLinkedService, + HiveLinkedService, 
HBaseLinkedService, GreenplumLinkedService, + GoogleBigQueryLinkedService, EloquaLinkedService, DrillLinkedService, + CouchbaseLinkedService, ConcurLinkedService, AzurePostgreSqlLinkedService, + AmazonMWSLinkedService, SapHanaLinkedService, SapBWLinkedService, + SftpServerLinkedService, FtpServerLinkedService, HttpLinkedService, + AzureSearchLinkedService, CustomDataSourceLinkedService, + AmazonRedshiftLinkedService, AmazonS3LinkedService, + RestServiceLinkedService, SapOpenHubLinkedService, SapEccLinkedService, + SapCloudForCustomerLinkedService, SalesforceLinkedService, + Office365LinkedService, AzureBlobFSLinkedService, AzureDataLakeStoreLinkedService, CosmosDbMongoDbApiLinkedService, MongoDbV2LinkedService, MongoDbLinkedService, CassandraLinkedService, WebLinkedService, ODataLinkedService, HdfsLinkedService, OdbcLinkedService, @@ -85,7 +86,7 @@ class LinkedService(Model): } _subtype_map = { - 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 
'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 
'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py index eadf4030e132..1ec0d17d24c5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py @@ -19,26 +19,27 @@ class LinkedService(Model): You probably want to use the sub-classes and not this class directly. Known sub-classes are: AzureFunctionLinkedService, - AzureDataExplorerLinkedService, GoogleAdWordsLinkedService, - OracleServiceCloudLinkedService, DynamicsAXLinkedService, - ResponsysLinkedService, AzureDatabricksLinkedService, - AzureDataLakeAnalyticsLinkedService, HDInsightOnDemandLinkedService, - SalesforceMarketingCloudLinkedService, NetezzaLinkedService, - VerticaLinkedService, ZohoLinkedService, XeroLinkedService, - SquareLinkedService, SparkLinkedService, ShopifyLinkedService, - ServiceNowLinkedService, QuickBooksLinkedService, PrestoLinkedService, - PhoenixLinkedService, PaypalLinkedService, MarketoLinkedService, - MariaDBLinkedService, MagentoLinkedService, JiraLinkedService, - ImpalaLinkedService, HubspotLinkedService, HiveLinkedService, - HBaseLinkedService, GreenplumLinkedService, GoogleBigQueryLinkedService, - EloquaLinkedService, DrillLinkedService, CouchbaseLinkedService, - ConcurLinkedService, AzurePostgreSqlLinkedService, AmazonMWSLinkedService, - SapHanaLinkedService, SapBWLinkedService, SftpServerLinkedService, - FtpServerLinkedService, HttpLinkedService, AzureSearchLinkedService, - CustomDataSourceLinkedService, AmazonRedshiftLinkedService, - AmazonS3LinkedService, RestServiceLinkedService, SapOpenHubLinkedService, - SapEccLinkedService, SapCloudForCustomerLinkedService, - SalesforceLinkedService, Office365LinkedService, AzureBlobFSLinkedService, + AzureDataExplorerLinkedService, SapTableLinkedService, + GoogleAdWordsLinkedService, OracleServiceCloudLinkedService, + DynamicsAXLinkedService, ResponsysLinkedService, + AzureDatabricksLinkedService, AzureDataLakeAnalyticsLinkedService, + HDInsightOnDemandLinkedService, SalesforceMarketingCloudLinkedService, + NetezzaLinkedService, VerticaLinkedService, ZohoLinkedService, + XeroLinkedService, SquareLinkedService, SparkLinkedService, + ShopifyLinkedService, ServiceNowLinkedService, QuickBooksLinkedService, + PrestoLinkedService, PhoenixLinkedService, PaypalLinkedService, + MarketoLinkedService, MariaDBLinkedService, MagentoLinkedService, + JiraLinkedService, ImpalaLinkedService, HubspotLinkedService, + HiveLinkedService, HBaseLinkedService, GreenplumLinkedService, + GoogleBigQueryLinkedService, EloquaLinkedService, DrillLinkedService, + CouchbaseLinkedService, ConcurLinkedService, AzurePostgreSqlLinkedService, + AmazonMWSLinkedService, SapHanaLinkedService, SapBWLinkedService, + SftpServerLinkedService, FtpServerLinkedService, HttpLinkedService, + 
AzureSearchLinkedService, CustomDataSourceLinkedService, + AmazonRedshiftLinkedService, AmazonS3LinkedService, + RestServiceLinkedService, SapOpenHubLinkedService, SapEccLinkedService, + SapCloudForCustomerLinkedService, SalesforceLinkedService, + Office365LinkedService, AzureBlobFSLinkedService, AzureDataLakeStoreLinkedService, CosmosDbMongoDbApiLinkedService, MongoDbV2LinkedService, MongoDbLinkedService, CassandraLinkedService, WebLinkedService, ODataLinkedService, HdfsLinkedService, OdbcLinkedService, @@ -85,7 +86,7 @@ class LinkedService(Model): } _subtype_map = { - 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 
'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 
'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} } def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py new file mode 100644 index 000000000000..ed970fd7729a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OraclePartitionSettings(Model): + """The settings that will be leveraged for oracle source partitioning. + + :param partition_names: Names of the physical partitions of oracle table. + :type partition_names: object + :param partition_column_name: The name of the column in integer type that + will be used for proceeding range partitioning. Type: string (or + Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_names': {'key': 'partitionNames', 'type': 'object'}, + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OraclePartitionSettings, self).__init__(**kwargs) + self.partition_names = kwargs.get('partition_names', None) + self.partition_column_name = kwargs.get('partition_column_name', None) + self.partition_upper_bound = kwargs.get('partition_upper_bound', None) + self.partition_lower_bound = kwargs.get('partition_lower_bound', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py new file mode 100644 index 000000000000..c3d00b09ad90 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OraclePartitionSettings(Model): + """The settings that will be leveraged for oracle source partitioning. + + :param partition_names: Names of the physical partitions of oracle table. + :type partition_names: object + :param partition_column_name: The name of the column in integer type that + will be used for proceeding range partitioning. Type: string (or + Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_names': {'key': 'partitionNames', 'type': 'object'}, + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__(self, *, partition_names=None, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None: + super(OraclePartitionSettings, self).__init__(**kwargs) + self.partition_names = partition_names + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py index 12b3aa31353f..84ad79ed19c7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py @@ -40,6 +40,13 @@ class OracleSource(CopySource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param partition_option: The partition mechanism that will be used for + oracle read in parallel. + :type partition_option: object + :param partition_settings: The settings that will be leveraged for oracle + source partitioning. 
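As a reviewer aid, a minimal sketch of how the new settings compose with OracleSource; the 'DynamicRange' value is an assumption drawn from the service documentation, since this patch types partition_option as an untyped object:

from azure.mgmt.datafactory.models import OraclePartitionSettings, OracleSource

# Split the Oracle read into parallel range queries over an integer column.
settings = OraclePartitionSettings(
    partition_column_name='ORDER_ID',  # hypothetical integer column driving the ranges
    partition_lower_bound='1',
    partition_upper_bound='1000000')

source = OracleSource(
    partition_option='DynamicRange',   # assumed service-side value; any object is accepted
    partition_settings=settings)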
+ :type partition_settings: + ~azure.mgmt.datafactory.models.OraclePartitionSettings """ _validation = { @@ -54,10 +61,14 @@ class OracleSource(CopySource): 'type': {'key': 'type', 'type': 'str'}, 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, } def __init__(self, **kwargs): super(OracleSource, self).__init__(**kwargs) self.oracle_reader_query = kwargs.get('oracle_reader_query', None) self.query_timeout = kwargs.get('query_timeout', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) self.type = 'OracleSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py index 43afe27fda2f..dfcbd2e0330d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py @@ -40,6 +40,13 @@ class OracleSource(CopySource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param partition_option: The partition mechanism that will be used for + oracle read in parallel. + :type partition_option: object + :param partition_settings: The settings that will be leveraged for oracle + source partitioning. + :type partition_settings: + ~azure.mgmt.datafactory.models.OraclePartitionSettings """ _validation = { @@ -54,10 +61,14 @@ class OracleSource(CopySource): 'type': {'key': 'type', 'type': 'str'}, 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, oracle_reader_query=None, query_timeout=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, oracle_reader_query=None, query_timeout=None, partition_option=None, partition_settings=None, **kwargs) -> None: super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.oracle_reader_query = oracle_reader_query self.query_timeout = query_timeout + self.partition_option = partition_option + self.partition_settings = partition_settings self.type = 'OracleSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset.py new file mode 100644 index 000000000000..ffaf8e1f6d93 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset.py @@ -0,0 +1,76 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class ParquetDataset(Dataset): + """Parquet dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the parquet storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param compression_codec: + :type compression_codec: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ParquetDataset, self).__init__(**kwargs) + self.location = kwargs.get('location', None) + self.compression_codec = kwargs.get('compression_codec', None) + self.type = 'Parquet' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset_py3.py new file mode 100644 index 000000000000..4d754450ce15 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset_py3.py @@ -0,0 +1,76 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class ParquetDataset(Dataset): + """Parquet dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the parquet storage. 
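For context, a minimal ParquetDataset sketch. AzureBlobStorageLocation is assumed to be one of the DatasetLocation subclasses generated alongside this model; substitute whichever location type matches your store:

from azure.mgmt.datafactory.models import (
    AzureBlobStorageLocation, LinkedServiceReference, ParquetDataset)

dataset = ParquetDataset(
    linked_service_name=LinkedServiceReference(reference_name='BlobStorageLS'),
    # Serialized under typeProperties.location per the attribute map below.
    location=AzureBlobStorageLocation(
        folder_path='raw/orders', file_name='orders.parquet'),
    compression_codec='snappy')  # assumed codec name; the property is an untyped object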
+ :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param compression_codec: + :type compression_codec: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, compression_codec=None, **kwargs) -> None: + super(ParquetDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.location = location + self.compression_codec = compression_codec + self.type = 'Parquet' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py new file mode 100644 index 000000000000..38c634ed10dd --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class ParquetSink(CopySink): + """A copy activity Parquet sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. 
Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Parquet store settings. + :type store_settings: ~azure.mgmt.datafactory.models.ConnectorWriteSetting + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorWriteSetting'}, + } + + def __init__(self, **kwargs): + super(ParquetSink, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.type = 'ParquetSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py new file mode 100644 index 000000000000..96c0c1b57926 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class ParquetSink(CopySink): + """A copy activity Parquet sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Parquet store settings. 
+ :type store_settings: ~azure.mgmt.datafactory.models.ConnectorWriteSetting + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorWriteSetting'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.type = 'ParquetSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py new file mode 100644 index 000000000000..02e74641d506 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class ParquetSource(CopySource): + """A copy activity Parquet source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Parquet store settings. 
+ :type store_settings: ~azure.mgmt.datafactory.models.ConnectorReadSetting + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorReadSetting'}, + } + + def __init__(self, **kwargs): + super(ParquetSource, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.type = 'ParquetSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py new file mode 100644 index 000000000000..bfe077dd9999 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class ParquetSource(CopySource): + """A copy activity Parquet source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Parquet store settings. 
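To show the read and write halves together, a copy-activity sketch with placeholder dataset references:

from azure.mgmt.datafactory.models import (
    CopyActivity, DatasetReference, ParquetSink, ParquetSource)

copy = CopyActivity(
    name='CopyParquet',
    inputs=[DatasetReference(reference_name='ParquetIn')],    # hypothetical dataset names
    outputs=[DatasetReference(reference_name='ParquetOut')],
    source=ParquetSource(),  # store_settings can carry a ConnectorReadSetting if needed
    sink=ParquetSink(max_concurrent_connections=4))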
+ :type store_settings: ~azure.mgmt.datafactory.models.ConnectorReadSetting + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorReadSetting'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + super(ParquetSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.type = 'ParquetSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py index 9a1291bd4bfe..4d1a93c08915 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py @@ -41,9 +41,8 @@ class SalesforceSink(CopySink): :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. Default is - Insert. Possible values include: 'Insert', 'Upsert' - :type write_behavior: str or - ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior + Insert. + :type write_behavior: object :param external_id_field_name: The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). @@ -71,7 +70,7 @@ class SalesforceSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py index 54a56618d01e..ed7591fbb59b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py @@ -41,9 +41,8 @@ class SalesforceSink(CopySink): :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. Default is - Insert. Possible values include: 'Insert', 'Upsert' - :type write_behavior: str or - ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior + Insert. + :type write_behavior: object :param external_id_field_name: The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). 
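Because write_behavior is loosened from an enum to an object here, the former enum strings remain the valid service values; a sketch of an upsert configuration:

from azure.mgmt.datafactory.models import SalesforceSink

sink = SalesforceSink(
    write_behavior='Upsert',                  # 'Insert' (default) and 'Upsert' per the removed enum
    external_id_field_name='External_Id__c',  # hypothetical external ID field
    ignore_null_values=True)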
@@ -71,7 +70,7 @@ class SalesforceSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py index 4f2590c3ab9d..57a10411f487 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py @@ -37,9 +37,8 @@ class SalesforceSource(CopySource): string). :type query: object :param read_behavior: The read behavior for the operation. Default is - Query. Possible values include: 'Query', 'QueryAll' - :type read_behavior: str or - ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior + Query. + :type read_behavior: object """ _validation = { @@ -53,7 +52,7 @@ class SalesforceSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, - 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, + 'read_behavior': {'key': 'readBehavior', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py index 4441e92eaff3..08e6776f5f98 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py @@ -37,9 +37,8 @@ class SalesforceSource(CopySource): string). :type query: object :param read_behavior: The read behavior for the operation. Default is - Query. Possible values include: 'Query', 'QueryAll' - :type read_behavior: str or - ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior + Query. + :type read_behavior: object """ _validation = { @@ -53,7 +52,7 @@ class SalesforceSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, - 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, + 'read_behavior': {'key': 'readBehavior', 'type': 'object'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, read_behavior=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py index e5a37858abb5..ae99093f277e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py @@ -41,9 +41,8 @@ class SapCloudForCustomerSink(CopySink): :param type: Required. 
Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. Default is - 'Insert'. Possible values include: 'Insert', 'Update' - :type write_behavior: str or - ~azure.mgmt.datafactory.models.SapCloudForCustomerSinkWriteBehavior + 'Insert'. + :type write_behavior: object """ _validation = { @@ -58,7 +57,7 @@ class SapCloudForCustomerSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py index 29f01fdd6891..bdbc2cefcbd1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py @@ -41,9 +41,8 @@ class SapCloudForCustomerSink(CopySink): :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. Default is - 'Insert'. Possible values include: 'Insert', 'Update' - :type write_behavior: str or - ~azure.mgmt.datafactory.models.SapCloudForCustomerSinkWriteBehavior + 'Insert'. + :type write_behavior: object """ _validation = { @@ -58,7 +57,7 @@ class SapCloudForCustomerSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py index e4f10113aecd..f79367f49b3d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py @@ -45,7 +45,7 @@ class SapEccResourceDataset(Dataset): :type type: str :param path: Required. The path of the SAP ECC OData entity. Type: string (or Expression with resultType string). 
- :type path: str + :type path: object """ _validation = { @@ -64,7 +64,7 @@ class SapEccResourceDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, - 'path': {'key': 'typeProperties.path', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py index 08bf742dc415..76aaeb9bb9f2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py @@ -45,7 +45,7 @@ class SapEccResourceDataset(Dataset): :type type: str :param path: Required. The path of the SAP ECC OData entity. Type: string (or Expression with resultType string). - :type path: str + :type path: object """ _validation = { @@ -64,10 +64,10 @@ class SapEccResourceDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, - 'path': {'key': 'typeProperties.path', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, } - def __init__(self, *, linked_service_name, path: str, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, path, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: super(SapEccResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.path = path self.type = 'SapEccResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py index 6a2d17862d6b..6379c33713d4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py @@ -35,7 +35,7 @@ class SapEccSource(CopySource): :type type: str :param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). 
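The same str-to-object loosening applies to the dataset path above, which is what makes it parameterizable; a sketch using the standard ADF expression wrapper:

from azure.mgmt.datafactory.models import LinkedServiceReference, SapEccResourceDataset

dataset = SapEccResourceDataset(
    linked_service_name=LinkedServiceReference(reference_name='SapEccLS'),
    # Previously constrained to str; now any object, including an expression:
    path={'value': '@dataset().entityPath', 'type': 'Expression'})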
- :type query: str + :type query: object """ _validation = { @@ -48,7 +48,7 @@ class SapEccSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py index 95a11500bd24..4412cac39960 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py @@ -35,7 +35,7 @@ class SapEccSource(CopySource): :type type: str :param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). - :type query: str + :type query: object """ _validation = { @@ -48,10 +48,10 @@ class SapEccSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query: str=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'SapEccSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py index 391bd79f8c28..14eda87b7be6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py @@ -33,6 +33,9 @@ class SapHanaLinkedService(LinkedService): :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str + :param connection_string: SAP HANA ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object :param server: Required. Host name of the SAP HANA server. Type: string (or Expression with resultType string). 
:type server: object @@ -63,6 +66,7 @@ class SapHanaLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'server': {'key': 'typeProperties.server', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, @@ -72,6 +76,7 @@ class SapHanaLinkedService(LinkedService): def __init__(self, **kwargs): super(SapHanaLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) self.server = kwargs.get('server', None) self.authentication_type = kwargs.get('authentication_type', None) self.user_name = kwargs.get('user_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py index bbf307d1bede..de378a5b2bf3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py @@ -33,6 +33,9 @@ class SapHanaLinkedService(LinkedService): :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str + :param connection_string: SAP HANA ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object :param server: Required. Host name of the SAP HANA server. Type: string (or Expression with resultType string). 
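A sketch of the new connection-string path; the driver string is illustrative, and server remains a required constructor argument even when a connection string is supplied:

from azure.mgmt.datafactory.models import SapHanaLinkedService, SecureString

linked_service = SapHanaLinkedService(
    server='hanahost:30015',  # still required by the generated signature
    connection_string=SecureString(
        value='DRIVER={HDBODBC};SERVERNODE=hanahost:30015'))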
:type server: object @@ -63,6 +66,7 @@ class SapHanaLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'server': {'key': 'typeProperties.server', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, @@ -70,8 +74,9 @@ class SapHanaLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, *, server, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + def __init__(self, *, server, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: super(SapHanaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string self.server = server self.authentication_type = authentication_type self.user_name = user_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source.py new file mode 100644 index 000000000000..e946dbcd9a50 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source.py @@ -0,0 +1,62 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SapHanaSource(CopySource): + """A copy activity source for SAP HANA source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: SAP HANA Sql query. Type: string (or Expression with + resultType string). + :type query: object + :param packet_size: The packet size of data read from SAP HANA. 
Type: + integer(or Expression with resultType integer). + :type packet_size: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'packet_size': {'key': 'packetSize', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapHanaSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.packet_size = kwargs.get('packet_size', None) + self.type = 'SapHanaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source_py3.py new file mode 100644 index 000000000000..730326c19183 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source_py3.py @@ -0,0 +1,62 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SapHanaSource(CopySource): + """A copy activity source for SAP HANA source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: SAP HANA Sql query. Type: string (or Expression with + resultType string). + :type query: object + :param packet_size: The packet size of data read from SAP HANA. Type: + integer(or Expression with resultType integer). 
+ :type packet_size: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'packet_size': {'key': 'packetSize', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, packet_size=None, **kwargs) -> None: + super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.packet_size = packet_size + self.type = 'SapHanaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset.py new file mode 100644 index 000000000000..6ff1ae31cd22 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class SapHanaTableDataset(Dataset): + """SAP HANA Table properties. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param sap_hana_table_dataset_schema: The schema name of SAP HANA. Type: + string (or Expression with resultType string). + :type sap_hana_table_dataset_schema: object + :param table: The table name of SAP HANA. 
Type: string (or Expression with + resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sap_hana_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapHanaTableDataset, self).__init__(**kwargs) + self.sap_hana_table_dataset_schema = kwargs.get('sap_hana_table_dataset_schema', None) + self.table = kwargs.get('table', None) + self.type = 'SapHanaTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset_py3.py new file mode 100644 index 000000000000..6dc5c48ba21d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SapHanaTableDataset(Dataset): + """SAP HANA Table properties. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param sap_hana_table_dataset_schema: The schema name of SAP HANA. 
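Note the generator renames this type property to sap_hana_table_dataset_schema so it does not collide with the base Dataset.schema, while still serializing to typeProperties.schema; a sketch with hypothetical HANA names:

from azure.mgmt.datafactory.models import LinkedServiceReference, SapHanaTableDataset

dataset = SapHanaTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='SapHanaLS'),
    sap_hana_table_dataset_schema='SAPABAP1',  # serialized as typeProperties.schema
    table='MARA')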
Type: + string (or Expression with resultType string). + :type sap_hana_table_dataset_schema: object + :param table: The table name of SAP HANA. Type: string (or Expression with + resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sap_hana_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, sap_hana_table_dataset_schema=None, table=None, **kwargs) -> None: + super(SapHanaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.sap_hana_table_dataset_schema = sap_hana_table_dataset_schema + self.table = table + self.type = 'SapHanaTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service.py new file mode 100644 index 000000000000..83b76d0a4fdd --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service.py @@ -0,0 +1,140 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class SapTableLinkedService(LinkedService): + """SAP Table Linked Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Host name of the SAP instance where the table is located. 
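As a usage sketch for the SapHanaTableDataset generated above (the linked service name, schema, and table values below are illustrative, not defined anywhere in this patch):

from azure.mgmt.datafactory.models import (
    LinkedServiceReference,
    SapHanaTableDataset,
)

# Reference an existing SAP HANA linked service by name (hypothetical name).
hana_ls = LinkedServiceReference(reference_name='ExampleSapHanaLinkedService')

# typeProperties.schema and typeProperties.table are typed as 'object', so
# plain strings or ADF expression objects are both accepted; plain strings
# are used here.
dataset = SapHanaTableDataset(
    linked_service_name=hana_ls,
    sap_hana_table_dataset_schema='SAPABAP1',  # illustrative schema name
    table='MARA',                              # illustrative table name
)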
+ Type: string (or Expression with resultType string). + :type server: object + :param system_number: System number of the SAP system where the table is + located. (Usually a two-digit decimal number represented as a string.) + Type: string (or Expression with resultType string). + :type system_number: object + :param client_id: Client ID of the client on the SAP system where the + table is located. (Usually a three-digit decimal number represented as a + string.) Type: string (or Expression with resultType string). + :type client_id: object + :param language: Language of the SAP system where the table is located. + The default value is EN. Type: string (or Expression with resultType + string). + :type language: object + :param system_id: SystemID of the SAP system where the table is located. + Type: string (or Expression with resultType string). + :type system_id: object + :param user_name: Username to access the SAP server where the table is + located. Type: string (or Expression with resultType string). + :type user_name: object + :param password: Password to access the SAP server where the table is + located. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param message_server: The hostname of the SAP Message Server. Type: + string (or Expression with resultType string). + :type message_server: object + :param message_server_service: The service name or port number of the + Message Server. Type: string (or Expression with resultType string). + :type message_server_service: object + :param snc_mode: SNC activation indicator to access the SAP server where + the table is located. Must be either 0 (off) or 1 (on). Type: string (or + Expression with resultType string). + :type snc_mode: object + :param snc_my_name: Initiator's SNC name to access the SAP server where + the table is located. Type: string (or Expression with resultType string). + :type snc_my_name: object + :param snc_partner_name: Communication partner's SNC name to access the + SAP server where the table is located. Type: string (or Expression with + resultType string). + :type snc_partner_name: object + :param snc_library_path: External security product's library to access the + SAP server where the table is located. Type: string (or Expression with + resultType string). + :type snc_library_path: object + :param snc_qop: SNC Quality of Protection. Allowed values include: 1, 2, 3, + 8, 9. Type: string (or Expression with resultType string). + :type snc_qop: object + :param logon_group: The Logon Group for the SAP System. Type: string (or + Expression with resultType string). + :type logon_group: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'language': {'key': 'typeProperties.language', 'type': 'object'}, + 'system_id': {'key': 'typeProperties.systemId', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'message_server': {'key': 'typeProperties.messageServer', 'type': 'object'}, + 'message_server_service': {'key': 'typeProperties.messageServerService', 'type': 'object'}, + 'snc_mode': {'key': 'typeProperties.sncMode', 'type': 'object'}, + 'snc_my_name': {'key': 'typeProperties.sncMyName', 'type': 'object'}, + 'snc_partner_name': {'key': 'typeProperties.sncPartnerName', 'type': 'object'}, + 'snc_library_path': {'key': 'typeProperties.sncLibraryPath', 'type': 'object'}, + 'snc_qop': {'key': 'typeProperties.sncQop', 'type': 'object'}, + 'logon_group': {'key': 'typeProperties.logonGroup', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapTableLinkedService, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.system_number = kwargs.get('system_number', None) + self.client_id = kwargs.get('client_id', None) + self.language = kwargs.get('language', None) + self.system_id = kwargs.get('system_id', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.message_server = kwargs.get('message_server', None) + self.message_server_service = kwargs.get('message_server_service', None) + self.snc_mode = kwargs.get('snc_mode', None) + self.snc_my_name = kwargs.get('snc_my_name', None) + self.snc_partner_name = kwargs.get('snc_partner_name', None) + self.snc_library_path = kwargs.get('snc_library_path', None) + self.snc_qop = kwargs.get('snc_qop', None) + self.logon_group = kwargs.get('logon_group', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'SapTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service_py3.py new file mode 100644 index 000000000000..d098acc1bbda --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service_py3.py @@ -0,0 +1,140 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
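A minimal construction sketch for the SapTableLinkedService defined above (host, system number, client, and credentials are illustrative values; SecureString is the SDK's in-line secret type):

from azure.mgmt.datafactory.models import SapTableLinkedService, SecureString

linked_service = SapTableLinkedService(
    server='sapecc.contoso.com',   # illustrative application server host
    system_number='00',            # two-digit system number, as a string
    client_id='100',               # three-digit client, as a string
    language='EN',
    user_name='sap_user',
    password=SecureString(value='<secret>'),
    snc_mode='0',                  # SNC off; '1' would enable it
)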
+# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SapTableLinkedService(LinkedService): + """SAP Table Linked Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Host name of the SAP instance where the table is located. + Type: string (or Expression with resultType string). + :type server: object + :param system_number: System number of the SAP system where the table is + located. (Usually a two-digit decimal number represented as a string.) + Type: string (or Expression with resultType string). + :type system_number: object + :param client_id: Client ID of the client on the SAP system where the + table is located. (Usually a three-digit decimal number represented as a + string.) Type: string (or Expression with resultType string). + :type client_id: object + :param language: Language of the SAP system where the table is located. + The default value is EN. Type: string (or Expression with resultType + string). + :type language: object + :param system_id: SystemID of the SAP system where the table is located. + Type: string (or Expression with resultType string). + :type system_id: object + :param user_name: Username to access the SAP server where the table is + located. Type: string (or Expression with resultType string). + :type user_name: object + :param password: Password to access the SAP server where the table is + located. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param message_server: The hostname of the SAP Message Server. Type: + string (or Expression with resultType string). + :type message_server: object + :param message_server_service: The service name or port number of the + Message Server. Type: string (or Expression with resultType string). + :type message_server_service: object + :param snc_mode: SNC activation indicator to access the SAP server where + the table is located. Must be either 0 (off) or 1 (on). Type: string (or + Expression with resultType string). + :type snc_mode: object + :param snc_my_name: Initiator's SNC name to access the SAP server where + the table is located. Type: string (or Expression with resultType string). + :type snc_my_name: object + :param snc_partner_name: Communication partner's SNC name to access the + SAP server where the table is located. Type: string (or Expression with + resultType string). + :type snc_partner_name: object + :param snc_library_path: External security product's library to access the + SAP server where the table is located. Type: string (or Expression with + resultType string). + :type snc_library_path: object + :param snc_qop: SNC Quality of Protection. Allowed values include: 1, 2, 3, + 8, 9. Type: string (or Expression with resultType string).
+ :type snc_qop: object + :param logon_group: The Logon Group for the SAP System. Type: string (or + Expression with resultType string). + :type logon_group: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'language': {'key': 'typeProperties.language', 'type': 'object'}, + 'system_id': {'key': 'typeProperties.systemId', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'message_server': {'key': 'typeProperties.messageServer', 'type': 'object'}, + 'message_server_service': {'key': 'typeProperties.messageServerService', 'type': 'object'}, + 'snc_mode': {'key': 'typeProperties.sncMode', 'type': 'object'}, + 'snc_my_name': {'key': 'typeProperties.sncMyName', 'type': 'object'}, + 'snc_partner_name': {'key': 'typeProperties.sncPartnerName', 'type': 'object'}, + 'snc_library_path': {'key': 'typeProperties.sncLibraryPath', 'type': 'object'}, + 'snc_qop': {'key': 'typeProperties.sncQop', 'type': 'object'}, + 'logon_group': {'key': 'typeProperties.logonGroup', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, server=None, system_number=None, client_id=None, language=None, system_id=None, user_name=None, password=None, message_server=None, message_server_service=None, snc_mode=None, snc_my_name=None, snc_partner_name=None, snc_library_path=None, snc_qop=None, logon_group=None, encrypted_credential=None, **kwargs) -> None: + super(SapTableLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.server = server + self.system_number = system_number + self.client_id = client_id + self.language = language + self.system_id = system_id + self.user_name = user_name + self.password = password + self.message_server = message_server + self.message_server_service = message_server_service + self.snc_mode = snc_mode + self.snc_my_name = snc_my_name + self.snc_partner_name = snc_partner_name + self.snc_library_path = snc_library_path + self.snc_qop = snc_qop + self.logon_group = logon_group + self.encrypted_credential = encrypted_credential + self.type = 'SapTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings.py new file mode 100644 index 
000000000000..b688fe16683b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SapTablePartitionSettings(Model): + """The settings that will be leveraged for SAP table source partitioning. + + :param partition_column_name: The name of the column that will be used for + proceeding range partitioning. Type: string (or Expression with resultType + string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_lower_bound: object + :param max_partitions_number: The maximum number of partitions the table + will be split into. Type: integer (or Expression with resultType integer). + :type max_partitions_number: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + 'max_partitions_number': {'key': 'maxPartitionsNumber', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapTablePartitionSettings, self).__init__(**kwargs) + self.partition_column_name = kwargs.get('partition_column_name', None) + self.partition_upper_bound = kwargs.get('partition_upper_bound', None) + self.partition_lower_bound = kwargs.get('partition_lower_bound', None) + self.max_partitions_number = kwargs.get('max_partitions_number', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings_py3.py new file mode 100644 index 000000000000..37bdf610ab35 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings_py3.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SapTablePartitionSettings(Model): + """The settings that will be leveraged for SAP table source partitioning.
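A sketch of the partition settings above, splitting a read by a date column into at most four partitions (the column name and bounds are illustrative):

from azure.mgmt.datafactory.models import SapTablePartitionSettings

partition_settings = SapTablePartitionSettings(
    partition_column_name='ERDAT',     # illustrative date column
    partition_lower_bound='20190101',  # bounds are passed as strings
    partition_upper_bound='20191231',
    max_partitions_number=4,
)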
+ + :param partition_column_name: The name of the column that will be used for + proceeding range partitioning. Type: string (or Expression with resultType + string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_lower_bound: object + :param max_partitions_number: The maximum number of partitions the table + will be split into. Type: integer (or Expression with resultType integer). + :type max_partitions_number: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + 'max_partitions_number': {'key': 'maxPartitionsNumber', 'type': 'object'}, + } + + def __init__(self, *, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, max_partitions_number=None, **kwargs) -> None: + super(SapTablePartitionSettings, self).__init__(**kwargs) + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound + self.max_partitions_number = max_partitions_number diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset.py new file mode 100644 index 000000000000..24601ba6b793 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class SapTableResourceDataset(Dataset): + """SAP Table Resource properties. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset.
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: Required. The name of the SAP Table. Type: string (or + Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'table_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapTableResourceDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'SapTableResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset_py3.py new file mode 100644 index 000000000000..7b034ccd3a91 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SapTableResourceDataset(Dataset): + """SAP Table Resource properties. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
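For the SapTableResourceDataset above, table_name is the only type-specific property and is required; a minimal sketch, with an illustrative linked service name:

from azure.mgmt.datafactory.models import (
    LinkedServiceReference,
    SapTableResourceDataset,
)

dataset = SapTableResourceDataset(
    linked_service_name=LinkedServiceReference(
        reference_name='ExampleSapTableLinkedService'),  # illustrative name
    table_name='MARA',  # required: the SAP table to expose as a dataset
)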
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: Required. The name of the SAP Table. Type: string (or + Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'table_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(SapTableResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'SapTableResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source.py new file mode 100644 index 000000000000..1c52db3eb0f8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source.py @@ -0,0 +1,97 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SapTableSource(CopySource): + """A copy activity source for SAP Table source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param row_count: The number of rows to be retrieved. Type: integer (or + Expression with resultType integer). + :type row_count: object + :param row_skips: The number of rows that will be skipped. Type: integer + (or Expression with resultType integer). + :type row_skips: object + :param rfc_table_fields: The fields of the SAP table that will be + retrieved. For example, column0, column1. Type: string (or Expression with + resultType string). + :type rfc_table_fields: object + :param rfc_table_options: The options for the filtering of the SAP Table. + For example, COLUMN0 EQ SOME VALUE. Type: string (or Expression with + resultType string). + :type rfc_table_options: object + :param batch_size: Specifies the maximum number of rows that will be + retrieved at a time when retrieving data from SAP Table. Type: integer (or + Expression with resultType integer). + :type batch_size: object + :param custom_rfc_read_table_function_module: Specifies the custom RFC + function module that will be used to read data from SAP Table. Type: + string (or Expression with resultType string). + :type custom_rfc_read_table_function_module: object + :param partition_option: The partition mechanism that will be used for SAP + table read in parallel. + :type partition_option: object + :param partition_settings: The settings that will be leveraged for SAP + table source partitioning. + :type partition_settings: + ~azure.mgmt.datafactory.models.SapTablePartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'row_count': {'key': 'rowCount', 'type': 'object'}, + 'row_skips': {'key': 'rowSkips', 'type': 'object'}, + 'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'}, + 'rfc_table_options': {'key': 'rfcTableOptions', 'type': 'object'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, + } + + def __init__(self, **kwargs): + super(SapTableSource, self).__init__(**kwargs) + self.row_count = kwargs.get('row_count', None) + self.row_skips = kwargs.get('row_skips', None) + self.rfc_table_fields = kwargs.get('rfc_table_fields', None) + self.rfc_table_options = kwargs.get('rfc_table_options', None) + self.batch_size = kwargs.get('batch_size', None) + self.custom_rfc_read_table_function_module = kwargs.get('custom_rfc_read_table_function_module', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) + self.type = 'SapTableSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source_py3.py
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source_py3.py new file mode 100644 index 000000000000..8e8fbdf12002 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source_py3.py @@ -0,0 +1,97 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SapTableSource(CopySource): + """A copy activity source for SAP Table source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param row_count: The number of rows to be retrieved. Type: integer (or + Expression with resultType integer). + :type row_count: object + :param row_skips: The number of rows that will be skipped. Type: integer + (or Expression with resultType integer). + :type row_skips: object + :param rfc_table_fields: The fields of the SAP table that will be + retrieved. For example, column0, column1. Type: string (or Expression with + resultType string). + :type rfc_table_fields: object + :param rfc_table_options: The options for the filtering of the SAP Table. + For example, COLUMN0 EQ SOME VALUE. Type: string (or Expression with + resultType string). + :type rfc_table_options: object + :param batch_size: Specifies the maximum number of rows that will be + retrieved at a time when retrieving data from SAP Table. Type: integer (or + Expression with resultType integer). + :type batch_size: object + :param custom_rfc_read_table_function_module: Specifies the custom RFC + function module that will be used to read data from SAP Table. Type: + string (or Expression with resultType string). + :type custom_rfc_read_table_function_module: object + :param partition_option: The partition mechanism that will be used for SAP + table read in parallel. + :type partition_option: object + :param partition_settings: The settings that will be leveraged for SAP + table source partitioning.
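A sketch combining the SapTableSource above with the partition settings model from earlier in this patch. The filter, column, and bounds are illustrative; partition_option is typed as object, so the option name is passed as a string ('PartitionOnCalendarDate' is an assumed option value, not defined in this file):

from azure.mgmt.datafactory.models import (
    SapTablePartitionSettings,
    SapTableSource,
)

source = SapTableSource(
    rfc_table_options="ERDAT GE '20190101'",     # illustrative row filter
    batch_size=10000,                            # rows fetched per RFC call
    partition_option='PartitionOnCalendarDate',  # assumed option name
    partition_settings=SapTablePartitionSettings(
        partition_column_name='ERDAT',
        partition_lower_bound='20190101',
        partition_upper_bound='20191231',
        max_partitions_number=4,
    ),
)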
+ :type partition_settings: + ~azure.mgmt.datafactory.models.SapTablePartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'row_count': {'key': 'rowCount', 'type': 'object'}, + 'row_skips': {'key': 'rowSkips', 'type': 'object'}, + 'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'}, + 'rfc_table_options': {'key': 'rfcTableOptions', 'type': 'object'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, row_count=None, row_skips=None, rfc_table_fields=None, rfc_table_options=None, batch_size=None, custom_rfc_read_table_function_module=None, partition_option=None, partition_settings=None, **kwargs) -> None: + super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.row_count = row_count + self.row_skips = row_skips + self.rfc_table_fields = rfc_table_fields + self.rfc_table_options = rfc_table_options + self.batch_size = batch_size + self.custom_rfc_read_table_function_module = custom_rfc_read_table_function_module + self.partition_option = partition_option + self.partition_settings = partition_settings + self.type = 'SapTableSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location.py new file mode 100644 index 000000000000..5b8fd4e42ba2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class SftpLocation(DatasetLocation): + """The location of SFTP dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). 
+ :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SftpLocation, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location_py3.py new file mode 100644 index 000000000000..c5e2feafa971 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class SftpLocation(DatasetLocation): + """The location of SFTP dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(SftpLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting.py new file mode 100644 index 000000000000..e0cd7ea8fda1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting import ConnectorReadSetting + + +class SftpReadSetting(ConnectorReadSetting): + """Sftp read settings. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Sftp wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SftpReadSetting, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting_py3.py new file mode 100644 index 000000000000..39beb756905a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting_py3.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting_py3 import ConnectorReadSetting + + +class SftpReadSetting(ConnectorReadSetting): + """Sftp read settings. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Sftp wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(SftpReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink.py new file mode 100644 index 000000000000..45b1f1273903 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class SqlServerSink(CopySink): + """A copy activity SQL server sink. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). 
+ :type stored_procedure_table_type_parameter_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlServerSink, self).__init__(**kwargs) + self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) + self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.type = 'SqlServerSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink_py3.py new file mode 100644 index 000000000000..dbe1bf44e418 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink_py3.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class SqlServerSink(CopySink): + """A copy activity SQL server sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
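A sketch of the SqlServerSink above using the stored-procedure write path (the procedure, table type, and parameter names are illustrative):

from azure.mgmt.datafactory.models import (
    SqlServerSink,
    StoredProcedureParameter,
)

sink = SqlServerSink(
    sql_writer_stored_procedure_name='spOverwriteMarketing',  # illustrative
    sql_writer_table_type='MarketingType',
    stored_procedure_table_type_parameter_name='Marketing',
    stored_procedure_parameters={
        'category': StoredProcedureParameter(value='ProductA', type='String'),
    },
)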
+ :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None: + super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name + self.sql_writer_table_type = sql_writer_table_type + self.pre_copy_script = pre_copy_script + self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.type = 'SqlServerSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source.py new file mode 100644 index 000000000000..f9aa011047ea --- /dev/null +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SqlServerSource(CopySource): + """A copy activity SQL server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Database source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. 
+ :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlServerSource, self).__init__(**kwargs) + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.produce_additional_types = kwargs.get('produce_additional_types', None) + self.type = 'SqlServerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source_py3.py new file mode 100644 index 000000000000..27d12985e595 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SqlServerSource(CopySource): + """A copy activity SQL server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Database source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). 
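A hedged sketch of the new SqlServerSource model: sqlReaderQuery and sqlReaderStoredProcedureName cannot be combined, so each variant below sets only one of them (query, procedure, and parameter names are illustrative):

from azure.mgmt.datafactory.models import SqlServerSource, StoredProcedureParameter

# Variant 1: query-based read.
query_source = SqlServerSource(
    sql_reader_query='SELECT Id, Name FROM dbo.Customers',
)

# Variant 2: stored-procedure read with typed parameters.
proc_source = SqlServerSource(
    sql_reader_stored_procedure_name='spGetCustomers',
    stored_procedure_parameters={
        'Region': StoredProcedureParameter(value='EMEA', type='String'),
    },
)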
+ :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. + :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: + super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.produce_additional_types = produce_additional_types + self.type = 'SqlServerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py index c0bc0b66a5e2..d50540de4704 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py @@ -43,15 +43,14 @@ class SqlServerTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: Required. The table name of the SQL Server dataset. - Type: string (or Expression with resultType string). + :param table_name: The table name of the SQL Server dataset. Type: string + (or Expression with resultType string). 
:type table_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'table_name': {'required': True}, } _attribute_map = { diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py index 0fb8d10ea111..bc8d4bec92e0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py @@ -43,15 +43,14 @@ class SqlServerTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: Required. The table name of the SQL Server dataset. - Type: string (or Expression with resultType string). + :param table_name: The table name of the SQL Server dataset. Type: string + (or Expression with resultType string). :type table_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, - 'table_name': {'required': True}, } _attribute_map = { @@ -67,7 +66,7 @@ class SqlServerTableDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: super(SqlServerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name self.type = 'SqlServerTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py index 9c2ebd2b389f..7ec0313aab4b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py @@ -52,6 +52,10 @@ class SqlSink(CopySink): :param stored_procedure_parameters: SQL stored procedure parameters. :type stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). 
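Since tableName is no longer required, a SqlServerTableDataset can now be declared against a linked service alone, leaving the table to be supplied later (for example through parameters); a sketch with a hypothetical linked service name:

from azure.mgmt.datafactory.models import (
    LinkedServiceReference,
    SqlServerTableDataset,
)

# Valid after this change: no table_name at construction time.
dataset = SqlServerTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='MySqlServerLS'),
)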
+ :type stored_procedure_table_type_parameter_name: object """ _validation = { @@ -70,6 +74,7 @@ class SqlSink(CopySink): 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, } def __init__(self, **kwargs): @@ -78,4 +83,5 @@ def __init__(self, **kwargs): self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) self.pre_copy_script = kwargs.get('pre_copy_script', None) self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) self.type = 'SqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py index 115cc3a899e9..1f6bb9685082 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py @@ -52,6 +52,10 @@ class SqlSink(CopySink): :param stored_procedure_parameters: SQL stored procedure parameters. :type stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object """ _validation = { @@ -70,12 +74,14 @@ class SqlSink(CopySink): 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None: super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type self.pre_copy_script = pre_copy_script self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name self.type = 'SqlSink' diff --git 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py index 748cf7cba53c..ff16595aa8c7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py @@ -19,7 +19,7 @@ class StoredProcedureParameter(Model): Expression with resultType string). :type value: object :param type: Stored procedure parameter type. Possible values include: - 'String', 'Int', 'Decimal', 'Guid', 'Boolean', 'Date' + 'String', 'Int', 'Int64', 'Decimal', 'Guid', 'Boolean', 'Date' :type type: str or ~azure.mgmt.datafactory.models.StoredProcedureParameterType """ diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py index bd967ce52876..2842ef9ae35c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py @@ -19,7 +19,7 @@ class StoredProcedureParameter(Model): Expression with resultType string). :type value: object :param type: Stored procedure parameter type. Possible values include: - 'String', 'Int', 'Decimal', 'Guid', 'Boolean', 'Date' + 'String', 'Int', 'Int64', 'Decimal', 'Guid', 'Boolean', 'Date' :type type: str or ~azure.mgmt.datafactory.models.StoredProcedureParameterType """ diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator.py deleted file mode 100644 index 043c537ad860..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_translator import CopyTranslator - - -class TabularTranslator(CopyTranslator): - """A copy activity tabular translator. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. - :type type: str - :param column_mappings: Column mappings. Example: "UserId: MyUserId, - Group: MyGroup, Name: MyName" Type: string (or Expression with resultType - string). - :type column_mappings: object - :param schema_mapping: The schema mapping to map between tabular data and - hierarchical data. Example: {"Column1": "$.Column1", "Column2": - "$.Column2.Property1", "Column3": "$.Column2.Property2"}. Type: object (or - Expression with resultType object). 
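The enum change above adds 'Int64'; a one-line sketch of a parameter that needs it because its value exceeds the 32-bit range:

from azure.mgmt.datafactory.models import StoredProcedureParameter

# 9223372036854775807 is the signed 64-bit maximum; 'Int' would be too narrow.
param = StoredProcedureParameter(value='9223372036854775807', type='Int64')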
- :type schema_mapping: object - :param collection_reference: The JSON Path of the Nested Array that is - going to do cross-apply. Type: object (or Expression with resultType - object). - :type collection_reference: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'column_mappings': {'key': 'columnMappings', 'type': 'object'}, - 'schema_mapping': {'key': 'schemaMapping', 'type': 'object'}, - 'collection_reference': {'key': 'collectionReference', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(TabularTranslator, self).__init__(**kwargs) - self.column_mappings = kwargs.get('column_mappings', None) - self.schema_mapping = kwargs.get('schema_mapping', None) - self.collection_reference = kwargs.get('collection_reference', None) - self.type = 'TabularTranslator' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator_py3.py deleted file mode 100644 index cb1c11e5bb53..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_translator_py3 import CopyTranslator - - -class TabularTranslator(CopyTranslator): - """A copy activity tabular translator. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. - :type type: str - :param column_mappings: Column mappings. Example: "UserId: MyUserId, - Group: MyGroup, Name: MyName" Type: string (or Expression with resultType - string). - :type column_mappings: object - :param schema_mapping: The schema mapping to map between tabular data and - hierarchical data. Example: {"Column1": "$.Column1", "Column2": - "$.Column2.Property1", "Column3": "$.Column2.Property2"}. Type: object (or - Expression with resultType object). - :type schema_mapping: object - :param collection_reference: The JSON Path of the Nested Array that is - going to do cross-apply. Type: object (or Expression with resultType - object). 
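With both TabularTranslator model files deleted, code that still needs that translator shape would presumably pass it as a raw dict through the untyped translator property of a copy activity; this is an assumption about the service contract, not something this patch states:

# Hypothetical stand-in for the removed model class: the same JSON shape as
# a plain dict, accepted because CopyActivity.translator is typed 'object'.
translator = {
    'type': 'TabularTranslator',
    'columnMappings': 'UserId: MyUserId, Group: MyGroup, Name: MyName',
}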
- :type collection_reference: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'column_mappings': {'key': 'columnMappings', 'type': 'object'}, - 'schema_mapping': {'key': 'schemaMapping', 'type': 'object'}, - 'collection_reference': {'key': 'collectionReference', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, column_mappings=None, schema_mapping=None, collection_reference=None, **kwargs) -> None: - super(TabularTranslator, self).__init__(additional_properties=additional_properties, **kwargs) - self.column_mappings = column_mappings - self.schema_mapping = schema_mapping - self.collection_reference = collection_reference - self.type = 'TabularTranslator' From 9dfd055942a56d3acce5bbcca86150031b9c0f1f Mon Sep 17 00:00:00 2001 From: Azure SDK for Python bot Date: Wed, 19 Jun 2019 20:21:05 -0700 Subject: [PATCH 03/30] [AutoPR datafactory/resource-manager] (Public swagger update) Add TeradataSource,TeradataPartitionSettings,TeradataTableDataset,TeradataTableDatasetTypeProperties (#5865) * Generated from d2b6a0a231eeeef8cd8f82383d786706289b8b75 add TeradataTableDataset,TeradataSource * Generated from 0fb95a04203b7d79f6f007221e2c34535b0c3baf modify specified --- .../azure/mgmt/datafactory/models/__init__.py | 9 +++ .../mgmt/datafactory/models/copy_source.py | 10 +-- .../datafactory/models/copy_source_py3.py | 10 +-- .../azure/mgmt/datafactory/models/dataset.py | 4 +- .../mgmt/datafactory/models/dataset_py3.py | 4 +- .../models/teradata_linked_service.py | 10 ++- .../models/teradata_linked_service_py3.py | 12 ++- .../models/teradata_partition_settings.py | 42 ++++++++++ .../models/teradata_partition_settings_py3.py | 42 ++++++++++ .../datafactory/models/teradata_source.py | 68 ++++++++++++++++ .../datafactory/models/teradata_source_py3.py | 68 ++++++++++++++++ .../models/teradata_table_dataset.py | 77 +++++++++++++++++++ .../models/teradata_table_dataset_py3.py | 77 +++++++++++++++++++ 13 files changed, 412 insertions(+), 21 deletions(-) create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset_py3.py diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index f08086a1e9f9..4414f045cddd 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -241,6 +241,7 @@ from .salesforce_object_dataset_py3 import SalesforceObjectDataset from .relational_table_dataset_py3 import RelationalTableDataset from .azure_my_sql_table_dataset_py3 import AzureMySqlTableDataset + from .teradata_table_dataset_py3 import TeradataTableDataset from .oracle_table_dataset_py3 import
OracleTableDataset from .odata_resource_dataset_py3 import ODataResourceDataset from .cosmos_db_mongo_db_api_collection_dataset_py3 import CosmosDbMongoDbApiCollectionDataset @@ -328,6 +329,8 @@ from .mongo_db_source_py3 import MongoDbSource from .cassandra_source_py3 import CassandraSource from .web_source_py3 import WebSource + from .teradata_partition_settings_py3 import TeradataPartitionSettings + from .teradata_source_py3 import TeradataSource from .oracle_partition_settings_py3 import OraclePartitionSettings from .oracle_source_py3 import OracleSource from .azure_data_explorer_source_py3 import AzureDataExplorerSource @@ -695,6 +698,7 @@ from .salesforce_object_dataset import SalesforceObjectDataset from .relational_table_dataset import RelationalTableDataset from .azure_my_sql_table_dataset import AzureMySqlTableDataset + from .teradata_table_dataset import TeradataTableDataset from .oracle_table_dataset import OracleTableDataset from .odata_resource_dataset import ODataResourceDataset from .cosmos_db_mongo_db_api_collection_dataset import CosmosDbMongoDbApiCollectionDataset @@ -782,6 +786,8 @@ from .mongo_db_source import MongoDbSource from .cassandra_source import CassandraSource from .web_source import WebSource + from .teradata_partition_settings import TeradataPartitionSettings + from .teradata_source import TeradataSource from .oracle_partition_settings import OraclePartitionSettings from .oracle_source import OracleSource from .azure_data_explorer_source import AzureDataExplorerSource @@ -1217,6 +1223,7 @@ 'SalesforceObjectDataset', 'RelationalTableDataset', 'AzureMySqlTableDataset', + 'TeradataTableDataset', 'OracleTableDataset', 'ODataResourceDataset', 'CosmosDbMongoDbApiCollectionDataset', @@ -1304,6 +1311,8 @@ 'MongoDbSource', 'CassandraSource', 'WebSource', + 'TeradataPartitionSettings', + 'TeradataSource', 'OraclePartitionSettings', 'OracleSource', 'AzureDataExplorerSource', diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py index c261c385de8d..99918f1e245c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py @@ -26,10 +26,10 @@ class CopySource(Model): EloquaSource, DrillSource, CouchbaseSource, ConcurSource, AzurePostgreSqlSource, AmazonMWSSource, HttpSource, AzureBlobFSSource, AzureDataLakeStoreSource, Office365Source, CosmosDbMongoDbApiSource, - MongoDbV2Source, MongoDbSource, CassandraSource, WebSource, OracleSource, - AzureDataExplorerSource, AzureMySqlSource, HdfsSource, FileSystemSource, - SqlDWSource, AzureSqlSource, SqlServerSource, SqlSource, RestSource, - SapTableSource, SapOpenHubSource, SapHanaSource, SapEccSource, + MongoDbV2Source, MongoDbSource, CassandraSource, WebSource, TeradataSource, + OracleSource, AzureDataExplorerSource, AzureMySqlSource, HdfsSource, + FileSystemSource, SqlDWSource, AzureSqlSource, SqlServerSource, SqlSource, + RestSource, SapTableSource, SapOpenHubSource, SapHanaSource, SapEccSource, SapCloudForCustomerSource, SalesforceSource, RelationalSource, DynamicsSource, DocumentDbCollectionSource, BlobSource, AzureTableSource, DelimitedTextSource, ParquetSource @@ -67,7 +67,7 @@ class CopySource(Model): } _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 
'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 
'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py index 5a0b7d807b7f..9f301bc3211e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py @@ -26,10 +26,10 @@ class CopySource(Model): EloquaSource, DrillSource, CouchbaseSource, ConcurSource, AzurePostgreSqlSource, AmazonMWSSource, HttpSource, AzureBlobFSSource, AzureDataLakeStoreSource, Office365Source, CosmosDbMongoDbApiSource, - MongoDbV2Source, MongoDbSource, CassandraSource, WebSource, OracleSource, - AzureDataExplorerSource, AzureMySqlSource, HdfsSource, FileSystemSource, - SqlDWSource, AzureSqlSource, SqlServerSource, SqlSource, RestSource, - SapTableSource, SapOpenHubSource, SapHanaSource, SapEccSource, + MongoDbV2Source, MongoDbSource, CassandraSource, WebSource, TeradataSource, + OracleSource, AzureDataExplorerSource, AzureMySqlSource, HdfsSource, + FileSystemSource, SqlDWSource, AzureSqlSource, SqlServerSource, SqlSource, + RestSource, SapTableSource, SapOpenHubSource, SapHanaSource, SapEccSource, SapCloudForCustomerSource, SalesforceSource, RelationalSource, DynamicsSource, DocumentDbCollectionSource, BlobSource, AzureTableSource, DelimitedTextSource, ParquetSource @@ -67,7 +67,7 @@ class CopySource(Model): } _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 
'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 
'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py index 76d8375b3da9..c95b5c3da543 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py @@ -34,7 +34,7 @@ class Dataset(Model): SqlServerTableDataset, SapOpenHubTableDataset, SapHanaTableDataset, SapEccResourceDataset, SapCloudForCustomerResourceDataset, SalesforceObjectDataset, RelationalTableDataset, AzureMySqlTableDataset, - OracleTableDataset, ODataResourceDataset, + TeradataTableDataset, OracleTableDataset, ODataResourceDataset, CosmosDbMongoDbApiCollectionDataset, MongoDbV2CollectionDataset, MongoDbCollectionDataset, FileShareDataset, Office365Dataset, AzureBlobFSDataset, AzureDataLakeStoreDataset, DynamicsEntityDataset, @@ -90,7 +90,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 
'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 
'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py index c793e32f2251..c0cfdb972550 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py @@ -34,7 +34,7 @@ class Dataset(Model): SqlServerTableDataset, SapOpenHubTableDataset, SapHanaTableDataset, SapEccResourceDataset, SapCloudForCustomerResourceDataset, SalesforceObjectDataset, RelationalTableDataset, AzureMySqlTableDataset, - OracleTableDataset, ODataResourceDataset, + TeradataTableDataset, OracleTableDataset, ODataResourceDataset, CosmosDbMongoDbApiCollectionDataset, MongoDbV2CollectionDataset, MongoDbCollectionDataset, FileShareDataset, Office365Dataset, AzureBlobFSDataset, AzureDataLakeStoreDataset, DynamicsEntityDataset, @@ -90,7 +90,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 
'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 
'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py index 78b89638b359..6e02b0d389ab 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py @@ -33,8 +33,11 @@ class TeradataLinkedService(LinkedService): :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str - :param server: Required. Server name for connection. Type: string (or - Expression with resultType string). + :param connection_string: Teradata ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param server: Server name for connection. Type: string (or Expression + with resultType string). :type server: object :param authentication_type: AuthenticationType to be used for connection. Possible values include: 'Basic', 'Windows' @@ -53,7 +56,6 @@ class TeradataLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'server': {'required': True}, } _attribute_map = { @@ -63,6 +65,7 @@ class TeradataLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'server': {'key': 'typeProperties.server', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, @@ -72,6 +75,7 @@ class TeradataLinkedService(LinkedService): def __init__(self, **kwargs): super(TeradataLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) self.server = kwargs.get('server', None) self.authentication_type = kwargs.get('authentication_type', None) self.username = kwargs.get('username', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py index e80b776454c0..aac40efe69e0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py @@ -33,8 +33,11 @@ class TeradataLinkedService(LinkedService): :type annotations: list[object] :param type: Required. 
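The _subtype_map additions above are what let the new 'TeradataSource' discriminator round-trip. A minimal sketch of that dispatch, assuming the model index that generated clients build; the payload is illustrative:

from msrest import Deserializer

from azure.mgmt.datafactory import models

# Collect the generated model classes, as the generated client does, then
# let the 'type' discriminator route the payload to the right subclass.
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
deserialize = Deserializer(client_models)

payload = {'type': 'TeradataSource', 'query': 'SELECT * FROM Orders'}
source = deserialize('CopySource', payload)
assert type(source).__name__ == 'TeradataSource'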
Constant filled by server. :type type: str - :param server: Required. Server name for connection. Type: string (or - Expression with resultType string). + :param connection_string: Teradata ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param server: Server name for connection. Type: string (or Expression + with resultType string). :type server: object :param authentication_type: AuthenticationType to be used for connection. Possible values include: 'Basic', 'Windows' @@ -53,7 +56,6 @@ class TeradataLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'server': {'required': True}, } _attribute_map = { @@ -63,6 +65,7 @@ class TeradataLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'server': {'key': 'typeProperties.server', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, @@ -70,8 +73,9 @@ class TeradataLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, *, server, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, server=None, authentication_type=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: super(TeradataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string self.server = server self.authentication_type = authentication_type self.username = username diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings.py new file mode 100644 index 000000000000..81f55ffad16c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TeradataPartitionSettings(Model): + """The settings that will be leveraged for teradata source partitioning. + + :param partition_column_name: The name of the column in integer type that + will be used for proceeding range or hash partitioning. Type: string (or + Expression with resultType string). 
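Because server is demoted from required and connectionString is added, a Teradata linked service can now be built from a connection string alone; a hedged sketch (the ODBC string is a made-up example, and secrets would normally come from Key Vault rather than inline):

from azure.mgmt.datafactory.models import TeradataLinkedService

# Connection-string style; server/authentication_type/username/password
# remain available for the older property-by-property style.
linked_service = TeradataLinkedService(
    connection_string='DBCName=teradata.contoso.com;Uid=loader;',  # hypothetical
)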
+ :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(TeradataPartitionSettings, self).__init__(**kwargs) + self.partition_column_name = kwargs.get('partition_column_name', None) + self.partition_upper_bound = kwargs.get('partition_upper_bound', None) + self.partition_lower_bound = kwargs.get('partition_lower_bound', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings_py3.py new file mode 100644 index 000000000000..b8b4032e8de4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings_py3.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TeradataPartitionSettings(Model): + """The settings that will be leveraged for teradata source partitioning. + + :param partition_column_name: The name of the column in integer type that + will be used for proceeding range or hash partitioning. Type: string (or + Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). 
+ :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__(self, *, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None: + super(TeradataPartitionSettings, self).__init__(**kwargs) + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source.py new file mode 100644 index 000000000000..3e8a9adfab9a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class TeradataSource(CopySource): + """A copy activity Teradata source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Teradata query. Type: string (or Expression with resultType + string). + :type query: object + :param partition_option: The partition mechanism that will be used for + teradata read in parallel. + :type partition_option: object + :param partition_settings: The settings that will be leveraged for + teradata source partitioning. 
+ :type partition_settings: + ~azure.mgmt.datafactory.models.TeradataPartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, + } + + def __init__(self, **kwargs): + super(TeradataSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) + self.type = 'TeradataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source_py3.py new file mode 100644 index 000000000000..f01fb531eb55 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source_py3.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class TeradataSource(CopySource): + """A copy activity Teradata source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Teradata query. Type: string (or Expression with resultType + string). + :type query: object + :param partition_option: The partition mechanism that will be used for + teradata read in parallel. + :type partition_option: object + :param partition_settings: The settings that will be leveraged for + teradata source partitioning. 
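The two new models combine to drive parallel reads: TeradataPartitionSettings names the partition column and its range, and TeradataSource selects the mechanism. A sketch against the signatures above (the query, column, and bounds are hypothetical, and 'DynamicRange' is an assumed value for partition_option, which the model types only as object):

from azure.mgmt.datafactory.models import TeradataPartitionSettings, TeradataSource

source = TeradataSource(
    query='SELECT * FROM sales.orders',    # hypothetical query
    partition_option='DynamicRange',       # assumed service-side mechanism name
    partition_settings=TeradataPartitionSettings(
        partition_column_name='order_id',  # integer-typed column, per the docstring
        partition_lower_bound='1',
        partition_upper_bound='1000000',
    ),
)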
+ :type partition_settings: + ~azure.mgmt.datafactory.models.TeradataPartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, partition_option=None, partition_settings=None, **kwargs) -> None: + super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.partition_option = partition_option + self.partition_settings = partition_settings + self.type = 'TeradataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset.py new file mode 100644 index 000000000000..e396bfd6fb15 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class TeradataTableDataset(Dataset): + """The Teradata database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. 
Constant filled by server. + :type type: str + :param database: The database name of Teradata. Type: string (or + Expression with resultType string). + :type database: object + :param table: The table name of Teradata. Type: string (or Expression with + resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(TeradataTableDataset, self).__init__(**kwargs) + self.database = kwargs.get('database', None) + self.table = kwargs.get('table', None) + self.type = 'TeradataTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset_py3.py new file mode 100644 index 000000000000..892707b7f133 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class TeradataTableDataset(Dataset): + """The Teradata database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. 
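Rounding out the trio, TeradataTableDataset binds a linked service to a concrete database and table. A sketch (the reference, database, and table names are hypothetical; LinkedServiceReference is the package's existing reference model):

from azure.mgmt.datafactory.models import LinkedServiceReference, TeradataTableDataset

dataset = TeradataTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='TeradataLS'),
    database='sales',  # hypothetical database name
    table='orders',    # hypothetical table name
)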
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param database: The database name of Teradata. Type: string (or + Expression with resultType string). + :type database: object + :param table: The table name of Teradata. Type: string (or Expression with + resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, database=None, table=None, **kwargs) -> None: + super(TeradataTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.database = database + self.table = table + self.type = 'TeradataTable' From 07b543f28f4753400b192e8d0a5588a7ae77ab11 Mon Sep 17 00:00:00 2001 From: Azure SDK for Python bot Date: Wed, 26 Jun 2019 16:42:27 -0700 Subject: [PATCH 04/30] [AutoPR datafactory/resource-manager] fix public swagger issues (#5985) * Generated from b0ddfd5a2aefefdca6d220fd03714b3fdfc779a6 modify swagger * Generated from 76032c5b6d424dceb3a9b03b7df79e009eb5c183 Change XxxSetting to XxxSettings in private swagger --- .../azure/mgmt/datafactory/models/__init__.py | 139 +++++++++--------- ..._setting.py => amazon_s3_read_settings.py} | 6 +- ..._py3.py => amazon_s3_read_settings_py3.py} | 6 +- ...ting.py => azure_blob_fs_read_settings.py} | 6 +- ....py => azure_blob_fs_read_settings_py3.py} | 6 +- ...ing.py => azure_blob_fs_write_settings.py} | 6 +- ...py => azure_blob_fs_write_settings_py3.py} | 8 +- ...py => azure_blob_storage_read_settings.py} | 6 +- ...> azure_blob_storage_read_settings_py3.py} | 6 +- ...y => azure_blob_storage_write_settings.py} | 6 +- ... 
azure_blob_storage_write_settings_py3.py} | 6 +- ...=> azure_data_lake_store_read_settings.py} | 6 +- ...zure_data_lake_store_read_settings_py3.py} | 6 +- ...> azure_data_lake_store_write_settings.py} | 6 +- ...ure_data_lake_store_write_settings_py3.py} | 6 +- .../models/azure_search_index_sink.py | 7 +- .../models/azure_search_index_sink_py3.py | 7 +- .../datafactory/models/cassandra_source.py | 9 +- .../models/cassandra_source_py3.py | 9 +- .../mgmt/datafactory/models/copy_source.py | 10 +- .../datafactory/models/copy_source_py3.py | 10 +- .../data_factory_management_client_enums.py | 38 +++++ .../azure/mgmt/datafactory/models/dataset.py | 4 +- .../mgmt/datafactory/models/dataset_py3.py | 4 +- ...ing.py => delimited_text_read_settings.py} | 6 +- ...py => delimited_text_read_settings_py3.py} | 6 +- .../datafactory/models/delimited_text_sink.py | 8 +- .../models/delimited_text_sink_py3.py | 8 +- .../models/delimited_text_source.py | 8 +- .../models/delimited_text_source_py3.py | 8 +- ...ng.py => delimited_text_write_settings.py} | 6 +- ...y => delimited_text_write_settings_py3.py} | 6 +- .../mgmt/datafactory/models/dynamics_sink.py | 7 +- .../datafactory/models/dynamics_sink_py3.py | 7 +- ...etting.py => file_server_read_settings.py} | 6 +- ...y3.py => file_server_read_settings_py3.py} | 6 +- ...tting.py => file_server_write_settings.py} | 6 +- ...3.py => file_server_write_settings_py3.py} | 8 +- ...ead_setting.py => format_read_settings.py} | 4 +- ...ing_py3.py => format_read_settings_py3.py} | 4 +- ...te_setting.py => format_write_settings.py} | 4 +- ...ng_py3.py => format_write_settings_py3.py} | 4 +- ...p_read_setting.py => ftp_read_settings.py} | 6 +- ...etting_py3.py => ftp_read_settings_py3.py} | 6 +- ..._read_setting.py => hdfs_read_settings.py} | 6 +- ...tting_py3.py => hdfs_read_settings_py3.py} | 6 +- ..._read_setting.py => http_read_settings.py} | 6 +- ...tting_py3.py => http_read_settings_py3.py} | 6 +- .../models/oracle_partition_settings.py | 46 ------ .../models/oracle_partition_settings_py3.py | 46 ------ .../mgmt/datafactory/models/oracle_source.py | 11 -- .../datafactory/models/oracle_source_py3.py | 13 +- .../mgmt/datafactory/models/parquet_sink.py | 4 +- .../datafactory/models/parquet_sink_py3.py | 4 +- .../mgmt/datafactory/models/parquet_source.py | 4 +- .../datafactory/models/parquet_source_py3.py | 4 +- .../datafactory/models/salesforce_sink.py | 7 +- .../datafactory/models/salesforce_sink_py3.py | 7 +- .../datafactory/models/salesforce_source.py | 7 +- .../models/salesforce_source_py3.py | 7 +- .../models/sap_cloud_for_customer_sink.py | 7 +- .../models/sap_cloud_for_customer_sink_py3.py | 7 +- .../models/sap_table_partition_settings.py | 47 ------ .../sap_table_partition_settings_py3.py | 47 ------ .../datafactory/models/sap_table_source.py | 11 -- .../models/sap_table_source_py3.py | 13 +- ..._read_setting.py => sftp_read_settings.py} | 6 +- ...tting_py3.py => sftp_read_settings_py3.py} | 6 +- ...read_setting.py => store_read_settings.py} | 4 +- ...ting_py3.py => store_read_settings_py3.py} | 4 +- ...ite_setting.py => store_write_settings.py} | 4 +- ...ing_py3.py => store_write_settings_py3.py} | 4 +- .../models/teradata_linked_service.py | 10 +- .../models/teradata_linked_service_py3.py | 12 +- .../models/teradata_partition_settings.py | 42 ------ .../models/teradata_partition_settings_py3.py | 42 ------ .../datafactory/models/teradata_source.py | 68 --------- .../datafactory/models/teradata_source_py3.py | 68 --------- .../models/teradata_table_dataset.py | 77 
---------- .../models/teradata_table_dataset_py3.py | 77 ---------- 80 files changed, 312 insertions(+), 875 deletions(-) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{amazon_s3_read_setting.py => amazon_s3_read_settings.py} (95%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{amazon_s3_read_setting_py3.py => amazon_s3_read_settings_py3.py} (92%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{azure_blob_fs_read_setting.py => azure_blob_fs_read_settings.py} (95%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{azure_blob_fs_read_setting_py3.py => azure_blob_fs_read_settings_py3.py} (92%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{azure_blob_fs_write_setting.py => azure_blob_fs_write_settings.py} (89%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{file_server_write_setting_py3.py => azure_blob_fs_write_settings_py3.py} (81%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{azure_blob_storage_read_setting.py => azure_blob_storage_read_settings.py} (94%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{azure_blob_storage_read_setting_py3.py => azure_blob_storage_read_settings_py3.py} (92%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{azure_blob_storage_write_setting.py => azure_blob_storage_write_settings.py} (89%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{azure_blob_storage_write_setting_py3.py => azure_blob_storage_write_settings_py3.py} (83%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{azure_data_lake_store_read_setting.py => azure_data_lake_store_read_settings.py} (94%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{azure_data_lake_store_read_setting_py3.py => azure_data_lake_store_read_settings_py3.py} (91%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{azure_data_lake_store_write_setting.py => azure_data_lake_store_write_settings.py} (89%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{azure_data_lake_store_write_setting_py3.py => azure_data_lake_store_write_settings_py3.py} (83%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{delimited_text_read_setting.py => delimited_text_read_settings.py} (89%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{delimited_text_read_setting_py3.py => delimited_text_read_settings_py3.py} (86%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{delimited_text_write_setting.py => delimited_text_write_settings.py} (90%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{delimited_text_write_setting_py3.py => delimited_text_write_settings_py3.py} (88%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{file_server_read_setting.py => file_server_read_settings.py} (95%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{file_server_read_setting_py3.py => file_server_read_settings_py3.py} (92%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{file_server_write_setting.py => file_server_write_settings.py} (89%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{azure_blob_fs_write_setting_py3.py => 
file_server_write_settings_py3.py} (88%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{format_read_setting.py => format_read_settings.py} (93%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{format_read_setting_py3.py => format_read_settings_py3.py} (93%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{format_write_setting.py => format_write_settings.py} (92%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{format_write_setting_py3.py => format_write_settings_py3.py} (92%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{ftp_read_setting.py => ftp_read_settings.py} (94%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{ftp_read_setting_py3.py => ftp_read_settings_py3.py} (90%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{hdfs_read_setting.py => hdfs_read_settings.py} (95%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{hdfs_read_setting_py3.py => hdfs_read_settings_py3.py} (93%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{http_read_setting.py => http_read_settings.py} (94%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{http_read_setting_py3.py => http_read_settings_py3.py} (90%) delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings_py3.py rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{sftp_read_setting.py => sftp_read_settings.py} (94%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{sftp_read_setting_py3.py => sftp_read_settings_py3.py} (91%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{connector_read_setting.py => store_read_settings.py} (94%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{connector_read_setting_py3.py => store_read_settings_py3.py} (94%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{connector_write_setting.py => store_write_settings.py} (94%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{connector_write_setting_py3.py => store_write_settings_py3.py} (95%) delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset_py3.py diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index 4414f045cddd..27c22b42e671 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -241,7 +241,6 @@ from .salesforce_object_dataset_py3 import SalesforceObjectDataset from .relational_table_dataset_py3 import RelationalTableDataset from .azure_my_sql_table_dataset_py3 import AzureMySqlTableDataset - from .teradata_table_dataset_py3 import TeradataTableDataset from .oracle_table_dataset_py3 import OracleTableDataset from .odata_resource_dataset_py3 import ODataResourceDataset from .cosmos_db_mongo_db_api_collection_dataset_py3 import CosmosDbMongoDbApiCollectionDataset @@ -329,9 +328,6 @@ from .mongo_db_source_py3 import MongoDbSource from .cassandra_source_py3 import CassandraSource from .web_source_py3 import WebSource - from .teradata_partition_settings_py3 import TeradataPartitionSettings - from .teradata_source_py3 import TeradataSource - from .oracle_partition_settings_py3 import OraclePartitionSettings from .oracle_source_py3 import OracleSource from .azure_data_explorer_source_py3 import AzureDataExplorerSource from .azure_my_sql_source_py3 import AzureMySqlSource @@ -344,7 +340,6 @@ from .sql_server_source_py3 import SqlServerSource from .sql_source_py3 import SqlSource from .rest_source_py3 import RestSource - from .sap_table_partition_settings_py3 import SapTablePartitionSettings from .sap_table_source_py3 import SapTableSource from .sap_open_hub_source_py3 import SapOpenHubSource from .sap_hana_source_py3 import SapHanaSource @@ -356,18 +351,18 @@ from .document_db_collection_source_py3 import DocumentDbCollectionSource from .blob_source_py3 import BlobSource from .azure_table_source_py3 import AzureTableSource - from .format_read_setting_py3 import FormatReadSetting - from .delimited_text_read_setting_py3 import DelimitedTextReadSetting - from .hdfs_read_setting_py3 import HdfsReadSetting - from .http_read_setting_py3 import HttpReadSetting - from .sftp_read_setting_py3 import SftpReadSetting - from .ftp_read_setting_py3 import FtpReadSetting - from .file_server_read_setting_py3 import FileServerReadSetting - from .amazon_s3_read_setting_py3 import AmazonS3ReadSetting - from .azure_data_lake_store_read_setting_py3 import AzureDataLakeStoreReadSetting - from .azure_blob_fs_read_setting_py3 import AzureBlobFSReadSetting - from .azure_blob_storage_read_setting_py3 import AzureBlobStorageReadSetting - from .connector_read_setting_py3 import ConnectorReadSetting + from .format_read_settings_py3 import FormatReadSettings + from .delimited_text_read_settings_py3 import DelimitedTextReadSettings + from .hdfs_read_settings_py3 import HdfsReadSettings + from .http_read_settings_py3 import HttpReadSettings + from .sftp_read_settings_py3 import SftpReadSettings + from .ftp_read_settings_py3 import FtpReadSettings + from .file_server_read_settings_py3 import FileServerReadSettings + from .amazon_s3_read_settings_py3 import AmazonS3ReadSettings + from .azure_data_lake_store_read_settings_py3 import AzureDataLakeStoreReadSettings + from .azure_blob_fs_read_settings_py3 import AzureBlobFSReadSettings + from .azure_blob_storage_read_settings_py3 import AzureBlobStorageReadSettings + from .store_read_settings_py3 import StoreReadSettings from .delimited_text_source_py3 import DelimitedTextSource from .parquet_source_py3 import ParquetSource from .copy_source_py3 import CopySource @@ 
-406,17 +401,17 @@ from .document_db_collection_sink_py3 import DocumentDbCollectionSink from .file_system_sink_py3 import FileSystemSink from .blob_sink_py3 import BlobSink - from .file_server_write_setting_py3 import FileServerWriteSetting - from .azure_data_lake_store_write_setting_py3 import AzureDataLakeStoreWriteSetting - from .azure_blob_fs_write_setting_py3 import AzureBlobFSWriteSetting - from .azure_blob_storage_write_setting_py3 import AzureBlobStorageWriteSetting - from .connector_write_setting_py3 import ConnectorWriteSetting + from .file_server_write_settings_py3 import FileServerWriteSettings + from .azure_data_lake_store_write_settings_py3 import AzureDataLakeStoreWriteSettings + from .azure_blob_fs_write_settings_py3 import AzureBlobFSWriteSettings + from .azure_blob_storage_write_settings_py3 import AzureBlobStorageWriteSettings + from .store_write_settings_py3 import StoreWriteSettings from .parquet_sink_py3 import ParquetSink from .azure_table_sink_py3 import AzureTableSink from .azure_queue_sink_py3 import AzureQueueSink from .sap_cloud_for_customer_sink_py3 import SapCloudForCustomerSink - from .format_write_setting_py3 import FormatWriteSetting - from .delimited_text_write_setting_py3 import DelimitedTextWriteSetting + from .format_write_settings_py3 import FormatWriteSettings + from .delimited_text_write_settings_py3 import DelimitedTextWriteSettings from .delimited_text_sink_py3 import DelimitedTextSink from .copy_sink_py3 import CopySink from .copy_activity_py3 import CopyActivity @@ -698,7 +693,6 @@ from .salesforce_object_dataset import SalesforceObjectDataset from .relational_table_dataset import RelationalTableDataset from .azure_my_sql_table_dataset import AzureMySqlTableDataset - from .teradata_table_dataset import TeradataTableDataset from .oracle_table_dataset import OracleTableDataset from .odata_resource_dataset import ODataResourceDataset from .cosmos_db_mongo_db_api_collection_dataset import CosmosDbMongoDbApiCollectionDataset @@ -786,9 +780,6 @@ from .mongo_db_source import MongoDbSource from .cassandra_source import CassandraSource from .web_source import WebSource - from .teradata_partition_settings import TeradataPartitionSettings - from .teradata_source import TeradataSource - from .oracle_partition_settings import OraclePartitionSettings from .oracle_source import OracleSource from .azure_data_explorer_source import AzureDataExplorerSource from .azure_my_sql_source import AzureMySqlSource @@ -801,7 +792,6 @@ from .sql_server_source import SqlServerSource from .sql_source import SqlSource from .rest_source import RestSource - from .sap_table_partition_settings import SapTablePartitionSettings from .sap_table_source import SapTableSource from .sap_open_hub_source import SapOpenHubSource from .sap_hana_source import SapHanaSource @@ -813,18 +803,18 @@ from .document_db_collection_source import DocumentDbCollectionSource from .blob_source import BlobSource from .azure_table_source import AzureTableSource - from .format_read_setting import FormatReadSetting - from .delimited_text_read_setting import DelimitedTextReadSetting - from .hdfs_read_setting import HdfsReadSetting - from .http_read_setting import HttpReadSetting - from .sftp_read_setting import SftpReadSetting - from .ftp_read_setting import FtpReadSetting - from .file_server_read_setting import FileServerReadSetting - from .amazon_s3_read_setting import AmazonS3ReadSetting - from .azure_data_lake_store_read_setting import AzureDataLakeStoreReadSetting - from .azure_blob_fs_read_setting import 
AzureBlobFSReadSetting - from .azure_blob_storage_read_setting import AzureBlobStorageReadSetting - from .connector_read_setting import ConnectorReadSetting + from .format_read_settings import FormatReadSettings + from .delimited_text_read_settings import DelimitedTextReadSettings + from .hdfs_read_settings import HdfsReadSettings + from .http_read_settings import HttpReadSettings + from .sftp_read_settings import SftpReadSettings + from .ftp_read_settings import FtpReadSettings + from .file_server_read_settings import FileServerReadSettings + from .amazon_s3_read_settings import AmazonS3ReadSettings + from .azure_data_lake_store_read_settings import AzureDataLakeStoreReadSettings + from .azure_blob_fs_read_settings import AzureBlobFSReadSettings + from .azure_blob_storage_read_settings import AzureBlobStorageReadSettings + from .store_read_settings import StoreReadSettings from .delimited_text_source import DelimitedTextSource from .parquet_source import ParquetSource from .copy_source import CopySource @@ -863,17 +853,17 @@ from .document_db_collection_sink import DocumentDbCollectionSink from .file_system_sink import FileSystemSink from .blob_sink import BlobSink - from .file_server_write_setting import FileServerWriteSetting - from .azure_data_lake_store_write_setting import AzureDataLakeStoreWriteSetting - from .azure_blob_fs_write_setting import AzureBlobFSWriteSetting - from .azure_blob_storage_write_setting import AzureBlobStorageWriteSetting - from .connector_write_setting import ConnectorWriteSetting + from .file_server_write_settings import FileServerWriteSettings + from .azure_data_lake_store_write_settings import AzureDataLakeStoreWriteSettings + from .azure_blob_fs_write_settings import AzureBlobFSWriteSettings + from .azure_blob_storage_write_settings import AzureBlobStorageWriteSettings + from .store_write_settings import StoreWriteSettings from .parquet_sink import ParquetSink from .azure_table_sink import AzureTableSink from .azure_queue_sink import AzureQueueSink from .sap_cloud_for_customer_sink import SapCloudForCustomerSink - from .format_write_setting import FormatWriteSetting - from .delimited_text_write_setting import DelimitedTextWriteSetting + from .format_write_settings import FormatWriteSettings + from .delimited_text_write_settings import DelimitedTextWriteSettings from .delimited_text_sink import DelimitedTextSink from .copy_sink import CopySink from .copy_activity import CopyActivity @@ -974,9 +964,14 @@ SybaseAuthenticationType, AzureFunctionActivityMethod, WebActivityMethod, + CassandraSourceReadConsistencyLevels, StoredProcedureParameterType, + SalesforceSourceReadBehavior, HDInsightActivityDebugInfoOption, + SalesforceSinkWriteBehavior, + AzureSearchIndexWriteBehaviorType, PolybaseSettingsRejectType, + SapCloudForCustomerSinkWriteBehavior, WebHookActivityMethod, IntegrationRuntimeType, SelfHostedIntegrationRuntimeNodeStatus, @@ -1223,7 +1218,6 @@ 'SalesforceObjectDataset', 'RelationalTableDataset', 'AzureMySqlTableDataset', - 'TeradataTableDataset', 'OracleTableDataset', 'ODataResourceDataset', 'CosmosDbMongoDbApiCollectionDataset', @@ -1311,9 +1305,6 @@ 'MongoDbSource', 'CassandraSource', 'WebSource', - 'TeradataPartitionSettings', - 'TeradataSource', - 'OraclePartitionSettings', 'OracleSource', 'AzureDataExplorerSource', 'AzureMySqlSource', @@ -1326,7 +1317,6 @@ 'SqlServerSource', 'SqlSource', 'RestSource', - 'SapTablePartitionSettings', 'SapTableSource', 'SapOpenHubSource', 'SapHanaSource', @@ -1338,18 +1328,18 @@ 'DocumentDbCollectionSource', 
'BlobSource', 'AzureTableSource', - 'FormatReadSetting', - 'DelimitedTextReadSetting', - 'HdfsReadSetting', - 'HttpReadSetting', - 'SftpReadSetting', - 'FtpReadSetting', - 'FileServerReadSetting', - 'AmazonS3ReadSetting', - 'AzureDataLakeStoreReadSetting', - 'AzureBlobFSReadSetting', - 'AzureBlobStorageReadSetting', - 'ConnectorReadSetting', + 'FormatReadSettings', + 'DelimitedTextReadSettings', + 'HdfsReadSettings', + 'HttpReadSettings', + 'SftpReadSettings', + 'FtpReadSettings', + 'FileServerReadSettings', + 'AmazonS3ReadSettings', + 'AzureDataLakeStoreReadSettings', + 'AzureBlobFSReadSettings', + 'AzureBlobStorageReadSettings', + 'StoreReadSettings', 'DelimitedTextSource', 'ParquetSource', 'CopySource', @@ -1388,17 +1378,17 @@ 'DocumentDbCollectionSink', 'FileSystemSink', 'BlobSink', - 'FileServerWriteSetting', - 'AzureDataLakeStoreWriteSetting', - 'AzureBlobFSWriteSetting', - 'AzureBlobStorageWriteSetting', - 'ConnectorWriteSetting', + 'FileServerWriteSettings', + 'AzureDataLakeStoreWriteSettings', + 'AzureBlobFSWriteSettings', + 'AzureBlobStorageWriteSettings', + 'StoreWriteSettings', 'ParquetSink', 'AzureTableSink', 'AzureQueueSink', 'SapCloudForCustomerSink', - 'FormatWriteSetting', - 'DelimitedTextWriteSetting', + 'FormatWriteSettings', + 'DelimitedTextWriteSettings', 'DelimitedTextSink', 'CopySink', 'CopyActivity', @@ -1498,9 +1488,14 @@ 'SybaseAuthenticationType', 'AzureFunctionActivityMethod', 'WebActivityMethod', + 'CassandraSourceReadConsistencyLevels', 'StoredProcedureParameterType', + 'SalesforceSourceReadBehavior', 'HDInsightActivityDebugInfoOption', + 'SalesforceSinkWriteBehavior', + 'AzureSearchIndexWriteBehaviorType', 'PolybaseSettingsRejectType', + 'SapCloudForCustomerSinkWriteBehavior', 'WebHookActivityMethod', 'IntegrationRuntimeType', 'SelfHostedIntegrationRuntimeNodeStatus', diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings.py similarity index 95% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings.py index 4de7e0ebb7b9..e83910136070 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings.py @@ -9,10 +9,10 @@ # regenerated. # -------------------------------------------------------------------------- -from .connector_read_setting import ConnectorReadSetting +from .store_read_settings import StoreReadSettings -class AmazonS3ReadSetting(ConnectorReadSetting): +class AmazonS3ReadSettings(StoreReadSettings): """Azure data lake store read settings. All required parameters must be populated in order to send to Azure. 
@@ -68,7 +68,7 @@ class AmazonS3ReadSetting(ConnectorReadSetting): } def __init__(self, **kwargs): - super(AmazonS3ReadSetting, self).__init__(**kwargs) + super(AmazonS3ReadSettings, self).__init__(**kwargs) self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings_py3.py similarity index 92% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting_py3.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings_py3.py index deda331ea561..79645a869ac8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings_py3.py @@ -9,10 +9,10 @@ # regenerated. # -------------------------------------------------------------------------- -from .connector_read_setting_py3 import ConnectorReadSetting +from .store_read_settings_py3 import StoreReadSettings -class AmazonS3ReadSetting(ConnectorReadSetting): +class AmazonS3ReadSettings(StoreReadSettings): """Azure data lake store read settings. All required parameters must be populated in order to send to Azure. @@ -68,7 +68,7 @@ class AmazonS3ReadSetting(ConnectorReadSetting): } def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: - super(AmazonS3ReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AmazonS3ReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings.py similarity index 95% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings.py index 11490a288417..6d80ce72ea57 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings.py @@ -9,10 +9,10 @@ # regenerated. # -------------------------------------------------------------------------- -from .connector_read_setting import ConnectorReadSetting +from .store_read_settings import StoreReadSettings -class AzureBlobFSReadSetting(ConnectorReadSetting): +class AzureBlobFSReadSettings(StoreReadSettings): """Azure blobFS read settings. All required parameters must be populated in order to send to Azure. 
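The rename pattern repeated across the files below is mechanical: each XxxReadSetting/XxxWriteSetting class and module gains a trailing 's', and the ConnectorReadSetting/ConnectorWriteSetting bases become StoreReadSettings/StoreWriteSettings. Callers only need the new names; a sketch against the AmazonS3ReadSettings py3 signature shown above (the wildcard pattern is hypothetical):

from azure.mgmt.datafactory.models import AmazonS3ReadSettings  # formerly AmazonS3ReadSetting

read_settings = AmazonS3ReadSettings(
    type='AmazonS3ReadSettings',  # required by the py3 signature above
    recursive=True,
    wildcard_file_name='*.csv',   # hypothetical pattern
)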
@@ -64,7 +64,7 @@ class AzureBlobFSReadSetting(ConnectorReadSetting): } def __init__(self, **kwargs): - super(AzureBlobFSReadSetting, self).__init__(**kwargs) + super(AzureBlobFSReadSettings, self).__init__(**kwargs) self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings_py3.py similarity index 92% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting_py3.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings_py3.py index 28f3b4f7ceb4..af4746e84f8e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings_py3.py @@ -9,10 +9,10 @@ # regenerated. # -------------------------------------------------------------------------- -from .connector_read_setting_py3 import ConnectorReadSetting +from .store_read_settings_py3 import StoreReadSettings -class AzureBlobFSReadSetting(ConnectorReadSetting): +class AzureBlobFSReadSettings(StoreReadSettings): """Azure blobFS read settings. All required parameters must be populated in order to send to Azure. @@ -64,7 +64,7 @@ class AzureBlobFSReadSetting(ConnectorReadSetting): } def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: - super(AzureBlobFSReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureBlobFSReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings.py similarity index 89% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings.py index d5b2d850da58..5de93c10a1f8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings.py @@ -9,10 +9,10 @@ # regenerated. # -------------------------------------------------------------------------- -from .connector_write_setting import ConnectorWriteSetting +from .store_write_settings import StoreWriteSettings -class AzureBlobFSWriteSetting(ConnectorWriteSetting): +class AzureBlobFSWriteSettings(StoreWriteSettings): """Azure blobFS write settings. All required parameters must be populated in order to send to Azure. 
@@ -42,4 +42,4 @@ class AzureBlobFSWriteSetting(ConnectorWriteSetting): } def __init__(self, **kwargs): - super(AzureBlobFSWriteSetting, self).__init__(**kwargs) + super(AzureBlobFSWriteSettings, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings_py3.py similarity index 81% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting_py3.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings_py3.py index 1ed4bf220417..be0008fd8733 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings_py3.py @@ -9,11 +9,11 @@ # regenerated. # -------------------------------------------------------------------------- -from .connector_write_setting_py3 import ConnectorWriteSetting +from .store_write_settings_py3 import StoreWriteSettings -class FileServerWriteSetting(ConnectorWriteSetting): - """File server write settings. +class AzureBlobFSWriteSettings(StoreWriteSettings): + """Azure blobFS write settings. All required parameters must be populated in order to send to Azure. @@ -42,4 +42,4 @@ class FileServerWriteSetting(ConnectorWriteSetting): } def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: - super(FileServerWriteSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + super(AzureBlobFSWriteSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings.py similarity index 94% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings.py index ee07a3576f29..42b11cc6de16 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings.py @@ -9,10 +9,10 @@ # regenerated. # -------------------------------------------------------------------------- -from .connector_read_setting import ConnectorReadSetting +from .store_read_settings import StoreReadSettings -class AzureBlobStorageReadSetting(ConnectorReadSetting): +class AzureBlobStorageReadSettings(StoreReadSettings): """Azure blob read settings. All required parameters must be populated in order to send to Azure. 
@@ -64,7 +64,7 @@ class AzureBlobStorageReadSetting(ConnectorReadSetting): } def __init__(self, **kwargs): - super(AzureBlobStorageReadSetting, self).__init__(**kwargs) + super(AzureBlobStorageReadSettings, self).__init__(**kwargs) self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings_py3.py similarity index 92% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting_py3.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings_py3.py index 3e3d35774a46..495ea16afd98 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings_py3.py @@ -9,10 +9,10 @@ # regenerated. # -------------------------------------------------------------------------- -from .connector_read_setting_py3 import ConnectorReadSetting +from .store_read_settings_py3 import StoreReadSettings -class AzureBlobStorageReadSetting(ConnectorReadSetting): +class AzureBlobStorageReadSettings(StoreReadSettings): """Azure blob read settings. All required parameters must be populated in order to send to Azure. @@ -64,7 +64,7 @@ class AzureBlobStorageReadSetting(ConnectorReadSetting): } def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: - super(AzureBlobStorageReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureBlobStorageReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings.py similarity index 89% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings.py index a6499dfda798..08becf208a3d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings.py @@ -9,10 +9,10 @@ # regenerated. # -------------------------------------------------------------------------- -from .connector_write_setting import ConnectorWriteSetting +from .store_write_settings import StoreWriteSettings -class AzureBlobStorageWriteSetting(ConnectorWriteSetting): +class AzureBlobStorageWriteSettings(StoreWriteSettings): """Azure blob write settings. 
All required parameters must be populated in order to send to Azure. @@ -42,4 +42,4 @@ class AzureBlobStorageWriteSetting(ConnectorWriteSetting): } def __init__(self, **kwargs): - super(AzureBlobStorageWriteSetting, self).__init__(**kwargs) + super(AzureBlobStorageWriteSettings, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings_py3.py similarity index 83% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting_py3.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings_py3.py index 9abb68c06055..40f2e0103693 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings_py3.py @@ -9,10 +9,10 @@ # regenerated. # -------------------------------------------------------------------------- -from .connector_write_setting_py3 import ConnectorWriteSetting +from .store_write_settings_py3 import StoreWriteSettings -class AzureBlobStorageWriteSetting(ConnectorWriteSetting): +class AzureBlobStorageWriteSettings(StoreWriteSettings): """Azure blob write settings. All required parameters must be populated in order to send to Azure. @@ -42,4 +42,4 @@ class AzureBlobStorageWriteSetting(ConnectorWriteSetting): } def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: - super(AzureBlobStorageWriteSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings.py similarity index 94% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings.py index 0f0dfe7f7c58..213d69966baf 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings.py @@ -9,10 +9,10 @@ # regenerated. # -------------------------------------------------------------------------- -from .connector_read_setting import ConnectorReadSetting +from .store_read_settings import StoreReadSettings -class AzureDataLakeStoreReadSetting(ConnectorReadSetting): +class AzureDataLakeStoreReadSettings(StoreReadSettings): """Azure data lake store read settings. All required parameters must be populated in order to send to Azure. 
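The write-settings side follows the same pattern. A sketch for the renamed blob writer, against the py3 signature shown above (the copy_behavior value is an assumption; the model types it only as object):

from azure.mgmt.datafactory.models import AzureBlobStorageWriteSettings  # formerly AzureBlobStorageWriteSetting

write_settings = AzureBlobStorageWriteSettings(
    type='AzureBlobStorageWriteSettings',  # required by the py3 signature above
    max_concurrent_connections=4,
    copy_behavior='PreserveHierarchy',     # assumed copy-behavior value
)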
@@ -64,7 +64,7 @@ class AzureDataLakeStoreReadSetting(ConnectorReadSetting): } def __init__(self, **kwargs): - super(AzureDataLakeStoreReadSetting, self).__init__(**kwargs) + super(AzureDataLakeStoreReadSettings, self).__init__(**kwargs) self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings_py3.py similarity index 91% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting_py3.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings_py3.py index b9159463d681..b4bccc5e78a3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings_py3.py @@ -9,10 +9,10 @@ # regenerated. # -------------------------------------------------------------------------- -from .connector_read_setting_py3 import ConnectorReadSetting +from .store_read_settings_py3 import StoreReadSettings -class AzureDataLakeStoreReadSetting(ConnectorReadSetting): +class AzureDataLakeStoreReadSettings(StoreReadSettings): """Azure data lake store read settings. All required parameters must be populated in order to send to Azure. @@ -64,7 +64,7 @@ class AzureDataLakeStoreReadSetting(ConnectorReadSetting): } def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: - super(AzureDataLakeStoreReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureDataLakeStoreReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings.py similarity index 89% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings.py index d7875f545e77..a372606d86ec 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings.py @@ -9,10 +9,10 @@ # regenerated. 
# -------------------------------------------------------------------------- -from .connector_write_setting import ConnectorWriteSetting +from .store_write_settings import StoreWriteSettings -class AzureDataLakeStoreWriteSetting(ConnectorWriteSetting): +class AzureDataLakeStoreWriteSettings(StoreWriteSettings): """Azure data lake store write settings. All required parameters must be populated in order to send to Azure. @@ -42,4 +42,4 @@ class AzureDataLakeStoreWriteSetting(ConnectorWriteSetting): } def __init__(self, **kwargs): - super(AzureDataLakeStoreWriteSetting, self).__init__(**kwargs) + super(AzureDataLakeStoreWriteSettings, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings_py3.py similarity index 83% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting_py3.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings_py3.py index e05ddcbaeaac..a48cade879c6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings_py3.py @@ -9,10 +9,10 @@ # regenerated. # -------------------------------------------------------------------------- -from .connector_write_setting_py3 import ConnectorWriteSetting +from .store_write_settings_py3 import StoreWriteSettings -class AzureDataLakeStoreWriteSetting(ConnectorWriteSetting): +class AzureDataLakeStoreWriteSettings(StoreWriteSettings): """Azure data lake store write settings. All required parameters must be populated in order to send to Azure. @@ -42,4 +42,4 @@ class AzureDataLakeStoreWriteSetting(ConnectorWriteSetting): } def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: - super(AzureDataLakeStoreWriteSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py index af2505be7a5c..9aae64af8da0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py @@ -41,8 +41,9 @@ class AzureSearchIndexSink(CopySink): :param type: Required. Constant filled by server. :type type: str :param write_behavior: Specify the write behavior when upserting documents - into Azure Search Index. - :type write_behavior: object + into Azure Search Index. 
Possible values include: 'Merge', 'Upload' + :type write_behavior: str or + ~azure.mgmt.datafactory.models.AzureSearchIndexWriteBehaviorType """ _validation = { @@ -57,7 +58,7 @@ class AzureSearchIndexSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py index 9e57f2f1feb3..3cd887a2512c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py @@ -41,8 +41,9 @@ class AzureSearchIndexSink(CopySink): :param type: Required. Constant filled by server. :type type: str :param write_behavior: Specify the write behavior when upserting documents - into Azure Search Index. - :type write_behavior: object + into Azure Search Index. Possible values include: 'Merge', 'Upload' + :type write_behavior: str or + ~azure.mgmt.datafactory.models.AzureSearchIndexWriteBehaviorType """ _validation = { @@ -57,7 +58,7 @@ class AzureSearchIndexSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py index 8a52f03cd5ba..e7ba96c18682 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py @@ -42,8 +42,11 @@ class CassandraSource(CopySource): the client application. Cassandra checks the specified number of Cassandra servers for data to satisfy the read request. Must be one of cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is - case-insensitive. - :type consistency_level: object + case-insensitive. 
Possible values include: 'ALL', 'EACH_QUORUM', 'QUORUM', + 'LOCAL_QUORUM', 'ONE', 'TWO', 'THREE', 'LOCAL_ONE', 'SERIAL', + 'LOCAL_SERIAL' + :type consistency_level: str or + ~azure.mgmt.datafactory.models.CassandraSourceReadConsistencyLevels """ _validation = { @@ -57,7 +60,7 @@ class CassandraSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, - 'consistency_level': {'key': 'consistencyLevel', 'type': 'object'}, + 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py index 6957385bab86..bd95d158b868 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py @@ -42,8 +42,11 @@ class CassandraSource(CopySource): the client application. Cassandra checks the specified number of Cassandra servers for data to satisfy the read request. Must be one of cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is - case-insensitive. - :type consistency_level: object + case-insensitive. Possible values include: 'ALL', 'EACH_QUORUM', 'QUORUM', + 'LOCAL_QUORUM', 'ONE', 'TWO', 'THREE', 'LOCAL_ONE', 'SERIAL', + 'LOCAL_SERIAL' + :type consistency_level: str or + ~azure.mgmt.datafactory.models.CassandraSourceReadConsistencyLevels """ _validation = { @@ -57,7 +60,7 @@ class CassandraSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, - 'consistency_level': {'key': 'consistencyLevel', 'type': 'object'}, + 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, consistency_level=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py index 99918f1e245c..c261c385de8d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py @@ -26,10 +26,10 @@ class CopySource(Model): EloquaSource, DrillSource, CouchbaseSource, ConcurSource, AzurePostgreSqlSource, AmazonMWSSource, HttpSource, AzureBlobFSSource, AzureDataLakeStoreSource, Office365Source, CosmosDbMongoDbApiSource, - MongoDbV2Source, MongoDbSource, CassandraSource, WebSource, TeradataSource, - OracleSource, AzureDataExplorerSource, AzureMySqlSource, HdfsSource, - FileSystemSource, SqlDWSource, AzureSqlSource, SqlServerSource, SqlSource, - RestSource, SapTableSource, SapOpenHubSource, SapHanaSource, SapEccSource, + MongoDbV2Source, MongoDbSource, CassandraSource, WebSource, OracleSource, + AzureDataExplorerSource, AzureMySqlSource, HdfsSource, FileSystemSource, + SqlDWSource, AzureSqlSource, SqlServerSource, SqlSource, RestSource, + SapTableSource, SapOpenHubSource, SapHanaSource, SapEccSource, SapCloudForCustomerSource, SalesforceSource, RelationalSource, DynamicsSource, 
DocumentDbCollectionSource, BlobSource, AzureTableSource, DelimitedTextSource, ParquetSource @@ -67,7 +67,7 @@ class CopySource(Model): } _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 
'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py index 9f301bc3211e..5a0b7d807b7f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py @@ -26,10 +26,10 @@ class CopySource(Model): EloquaSource, DrillSource, CouchbaseSource, ConcurSource, AzurePostgreSqlSource, AmazonMWSSource, HttpSource, AzureBlobFSSource, AzureDataLakeStoreSource, Office365Source, CosmosDbMongoDbApiSource, - MongoDbV2Source, MongoDbSource, CassandraSource, WebSource, TeradataSource, - OracleSource, AzureDataExplorerSource, AzureMySqlSource, HdfsSource, - FileSystemSource, SqlDWSource, AzureSqlSource, SqlServerSource, SqlSource, - RestSource, SapTableSource, SapOpenHubSource, SapHanaSource, SapEccSource, + MongoDbV2Source, MongoDbSource, CassandraSource, WebSource, OracleSource, + AzureDataExplorerSource, AzureMySqlSource, HdfsSource, FileSystemSource, + SqlDWSource, AzureSqlSource, SqlServerSource, SqlSource, RestSource, + SapTableSource, SapOpenHubSource, SapHanaSource, SapEccSource, SapCloudForCustomerSource, SalesforceSource, RelationalSource, DynamicsSource, DocumentDbCollectionSource, BlobSource, AzureTableSource, DelimitedTextSource, ParquetSource @@ -67,7 +67,7 @@ class CopySource(Model): } _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 
'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 
'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py index ded527b2602a..f6b7388285d6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py @@ -340,6 +340,20 @@ class WebActivityMethod(str, Enum): delete = "DELETE" +class CassandraSourceReadConsistencyLevels(str, Enum): + + all = "ALL" + each_quorum = "EACH_QUORUM" + quorum = "QUORUM" + local_quorum = "LOCAL_QUORUM" + one = "ONE" + two = "TWO" + three = "THREE" + local_one = "LOCAL_ONE" + serial = "SERIAL" + local_serial = "LOCAL_SERIAL" + + class StoredProcedureParameterType(str, Enum): string = "String" @@ -351,6 +365,12 @@ class StoredProcedureParameterType(str, Enum): date_enum = "Date" +class SalesforceSourceReadBehavior(str, Enum): + + query = "Query" + query_all = "QueryAll" + + class HDInsightActivityDebugInfoOption(str, Enum): none = "None" @@ -358,12 +378,30 @@ class HDInsightActivityDebugInfoOption(str, Enum): failure = "Failure" +class SalesforceSinkWriteBehavior(str, Enum): + + insert = "Insert" + upsert = "Upsert" + + +class AzureSearchIndexWriteBehaviorType(str, Enum): + + merge = "Merge" + upload = "Upload" + + class PolybaseSettingsRejectType(str, Enum): value = "value" percentage = "percentage" +class SapCloudForCustomerSinkWriteBehavior(str, Enum): + + insert = "Insert" + update = "Update" + + class WebHookActivityMethod(str, Enum): post = "POST" diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py index c95b5c3da543..76d8375b3da9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py @@ -34,7 +34,7 @@ class Dataset(Model): SqlServerTableDataset, SapOpenHubTableDataset, SapHanaTableDataset, SapEccResourceDataset, SapCloudForCustomerResourceDataset, SalesforceObjectDataset, RelationalTableDataset, 
AzureMySqlTableDataset, - TeradataTableDataset, OracleTableDataset, ODataResourceDataset, + OracleTableDataset, ODataResourceDataset, CosmosDbMongoDbApiCollectionDataset, MongoDbV2CollectionDataset, MongoDbCollectionDataset, FileShareDataset, Office365Dataset, AzureBlobFSDataset, AzureDataLakeStoreDataset, DynamicsEntityDataset, @@ -90,7 +90,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 
'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py index c0cfdb972550..c793e32f2251 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py @@ -34,7 +34,7 @@ class Dataset(Model): SqlServerTableDataset, SapOpenHubTableDataset, SapHanaTableDataset, SapEccResourceDataset, SapCloudForCustomerResourceDataset, SalesforceObjectDataset, RelationalTableDataset, AzureMySqlTableDataset, - TeradataTableDataset, OracleTableDataset, ODataResourceDataset, + OracleTableDataset, ODataResourceDataset, CosmosDbMongoDbApiCollectionDataset, 
MongoDbV2CollectionDataset, MongoDbCollectionDataset, FileShareDataset, Office365Dataset, AzureBlobFSDataset, AzureDataLakeStoreDataset, DynamicsEntityDataset, @@ -90,7 +90,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 
'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings.py similarity index 89% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings.py index 004eb595a05e..364b103c426a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings.py @@ -9,10 +9,10 @@ # regenerated. 
# -------------------------------------------------------------------------- -from .format_read_setting import FormatReadSetting +from .format_read_settings import FormatReadSettings -class DelimitedTextReadSetting(FormatReadSetting): +class DelimitedTextReadSettings(FormatReadSettings): """Delimited text read settings. All required parameters must be populated in order to send to Azure. @@ -39,5 +39,5 @@ class DelimitedTextReadSetting(FormatReadSetting): } def __init__(self, **kwargs): - super(DelimitedTextReadSetting, self).__init__(**kwargs) + super(DelimitedTextReadSettings, self).__init__(**kwargs) self.skip_line_count = kwargs.get('skip_line_count', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings_py3.py similarity index 86% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting_py3.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings_py3.py index 87915fcb3db7..62aa0327cfb9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings_py3.py @@ -9,10 +9,10 @@ # regenerated. # -------------------------------------------------------------------------- -from .format_read_setting_py3 import FormatReadSetting +from .format_read_settings_py3 import FormatReadSettings -class DelimitedTextReadSetting(FormatReadSetting): +class DelimitedTextReadSettings(FormatReadSettings): """Delimited text read settings. All required parameters must be populated in order to send to Azure. @@ -39,5 +39,5 @@ class DelimitedTextReadSetting(FormatReadSetting): } def __init__(self, *, type: str, additional_properties=None, skip_line_count=None, **kwargs) -> None: - super(DelimitedTextReadSetting, self).__init__(additional_properties=additional_properties, type=type, **kwargs) + super(DelimitedTextReadSettings, self).__init__(additional_properties=additional_properties, type=type, **kwargs) self.skip_line_count = skip_line_count diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py index ae93f209c8b3..15e0e590b4ee 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py @@ -41,10 +41,10 @@ class DelimitedTextSink(CopySink): :param type: Required. Constant filled by server. :type type: str :param store_settings: DelimitedText store settings. - :type store_settings: ~azure.mgmt.datafactory.models.ConnectorWriteSetting + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings :param format_settings: DelimitedText format settings. 
:type format_settings: - ~azure.mgmt.datafactory.models.DelimitedTextWriteSetting + ~azure.mgmt.datafactory.models.DelimitedTextWriteSettings """ _validation = { @@ -59,8 +59,8 @@ class DelimitedTextSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorWriteSetting'}, - 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSetting'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py index a1ba953a2662..6481f8021527 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py @@ -41,10 +41,10 @@ class DelimitedTextSink(CopySink): :param type: Required. Constant filled by server. :type type: str :param store_settings: DelimitedText store settings. - :type store_settings: ~azure.mgmt.datafactory.models.ConnectorWriteSetting + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings :param format_settings: DelimitedText format settings. :type format_settings: - ~azure.mgmt.datafactory.models.DelimitedTextWriteSetting + ~azure.mgmt.datafactory.models.DelimitedTextWriteSettings """ _validation = { @@ -59,8 +59,8 @@ class DelimitedTextSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorWriteSetting'}, - 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSetting'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'}, } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py index 9f2067d24b9c..10a842ca374a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py @@ -34,10 +34,10 @@ class DelimitedTextSource(CopySource): :param type: Required. Constant filled by server. :type type: str :param store_settings: DelimitedText store settings. - :type store_settings: ~azure.mgmt.datafactory.models.ConnectorReadSetting + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param format_settings: DelimitedText format settings. 
:type format_settings: - ~azure.mgmt.datafactory.models.DelimitedTextReadSetting + ~azure.mgmt.datafactory.models.DelimitedTextReadSettings """ _validation = { @@ -50,8 +50,8 @@ class DelimitedTextSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorReadSetting'}, - 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSetting'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py index b158f97bde81..e551e32c847e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py @@ -34,10 +34,10 @@ class DelimitedTextSource(CopySource): :param type: Required. Constant filled by server. :type type: str :param store_settings: DelimitedText store settings. - :type store_settings: ~azure.mgmt.datafactory.models.ConnectorReadSetting + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param format_settings: DelimitedText format settings. :type format_settings: - ~azure.mgmt.datafactory.models.DelimitedTextReadSetting + ~azure.mgmt.datafactory.models.DelimitedTextReadSettings """ _validation = { @@ -50,8 +50,8 @@ class DelimitedTextSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorReadSetting'}, - 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSetting'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings.py similarity index 90% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings.py index 21fe168f1316..5e0d8db319e5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings.py @@ -9,10 +9,10 @@ # regenerated. 
# -------------------------------------------------------------------------- -from .format_write_setting import FormatWriteSetting +from .format_write_settings import FormatWriteSettings -class DelimitedTextWriteSetting(FormatWriteSetting): +class DelimitedTextWriteSettings(FormatWriteSettings): """Delimited text write settings. All required parameters must be populated in order to send to Azure. @@ -44,6 +44,6 @@ class DelimitedTextWriteSetting(FormatWriteSetting): } def __init__(self, **kwargs): - super(DelimitedTextWriteSetting, self).__init__(**kwargs) + super(DelimitedTextWriteSettings, self).__init__(**kwargs) self.quote_all_text = kwargs.get('quote_all_text', None) self.file_extension = kwargs.get('file_extension', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings_py3.py similarity index 88% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting_py3.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings_py3.py index ac0e3b2d00cc..2be019ab1e6a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings_py3.py @@ -9,10 +9,10 @@ # regenerated. # -------------------------------------------------------------------------- -from .format_write_setting_py3 import FormatWriteSetting +from .format_write_settings_py3 import FormatWriteSettings -class DelimitedTextWriteSetting(FormatWriteSetting): +class DelimitedTextWriteSettings(FormatWriteSettings): """Delimited text write settings. All required parameters must be populated in order to send to Azure. @@ -44,6 +44,6 @@ class DelimitedTextWriteSetting(FormatWriteSetting): } def __init__(self, *, type: str, file_extension, additional_properties=None, quote_all_text=None, **kwargs) -> None: - super(DelimitedTextWriteSetting, self).__init__(additional_properties=additional_properties, type=type, **kwargs) + super(DelimitedTextWriteSettings, self).__init__(additional_properties=additional_properties, type=type, **kwargs) self.quote_all_text = quote_all_text self.file_extension = file_extension diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py index 5afce6ced25b..45bac7b52064 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py @@ -44,7 +44,8 @@ class DynamicsSink(CopySink): :param type: Required. Constant filled by server. :type type: str :ivar write_behavior: Required. The write behavior for the operation. - :vartype write_behavior: object + Default value: "Upsert". + :vartype write_behavior: str :param ignore_null_values: The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). 
@@ -64,11 +65,11 @@ class DynamicsSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, } - write_behavior = None + write_behavior = "Upsert" def __init__(self, **kwargs): super(DynamicsSink, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py index ffdb08363bfd..5f736f9cf658 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py @@ -44,7 +44,8 @@ class DynamicsSink(CopySink): :param type: Required. Constant filled by server. :type type: str :ivar write_behavior: Required. The write behavior for the operation. - :vartype write_behavior: object + Default value: "Upsert". + :vartype write_behavior: str :param ignore_null_values: The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). @@ -64,11 +65,11 @@ class DynamicsSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, } - write_behavior = None + write_behavior = "Upsert" def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None: super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings.py similarity index 95% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings.py index 6ba2a5f56b79..da9d0809e03a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings.py @@ -9,10 +9,10 @@ # regenerated. # -------------------------------------------------------------------------- -from .connector_read_setting import ConnectorReadSetting +from .store_read_settings import StoreReadSettings -class FileServerReadSetting(ConnectorReadSetting): +class FileServerReadSettings(StoreReadSettings): """File server read settings. 
All required parameters must be populated in order to send to Azure. @@ -64,7 +64,7 @@ class FileServerReadSetting(ConnectorReadSetting): } def __init__(self, **kwargs): - super(FileServerReadSetting, self).__init__(**kwargs) + super(FileServerReadSettings, self).__init__(**kwargs) self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings_py3.py similarity index 92% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting_py3.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings_py3.py index 4393692d63f3..1fadb49b1795 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings_py3.py @@ -9,10 +9,10 @@ # regenerated. # -------------------------------------------------------------------------- -from .connector_read_setting_py3 import ConnectorReadSetting +from .store_read_settings_py3 import StoreReadSettings -class FileServerReadSetting(ConnectorReadSetting): +class FileServerReadSettings(StoreReadSettings): """File server read settings. All required parameters must be populated in order to send to Azure. @@ -64,7 +64,7 @@ class FileServerReadSetting(ConnectorReadSetting): } def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: - super(FileServerReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(FileServerReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings.py similarity index 89% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings.py index 9342210abdfb..f254e46452de 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings.py @@ -9,10 +9,10 @@ # regenerated. # -------------------------------------------------------------------------- -from .connector_write_setting import ConnectorWriteSetting +from .store_write_settings import StoreWriteSettings -class FileServerWriteSetting(ConnectorWriteSetting): +class FileServerWriteSettings(StoreWriteSettings): """File server write settings. All required parameters must be populated in order to send to Azure. 
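A minimal sketch of the renamed FileServerReadSettings, based on the py3 signature above; the type value is an assumed discriminator string and the wildcard is illustrative:

from azure.mgmt.datafactory.models import FileServerReadSettings

read_settings = FileServerReadSettings(
    type='FileServerReadSettings',  # assumed discriminator value
    recursive=True,
    wildcard_file_name='*.csv',
    enable_partition_discovery=False,
)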
@@ -42,4 +42,4 @@ class FileServerWriteSetting(ConnectorWriteSetting): } def __init__(self, **kwargs): - super(FileServerWriteSetting, self).__init__(**kwargs) + super(FileServerWriteSettings, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings_py3.py similarity index 88% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting_py3.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings_py3.py index 62196ff73838..26a48aca46f6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings_py3.py @@ -9,11 +9,11 @@ # regenerated. # -------------------------------------------------------------------------- -from .connector_write_setting_py3 import ConnectorWriteSetting +from .store_write_settings_py3 import StoreWriteSettings -class AzureBlobFSWriteSetting(ConnectorWriteSetting): - """Azure blobFS write settings. +class FileServerWriteSettings(StoreWriteSettings): + """File server write settings. All required parameters must be populated in order to send to Azure. @@ -42,4 +42,4 @@ class AzureBlobFSWriteSetting(ConnectorWriteSetting): } def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: - super(AzureBlobFSWriteSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + super(FileServerWriteSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings.py similarity index 93% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_setting.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings.py index 730cec9f525f..d5213138b96a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_setting.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings.py @@ -12,7 +12,7 @@ from msrest.serialization import Model -class FormatReadSetting(Model): +class FormatReadSettings(Model): """Format read settings. All required parameters must be populated in order to send to Azure. 
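Note that git records the py3 counterpart below as a rename of azure_blob_fs_write_setting_py3.py purely on content similarity; the class body is rewritten to FileServerWriteSettings. A minimal constructor sketch, with the type and copy_behavior values assumed:

from azure.mgmt.datafactory.models import FileServerWriteSettings

write_settings = FileServerWriteSettings(
    type='FileServerWriteSettings',     # assumed discriminator value
    copy_behavior='PreserveHierarchy',  # assumed copy-behavior string
)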
@@ -34,6 +34,6 @@ class FormatReadSetting(Model): } def __init__(self, **kwargs): - super(FormatReadSetting, self).__init__(**kwargs) + super(FormatReadSettings, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.type = kwargs.get('type', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings_py3.py similarity index 93% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_setting_py3.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings_py3.py index ed68bf35f009..326da0277b89 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_setting_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings_py3.py @@ -12,7 +12,7 @@ from msrest.serialization import Model -class FormatReadSetting(Model): +class FormatReadSettings(Model): """Format read settings. All required parameters must be populated in order to send to Azure. @@ -34,6 +34,6 @@ class FormatReadSetting(Model): } def __init__(self, *, type: str, additional_properties=None, **kwargs) -> None: - super(FormatReadSetting, self).__init__(**kwargs) + super(FormatReadSettings, self).__init__(**kwargs) self.additional_properties = additional_properties self.type = type diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings.py similarity index 92% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings.py index 0fd6966859d5..2100c6055d0d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings.py @@ -12,7 +12,7 @@ from msrest.serialization import Model -class FormatWriteSetting(Model): +class FormatWriteSettings(Model): """Format write settings. All required parameters must be populated in order to send to Azure. @@ -34,6 +34,6 @@ class FormatWriteSetting(Model): } def __init__(self, **kwargs): - super(FormatWriteSetting, self).__init__(**kwargs) + super(FormatWriteSettings, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.type = kwargs.get('type', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings_py3.py similarity index 92% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting_py3.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings_py3.py index 3e5609066208..4150eceffc1c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings_py3.py @@ -12,7 +12,7 @@ from msrest.serialization import Model -class FormatWriteSetting(Model): +class FormatWriteSettings(Model): """Format write settings. 
All required parameters must be populated in order to send to Azure. @@ -34,6 +34,6 @@ class FormatWriteSetting(Model): } def __init__(self, *, type: str, additional_properties=None, **kwargs) -> None: - super(FormatWriteSetting, self).__init__(**kwargs) + super(FormatWriteSettings, self).__init__(**kwargs) self.additional_properties = additional_properties self.type = type diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings.py similarity index 94% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings.py index 137a56948deb..e023f9ae91f7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings.py @@ -9,10 +9,10 @@ # regenerated. # -------------------------------------------------------------------------- -from .connector_read_setting import ConnectorReadSetting +from .store_read_settings import StoreReadSettings -class FtpReadSetting(ConnectorReadSetting): +class FtpReadSettings(StoreReadSettings): """Ftp read settings. All required parameters must be populated in order to send to Azure. @@ -56,7 +56,7 @@ class FtpReadSetting(ConnectorReadSetting): } def __init__(self, **kwargs): - super(FtpReadSetting, self).__init__(**kwargs) + super(FtpReadSettings, self).__init__(**kwargs) self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings_py3.py similarity index 90% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting_py3.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings_py3.py index 5294301e4fd8..748d306307ac 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings_py3.py @@ -9,10 +9,10 @@ # regenerated. # -------------------------------------------------------------------------- -from .connector_read_setting_py3 import ConnectorReadSetting +from .store_read_settings_py3 import StoreReadSettings -class FtpReadSetting(ConnectorReadSetting): +class FtpReadSettings(StoreReadSettings): """Ftp read settings. All required parameters must be populated in order to send to Azure. 
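The renamed FormatReadSettings/FormatWriteSettings classes remain the polymorphic bases for format-level settings. Assuming both names are exported from the models namespace, the inheritance can be sketched as:

from azure.mgmt.datafactory.models import (
    DelimitedTextWriteSettings,
    FormatWriteSettings,
)

ws = DelimitedTextWriteSettings(type='DelimitedTextWriteSettings',
                                file_extension='.txt')
assert isinstance(ws, FormatWriteSettings)  # renamed base class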
@@ -56,7 +56,7 @@ class FtpReadSetting(ConnectorReadSetting): } def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, use_binary_transfer: bool=None, **kwargs) -> None: - super(FtpReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(FtpReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings.py similarity index 95% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings.py index 4fdadbc2fcd0..ec4b98c50385 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings.py @@ -9,10 +9,10 @@ # regenerated. # -------------------------------------------------------------------------- -from .connector_read_setting import ConnectorReadSetting +from .store_read_settings import StoreReadSettings -class HdfsReadSetting(ConnectorReadSetting): +class HdfsReadSettings(StoreReadSettings): """HDFS read settings. All required parameters must be populated in order to send to Azure. @@ -67,7 +67,7 @@ class HdfsReadSetting(ConnectorReadSetting): } def __init__(self, **kwargs): - super(HdfsReadSetting, self).__init__(**kwargs) + super(HdfsReadSettings, self).__init__(**kwargs) self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings_py3.py similarity index 93% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting_py3.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings_py3.py index 164a6f497e52..c37a045ec93c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings_py3.py @@ -9,10 +9,10 @@ # regenerated. # -------------------------------------------------------------------------- -from .connector_read_setting_py3 import ConnectorReadSetting +from .store_read_settings_py3 import StoreReadSettings -class HdfsReadSetting(ConnectorReadSetting): +class HdfsReadSettings(StoreReadSettings): """HDFS read settings. All required parameters must be populated in order to send to Azure. 
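A minimal sketch of the renamed FtpReadSettings, following the py3 signature above (type value assumed, wildcard illustrative):

from azure.mgmt.datafactory.models import FtpReadSettings

ftp_settings = FtpReadSettings(
    type='FtpReadSettings',  # assumed discriminator value
    recursive=True,
    wildcard_file_name='*.txt',
    use_binary_transfer=True,
)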
@@ -67,7 +67,7 @@ class HdfsReadSetting(ConnectorReadSetting): } def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, distcp_settings=None, **kwargs) -> None: - super(HdfsReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(HdfsReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings.py similarity index 94% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings.py index 696a9fdb3faf..a7c175da3489 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings.py @@ -9,10 +9,10 @@ # regenerated. # -------------------------------------------------------------------------- -from .connector_read_setting import ConnectorReadSetting +from .store_read_settings import StoreReadSettings -class HttpReadSetting(ConnectorReadSetting): - """Sftp read settings. +class HttpReadSettings(StoreReadSettings): + """Http read settings. All required parameters must be populated in order to send to Azure. @@ -56,7 +56,7 @@ class HttpReadSetting(ConnectorReadSetting): } def __init__(self, **kwargs): - super(HttpReadSetting, self).__init__(**kwargs) + super(HttpReadSettings, self).__init__(**kwargs) self.request_method = kwargs.get('request_method', None) self.request_body = kwargs.get('request_body', None) self.additional_headers = kwargs.get('additional_headers', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings_py3.py similarity index 90% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting_py3.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings_py3.py index 3d5d75a80785..7cea9207c996 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings_py3.py @@ -9,10 +9,10 @@ # regenerated. # -------------------------------------------------------------------------- -from .connector_read_setting_py3 import ConnectorReadSetting +from .store_read_settings_py3 import StoreReadSettings -class HttpReadSetting(ConnectorReadSetting): - """Sftp read settings. +class HttpReadSettings(StoreReadSettings): + """Http read settings. All required parameters must be populated in order to send to Azure. 
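Likewise for HdfsReadSettings, whose py3 signature above also accepts distcp_settings; a hedged sketch with an assumed type value:

from azure.mgmt.datafactory.models import HdfsReadSettings

hdfs_settings = HdfsReadSettings(
    type='HdfsReadSettings',  # assumed discriminator value
    recursive=True,
    enable_partition_discovery=True,
)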
@@ -56,7 +56,7 @@ class HttpReadSetting(ConnectorReadSetting): } def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, request_method=None, request_body=None, additional_headers=None, request_timeout=None, **kwargs) -> None: - super(HttpReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(HttpReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) self.request_method = request_method self.request_body = request_body self.additional_headers = additional_headers diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py deleted file mode 100644 index ed970fd7729a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class OraclePartitionSettings(Model): - """The settings that will be leveraged for oracle source partitioning. - - :param partition_names: Names of the physical partitions of oracle table. - :type partition_names: object - :param partition_column_name: The name of the column in integer type that - will be used for proceeding range partitioning. Type: string (or - Expression with resultType string). - :type partition_column_name: object - :param partition_upper_bound: The maximum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_upper_bound: object - :param partition_lower_bound: The minimum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). 
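A minimal sketch of the renamed HttpReadSettings, based on the py3 signature above; the header string is illustrative:

from azure.mgmt.datafactory.models import HttpReadSettings

http_settings = HttpReadSettings(
    type='HttpReadSettings',  # assumed discriminator value
    request_method='GET',
    additional_headers='x-custom-header: value',  # illustrative header
)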
- :type partition_lower_bound: object - """ - - _attribute_map = { - 'partition_names': {'key': 'partitionNames', 'type': 'object'}, - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, - 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, - 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(OraclePartitionSettings, self).__init__(**kwargs) - self.partition_names = kwargs.get('partition_names', None) - self.partition_column_name = kwargs.get('partition_column_name', None) - self.partition_upper_bound = kwargs.get('partition_upper_bound', None) - self.partition_lower_bound = kwargs.get('partition_lower_bound', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py deleted file mode 100644 index c3d00b09ad90..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class OraclePartitionSettings(Model): - """The settings that will be leveraged for oracle source partitioning. - - :param partition_names: Names of the physical partitions of oracle table. - :type partition_names: object - :param partition_column_name: The name of the column in integer type that - will be used for proceeding range partitioning. Type: string (or - Expression with resultType string). - :type partition_column_name: object - :param partition_upper_bound: The maximum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_upper_bound: object - :param partition_lower_bound: The minimum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). 
- :type partition_lower_bound: object - """ - - _attribute_map = { - 'partition_names': {'key': 'partitionNames', 'type': 'object'}, - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, - 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, - 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, - } - - def __init__(self, *, partition_names=None, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None: - super(OraclePartitionSettings, self).__init__(**kwargs) - self.partition_names = partition_names - self.partition_column_name = partition_column_name - self.partition_upper_bound = partition_upper_bound - self.partition_lower_bound = partition_lower_bound diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py index 84ad79ed19c7..12b3aa31353f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py @@ -40,13 +40,6 @@ class OracleSource(CopySource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object - :param partition_option: The partition mechanism that will be used for - oracle read in parallel. - :type partition_option: object - :param partition_settings: The settings that will be leveraged for oracle - source partitioning. - :type partition_settings: - ~azure.mgmt.datafactory.models.OraclePartitionSettings """ _validation = { @@ -61,14 +54,10 @@ class OracleSource(CopySource): 'type': {'key': 'type', 'type': 'str'}, 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'object'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, } def __init__(self, **kwargs): super(OracleSource, self).__init__(**kwargs) self.oracle_reader_query = kwargs.get('oracle_reader_query', None) self.query_timeout = kwargs.get('query_timeout', None) - self.partition_option = kwargs.get('partition_option', None) - self.partition_settings = kwargs.get('partition_settings', None) self.type = 'OracleSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py index dfcbd2e0330d..43afe27fda2f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py @@ -40,13 +40,6 @@ class OracleSource(CopySource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object - :param partition_option: The partition mechanism that will be used for - oracle read in parallel. - :type partition_option: object - :param partition_settings: The settings that will be leveraged for oracle - source partitioning. 
- :type partition_settings: - ~azure.mgmt.datafactory.models.OraclePartitionSettings """ _validation = { @@ -61,14 +54,10 @@ class OracleSource(CopySource): 'type': {'key': 'type', 'type': 'str'}, 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'object'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, oracle_reader_query=None, query_timeout=None, partition_option=None, partition_settings=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, oracle_reader_query=None, query_timeout=None, **kwargs) -> None: super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.oracle_reader_query = oracle_reader_query self.query_timeout = query_timeout - self.partition_option = partition_option - self.partition_settings = partition_settings self.type = 'OracleSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py index 38c634ed10dd..dea3e0f8fc52 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py @@ -41,7 +41,7 @@ class ParquetSink(CopySink): :param type: Required. Constant filled by server. :type type: str :param store_settings: Parquet store settings. - :type store_settings: ~azure.mgmt.datafactory.models.ConnectorWriteSetting + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings """ _validation = { @@ -56,7 +56,7 @@ class ParquetSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorWriteSetting'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py index 96c0c1b57926..463044fef83f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py @@ -41,7 +41,7 @@ class ParquetSink(CopySink): :param type: Required. Constant filled by server. :type type: str :param store_settings: Parquet store settings. 
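With the partition options removed, OracleSource reduces to the reader query and timeout; a minimal sketch with an illustrative query (the timeout matches the pattern quoted in the docstring above):

from azure.mgmt.datafactory.models import OracleSource

source = OracleSource(
    oracle_reader_query='SELECT * FROM MyTable',  # illustrative query
    query_timeout='02:00:00',
)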
- :type store_settings: ~azure.mgmt.datafactory.models.ConnectorWriteSetting + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings """ _validation = { @@ -56,7 +56,7 @@ class ParquetSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorWriteSetting'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py index 02e74641d506..ab888c7361a2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py @@ -34,7 +34,7 @@ class ParquetSource(CopySource): :param type: Required. Constant filled by server. :type type: str :param store_settings: Parquet store settings. - :type store_settings: ~azure.mgmt.datafactory.models.ConnectorReadSetting + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings """ _validation = { @@ -47,7 +47,7 @@ class ParquetSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorReadSetting'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py index bfe077dd9999..332a7b9b8c5e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py @@ -34,7 +34,7 @@ class ParquetSource(CopySource): :param type: Required. Constant filled by server. :type type: str :param store_settings: Parquet store settings. 
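Because store_settings is now typed against the StoreWriteSettings base, any of the renamed *WriteSettings subclasses can be supplied; a hedged pairing sketch:

from azure.mgmt.datafactory.models import FileServerWriteSettings, ParquetSink

sink = ParquetSink(
    store_settings=FileServerWriteSettings(
        type='FileServerWriteSettings',  # assumed discriminator value
    ),
)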
- :type store_settings: ~azure.mgmt.datafactory.models.ConnectorReadSetting + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings """ _validation = { @@ -47,7 +47,7 @@ class ParquetSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorReadSetting'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py index 4d1a93c08915..9a1291bd4bfe 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py @@ -41,8 +41,9 @@ class SalesforceSink(CopySink): :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. Default is - Insert. - :type write_behavior: object + Insert. Possible values include: 'Insert', 'Upsert' + :type write_behavior: str or + ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior :param external_id_field_name: The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). @@ -70,7 +71,7 @@ class SalesforceSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py index ed7591fbb59b..54a56618d01e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py @@ -41,8 +41,9 @@ class SalesforceSink(CopySink): :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. Default is - Insert. - :type write_behavior: object + Insert. Possible values include: 'Insert', 'Upsert' + :type write_behavior: str or + ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior :param external_id_field_name: The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). 
@@ -70,7 +71,7 @@ class SalesforceSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py index 57a10411f487..4f2590c3ab9d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py @@ -37,8 +37,9 @@ class SalesforceSource(CopySource): string). :type query: object :param read_behavior: The read behavior for the operation. Default is - Query. - :type read_behavior: object + Query. Possible values include: 'Query', 'QueryAll' + :type read_behavior: str or + ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior """ _validation = { @@ -52,7 +53,7 @@ class SalesforceSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, - 'read_behavior': {'key': 'readBehavior', 'type': 'object'}, + 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py index 08e6776f5f98..4441e92eaff3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py @@ -37,8 +37,9 @@ class SalesforceSource(CopySource): string). :type query: object :param read_behavior: The read behavior for the operation. Default is - Query. - :type read_behavior: object + Query. Possible values include: 'Query', 'QueryAll' + :type read_behavior: str or + ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior """ _validation = { @@ -52,7 +53,7 @@ class SalesforceSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, - 'read_behavior': {'key': 'readBehavior', 'type': 'object'}, + 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, read_behavior=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py index ae99093f277e..e5a37858abb5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py @@ -41,8 +41,9 @@ class SapCloudForCustomerSink(CopySink): :param type: Required. 
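A short sketch of the Salesforce models after the enum change; msrest string enums accept the plain strings named above ('Insert'/'Upsert' and 'Query'/'QueryAll'), and the external ID field name is illustrative:

from azure.mgmt.datafactory.models import SalesforceSink, SalesforceSource

sink = SalesforceSink(
    write_behavior='Upsert',
    external_id_field_name='MyExternalId__c',  # illustrative field name
)
source = SalesforceSource(read_behavior='QueryAll')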
Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. Default is - 'Insert'. - :type write_behavior: object + 'Insert'. Possible values include: 'Insert', 'Update' + :type write_behavior: str or + ~azure.mgmt.datafactory.models.SapCloudForCustomerSinkWriteBehavior """ _validation = { @@ -57,7 +58,7 @@ class SapCloudForCustomerSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py index bdbc2cefcbd1..29f01fdd6891 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py @@ -41,8 +41,9 @@ class SapCloudForCustomerSink(CopySink): :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. Default is - 'Insert'. - :type write_behavior: object + 'Insert'. Possible values include: 'Insert', 'Update' + :type write_behavior: str or + ~azure.mgmt.datafactory.models.SapCloudForCustomerSinkWriteBehavior """ _validation = { @@ -57,7 +58,7 @@ class SapCloudForCustomerSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings.py deleted file mode 100644 index b688fe16683b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings.py +++ /dev/null @@ -1,47 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SapTablePartitionSettings(Model): - """The settings that will be leveraged for SAP table source partitioning. - - :param partition_column_name: The name of the column that will be used for - proceeding range partitioning. Type: string (or Expression with resultType - string). 
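The same pattern applies to SapCloudForCustomerSink, whose write behavior is now constrained to the two values named above:

from azure.mgmt.datafactory.models import SapCloudForCustomerSink

sink = SapCloudForCustomerSink(write_behavior='Update')  # 'Insert' or 'Update'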
- :type partition_column_name: object - :param partition_upper_bound: The maximum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_upper_bound: object - :param partition_lower_bound: The minimum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_lower_bound: object - :param max_partitions_number: The maximum value of partitions the table - will be split into. Type: integer (or Expression with resultType string). - :type max_partitions_number: object - """ - - _attribute_map = { - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, - 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, - 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, - 'max_partitions_number': {'key': 'maxPartitionsNumber', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SapTablePartitionSettings, self).__init__(**kwargs) - self.partition_column_name = kwargs.get('partition_column_name', None) - self.partition_upper_bound = kwargs.get('partition_upper_bound', None) - self.partition_lower_bound = kwargs.get('partition_lower_bound', None) - self.max_partitions_number = kwargs.get('max_partitions_number', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings_py3.py deleted file mode 100644 index 37bdf610ab35..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings_py3.py +++ /dev/null @@ -1,47 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SapTablePartitionSettings(Model): - """The settings that will be leveraged for SAP table source partitioning. - - :param partition_column_name: The name of the column that will be used for - proceeding range partitioning. Type: string (or Expression with resultType - string). - :type partition_column_name: object - :param partition_upper_bound: The maximum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_upper_bound: object - :param partition_lower_bound: The minimum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_lower_bound: object - :param max_partitions_number: The maximum value of partitions the table - will be split into. Type: integer (or Expression with resultType string). 
- :type max_partitions_number: object - """ - - _attribute_map = { - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, - 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, - 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, - 'max_partitions_number': {'key': 'maxPartitionsNumber', 'type': 'object'}, - } - - def __init__(self, *, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, max_partitions_number=None, **kwargs) -> None: - super(SapTablePartitionSettings, self).__init__(**kwargs) - self.partition_column_name = partition_column_name - self.partition_upper_bound = partition_upper_bound - self.partition_lower_bound = partition_lower_bound - self.max_partitions_number = max_partitions_number diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source.py index 1c52db3eb0f8..e1865aa05535 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source.py @@ -55,13 +55,6 @@ class SapTableSource(CopySource): function module that will be used to read data from SAP Table. Type: string (or Expression with resultType string). :type custom_rfc_read_table_function_module: object - :param partition_option: The partition mechanism that will be used for SAP - table read in parallel. - :type partition_option: object - :param partition_settings: The settings that will be leveraged for SAP - table source partitioning. - :type partition_settings: - ~azure.mgmt.datafactory.models.SapTablePartitionSettings """ _validation = { @@ -80,8 +73,6 @@ class SapTableSource(CopySource): 'rfc_table_options': {'key': 'rfcTableOptions', 'type': 'object'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'object'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, } def __init__(self, **kwargs): @@ -92,6 +83,4 @@ def __init__(self, **kwargs): self.rfc_table_options = kwargs.get('rfc_table_options', None) self.batch_size = kwargs.get('batch_size', None) self.custom_rfc_read_table_function_module = kwargs.get('custom_rfc_read_table_function_module', None) - self.partition_option = kwargs.get('partition_option', None) - self.partition_settings = kwargs.get('partition_settings', None) self.type = 'SapTableSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source_py3.py index 8e8fbdf12002..73e915a14ed3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source_py3.py @@ -55,13 +55,6 @@ class SapTableSource(CopySource): function module that will be used to read data from SAP Table. Type: string (or Expression with resultType string). :type custom_rfc_read_table_function_module: object - :param partition_option: The partition mechanism that will be used for SAP - table read in parallel. 
- :type partition_option: object - :param partition_settings: The settings that will be leveraged for SAP - table source partitioning. - :type partition_settings: - ~azure.mgmt.datafactory.models.SapTablePartitionSettings """ _validation = { @@ -80,11 +73,9 @@ class SapTableSource(CopySource): 'rfc_table_options': {'key': 'rfcTableOptions', 'type': 'object'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'object'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, row_count=None, row_skips=None, rfc_table_fields=None, rfc_table_options=None, batch_size=None, custom_rfc_read_table_function_module=None, partition_option=None, partition_settings=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, row_count=None, row_skips=None, rfc_table_fields=None, rfc_table_options=None, batch_size=None, custom_rfc_read_table_function_module=None, **kwargs) -> None: super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.row_count = row_count self.row_skips = row_skips @@ -92,6 +83,4 @@ def __init__(self, *, additional_properties=None, source_retry_count=None, sourc self.rfc_table_options = rfc_table_options self.batch_size = batch_size self.custom_rfc_read_table_function_module = custom_rfc_read_table_function_module - self.partition_option = partition_option - self.partition_settings = partition_settings self.type = 'SapTableSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings.py similarity index 94% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings.py index e0cd7ea8fda1..5e7b4faf77ad 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings.py @@ -9,10 +9,10 @@ # regenerated. # -------------------------------------------------------------------------- -from .connector_read_setting import ConnectorReadSetting +from .store_read_settings import StoreReadSettings -class SftpReadSetting(ConnectorReadSetting): +class SftpReadSettings(StoreReadSettings): """Sftp read settings. All required parameters must be populated in order to send to Azure. 
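After the partition parameters are dropped, SapTableSource keeps only the row/batch controls shown in the py3 signature above; a minimal sketch with illustrative values:

from azure.mgmt.datafactory.models import SapTableSource

source = SapTableSource(
    row_count=1000,  # illustrative values
    row_skips=0,
    batch_size=200,
)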
@@ -60,7 +60,7 @@ class SftpReadSetting(ConnectorReadSetting): } def __init__(self, **kwargs): - super(SftpReadSetting, self).__init__(**kwargs) + super(SftpReadSettings, self).__init__(**kwargs) self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings_py3.py similarity index 91% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting_py3.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings_py3.py index 39beb756905a..e6c27e3ad08a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings_py3.py @@ -9,10 +9,10 @@ # regenerated. # -------------------------------------------------------------------------- -from .connector_read_setting_py3 import ConnectorReadSetting +from .store_read_settings_py3 import StoreReadSettings -class SftpReadSetting(ConnectorReadSetting): +class SftpReadSettings(StoreReadSettings): """Sftp read settings. All required parameters must be populated in order to send to Azure. @@ -60,7 +60,7 @@ class SftpReadSetting(ConnectorReadSetting): } def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: - super(SftpReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SftpReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings.py similarity index 94% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings.py index e0af269aaafd..c12c0ce8860d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings.py @@ -12,7 +12,7 @@ from msrest.serialization import Model -class ConnectorReadSetting(Model): +class StoreReadSettings(Model): """Connector read setting. All required parameters must be populated in order to send to Azure. 
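A minimal sketch of the renamed SftpReadSettings, per the py3 signature above (type value assumed, paths illustrative):

from azure.mgmt.datafactory.models import SftpReadSettings

sftp_settings = SftpReadSettings(
    type='SftpReadSettings',  # assumed discriminator value
    recursive=True,
    wildcard_folder_path='incoming/*',
    wildcard_file_name='*.csv',
)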
@@ -39,7 +39,7 @@ class ConnectorReadSetting(Model): } def __init__(self, **kwargs): - super(ConnectorReadSetting, self).__init__(**kwargs) + super(StoreReadSettings, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.type = kwargs.get('type', None) self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings_py3.py similarity index 94% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting_py3.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings_py3.py index f6403ade8f71..e2026fd52b93 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings_py3.py @@ -12,7 +12,7 @@ from msrest.serialization import Model -class ConnectorReadSetting(Model): +class StoreReadSettings(Model): """Connector read setting. All required parameters must be populated in order to send to Azure. @@ -39,7 +39,7 @@ class ConnectorReadSetting(Model): } def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, **kwargs) -> None: - super(ConnectorReadSetting, self).__init__(**kwargs) + super(StoreReadSettings, self).__init__(**kwargs) self.additional_properties = additional_properties self.type = type self.max_concurrent_connections = max_concurrent_connections diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings.py similarity index 94% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings.py index 65daf9f07794..aeaebc1a190e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings.py @@ -12,7 +12,7 @@ from msrest.serialization import Model -class ConnectorWriteSetting(Model): +class StoreWriteSettings(Model): """Connector write settings. All required parameters must be populated in order to send to Azure. 
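Since copy sources now type store_settings as the StoreReadSettings base, any of the renamed *ReadSettings subclasses can be passed; a hedged sketch:

from azure.mgmt.datafactory.models import ParquetSource, SftpReadSettings

source = ParquetSource(
    store_settings=SftpReadSettings(
        type='SftpReadSettings',  # assumed discriminator value
        recursive=True,
    ),
)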
@@ -42,7 +42,7 @@ class ConnectorWriteSetting(Model): } def __init__(self, **kwargs): - super(ConnectorWriteSetting, self).__init__(**kwargs) + super(StoreWriteSettings, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.type = kwargs.get('type', None) self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings_py3.py similarity index 95% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting_py3.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings_py3.py index 7f4ea65c916d..d3dfa76faddc 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings_py3.py @@ -12,7 +12,7 @@ from msrest.serialization import Model -class ConnectorWriteSetting(Model): +class StoreWriteSettings(Model): """Connector write settings. All required parameters must be populated in order to send to Azure. @@ -42,7 +42,7 @@ class ConnectorWriteSetting(Model): } def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: - super(ConnectorWriteSetting, self).__init__(**kwargs) + super(StoreWriteSettings, self).__init__(**kwargs) self.additional_properties = additional_properties self.type = type self.max_concurrent_connections = max_concurrent_connections diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py index 6e02b0d389ab..78b89638b359 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py @@ -33,11 +33,8 @@ class TeradataLinkedService(LinkedService): :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str - :param connection_string: Teradata ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param server: Server name for connection. Type: string (or Expression - with resultType string). + :param server: Required. Server name for connection. Type: string (or + Expression with resultType string). :type server: object :param authentication_type: AuthenticationType to be used for connection. 
Possible values include: 'Basic', 'Windows' @@ -56,6 +53,7 @@ class TeradataLinkedService(LinkedService): _validation = { 'type': {'required': True}, + 'server': {'required': True}, } _attribute_map = { @@ -65,7 +63,6 @@ class TeradataLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'server': {'key': 'typeProperties.server', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, @@ -75,7 +72,6 @@ class TeradataLinkedService(LinkedService): def __init__(self, **kwargs): super(TeradataLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) self.server = kwargs.get('server', None) self.authentication_type = kwargs.get('authentication_type', None) self.username = kwargs.get('username', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py index aac40efe69e0..e80b776454c0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py @@ -33,11 +33,8 @@ class TeradataLinkedService(LinkedService): :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str - :param connection_string: Teradata ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param server: Server name for connection. Type: string (or Expression - with resultType string). + :param server: Required. Server name for connection. Type: string (or + Expression with resultType string). :type server: object :param authentication_type: AuthenticationType to be used for connection. 
Possible values include: 'Basic', 'Windows' @@ -56,6 +53,7 @@ class TeradataLinkedService(LinkedService): _validation = { 'type': {'required': True}, + 'server': {'required': True}, } _attribute_map = { @@ -65,7 +63,6 @@ class TeradataLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'server': {'key': 'typeProperties.server', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, @@ -73,9 +70,8 @@ class TeradataLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, server=None, authentication_type=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: + def __init__(self, *, server, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: super(TeradataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string self.server = server self.authentication_type = authentication_type self.username = username diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings.py deleted file mode 100644 index 81f55ffad16c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings.py +++ /dev/null @@ -1,42 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class TeradataPartitionSettings(Model): - """The settings that will be leveraged for teradata source partitioning. - - :param partition_column_name: The name of the column in integer type that - will be used for proceeding range or hash partitioning. Type: string (or - Expression with resultType string). - :type partition_column_name: object - :param partition_upper_bound: The maximum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_upper_bound: object - :param partition_lower_bound: The minimum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). 
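The net effect of the TeradataLinkedService change above is that the ODBC connection_string property is removed and server becomes required. A hedged migration sketch, assuming the py3 signature shown above; the host name and credentials are placeholders:

    from azure.mgmt.datafactory.models import TeradataLinkedService, SecureString

    # Before this change, TeradataLinkedService(connection_string=...) was
    # accepted; after it, server is required and connection_string is gone.
    linked_service = TeradataLinkedService(
        server='teradata.contoso.com',                    # now required; placeholder host
        authentication_type='Basic',
        username='etl_user',                              # placeholder
        password=SecureString(value='placeholder-password'),
    )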
- :type partition_lower_bound: object - """ - - _attribute_map = { - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, - 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, - 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(TeradataPartitionSettings, self).__init__(**kwargs) - self.partition_column_name = kwargs.get('partition_column_name', None) - self.partition_upper_bound = kwargs.get('partition_upper_bound', None) - self.partition_lower_bound = kwargs.get('partition_lower_bound', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings_py3.py deleted file mode 100644 index b8b4032e8de4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings_py3.py +++ /dev/null @@ -1,42 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class TeradataPartitionSettings(Model): - """The settings that will be leveraged for teradata source partitioning. - - :param partition_column_name: The name of the column in integer type that - will be used for proceeding range or hash partitioning. Type: string (or - Expression with resultType string). - :type partition_column_name: object - :param partition_upper_bound: The maximum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_upper_bound: object - :param partition_lower_bound: The minimum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_lower_bound: object - """ - - _attribute_map = { - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, - 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, - 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, - } - - def __init__(self, *, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None: - super(TeradataPartitionSettings, self).__init__(**kwargs) - self.partition_column_name = partition_column_name - self.partition_upper_bound = partition_upper_bound - self.partition_lower_bound = partition_lower_bound diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source.py deleted file mode 100644 index 3e8a9adfab9a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source.py +++ /dev/null @@ -1,68 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. 
-# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class TeradataSource(CopySource): - """A copy activity Teradata source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Teradata query. Type: string (or Expression with resultType - string). - :type query: object - :param partition_option: The partition mechanism that will be used for - teradata read in parallel. - :type partition_option: object - :param partition_settings: The settings that will be leveraged for - teradata source partitioning. - :type partition_settings: - ~azure.mgmt.datafactory.models.TeradataPartitionSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'object'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, - } - - def __init__(self, **kwargs): - super(TeradataSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.partition_option = kwargs.get('partition_option', None) - self.partition_settings = kwargs.get('partition_settings', None) - self.type = 'TeradataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source_py3.py deleted file mode 100644 index f01fb531eb55..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source_py3.py +++ /dev/null @@ -1,68 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class TeradataSource(CopySource): - """A copy activity Teradata source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Teradata query. Type: string (or Expression with resultType - string). - :type query: object - :param partition_option: The partition mechanism that will be used for - teradata read in parallel. - :type partition_option: object - :param partition_settings: The settings that will be leveraged for - teradata source partitioning. - :type partition_settings: - ~azure.mgmt.datafactory.models.TeradataPartitionSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'object'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, partition_option=None, partition_settings=None, **kwargs) -> None: - super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.partition_option = partition_option - self.partition_settings = partition_settings - self.type = 'TeradataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset.py deleted file mode 100644 index e396bfd6fb15..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset.py +++ /dev/null @@ -1,77 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class TeradataTableDataset(Dataset): - """The Teradata database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param database: The database name of Teradata. Type: string (or - Expression with resultType string). - :type database: object - :param table: The table name of Teradata. Type: string (or Expression with - resultType string). - :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(TeradataTableDataset, self).__init__(**kwargs) - self.database = kwargs.get('database', None) - self.table = kwargs.get('table', None) - self.type = 'TeradataTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset_py3.py deleted file mode 100644 index 892707b7f133..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset_py3.py +++ /dev/null @@ -1,77 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. 
-# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class TeradataTableDataset(Dataset): - """The Teradata database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param database: The database name of Teradata. Type: string (or - Expression with resultType string). - :type database: object - :param table: The table name of Teradata. Type: string (or Expression with - resultType string). 
- :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, database=None, table=None, **kwargs) -> None: - super(TeradataTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.database = database - self.table = table - self.type = 'TeradataTable' From 74b227d3cdbcc523218e07f080798439ec65ee9a Mon Sep 17 00:00:00 2001 From: Azure SDK for Python bot Date: Thu, 11 Jul 2019 08:18:37 -0700 Subject: [PATCH 05/30] [AutoPR datafactory/resource-manager] [Datafactory] Add three new connectors (#6281) * Generated from 0ee2888c7118dfe04f56d37b3bdb491b88981fff [Datafactory] Add Azure SQL Database Managed Instance, Dynamics CRM and Common Data Service for Apps * Generated from e164e4233491e47b7335ed6a797b03d18445f705 Change enum type to string --- .../azure/mgmt/datafactory/models/__init__.py | 40 ++++++ .../models/azure_sql_mi_linked_service.py | 87 +++++++++++++ .../models/azure_sql_mi_linked_service_py3.py | 87 +++++++++++++ .../models/azure_sql_mi_table_dataset.py | 82 +++++++++++++ .../models/azure_sql_mi_table_dataset_py3.py | 82 +++++++++++++ ...on_data_service_for_apps_entity_dataset.py | 72 +++++++++++ ...ata_service_for_apps_entity_dataset_py3.py | 72 +++++++++++ ...on_data_service_for_apps_linked_service.py | 115 ++++++++++++++++++ ...ata_service_for_apps_linked_service_py3.py | 115 ++++++++++++++++++ .../common_data_service_for_apps_sink.py | 77 ++++++++++++ .../common_data_service_for_apps_sink_py3.py | 77 ++++++++++++ .../common_data_service_for_apps_source.py | 58 +++++++++ ...common_data_service_for_apps_source_py3.py | 58 +++++++++ .../mgmt/datafactory/models/copy_sink.py | 13 +- .../mgmt/datafactory/models/copy_sink_py3.py | 13 +- .../mgmt/datafactory/models/copy_source.py | 9 +- .../datafactory/models/copy_source_py3.py | 9 +- .../data_factory_management_client_enums.py | 12 ++ .../azure/mgmt/datafactory/models/dataset.py | 12 +- .../mgmt/datafactory/models/dataset_py3.py | 12 +- .../models/dynamics_crm_entity_dataset.py | 72 +++++++++++ .../models/dynamics_crm_entity_dataset_py3.py | 72 +++++++++++ .../models/dynamics_crm_linked_service.py | 112 +++++++++++++++++ .../models/dynamics_crm_linked_service_py3.py | 112 +++++++++++++++++ .../datafactory/models/dynamics_crm_sink.py | 77 ++++++++++++ .../models/dynamics_crm_sink_py3.py | 77 ++++++++++++ .../datafactory/models/dynamics_crm_source.py | 58 +++++++++ .../models/dynamics_crm_source_py3.py | 58 +++++++++ 
.../mgmt/datafactory/models/linked_service.py | 12 +- .../datafactory/models/linked_service_py3.py | 12 +- .../mgmt/datafactory/models/sql_mi_sink.py | 87 +++++++++++++ .../datafactory/models/sql_mi_sink_py3.py | 87 +++++++++++++ .../mgmt/datafactory/models/sql_mi_source.py | 73 +++++++++++ .../datafactory/models/sql_mi_source_py3.py | 73 +++++++++++ 34 files changed, 2044 insertions(+), 40 deletions(-) create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source_py3.py diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index 27c22b42e671..87e0b8c70171 100644 --- 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -173,10 +173,13 @@ from .oracle_linked_service_py3 import OracleLinkedService from .file_server_linked_service_py3 import FileServerLinkedService from .hd_insight_linked_service_py3 import HDInsightLinkedService + from .common_data_service_for_apps_linked_service_py3 import CommonDataServiceForAppsLinkedService + from .dynamics_crm_linked_service_py3 import DynamicsCrmLinkedService from .dynamics_linked_service_py3 import DynamicsLinkedService from .cosmos_db_linked_service_py3 import CosmosDbLinkedService from .azure_key_vault_linked_service_py3 import AzureKeyVaultLinkedService from .azure_batch_linked_service_py3 import AzureBatchLinkedService + from .azure_sql_mi_linked_service_py3 import AzureSqlMILinkedService from .azure_sql_database_linked_service_py3 import AzureSqlDatabaseLinkedService from .sql_server_linked_service_py3 import SqlServerLinkedService from .azure_sql_dw_linked_service_py3 import AzureSqlDWLinkedService @@ -250,11 +253,14 @@ from .office365_dataset_py3 import Office365Dataset from .azure_blob_fs_dataset_py3 import AzureBlobFSDataset from .azure_data_lake_store_dataset_py3 import AzureDataLakeStoreDataset + from .common_data_service_for_apps_entity_dataset_py3 import CommonDataServiceForAppsEntityDataset + from .dynamics_crm_entity_dataset_py3 import DynamicsCrmEntityDataset from .dynamics_entity_dataset_py3 import DynamicsEntityDataset from .document_db_collection_dataset_py3 import DocumentDbCollectionDataset from .custom_dataset_py3 import CustomDataset from .cassandra_table_dataset_py3 import CassandraTableDataset from .azure_sql_dw_table_dataset_py3 import AzureSqlDWTableDataset + from .azure_sql_mi_table_dataset_py3 import AzureSqlMITableDataset from .azure_sql_table_dataset_py3 import AzureSqlTableDataset from .azure_table_dataset_py3 import AzureTableDataset from .azure_blob_dataset_py3 import AzureBlobDataset @@ -336,6 +342,7 @@ from .file_system_source_py3 import FileSystemSource from .sql_dw_source_py3 import SqlDWSource from .stored_procedure_parameter_py3 import StoredProcedureParameter + from .sql_mi_source_py3 import SqlMISource from .azure_sql_source_py3 import AzureSqlSource from .sql_server_source_py3 import SqlServerSource from .sql_source_py3 import SqlSource @@ -347,6 +354,8 @@ from .sap_cloud_for_customer_source_py3 import SapCloudForCustomerSource from .salesforce_source_py3 import SalesforceSource from .relational_source_py3 import RelationalSource + from .common_data_service_for_apps_source_py3 import CommonDataServiceForAppsSource + from .dynamics_crm_source_py3 import DynamicsCrmSource from .dynamics_source_py3 import DynamicsSource from .document_db_collection_source_py3 import DocumentDbCollectionSource from .blob_source_py3 import BlobSource @@ -387,6 +396,8 @@ from .cosmos_db_mongo_db_api_sink_py3 import CosmosDbMongoDbApiSink from .salesforce_sink_py3 import SalesforceSink from .azure_data_explorer_sink_py3 import AzureDataExplorerSink + from .common_data_service_for_apps_sink_py3 import CommonDataServiceForAppsSink + from .dynamics_crm_sink_py3 import DynamicsCrmSink from .dynamics_sink_py3 import DynamicsSink from .odbc_sink_py3 import OdbcSink from .azure_search_index_sink_py3 import AzureSearchIndexSink @@ -395,6 +406,7 @@ from .oracle_sink_py3 import OracleSink from .polybase_settings_py3 import PolybaseSettings from .sql_dw_sink_py3 import SqlDWSink + from 
.sql_mi_sink_py3 import SqlMISink from .azure_sql_sink_py3 import AzureSqlSink from .sql_server_sink_py3 import SqlServerSink from .sql_sink_py3 import SqlSink @@ -625,10 +637,13 @@ from .oracle_linked_service import OracleLinkedService from .file_server_linked_service import FileServerLinkedService from .hd_insight_linked_service import HDInsightLinkedService + from .common_data_service_for_apps_linked_service import CommonDataServiceForAppsLinkedService + from .dynamics_crm_linked_service import DynamicsCrmLinkedService from .dynamics_linked_service import DynamicsLinkedService from .cosmos_db_linked_service import CosmosDbLinkedService from .azure_key_vault_linked_service import AzureKeyVaultLinkedService from .azure_batch_linked_service import AzureBatchLinkedService + from .azure_sql_mi_linked_service import AzureSqlMILinkedService from .azure_sql_database_linked_service import AzureSqlDatabaseLinkedService from .sql_server_linked_service import SqlServerLinkedService from .azure_sql_dw_linked_service import AzureSqlDWLinkedService @@ -702,11 +717,14 @@ from .office365_dataset import Office365Dataset from .azure_blob_fs_dataset import AzureBlobFSDataset from .azure_data_lake_store_dataset import AzureDataLakeStoreDataset + from .common_data_service_for_apps_entity_dataset import CommonDataServiceForAppsEntityDataset + from .dynamics_crm_entity_dataset import DynamicsCrmEntityDataset from .dynamics_entity_dataset import DynamicsEntityDataset from .document_db_collection_dataset import DocumentDbCollectionDataset from .custom_dataset import CustomDataset from .cassandra_table_dataset import CassandraTableDataset from .azure_sql_dw_table_dataset import AzureSqlDWTableDataset + from .azure_sql_mi_table_dataset import AzureSqlMITableDataset from .azure_sql_table_dataset import AzureSqlTableDataset from .azure_table_dataset import AzureTableDataset from .azure_blob_dataset import AzureBlobDataset @@ -788,6 +806,7 @@ from .file_system_source import FileSystemSource from .sql_dw_source import SqlDWSource from .stored_procedure_parameter import StoredProcedureParameter + from .sql_mi_source import SqlMISource from .azure_sql_source import AzureSqlSource from .sql_server_source import SqlServerSource from .sql_source import SqlSource @@ -799,6 +818,8 @@ from .sap_cloud_for_customer_source import SapCloudForCustomerSource from .salesforce_source import SalesforceSource from .relational_source import RelationalSource + from .common_data_service_for_apps_source import CommonDataServiceForAppsSource + from .dynamics_crm_source import DynamicsCrmSource from .dynamics_source import DynamicsSource from .document_db_collection_source import DocumentDbCollectionSource from .blob_source import BlobSource @@ -839,6 +860,8 @@ from .cosmos_db_mongo_db_api_sink import CosmosDbMongoDbApiSink from .salesforce_sink import SalesforceSink from .azure_data_explorer_sink import AzureDataExplorerSink + from .common_data_service_for_apps_sink import CommonDataServiceForAppsSink + from .dynamics_crm_sink import DynamicsCrmSink from .dynamics_sink import DynamicsSink from .odbc_sink import OdbcSink from .azure_search_index_sink import AzureSearchIndexSink @@ -847,6 +870,7 @@ from .oracle_sink import OracleSink from .polybase_settings import PolybaseSettings from .sql_dw_sink import SqlDWSink + from .sql_mi_sink import SqlMISink from .azure_sql_sink import AzureSqlSink from .sql_server_sink import SqlServerSink from .sql_sink import SqlSink @@ -962,6 +986,8 @@ TeradataAuthenticationType, Db2AuthenticationType, 
SybaseAuthenticationType, + DynamicsDeploymentType, + DynamicsAuthenticationType, AzureFunctionActivityMethod, WebActivityMethod, CassandraSourceReadConsistencyLevels, @@ -1150,10 +1176,13 @@ 'OracleLinkedService', 'FileServerLinkedService', 'HDInsightLinkedService', + 'CommonDataServiceForAppsLinkedService', + 'DynamicsCrmLinkedService', 'DynamicsLinkedService', 'CosmosDbLinkedService', 'AzureKeyVaultLinkedService', 'AzureBatchLinkedService', + 'AzureSqlMILinkedService', 'AzureSqlDatabaseLinkedService', 'SqlServerLinkedService', 'AzureSqlDWLinkedService', @@ -1227,11 +1256,14 @@ 'Office365Dataset', 'AzureBlobFSDataset', 'AzureDataLakeStoreDataset', + 'CommonDataServiceForAppsEntityDataset', + 'DynamicsCrmEntityDataset', 'DynamicsEntityDataset', 'DocumentDbCollectionDataset', 'CustomDataset', 'CassandraTableDataset', 'AzureSqlDWTableDataset', + 'AzureSqlMITableDataset', 'AzureSqlTableDataset', 'AzureTableDataset', 'AzureBlobDataset', @@ -1313,6 +1345,7 @@ 'FileSystemSource', 'SqlDWSource', 'StoredProcedureParameter', + 'SqlMISource', 'AzureSqlSource', 'SqlServerSource', 'SqlSource', @@ -1324,6 +1357,8 @@ 'SapCloudForCustomerSource', 'SalesforceSource', 'RelationalSource', + 'CommonDataServiceForAppsSource', + 'DynamicsCrmSource', 'DynamicsSource', 'DocumentDbCollectionSource', 'BlobSource', @@ -1364,6 +1399,8 @@ 'CosmosDbMongoDbApiSink', 'SalesforceSink', 'AzureDataExplorerSink', + 'CommonDataServiceForAppsSink', + 'DynamicsCrmSink', 'DynamicsSink', 'OdbcSink', 'AzureSearchIndexSink', @@ -1372,6 +1409,7 @@ 'OracleSink', 'PolybaseSettings', 'SqlDWSink', + 'SqlMISink', 'AzureSqlSink', 'SqlServerSink', 'SqlSink', @@ -1486,6 +1524,8 @@ 'TeradataAuthenticationType', 'Db2AuthenticationType', 'SybaseAuthenticationType', + 'DynamicsDeploymentType', + 'DynamicsAuthenticationType', 'AzureFunctionActivityMethod', 'WebActivityMethod', 'CassandraSourceReadConsistencyLevels', diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service.py new file mode 100644 index 000000000000..2aab3a145ff2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureSqlMILinkedService(LinkedService): + """Azure SQL Managed Instance linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param service_principal_id: The ID of the service principal used to + authenticate against Azure SQL Managed Instance. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to + authenticate against Azure SQL Managed Instance. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSqlMILinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureSqlMI' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service_py3.py new file mode 100644 index 000000000000..ec1a2e5e8549 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service_py3.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureSqlMILinkedService(LinkedService): + """Azure SQL Managed Instance linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param service_principal_id: The ID of the service principal used to + authenticate against Azure SQL Managed Instance. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to + authenticate against Azure SQL Managed Instance. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: + super(AzureSqlMILinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.password = password + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential + self.type = 'AzureSqlMI' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset.py new file mode 100644 index 000000000000..1128a9e8cb06 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AzureSqlMITableDataset(Dataset): + """The Azure SQL Managed Instance dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
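A minimal sketch of the new managed-instance linked service defined above, using the py3 signature just shown (connection_string is the only required type property per the validation map); the connection string, service-principal values, and tenant are placeholders:

    from azure.mgmt.datafactory.models import AzureSqlMILinkedService, SecureString

    # SecureString satisfies the SecretBase type expected for the key.
    mi_linked_service = AzureSqlMILinkedService(
        connection_string='Server=myserver.public.dns;Database=mydb;',  # placeholder
        service_principal_id='00000000-0000-0000-0000-000000000000',    # placeholder
        service_principal_key=SecureString(value='placeholder-secret'),
        tenant='contoso.onmicrosoft.com',                               # placeholder
    )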
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param azure_sql_mi_table_dataset_schema: The schema name of the Azure SQL + Managed Instance. Type: string (or Expression with resultType string). + :type azure_sql_mi_table_dataset_schema: object + :param table: The table name of the Azure SQL Managed Instance dataset. + Type: string (or Expression with resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'azure_sql_mi_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSqlMITableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.azure_sql_mi_table_dataset_schema = kwargs.get('azure_sql_mi_table_dataset_schema', None) + self.table = kwargs.get('table', None) + self.type = 'AzureSqlMITable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset_py3.py new file mode 100644 index 000000000000..ac72614e3ed4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureSqlMITableDataset(Dataset): + """The Azure SQL Managed Instance dataset. + + All required parameters must be populated in order to send to Azure. 
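The dataset above pairs the newer typeProperties.schema/typeProperties.table fields with the retired table_name property; a sketch using the newer pair, assuming the attribute map shown above (the linked service reference name and table names are invented for illustration):

    from azure.mgmt.datafactory.models import (
        AzureSqlMITableDataset, LinkedServiceReference)

    dataset = AzureSqlMITableDataset(
        linked_service_name=LinkedServiceReference(reference_name='AzureSqlMI1'),
        azure_sql_mi_table_dataset_schema='dbo',   # maps to typeProperties.schema
        table='SalesOrders',                       # maps to typeProperties.table
    )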
+ + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param azure_sql_mi_table_dataset_schema: The schema name of the Azure SQL + Managed Instance. Type: string (or Expression with resultType string). + :type azure_sql_mi_table_dataset_schema: object + :param table: The table name of the Azure SQL Managed Instance dataset. + Type: string (or Expression with resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'azure_sql_mi_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, azure_sql_mi_table_dataset_schema=None, table=None, **kwargs) -> None: + super(AzureSqlMITableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.azure_sql_mi_table_dataset_schema = azure_sql_mi_table_dataset_schema + self.table = table + self.type = 'AzureSqlMITable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset.py
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset.py new file mode 100644 index 000000000000..c7cd4c315852 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class CommonDataServiceForAppsEntityDataset(Dataset): + """The Common Data Service for Apps entity dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param entity_name: The logical name of the entity. Type: string (or + Expression with resultType string).
+ :type entity_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CommonDataServiceForAppsEntityDataset, self).__init__(**kwargs) + self.entity_name = kwargs.get('entity_name', None) + self.type = 'CommonDataServiceForAppsEntity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset_py3.py new file mode 100644 index 000000000000..050bdb836963 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class CommonDataServiceForAppsEntityDataset(Dataset): + """The Common Data Service for Apps entity dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param entity_name: The logical name of the entity. Type: string (or + Expression with resultType string).
+ :type entity_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, entity_name=None, **kwargs) -> None: + super(CommonDataServiceForAppsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.entity_name = entity_name + self.type = 'CommonDataServiceForAppsEntity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service.py new file mode 100644 index 000000000000..bbc8b7a0de65 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service.py @@ -0,0 +1,115 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class CommonDataServiceForAppsLinkedService(LinkedService): + """Common Data Service for Apps linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param deployment_type: Required. The deployment type of the Common Data + Service for Apps instance. 'Online' for Common Data Service for Apps + Online and 'OnPremisesWithIfd' for Common Data Service for Apps + on-premises with Ifd. Type: string (or Expression with resultType string).
+ Possible values include: 'Online', 'OnPremisesWithIfd' + :type deployment_type: str or + ~azure.mgmt.datafactory.models.DynamicsDeploymentType + :param host_name: The host name of the on-premises Common Data Service for + Apps server. The property is required for on-prem and not allowed for + online. Type: string (or Expression with resultType string). + :type host_name: object + :param port: The port of the on-premises Common Data Service for Apps + server. The property is required for on-prem and not allowed for online. + Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. + :type port: object + :param service_uri: The URL to the Microsoft Common Data Service for Apps + server. The property is required for online and not allowed for on-prem. + Type: string (or Expression with resultType string). + :type service_uri: object + :param organization_name: The organization name of the Common Data Service + for Apps instance. The property is required for on-prem and required for + online when more than one Common Data Service for Apps instance is + associated with the user. Type: string (or Expression with resultType + string). + :type organization_name: object + :param authentication_type: Required. The authentication type to connect + to Common Data Service for Apps server. 'Office365' for online scenario, + 'Ifd' for on-premises with Ifd scenario. Type: string (or Expression with + resultType string). Possible values include: 'Office365', 'Ifd' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.DynamicsAuthenticationType + :param username: Required. User name to access the Common Data Service for + Apps instance. Type: string (or Expression with resultType string). + :type username: object + :param password: Password to access the Common Data Service for Apps + instance. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CommonDataServiceForAppsLinkedService, self).__init__(**kwargs) + self.deployment_type = kwargs.get('deployment_type', None) + self.host_name = kwargs.get('host_name', None) + self.port = kwargs.get('port', None) + self.service_uri = kwargs.get('service_uri', None) + self.organization_name = kwargs.get('organization_name', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'CommonDataServiceForApps' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service_py3.py new file mode 100644 index 000000000000..1c4897c09868 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service_py3.py @@ -0,0 +1,115 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class CommonDataServiceForAppsLinkedService(LinkedService): + """Common Data Service for Apps linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description.
+ :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param deployment_type: Required. The deployment type of the Common Data + Service for Apps instance. 'Online' for Common Data Service for Apps + Online and 'OnPremisesWithIfd' for Common Data Service for Apps + on-premises with Ifd. Type: string (or Expression with resultType string). + Possible values include: 'Online', 'OnPremisesWithIfd' + :type deployment_type: str or + ~azure.mgmt.datafactory.models.DynamicsDeploymentType + :param host_name: The host name of the on-premises Common Data Service for + Apps server. The property is required for on-prem and not allowed for + online. Type: string (or Expression with resultType string). + :type host_name: object + :param port: The port of the on-premises Common Data Service for Apps + server. The property is required for on-prem and not allowed for online. + Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. + :type port: object + :param service_uri: The URL to the Microsoft Common Data Service for Apps + server. The property is required for online and not allowed for on-prem. + Type: string (or Expression with resultType string). + :type service_uri: object + :param organization_name: The organization name of the Common Data Service + for Apps instance. The property is required for on-prem and required for + online when more than one Common Data Service for Apps instance is + associated with the user. Type: string (or Expression with resultType + string). + :type organization_name: object + :param authentication_type: Required. The authentication type to connect + to Common Data Service for Apps server. 'Office365' for online scenario, + 'Ifd' for on-premises with Ifd scenario. Type: string (or Expression with + resultType string). Possible values include: 'Office365', 'Ifd' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.DynamicsAuthenticationType + :param username: Required. User name to access the Common Data Service for + Apps instance. Type: string (or Expression with resultType string). + :type username: object + :param password: Password to access the Common Data Service for Apps + instance. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, deployment_type, authentication_type, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(CommonDataServiceForAppsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.deployment_type = deployment_type + self.host_name = host_name + self.port = port + self.service_uri = service_uri + self.organization_name = organization_name + self.authentication_type = authentication_type + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'CommonDataServiceForApps' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink.py new file mode 100644 index 000000000000..0df48841cccc --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class CommonDataServiceForAppsSink(CopySink): + """A copy activity Common Data Service for Apps sink. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :ivar write_behavior: Required. The write behavior for the operation. + Default value: "Upsert". + :vartype write_behavior: str + :param ignore_null_values: The flag indicating whether to ignore null + values from the input dataset (except key fields) during write operation. + Default is false. Type: boolean (or Expression with resultType boolean). + :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + 'write_behavior': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + write_behavior = "Upsert" + + def __init__(self, **kwargs): + super(CommonDataServiceForAppsSink, self).__init__(**kwargs) + self.ignore_null_values = kwargs.get('ignore_null_values', None) + self.type = 'CommonDataServiceForAppsSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink_py3.py new file mode 100644 index 000000000000..80f85e6d5809 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class CommonDataServiceForAppsSink(CopySink): + """A copy activity Common Data Service for Apps sink.
+ + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :ivar write_behavior: Required. The write behavior for the operation. + Default value: "Upsert". + :vartype write_behavior: str + :param ignore_null_values: The flag indicating whether to ignore null + values from the input dataset (except key fields) during write operation. + Default is false. Type: boolean (or Expression with resultType boolean). + :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + 'write_behavior': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + write_behavior = "Upsert" + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None: + super(CommonDataServiceForAppsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.ignore_null_values = ignore_null_values + self.type = 'CommonDataServiceForAppsSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source.py new file mode 100644 index 000000000000..13d2a6b921bb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source.py @@ -0,0 +1,58 @@ +# coding=utf-8 +#
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class CommonDataServiceForAppsSource(CopySource): + """A copy activity Common Data Service for Apps source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: FetchXML is a proprietary query language that is used in + Microsoft Common Data Service for Apps (online & on-premises). Type: + string (or Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CommonDataServiceForAppsSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'CommonDataServiceForAppsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source_py3.py new file mode 100644 index 000000000000..713db90f9623 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source_py3.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class CommonDataServiceForAppsSource(CopySource): + """A copy activity Common Data Service for Apps source. + + All required parameters must be populated in order to send to Azure.
+ + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: FetchXML is a proprietary query language that is used in + Microsoft Common Data Service for Apps (online & on-premises). Type: + string (or Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(CommonDataServiceForAppsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'CommonDataServiceForAppsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py index 43117547e1ea..3bae5f748dd8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py @@ -17,11 +17,12 @@ class CopySink(Model): You probably want to use the sub-classes and not this class directly. Known sub-classes are: CosmosDbMongoDbApiSink, SalesforceSink, - AzureDataExplorerSink, DynamicsSink, OdbcSink, AzureSearchIndexSink, - AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, SqlDWSink, - AzureSqlSink, SqlServerSink, SqlSink, DocumentDbCollectionSink, - FileSystemSink, BlobSink, ParquetSink, AzureTableSink, AzureQueueSink, - SapCloudForCustomerSink, DelimitedTextSink + AzureDataExplorerSink, CommonDataServiceForAppsSink, DynamicsCrmSink, + DynamicsSink, OdbcSink, AzureSearchIndexSink, AzureBlobFSSink, + AzureDataLakeStoreSink, OracleSink, SqlDWSink, SqlMISink, AzureSqlSink, + SqlServerSink, SqlSink, DocumentDbCollectionSink, FileSystemSink, BlobSink, + ParquetSink, AzureTableSink, AzureQueueSink, SapCloudForCustomerSink, + DelimitedTextSink All required parameters must be populated in order to send to Azure.
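The five new model files above follow the usual generated pattern: construct the linked service, point an entity dataset at it, and reference both from a pipeline. A minimal sketch, assuming the package builds with these files; SecureString and LinkedServiceReference are pre-existing models in this SDK, and every name, URL, and secret below is a placeholder:

from azure.mgmt.datafactory.models import (
    CommonDataServiceForAppsEntityDataset,
    CommonDataServiceForAppsLinkedService,
    LinkedServiceReference,
    SecureString,
)

# Online deployment: service_uri is required; host_name/port are on-prem only.
cds_linked_service = CommonDataServiceForAppsLinkedService(
    deployment_type='Online',          # or DynamicsDeploymentType.online (enum added below)
    authentication_type='Office365',   # or DynamicsAuthenticationType.office365
    username='user@contoso.example',                  # placeholder
    password=SecureString(value='<placeholder>'),     # placeholder
    service_uri='https://contoso.crm.dynamics.com',   # placeholder
)

# Dataset addressing a single entity through that linked service.
cds_entity = CommonDataServiceForAppsEntityDataset(
    linked_service_name=LinkedServiceReference(reference_name='CdsLinkedService'),
    entity_name='account',
)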
@@ -65,7 +66,7 @@ class CopySink(Model): } _subtype_map = { - 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'DynamicsSink': 'DynamicsSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'ParquetSink': 'ParquetSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'DelimitedTextSink': 'DelimitedTextSink'} + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'ParquetSink': 'ParquetSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'DelimitedTextSink': 'DelimitedTextSink'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py index 4f9ebc84173c..6278736affb5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py @@ -17,11 +17,12 @@ class CopySink(Model): You probably want to use the sub-classes and not this class directly. Known sub-classes are: CosmosDbMongoDbApiSink, SalesforceSink, - AzureDataExplorerSink, DynamicsSink, OdbcSink, AzureSearchIndexSink, - AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, SqlDWSink, - AzureSqlSink, SqlServerSink, SqlSink, DocumentDbCollectionSink, - FileSystemSink, BlobSink, ParquetSink, AzureTableSink, AzureQueueSink, - SapCloudForCustomerSink, DelimitedTextSink + AzureDataExplorerSink, CommonDataServiceForAppsSink, DynamicsCrmSink, + DynamicsSink, OdbcSink, AzureSearchIndexSink, AzureBlobFSSink, + AzureDataLakeStoreSink, OracleSink, SqlDWSink, SqlMISink, AzureSqlSink, + SqlServerSink, SqlSink, DocumentDbCollectionSink, FileSystemSink, BlobSink, + ParquetSink, AzureTableSink, AzureQueueSink, SapCloudForCustomerSink, + DelimitedTextSink All required parameters must be populated in order to send to Azure. 
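The _subtype_map additions in this and the surrounding hunks are what make the new types round-trip: msrest reads the 'type' discriminator from a payload and instantiates the registered subclass. A rough sketch of that dispatch, assuming msrest is installed and building the class registry the way the generated client does:

import inspect
from msrest import Deserializer

import azure.mgmt.datafactory.models as models

# Map class name -> class, mirroring the generated client's model registry.
client_models = {k: v for k, v in models.__dict__.items() if inspect.isclass(v)}
deserialize = Deserializer(client_models)

payload = {'type': 'CommonDataServiceForAppsSink', 'ignoreNullValues': True}
sink = deserialize('CopySink', payload)
assert type(sink).__name__ == 'CommonDataServiceForAppsSink'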
@@ -65,7 +66,7 @@ class CopySink(Model): } _subtype_map = { - 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'DynamicsSink': 'DynamicsSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'ParquetSink': 'ParquetSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'DelimitedTextSink': 'DelimitedTextSink'} + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'ParquetSink': 'ParquetSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'DelimitedTextSink': 'DelimitedTextSink'} } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py index c261c385de8d..a8f3727c086f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py @@ -28,10 +28,11 @@ class CopySource(Model): AzureDataLakeStoreSource, Office365Source, CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, CassandraSource, WebSource, OracleSource, AzureDataExplorerSource, AzureMySqlSource, HdfsSource, FileSystemSource, - SqlDWSource, AzureSqlSource, SqlServerSource, SqlSource, RestSource, - SapTableSource, SapOpenHubSource, SapHanaSource, SapEccSource, + SqlDWSource, SqlMISource, AzureSqlSource, SqlServerSource, SqlSource, + RestSource, SapTableSource, SapOpenHubSource, SapHanaSource, SapEccSource, SapCloudForCustomerSource, SalesforceSource, RelationalSource, - DynamicsSource, DocumentDbCollectionSource, BlobSource, AzureTableSource, + CommonDataServiceForAppsSource, DynamicsCrmSource, DynamicsSource, + DocumentDbCollectionSource, BlobSource, AzureTableSource, DelimitedTextSource, ParquetSource All required parameters must be populated in order to send to Azure. 
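With CommonDataServiceForAppsSource registered under CopySource, it plugs into a copy activity like any other source. A hedged sketch under the assumption that CopyActivity, DatasetReference, and BlobSink (all pre-existing models in this package) are used unchanged; the dataset names and FetchXML query are placeholders:

from azure.mgmt.datafactory.models import (
    BlobSink,
    CommonDataServiceForAppsSource,
    CopyActivity,
    DatasetReference,
)

# Placeholder FetchXML query against the 'account' entity.
fetch_xml = "<fetch top='10'><entity name='account'><attribute name='name'/></entity></fetch>"

copy = CopyActivity(
    name='CopyCdsToBlob',
    inputs=[DatasetReference(reference_name='CdsEntityDataset')],
    outputs=[DatasetReference(reference_name='BlobOutputDataset')],
    source=CommonDataServiceForAppsSource(query=fetch_xml),
    sink=BlobSink(),
)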
@@ -67,7 +68,7 @@ class CopySource(Model): } _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 
'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py index 5a0b7d807b7f..29f8af33ab49 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py @@ -28,10 +28,11 @@ class CopySource(Model): AzureDataLakeStoreSource, Office365Source, CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, CassandraSource, WebSource, OracleSource, AzureDataExplorerSource, AzureMySqlSource, HdfsSource, FileSystemSource, - SqlDWSource, AzureSqlSource, SqlServerSource, SqlSource, RestSource, - SapTableSource, SapOpenHubSource, SapHanaSource, SapEccSource, + SqlDWSource, SqlMISource, AzureSqlSource, SqlServerSource, SqlSource, + RestSource, SapTableSource, SapOpenHubSource, SapHanaSource, SapEccSource, SapCloudForCustomerSource, SalesforceSource, RelationalSource, - DynamicsSource, DocumentDbCollectionSource, BlobSource, AzureTableSource, + CommonDataServiceForAppsSource, DynamicsCrmSource, DynamicsSource, + DocumentDbCollectionSource, BlobSource, AzureTableSource, DelimitedTextSource, ParquetSource All required parameters must be populated in order to send to Azure. 
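On the wire, the _attribute_map entries shown throughout this patch translate Python attribute names into camelCase REST fields, and class-level constants such as write_behavior are emitted automatically. A small sketch of the expected (not verified here) serialized shape, using msrest's Model.serialize:

from azure.mgmt.datafactory.models import CommonDataServiceForAppsSink

sink = CommonDataServiceForAppsSink(ignore_null_values=True, write_batch_size=1000)
body = sink.serialize()
# Expected shape, per the attribute map above (assumption, not a captured payload):
# {'type': 'CommonDataServiceForAppsSink', 'writeBehavior': 'Upsert',
#  'ignoreNullValues': True, 'writeBatchSize': 1000}
print(body)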
@@ -67,7 +68,7 @@ class CopySource(Model): } _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 
'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py index f6b7388285d6..1911691ca857 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py @@ -321,6 +321,18 @@ class SybaseAuthenticationType(str, Enum): windows = "Windows" +class DynamicsDeploymentType(str, Enum): + + online = "Online" + on_premises_with_ifd = "OnPremisesWithIfd" + + +class DynamicsAuthenticationType(str, Enum): + + office365 = "Office365" + ifd = "Ifd" + + class AzureFunctionActivityMethod(str, Enum): get = "GET" diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py index 76d8375b3da9..638f0b71b17b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py @@ -37,10 +37,12 @@ class Dataset(Model): OracleTableDataset, ODataResourceDataset, CosmosDbMongoDbApiCollectionDataset, MongoDbV2CollectionDataset, MongoDbCollectionDataset, FileShareDataset, Office365Dataset, - AzureBlobFSDataset, AzureDataLakeStoreDataset, DynamicsEntityDataset, - DocumentDbCollectionDataset, CustomDataset, CassandraTableDataset, - AzureSqlDWTableDataset, AzureSqlTableDataset, AzureTableDataset, - AzureBlobDataset, DelimitedTextDataset, ParquetDataset, AmazonS3Dataset + 
AzureBlobFSDataset, AzureDataLakeStoreDataset, + CommonDataServiceForAppsEntityDataset, DynamicsCrmEntityDataset, + DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, + CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, + AzureSqlTableDataset, AzureTableDataset, AzureBlobDataset, + DelimitedTextDataset, ParquetDataset, AmazonS3Dataset All required parameters must be populated in order to send to Azure. @@ -90,7 +92,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 
'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py index c793e32f2251..a5a412b3f7fb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py @@ -37,10 +37,12 @@ class Dataset(Model): OracleTableDataset, ODataResourceDataset, CosmosDbMongoDbApiCollectionDataset, MongoDbV2CollectionDataset, 
MongoDbCollectionDataset, FileShareDataset, Office365Dataset, - AzureBlobFSDataset, AzureDataLakeStoreDataset, DynamicsEntityDataset, - DocumentDbCollectionDataset, CustomDataset, CassandraTableDataset, - AzureSqlDWTableDataset, AzureSqlTableDataset, AzureTableDataset, - AzureBlobDataset, DelimitedTextDataset, ParquetDataset, AmazonS3Dataset + AzureBlobFSDataset, AzureDataLakeStoreDataset, + CommonDataServiceForAppsEntityDataset, DynamicsCrmEntityDataset, + DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, + CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, + AzureSqlTableDataset, AzureTableDataset, AzureBlobDataset, + DelimitedTextDataset, ParquetDataset, AmazonS3Dataset All required parameters must be populated in order to send to Azure. @@ -90,7 +92,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 
'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: diff --git 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset.py new file mode 100644 index 000000000000..ff4079761cf0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class DynamicsCrmEntityDataset(Dataset): + """The Dynamics CRM entity dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param entity_name: The logical name of the entity. Type: string (or + Expression with resultType string). 
+ :type entity_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DynamicsCrmEntityDataset, self).__init__(**kwargs) + self.entity_name = kwargs.get('entity_name', None) + self.type = 'DynamicsCrmEntity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset_py3.py new file mode 100644 index 000000000000..4a1ef86b2dc6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class DynamicsCrmEntityDataset(Dataset): + """The Dynamics CRM entity dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param entity_name: The logical name of the entity. Type: string (or + Expression with resultType string). 
+ :type entity_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, entity_name=None, **kwargs) -> None: + super(DynamicsCrmEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.entity_name = entity_name + self.type = 'DynamicsCrmEntity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service.py new file mode 100644 index 000000000000..aad71042bb04 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service.py @@ -0,0 +1,112 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class DynamicsCrmLinkedService(LinkedService): + """Dynamics CRM linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param deployment_type: Required. The deployment type of the Dynamics CRM + instance. 'Online' for Dynamics CRM Online and 'OnPremisesWithIfd' for + Dynamics CRM on-premises with Ifd. Type: string (or Expression with + resultType string). 
Possible values include: 'Online', 'OnPremisesWithIfd' + :type deployment_type: str or + ~azure.mgmt.datafactory.models.DynamicsDeploymentType + :param host_name: The host name of the on-premises Dynamics CRM server. + The property is required for on-prem and not allowed for online. Type: + string (or Expression with resultType string). + :type host_name: object + :param port: The port of the on-premises Dynamics CRM server. The property + is required for on-prem and not allowed for online. Default is 443. Type: + integer (or Expression with resultType integer), minimum: 0. + :type port: object + :param service_uri: The URL to the Microsoft Dynamics CRM server. The + property is required for online and not allowed for on-prem. Type: string + (or Expression with resultType string). + :type service_uri: object + :param organization_name: The organization name of the Dynamics CRM + instance. The property is required for on-prem and required for online + when there is more than one Dynamics CRM instance associated with the + user. Type: string (or Expression with resultType string). + :type organization_name: object + :param authentication_type: Required. The authentication type to connect + to the Dynamics CRM server. 'Office365' for the online scenario, 'Ifd' for + the on-premises-with-Ifd scenario. Type: string (or Expression with + resultType string). Possible values include: 'Office365', 'Ifd' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.DynamicsAuthenticationType + :param username: Required. User name to access the Dynamics CRM instance. + Type: string (or Expression with resultType string). + :type username: object + :param password: Password to access the Dynamics CRM instance. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DynamicsCrmLinkedService, self).__init__(**kwargs) + self.deployment_type = kwargs.get('deployment_type', None) + self.host_name = kwargs.get('host_name', None) + self.port = kwargs.get('port', None) + self.service_uri = kwargs.get('service_uri', None) + self.organization_name = kwargs.get('organization_name', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'DynamicsCrm' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service_py3.py new file mode 100644 index 000000000000..2286301fabef --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service_py3.py @@ -0,0 +1,112 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class DynamicsCrmLinkedService(LinkedService): + """Dynamics CRM linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param deployment_type: Required. The deployment type of the Dynamics CRM + instance. 'Online' for Dynamics CRM Online and 'OnPremisesWithIfd' for + Dynamics CRM on-premises with Ifd. Type: string (or Expression with + resultType string). Possible values include: 'Online', 'OnPremisesWithIfd' + :type deployment_type: str or + ~azure.mgmt.datafactory.models.DynamicsDeploymentType + :param host_name: The host name of the on-premises Dynamics CRM server. + The property is required for on-prem and not allowed for online. Type: + string (or Expression with resultType string). + :type host_name: object + :param port: The port of the on-premises Dynamics CRM server. The property + is required for on-prem and not allowed for online. Default is 443. Type: + integer (or Expression with resultType integer), minimum: 0. + :type port: object + :param service_uri: The URL to the Microsoft Dynamics CRM server. The + property is required for online and not allowed for on-prem. Type: string + (or Expression with resultType string). + :type service_uri: object + :param organization_name: The organization name of the Dynamics CRM + instance. The property is required for on-prem and required for online + when there is more than one Dynamics CRM instance associated with the + user. Type: string (or Expression with resultType string). + :type organization_name: object + :param authentication_type: Required. The authentication type to connect + to the Dynamics CRM server. 'Office365' for the online scenario, 'Ifd' for + the on-premises-with-Ifd scenario. Type: string (or Expression with + resultType string). Possible values include: 'Office365', 'Ifd' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.DynamicsAuthenticationType + :param username: Required. User name to access the Dynamics CRM instance. + Type: string (or Expression with resultType string). + :type username: object + :param password: Password to access the Dynamics CRM instance. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, deployment_type, authentication_type, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(DynamicsCrmLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.deployment_type = deployment_type + self.host_name = host_name + self.port = port + self.service_uri = service_uri + self.organization_name = organization_name + self.authentication_type = authentication_type + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'DynamicsCrm' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink.py new file mode 100644 index 000000000000..2d0f462e0f59 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class DynamicsCrmSink(CopySink): + """A copy activity Dynamics CRM sink. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. 
Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :ivar write_behavior: Required. The write behavior for the operation. + Default value: "Upsert" . + :vartype write_behavior: str + :param ignore_null_values: The flag indicating whether to ignore null + values from input dataset (except key fields) during write operation. + Default is false. Type: boolean (or Expression with resultType boolean). + :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + 'write_behavior': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + write_behavior = "Upsert" + + def __init__(self, **kwargs): + super(DynamicsCrmSink, self).__init__(**kwargs) + self.ignore_null_values = kwargs.get('ignore_null_values', None) + self.type = 'DynamicsCrmSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink_py3.py new file mode 100644 index 000000000000..d9f4fcf092c8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class DynamicsCrmSink(CopySink): + """A copy activity Dynamics CRM sink. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. 
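Both generated variants of this sink pin write_behavior as a required class constant ("Upsert") rather than a constructor argument, so every copy run upserts into Dynamics CRM; only ignore_null_values is caller-controlled. A minimal sketch of constructing the sink, assuming the regenerated package is installed:

    from azure.mgmt.datafactory.models import DynamicsCrmSink

    # Skip null values from the input dataset (key fields are always written).
    sink = DynamicsCrmSink(ignore_null_values=True)

    assert sink.write_behavior == 'Upsert'  # class-level constant, not settable
    assert sink.type == 'DynamicsCrmSink'   # discriminator filled in __init__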
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :ivar write_behavior: Required. The write behavior for the operation. + Default value: "Upsert" . + :vartype write_behavior: str + :param ignore_null_values: The flag indicating whether to ignore null + values from input dataset (except key fields) during write operation. + Default is false. Type: boolean (or Expression with resultType boolean). + :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + 'write_behavior': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + write_behavior = "Upsert" + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None: + super(DynamicsCrmSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.ignore_null_values = ignore_null_values + self.type = 'DynamicsCrmSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source.py new file mode 100644 index 000000000000..641fad43f437 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class DynamicsCrmSource(CopySource): + """A copy activity Dynamics CRM source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: FetchXML is a proprietary query language that is used in + Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression + with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DynamicsCrmSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'DynamicsCrmSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source_py3.py new file mode 100644 index 000000000000..29c3e78609a5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source_py3.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class DynamicsCrmSource(CopySource): + """A copy activity Dynamics CRM source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. 
Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: FetchXML is a proprietary query language that is used in + Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression + with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(DynamicsCrmSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'DynamicsCrmSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py index 81ce26e5b657..a5b11a8c7264 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py @@ -46,11 +46,13 @@ class LinkedService(Model): AzureMLLinkedService, TeradataLinkedService, Db2LinkedService, SybaseLinkedService, PostgreSqlLinkedService, MySqlLinkedService, AzureMySqlLinkedService, OracleLinkedService, FileServerLinkedService, - HDInsightLinkedService, DynamicsLinkedService, CosmosDbLinkedService, + HDInsightLinkedService, CommonDataServiceForAppsLinkedService, + DynamicsCrmLinkedService, DynamicsLinkedService, CosmosDbLinkedService, AzureKeyVaultLinkedService, AzureBatchLinkedService, - AzureSqlDatabaseLinkedService, SqlServerLinkedService, - AzureSqlDWLinkedService, AzureTableStorageLinkedService, - AzureBlobStorageLinkedService, AzureStorageLinkedService + AzureSqlMILinkedService, AzureSqlDatabaseLinkedService, + SqlServerLinkedService, AzureSqlDWLinkedService, + AzureTableStorageLinkedService, AzureBlobStorageLinkedService, + AzureStorageLinkedService All required parameters must be populated in order to send to Azure. 
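These additions follow the usual msrest polymorphic pattern: each concrete class stamps a discriminator onto self.type in __init__, and the base class's _subtype_map (extended in the hunks below with 'DynamicsCrm', 'CommonDataServiceForApps', and 'AzureSqlMI') routes a raw payload back to the matching subclass on deserialization; the DynamicsDeploymentType and DynamicsAuthenticationType enums introduced earlier supply the allowed values for the two required linked-service properties. A hedged sketch of the round trip, assuming the regenerated models are installed (the org URL, account, entity name, and FetchXML query are placeholders):

    from azure.mgmt.datafactory.models import (
        DynamicsCrmEntityDataset, DynamicsCrmLinkedService, DynamicsCrmSource,
        LinkedService, LinkedServiceReference, SecureString,
    )

    # Online deployment: service_uri is required; host_name, port and
    # organization_name apply only to the on-premises-with-Ifd case.
    crm = DynamicsCrmLinkedService(
        deployment_type='Online',                        # DynamicsDeploymentType.online
        authentication_type='Office365',                 # DynamicsAuthenticationType.office365
        service_uri='https://contoso.crm.dynamics.com',  # placeholder org URL
        username='user@contoso.com',                     # placeholder account
        password=SecureString(value='<password>'),
    )

    accounts = DynamicsCrmEntityDataset(
        linked_service_name=LinkedServiceReference(reference_name='DynamicsCrmLS'),
        entity_name='account',                           # logical entity name
    )

    source = DynamicsCrmSource(
        query='<fetch><entity name="account"/></fetch>')  # placeholder FetchXML

    # Deserializing through the base class resolves the subtype via _subtype_map.
    assert isinstance(LinkedService.deserialize(crm.serialize()),
                      DynamicsCrmLinkedService)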
@@ -86,7 +88,7 @@ class LinkedService(Model): } _subtype_map = { - 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 
'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py index 1ec0d17d24c5..8114c9d2da19 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py @@ -46,11 +46,13 @@ class LinkedService(Model): AzureMLLinkedService, TeradataLinkedService, Db2LinkedService, SybaseLinkedService, PostgreSqlLinkedService, MySqlLinkedService, AzureMySqlLinkedService, OracleLinkedService, FileServerLinkedService, - HDInsightLinkedService, DynamicsLinkedService, CosmosDbLinkedService, + HDInsightLinkedService, CommonDataServiceForAppsLinkedService, + DynamicsCrmLinkedService, DynamicsLinkedService, CosmosDbLinkedService, AzureKeyVaultLinkedService, AzureBatchLinkedService, - AzureSqlDatabaseLinkedService, SqlServerLinkedService, - AzureSqlDWLinkedService, AzureTableStorageLinkedService, - AzureBlobStorageLinkedService, AzureStorageLinkedService + AzureSqlMILinkedService, AzureSqlDatabaseLinkedService, + SqlServerLinkedService, AzureSqlDWLinkedService, + AzureTableStorageLinkedService, AzureBlobStorageLinkedService, + AzureStorageLinkedService All required parameters must be populated in order to send to Azure. @@ -86,7 +88,7 @@ class LinkedService(Model): } _subtype_map = { - 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 
'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 
'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} } def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink.py new file mode 100644 index 000000000000..bb03e3e48da9 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class SqlMISink(CopySink): +    """A copy activity Azure SQL Managed Instance sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name.
+ Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlMISink, self).__init__(**kwargs) + self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) + self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.type = 'SqlMISink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink_py3.py new file mode 100644 index 000000000000..27d22f47d8a4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink_py3.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class SqlMISink(CopySink): + """A copy activity Azure SQL Managed Instance sink. + + All required parameters must be populated in order to send to Azure. 
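# ---------------------------------------------------------------------------
# Illustrative sketch (commentary, not part of the generated patch): how the
# SqlMISink model defined above might be constructed when building a copy
# activity programmatically. The stored procedure name, table type, parameter
# values, and pre-copy script below are hypothetical placeholders.
from azure.mgmt.datafactory.models import SqlMISink, StoredProcedureParameter

sink = SqlMISink(
    sql_writer_stored_procedure_name='spOverwriteTable',  # hypothetical name
    sql_writer_table_type='StagingTableType',             # hypothetical type
    stored_procedure_parameters={
        'BatchId': StoredProcedureParameter(value='1', type='Int'),
    },
    pre_copy_script='TRUNCATE TABLE dbo.Staging',         # hypothetical script
)
# msrest's Model.serialize() renders the wire keys from _attribute_map,
# e.g. {'type': 'SqlMISink', 'sqlWriterStoredProcedureName': ...}.
body = sink.serialize()
# ---------------------------------------------------------------------------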
+ + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string).
+ :type stored_procedure_table_type_parameter_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None: + super(SqlMISink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name + self.sql_writer_table_type = sql_writer_table_type + self.pre_copy_script = pre_copy_script + self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.type = 'SqlMISink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source.py new file mode 100644 index 000000000000..4d4db9b09281 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SqlMISource(CopySource): +    """A copy activity Azure SQL Managed Instance source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait.
Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + an Azure SQL Managed Instance source. This cannot be used at the same time + as SqlReaderQuery. Type: string (or Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. + :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlMISource, self).__init__(**kwargs) + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.produce_additional_types = kwargs.get('produce_additional_types', None) + self.type = 'SqlMISource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source_py3.py new file mode 100644 index 000000000000..952bc7b4da4f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SqlMISource(CopySource): +    """A copy activity Azure SQL Managed Instance source. + + All required parameters must be populated in order to send to Azure.
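# ---------------------------------------------------------------------------
# Illustrative sketch (commentary, not part of the generated patch): using the
# SqlMISource model defined above. The query text is a hypothetical
# placeholder; per the docstring, sql_reader_query and
# sql_reader_stored_procedure_name cannot be used at the same time.
from azure.mgmt.datafactory.models import SqlMISource

source = SqlMISource(
    sql_reader_query='SELECT * FROM dbo.Orders',  # hypothetical query
)
# The discriminator is fixed by the class itself, not passed by callers.
assert source.type == 'SqlMISource'
# ---------------------------------------------------------------------------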
+ + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + an Azure SQL Managed Instance source. This cannot be used at the same time + as SqlReaderQuery. Type: string (or Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. + :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: + super(SqlMISource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.produce_additional_types = produce_additional_types + self.type = 'SqlMISource' From 30a95dc0c39fe1268fff9e75d35e6db9071e9151 Mon Sep 17 00:00:00 2001 From: Azure SDK for Python bot Date: Thu, 11 Jul 2019 15:21:58 +0000 Subject: [PATCH 06/30] Packaging update of azure-mgmt-datafactory --- sdk/datafactory/azure-mgmt-datafactory/README.rst | 2 +- sdk/datafactory/azure-mgmt-datafactory/setup.py | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/sdk/datafactory/azure-mgmt-datafactory/README.rst
b/sdk/datafactory/azure-mgmt-datafactory/README.rst index 78282b6062d6..df8f42da7efb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/README.rst +++ b/sdk/datafactory/azure-mgmt-datafactory/README.rst @@ -6,7 +6,7 @@ This is the Microsoft Azure Data Factory Management Client Library. Azure Resource Manager (ARM) is the next generation of management APIs that replace the old Azure Service Management (ASM). -This package has been tested with Python 2.7, 3.4, 3.5, 3.6 and 3.7. +This package has been tested with Python 2.7, 3.5, 3.6 and 3.7. For the older Azure Service Management (ASM) libraries, see `azure-servicemanagement-legacy `__ library. diff --git a/sdk/datafactory/azure-mgmt-datafactory/setup.py b/sdk/datafactory/azure-mgmt-datafactory/setup.py index bc211c4c2c5f..301b3319927e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/setup.py +++ b/sdk/datafactory/azure-mgmt-datafactory/setup.py @@ -64,7 +64,6 @@ 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', From 08c382471a998e78321072334faf6a686008c230 Mon Sep 17 00:00:00 2001 From: Azure SDK for Python bot Date: Tue, 16 Jul 2019 20:02:03 -0700 Subject: [PATCH 07/30] [AutoPR datafactory/resource-manager] [Datafactory] Add three new connectors (#6328) * Generated from 034a934c3d28b814e488fc8134b330a33f1c0c57 [Datafactory] Add three new connectors * Generated from 55361517217e7bef074e143a836e9a823256ade3 Add Informix into custom-words.txt --- .../azure/mgmt/datafactory/models/__init__.py | 47 ++++++++++ .../mgmt/datafactory/models/copy_sink.py | 16 ++-- .../mgmt/datafactory/models/copy_sink_py3.py | 16 ++-- .../mgmt/datafactory/models/copy_source.py | 19 ++-- .../datafactory/models/copy_source_py3.py | 19 ++-- .../data_factory_management_client_enums.py | 7 ++ .../azure/mgmt/datafactory/models/dataset.py | 13 +-- .../mgmt/datafactory/models/dataset_py3.py | 13 +-- .../models/informix_linked_service.py | 86 ++++++++++++++++++ .../models/informix_linked_service_py3.py | 86 ++++++++++++++++++ .../mgmt/datafactory/models/informix_sink.py | 66 ++++++++++++++ .../datafactory/models/informix_sink_py3.py | 66 ++++++++++++++ .../datafactory/models/informix_source.py | 57 ++++++++++++ .../datafactory/models/informix_source_py3.py | 57 ++++++++++++ .../models/informix_table_dataset.py | 72 +++++++++++++++ .../models/informix_table_dataset_py3.py | 72 +++++++++++++++ .../mgmt/datafactory/models/linked_service.py | 9 +- .../datafactory/models/linked_service_py3.py | 9 +- .../models/microsoft_access_linked_service.py | 86 ++++++++++++++++++ .../microsoft_access_linked_service_py3.py | 86 ++++++++++++++++++ .../models/microsoft_access_sink.py | 66 ++++++++++++++ .../models/microsoft_access_sink_py3.py | 66 ++++++++++++++ .../models/microsoft_access_source.py | 57 ++++++++++++ .../models/microsoft_access_source_py3.py | 57 ++++++++++++ .../models/microsoft_access_table_dataset.py | 72 +++++++++++++++ .../microsoft_access_table_dataset_py3.py | 72 +++++++++++++++ ...salesforce_service_cloud_linked_service.py | 87 +++++++++++++++++++ ...sforce_service_cloud_linked_service_py3.py | 87 +++++++++++++++++++ ...salesforce_service_cloud_object_dataset.py | 72 +++++++++++++++ ...sforce_service_cloud_object_dataset_py3.py | 72 +++++++++++++++ .../models/salesforce_service_cloud_sink.py | 84 ++++++++++++++++++ 
.../salesforce_service_cloud_sink_py3.py | 84 ++++++++++++++++++ .../models/salesforce_service_cloud_source.py | 63 ++++++++++++++ .../salesforce_service_cloud_source_py3.py | 63 ++++++++++++++ .../models/teradata_linked_service.py | 10 ++- .../models/teradata_linked_service_py3.py | 12 ++- .../models/teradata_partition_settings.py | 42 +++++++++ .../models/teradata_partition_settings_py3.py | 42 +++++++++ .../datafactory/models/teradata_source.py | 70 +++++++++++++++ .../datafactory/models/teradata_source_py3.py | 70 +++++++++++++++ .../models/teradata_table_dataset.py | 77 ++++++++++++++++ .../models/teradata_table_dataset_py3.py | 77 ++++++++++++++++ 42 files changed, 2243 insertions(+), 61 deletions(-) create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source.py create mode 100644 
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset_py3.py diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index 87e0b8c70171..c926f3904e56 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -147,6 +147,7 @@ from .sap_open_hub_linked_service_py3 import SapOpenHubLinkedService from .sap_ecc_linked_service_py3 import SapEccLinkedService from .sap_cloud_for_customer_linked_service_py3 import SapCloudForCustomerLinkedService + from .salesforce_service_cloud_linked_service_py3 import SalesforceServiceCloudLinkedService from .salesforce_linked_service_py3 import SalesforceLinkedService from .office365_linked_service_py3 import Office365LinkedService from .azure_blob_fs_linked_service_py3 import AzureBlobFSLinkedService @@ -162,6 +163,8 @@ from .web_linked_service_py3 import WebLinkedService from .odata_linked_service_py3 import ODataLinkedService from .hdfs_linked_service_py3 import HdfsLinkedService + from .microsoft_access_linked_service_py3 import MicrosoftAccessLinkedService + from .informix_linked_service_py3 import InformixLinkedService from .odbc_linked_service_py3 import OdbcLinkedService from .azure_ml_linked_service_py3 import AzureMLLinkedService from .teradata_linked_service_py3 import TeradataLinkedService @@ -241,9 +244,13 @@ from .sap_hana_table_dataset_py3 import SapHanaTableDataset from .sap_ecc_resource_dataset_py3 import SapEccResourceDataset from .sap_cloud_for_customer_resource_dataset_py3 import SapCloudForCustomerResourceDataset + from .salesforce_service_cloud_object_dataset_py3 import SalesforceServiceCloudObjectDataset from .salesforce_object_dataset_py3 import SalesforceObjectDataset + from .microsoft_access_table_dataset_py3 import MicrosoftAccessTableDataset + from .informix_table_dataset_py3 import InformixTableDataset from .relational_table_dataset_py3 import RelationalTableDataset from .azure_my_sql_table_dataset_py3 import AzureMySqlTableDataset + from .teradata_table_dataset_py3 import TeradataTableDataset from .oracle_table_dataset_py3 import OracleTableDataset from .odata_resource_dataset_py3 import ODataResourceDataset from .cosmos_db_mongo_db_api_collection_dataset_py3 import CosmosDbMongoDbApiCollectionDataset @@ -334,6 +341,8 @@ from .mongo_db_source_py3 import MongoDbSource from .cassandra_source_py3 import CassandraSource from .web_source_py3 import WebSource + from .teradata_partition_settings_py3 import TeradataPartitionSettings + from .teradata_source_py3 import TeradataSource from .oracle_source_py3 import OracleSource from .azure_data_explorer_source_py3 import 
AzureDataExplorerSource from .azure_my_sql_source_py3 import AzureMySqlSource @@ -352,7 +361,10 @@ from .sap_hana_source_py3 import SapHanaSource from .sap_ecc_source_py3 import SapEccSource from .sap_cloud_for_customer_source_py3 import SapCloudForCustomerSource + from .salesforce_service_cloud_source_py3 import SalesforceServiceCloudSource from .salesforce_source_py3 import SalesforceSource + from .microsoft_access_source_py3 import MicrosoftAccessSource + from .informix_source_py3 import InformixSource from .relational_source_py3 import RelationalSource from .common_data_service_for_apps_source_py3 import CommonDataServiceForAppsSource from .dynamics_crm_source_py3 import DynamicsCrmSource @@ -394,11 +406,14 @@ from .redirect_incompatible_row_settings_py3 import RedirectIncompatibleRowSettings from .staging_settings_py3 import StagingSettings from .cosmos_db_mongo_db_api_sink_py3 import CosmosDbMongoDbApiSink + from .salesforce_service_cloud_sink_py3 import SalesforceServiceCloudSink from .salesforce_sink_py3 import SalesforceSink from .azure_data_explorer_sink_py3 import AzureDataExplorerSink from .common_data_service_for_apps_sink_py3 import CommonDataServiceForAppsSink from .dynamics_crm_sink_py3 import DynamicsCrmSink from .dynamics_sink_py3 import DynamicsSink + from .microsoft_access_sink_py3 import MicrosoftAccessSink + from .informix_sink_py3 import InformixSink from .odbc_sink_py3 import OdbcSink from .azure_search_index_sink_py3 import AzureSearchIndexSink from .azure_blob_fs_sink_py3 import AzureBlobFSSink @@ -611,6 +626,7 @@ from .sap_open_hub_linked_service import SapOpenHubLinkedService from .sap_ecc_linked_service import SapEccLinkedService from .sap_cloud_for_customer_linked_service import SapCloudForCustomerLinkedService + from .salesforce_service_cloud_linked_service import SalesforceServiceCloudLinkedService from .salesforce_linked_service import SalesforceLinkedService from .office365_linked_service import Office365LinkedService from .azure_blob_fs_linked_service import AzureBlobFSLinkedService @@ -626,6 +642,8 @@ from .web_linked_service import WebLinkedService from .odata_linked_service import ODataLinkedService from .hdfs_linked_service import HdfsLinkedService + from .microsoft_access_linked_service import MicrosoftAccessLinkedService + from .informix_linked_service import InformixLinkedService from .odbc_linked_service import OdbcLinkedService from .azure_ml_linked_service import AzureMLLinkedService from .teradata_linked_service import TeradataLinkedService @@ -705,9 +723,13 @@ from .sap_hana_table_dataset import SapHanaTableDataset from .sap_ecc_resource_dataset import SapEccResourceDataset from .sap_cloud_for_customer_resource_dataset import SapCloudForCustomerResourceDataset + from .salesforce_service_cloud_object_dataset import SalesforceServiceCloudObjectDataset from .salesforce_object_dataset import SalesforceObjectDataset + from .microsoft_access_table_dataset import MicrosoftAccessTableDataset + from .informix_table_dataset import InformixTableDataset from .relational_table_dataset import RelationalTableDataset from .azure_my_sql_table_dataset import AzureMySqlTableDataset + from .teradata_table_dataset import TeradataTableDataset from .oracle_table_dataset import OracleTableDataset from .odata_resource_dataset import ODataResourceDataset from .cosmos_db_mongo_db_api_collection_dataset import CosmosDbMongoDbApiCollectionDataset @@ -798,6 +820,8 @@ from .mongo_db_source import MongoDbSource from .cassandra_source import CassandraSource from .web_source 
import WebSource + from .teradata_partition_settings import TeradataPartitionSettings + from .teradata_source import TeradataSource from .oracle_source import OracleSource from .azure_data_explorer_source import AzureDataExplorerSource from .azure_my_sql_source import AzureMySqlSource @@ -816,7 +840,10 @@ from .sap_hana_source import SapHanaSource from .sap_ecc_source import SapEccSource from .sap_cloud_for_customer_source import SapCloudForCustomerSource + from .salesforce_service_cloud_source import SalesforceServiceCloudSource from .salesforce_source import SalesforceSource + from .microsoft_access_source import MicrosoftAccessSource + from .informix_source import InformixSource from .relational_source import RelationalSource from .common_data_service_for_apps_source import CommonDataServiceForAppsSource from .dynamics_crm_source import DynamicsCrmSource @@ -858,11 +885,14 @@ from .redirect_incompatible_row_settings import RedirectIncompatibleRowSettings from .staging_settings import StagingSettings from .cosmos_db_mongo_db_api_sink import CosmosDbMongoDbApiSink + from .salesforce_service_cloud_sink import SalesforceServiceCloudSink from .salesforce_sink import SalesforceSink from .azure_data_explorer_sink import AzureDataExplorerSink from .common_data_service_for_apps_sink import CommonDataServiceForAppsSink from .dynamics_crm_sink import DynamicsCrmSink from .dynamics_sink import DynamicsSink + from .microsoft_access_sink import MicrosoftAccessSink + from .informix_sink import InformixSink from .odbc_sink import OdbcSink from .azure_search_index_sink import AzureSearchIndexSink from .azure_blob_fs_sink import AzureBlobFSSink @@ -991,6 +1021,7 @@ AzureFunctionActivityMethod, WebActivityMethod, CassandraSourceReadConsistencyLevels, + TeradataPartitionOption, StoredProcedureParameterType, SalesforceSourceReadBehavior, HDInsightActivityDebugInfoOption, @@ -1150,6 +1181,7 @@ 'SapOpenHubLinkedService', 'SapEccLinkedService', 'SapCloudForCustomerLinkedService', + 'SalesforceServiceCloudLinkedService', 'SalesforceLinkedService', 'Office365LinkedService', 'AzureBlobFSLinkedService', @@ -1165,6 +1197,8 @@ 'WebLinkedService', 'ODataLinkedService', 'HdfsLinkedService', + 'MicrosoftAccessLinkedService', + 'InformixLinkedService', 'OdbcLinkedService', 'AzureMLLinkedService', 'TeradataLinkedService', @@ -1244,9 +1278,13 @@ 'SapHanaTableDataset', 'SapEccResourceDataset', 'SapCloudForCustomerResourceDataset', + 'SalesforceServiceCloudObjectDataset', 'SalesforceObjectDataset', + 'MicrosoftAccessTableDataset', + 'InformixTableDataset', 'RelationalTableDataset', 'AzureMySqlTableDataset', + 'TeradataTableDataset', 'OracleTableDataset', 'ODataResourceDataset', 'CosmosDbMongoDbApiCollectionDataset', @@ -1337,6 +1375,8 @@ 'MongoDbSource', 'CassandraSource', 'WebSource', + 'TeradataPartitionSettings', + 'TeradataSource', 'OracleSource', 'AzureDataExplorerSource', 'AzureMySqlSource', @@ -1355,7 +1395,10 @@ 'SapHanaSource', 'SapEccSource', 'SapCloudForCustomerSource', + 'SalesforceServiceCloudSource', 'SalesforceSource', + 'MicrosoftAccessSource', + 'InformixSource', 'RelationalSource', 'CommonDataServiceForAppsSource', 'DynamicsCrmSource', @@ -1397,11 +1440,14 @@ 'RedirectIncompatibleRowSettings', 'StagingSettings', 'CosmosDbMongoDbApiSink', + 'SalesforceServiceCloudSink', 'SalesforceSink', 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink', 'DynamicsCrmSink', 'DynamicsSink', + 'MicrosoftAccessSink', + 'InformixSink', 'OdbcSink', 'AzureSearchIndexSink', 'AzureBlobFSSink', @@ -1529,6 +1575,7 @@ 
'AzureFunctionActivityMethod', 'WebActivityMethod', 'CassandraSourceReadConsistencyLevels', + 'TeradataPartitionOption', 'StoredProcedureParameterType', 'SalesforceSourceReadBehavior', 'HDInsightActivityDebugInfoOption', diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py index 3bae5f748dd8..7631a05e1d46 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py @@ -16,13 +16,13 @@ class CopySink(Model): """A copy activity sink. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: CosmosDbMongoDbApiSink, SalesforceSink, - AzureDataExplorerSink, CommonDataServiceForAppsSink, DynamicsCrmSink, - DynamicsSink, OdbcSink, AzureSearchIndexSink, AzureBlobFSSink, - AzureDataLakeStoreSink, OracleSink, SqlDWSink, SqlMISink, AzureSqlSink, - SqlServerSink, SqlSink, DocumentDbCollectionSink, FileSystemSink, BlobSink, - ParquetSink, AzureTableSink, AzureQueueSink, SapCloudForCustomerSink, - DelimitedTextSink + sub-classes are: CosmosDbMongoDbApiSink, SalesforceServiceCloudSink, + SalesforceSink, AzureDataExplorerSink, CommonDataServiceForAppsSink, + DynamicsCrmSink, DynamicsSink, MicrosoftAccessSink, InformixSink, OdbcSink, + AzureSearchIndexSink, AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, + SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, + DocumentDbCollectionSink, FileSystemSink, BlobSink, ParquetSink, + AzureTableSink, AzureQueueSink, SapCloudForCustomerSink, DelimitedTextSink All required parameters must be populated in order to send to Azure. @@ -66,7 +66,7 @@ class CopySink(Model): } _subtype_map = { - 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'ParquetSink': 'ParquetSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'DelimitedTextSink': 'DelimitedTextSink'} + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 
'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'ParquetSink': 'ParquetSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'DelimitedTextSink': 'DelimitedTextSink'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py index 6278736affb5..238da7530e0e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py @@ -16,13 +16,13 @@ class CopySink(Model): """A copy activity sink. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: CosmosDbMongoDbApiSink, SalesforceSink, - AzureDataExplorerSink, CommonDataServiceForAppsSink, DynamicsCrmSink, - DynamicsSink, OdbcSink, AzureSearchIndexSink, AzureBlobFSSink, - AzureDataLakeStoreSink, OracleSink, SqlDWSink, SqlMISink, AzureSqlSink, - SqlServerSink, SqlSink, DocumentDbCollectionSink, FileSystemSink, BlobSink, - ParquetSink, AzureTableSink, AzureQueueSink, SapCloudForCustomerSink, - DelimitedTextSink + sub-classes are: CosmosDbMongoDbApiSink, SalesforceServiceCloudSink, + SalesforceSink, AzureDataExplorerSink, CommonDataServiceForAppsSink, + DynamicsCrmSink, DynamicsSink, MicrosoftAccessSink, InformixSink, OdbcSink, + AzureSearchIndexSink, AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, + SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, + DocumentDbCollectionSink, FileSystemSink, BlobSink, ParquetSink, + AzureTableSink, AzureQueueSink, SapCloudForCustomerSink, DelimitedTextSink All required parameters must be populated in order to send to Azure. 
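# ---------------------------------------------------------------------------
# Illustrative sketch (commentary, not part of the generated patch): the
# _subtype_map entries added above are what let msrest resolve the concrete
# sink class from the 'type' discriminator at deserialization time. A minimal
# example, assuming the standard msrest Model.deserialize behavior; the
# payload below is hypothetical.
from azure.mgmt.datafactory.models import CopySink, SqlMISink

sink = CopySink.deserialize({
    'type': 'SqlMISink',                      # discriminator value
    'preCopyScript': 'TRUNCATE TABLE dbo.T',  # hypothetical wire property
})
assert isinstance(sink, SqlMISink)
# ---------------------------------------------------------------------------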
@@ -66,7 +66,7 @@ class CopySink(Model): } _subtype_map = { - 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'ParquetSink': 'ParquetSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'DelimitedTextSink': 'DelimitedTextSink'} + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'ParquetSink': 'ParquetSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'DelimitedTextSink': 'DelimitedTextSink'} } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py index a8f3727c086f..a4e09622c6f4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py @@ -26,14 +26,15 @@ class CopySource(Model): EloquaSource, DrillSource, CouchbaseSource, ConcurSource, AzurePostgreSqlSource, AmazonMWSSource, HttpSource, AzureBlobFSSource, AzureDataLakeStoreSource, Office365Source, CosmosDbMongoDbApiSource, - MongoDbV2Source, MongoDbSource, CassandraSource, WebSource, OracleSource, - AzureDataExplorerSource, AzureMySqlSource, HdfsSource, FileSystemSource, - SqlDWSource, SqlMISource, AzureSqlSource, SqlServerSource, SqlSource, - RestSource, SapTableSource, SapOpenHubSource, SapHanaSource, SapEccSource, - SapCloudForCustomerSource, SalesforceSource, RelationalSource, - CommonDataServiceForAppsSource, DynamicsCrmSource, DynamicsSource, - DocumentDbCollectionSource, BlobSource, AzureTableSource, - DelimitedTextSource, ParquetSource + MongoDbV2Source, MongoDbSource, CassandraSource, WebSource, TeradataSource, + OracleSource, AzureDataExplorerSource, 
AzureMySqlSource, HdfsSource, + FileSystemSource, SqlDWSource, SqlMISource, AzureSqlSource, + SqlServerSource, SqlSource, RestSource, SapTableSource, SapOpenHubSource, + SapHanaSource, SapEccSource, SapCloudForCustomerSource, + SalesforceServiceCloudSource, SalesforceSource, MicrosoftAccessSource, + InformixSource, RelationalSource, CommonDataServiceForAppsSource, + DynamicsCrmSource, DynamicsSource, DocumentDbCollectionSource, BlobSource, + AzureTableSource, DelimitedTextSource, ParquetSource All required parameters must be populated in order to send to Azure. @@ -68,7 +69,7 @@ class CopySource(Model): } _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 
'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py index 29f8af33ab49..daf6fef73dad 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py @@ -26,14 +26,15 @@ class CopySource(Model): EloquaSource, DrillSource, CouchbaseSource, ConcurSource, AzurePostgreSqlSource, AmazonMWSSource, HttpSource, AzureBlobFSSource, AzureDataLakeStoreSource, Office365Source, CosmosDbMongoDbApiSource, - MongoDbV2Source, MongoDbSource, CassandraSource, WebSource, OracleSource, - AzureDataExplorerSource, AzureMySqlSource, HdfsSource, FileSystemSource, - SqlDWSource, SqlMISource, AzureSqlSource, SqlServerSource, SqlSource, - RestSource, SapTableSource, SapOpenHubSource, SapHanaSource, SapEccSource, - 
SapCloudForCustomerSource, SalesforceSource, RelationalSource, - CommonDataServiceForAppsSource, DynamicsCrmSource, DynamicsSource, - DocumentDbCollectionSource, BlobSource, AzureTableSource, - DelimitedTextSource, ParquetSource + MongoDbV2Source, MongoDbSource, CassandraSource, WebSource, TeradataSource, + OracleSource, AzureDataExplorerSource, AzureMySqlSource, HdfsSource, + FileSystemSource, SqlDWSource, SqlMISource, AzureSqlSource, + SqlServerSource, SqlSource, RestSource, SapTableSource, SapOpenHubSource, + SapHanaSource, SapEccSource, SapCloudForCustomerSource, + SalesforceServiceCloudSource, SalesforceSource, MicrosoftAccessSource, + InformixSource, RelationalSource, CommonDataServiceForAppsSource, + DynamicsCrmSource, DynamicsSource, DocumentDbCollectionSource, BlobSource, + AzureTableSource, DelimitedTextSource, ParquetSource All required parameters must be populated in order to send to Azure. @@ -68,7 +69,7 @@ class CopySource(Model): } _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 
'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py index 1911691ca857..b91a62663e84 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py @@ -366,6 +366,13 @@ class CassandraSourceReadConsistencyLevels(str, Enum): local_serial = "LOCAL_SERIAL" +class TeradataPartitionOption(str, Enum): + + none = "None" + hash = "Hash" + dynamic_range = "DynamicRange" + + class StoredProcedureParameterType(str, Enum): string = "String" diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py index 638f0b71b17b..9ed478d1a7b9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py @@ -33,11 +33,12 @@ class Dataset(Model): WebTableDataset, SapTableResourceDataset, RestResourceDataset, SqlServerTableDataset, SapOpenHubTableDataset, SapHanaTableDataset, SapEccResourceDataset, SapCloudForCustomerResourceDataset, - SalesforceObjectDataset, RelationalTableDataset, AzureMySqlTableDataset, - OracleTableDataset, ODataResourceDataset, - CosmosDbMongoDbApiCollectionDataset, MongoDbV2CollectionDataset, - MongoDbCollectionDataset, FileShareDataset, Office365Dataset, - AzureBlobFSDataset, AzureDataLakeStoreDataset, + SalesforceServiceCloudObjectDataset, SalesforceObjectDataset, + MicrosoftAccessTableDataset, InformixTableDataset, RelationalTableDataset, + AzureMySqlTableDataset, TeradataTableDataset, OracleTableDataset, + ODataResourceDataset, CosmosDbMongoDbApiCollectionDataset, + MongoDbV2CollectionDataset, MongoDbCollectionDataset, FileShareDataset, + Office365Dataset, AzureBlobFSDataset, AzureDataLakeStoreDataset, CommonDataServiceForAppsEntityDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, @@ -92,7 +93,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 
'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 
'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py index a5a412b3f7fb..7ef986b32db5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py @@ -33,11 +33,12 @@ class Dataset(Model): WebTableDataset, SapTableResourceDataset, RestResourceDataset, SqlServerTableDataset, SapOpenHubTableDataset, SapHanaTableDataset, SapEccResourceDataset, SapCloudForCustomerResourceDataset, - SalesforceObjectDataset, RelationalTableDataset, AzureMySqlTableDataset, - OracleTableDataset, ODataResourceDataset, - CosmosDbMongoDbApiCollectionDataset, MongoDbV2CollectionDataset, - MongoDbCollectionDataset, FileShareDataset, Office365Dataset, - AzureBlobFSDataset, AzureDataLakeStoreDataset, + SalesforceServiceCloudObjectDataset, SalesforceObjectDataset, + MicrosoftAccessTableDataset, InformixTableDataset, RelationalTableDataset, + AzureMySqlTableDataset, TeradataTableDataset, OracleTableDataset, + ODataResourceDataset, CosmosDbMongoDbApiCollectionDataset, + MongoDbV2CollectionDataset, MongoDbCollectionDataset, FileShareDataset, + Office365Dataset, AzureBlobFSDataset, AzureDataLakeStoreDataset, CommonDataServiceForAppsEntityDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, @@ -92,7 +93,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 
'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 
'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service.py new file mode 100644 index 000000000000..2a58e7a0f7d3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class InformixLinkedService(LinkedService): + """Informix linked service. + + All required parameters must be populated in order to send to Azure. 
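The Python 3 variant of this class (further below in this patch) exposes the same parameters documented next as keyword arguments; a minimal construction sketch, where the connection string, credentials, and runtime name are illustrative placeholders rather than values from this patch:

from azure.mgmt.datafactory.models import (
    InformixLinkedService,
    IntegrationRuntimeReference,
    SecureString,
)

informix_ls = InformixLinkedService(
    # Non-access-credential portion of the ODBC connection string (placeholder).
    connection_string='Driver={IBM INFORMIX ODBC DRIVER};Host=infx01;Database=stores',
    authentication_type='Basic',
    user_name='informix',
    # SecureString keeps the secret out of subsequent GET responses.
    password=SecureString(value='<password>'),
    # Hypothetical self-hosted integration runtime with network reach to the server.
    connect_via=IntegrationRuntimeReference(reference_name='MySelfHostedIR'),
)

Because the class sets type = 'Informix' and this patch registers that discriminator in the _subtype_map of linked_service.py, the same payload deserializes back to InformixLinkedService.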
+ + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The non-access credential portion of + the connection string as well as an optional encrypted credential. Type: + string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param authentication_type: Type of authentication used to connect to the + Informix ODBC data store. Possible values are: Anonymous and Basic. + Type: string (or Expression with resultType string). + :type authentication_type: object + :param credential: The access credential portion of the connection string + specified in driver-specific property-value format. + :type credential: ~azure.mgmt.datafactory.models.SecretBase + :param user_name: User name for Basic authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(InformixLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.credential = kwargs.get('credential', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Informix' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service_py3.py new file mode 100644 index 000000000000..03aadada664d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class InformixLinkedService(LinkedService): + """Informix linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The non-access credential portion of + the connection string as well as an optional encrypted credential. Type: + string, SecureString or AzureKeyVaultSecretReference.
+ :type connection_string: object + :param authentication_type: Type of authentication used to connect to the + Informix ODBC data store. Possible values are: Anonymous and Basic. + Type: string (or Expression with resultType string). + :type authentication_type: object + :param credential: The access credential portion of the connection string + specified in driver-specific property-value format. + :type credential: ~azure.mgmt.datafactory.models.SecretBase + :param user_name: User name for Basic authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, credential=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(InformixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.authentication_type = authentication_type + self.credential = credential + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'Informix' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink.py new file mode 100644 index 000000000000..c511f4ecc174 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated.
+# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class InformixSink(CopySink): + """A copy activity Informix sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). + :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(InformixSink, self).__init__(**kwargs) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.type = 'InformixSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink_py3.py new file mode 100644 index 000000000000..b0681ec0d423 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink_py3.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class InformixSink(CopySink): + """A copy activity Informix sink. + + All required parameters must be populated in order to send to Azure.
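A sketch of how this sink might be wired into a copy activity; the activity and dataset names are hypothetical, and BlobSource merely stands in for whatever store the pipeline actually reads from:

from azure.mgmt.datafactory.models import (
    BlobSource,
    CopyActivity,
    DatasetReference,
    InformixSink,
)

copy_to_informix = CopyActivity(
    name='CopyBlobToInformix',
    inputs=[DatasetReference(reference_name='SourceBlobDataset')],
    outputs=[DatasetReference(reference_name='InformixStagingTable')],
    source=BlobSource(),
    sink=InformixSink(
        # Clear the staging table before each load (illustrative script).
        pre_copy_script='DELETE FROM customers_stage',
        write_batch_size=1000,
    ),
)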
+ + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). + :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(InformixSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.pre_copy_script = pre_copy_script + self.type = 'InformixSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source.py new file mode 100644 index 000000000000..6cab908c7014 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class InformixSource(CopySource): + """A copy activity source for Informix. + + All required parameters must be populated in order to send to Azure.
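Reads from Informix flow through this matching source type; a brief sketch, with an illustrative query:

from azure.mgmt.datafactory.models import InformixSource

informix_source = InformixSource(
    # Any Informix SQL text; the table and columns here are illustrative.
    query='SELECT customer_num, fname, lname FROM customer',
    source_retry_count=2,
)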
+ + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(InformixSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'InformixSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source_py3.py new file mode 100644 index 000000000000..ed8fb0221239 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class InformixSource(CopySource): + """A copy activity source for Informix. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string).
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'InformixSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset.py new file mode 100644 index 000000000000..8b7364bff652 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class InformixTableDataset(Dataset): + """The Informix table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Informix table name. Type: string (or Expression + with resultType string).
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(InformixTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'InformixTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset_py3.py new file mode 100644 index 000000000000..05c458e797b1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class InformixTableDataset(Dataset): + """The Informix table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Informix table name. Type: string (or Expression + with resultType string).
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(InformixTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'InformixTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py index a5b11a8c7264..6a11f632875d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py @@ -38,11 +38,12 @@ class LinkedService(Model): AzureSearchLinkedService, CustomDataSourceLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, RestServiceLinkedService, SapOpenHubLinkedService, SapEccLinkedService, - SapCloudForCustomerLinkedService, SalesforceLinkedService, - Office365LinkedService, AzureBlobFSLinkedService, + SapCloudForCustomerLinkedService, SalesforceServiceCloudLinkedService, + SalesforceLinkedService, Office365LinkedService, AzureBlobFSLinkedService, AzureDataLakeStoreLinkedService, CosmosDbMongoDbApiLinkedService, MongoDbV2LinkedService, MongoDbLinkedService, CassandraLinkedService, - WebLinkedService, ODataLinkedService, HdfsLinkedService, OdbcLinkedService, + WebLinkedService, ODataLinkedService, HdfsLinkedService, + MicrosoftAccessLinkedService, InformixLinkedService, OdbcLinkedService, AzureMLLinkedService, TeradataLinkedService, Db2LinkedService, SybaseLinkedService, PostgreSqlLinkedService, MySqlLinkedService, AzureMySqlLinkedService, OracleLinkedService, FileServerLinkedService, @@ -88,7 +89,7 @@ class LinkedService(Model): } _subtype_map = { - 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 
'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 
'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py index 8114c9d2da19..8fa23951db00 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py @@ -38,11 +38,12 @@ class LinkedService(Model): AzureSearchLinkedService, CustomDataSourceLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, 
RestServiceLinkedService, SapOpenHubLinkedService, SapEccLinkedService, - SapCloudForCustomerLinkedService, SalesforceLinkedService, - Office365LinkedService, AzureBlobFSLinkedService, + SapCloudForCustomerLinkedService, SalesforceServiceCloudLinkedService, + SalesforceLinkedService, Office365LinkedService, AzureBlobFSLinkedService, AzureDataLakeStoreLinkedService, CosmosDbMongoDbApiLinkedService, MongoDbV2LinkedService, MongoDbLinkedService, CassandraLinkedService, - WebLinkedService, ODataLinkedService, HdfsLinkedService, OdbcLinkedService, + WebLinkedService, ODataLinkedService, HdfsLinkedService, + MicrosoftAccessLinkedService, InformixLinkedService, OdbcLinkedService, AzureMLLinkedService, TeradataLinkedService, Db2LinkedService, SybaseLinkedService, PostgreSqlLinkedService, MySqlLinkedService, AzureMySqlLinkedService, OracleLinkedService, FileServerLinkedService, @@ -88,7 +89,7 @@ class LinkedService(Model): } _subtype_map = { - 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 
'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 
'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} } def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service.py new file mode 100644 index 000000000000..b53164f6266b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class MicrosoftAccessLinkedService(LinkedService): + """Microsoft Access linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The non-access credential portion of + the connection string as well as an optional encrypted credential. Type: + string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param authentication_type: Type of authentication used to connect to the + Microsoft Access as ODBC data store. Possible values are: Anonymous and + Basic. Type: string (or Expression with resultType string). 
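
As an aside on the `_subtype_map` entries added above: msrest uses the `type` discriminator to pick the concrete model class during deserialization, so a REST-shaped LinkedService payload carrying the new 'MicrosoftAccess' type should come back as the subclass this patch introduces. A minimal sketch (the DSN value is hypothetical, and the exact deserialization entry point may vary by msrest version):

from azure.mgmt.datafactory.models import LinkedService, MicrosoftAccessLinkedService

# REST-shaped payload; the 'type' key selects the subclass via _subtype_map.
payload = {
    'type': 'MicrosoftAccess',
    'typeProperties': {'connectionString': 'DSN=MyAccessDsn'},  # hypothetical DSN
}
ls = LinkedService.from_dict(payload)
assert isinstance(ls, MicrosoftAccessLinkedService)
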
+ :type authentication_type: object + :param credential: The access credential portion of the connection string + specified in driver-specific property-value format. + :type credential: ~azure.mgmt.datafactory.models.SecretBase + :param user_name: User name for Basic authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MicrosoftAccessLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.credential = kwargs.get('credential', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'MicrosoftAccess' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service_py3.py new file mode 100644 index 000000000000..c9f79c24adf3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class MicrosoftAccessLinkedService(LinkedService): + """Microsoft Access linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
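
For orientation, a minimal sketch of how the MicrosoftAccessLinkedService defined above might be instantiated (the DSN and user name are illustrative only; SecureString is the existing SecretBase subtype in this models package):

from azure.mgmt.datafactory.models import MicrosoftAccessLinkedService, SecureString

access_ls = MicrosoftAccessLinkedService(
    connection_string='DSN=MyAccessDsn',  # required; non-credential portion
    authentication_type='Basic',          # 'Anonymous' or 'Basic'
    user_name='odbc_user',                # hypothetical
    password=SecureString(value='<odbc password>'),
)
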
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The non-access credential portion of + the connection string as well as an optional encrypted credential. Type: + string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param authentication_type: Type of authentication used to connect to the + Microsoft Access as ODBC data store. Possible values are: Anonymous and + Basic. Type: string (or Expression with resultType string). + :type authentication_type: object + :param credential: The access credential portion of the connection string + specified in driver-specific property-value format. + :type credential: ~azure.mgmt.datafactory.models.SecretBase + :param user_name: User name for Basic authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, credential=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(MicrosoftAccessLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.authentication_type = authentication_type + self.credential = credential + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'MicrosoftAccess' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink.py new file mode 100644 index 000000000000..53406fa25022 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class MicrosoftAccessSink(CopySink): + """A copy activity Microsoft Access sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). 
+ :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MicrosoftAccessSink, self).__init__(**kwargs) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.type = 'MicrosoftAccessSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink_py3.py new file mode 100644 index 000000000000..700db840c03d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink_py3.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class MicrosoftAccessSink(CopySink): + """A copy activity Microsoft Access sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). 
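
A quick sketch of the sink defined above, with a hypothetical pre-copy cleanup statement (the table name and batch size are illustrative):

from azure.mgmt.datafactory.models import MicrosoftAccessSink

access_sink = MicrosoftAccessSink(
    pre_copy_script='DELETE FROM Staging_Customers',  # hypothetical cleanup query
    write_batch_size=500,
)
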
+ :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.pre_copy_script = pre_copy_script + self.type = 'MicrosoftAccessSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source.py new file mode 100644 index 000000000000..73cd3a64184c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class MicrosoftAccessSource(CopySource): + """A copy activity source for Microsoft Access. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MicrosoftAccessSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'MicrosoftAccessSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source_py3.py new file mode 100644 index 000000000000..1cccd82c8b19 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class MicrosoftAccessSource(CopySource): + """A copy activity source for Microsoft Access. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
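
Putting the new source together with the sink from earlier in this patch: a sketch of a copy activity that reads via a query and writes through the Microsoft Access sink. CopyActivity and DatasetReference are existing models in this package; all names, the query, and the pre-copy script are hypothetical:

from azure.mgmt.datafactory.models import (
    CopyActivity, DatasetReference, MicrosoftAccessSink, MicrosoftAccessSource)

copy_access = CopyActivity(
    name='CopyAccessCustomers',  # hypothetical activity name
    inputs=[DatasetReference(reference_name='AccessCustomers')],
    outputs=[DatasetReference(reference_name='AccessCustomersCopy')],
    source=MicrosoftAccessSource(query='SELECT * FROM Customers'),
    sink=MicrosoftAccessSink(pre_copy_script='DELETE FROM Customers_Copy'),
)
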
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MicrosoftAccessSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'MicrosoftAccessSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset.py new file mode 100644 index 000000000000..f312dae024f5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class MicrosoftAccessTableDataset(Dataset): + """The Microsoft Access table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Microsoft Access table name. Type: string (or + Expression with resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MicrosoftAccessTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'MicrosoftAccessTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset_py3.py new file mode 100644 index 000000000000..3fad904ef58b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class MicrosoftAccessTableDataset(Dataset): + """The Microsoft Access table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Microsoft Access table name. Type: string (or + Expression with resultType string). 
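
A sketch of declaring the table dataset above against a previously defined linked service (the reference name and table name are illustrative; LinkedServiceReference is an existing model in this package):

from azure.mgmt.datafactory.models import (
    LinkedServiceReference, MicrosoftAccessTableDataset)

access_table = MicrosoftAccessTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='AccessLinkedService'),
    table_name='Customers',  # hypothetical table
)
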
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(MicrosoftAccessTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'MicrosoftAccessTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service.py new file mode 100644 index 000000000000..fb6476ac9a30 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class SalesforceServiceCloudLinkedService(LinkedService): + """Linked service for Salesforce Service Cloud. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param environment_url: The URL of Salesforce Service Cloud instance. + Default is 'https://login.salesforce.com'. To copy data from sandbox, + specify 'https://test.salesforce.com'. To copy data from custom domain, + specify, for example, 'https://[domain].my.salesforce.com'. Type: string + (or Expression with resultType string). 
+ :type environment_url: object
+ :param username: The username for Basic authentication of the Salesforce
+ instance. Type: string (or Expression with resultType string).
+ :type username: object
+ :param password: The password for Basic authentication of the Salesforce
+ instance.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param security_token: The security token required to remotely access the
+ Salesforce instance.
+ :type security_token: ~azure.mgmt.datafactory.models.SecretBase
+ :param extended_properties: Extended properties appended to the connection
+ string. Type: string (or Expression with resultType string).
+ :type extended_properties: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'},
+ 'username': {'key': 'typeProperties.username', 'type': 'object'},
+ 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+ 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'},
+ 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': 'object'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(SalesforceServiceCloudLinkedService, self).__init__(**kwargs)
+ self.environment_url = kwargs.get('environment_url', None)
+ self.username = kwargs.get('username', None)
+ self.password = kwargs.get('password', None)
+ self.security_token = kwargs.get('security_token', None)
+ self.extended_properties = kwargs.get('extended_properties', None)
+ self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.type = 'SalesforceServiceCloud'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service_py3.py
new file mode 100644
index 000000000000..3f0b3cc64d91
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service_py3.py
@@ -0,0 +1,87 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class SalesforceServiceCloudLinkedService(LinkedService):
+ """Linked service for Salesforce Service Cloud.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param environment_url: The URL of the Salesforce Service Cloud instance.
+ Default is 'https://login.salesforce.com'. To copy data from a sandbox,
+ specify 'https://test.salesforce.com'. To copy data from a custom domain,
+ specify, for example, 'https://[domain].my.salesforce.com'. Type: string
+ (or Expression with resultType string).
+ :type environment_url: object
+ :param username: The username for Basic authentication of the Salesforce
+ instance. Type: string (or Expression with resultType string).
+ :type username: object
+ :param password: The password for Basic authentication of the Salesforce
+ instance.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param security_token: The security token required to remotely access the
+ Salesforce instance.
+ :type security_token: ~azure.mgmt.datafactory.models.SecretBase
+ :param extended_properties: Extended properties appended to the connection
+ string. Type: string (or Expression with resultType string).
+ :type extended_properties: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, + 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, environment_url=None, username=None, password=None, security_token=None, extended_properties=None, encrypted_credential=None, **kwargs) -> None: + super(SalesforceServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.environment_url = environment_url + self.username = username + self.password = password + self.security_token = security_token + self.extended_properties = extended_properties + self.encrypted_credential = encrypted_credential + self.type = 'SalesforceServiceCloud' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset.py new file mode 100644 index 000000000000..1f5cb3bb5bf1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class SalesforceServiceCloudObjectDataset(Dataset): + """The Salesforce Service Cloud object dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
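
To make the new linked service concrete, a sketch pointing at a sandbox org (the credentials are placeholders; SecureString is the in-package SecretBase subtype):

from azure.mgmt.datafactory.models import (
    SalesforceServiceCloudLinkedService, SecureString)

sfsc_ls = SalesforceServiceCloudLinkedService(
    environment_url='https://test.salesforce.com',  # sandbox endpoint
    username='integration.user@example.com',        # hypothetical
    password=SecureString(value='<password>'),
    security_token=SecureString(value='<security token>'),
)
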
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param object_api_name: The Salesforce Service Cloud object API name. + Type: string (or Expression with resultType string). + :type object_api_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SalesforceServiceCloudObjectDataset, self).__init__(**kwargs) + self.object_api_name = kwargs.get('object_api_name', None) + self.type = 'SalesforceServiceCloudObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset_py3.py new file mode 100644 index 000000000000..d215f5f0084d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SalesforceServiceCloudObjectDataset(Dataset): + """The Salesforce Service Cloud object dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param object_api_name: The Salesforce Service Cloud object API name. + Type: string (or Expression with resultType string). + :type object_api_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, object_api_name=None, **kwargs) -> None: + super(SalesforceServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.object_api_name = object_api_name + self.type = 'SalesforceServiceCloudObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink.py new file mode 100644 index 000000000000..99e2b1a2c924 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink.py @@ -0,0 +1,84 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class SalesforceServiceCloudSink(CopySink): + """A copy activity Salesforce Service Cloud sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. 
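
A sketch of the object dataset above bound to a Salesforce Service Cloud linked service (the reference name is hypothetical; 'Case' stands in for any Salesforce object API name):

from azure.mgmt.datafactory.models import (
    LinkedServiceReference, SalesforceServiceCloudObjectDataset)

case_dataset = SalesforceServiceCloudObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='SfServiceCloudLS'),
    object_api_name='Case',
)
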
Type: string (or
+ Expression with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type write_batch_timeout: object
+ :param sink_retry_count: Sink retry count. Type: integer (or Expression
+ with resultType integer).
+ :type sink_retry_count: object
+ :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+ resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type sink_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the sink data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param write_behavior: The write behavior for the operation. Default is
+ Insert. Possible values include: 'Insert', 'Upsert'
+ :type write_behavior: str or
+ ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior
+ :param external_id_field_name: The name of the external ID field for the
+ upsert operation. Default value is the 'Id' column. Type: string (or
+ Expression with resultType string).
+ :type external_id_field_name: object
+ :param ignore_null_values: The flag indicating whether or not to ignore
+ null values from the input dataset (except key fields) during a write
+ operation. Default value is false. If set to true, ADF leaves the data in
+ the destination object unchanged when doing an upsert/update operation and
+ inserts the defined default value when doing an insert operation; if set
+ to false, ADF updates the data in the destination object to NULL when
+ doing an upsert/update operation and inserts a NULL value when doing an
+ insert operation. Type: boolean (or Expression with resultType boolean).
+ :type ignore_null_values: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+ 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+ 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
+ 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'},
+ 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(SalesforceServiceCloudSink, self).__init__(**kwargs)
+ self.write_behavior = kwargs.get('write_behavior', None)
+ self.external_id_field_name = kwargs.get('external_id_field_name', None)
+ self.ignore_null_values = kwargs.get('ignore_null_values', None)
+ self.type = 'SalesforceServiceCloudSink'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink_py3.py
new file mode 100644
index 000000000000..2abfaa12d0e7
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink_py3.py
@@ -0,0 +1,84 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_sink_py3 import CopySink
+
+
+class SalesforceServiceCloudSink(CopySink):
+ """A copy activity Salesforce Service Cloud sink.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param write_batch_size: Write batch size. Type: integer (or Expression
+ with resultType integer), minimum: 0.
+ :type write_batch_size: object
+ :param write_batch_timeout: Write batch timeout. Type: string (or
+ Expression with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type write_batch_timeout: object
+ :param sink_retry_count: Sink retry count. Type: integer (or Expression
+ with resultType integer).
+ :type sink_retry_count: object
+ :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+ resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type sink_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the sink data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param write_behavior: The write behavior for the operation. Default is
+ Insert. Possible values include: 'Insert', 'Upsert'
+ :type write_behavior: str or
+ ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior
+ :param external_id_field_name: The name of the external ID field for the
+ upsert operation. Default value is the 'Id' column. Type: string (or
+ Expression with resultType string).
+ :type external_id_field_name: object
+ :param ignore_null_values: The flag indicating whether or not to ignore
+ null values from the input dataset (except key fields) during a write
+ operation. Default value is false. If set to true, ADF leaves the data in
+ the destination object unchanged when doing an upsert/update operation and
+ inserts the defined default value when doing an insert operation; if set
+ to false, ADF updates the data in the destination object to NULL when
+ doing an upsert/update operation and inserts a NULL value when doing an
+ insert operation. Type: boolean (or Expression with resultType boolean).
+ :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None: + super(SalesforceServiceCloudSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.write_behavior = write_behavior + self.external_id_field_name = external_id_field_name + self.ignore_null_values = ignore_null_values + self.type = 'SalesforceServiceCloudSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source.py new file mode 100644 index 000000000000..255bfab477bc --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SalesforceServiceCloudSource(CopySource): + """A copy activity Salesforce Service Cloud source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
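
The write_behavior / external_id_field_name / ignore_null_values trio above only comes into play for upserts. A minimal sketch, not part of the generated patch, of how a caller might populate the new sink, assuming the model is re-exported from azure.mgmt.datafactory.models as the __init__.py hunks elsewhere in this series indicate; the external-ID field name is hypothetical:

from azure.mgmt.datafactory.models import SalesforceServiceCloudSink

# Upsert on a custom external-ID field, and keep existing destination
# values wherever the input carries NULLs.
sink = SalesforceServiceCloudSink(
    write_behavior='Upsert',                  # default is 'Insert'
    external_id_field_name='External_Id__c',  # hypothetical field; default is the 'Id' column
    ignore_null_values=True,
)
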
+ :type query: object + :param read_behavior: The read behavior for the operation. Default is + Query. Possible values include: 'Query', 'QueryAll' + :type read_behavior: str or + ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SalesforceServiceCloudSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.read_behavior = kwargs.get('read_behavior', None) + self.type = 'SalesforceServiceCloudSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source_py3.py new file mode 100644 index 000000000000..77bb267f5a47 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source_py3.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SalesforceServiceCloudSource(CopySource): + """A copy activity Salesforce Service Cloud source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + :param read_behavior: The read behavior for the operation. Default is + Query. 
Possible values include: 'Query', 'QueryAll' + :type read_behavior: str or + ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, read_behavior=None, **kwargs) -> None: + super(SalesforceServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.read_behavior = read_behavior + self.type = 'SalesforceServiceCloudSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py index 78b89638b359..6e02b0d389ab 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py @@ -33,8 +33,11 @@ class TeradataLinkedService(LinkedService): :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str - :param server: Required. Server name for connection. Type: string (or - Expression with resultType string). + :param connection_string: Teradata ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param server: Server name for connection. Type: string (or Expression + with resultType string). :type server: object :param authentication_type: AuthenticationType to be used for connection. 
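
A matching sketch for the source side, under the same import assumption; the SOQL text is illustrative only:

from azure.mgmt.datafactory.models import SalesforceServiceCloudSource

source = SalesforceServiceCloudSource(
    query="SELECT Id, Subject FROM Case",  # hypothetical SOQL query
    read_behavior='QueryAll',              # 'Query' is the default; 'QueryAll' also returns deleted records
)
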
Possible values include: 'Basic', 'Windows' @@ -53,7 +56,6 @@ class TeradataLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'server': {'required': True}, } _attribute_map = { @@ -63,6 +65,7 @@ class TeradataLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'server': {'key': 'typeProperties.server', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, @@ -72,6 +75,7 @@ class TeradataLinkedService(LinkedService): def __init__(self, **kwargs): super(TeradataLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) self.server = kwargs.get('server', None) self.authentication_type = kwargs.get('authentication_type', None) self.username = kwargs.get('username', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py index e80b776454c0..aac40efe69e0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py @@ -33,8 +33,11 @@ class TeradataLinkedService(LinkedService): :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str - :param server: Required. Server name for connection. Type: string (or - Expression with resultType string). + :param connection_string: Teradata ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param server: Server name for connection. Type: string (or Expression + with resultType string). :type server: object :param authentication_type: AuthenticationType to be used for connection. 
Possible values include: 'Basic', 'Windows' @@ -53,7 +56,6 @@ class TeradataLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'server': {'required': True}, } _attribute_map = { @@ -63,6 +65,7 @@ class TeradataLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'server': {'key': 'typeProperties.server', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, @@ -70,8 +73,9 @@ class TeradataLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, *, server, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, server=None, authentication_type=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: super(TeradataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string self.server = server self.authentication_type = authentication_type self.username = username diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings.py new file mode 100644 index 000000000000..0f9c023f9553 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TeradataPartitionSettings(Model): + """The settings that will be leveraged for teradata source partitioning. + + :param partition_column_name: The name of the column that will be used for + proceeding range or hash partitioning. Type: string (or Expression with + resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). 
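
With server no longer required, the Teradata linked service can now be built from a connection string alone. A sketch under that assumption; the ODBC string and credentials are placeholders:

from azure.mgmt.datafactory.models import SecureString, TeradataLinkedService

linked_service = TeradataLinkedService(
    connection_string=SecureString(
        value='DBCName=teradata.example.com;Uid=loader;Pwd=<secret>'),
)

# The pre-existing shape keeps working, with `server` now merely optional:
legacy = TeradataLinkedService(
    server='teradata.example.com',
    authentication_type='Basic',
    username='loader',
)
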
+ :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(TeradataPartitionSettings, self).__init__(**kwargs) + self.partition_column_name = kwargs.get('partition_column_name', None) + self.partition_upper_bound = kwargs.get('partition_upper_bound', None) + self.partition_lower_bound = kwargs.get('partition_lower_bound', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings_py3.py new file mode 100644 index 000000000000..04824e614ff2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings_py3.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TeradataPartitionSettings(Model): + """The settings that will be leveraged for teradata source partitioning. + + :param partition_column_name: The name of the column that will be used for + proceeding range or hash partitioning. Type: string (or Expression with + resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__(self, *, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None: + super(TeradataPartitionSettings, self).__init__(**kwargs) + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source.py new file mode 100644 index 000000000000..81d1c8e202c1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class TeradataSource(CopySource): + """A copy activity Teradata source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Teradata query. Type: string (or Expression with resultType + string). + :type query: object + :param partition_option: The partition mechanism that will be used for + teradata read in parallel. Possible values include: 'None', 'Hash', + 'DynamicRange' + :type partition_option: str or + ~azure.mgmt.datafactory.models.TeradataPartitionOption + :param partition_settings: The settings that will be leveraged for + teradata source partitioning. + :type partition_settings: + ~azure.mgmt.datafactory.models.TeradataPartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, + } + + def __init__(self, **kwargs): + super(TeradataSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) + self.type = 'TeradataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source_py3.py new file mode 100644 index 000000000000..79d8ccb01f14 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
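
A sketch of wiring the new partition settings into the source defined above; the column, bounds, and query are hypothetical, and 'DynamicRange' is the partition option that consumes the range bounds:

from azure.mgmt.datafactory.models import TeradataPartitionSettings, TeradataSource

settings = TeradataPartitionSettings(
    partition_column_name='order_id',  # hypothetical integer column
    partition_lower_bound='1',
    partition_upper_bound='1000000',
)
source = TeradataSource(
    query='SELECT * FROM orders',      # hypothetical query
    partition_option='DynamicRange',   # other values: 'None', 'Hash'
    partition_settings=settings,
)
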
+# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class TeradataSource(CopySource): + """A copy activity Teradata source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Teradata query. Type: string (or Expression with resultType + string). + :type query: object + :param partition_option: The partition mechanism that will be used for + teradata read in parallel. Possible values include: 'None', 'Hash', + 'DynamicRange' + :type partition_option: str or + ~azure.mgmt.datafactory.models.TeradataPartitionOption + :param partition_settings: The settings that will be leveraged for + teradata source partitioning. + :type partition_settings: + ~azure.mgmt.datafactory.models.TeradataPartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, partition_option=None, partition_settings=None, **kwargs) -> None: + super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.partition_option = partition_option + self.partition_settings = partition_settings + self.type = 'TeradataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset.py new file mode 100644 index 000000000000..e396bfd6fb15 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class TeradataTableDataset(Dataset): + """The Teradata database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param database: The database name of Teradata. Type: string (or + Expression with resultType string). + :type database: object + :param table: The table name of Teradata. Type: string (or Expression with + resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(TeradataTableDataset, self).__init__(**kwargs) + self.database = kwargs.get('database', None) + self.table = kwargs.get('table', None) + self.type = 'TeradataTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset_py3.py new file mode 100644 index 000000000000..892707b7f133 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class TeradataTableDataset(Dataset): + """The Teradata database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param database: The database name of Teradata. Type: string (or + Expression with resultType string). + :type database: object + :param table: The table name of Teradata. Type: string (or Expression with + resultType string). 
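
A short sketch of the dataset itself; the linked-service reference name is made up:

from azure.mgmt.datafactory.models import LinkedServiceReference, TeradataTableDataset

dataset = TeradataTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='TeradataLS'),
    database='sales',  # serialized under typeProperties.database
    table='orders',    # serialized under typeProperties.table
)
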
+ :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, database=None, table=None, **kwargs) -> None: + super(TeradataTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.database = database + self.table = table + self.type = 'TeradataTable' From 6cf80af8807550ec801ed5a5f7d3cd53bb37c846 Mon Sep 17 00:00:00 2001 From: Azure SDK for Python bot Date: Wed, 17 Jul 2019 09:42:45 -0700 Subject: [PATCH 08/30] [AutoPR datafactory/resource-manager] SSIS File System Support (#6216) * Generated from 29f3be5668f9d26352c4711117630ff4a4fd431b SSIS File System Support * Generated from 29f3be5668f9d26352c4711117630ff4a4fd431b SSIS File System Support --- .../azure/mgmt/datafactory/models/__init__.py | 8 +++ .../data_factory_management_client_enums.py | 6 ++ .../models/execute_ssis_package_activity.py | 4 ++ .../execute_ssis_package_activity_py3.py | 6 +- .../models/ssis_access_credential.py | 44 ++++++++++++++ .../models/ssis_access_credential_py3.py | 44 ++++++++++++++ .../datafactory/models/ssis_log_location.py | 57 +++++++++++++++++++ .../models/ssis_log_location_py3.py | 57 +++++++++++++++++++ .../models/ssis_package_location.py | 19 +++++++ .../models/ssis_package_location_py3.py | 21 ++++++- 10 files changed, 264 insertions(+), 2 deletions(-) create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location_py3.py diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index c926f3904e56..c76972d1c03f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -393,6 +393,8 @@ from .sql_server_stored_procedure_activity_py3 import SqlServerStoredProcedureActivity from .custom_activity_reference_object_py3 import CustomActivityReferenceObject from .custom_activity_py3 import CustomActivity + from .ssis_access_credential_py3 import SSISAccessCredential + from 
.ssis_log_location_py3 import SSISLogLocation from .ssis_property_override_py3 import SSISPropertyOverride from .ssis_execution_parameter_py3 import SSISExecutionParameter from .ssis_execution_credential_py3 import SSISExecutionCredential @@ -872,6 +874,8 @@ from .sql_server_stored_procedure_activity import SqlServerStoredProcedureActivity from .custom_activity_reference_object import CustomActivityReferenceObject from .custom_activity import CustomActivity + from .ssis_access_credential import SSISAccessCredential + from .ssis_log_location import SSISLogLocation from .ssis_property_override import SSISPropertyOverride from .ssis_execution_parameter import SSISExecutionParameter from .ssis_execution_credential import SSISExecutionCredential @@ -1024,6 +1028,7 @@ TeradataPartitionOption, StoredProcedureParameterType, SalesforceSourceReadBehavior, + SsisPackageLocationType, HDInsightActivityDebugInfoOption, SalesforceSinkWriteBehavior, AzureSearchIndexWriteBehaviorType, @@ -1427,6 +1432,8 @@ 'SqlServerStoredProcedureActivity', 'CustomActivityReferenceObject', 'CustomActivity', + 'SSISAccessCredential', + 'SSISLogLocation', 'SSISPropertyOverride', 'SSISExecutionParameter', 'SSISExecutionCredential', @@ -1578,6 +1585,7 @@ 'TeradataPartitionOption', 'StoredProcedureParameterType', 'SalesforceSourceReadBehavior', + 'SsisPackageLocationType', 'HDInsightActivityDebugInfoOption', 'SalesforceSinkWriteBehavior', 'AzureSearchIndexWriteBehaviorType', diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py index b91a62663e84..d643be25fcf4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py @@ -390,6 +390,12 @@ class SalesforceSourceReadBehavior(str, Enum): query_all = "QueryAll" +class SsisPackageLocationType(str, Enum): + + ssisdb = "SSISDB" + file = "File" + + class HDInsightActivityDebugInfoOption(str, Enum): none = "None" diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity.py index 3ea2abd2e734..9efa853dac86 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity.py @@ -73,6 +73,8 @@ class ExecuteSSISPackageActivity(ExecutionActivity): package. :type property_overrides: dict[str, ~azure.mgmt.datafactory.models.SSISPropertyOverride] + :param log_location: SSIS package execution log location. 
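
The new enum is the discriminator between catalog-deployed (SSISDB) and file-system packages; a two-line sketch of referencing it:

from azure.mgmt.datafactory.models import SsisPackageLocationType

file_location_type = SsisPackageLocationType.file      # serializes as "File"
catalog_location_type = SsisPackageLocationType.ssisdb # serializes as "SSISDB"
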
+ :type log_location: ~azure.mgmt.datafactory.models.SSISLogLocation """ _validation = { @@ -102,6 +104,7 @@ class ExecuteSSISPackageActivity(ExecutionActivity): 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, 'property_overrides': {'key': 'typeProperties.propertyOverrides', 'type': '{SSISPropertyOverride}'}, + 'log_location': {'key': 'typeProperties.logLocation', 'type': 'SSISLogLocation'}, } def __init__(self, **kwargs): @@ -117,4 +120,5 @@ def __init__(self, **kwargs): self.project_connection_managers = kwargs.get('project_connection_managers', None) self.package_connection_managers = kwargs.get('package_connection_managers', None) self.property_overrides = kwargs.get('property_overrides', None) + self.log_location = kwargs.get('log_location', None) self.type = 'ExecuteSSISPackage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity_py3.py index fb72bacf03d9..64efa9cd63ac 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity_py3.py @@ -73,6 +73,8 @@ class ExecuteSSISPackageActivity(ExecutionActivity): package. :type property_overrides: dict[str, ~azure.mgmt.datafactory.models.SSISPropertyOverride] + :param log_location: SSIS package execution log location. + :type log_location: ~azure.mgmt.datafactory.models.SSISLogLocation """ _validation = { @@ -102,9 +104,10 @@ class ExecuteSSISPackageActivity(ExecutionActivity): 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, 'property_overrides': {'key': 'typeProperties.propertyOverrides', 'type': '{SSISPropertyOverride}'}, + 'log_location': {'key': 'typeProperties.logLocation', 'type': 'SSISLogLocation'}, } - def __init__(self, *, name: str, package_location, connect_via, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, runtime=None, logging_level=None, environment_path=None, execution_credential=None, project_parameters=None, package_parameters=None, project_connection_managers=None, package_connection_managers=None, property_overrides=None, **kwargs) -> None: + def __init__(self, *, name: str, package_location, connect_via, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, runtime=None, logging_level=None, environment_path=None, execution_credential=None, project_parameters=None, package_parameters=None, project_connection_managers=None, package_connection_managers=None, property_overrides=None, log_location=None, **kwargs) -> None: super(ExecuteSSISPackageActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.package_location = package_location self.runtime = runtime @@ -117,4 +120,5 @@ def 
__init__(self, *, name: str, package_location, connect_via, additional_prope
         self.project_connection_managers = project_connection_managers
         self.package_connection_managers = package_connection_managers
         self.property_overrides = property_overrides
+        self.log_location = log_location
         self.type = 'ExecuteSSISPackage'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential.py
new file mode 100644
index 000000000000..63512fdec4d8
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential.py
@@ -0,0 +1,44 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class SSISAccessCredential(Model):
+    """SSIS access credential.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param domain: Required. Domain for Windows authentication.
+    :type domain: object
+    :param user_name: Required. UserName for Windows authentication.
+    :type user_name: object
+    :param password: Required. Password for Windows authentication.
+    :type password: ~azure.mgmt.datafactory.models.SecureString
+    """
+
+    _validation = {
+        'domain': {'required': True},
+        'user_name': {'required': True},
+        'password': {'required': True},
+    }
+
+    _attribute_map = {
+        'domain': {'key': 'domain', 'type': 'object'},
+        'user_name': {'key': 'userName', 'type': 'object'},
+        'password': {'key': 'password', 'type': 'SecureString'},
+    }
+
+    def __init__(self, **kwargs):
+        super(SSISAccessCredential, self).__init__(**kwargs)
+        self.domain = kwargs.get('domain', None)
+        self.user_name = kwargs.get('user_name', None)
+        self.password = kwargs.get('password', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential_py3.py
new file mode 100644
index 000000000000..5df0fc8941da
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential_py3.py
@@ -0,0 +1,44 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class SSISAccessCredential(Model):
+    """SSIS access credential.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param domain: Required. Domain for Windows authentication.
+    :type domain: object
+    :param user_name: Required. UserName for Windows authentication.
+    :type user_name: object
+    :param password: Required. Password for Windows authentication.
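
A sketch of constructing the credential; the domain and account are placeholders, and SecureString is the secret wrapper the model declares:

from azure.mgmt.datafactory.models import SecureString, SSISAccessCredential

credential = SSISAccessCredential(
    domain='CONTOSO',         # hypothetical Windows domain
    user_name='ssis_runner',  # hypothetical account
    password=SecureString(value='<secret>'),
)
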
+ :type password: ~azure.mgmt.datafactory.models.SecureString + """ + + _validation = { + 'domain': {'required': True}, + 'user_name': {'required': True}, + 'password': {'required': True}, + } + + _attribute_map = { + 'domain': {'key': 'domain', 'type': 'object'}, + 'user_name': {'key': 'userName', 'type': 'object'}, + 'password': {'key': 'password', 'type': 'SecureString'}, + } + + def __init__(self, *, domain, user_name, password, **kwargs) -> None: + super(SSISAccessCredential, self).__init__(**kwargs) + self.domain = domain + self.user_name = user_name + self.password = password diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location.py new file mode 100644 index 000000000000..cfdebe717541 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SSISLogLocation(Model): + """SSIS package execution log location. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param log_path: Required. The SSIS package execution log path. Type: + string (or Expression with resultType string). + :type log_path: object + :ivar type: Required. The type of SSIS log location. Default value: "File" + . + :vartype type: str + :param access_credential: The package execution log access credential. + :type access_credential: + ~azure.mgmt.datafactory.models.SSISAccessCredential + :param log_refresh_interval: Specifies the interval to refresh log. The + default interval is 5 minutes. Type: string (or Expression with resultType + string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type log_refresh_interval: object + """ + + _validation = { + 'log_path': {'required': True}, + 'type': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'log_path': {'key': 'logPath', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, + 'log_refresh_interval': {'key': 'typeProperties.logRefreshInterval', 'type': 'object'}, + } + + type = "File" + + def __init__(self, **kwargs): + super(SSISLogLocation, self).__init__(**kwargs) + self.log_path = kwargs.get('log_path', None) + self.access_credential = kwargs.get('access_credential', None) + self.log_refresh_interval = kwargs.get('log_refresh_interval', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location_py3.py new file mode 100644 index 000000000000..de4fbe35dcb3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SSISLogLocation(Model): + """SSIS package execution log location. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param log_path: Required. The SSIS package execution log path. Type: + string (or Expression with resultType string). + :type log_path: object + :ivar type: Required. The type of SSIS log location. Default value: "File" + . + :vartype type: str + :param access_credential: The package execution log access credential. + :type access_credential: + ~azure.mgmt.datafactory.models.SSISAccessCredential + :param log_refresh_interval: Specifies the interval to refresh log. The + default interval is 5 minutes. Type: string (or Expression with resultType + string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
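
Putting the pieces together for file-system logging; the UNC path is a placeholder, and `type` is a class constant ("File") rather than a constructor argument:

from azure.mgmt.datafactory.models import (SecureString, SSISAccessCredential,
                                           SSISLogLocation)

log_location = SSISLogLocation(
    log_path='\\\\fileshare\\ssis\\logs',  # hypothetical UNC share
    access_credential=SSISAccessCredential(
        domain='CONTOSO',
        user_name='ssis_runner',
        password=SecureString(value='<secret>')),
    log_refresh_interval='00:10:00',       # d.hh:mm:ss pattern; default is 5 minutes
)
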
+ :type log_refresh_interval: object + """ + + _validation = { + 'log_path': {'required': True}, + 'type': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'log_path': {'key': 'logPath', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, + 'log_refresh_interval': {'key': 'typeProperties.logRefreshInterval', 'type': 'object'}, + } + + type = "File" + + def __init__(self, *, log_path, access_credential=None, log_refresh_interval=None, **kwargs) -> None: + super(SSISLogLocation, self).__init__(**kwargs) + self.log_path = log_path + self.access_credential = access_credential + self.log_refresh_interval = log_refresh_interval diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location.py index 81a17eb8fe53..248d0aa9b8ae 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location.py @@ -20,6 +20,17 @@ class SSISPackageLocation(Model): :param package_path: Required. The SSIS package path. Type: string (or Expression with resultType string). :type package_path: object + :param type: The type of SSIS package location. Possible values include: + 'SSISDB', 'File' + :type type: str or ~azure.mgmt.datafactory.models.SsisPackageLocationType + :param package_password: Password of the package. + :type package_password: ~azure.mgmt.datafactory.models.SecureString + :param access_credential: The package access credential. + :type access_credential: + ~azure.mgmt.datafactory.models.SSISAccessCredential + :param configuration_path: The configuration file of the package + execution. Type: string (or Expression with resultType string). + :type configuration_path: object """ _validation = { @@ -28,8 +39,16 @@ class SSISPackageLocation(Model): _attribute_map = { 'package_path': {'key': 'packagePath', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'package_password': {'key': 'typeProperties.packagePassword', 'type': 'SecureString'}, + 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, + 'configuration_path': {'key': 'typeProperties.configurationPath', 'type': 'object'}, } def __init__(self, **kwargs): super(SSISPackageLocation, self).__init__(**kwargs) self.package_path = kwargs.get('package_path', None) + self.type = kwargs.get('type', None) + self.package_password = kwargs.get('package_password', None) + self.access_credential = kwargs.get('access_credential', None) + self.configuration_path = kwargs.get('configuration_path', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location_py3.py index af139da47d88..cc442d8d35b8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location_py3.py @@ -20,6 +20,17 @@ class SSISPackageLocation(Model): :param package_path: Required. The SSIS package path. Type: string (or Expression with resultType string). :type package_path: object + :param type: The type of SSIS package location. 
Possible values include: + 'SSISDB', 'File' + :type type: str or ~azure.mgmt.datafactory.models.SsisPackageLocationType + :param package_password: Password of the package. + :type package_password: ~azure.mgmt.datafactory.models.SecureString + :param access_credential: The package access credential. + :type access_credential: + ~azure.mgmt.datafactory.models.SSISAccessCredential + :param configuration_path: The configuration file of the package + execution. Type: string (or Expression with resultType string). + :type configuration_path: object """ _validation = { @@ -28,8 +39,16 @@ class SSISPackageLocation(Model): _attribute_map = { 'package_path': {'key': 'packagePath', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'package_password': {'key': 'typeProperties.packagePassword', 'type': 'SecureString'}, + 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, + 'configuration_path': {'key': 'typeProperties.configurationPath', 'type': 'object'}, } - def __init__(self, *, package_path, **kwargs) -> None: + def __init__(self, *, package_path, type=None, package_password=None, access_credential=None, configuration_path=None, **kwargs) -> None: super(SSISPackageLocation, self).__init__(**kwargs) self.package_path = package_path + self.type = type + self.package_password = package_password + self.access_credential = access_credential + self.configuration_path = configuration_path From 9003893946b8bb108f9fe5725e73aab111628bef Mon Sep 17 00:00:00 2001 From: Azure SDK for Python bot Date: Mon, 22 Jul 2019 20:00:06 -0700 Subject: [PATCH 09/30] [AutoPR datafactory/resource-manager] Introduce ADX Command (#6404) * Generated from 0ae079d21b3b37fb36dfa54e0d0ad46c81329e48 Introduce ADX Command * Generated from 37671c3194eee7f29e4d05851515a094ad8cca91 Use full ADX name --- .../azure/mgmt/datafactory/models/__init__.py | 69 +++++++++++++++-- .../models/azure_data_explorer_command.py | 65 ++++++++++++++++ .../models/azure_data_explorer_command_py3.py | 65 ++++++++++++++++ .../models/azure_postgre_sql_sink.py | 66 ++++++++++++++++ .../models/azure_postgre_sql_sink_py3.py | 66 ++++++++++++++++ .../models/azure_postgre_sql_table_dataset.py | 14 +++- .../azure_postgre_sql_table_dataset_py3.py | 16 +++- .../mgmt/datafactory/models/binary_dataset.py | 77 +++++++++++++++++++ .../datafactory/models/binary_dataset_py3.py | 77 +++++++++++++++++++ .../mgmt/datafactory/models/binary_sink.py | 65 ++++++++++++++++ .../datafactory/models/binary_sink_py3.py | 65 ++++++++++++++++ .../mgmt/datafactory/models/binary_source.py | 56 ++++++++++++++ .../datafactory/models/binary_source_py3.py | 56 ++++++++++++++ .../mgmt/datafactory/models/copy_sink.py | 7 +- .../mgmt/datafactory/models/copy_sink_py3.py | 7 +- .../mgmt/datafactory/models/copy_source.py | 12 +-- .../datafactory/models/copy_source_py3.py | 12 +-- .../data_factory_management_client_enums.py | 24 ++++++ .../azure/mgmt/datafactory/models/dataset.py | 7 +- .../mgmt/datafactory/models/dataset_py3.py | 7 +- .../mgmt/datafactory/models/db2_source.py | 57 ++++++++++++++ .../mgmt/datafactory/models/db2_source_py3.py | 57 ++++++++++++++ .../datafactory/models/execution_activity.py | 7 +- .../models/execution_activity_py3.py | 7 +- .../mgmt/datafactory/models/my_sql_source.py | 57 ++++++++++++++ .../datafactory/models/my_sql_source_py3.py | 57 ++++++++++++++ .../models/my_sql_table_dataset.py | 72 +++++++++++++++++ .../models/my_sql_table_dataset_py3.py | 72 +++++++++++++++++ .../models/netezza_partition_settings.py | 
42 ++++++++++ .../models/netezza_partition_settings_py3.py | 42 ++++++++++ .../mgmt/datafactory/models/netezza_source.py | 13 ++++ .../datafactory/models/netezza_source_py3.py | 15 +++- .../mgmt/datafactory/models/odata_source.py | 57 ++++++++++++++ .../datafactory/models/odata_source_py3.py | 57 ++++++++++++++ .../mgmt/datafactory/models/odbc_source.py | 57 ++++++++++++++ .../datafactory/models/odbc_source_py3.py | 57 ++++++++++++++ .../datafactory/models/odbc_table_dataset.py | 72 +++++++++++++++++ .../models/odbc_table_dataset_py3.py | 72 +++++++++++++++++ .../models/oracle_partition_settings.py | 46 +++++++++++ .../models/oracle_partition_settings_py3.py | 46 +++++++++++ .../mgmt/datafactory/models/oracle_source.py | 13 ++++ .../datafactory/models/oracle_source_py3.py | 15 +++- .../datafactory/models/postgre_sql_source.py | 57 ++++++++++++++ .../models/postgre_sql_source_py3.py | 57 ++++++++++++++ .../models/postgre_sql_table_dataset.py | 72 +++++++++++++++++ .../models/postgre_sql_table_dataset_py3.py | 72 +++++++++++++++++ .../models/sap_table_partition_settings.py | 47 +++++++++++ .../sap_table_partition_settings_py3.py | 47 +++++++++++ .../datafactory/models/sap_table_source.py | 14 ++++ .../models/sap_table_source_py3.py | 16 +++- .../mgmt/datafactory/models/sybase_source.py | 57 ++++++++++++++ .../datafactory/models/sybase_source_py3.py | 57 ++++++++++++++ 52 files changed, 2279 insertions(+), 40 deletions(-) create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source.py 
create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source_py3.py diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index c76972d1c03f..c083add932e7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -247,6 +247,9 @@ from .salesforce_service_cloud_object_dataset_py3 import SalesforceServiceCloudObjectDataset from .salesforce_object_dataset_py3 import SalesforceObjectDataset from .microsoft_access_table_dataset_py3 import MicrosoftAccessTableDataset + from .postgre_sql_table_dataset_py3 import PostgreSqlTableDataset + from .my_sql_table_dataset_py3 import MySqlTableDataset + from .odbc_table_dataset_py3 import OdbcTableDataset from .informix_table_dataset_py3 import InformixTableDataset from .relational_table_dataset_py3 import RelationalTableDataset from .azure_my_sql_table_dataset_py3 import AzureMySqlTableDataset @@ -281,6 +284,7 @@ from .azure_blob_fs_location_py3 import AzureBlobFSLocation from .azure_blob_storage_location_py3 import AzureBlobStorageLocation from .dataset_location_py3 import DatasetLocation + from .binary_dataset_py3 import BinaryDataset from .delimited_text_dataset_py3 import DelimitedTextDataset from .parquet_dataset_py3 import ParquetDataset from .amazon_s3_dataset_py3 import AmazonS3Dataset @@ -304,6 +308,7 @@ from .responsys_source_py3 import ResponsysSource from .salesforce_marketing_cloud_source_py3 import SalesforceMarketingCloudSource from .vertica_source_py3 import VerticaSource + from .netezza_partition_settings_py3 import NetezzaPartitionSettings from .netezza_source_py3 import NetezzaSource from .zoho_source_py3 import ZohoSource from .xero_source_py3 import XeroSource @@ -343,6 +348,7 @@ from 
.web_source_py3 import WebSource from .teradata_partition_settings_py3 import TeradataPartitionSettings from .teradata_source_py3 import TeradataSource + from .oracle_partition_settings_py3 import OraclePartitionSettings from .oracle_source_py3 import OracleSource from .azure_data_explorer_source_py3 import AzureDataExplorerSource from .azure_my_sql_source_py3 import AzureMySqlSource @@ -356,6 +362,7 @@ from .sql_server_source_py3 import SqlServerSource from .sql_source_py3 import SqlSource from .rest_source_py3 import RestSource + from .sap_table_partition_settings_py3 import SapTablePartitionSettings from .sap_table_source_py3 import SapTableSource from .sap_open_hub_source_py3 import SapOpenHubSource from .sap_hana_source_py3 import SapHanaSource @@ -363,6 +370,12 @@ from .sap_cloud_for_customer_source_py3 import SapCloudForCustomerSource from .salesforce_service_cloud_source_py3 import SalesforceServiceCloudSource from .salesforce_source_py3 import SalesforceSource + from .odata_source_py3 import ODataSource + from .sybase_source_py3 import SybaseSource + from .postgre_sql_source_py3 import PostgreSqlSource + from .my_sql_source_py3 import MySqlSource + from .odbc_source_py3 import OdbcSource + from .db2_source_py3 import Db2Source from .microsoft_access_source_py3 import MicrosoftAccessSource from .informix_source_py3 import InformixSource from .relational_source_py3 import RelationalSource @@ -372,8 +385,6 @@ from .document_db_collection_source_py3 import DocumentDbCollectionSource from .blob_source_py3 import BlobSource from .azure_table_source_py3 import AzureTableSource - from .format_read_settings_py3 import FormatReadSettings - from .delimited_text_read_settings_py3 import DelimitedTextReadSettings from .hdfs_read_settings_py3 import HdfsReadSettings from .http_read_settings_py3 import HttpReadSettings from .sftp_read_settings_py3 import SftpReadSettings @@ -384,10 +395,14 @@ from .azure_blob_fs_read_settings_py3 import AzureBlobFSReadSettings from .azure_blob_storage_read_settings_py3 import AzureBlobStorageReadSettings from .store_read_settings_py3 import StoreReadSettings + from .binary_source_py3 import BinarySource + from .format_read_settings_py3 import FormatReadSettings + from .delimited_text_read_settings_py3 import DelimitedTextReadSettings from .delimited_text_source_py3 import DelimitedTextSource from .parquet_source_py3 import ParquetSource from .copy_source_py3 import CopySource from .lookup_activity_py3 import LookupActivity + from .azure_data_explorer_command_py3 import AzureDataExplorerCommand from .log_storage_settings_py3 import LogStorageSettings from .delete_activity_py3 import DeleteActivity from .sql_server_stored_procedure_activity_py3 import SqlServerStoredProcedureActivity @@ -430,6 +445,7 @@ from .document_db_collection_sink_py3 import DocumentDbCollectionSink from .file_system_sink_py3 import FileSystemSink from .blob_sink_py3 import BlobSink + from .binary_sink_py3 import BinarySink from .file_server_write_settings_py3 import FileServerWriteSettings from .azure_data_lake_store_write_settings_py3 import AzureDataLakeStoreWriteSettings from .azure_blob_fs_write_settings_py3 import AzureBlobFSWriteSettings @@ -439,6 +455,7 @@ from .azure_table_sink_py3 import AzureTableSink from .azure_queue_sink_py3 import AzureQueueSink from .sap_cloud_for_customer_sink_py3 import SapCloudForCustomerSink + from .azure_postgre_sql_sink_py3 import AzurePostgreSqlSink from .format_write_settings_py3 import FormatWriteSettings from .delimited_text_write_settings_py3 
import DelimitedTextWriteSettings from .delimited_text_sink_py3 import DelimitedTextSink @@ -728,6 +745,9 @@ from .salesforce_service_cloud_object_dataset import SalesforceServiceCloudObjectDataset from .salesforce_object_dataset import SalesforceObjectDataset from .microsoft_access_table_dataset import MicrosoftAccessTableDataset + from .postgre_sql_table_dataset import PostgreSqlTableDataset + from .my_sql_table_dataset import MySqlTableDataset + from .odbc_table_dataset import OdbcTableDataset from .informix_table_dataset import InformixTableDataset from .relational_table_dataset import RelationalTableDataset from .azure_my_sql_table_dataset import AzureMySqlTableDataset @@ -762,6 +782,7 @@ from .azure_blob_fs_location import AzureBlobFSLocation from .azure_blob_storage_location import AzureBlobStorageLocation from .dataset_location import DatasetLocation + from .binary_dataset import BinaryDataset from .delimited_text_dataset import DelimitedTextDataset from .parquet_dataset import ParquetDataset from .amazon_s3_dataset import AmazonS3Dataset @@ -785,6 +806,7 @@ from .responsys_source import ResponsysSource from .salesforce_marketing_cloud_source import SalesforceMarketingCloudSource from .vertica_source import VerticaSource + from .netezza_partition_settings import NetezzaPartitionSettings from .netezza_source import NetezzaSource from .zoho_source import ZohoSource from .xero_source import XeroSource @@ -824,6 +846,7 @@ from .web_source import WebSource from .teradata_partition_settings import TeradataPartitionSettings from .teradata_source import TeradataSource + from .oracle_partition_settings import OraclePartitionSettings from .oracle_source import OracleSource from .azure_data_explorer_source import AzureDataExplorerSource from .azure_my_sql_source import AzureMySqlSource @@ -837,6 +860,7 @@ from .sql_server_source import SqlServerSource from .sql_source import SqlSource from .rest_source import RestSource + from .sap_table_partition_settings import SapTablePartitionSettings from .sap_table_source import SapTableSource from .sap_open_hub_source import SapOpenHubSource from .sap_hana_source import SapHanaSource @@ -844,6 +868,12 @@ from .sap_cloud_for_customer_source import SapCloudForCustomerSource from .salesforce_service_cloud_source import SalesforceServiceCloudSource from .salesforce_source import SalesforceSource + from .odata_source import ODataSource + from .sybase_source import SybaseSource + from .postgre_sql_source import PostgreSqlSource + from .my_sql_source import MySqlSource + from .odbc_source import OdbcSource + from .db2_source import Db2Source from .microsoft_access_source import MicrosoftAccessSource from .informix_source import InformixSource from .relational_source import RelationalSource @@ -853,8 +883,6 @@ from .document_db_collection_source import DocumentDbCollectionSource from .blob_source import BlobSource from .azure_table_source import AzureTableSource - from .format_read_settings import FormatReadSettings - from .delimited_text_read_settings import DelimitedTextReadSettings from .hdfs_read_settings import HdfsReadSettings from .http_read_settings import HttpReadSettings from .sftp_read_settings import SftpReadSettings @@ -865,10 +893,14 @@ from .azure_blob_fs_read_settings import AzureBlobFSReadSettings from .azure_blob_storage_read_settings import AzureBlobStorageReadSettings from .store_read_settings import StoreReadSettings + from .binary_source import BinarySource + from .format_read_settings import FormatReadSettings + from 
.delimited_text_read_settings import DelimitedTextReadSettings from .delimited_text_source import DelimitedTextSource from .parquet_source import ParquetSource from .copy_source import CopySource from .lookup_activity import LookupActivity + from .azure_data_explorer_command import AzureDataExplorerCommand from .log_storage_settings import LogStorageSettings from .delete_activity import DeleteActivity from .sql_server_stored_procedure_activity import SqlServerStoredProcedureActivity @@ -911,6 +943,7 @@ from .document_db_collection_sink import DocumentDbCollectionSink from .file_system_sink import FileSystemSink from .blob_sink import BlobSink + from .binary_sink import BinarySink from .file_server_write_settings import FileServerWriteSettings from .azure_data_lake_store_write_settings import AzureDataLakeStoreWriteSettings from .azure_blob_fs_write_settings import AzureBlobFSWriteSettings @@ -920,6 +953,7 @@ from .azure_table_sink import AzureTableSink from .azure_queue_sink import AzureQueueSink from .sap_cloud_for_customer_sink import SapCloudForCustomerSink + from .azure_postgre_sql_sink import AzurePostgreSqlSink from .format_write_settings import FormatWriteSettings from .delimited_text_write_settings import DelimitedTextWriteSettings from .delimited_text_sink import DelimitedTextSink @@ -1024,9 +1058,12 @@ DynamicsAuthenticationType, AzureFunctionActivityMethod, WebActivityMethod, + NetezzaPartitionOption, CassandraSourceReadConsistencyLevels, TeradataPartitionOption, + OraclePartitionOption, StoredProcedureParameterType, + SapTablePartitionOption, SalesforceSourceReadBehavior, SsisPackageLocationType, HDInsightActivityDebugInfoOption, @@ -1286,6 +1323,9 @@ 'SalesforceServiceCloudObjectDataset', 'SalesforceObjectDataset', 'MicrosoftAccessTableDataset', + 'PostgreSqlTableDataset', + 'MySqlTableDataset', + 'OdbcTableDataset', 'InformixTableDataset', 'RelationalTableDataset', 'AzureMySqlTableDataset', @@ -1320,6 +1360,7 @@ 'AzureBlobFSLocation', 'AzureBlobStorageLocation', 'DatasetLocation', + 'BinaryDataset', 'DelimitedTextDataset', 'ParquetDataset', 'AmazonS3Dataset', @@ -1343,6 +1384,7 @@ 'ResponsysSource', 'SalesforceMarketingCloudSource', 'VerticaSource', + 'NetezzaPartitionSettings', 'NetezzaSource', 'ZohoSource', 'XeroSource', @@ -1382,6 +1424,7 @@ 'WebSource', 'TeradataPartitionSettings', 'TeradataSource', + 'OraclePartitionSettings', 'OracleSource', 'AzureDataExplorerSource', 'AzureMySqlSource', @@ -1395,6 +1438,7 @@ 'SqlServerSource', 'SqlSource', 'RestSource', + 'SapTablePartitionSettings', 'SapTableSource', 'SapOpenHubSource', 'SapHanaSource', @@ -1402,6 +1446,12 @@ 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource', 'SalesforceSource', + 'ODataSource', + 'SybaseSource', + 'PostgreSqlSource', + 'MySqlSource', + 'OdbcSource', + 'Db2Source', 'MicrosoftAccessSource', 'InformixSource', 'RelationalSource', @@ -1411,8 +1461,6 @@ 'DocumentDbCollectionSource', 'BlobSource', 'AzureTableSource', - 'FormatReadSettings', - 'DelimitedTextReadSettings', 'HdfsReadSettings', 'HttpReadSettings', 'SftpReadSettings', @@ -1423,10 +1471,14 @@ 'AzureBlobFSReadSettings', 'AzureBlobStorageReadSettings', 'StoreReadSettings', + 'BinarySource', + 'FormatReadSettings', + 'DelimitedTextReadSettings', 'DelimitedTextSource', 'ParquetSource', 'CopySource', 'LookupActivity', + 'AzureDataExplorerCommand', 'LogStorageSettings', 'DeleteActivity', 'SqlServerStoredProcedureActivity', @@ -1469,6 +1521,7 @@ 'DocumentDbCollectionSink', 'FileSystemSink', 'BlobSink', + 'BinarySink', 
'FileServerWriteSettings', 'AzureDataLakeStoreWriteSettings', 'AzureBlobFSWriteSettings', @@ -1478,6 +1531,7 @@ 'AzureTableSink', 'AzureQueueSink', 'SapCloudForCustomerSink', + 'AzurePostgreSqlSink', 'FormatWriteSettings', 'DelimitedTextWriteSettings', 'DelimitedTextSink', @@ -1581,9 +1635,12 @@ 'DynamicsAuthenticationType', 'AzureFunctionActivityMethod', 'WebActivityMethod', + 'NetezzaPartitionOption', 'CassandraSourceReadConsistencyLevels', 'TeradataPartitionOption', + 'OraclePartitionOption', 'StoredProcedureParameterType', + 'SapTablePartitionOption', 'SalesforceSourceReadBehavior', 'SsisPackageLocationType', 'HDInsightActivityDebugInfoOption', diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command.py new file mode 100644 index 000000000000..8d100b9e07b7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity import ExecutionActivity + + +class AzureDataExplorerCommand(ExecutionActivity): + """Azure Data Explorer command activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param command: Required. A control command, according to the Azure Data + Explorer command syntax. Type: string (or Expression with resultType + string). 
+ :type command: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'command': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'command': {'key': 'typeProperties.command', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataExplorerCommand, self).__init__(**kwargs) + self.command = kwargs.get('command', None) + self.type = 'AzureDataExplorerCommand' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_py3.py new file mode 100644 index 000000000000..d3b48a223908 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_py3.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class AzureDataExplorerCommand(ExecutionActivity): + """Azure Data Explorer command activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param command: Required. A control command, according to the Azure Data + Explorer command syntax. Type: string (or Expression with resultType + string). 
+ :type command: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'command': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'command': {'key': 'typeProperties.command', 'type': 'object'}, + } + + def __init__(self, *, name: str, command, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, **kwargs) -> None: + super(AzureDataExplorerCommand, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.command = command + self.type = 'AzureDataExplorerCommand' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink.py new file mode 100644 index 000000000000..6214e1ba1f22 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class AzurePostgreSqlSink(CopySink): + """A copy activity Azure PostgreSQL sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. 
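
For orientation, the new AzureDataExplorerCommand model shown above is constructed like any other ExecutionActivity. A minimal sketch, not part of the patch; the activity name, command text, and linked-service name below are illustrative:

    from azure.mgmt.datafactory.models import (
        AzureDataExplorerCommand,
        LinkedServiceReference,
    )

    # 'name' and 'command' are the two required parameters per the
    # _validation map above; 'command' takes a KQL control command.
    purge_activity = AzureDataExplorerCommand(
        name='DropStagingTable',
        command='.drop table Staging ifexists',
        linked_service_name=LinkedServiceReference(
            reference_name='AzureDataExplorerLinkedService'),
    )
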
Type: + string (or Expression with resultType string). + :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzurePostgreSqlSink, self).__init__(**kwargs) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.type = 'AzurePostgreSqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink_py3.py new file mode 100644 index 000000000000..b7cd0ec51a29 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink_py3.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzurePostgreSqlSink(CopySink): + """A copy activity Azure PostgreSQL sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). 
+ :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.pre_copy_script = pre_copy_script + self.type = 'AzurePostgreSqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset.py index 8960acc0df75..933264b57a9b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset.py @@ -43,9 +43,17 @@ class AzurePostgreSqlTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with + :param table_name: The table name of the Azure PostgreSQL database which + includes both schema and table. Type: string (or Expression with resultType string). :type table_name: object + :param table: The table name of the Azure PostgreSQL database. Type: + string (or Expression with resultType string). + :type table: object + :param azure_postgre_sql_table_dataset_schema: The schema name of the + Azure PostgreSQL database. Type: string (or Expression with resultType + string). 
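
As a quick illustration of the new sink (a sketch, not from this patch; the SQL below is hypothetical): pre_copy_script runs once against the target before rows are written, typically to truncate or stage the destination table.

    from azure.mgmt.datafactory.models import AzurePostgreSqlSink

    # Only 'type' is required, and it is a server-filled constant;
    # everything else, including pre_copy_script, is optional.
    sink = AzurePostgreSqlSink(
        pre_copy_script='TRUNCATE TABLE public.staging_orders',
        write_batch_size=10000,
    )
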
+ :type azure_postgre_sql_table_dataset_schema: object """ _validation = { @@ -64,9 +72,13 @@ class AzurePostgreSqlTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'azure_postgre_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__(self, **kwargs): super(AzurePostgreSqlTableDataset, self).__init__(**kwargs) self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.azure_postgre_sql_table_dataset_schema = kwargs.get('azure_postgre_sql_table_dataset_schema', None) self.type = 'AzurePostgreSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset_py3.py index fddf0720c565..485dc3efb102 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset_py3.py @@ -43,9 +43,17 @@ class AzurePostgreSqlTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with + :param table_name: The table name of the Azure PostgreSQL database which + includes both schema and table. Type: string (or Expression with resultType string). :type table_name: object + :param table: The table name of the Azure PostgreSQL database. Type: + string (or Expression with resultType string). + :type table: object + :param azure_postgre_sql_table_dataset_schema: The schema name of the + Azure PostgreSQL database. Type: string (or Expression with resultType + string). 
+ :type azure_postgre_sql_table_dataset_schema: object """ _validation = { @@ -64,9 +72,13 @@ class AzurePostgreSqlTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'azure_postgre_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, azure_postgre_sql_table_dataset_schema=None, **kwargs) -> None: super(AzurePostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name + self.table = table + self.azure_postgre_sql_table_dataset_schema = azure_postgre_sql_table_dataset_schema self.type = 'AzurePostgreSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset.py new file mode 100644 index 000000000000..5f0f8ef96696 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class BinaryDataset(Dataset): + """Binary dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. 
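
The net effect of the dataset change above: table_name stays for the legacy combined "schema.table" form, while the new table and schema type properties split the two parts. Because the Dataset base class already exposes a schema parameter, the generated Python name is azure_postgre_sql_table_dataset_schema, but it still serializes to typeProperties.schema. A sketch with illustrative names:

    from azure.mgmt.datafactory.models import (
        AzurePostgreSqlTableDataset,
        LinkedServiceReference,
    )

    dataset = AzurePostgreSqlTableDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='AzurePostgreSqlLinkedService'),
        table='orders',
        # Serialized as typeProperties.schema (see _attribute_map above).
        azure_postgre_sql_table_dataset_schema='public',
    )
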
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the Binary storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param compression: The data compression method used for the binary + dataset. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, **kwargs): + super(BinaryDataset, self).__init__(**kwargs) + self.location = kwargs.get('location', None) + self.compression = kwargs.get('compression', None) + self.type = 'Binary' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset_py3.py new file mode 100644 index 000000000000..7d26b216fd7a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class BinaryDataset(Dataset): + """Binary dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. 
+ :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the Binary storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param compression: The data compression method used for the binary + dataset. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, compression=None, **kwargs) -> None: + super(BinaryDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.location = location + self.compression = compression + self.type = 'Binary' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink.py new file mode 100644 index 000000000000..2e3be83edca7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class BinarySink(CopySink): + """A copy activity Binary sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
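
To see the two new BinaryDataset type properties together: location is required and compression is optional. A minimal sketch, assuming an Azure Blob linked service and container (all names below are illustrative, not from the patch):

    from azure.mgmt.datafactory.models import (
        AzureBlobStorageLocation,
        BinaryDataset,
        LinkedServiceReference,
    )

    dataset = BinaryDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='AzureBlobStorageLinkedService'),
        location=AzureBlobStorageLocation(
            container='raw',
            folder_path='images/2019',
            file_name='logo.png',
        ),
    )
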
+ :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Binary store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__(self, **kwargs): + super(BinarySink, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.type = 'BinarySink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink_py3.py new file mode 100644 index 000000000000..accac13e3033 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink_py3.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class BinarySink(CopySink): + """A copy activity Binary sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. 
Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Binary store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + super(BinarySink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.type = 'BinarySink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source.py new file mode 100644 index 000000000000..48e78e7d24bf --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class BinarySource(CopySource): + """A copy activity Binary source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Binary store settings. 
+ :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__(self, **kwargs): + super(BinarySource, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.type = 'BinarySource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source_py3.py new file mode 100644 index 000000000000..aa9a9f1412ab --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source_py3.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class BinarySource(CopySource): + """A copy activity Binary source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Binary store settings. 
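
Note that, as generated here, BinarySink and BinarySource both declare store_settings as StoreReadSettings. A sketch of the pair wired into a copy activity (activity and dataset reference names are illustrative, not from the patch):

    from azure.mgmt.datafactory.models import (
        BinarySink,
        BinarySource,
        CopyActivity,
        DatasetReference,
    )

    copy = CopyActivity(
        name='CopyBinaryFiles',
        inputs=[DatasetReference(reference_name='SourceBinaryDataset')],
        outputs=[DatasetReference(reference_name='SinkBinaryDataset')],
        source=BinarySource(),
        sink=BinarySink(),
    )
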
+ :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + super(BinarySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.type = 'BinarySource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py index 7631a05e1d46..7fbd06c94943 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py @@ -21,8 +21,9 @@ class CopySink(Model): DynamicsCrmSink, DynamicsSink, MicrosoftAccessSink, InformixSink, OdbcSink, AzureSearchIndexSink, AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, - DocumentDbCollectionSink, FileSystemSink, BlobSink, ParquetSink, - AzureTableSink, AzureQueueSink, SapCloudForCustomerSink, DelimitedTextSink + DocumentDbCollectionSink, FileSystemSink, BlobSink, BinarySink, + ParquetSink, AzureTableSink, AzureQueueSink, SapCloudForCustomerSink, + AzurePostgreSqlSink, DelimitedTextSink All required parameters must be populated in order to send to Azure. 
@@ -66,7 +67,7 @@ class CopySink(Model): } _subtype_map = { - 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'ParquetSink': 'ParquetSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'DelimitedTextSink': 'DelimitedTextSink'} + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'DelimitedTextSink': 'DelimitedTextSink'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py index 238da7530e0e..5207aa592b26 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py @@ -21,8 +21,9 @@ class CopySink(Model): DynamicsCrmSink, DynamicsSink, MicrosoftAccessSink, InformixSink, OdbcSink, AzureSearchIndexSink, AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, - DocumentDbCollectionSink, FileSystemSink, BlobSink, ParquetSink, - AzureTableSink, AzureQueueSink, SapCloudForCustomerSink, DelimitedTextSink + DocumentDbCollectionSink, FileSystemSink, BlobSink, BinarySink, + ParquetSink, AzureTableSink, AzureQueueSink, SapCloudForCustomerSink, + AzurePostgreSqlSink, DelimitedTextSink All required parameters must be populated in order to send to Azure. 
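
The _subtype_map edit above (mirrored in the _py3 module below) is what lets the new sinks round-trip: msrest reads the 'type' discriminator and instantiates the matching subclass. A sketch of that dispatch, assuming msrest's polymorphic Model.from_dict behavior (the payload is illustrative):

    from azure.mgmt.datafactory.models import BinarySink, CopySink

    payload = {'type': 'BinarySink', 'writeBatchSize': 5000}
    # from_dict consults CopySink._subtype_map via the 'type' key.
    sink = CopySink.from_dict(payload)
    assert isinstance(sink, BinarySink)
    assert sink.write_batch_size == 5000
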
@@ -66,7 +67,7 @@ class CopySink(Model): } _subtype_map = { - 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'ParquetSink': 'ParquetSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'DelimitedTextSink': 'DelimitedTextSink'} + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'DelimitedTextSink': 'DelimitedTextSink'} } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py index a4e09622c6f4..f4c7a234ab5c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py @@ -31,10 +31,12 @@ class CopySource(Model): FileSystemSource, SqlDWSource, SqlMISource, AzureSqlSource, SqlServerSource, SqlSource, RestSource, SapTableSource, SapOpenHubSource, SapHanaSource, SapEccSource, SapCloudForCustomerSource, - SalesforceServiceCloudSource, SalesforceSource, MicrosoftAccessSource, - InformixSource, RelationalSource, CommonDataServiceForAppsSource, - DynamicsCrmSource, DynamicsSource, DocumentDbCollectionSource, BlobSource, - AzureTableSource, DelimitedTextSource, ParquetSource + SalesforceServiceCloudSource, SalesforceSource, ODataSource, SybaseSource, + PostgreSqlSource, MySqlSource, OdbcSource, Db2Source, + MicrosoftAccessSource, InformixSource, 
RelationalSource, + CommonDataServiceForAppsSource, DynamicsCrmSource, DynamicsSource, + DocumentDbCollectionSource, BlobSource, AzureTableSource, BinarySource, + DelimitedTextSource, ParquetSource All required parameters must be populated in order to send to Azure. @@ -69,7 +71,7 @@ class CopySource(Model): } _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 
'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'ODataSource': 'ODataSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'BinarySource': 'BinarySource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py index daf6fef73dad..a1b3ee8e1253 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py @@ -31,10 +31,12 @@ class CopySource(Model): FileSystemSource, SqlDWSource, SqlMISource, AzureSqlSource, SqlServerSource, SqlSource, RestSource, SapTableSource, SapOpenHubSource, SapHanaSource, SapEccSource, SapCloudForCustomerSource, - SalesforceServiceCloudSource, SalesforceSource, MicrosoftAccessSource, - InformixSource, RelationalSource, CommonDataServiceForAppsSource, - DynamicsCrmSource, DynamicsSource, DocumentDbCollectionSource, BlobSource, - 
AzureTableSource, DelimitedTextSource, ParquetSource + SalesforceServiceCloudSource, SalesforceSource, ODataSource, SybaseSource, + PostgreSqlSource, MySqlSource, OdbcSource, Db2Source, + MicrosoftAccessSource, InformixSource, RelationalSource, + CommonDataServiceForAppsSource, DynamicsCrmSource, DynamicsSource, + DocumentDbCollectionSource, BlobSource, AzureTableSource, BinarySource, + DelimitedTextSource, ParquetSource All required parameters must be populated in order to send to Azure. @@ -69,7 +71,7 @@ class CopySource(Model): } _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 
'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'ODataSource': 'ODataSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'BinarySource': 'BinarySource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py index d643be25fcf4..19a322baf76e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py @@ -352,6 +352,13 
@@ class WebActivityMethod(str, Enum): delete = "DELETE" +class NetezzaPartitionOption(str, Enum): + + none = "None" + data_slice = "DataSlice" + dynamic_range = "DynamicRange" + + class CassandraSourceReadConsistencyLevels(str, Enum): all = "ALL" @@ -373,6 +380,13 @@ class TeradataPartitionOption(str, Enum): dynamic_range = "DynamicRange" +class OraclePartitionOption(str, Enum): + + none = "None" + physical_partitions_of_table = "PhysicalPartitionsOfTable" + dynamic_range = "DynamicRange" + + class StoredProcedureParameterType(str, Enum): string = "String" @@ -384,6 +398,16 @@ class StoredProcedureParameterType(str, Enum): date_enum = "Date" +class SapTablePartitionOption(str, Enum): + + none = "None" + partition_on_int = "PartitionOnInt" + partition_on_calendar_year = "PartitionOnCalendarYear" + partition_on_calendar_month = "PartitionOnCalendarMonth" + partition_on_calendar_date = "PartitionOnCalendarDate" + partition_on_time = "PartitionOnTime" + + class SalesforceSourceReadBehavior(str, Enum): query = "Query" diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py index 9ed478d1a7b9..27de2e85bf37 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py @@ -34,7 +34,8 @@ class Dataset(Model): SqlServerTableDataset, SapOpenHubTableDataset, SapHanaTableDataset, SapEccResourceDataset, SapCloudForCustomerResourceDataset, SalesforceServiceCloudObjectDataset, SalesforceObjectDataset, - MicrosoftAccessTableDataset, InformixTableDataset, RelationalTableDataset, + MicrosoftAccessTableDataset, PostgreSqlTableDataset, MySqlTableDataset, + OdbcTableDataset, InformixTableDataset, RelationalTableDataset, AzureMySqlTableDataset, TeradataTableDataset, OracleTableDataset, ODataResourceDataset, CosmosDbMongoDbApiCollectionDataset, MongoDbV2CollectionDataset, MongoDbCollectionDataset, FileShareDataset, @@ -42,7 +43,7 @@ class Dataset(Model): CommonDataServiceForAppsEntityDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, - AzureSqlTableDataset, AzureTableDataset, AzureBlobDataset, + AzureSqlTableDataset, AzureTableDataset, AzureBlobDataset, BinaryDataset, DelimitedTextDataset, ParquetDataset, AmazonS3Dataset All required parameters must be populated in order to send to Azure. 
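The enum hunks above introduce NetezzaPartitionOption, OraclePartitionOption, and SapTablePartitionOption, the value sets behind the partitionOption properties this patch adds to the corresponding copy sources (NetezzaSource is shown further down). Because each derives from str, its members compare equal to their wire values, so either spelling can be passed to the generated models. A short illustration, assuming the package is importable:

    from azure.mgmt.datafactory.models import NetezzaPartitionOption

    # str-derived enum members are interchangeable with their wire values.
    assert NetezzaPartitionOption.dynamic_range == "DynamicRange"

    # Either form is accepted wherever a partition_option is expected.
    option = NetezzaPartitionOption.data_slice  # or simply "DataSlice"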
@@ -93,7 +94,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 
'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py index 7ef986b32db5..8aedef87f3a9 100644 --- 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py @@ -34,7 +34,8 @@ class Dataset(Model): SqlServerTableDataset, SapOpenHubTableDataset, SapHanaTableDataset, SapEccResourceDataset, SapCloudForCustomerResourceDataset, SalesforceServiceCloudObjectDataset, SalesforceObjectDataset, - MicrosoftAccessTableDataset, InformixTableDataset, RelationalTableDataset, + MicrosoftAccessTableDataset, PostgreSqlTableDataset, MySqlTableDataset, + OdbcTableDataset, InformixTableDataset, RelationalTableDataset, AzureMySqlTableDataset, TeradataTableDataset, OracleTableDataset, ODataResourceDataset, CosmosDbMongoDbApiCollectionDataset, MongoDbV2CollectionDataset, MongoDbCollectionDataset, FileShareDataset, @@ -42,7 +43,7 @@ class Dataset(Model): CommonDataServiceForAppsEntityDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, - AzureSqlTableDataset, AzureTableDataset, AzureBlobDataset, + AzureSqlTableDataset, AzureTableDataset, AzureBlobDataset, BinaryDataset, DelimitedTextDataset, ParquetDataset, AmazonS3Dataset All required parameters must be populated in order to send to Azure. @@ -93,7 +94,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 
'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 
'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source.py new file mode 100644 index 000000000000..a6e8c31ffa1f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class Db2Source(CopySource): + """A copy activity source for Db2 databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(Db2Source, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'Db2Source' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source_py3.py new file mode 100644 index 000000000000..20b169699ae0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class Db2Source(CopySource): + """A copy activity source for Db2 databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'Db2Source' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py index aca89a009b8e..4d8aecdaf510 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py @@ -20,8 +20,9 @@ class ExecutionActivity(Activity): DatabricksSparkJarActivity, DatabricksNotebookActivity, DataLakeAnalyticsUSQLActivity, AzureMLUpdateResourceActivity, AzureMLBatchExecutionActivity, GetMetadataActivity, WebActivity, - LookupActivity, DeleteActivity, SqlServerStoredProcedureActivity, - CustomActivity, ExecuteSSISPackageActivity, HDInsightSparkActivity, + LookupActivity, AzureDataExplorerCommand, DeleteActivity, + SqlServerStoredProcedureActivity, CustomActivity, + ExecuteSSISPackageActivity, HDInsightSparkActivity, HDInsightStreamingActivity, HDInsightMapReduceActivity, HDInsightPigActivity, HDInsightHiveActivity, CopyActivity @@ -64,7 +65,7 @@ class ExecutionActivity(Activity): } _subtype_map = { - 'type': {'AzureFunctionActivity': 'AzureFunctionActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'GetMetadata': 'GetMetadataActivity', 'WebActivity': 'WebActivity', 'Lookup': 'LookupActivity', 'Delete': 'DeleteActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'Custom': 'CustomActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'Copy': 'CopyActivity'} + 'type': {'AzureFunctionActivity': 'AzureFunctionActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'GetMetadata': 'GetMetadataActivity', 'WebActivity': 'WebActivity', 'Lookup': 'LookupActivity', 'AzureDataExplorerCommand': 'AzureDataExplorerCommand', 
'Delete': 'DeleteActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'Custom': 'CustomActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'Copy': 'CopyActivity'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity_py3.py index 7f3b452fc3f9..f8ab752ec3bf 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity_py3.py @@ -20,8 +20,9 @@ class ExecutionActivity(Activity): DatabricksSparkJarActivity, DatabricksNotebookActivity, DataLakeAnalyticsUSQLActivity, AzureMLUpdateResourceActivity, AzureMLBatchExecutionActivity, GetMetadataActivity, WebActivity, - LookupActivity, DeleteActivity, SqlServerStoredProcedureActivity, - CustomActivity, ExecuteSSISPackageActivity, HDInsightSparkActivity, + LookupActivity, AzureDataExplorerCommand, DeleteActivity, + SqlServerStoredProcedureActivity, CustomActivity, + ExecuteSSISPackageActivity, HDInsightSparkActivity, HDInsightStreamingActivity, HDInsightMapReduceActivity, HDInsightPigActivity, HDInsightHiveActivity, CopyActivity @@ -64,7 +65,7 @@ class ExecutionActivity(Activity): } _subtype_map = { - 'type': {'AzureFunctionActivity': 'AzureFunctionActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'GetMetadata': 'GetMetadataActivity', 'WebActivity': 'WebActivity', 'Lookup': 'LookupActivity', 'Delete': 'DeleteActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'Custom': 'CustomActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'Copy': 'CopyActivity'} + 'type': {'AzureFunctionActivity': 'AzureFunctionActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'GetMetadata': 'GetMetadataActivity', 'WebActivity': 'WebActivity', 'Lookup': 'LookupActivity', 'AzureDataExplorerCommand': 'AzureDataExplorerCommand', 'Delete': 'DeleteActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'Custom': 'CustomActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'Copy': 'CopyActivity'} } def __init__(self, *, 
name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source.py new file mode 100644 index 000000000000..c2b0b66eabb1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class MySqlSource(CopySource): + """A copy activity source for MySQL databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MySqlSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'MySqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source_py3.py new file mode 100644 index 000000000000..3a0315d83979 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class MySqlSource(CopySource): + """A copy activity source for MySQL databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'MySqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset.py new file mode 100644 index 000000000000..3bb1584975d5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class MySqlTableDataset(Dataset): + """The MySQL table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. 
+ :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The MySQL table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MySqlTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'MySqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset_py3.py new file mode 100644 index 000000000000..33263561dfde --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class MySqlTableDataset(Dataset): + """The MySQL table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. 
+ :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The MySQL table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(MySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'MySqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings.py new file mode 100644 index 000000000000..b6c1ca9ba5da --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class NetezzaPartitionSettings(Model): + """The settings that will be leveraged for Netezza source partitioning. + + :param partition_column_name: The name of the column in integer type that + will be used for proceeding range partitioning. Type: string (or + Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). 
+ :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(NetezzaPartitionSettings, self).__init__(**kwargs) + self.partition_column_name = kwargs.get('partition_column_name', None) + self.partition_upper_bound = kwargs.get('partition_upper_bound', None) + self.partition_lower_bound = kwargs.get('partition_lower_bound', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings_py3.py new file mode 100644 index 000000000000..9f071eae60ff --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings_py3.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class NetezzaPartitionSettings(Model): + """The settings that will be leveraged for Netezza source partitioning. + + :param partition_column_name: The name of the column in integer type that + will be used for proceeding range partitioning. Type: string (or + Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). 
+ :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__(self, *, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None: + super(NetezzaPartitionSettings, self).__init__(**kwargs) + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py index caf73f9ef81d..3c66032bf48d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py @@ -36,6 +36,15 @@ class NetezzaSource(CopySource): :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object + :param partition_option: The partition mechanism that will be used for + Netezza read in parallel. Possible values include: 'None', 'DataSlice', + 'DynamicRange' + :type partition_option: str or + ~azure.mgmt.datafactory.models.NetezzaPartitionOption + :param partition_settings: The settings that will be leveraged for Netezza + source partitioning. + :type partition_settings: + ~azure.mgmt.datafactory.models.NetezzaPartitionSettings """ _validation = { @@ -49,9 +58,13 @@ class NetezzaSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'}, } def __init__(self, **kwargs): super(NetezzaSource, self).__init__(**kwargs) self.query = kwargs.get('query', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) self.type = 'NetezzaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py index 101a1f26a74d..f5dcc07e63d8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py @@ -36,6 +36,15 @@ class NetezzaSource(CopySource): :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object + :param partition_option: The partition mechanism that will be used for + Netezza read in parallel. Possible values include: 'None', 'DataSlice', + 'DynamicRange' + :type partition_option: str or + ~azure.mgmt.datafactory.models.NetezzaPartitionOption + :param partition_settings: The settings that will be leveraged for Netezza + source partitioning. 
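+ For example, a dynamic-range read over an integer column could be
+ configured as follows (an illustrative sketch; the query text, column
+ name, and bounds are placeholders)::
+
+     settings = NetezzaPartitionSettings(
+         partition_column_name='id',
+         partition_lower_bound='1',
+         partition_upper_bound='1000000')
+     source = NetezzaSource(
+         query='SELECT * FROM sales',
+         partition_option='DynamicRange',
+         partition_settings=settings)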
+ :type partition_settings: + ~azure.mgmt.datafactory.models.NetezzaPartitionSettings """ _validation = { @@ -49,9 +58,13 @@ class NetezzaSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, partition_option=None, partition_settings=None, **kwargs) -> None: super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query + self.partition_option = partition_option + self.partition_settings = partition_settings self.type = 'NetezzaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source.py new file mode 100644 index 000000000000..c70f440ff6cb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class ODataSource(CopySource): + """A copy activity source for OData source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: OData query. For example, "$top=1". Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ODataSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'ODataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source_py3.py new file mode 100644 index 000000000000..83ba9bd7f2af --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class ODataSource(CopySource): + """A copy activity source for OData source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: OData query. For example, "$top=1". Type: string (or + Expression with resultType string). 
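+ A minimal construction sketch (illustrative only)::
+
+     source = ODataSource(query='$top=1')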
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ODataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'ODataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source.py new file mode 100644 index 000000000000..9761d0c0aeb5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class OdbcSource(CopySource): + """A copy activity source for ODBC databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OdbcSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'OdbcSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source_py3.py new file mode 100644 index 000000000000..52b059a8ad91 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class OdbcSource(CopySource): + """A copy activity source for ODBC databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
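+ A minimal construction sketch (illustrative; the query text is a
+ placeholder)::
+
+     source = OdbcSource(query='SELECT * FROM MyTable')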
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'OdbcSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset.py new file mode 100644 index 000000000000..2f4f4261f4fc --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class OdbcTableDataset(Dataset): + """The ODBC table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The ODBC table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OdbcTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'OdbcTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset_py3.py new file mode 100644 index 000000000000..070ddccd180d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class OdbcTableDataset(Dataset): + """The ODBC table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The ODBC table name. Type: string (or Expression with + resultType string). 
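+ Constructed like the other table datasets in this patch (sketch; the
+ names are placeholders and ``LinkedServiceReference`` is imported as in
+ the MySQL table dataset example above)::
+
+     dataset = OdbcTableDataset(
+         linked_service_name=LinkedServiceReference(
+             reference_name='MyOdbcLinkedService'),
+         table_name='MyTable')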
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(OdbcTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'OdbcTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py new file mode 100644 index 000000000000..b4e9aa1b92f3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OraclePartitionSettings(Model): + """The settings that will be leveraged for Oracle source partitioning. + + :param partition_names: Names of the physical partitions of Oracle table. + :type partition_names: object + :param partition_column_name: The name of the column in integer type that + will be used for proceeding range partitioning. Type: string (or + Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). 
+ :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_names': {'key': 'partitionNames', 'type': 'object'}, + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OraclePartitionSettings, self).__init__(**kwargs) + self.partition_names = kwargs.get('partition_names', None) + self.partition_column_name = kwargs.get('partition_column_name', None) + self.partition_upper_bound = kwargs.get('partition_upper_bound', None) + self.partition_lower_bound = kwargs.get('partition_lower_bound', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py new file mode 100644 index 000000000000..10641aab7f9f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OraclePartitionSettings(Model): + """The settings that will be leveraged for Oracle source partitioning. + + :param partition_names: Names of the physical partitions of Oracle table. + :type partition_names: object + :param partition_column_name: The name of the column in integer type that + will be used for proceeding range partitioning. Type: string (or + Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). 
+ :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_names': {'key': 'partitionNames', 'type': 'object'}, + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__(self, *, partition_names=None, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None: + super(OraclePartitionSettings, self).__init__(**kwargs) + self.partition_names = partition_names + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py index 12b3aa31353f..db436192eca1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py @@ -40,6 +40,15 @@ class OracleSource(CopySource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param partition_option: The partition mechanism that will be used for + Oracle read in parallel. Possible values include: 'None', + 'PhysicalPartitionsOfTable', 'DynamicRange' + :type partition_option: str or + ~azure.mgmt.datafactory.models.OraclePartitionOption + :param partition_settings: The settings that will be leveraged for Oracle + source partitioning. + :type partition_settings: + ~azure.mgmt.datafactory.models.OraclePartitionSettings """ _validation = { @@ -54,10 +63,14 @@ class OracleSource(CopySource): 'type': {'key': 'type', 'type': 'str'}, 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, } def __init__(self, **kwargs): super(OracleSource, self).__init__(**kwargs) self.oracle_reader_query = kwargs.get('oracle_reader_query', None) self.query_timeout = kwargs.get('query_timeout', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) self.type = 'OracleSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py index 43afe27fda2f..0a871809896e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py @@ -40,6 +40,15 @@ class OracleSource(CopySource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param partition_option: The partition mechanism that will be used for + Oracle read in parallel. Possible values include: 'None', + 'PhysicalPartitionsOfTable', 'DynamicRange' + :type partition_option: str or + ~azure.mgmt.datafactory.models.OraclePartitionOption + :param partition_settings: The settings that will be leveraged for Oracle + source partitioning. 
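+ For example, reading a table by its existing physical partitions could
+ look like this (an illustrative sketch; the partition names are
+ placeholders)::
+
+     settings = OraclePartitionSettings(
+         partition_names=['P201901', 'P201902'])
+     source = OracleSource(
+         partition_option='PhysicalPartitionsOfTable',
+         partition_settings=settings)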
+ :type partition_settings: + ~azure.mgmt.datafactory.models.OraclePartitionSettings """ _validation = { @@ -54,10 +63,14 @@ class OracleSource(CopySource): 'type': {'key': 'type', 'type': 'str'}, 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, oracle_reader_query=None, query_timeout=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, oracle_reader_query=None, query_timeout=None, partition_option=None, partition_settings=None, **kwargs) -> None: super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.oracle_reader_query = oracle_reader_query self.query_timeout = query_timeout + self.partition_option = partition_option + self.partition_settings = partition_settings self.type = 'OracleSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source.py new file mode 100644 index 000000000000..51dd25b25c60 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class PostgreSqlSource(CopySource): + """A copy activity source for PostgreSQL databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PostgreSqlSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'PostgreSqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source_py3.py new file mode 100644 index 000000000000..8aa12e4bdf8d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class PostgreSqlSource(CopySource): + """A copy activity source for PostgreSQL databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
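+ A minimal construction sketch (illustrative; the query text is a
+ placeholder)::
+
+     source = PostgreSqlSource(query='SELECT * FROM public.orders')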
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'PostgreSqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset.py new file mode 100644 index 000000000000..1c61e808abc9 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class PostgreSqlTableDataset(Dataset): + """The PostgreSQL table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The PostgreSQL table name. Type: string (or Expression + with resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PostgreSqlTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'PostgreSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset_py3.py new file mode 100644 index 000000000000..00cd65a84cd4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class PostgreSqlTableDataset(Dataset): + """The PostgreSQL table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The PostgreSQL table name. Type: string (or Expression + with resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(PostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'PostgreSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings.py new file mode 100644 index 000000000000..b688fe16683b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SapTablePartitionSettings(Model): + """The settings that will be leveraged for SAP table source partitioning. + + :param partition_column_name: The name of the column that will be used for + proceeding range partitioning. Type: string (or Expression with resultType + string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_lower_bound: object + :param max_partitions_number: The maximum value of partitions the table + will be split into. Type: integer (or Expression with resultType string). 
+ :type max_partitions_number: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + 'max_partitions_number': {'key': 'maxPartitionsNumber', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapTablePartitionSettings, self).__init__(**kwargs) + self.partition_column_name = kwargs.get('partition_column_name', None) + self.partition_upper_bound = kwargs.get('partition_upper_bound', None) + self.partition_lower_bound = kwargs.get('partition_lower_bound', None) + self.max_partitions_number = kwargs.get('max_partitions_number', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings_py3.py new file mode 100644 index 000000000000..37bdf610ab35 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings_py3.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SapTablePartitionSettings(Model): + """The settings that will be leveraged for SAP table source partitioning. + + :param partition_column_name: The name of the column that will be used for + proceeding range partitioning. Type: string (or Expression with resultType + string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_lower_bound: object + :param max_partitions_number: The maximum value of partitions the table + will be split into. Type: integer (or Expression with resultType string). 
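+ For example, splitting a table read into at most four ranges over an
+ integer column could be configured as follows (an illustrative sketch;
+ the column name and bounds are placeholders)::
+
+     settings = SapTablePartitionSettings(
+         partition_column_name='DOCNUM',
+         partition_lower_bound='1',
+         partition_upper_bound='99999',
+         max_partitions_number=4)
+     source = SapTableSource(
+         partition_option='PartitionOnInt',
+         partition_settings=settings)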
+ :type max_partitions_number: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + 'max_partitions_number': {'key': 'maxPartitionsNumber', 'type': 'object'}, + } + + def __init__(self, *, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, max_partitions_number=None, **kwargs) -> None: + super(SapTablePartitionSettings, self).__init__(**kwargs) + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound + self.max_partitions_number = max_partitions_number diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source.py index e1865aa05535..35799515440e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source.py @@ -55,6 +55,16 @@ class SapTableSource(CopySource): function module that will be used to read data from SAP Table. Type: string (or Expression with resultType string). :type custom_rfc_read_table_function_module: object + :param partition_option: The partition mechanism that will be used for SAP + table read in parallel. Possible values include: 'None', 'PartitionOnInt', + 'PartitionOnCalendarYear', 'PartitionOnCalendarMonth', + 'PartitionOnCalendarDate', 'PartitionOnTime' + :type partition_option: str or + ~azure.mgmt.datafactory.models.SapTablePartitionOption + :param partition_settings: The settings that will be leveraged for SAP + table source partitioning. + :type partition_settings: + ~azure.mgmt.datafactory.models.SapTablePartitionSettings """ _validation = { @@ -73,6 +83,8 @@ class SapTableSource(CopySource): 'rfc_table_options': {'key': 'rfcTableOptions', 'type': 'object'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, } def __init__(self, **kwargs): @@ -83,4 +95,6 @@ def __init__(self, **kwargs): self.rfc_table_options = kwargs.get('rfc_table_options', None) self.batch_size = kwargs.get('batch_size', None) self.custom_rfc_read_table_function_module = kwargs.get('custom_rfc_read_table_function_module', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) self.type = 'SapTableSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source_py3.py index 73e915a14ed3..bed7bbb93932 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source_py3.py @@ -55,6 +55,16 @@ class SapTableSource(CopySource): function module that will be used to read data from SAP Table. Type: string (or Expression with resultType string). 
:type custom_rfc_read_table_function_module: object + :param partition_option: The partition mechanism that will be used for SAP + table read in parallel. Possible values include: 'None', 'PartitionOnInt', + 'PartitionOnCalendarYear', 'PartitionOnCalendarMonth', + 'PartitionOnCalendarDate', 'PartitionOnTime' + :type partition_option: str or + ~azure.mgmt.datafactory.models.SapTablePartitionOption + :param partition_settings: The settings that will be leveraged for SAP + table source partitioning. + :type partition_settings: + ~azure.mgmt.datafactory.models.SapTablePartitionSettings """ _validation = { @@ -73,9 +83,11 @@ class SapTableSource(CopySource): 'rfc_table_options': {'key': 'rfcTableOptions', 'type': 'object'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, row_count=None, row_skips=None, rfc_table_fields=None, rfc_table_options=None, batch_size=None, custom_rfc_read_table_function_module=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, row_count=None, row_skips=None, rfc_table_fields=None, rfc_table_options=None, batch_size=None, custom_rfc_read_table_function_module=None, partition_option=None, partition_settings=None, **kwargs) -> None: super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.row_count = row_count self.row_skips = row_skips @@ -83,4 +95,6 @@ def __init__(self, *, additional_properties=None, source_retry_count=None, sourc self.rfc_table_options = rfc_table_options self.batch_size = batch_size self.custom_rfc_read_table_function_module = custom_rfc_read_table_function_module + self.partition_option = partition_option + self.partition_settings = partition_settings self.type = 'SapTableSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source.py new file mode 100644 index 000000000000..02f89a8fca25 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SybaseSource(CopySource): + """A copy activity source for Sybase databases. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SybaseSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'SybaseSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source_py3.py new file mode 100644 index 000000000000..c11e96174349 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SybaseSource(CopySource): + """A copy activity source for Sybase databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
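For illustration, the new source might be constructed like this; the query text is hypothetical, and the retry wait follows the timespan pattern in the docstring:

from azure.mgmt.datafactory.models import SybaseSource

source = SybaseSource(
    query='select * from dbo.Orders',  # hypothetical database query
    source_retry_count=3,
    source_retry_wait='00:00:30',
    max_concurrent_connections=4,
)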
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'SybaseSource' From b46566ae20d744546776e33063f76ada36096dfe Mon Sep 17 00:00:00 2001 From: Azure SDK for Python bot Date: Tue, 23 Jul 2019 11:56:11 -0700 Subject: [PATCH 10/30] [AutoPR datafactory/resource-manager] fix: datafactory character encoding (#6423) * Generated from 1f768e0b1251c521df6386353c805af1f1980b87 fix: datafactory character encoding * Generated from 1f768e0b1251c521df6386353c805af1f1980b87 fix: datafactory character encoding --- .../mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py | 2 +- .../datafactory/models/cosmos_db_mongo_db_api_source_py3.py | 2 +- .../azure/mgmt/datafactory/models/mongo_db_v2_source.py | 2 +- .../azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py index dae49c1d45eb..44dc7443427b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py @@ -42,7 +42,7 @@ class CosmosDbMongoDbApiSource(CopySource): ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties :param batch_size: Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the - batch size will not affect the user or the application. This property’s + batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. Type: integer (or Expression with resultType integer). :type batch_size: object diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source_py3.py index a4c869645973..7d180f105abf 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source_py3.py @@ -42,7 +42,7 @@ class CosmosDbMongoDbApiSource(CopySource): ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties :param batch_size: Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the - batch size will not affect the user or the application.
This property’s + batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. Type: integer (or Expression with resultType integer). :type batch_size: object diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source.py index 295b74228b9a..e951674a8e22 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source.py @@ -42,7 +42,7 @@ class MongoDbV2Source(CopySource): ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties :param batch_size: Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the - batch size will not affect the user or the application. This property’s + batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. Type: integer (or Expression with resultType integer). :type batch_size: object diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py index 872b060a49bb..9b8eec114a06 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py @@ -42,7 +42,7 @@ class MongoDbV2Source(CopySource): ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties :param batch_size: Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the - batch size will not affect the user or the application. This property’s + batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. Type: integer (or Expression with resultType integer).
:type batch_size: object From dfa74308ed6eec4d2fe3d9055c0a473644365cb0 Mon Sep 17 00:00:00 2001 From: Azure SDK for Python bot Date: Thu, 25 Jul 2019 12:04:14 -0700 Subject: [PATCH 11/30] Generated from 6daaa9ba96f917b57001720be038e62850d1ccbc (#6471) Change type name and add timeout property --- .../azure/mgmt/datafactory/models/__init__.py | 6 +++--- ...nd.py => azure_data_explorer_command_activity.py} | 10 ++++++++-- ...y => azure_data_explorer_command_activity_py3.py} | 12 +++++++++--- .../mgmt/datafactory/models/execution_activity.py | 4 ++-- .../datafactory/models/execution_activity_py3.py | 4 ++-- 5 files changed, 24 insertions(+), 12 deletions(-) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{azure_data_explorer_command.py => azure_data_explorer_command_activity.py} (83%) rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{azure_data_explorer_command_py3.py => azure_data_explorer_command_activity_py3.py} (77%) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index c083add932e7..2cf5158127a7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -402,7 +402,7 @@ from .parquet_source_py3 import ParquetSource from .copy_source_py3 import CopySource from .lookup_activity_py3 import LookupActivity - from .azure_data_explorer_command_py3 import AzureDataExplorerCommand + from .azure_data_explorer_command_activity_py3 import AzureDataExplorerCommandActivity from .log_storage_settings_py3 import LogStorageSettings from .delete_activity_py3 import DeleteActivity from .sql_server_stored_procedure_activity_py3 import SqlServerStoredProcedureActivity @@ -900,7 +900,7 @@ from .parquet_source import ParquetSource from .copy_source import CopySource from .lookup_activity import LookupActivity - from .azure_data_explorer_command import AzureDataExplorerCommand + from .azure_data_explorer_command_activity import AzureDataExplorerCommandActivity from .log_storage_settings import LogStorageSettings from .delete_activity import DeleteActivity from .sql_server_stored_procedure_activity import SqlServerStoredProcedureActivity @@ -1478,7 +1478,7 @@ 'ParquetSource', 'CopySource', 'LookupActivity', - 'AzureDataExplorerCommand', + 'AzureDataExplorerCommandActivity', 'LogStorageSettings', 'DeleteActivity', 'SqlServerStoredProcedureActivity', diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity.py similarity index 83% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity.py index 8d100b9e07b7..308d445d1726 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity.py @@ -12,7 +12,7 @@ from .execution_activity import ExecutionActivity -class AzureDataExplorerCommand(ExecutionActivity): +class AzureDataExplorerCommandActivity(ExecutionActivity): """Azure Data Explorer command activity. 
All required parameters must be populated in order to send to Azure. @@ -39,6 +39,10 @@ class AzureDataExplorerCommand(ExecutionActivity): Explorer command syntax. Type: string (or Expression with resultType string). :type command: object + :param command_timeout: Control command timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..) + :type command_timeout: object """ _validation = { @@ -57,9 +61,11 @@ class AzureDataExplorerCommand(ExecutionActivity): 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, 'command': {'key': 'typeProperties.command', 'type': 'object'}, + 'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'}, } def __init__(self, **kwargs): - super(AzureDataExplorerCommand, self).__init__(**kwargs) + super(AzureDataExplorerCommandActivity, self).__init__(**kwargs) self.command = kwargs.get('command', None) + self.command_timeout = kwargs.get('command_timeout', None) self.type = 'AzureDataExplorerCommand' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity_py3.py similarity index 77% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_py3.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity_py3.py index d3b48a223908..2f04dfddf08f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity_py3.py @@ -12,7 +12,7 @@ from .execution_activity_py3 import ExecutionActivity -class AzureDataExplorerCommand(ExecutionActivity): +class AzureDataExplorerCommandActivity(ExecutionActivity): """Azure Data Explorer command activity. All required parameters must be populated in order to send to Azure. @@ -39,6 +39,10 @@ class AzureDataExplorerCommand(ExecutionActivity): Explorer command syntax. Type: string (or Expression with resultType string). :type command: object + :param command_timeout: Control command timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..) 
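Given that timespan pattern, a sketch of the renamed activity in use; the command text and linked service name are hypothetical:

from azure.mgmt.datafactory.models import (
    AzureDataExplorerCommandActivity,
    LinkedServiceReference,
)

# Kusto control command with a 20-minute timeout (hypothetical command).
activity = AzureDataExplorerCommandActivity(
    name='PurgeStagingTable',
    linked_service_name=LinkedServiceReference(
        reference_name='AzureDataExplorerLinkedService'),
    command='.clear table StagingEvents data',
    command_timeout='00:20:00',
)

Note that while the class is renamed, the wire-level discriminator stays 'AzureDataExplorerCommand', as the _subtype_map changes below show.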
+ :type command_timeout: object """ _validation = { @@ -57,9 +61,11 @@ class AzureDataExplorerCommand(ExecutionActivity): 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, 'command': {'key': 'typeProperties.command', 'type': 'object'}, + 'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'}, } - def __init__(self, *, name: str, command, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, **kwargs) -> None: - super(AzureDataExplorerCommand, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + def __init__(self, *, name: str, command, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, command_timeout=None, **kwargs) -> None: + super(AzureDataExplorerCommandActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.command = command + self.command_timeout = command_timeout self.type = 'AzureDataExplorerCommand' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py index 4d8aecdaf510..8c16eff2c753 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py @@ -20,7 +20,7 @@ class ExecutionActivity(Activity): DatabricksSparkJarActivity, DatabricksNotebookActivity, DataLakeAnalyticsUSQLActivity, AzureMLUpdateResourceActivity, AzureMLBatchExecutionActivity, GetMetadataActivity, WebActivity, - LookupActivity, AzureDataExplorerCommand, DeleteActivity, + LookupActivity, AzureDataExplorerCommandActivity, DeleteActivity, SqlServerStoredProcedureActivity, CustomActivity, ExecuteSSISPackageActivity, HDInsightSparkActivity, HDInsightStreamingActivity, HDInsightMapReduceActivity, @@ -65,7 +65,7 @@ class ExecutionActivity(Activity): } _subtype_map = { - 'type': {'AzureFunctionActivity': 'AzureFunctionActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'GetMetadata': 'GetMetadataActivity', 'WebActivity': 'WebActivity', 'Lookup': 'LookupActivity', 'AzureDataExplorerCommand': 'AzureDataExplorerCommand', 'Delete': 'DeleteActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'Custom': 'CustomActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'Copy': 'CopyActivity'} + 'type': {'AzureFunctionActivity': 'AzureFunctionActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 
'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'GetMetadata': 'GetMetadataActivity', 'WebActivity': 'WebActivity', 'Lookup': 'LookupActivity', 'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'Delete': 'DeleteActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'Custom': 'CustomActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'Copy': 'CopyActivity'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity_py3.py index f8ab752ec3bf..5deb58db81a7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity_py3.py @@ -20,7 +20,7 @@ class ExecutionActivity(Activity): DatabricksSparkJarActivity, DatabricksNotebookActivity, DataLakeAnalyticsUSQLActivity, AzureMLUpdateResourceActivity, AzureMLBatchExecutionActivity, GetMetadataActivity, WebActivity, - LookupActivity, AzureDataExplorerCommand, DeleteActivity, + LookupActivity, AzureDataExplorerCommandActivity, DeleteActivity, SqlServerStoredProcedureActivity, CustomActivity, ExecuteSSISPackageActivity, HDInsightSparkActivity, HDInsightStreamingActivity, HDInsightMapReduceActivity, @@ -65,7 +65,7 @@ class ExecutionActivity(Activity): } _subtype_map = { - 'type': {'AzureFunctionActivity': 'AzureFunctionActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'GetMetadata': 'GetMetadataActivity', 'WebActivity': 'WebActivity', 'Lookup': 'LookupActivity', 'AzureDataExplorerCommand': 'AzureDataExplorerCommand', 'Delete': 'DeleteActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'Custom': 'CustomActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'Copy': 'CopyActivity'} + 'type': {'AzureFunctionActivity': 'AzureFunctionActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'GetMetadata': 'GetMetadataActivity', 'WebActivity': 'WebActivity', 'Lookup': 'LookupActivity', 'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'Delete': 'DeleteActivity', 'SqlServerStoredProcedure': 
'SqlServerStoredProcedureActivity', 'Custom': 'CustomActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'Copy': 'CopyActivity'} } def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, **kwargs) -> None: From aef9b6b9354296067bc9606a0946e76a51a4181c Mon Sep 17 00:00:00 2001 From: Azure SDK for Python bot Date: Sun, 28 Jul 2019 23:45:33 -0700 Subject: [PATCH 12/30] Generated from 04df2c4ad1350ec47a500e1a1d1a609d43398aee (#6505) support dataset v2 split name --- .../models/azure_sql_dw_table_dataset.py | 14 ++++++++++++-- .../models/azure_sql_dw_table_dataset_py3.py | 16 +++++++++++++--- .../models/azure_sql_table_dataset.py | 14 ++++++++++++-- .../models/azure_sql_table_dataset_py3.py | 16 +++++++++++++--- .../datafactory/models/drill_table_dataset.py | 14 ++++++++++++-- .../models/drill_table_dataset_py3.py | 16 +++++++++++++--- .../models/google_big_query_object_dataset.py | 14 ++++++++++++-- .../google_big_query_object_dataset_py3.py | 16 +++++++++++++--- .../models/greenplum_table_dataset.py | 14 ++++++++++++-- .../models/greenplum_table_dataset_py3.py | 16 +++++++++++++--- .../datafactory/models/hive_object_dataset.py | 14 ++++++++++++-- .../models/hive_object_dataset_py3.py | 16 +++++++++++++--- .../datafactory/models/impala_object_dataset.py | 14 ++++++++++++-- .../models/impala_object_dataset_py3.py | 16 +++++++++++++--- .../datafactory/models/oracle_table_dataset.py | 14 ++++++++++++-- .../models/oracle_table_dataset_py3.py | 16 +++++++++++++--- .../datafactory/models/phoenix_object_dataset.py | 14 ++++++++++++-- .../models/phoenix_object_dataset_py3.py | 16 +++++++++++++--- .../datafactory/models/presto_object_dataset.py | 14 ++++++++++++-- .../models/presto_object_dataset_py3.py | 16 +++++++++++++--- .../datafactory/models/spark_object_dataset.py | 14 ++++++++++++-- .../models/spark_object_dataset_py3.py | 16 +++++++++++++--- .../models/sql_server_table_dataset.py | 14 ++++++++++++-- .../models/sql_server_table_dataset_py3.py | 16 +++++++++++++--- .../datafactory/models/vertica_table_dataset.py | 14 ++++++++++++-- .../models/vertica_table_dataset_py3.py | 16 +++++++++++++--- 26 files changed, 325 insertions(+), 65 deletions(-) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py index 0921505515d5..ed9fe8904d73 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py @@ -43,9 +43,15 @@ class AzureSqlDWTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name of the Azure SQL Data Warehouse. Type: - string (or Expression with resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param azure_sql_dw_table_dataset_schema: The schema name of the Azure SQL + Data Warehouse. 
Type: string (or Expression with resultType string). + :type azure_sql_dw_table_dataset_schema: object + :param table: The table name of the Azure SQL Data Warehouse. Type: string + (or Expression with resultType string). + :type table: object """ _validation = { @@ -64,9 +70,13 @@ class AzureSqlDWTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'azure_sql_dw_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__(self, **kwargs): super(AzureSqlDWTableDataset, self).__init__(**kwargs) self.table_name = kwargs.get('table_name', None) + self.azure_sql_dw_table_dataset_schema = kwargs.get('azure_sql_dw_table_dataset_schema', None) + self.table = kwargs.get('table', None) self.type = 'AzureSqlDWTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py index 0be72998fc64..a38e4ab479c9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py @@ -43,9 +43,15 @@ class AzureSqlDWTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name of the Azure SQL Data Warehouse. Type: - string (or Expression with resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param azure_sql_dw_table_dataset_schema: The schema name of the Azure SQL + Data Warehouse. Type: string (or Expression with resultType string). + :type azure_sql_dw_table_dataset_schema: object + :param table: The table name of the Azure SQL Data Warehouse. Type: string + (or Expression with resultType string). 
+ :type table: object """ _validation = { @@ -64,9 +70,13 @@ class AzureSqlDWTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'azure_sql_dw_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, azure_sql_dw_table_dataset_schema=None, table=None, **kwargs) -> None: super(AzureSqlDWTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name + self.azure_sql_dw_table_dataset_schema = azure_sql_dw_table_dataset_schema + self.table = table self.type = 'AzureSqlDWTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py index 8d8d90d76e85..ce8b08944f3a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py @@ -43,9 +43,15 @@ class AzureSqlTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name of the Azure SQL database. Type: string - (or Expression with resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param azure_sql_table_dataset_schema: The schema name of the Azure SQL + database. Type: string (or Expression with resultType string). + :type azure_sql_table_dataset_schema: object + :param table: The table name of the Azure SQL database. Type: string (or + Expression with resultType string). 
+ :type table: object """ _validation = { @@ -64,9 +70,13 @@ class AzureSqlTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'azure_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__(self, **kwargs): super(AzureSqlTableDataset, self).__init__(**kwargs) self.table_name = kwargs.get('table_name', None) + self.azure_sql_table_dataset_schema = kwargs.get('azure_sql_table_dataset_schema', None) + self.table = kwargs.get('table', None) self.type = 'AzureSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py index 3d4f1eac3f58..3ed19ee47e7e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py @@ -43,9 +43,15 @@ class AzureSqlTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name of the Azure SQL database. Type: string - (or Expression with resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param azure_sql_table_dataset_schema: The schema name of the Azure SQL + database. Type: string (or Expression with resultType string). + :type azure_sql_table_dataset_schema: object + :param table: The table name of the Azure SQL database. Type: string (or + Expression with resultType string). 
+ :type table: object """ _validation = { @@ -64,9 +70,13 @@ class AzureSqlTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'azure_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, azure_sql_table_dataset_schema=None, table=None, **kwargs) -> None: super(AzureSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name + self.azure_sql_table_dataset_schema = azure_sql_table_dataset_schema + self.table = table self.type = 'AzureSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset.py index c12b086b7824..3dfd5715deb9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset.py @@ -43,9 +43,15 @@ class DrillTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param table: The table name of the Drill. Type: string (or Expression + with resultType string). + :type table: object + :param drill_table_dataset_schema: The schema name of the Drill. Type: + string (or Expression with resultType string). 
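The same split of table_name into separate schema and table properties repeats in each dataset below. One hedged example of the new shape, with hypothetical names; the prefixed *_schema parameter avoids clashing with the base Dataset's schema parameter:

from azure.mgmt.datafactory.models import (
    AzureSqlTableDataset,
    LinkedServiceReference,
)

dataset = AzureSqlTableDataset(
    linked_service_name=LinkedServiceReference(
        reference_name='AzureSqlDatabaseLinkedService'),
    azure_sql_table_dataset_schema='SalesLT',  # serialized to typeProperties.schema
    table='Customer',                          # serialized to typeProperties.table
)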
+ :type drill_table_dataset_schema: object """ _validation = { @@ -64,9 +70,13 @@ class DrillTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'drill_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__(self, **kwargs): super(DrillTableDataset, self).__init__(**kwargs) self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.drill_table_dataset_schema = kwargs.get('drill_table_dataset_schema', None) self.type = 'DrillTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset_py3.py index f4f5712f29e3..db46bdc4e0bd 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset_py3.py @@ -43,9 +43,15 @@ class DrillTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param table: The table name of the Drill. Type: string (or Expression + with resultType string). + :type table: object + :param drill_table_dataset_schema: The schema name of the Drill. Type: + string (or Expression with resultType string). 
+ :type drill_table_dataset_schema: object """ _validation = { @@ -64,9 +70,13 @@ class DrillTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'drill_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, drill_table_dataset_schema=None, **kwargs) -> None: super(DrillTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name + self.table = table + self.drill_table_dataset_schema = drill_table_dataset_schema self.type = 'DrillTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset.py index 5750875dc3a0..920489742bbf 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset.py @@ -43,9 +43,15 @@ class GoogleBigQueryObjectDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + database + table properties instead. :type table_name: object + :param table: The table name of the Google BigQuery. Type: string (or + Expression with resultType string). + :type table: object + :param dataset: The database name of the Google BigQuery. Type: string (or + Expression with resultType string). 
+ :type dataset: object """ _validation = { @@ -64,9 +70,13 @@ class GoogleBigQueryObjectDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'object'}, } def __init__(self, **kwargs): super(GoogleBigQueryObjectDataset, self).__init__(**kwargs) self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.dataset = kwargs.get('dataset', None) self.type = 'GoogleBigQueryObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset_py3.py index 625cd068b731..205819f8eeef 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset_py3.py @@ -43,9 +43,15 @@ class GoogleBigQueryObjectDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + database + table properties instead. :type table_name: object + :param table: The table name of the Google BigQuery. Type: string (or + Expression with resultType string). + :type table: object + :param dataset: The database name of the Google BigQuery. Type: string (or + Expression with resultType string). 
+ :type dataset: object """ _validation = { @@ -64,9 +70,13 @@ class GoogleBigQueryObjectDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'object'}, } - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, dataset=None, **kwargs) -> None: super(GoogleBigQueryObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name + self.table = table + self.dataset = dataset self.type = 'GoogleBigQueryObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset.py index fa4a066f11a9..eb0ea08ee544 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset.py @@ -43,9 +43,15 @@ class GreenplumTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param table: The table name of Greenplum. Type: string (or Expression + with resultType string). + :type table: object + :param greenplum_table_dataset_schema: The schema name of Greenplum. Type: + string (or Expression with resultType string). 
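Google BigQuery is the one variant here that pairs table with a dataset name instead of a schema; a sketch under the same assumptions, with hypothetical names:

from azure.mgmt.datafactory.models import (
    GoogleBigQueryObjectDataset,
    LinkedServiceReference,
)

bq_object = GoogleBigQueryObjectDataset(
    linked_service_name=LinkedServiceReference(
        reference_name='GoogleBigQueryLinkedService'),
    dataset='analytics',   # serialized to typeProperties.dataset
    table='page_views',    # serialized to typeProperties.table
)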
+ :type greenplum_table_dataset_schema: object """ _validation = { @@ -64,9 +70,13 @@ class GreenplumTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'greenplum_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__(self, **kwargs): super(GreenplumTableDataset, self).__init__(**kwargs) self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.greenplum_table_dataset_schema = kwargs.get('greenplum_table_dataset_schema', None) self.type = 'GreenplumTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset_py3.py index 7c698db22339..7f37fff9108d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset_py3.py @@ -43,9 +43,15 @@ class GreenplumTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param table: The table name of Greenplum. Type: string (or Expression + with resultType string). + :type table: object + :param greenplum_table_dataset_schema: The schema name of Greenplum. Type: + string (or Expression with resultType string). 
+ :type greenplum_table_dataset_schema: object """ _validation = { @@ -64,9 +70,13 @@ class GreenplumTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'greenplum_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, greenplum_table_dataset_schema=None, **kwargs) -> None: super(GreenplumTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name + self.table = table + self.greenplum_table_dataset_schema = greenplum_table_dataset_schema self.type = 'GreenplumTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset.py index 7dc4fd367f8a..07b6f2b54901 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset.py @@ -43,9 +43,15 @@ class HiveObjectDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param table: The table name of the Hive. Type: string (or Expression with + resultType string). + :type table: object + :param hive_object_dataset_schema: The schema name of the Hive. Type: + string (or Expression with resultType string). 
+ :type hive_object_dataset_schema: object """ _validation = { @@ -64,9 +70,13 @@ class HiveObjectDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'hive_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__(self, **kwargs): super(HiveObjectDataset, self).__init__(**kwargs) self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.hive_object_dataset_schema = kwargs.get('hive_object_dataset_schema', None) self.type = 'HiveObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset_py3.py index c007333721be..69384bdfa99a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset_py3.py @@ -43,9 +43,15 @@ class HiveObjectDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param table: The table name of the Hive. Type: string (or Expression with + resultType string). + :type table: object + :param hive_object_dataset_schema: The schema name of the Hive. Type: + string (or Expression with resultType string). 
+ :type hive_object_dataset_schema: object """ _validation = { @@ -64,9 +70,13 @@ class HiveObjectDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'hive_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, hive_object_dataset_schema=None, **kwargs) -> None: super(HiveObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name + self.table = table + self.hive_object_dataset_schema = hive_object_dataset_schema self.type = 'HiveObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset.py index d9bf591d8021..8faee4f09240 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset.py @@ -43,9 +43,15 @@ class ImpalaObjectDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param table: The table name of the Impala. Type: string (or Expression + with resultType string). + :type table: object + :param impala_object_dataset_schema: The schema name of the Impala. Type: + string (or Expression with resultType string). 
+ :type impala_object_dataset_schema: object """ _validation = { @@ -64,9 +70,13 @@ class ImpalaObjectDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'impala_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__(self, **kwargs): super(ImpalaObjectDataset, self).__init__(**kwargs) self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.impala_object_dataset_schema = kwargs.get('impala_object_dataset_schema', None) self.type = 'ImpalaObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset_py3.py index d103603b2586..5652b5c9e4b0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset_py3.py @@ -43,9 +43,15 @@ class ImpalaObjectDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param table: The table name of the Impala. Type: string (or Expression + with resultType string). + :type table: object + :param impala_object_dataset_schema: The schema name of the Impala. Type: + string (or Expression with resultType string). 
+ :type impala_object_dataset_schema: object """ _validation = { @@ -64,9 +70,13 @@ class ImpalaObjectDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'impala_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, impala_object_dataset_schema=None, **kwargs) -> None: super(ImpalaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name + self.table = table + self.impala_object_dataset_schema = impala_object_dataset_schema self.type = 'ImpalaObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py index af51100cd88e..c76b5ced3e5c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py @@ -43,9 +43,15 @@ class OracleTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name of the on-premises Oracle database. - Type: string (or Expression with resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param oracle_table_dataset_schema: The schema name of the on-premises + Oracle database. Type: string (or Expression with resultType string). + :type oracle_table_dataset_schema: object + :param table: The table name of the on-premises Oracle database. Type: + string (or Expression with resultType string). 
+ :type table: object """ _validation = { @@ -64,9 +70,13 @@ class OracleTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'oracle_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__(self, **kwargs): super(OracleTableDataset, self).__init__(**kwargs) self.table_name = kwargs.get('table_name', None) + self.oracle_table_dataset_schema = kwargs.get('oracle_table_dataset_schema', None) + self.table = kwargs.get('table', None) self.type = 'OracleTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py index 563371653de8..b588fbac5244 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py @@ -43,9 +43,15 @@ class OracleTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name of the on-premises Oracle database. - Type: string (or Expression with resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param oracle_table_dataset_schema: The schema name of the on-premises + Oracle database. Type: string (or Expression with resultType string). + :type oracle_table_dataset_schema: object + :param table: The table name of the on-premises Oracle database. Type: + string (or Expression with resultType string). 
+ :type table: object """ _validation = { @@ -64,9 +70,13 @@ class OracleTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'oracle_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, oracle_table_dataset_schema=None, table=None, **kwargs) -> None: super(OracleTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name + self.oracle_table_dataset_schema = oracle_table_dataset_schema + self.table = table self.type = 'OracleTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset.py index 2d9cd5dcd581..ccaa2eb49abd 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset.py @@ -43,9 +43,15 @@ class PhoenixObjectDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param table: The table name of the Phoenix. Type: string (or Expression + with resultType string). + :type table: object + :param phoenix_object_dataset_schema: The schema name of the Phoenix. + Type: string (or Expression with resultType string). 
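The Oracle pair above is representative of every dataset model touched by this patch: table_name remains accepted but is now documented as slated for retirement, while the new schema and table properties each map to their own entry under typeProperties. A minimal construction sketch against the py3 signature shown above; the linked-service name is hypothetical:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        OracleTableDataset,
    )

    # Any existing Oracle linked service would do; this name is made up.
    ls = LinkedServiceReference(reference_name="OracleLinkedService")

    # Legacy style: still accepted, but marked for retirement by this patch.
    legacy = OracleTableDataset(linked_service_name=ls, table_name="HR.EMPLOYEES")

    # New style: schema and table travel as separate type properties. The schema
    # argument carries the class-name prefix because the inherited Dataset
    # constructor already uses a plain schema parameter for the physical-type
    # schema of the dataset.
    current = OracleTableDataset(
        linked_service_name=ls,
        oracle_table_dataset_schema="HR",
        table="EMPLOYEES",
    )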
+ :type phoenix_object_dataset_schema: object """ _validation = { @@ -64,9 +70,13 @@ class PhoenixObjectDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'phoenix_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__(self, **kwargs): super(PhoenixObjectDataset, self).__init__(**kwargs) self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.phoenix_object_dataset_schema = kwargs.get('phoenix_object_dataset_schema', None) self.type = 'PhoenixObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset_py3.py index 32c6e5f9836f..cda4dc41dc22 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset_py3.py @@ -43,9 +43,15 @@ class PhoenixObjectDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param table: The table name of the Phoenix. Type: string (or Expression + with resultType string). + :type table: object + :param phoenix_object_dataset_schema: The schema name of the Phoenix. + Type: string (or Expression with resultType string). 
+ :type phoenix_object_dataset_schema: object """ _validation = { @@ -64,9 +70,13 @@ class PhoenixObjectDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'phoenix_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, phoenix_object_dataset_schema=None, **kwargs) -> None: super(PhoenixObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name + self.table = table + self.phoenix_object_dataset_schema = phoenix_object_dataset_schema self.type = 'PhoenixObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset.py index 35ceaa1389a7..eb80e1a97750 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset.py @@ -43,9 +43,15 @@ class PrestoObjectDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param table: The table name of the Presto. Type: string (or Expression + with resultType string). + :type table: object + :param presto_object_dataset_schema: The schema name of the Presto. Type: + string (or Expression with resultType string). 
+ :type presto_object_dataset_schema: object """ _validation = { @@ -64,9 +70,13 @@ class PrestoObjectDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'presto_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__(self, **kwargs): super(PrestoObjectDataset, self).__init__(**kwargs) self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.presto_object_dataset_schema = kwargs.get('presto_object_dataset_schema', None) self.type = 'PrestoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset_py3.py index 193004e2c381..e3bd2f7e36aa 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset_py3.py @@ -43,9 +43,15 @@ class PrestoObjectDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param table: The table name of the Presto. Type: string (or Expression + with resultType string). + :type table: object + :param presto_object_dataset_schema: The schema name of the Presto. Type: + string (or Expression with resultType string). 
+ :type presto_object_dataset_schema: object """ _validation = { @@ -64,9 +70,13 @@ class PrestoObjectDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'presto_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, presto_object_dataset_schema=None, **kwargs) -> None: super(PrestoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name + self.table = table + self.presto_object_dataset_schema = presto_object_dataset_schema self.type = 'PrestoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset.py index 8d1493ea9c7f..bdbdf067e1ea 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset.py @@ -43,9 +43,15 @@ class SparkObjectDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param table: The table name of the Spark. Type: string (or Expression + with resultType string). + :type table: object + :param spark_object_dataset_schema: The schema name of the Spark. Type: + string (or Expression with resultType string). 
+ :type spark_object_dataset_schema: object """ _validation = { @@ -64,9 +70,13 @@ class SparkObjectDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'spark_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__(self, **kwargs): super(SparkObjectDataset, self).__init__(**kwargs) self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.spark_object_dataset_schema = kwargs.get('spark_object_dataset_schema', None) self.type = 'SparkObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset_py3.py index 3ab167dd3540..afe383951f1c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset_py3.py @@ -43,9 +43,15 @@ class SparkObjectDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param table: The table name of the Spark. Type: string (or Expression + with resultType string). + :type table: object + :param spark_object_dataset_schema: The schema name of the Spark. Type: + string (or Expression with resultType string). 
+ :type spark_object_dataset_schema: object """ _validation = { @@ -64,9 +70,13 @@ class SparkObjectDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'spark_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, spark_object_dataset_schema=None, **kwargs) -> None: super(SparkObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name + self.table = table + self.spark_object_dataset_schema = spark_object_dataset_schema self.type = 'SparkObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py index d50540de4704..3998671ee8ae 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py @@ -43,9 +43,15 @@ class SqlServerTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name of the SQL Server dataset. Type: string - (or Expression with resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param sql_server_table_dataset_schema: The schema name of the SQL Server + dataset. Type: string (or Expression with resultType string). + :type sql_server_table_dataset_schema: object + :param table: The table name of the SQL Server dataset. Type: string (or + Expression with resultType string). 
+ :type table: object """ _validation = { @@ -64,9 +70,13 @@ class SqlServerTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'sql_server_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__(self, **kwargs): super(SqlServerTableDataset, self).__init__(**kwargs) self.table_name = kwargs.get('table_name', None) + self.sql_server_table_dataset_schema = kwargs.get('sql_server_table_dataset_schema', None) + self.table = kwargs.get('table', None) self.type = 'SqlServerTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py index bc8d4bec92e0..989780c9bfda 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py @@ -43,9 +43,15 @@ class SqlServerTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name of the SQL Server dataset. Type: string - (or Expression with resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param sql_server_table_dataset_schema: The schema name of the SQL Server + dataset. Type: string (or Expression with resultType string). + :type sql_server_table_dataset_schema: object + :param table: The table name of the SQL Server dataset. Type: string (or + Expression with resultType string). 
+ :type table: object """ _validation = { @@ -64,9 +70,13 @@ class SqlServerTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'sql_server_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, sql_server_table_dataset_schema=None, table=None, **kwargs) -> None: super(SqlServerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name + self.sql_server_table_dataset_schema = sql_server_table_dataset_schema + self.table = table self.type = 'SqlServerTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset.py index e84465f8ba07..151a0d000e3f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset.py @@ -43,9 +43,15 @@ class VerticaTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param table: The table name of the Vertica. Type: string (or Expression + with resultType string). + :type table: object + :param vertica_table_dataset_schema: The schema name of the Vertica. Type: + string (or Expression with resultType string). 
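Those 'typeProperties.schema' and 'typeProperties.table' attribute-map keys are what make the split work on the wire: msrest expands dotted keys into nested JSON when serializing. A sketch of the expected shape, assuming msrest's stock Model.serialize() behavior and a hypothetical linked-service name:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        SqlServerTableDataset,
    )

    ds = SqlServerTableDataset(
        linked_service_name=LinkedServiceReference(reference_name="SqlServerLS"),
        sql_server_table_dataset_schema="dbo",
        table="SalesOrders",
    )

    body = ds.serialize()
    # Abridged expected output -- the dotted attribute-map keys become nesting:
    # {
    #   "type": "SqlServerTable",
    #   "linkedServiceName": {"type": "LinkedServiceReference",
    #                         "referenceName": "SqlServerLS"},
    #   "typeProperties": {"schema": "dbo", "table": "SalesOrders"}
    # }
    print(body["typeProperties"])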
+ :type vertica_table_dataset_schema: object """ _validation = { @@ -64,9 +70,13 @@ class VerticaTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'vertica_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__(self, **kwargs): super(VerticaTableDataset, self).__init__(**kwargs) self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.vertica_table_dataset_schema = kwargs.get('vertica_table_dataset_schema', None) self.type = 'VerticaTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset_py3.py index 87d69bb9a443..4c2fc8da32ad 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset_py3.py @@ -43,9 +43,15 @@ class VerticaTableDataset(Dataset): :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). + :param table_name: This property will be retired. Please consider using + schema + table properties instead. :type table_name: object + :param table: The table name of the Vertica. Type: string (or Expression + with resultType string). + :type table: object + :param vertica_table_dataset_schema: The schema name of the Vertica. Type: + string (or Expression with resultType string). 
+ :type vertica_table_dataset_schema: object """ _validation = { @@ -64,9 +70,13 @@ class VerticaTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'vertica_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, vertica_table_dataset_schema=None, **kwargs) -> None: super(VerticaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name + self.table = table + self.vertica_table_dataset_schema = vertica_table_dataset_schema self.type = 'VerticaTable' From 93792601b9cdf8f6bb98e6e6122324d7535f393a Mon Sep 17 00:00:00 2001 From: Azure SDK for Python bot Date: Mon, 29 Jul 2019 09:18:50 -0700 Subject: [PATCH 13/30] [AutoPR datafactory/resource-manager] [DataFactory]SapBwCube and Sybase Dataset (#6518) * Generated from b88af2e2b065a6ff559d879d690d65096d1bb56f [DataFactory]SapBwCube and Sybase Dataset * Generated from b88af2e2b065a6ff559d879d690d65096d1bb56f [DataFactory]SapBwCube and Sybase Dataset --- .../azure/mgmt/datafactory/models/__init__.py | 9 +++ .../mgmt/datafactory/models/copy_source.py | 6 +- .../datafactory/models/copy_source_py3.py | 6 +- .../azure/mgmt/datafactory/models/dataset.py | 17 ++--- .../mgmt/datafactory/models/dataset_py3.py | 17 ++--- .../datafactory/models/sap_bw_cube_dataset.py | 67 +++++++++++++++++ .../models/sap_bw_cube_dataset_py3.py | 67 +++++++++++++++++ .../mgmt/datafactory/models/sap_bw_source.py | 57 +++++++++++++++ .../datafactory/models/sap_bw_source_py3.py | 57 +++++++++++++++ .../models/sybase_table_dataset.py | 72 +++++++++++++++++++ .../models/sybase_table_dataset_py3.py | 72 +++++++++++++++++++ 11 files changed, 425 insertions(+), 22 deletions(-) create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset_py3.py diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index 2cf5158127a7..bfb40aef6df8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -244,6 +244,8 @@ from 
.sap_hana_table_dataset_py3 import SapHanaTableDataset from .sap_ecc_resource_dataset_py3 import SapEccResourceDataset from .sap_cloud_for_customer_resource_dataset_py3 import SapCloudForCustomerResourceDataset + from .sap_bw_cube_dataset_py3 import SapBwCubeDataset + from .sybase_table_dataset_py3 import SybaseTableDataset from .salesforce_service_cloud_object_dataset_py3 import SalesforceServiceCloudObjectDataset from .salesforce_object_dataset_py3 import SalesforceObjectDataset from .microsoft_access_table_dataset_py3 import MicrosoftAccessTableDataset @@ -371,6 +373,7 @@ from .salesforce_service_cloud_source_py3 import SalesforceServiceCloudSource from .salesforce_source_py3 import SalesforceSource from .odata_source_py3 import ODataSource + from .sap_bw_source_py3 import SapBwSource from .sybase_source_py3 import SybaseSource from .postgre_sql_source_py3 import PostgreSqlSource from .my_sql_source_py3 import MySqlSource @@ -742,6 +745,8 @@ from .sap_hana_table_dataset import SapHanaTableDataset from .sap_ecc_resource_dataset import SapEccResourceDataset from .sap_cloud_for_customer_resource_dataset import SapCloudForCustomerResourceDataset + from .sap_bw_cube_dataset import SapBwCubeDataset + from .sybase_table_dataset import SybaseTableDataset from .salesforce_service_cloud_object_dataset import SalesforceServiceCloudObjectDataset from .salesforce_object_dataset import SalesforceObjectDataset from .microsoft_access_table_dataset import MicrosoftAccessTableDataset @@ -869,6 +874,7 @@ from .salesforce_service_cloud_source import SalesforceServiceCloudSource from .salesforce_source import SalesforceSource from .odata_source import ODataSource + from .sap_bw_source import SapBwSource from .sybase_source import SybaseSource from .postgre_sql_source import PostgreSqlSource from .my_sql_source import MySqlSource @@ -1320,6 +1326,8 @@ 'SapHanaTableDataset', 'SapEccResourceDataset', 'SapCloudForCustomerResourceDataset', + 'SapBwCubeDataset', + 'SybaseTableDataset', 'SalesforceServiceCloudObjectDataset', 'SalesforceObjectDataset', 'MicrosoftAccessTableDataset', @@ -1447,6 +1455,7 @@ 'SalesforceServiceCloudSource', 'SalesforceSource', 'ODataSource', + 'SapBwSource', 'SybaseSource', 'PostgreSqlSource', 'MySqlSource', diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py index f4c7a234ab5c..b470cbbb948b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py @@ -31,8 +31,8 @@ class CopySource(Model): FileSystemSource, SqlDWSource, SqlMISource, AzureSqlSource, SqlServerSource, SqlSource, RestSource, SapTableSource, SapOpenHubSource, SapHanaSource, SapEccSource, SapCloudForCustomerSource, - SalesforceServiceCloudSource, SalesforceSource, ODataSource, SybaseSource, - PostgreSqlSource, MySqlSource, OdbcSource, Db2Source, + SalesforceServiceCloudSource, SalesforceSource, ODataSource, SapBwSource, + SybaseSource, PostgreSqlSource, MySqlSource, OdbcSource, Db2Source, MicrosoftAccessSource, InformixSource, RelationalSource, CommonDataServiceForAppsSource, DynamicsCrmSource, DynamicsSource, DocumentDbCollectionSource, BlobSource, AzureTableSource, BinarySource, @@ -71,7 +71,7 @@ class CopySource(Model): } _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 
'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'ODataSource': 'ODataSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'BinarySource': 'BinarySource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 
'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'ODataSource': 'ODataSource', 'SapBwSource': 'SapBwSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'BinarySource': 'BinarySource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py index a1b3ee8e1253..d66045e6f5b4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py @@ -31,8 +31,8 @@ class CopySource(Model): FileSystemSource, SqlDWSource, SqlMISource, AzureSqlSource, SqlServerSource, SqlSource, RestSource, SapTableSource, SapOpenHubSource, SapHanaSource, SapEccSource, SapCloudForCustomerSource, - SalesforceServiceCloudSource, SalesforceSource, ODataSource, SybaseSource, - PostgreSqlSource, MySqlSource, OdbcSource, Db2Source, + SalesforceServiceCloudSource, SalesforceSource, ODataSource, SapBwSource, + SybaseSource, PostgreSqlSource, MySqlSource, OdbcSource, Db2Source, MicrosoftAccessSource, InformixSource, RelationalSource, CommonDataServiceForAppsSource, DynamicsCrmSource, DynamicsSource, 
DocumentDbCollectionSource, BlobSource, AzureTableSource, BinarySource, @@ -71,7 +71,7 @@ class CopySource(Model): } _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'ODataSource': 'ODataSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'BinarySource': 'BinarySource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 
'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'ODataSource': 'ODataSource', 'SapBwSource': 'SapBwSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'BinarySource': 'BinarySource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py index 27de2e85bf37..eb856ba35b68 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py @@ -33,13 +33,14 @@ class Dataset(Model): WebTableDataset, SapTableResourceDataset, RestResourceDataset, SqlServerTableDataset, SapOpenHubTableDataset, SapHanaTableDataset, SapEccResourceDataset, SapCloudForCustomerResourceDataset, - SalesforceServiceCloudObjectDataset, SalesforceObjectDataset, - 
MicrosoftAccessTableDataset, PostgreSqlTableDataset, MySqlTableDataset, - OdbcTableDataset, InformixTableDataset, RelationalTableDataset, - AzureMySqlTableDataset, TeradataTableDataset, OracleTableDataset, - ODataResourceDataset, CosmosDbMongoDbApiCollectionDataset, - MongoDbV2CollectionDataset, MongoDbCollectionDataset, FileShareDataset, - Office365Dataset, AzureBlobFSDataset, AzureDataLakeStoreDataset, + SapBwCubeDataset, SybaseTableDataset, SalesforceServiceCloudObjectDataset, + SalesforceObjectDataset, MicrosoftAccessTableDataset, + PostgreSqlTableDataset, MySqlTableDataset, OdbcTableDataset, + InformixTableDataset, RelationalTableDataset, AzureMySqlTableDataset, + TeradataTableDataset, OracleTableDataset, ODataResourceDataset, + CosmosDbMongoDbApiCollectionDataset, MongoDbV2CollectionDataset, + MongoDbCollectionDataset, FileShareDataset, Office365Dataset, + AzureBlobFSDataset, AzureDataLakeStoreDataset, CommonDataServiceForAppsEntityDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, @@ -94,7 +95,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 
'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 
'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py index 8aedef87f3a9..6b6486b61fe3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py @@ -33,13 +33,14 @@ class Dataset(Model): WebTableDataset, SapTableResourceDataset, RestResourceDataset, SqlServerTableDataset, SapOpenHubTableDataset, SapHanaTableDataset, SapEccResourceDataset, SapCloudForCustomerResourceDataset, - SalesforceServiceCloudObjectDataset, SalesforceObjectDataset, - MicrosoftAccessTableDataset, PostgreSqlTableDataset, MySqlTableDataset, - OdbcTableDataset, InformixTableDataset, RelationalTableDataset, - AzureMySqlTableDataset, TeradataTableDataset, OracleTableDataset, - ODataResourceDataset, CosmosDbMongoDbApiCollectionDataset, - MongoDbV2CollectionDataset, MongoDbCollectionDataset, FileShareDataset, - Office365Dataset, AzureBlobFSDataset, AzureDataLakeStoreDataset, + SapBwCubeDataset, SybaseTableDataset, SalesforceServiceCloudObjectDataset, + SalesforceObjectDataset, MicrosoftAccessTableDataset, + PostgreSqlTableDataset, MySqlTableDataset, OdbcTableDataset, + InformixTableDataset, RelationalTableDataset, AzureMySqlTableDataset, + TeradataTableDataset, OracleTableDataset, ODataResourceDataset, + CosmosDbMongoDbApiCollectionDataset, MongoDbV2CollectionDataset, + MongoDbCollectionDataset, FileShareDataset, Office365Dataset, + AzureBlobFSDataset, AzureDataLakeStoreDataset, CommonDataServiceForAppsEntityDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, @@ -94,7 +95,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 
'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 
'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset.py new file mode 100644 index 000000000000..048d26f85696 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class SapBwCubeDataset(Dataset): + """The SAP BW cube dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SapBwCubeDataset, self).__init__(**kwargs) + self.type = 'SapBwCube' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset_py3.py new file mode 100644 index 000000000000..08334a824ba4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset_py3.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SapBwCubeDataset(Dataset): + """The SAP BW cube dataset. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(SapBwCubeDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'SapBwCube' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source.py new file mode 100644 index 000000000000..e3762d8e694e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SapBwSource(CopySource): + """A copy activity source for SapBW server via MDX. + + All required parameters must be populated in order to send to Azure. 
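As a quick illustration of the new model above, here is a minimal sketch of constructing the SAP BW cube dataset; the linked service name and description are placeholders rather than values from this patch, and LinkedServiceReference is assumed to take a reference_name as elsewhere in this package:

from azure.mgmt.datafactory.models import LinkedServiceReference, SapBwCubeDataset

# 'SapBwLinkedService' is a hypothetical linked service name.
cube = SapBwCubeDataset(
    linked_service_name=LinkedServiceReference(reference_name='SapBwLinkedService'),
    description='SAP BW cube exposed to Data Factory',
)
# The discriminator is fixed by the generated constructor.
assert cube.type == 'SapBwCube'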
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: MDX query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapBwSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'SapBwSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source_py3.py new file mode 100644 index 000000000000..ed6ff734742d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SapBwSource(CopySource): + """A copy activity source for SapBW server via MDX. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: MDX query. Type: string (or Expression with resultType + string). 
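For the copy-activity side, a minimal sketch of the SapBwSource defined above, using a hypothetical MDX statement; only query is specific to this subtype, while the retry settings are inherited from CopySource:

from azure.mgmt.datafactory.models import SapBwSource

source = SapBwSource(
    query='SELECT [Measures].MEMBERS ON COLUMNS FROM [SalesCube]',  # hypothetical MDX
    source_retry_count=3,
    source_retry_wait='00:00:30',  # must match the timespan pattern documented above
)
assert source.type == 'SapBwSource'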
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'SapBwSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset.py new file mode 100644 index 000000000000..ff2dfd5471fb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class SybaseTableDataset(Dataset): + """The Sybase table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Sybase table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SybaseTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'SybaseTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset_py3.py new file mode 100644 index 000000000000..88e9d3c287fe --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SybaseTableDataset(Dataset): + """The Sybase table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Sybase table name. Type: string (or Expression with + resultType string). 
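A minimal sketch of the Sybase table dataset defined above; the linked service and table names are hypothetical:

from azure.mgmt.datafactory.models import LinkedServiceReference, SybaseTableDataset

table = SybaseTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='SybaseLinkedService'),
    table_name='dbo.Orders',  # may also be an Expression with resultType string
)
assert table.type == 'SybaseTable'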
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(SybaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'SybaseTable' From f5d3db0a038fb6cfaf09a939bbcd22963aa06914 Mon Sep 17 00:00:00 2001 From: Azure SDK for Python bot Date: Wed, 31 Jul 2019 00:17:28 -0700 Subject: [PATCH 14/30] [AutoPR datafactory/resource-manager] Enable Avro Dataset in public swagger (#6567) * Generated from ec112148bf30430557ff3fac0c74f0706b1042de Enable Avro Dataset in public swagger * Generated from e41431428e45beaa5bbb12344d3332479c095e31 UPDATE --- .../azure/mgmt/datafactory/models/__init__.py | 26 ++++-- .../mgmt/datafactory/models/avro_dataset.py | 83 +++++++++++++++++++ .../datafactory/models/avro_dataset_py3.py | 83 +++++++++++++++++++ .../mgmt/datafactory/models/avro_sink.py | 69 +++++++++++++++ .../mgmt/datafactory/models/avro_sink_py3.py | 69 +++++++++++++++ .../mgmt/datafactory/models/avro_source.py | 56 +++++++++++++ .../datafactory/models/avro_source_py3.py | 56 +++++++++++++ .../datafactory/models/avro_write_settings.py | 46 ++++++++++ .../models/avro_write_settings_py3.py | 46 ++++++++++ .../mgmt/datafactory/models/copy_sink.py | 6 +- .../mgmt/datafactory/models/copy_sink_py3.py | 6 +- .../mgmt/datafactory/models/copy_source.py | 4 +- .../datafactory/models/copy_source_py3.py | 4 +- .../data_factory_management_client_enums.py | 9 ++ .../azure/mgmt/datafactory/models/dataset.py | 4 +- .../mgmt/datafactory/models/dataset_py3.py | 4 +- 16 files changed, 551 insertions(+), 20 deletions(-) create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings_py3.py diff --git 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index bfb40aef6df8..55e2b9a103bc 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -289,6 +289,7 @@ from .binary_dataset_py3 import BinaryDataset from .delimited_text_dataset_py3 import DelimitedTextDataset from .parquet_dataset_py3 import ParquetDataset + from .avro_dataset_py3 import AvroDataset from .amazon_s3_dataset_py3 import AmazonS3Dataset from .activity_policy_py3 import ActivityPolicy from .azure_function_activity_py3 import AzureFunctionActivity @@ -403,6 +404,7 @@ from .delimited_text_read_settings_py3 import DelimitedTextReadSettings from .delimited_text_source_py3 import DelimitedTextSource from .parquet_source_py3 import ParquetSource + from .avro_source_py3 import AvroSource from .copy_source_py3 import CopySource from .lookup_activity_py3 import LookupActivity from .azure_data_explorer_command_activity_py3 import AzureDataExplorerCommandActivity @@ -455,12 +457,14 @@ from .azure_blob_storage_write_settings_py3 import AzureBlobStorageWriteSettings from .store_write_settings_py3 import StoreWriteSettings from .parquet_sink_py3 import ParquetSink + from .delimited_text_write_settings_py3 import DelimitedTextWriteSettings + from .format_write_settings_py3 import FormatWriteSettings + from .avro_write_settings_py3 import AvroWriteSettings + from .avro_sink_py3 import AvroSink from .azure_table_sink_py3 import AzureTableSink from .azure_queue_sink_py3 import AzureQueueSink from .sap_cloud_for_customer_sink_py3 import SapCloudForCustomerSink from .azure_postgre_sql_sink_py3 import AzurePostgreSqlSink - from .format_write_settings_py3 import FormatWriteSettings - from .delimited_text_write_settings_py3 import DelimitedTextWriteSettings from .delimited_text_sink_py3 import DelimitedTextSink from .copy_sink_py3 import CopySink from .copy_activity_py3 import CopyActivity @@ -790,6 +794,7 @@ from .binary_dataset import BinaryDataset from .delimited_text_dataset import DelimitedTextDataset from .parquet_dataset import ParquetDataset + from .avro_dataset import AvroDataset from .amazon_s3_dataset import AmazonS3Dataset from .activity_policy import ActivityPolicy from .azure_function_activity import AzureFunctionActivity @@ -904,6 +909,7 @@ from .delimited_text_read_settings import DelimitedTextReadSettings from .delimited_text_source import DelimitedTextSource from .parquet_source import ParquetSource + from .avro_source import AvroSource from .copy_source import CopySource from .lookup_activity import LookupActivity from .azure_data_explorer_command_activity import AzureDataExplorerCommandActivity @@ -956,12 +962,14 @@ from .azure_blob_storage_write_settings import AzureBlobStorageWriteSettings from .store_write_settings import StoreWriteSettings from .parquet_sink import ParquetSink + from .delimited_text_write_settings import DelimitedTextWriteSettings + from .format_write_settings import FormatWriteSettings + from .avro_write_settings import AvroWriteSettings + from .avro_sink import AvroSink from .azure_table_sink import AzureTableSink from .azure_queue_sink import AzureQueueSink from .sap_cloud_for_customer_sink import SapCloudForCustomerSink from .azure_postgre_sql_sink import AzurePostgreSqlSink - from .format_write_settings import FormatWriteSettings - from 
.delimited_text_write_settings import DelimitedTextWriteSettings from .delimited_text_sink import DelimitedTextSink from .copy_sink import CopySink from .copy_activity import CopyActivity @@ -1062,6 +1070,7 @@ SybaseAuthenticationType, DynamicsDeploymentType, DynamicsAuthenticationType, + AvroCompressionCodec, AzureFunctionActivityMethod, WebActivityMethod, NetezzaPartitionOption, @@ -1371,6 +1380,7 @@ 'BinaryDataset', 'DelimitedTextDataset', 'ParquetDataset', + 'AvroDataset', 'AmazonS3Dataset', 'ActivityPolicy', 'AzureFunctionActivity', @@ -1485,6 +1495,7 @@ 'DelimitedTextReadSettings', 'DelimitedTextSource', 'ParquetSource', + 'AvroSource', 'CopySource', 'LookupActivity', 'AzureDataExplorerCommandActivity', @@ -1537,12 +1548,14 @@ 'AzureBlobStorageWriteSettings', 'StoreWriteSettings', 'ParquetSink', + 'DelimitedTextWriteSettings', + 'FormatWriteSettings', + 'AvroWriteSettings', + 'AvroSink', 'AzureTableSink', 'AzureQueueSink', 'SapCloudForCustomerSink', 'AzurePostgreSqlSink', - 'FormatWriteSettings', - 'DelimitedTextWriteSettings', 'DelimitedTextSink', 'CopySink', 'CopyActivity', @@ -1642,6 +1655,7 @@ 'SybaseAuthenticationType', 'DynamicsDeploymentType', 'DynamicsAuthenticationType', + 'AvroCompressionCodec', 'AzureFunctionActivityMethod', 'WebActivityMethod', 'NetezzaPartitionOption', diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset.py new file mode 100644 index 000000000000..d206ac99ab85 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset.py @@ -0,0 +1,83 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AvroDataset(Dataset): + """Avro dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. 
+ :type type: str + :param location: Required. The location of the avro storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param avro_compression_codec: Possible values include: 'none', 'deflate', + 'snappy', 'xz', 'bzip2' + :type avro_compression_codec: str or + ~azure.mgmt.datafactory.models.AvroCompressionCodec + :param avro_compression_level: + :type avro_compression_level: int + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + 'avro_compression_level': {'maximum': 9, 'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'}, + 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(AvroDataset, self).__init__(**kwargs) + self.location = kwargs.get('location', None) + self.avro_compression_codec = kwargs.get('avro_compression_codec', None) + self.avro_compression_level = kwargs.get('avro_compression_level', None) + self.type = 'Avro' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset_py3.py new file mode 100644 index 000000000000..f0f44dbbd786 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset_py3.py @@ -0,0 +1,83 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AvroDataset(Dataset): + """Avro dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
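A minimal sketch of the Avro dataset defined above, assuming the AzureBlobStorageLocation model from this same package satisfies the required DatasetLocation; the container, path, and linked service name are placeholders:

from azure.mgmt.datafactory.models import (
    AvroDataset,
    AzureBlobStorageLocation,
    LinkedServiceReference,
)

avro = AvroDataset(
    linked_service_name=LinkedServiceReference(reference_name='BlobLinkedService'),
    location=AzureBlobStorageLocation(
        container='data', folder_path='events', file_name='events.avro'),
    avro_compression_codec='deflate',
    avro_compression_level=5,  # validated against the 1-9 range declared above
)
assert avro.type == 'Avro'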
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the avro storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param avro_compression_codec: Possible values include: 'none', 'deflate', + 'snappy', 'xz', 'bzip2' + :type avro_compression_codec: str or + ~azure.mgmt.datafactory.models.AvroCompressionCodec + :param avro_compression_level: + :type avro_compression_level: int + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + 'avro_compression_level': {'maximum': 9, 'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'}, + 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, + } + + def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, avro_compression_codec=None, avro_compression_level: int=None, **kwargs) -> None: + super(AvroDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.location = location + self.avro_compression_codec = avro_compression_codec + self.avro_compression_level = avro_compression_level + self.type = 'Avro' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink.py new file mode 100644 index 000000000000..34d4ceb1e0f6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class AvroSink(CopySink): + """A copy activity Avro sink. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Avro store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :param format_settings: Avro format settings. + :type format_settings: ~azure.mgmt.datafactory.models.AvroWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'}, + } + + def __init__(self, **kwargs): + super(AvroSink, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) + self.type = 'AvroSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink_py3.py new file mode 100644 index 000000000000..16363092dff2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AvroSink(CopySink): + """A copy activity Avro sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. 
Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Avro store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :param format_settings: Avro format settings. + :type format_settings: ~azure.mgmt.datafactory.models.AvroWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: + super(AvroSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.format_settings = format_settings + self.type = 'AvroSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source.py new file mode 100644 index 000000000000..3ea2e7a2a76f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class AvroSource(CopySource): + """A copy activity Avro source. + + All required parameters must be populated in order to send to Azure. 
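A minimal sketch of wiring the Avro sink together with its write settings; note that AvroWriteSettings takes its discriminator as a plain required str (per the py3 signature above), and AzureBlobStorageWriteSettings is an assumed choice of StoreWriteSettings from this package:

from azure.mgmt.datafactory.models import (
    AvroSink,
    AvroWriteSettings,
    AzureBlobStorageWriteSettings,
)

sink = AvroSink(
    store_settings=AzureBlobStorageWriteSettings(max_concurrent_connections=4),
    format_settings=AvroWriteSettings(
        type='AvroWriteSettings',  # the write-settings type discriminator
        record_name='Event',  # hypothetical top-level record name required by the Avro spec
        record_namespace='com.example',
    ),
)
assert sink.type == 'AvroSink'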
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Avro store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__(self, **kwargs): + super(AvroSource, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.type = 'AvroSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source_py3.py new file mode 100644 index 000000000000..74b5e6db0fe2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source_py3.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AvroSource(CopySource): + """A copy activity Avro source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Avro store settings. 
+ :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + super(AvroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.type = 'AvroSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings.py new file mode 100644 index 000000000000..ec068ee29885 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .format_write_settings import FormatWriteSettings + + +class AvroWriteSettings(FormatWriteSettings): + """Avro write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param record_name: Top level record name in write result, which is + required in AVRO spec. + :type record_name: str + :param record_namespace: Record namespace in the write result. 
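A minimal sketch of the matching Avro source; AzureBlobStorageReadSettings is an assumed choice of StoreReadSettings, and the wildcard is a placeholder:

from azure.mgmt.datafactory.models import AvroSource, AzureBlobStorageReadSettings

source = AvroSource(
    store_settings=AzureBlobStorageReadSettings(
        recursive=True, wildcard_file_name='*.avro'),
)
assert source.type == 'AvroSource'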
+ :type record_namespace: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'record_name': {'key': 'recordName', 'type': 'str'}, + 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AvroWriteSettings, self).__init__(**kwargs) + self.record_name = kwargs.get('record_name', None) + self.record_namespace = kwargs.get('record_namespace', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings_py3.py new file mode 100644 index 000000000000..d14ebc4d1d29 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .format_write_settings_py3 import FormatWriteSettings + + +class AvroWriteSettings(FormatWriteSettings): + """Avro write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param record_name: Top level record name in write result, which is + required in AVRO spec. + :type record_name: str + :param record_namespace: Record namespace in the write result.
+ :type record_namespace: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'record_name': {'key': 'recordName', 'type': 'str'}, + 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, + } + + def __init__(self, *, type: str, additional_properties=None, record_name: str=None, record_namespace: str=None, **kwargs) -> None: + super(AvroWriteSettings, self).__init__(additional_properties=additional_properties, type=type, **kwargs) + self.record_name = record_name + self.record_namespace = record_namespace diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py index 7fbd06c94943..c0e000914325 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py @@ -22,8 +22,8 @@ class CopySink(Model): AzureSearchIndexSink, AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, DocumentDbCollectionSink, FileSystemSink, BlobSink, BinarySink, - ParquetSink, AzureTableSink, AzureQueueSink, SapCloudForCustomerSink, - AzurePostgreSqlSink, DelimitedTextSink + ParquetSink, AvroSink, AzureTableSink, AzureQueueSink, + SapCloudForCustomerSink, AzurePostgreSqlSink, DelimitedTextSink All required parameters must be populated in order to send to Azure. @@ -67,7 +67,7 @@ class CopySink(Model): } _subtype_map = { - 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'DelimitedTextSink': 'DelimitedTextSink'} + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 
'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'DelimitedTextSink': 'DelimitedTextSink'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py index 5207aa592b26..ae60b61e61c3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py @@ -22,8 +22,8 @@ class CopySink(Model): AzureSearchIndexSink, AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, DocumentDbCollectionSink, FileSystemSink, BlobSink, BinarySink, - ParquetSink, AzureTableSink, AzureQueueSink, SapCloudForCustomerSink, - AzurePostgreSqlSink, DelimitedTextSink + ParquetSink, AvroSink, AzureTableSink, AzureQueueSink, + SapCloudForCustomerSink, AzurePostgreSqlSink, DelimitedTextSink All required parameters must be populated in order to send to Azure. @@ -67,7 +67,7 @@ class CopySink(Model): } _subtype_map = { - 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'DelimitedTextSink': 'DelimitedTextSink'} + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 
'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'DelimitedTextSink': 'DelimitedTextSink'} } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py index b470cbbb948b..b34d47b55280 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py @@ -36,7 +36,7 @@ class CopySource(Model): MicrosoftAccessSource, InformixSource, RelationalSource, CommonDataServiceForAppsSource, DynamicsCrmSource, DynamicsSource, DocumentDbCollectionSource, BlobSource, AzureTableSource, BinarySource, - DelimitedTextSource, ParquetSource + DelimitedTextSource, ParquetSource, AvroSource All required parameters must be populated in order to send to Azure. @@ -71,7 +71,7 @@ class CopySource(Model): } _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 
'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'ODataSource': 'ODataSource', 'SapBwSource': 'SapBwSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'BinarySource': 'BinarySource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'ODataSource': 'ODataSource', 'SapBwSource': 'SapBwSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 
'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'BinarySource': 'BinarySource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'AvroSource': 'AvroSource'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py index d66045e6f5b4..867d77421f95 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py @@ -36,7 +36,7 @@ class CopySource(Model): MicrosoftAccessSource, InformixSource, RelationalSource, CommonDataServiceForAppsSource, DynamicsCrmSource, DynamicsSource, DocumentDbCollectionSource, BlobSource, AzureTableSource, BinarySource, - DelimitedTextSource, ParquetSource + DelimitedTextSource, ParquetSource, AvroSource All required parameters must be populated in order to send to Azure. @@ -71,7 +71,7 @@ class CopySource(Model): } _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 
'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'ODataSource': 'ODataSource', 'SapBwSource': 'SapBwSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'BinarySource': 'BinarySource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'ODataSource': 'ODataSource', 'SapBwSource': 'SapBwSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 
'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'BinarySource': 'BinarySource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'AvroSource': 'AvroSource'} } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py index 19a322baf76e..45448073f831 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py @@ -333,6 +333,15 @@ class DynamicsAuthenticationType(str, Enum): ifd = "Ifd" +class AvroCompressionCodec(str, Enum): + + none = "none" + deflate = "deflate" + snappy = "snappy" + xz = "xz" + bzip2 = "bzip2" + + class AzureFunctionActivityMethod(str, Enum): get = "GET" diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py index eb856ba35b68..ecafb9588fce 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py @@ -45,7 +45,7 @@ class Dataset(Model): DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, AzureSqlTableDataset, AzureTableDataset, AzureBlobDataset, BinaryDataset, - DelimitedTextDataset, ParquetDataset, AmazonS3Dataset + DelimitedTextDataset, ParquetDataset, AvroDataset, AmazonS3Dataset All required parameters must be populated in order to send to Azure. 
@@ -95,7 +95,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 
'AmazonS3Dataset'} + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def 
__init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py index 6b6486b61fe3..41614b28432a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py @@ -45,7 +45,7 @@ class Dataset(Model): DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, AzureSqlTableDataset, AzureTableDataset, AzureBlobDataset, BinaryDataset, - DelimitedTextDataset, ParquetDataset, AmazonS3Dataset + DelimitedTextDataset, ParquetDataset, AvroDataset, AmazonS3Dataset All required parameters must be populated in order to send to Azure. @@ -95,7 +95,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 
'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 
'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'}
     }
 
     def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None:

From 64a07f2df5854d798071dda7e84c033a1584b286 Mon Sep 17 00:00:00 2001
From: Azure SDK for Python bot
Date: Thu, 1 Aug 2019 23:17:19 -0700
Subject: [PATCH 15/30] Generated from ccc8c92e96ab27329cf637c7214ebb35da8dce23
 (#6625)

Fix model validation
---
 .../data_factory_management_client.py         | 10 +--
 .../azure/mgmt/datafactory/models/__init__.py |  9 +++
 .../models/azure_maria_db_linked_service.py   | 69 ++++++++++++++++++
 .../azure_maria_db_linked_service_py3.py      | 69 ++++++++++++++++++
 .../models/azure_maria_db_source.py           | 57 +++++++++++++++
 .../models/azure_maria_db_source_py3.py       | 57 +++++++++++++++
 .../models/azure_maria_db_table_dataset.py    | 72 +++++++++++++++++++
 .../azure_maria_db_table_dataset_py3.py       | 72 +++++++++++++++++++
 .../mgmt/datafactory/models/copy_source.py    | 22 +++---
 .../datafactory/models/copy_source_py3.py     | 22 +++---
 .../azure/mgmt/datafactory/models/dataset.py  | 36 +++++-----
 .../mgmt/datafactory/models/dataset_py3.py    | 36 +++++-----
 .../mgmt/datafactory/models/linked_service.py | 18 ++---
 .../datafactory/models/linked_service_py3.py  | 18 ++---
 .../datafactory/models/office365_source.py    | 26 +++++++
 .../models/office365_source_py3.py            | 28 +++++++-
 .../mgmt/datafactory/models/rest_source.py    | 21 ++++++
 .../datafactory/models/rest_source_py3.py     | 23 +++++-
 .../datafactory/models/sap_open_hub_source.py | 13 ++++
 .../models/sap_open_hub_source_py3.py         | 15 +++-
 .../mgmt/datafactory/operations/__init__.py   |  4 +-
 .../operations/trigger_runs_operations.py     | 59 +++++++++++++++
 22 files changed, 670 insertions(+), 86 deletions(-)
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset_py3.py

diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/data_factory_management_client.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/data_factory_management_client.py
index e49abccce72a..bb8a2a22fd77 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/data_factory_management_client.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/data_factory_management_client.py
@@ -25,8 +25,8 @@
 from .operations.pipeline_runs_operations import PipelineRunsOperations
 from .operations.activity_runs_operations import ActivityRunsOperations
 from .operations.triggers_operations import TriggersOperations
-from .operations.rerun_triggers_operations import RerunTriggersOperations
 from .operations.trigger_runs_operations import TriggerRunsOperations
+from .operations.rerun_triggers_operations import RerunTriggersOperations
 from . import models
@@ -92,10 +92,10 @@ class DataFactoryManagementClient(SDKClient):
     :vartype activity_runs: azure.mgmt.datafactory.operations.ActivityRunsOperations
     :ivar triggers: Triggers operations
     :vartype triggers: azure.mgmt.datafactory.operations.TriggersOperations
-    :ivar rerun_triggers: RerunTriggers operations
-    :vartype rerun_triggers: azure.mgmt.datafactory.operations.RerunTriggersOperations
     :ivar trigger_runs: TriggerRuns operations
     :vartype trigger_runs: azure.mgmt.datafactory.operations.TriggerRunsOperations
+    :ivar rerun_triggers: RerunTriggers operations
+    :vartype rerun_triggers: azure.mgmt.datafactory.operations.RerunTriggersOperations
     :param credentials: Credentials needed for the client to connect to Azure.
     :type credentials: :mod:`A msrestazure Credentials
@@ -140,7 +140,7 @@ def __init__(
             self._client, self.config, self._serialize, self._deserialize)
         self.triggers = TriggersOperations(
             self._client, self.config, self._serialize, self._deserialize)
-        self.rerun_triggers = RerunTriggersOperations(
-            self._client, self.config, self._serialize, self._deserialize)
         self.trigger_runs = TriggerRunsOperations(
             self._client, self.config, self._serialize, self._deserialize)
+        self.rerun_triggers = RerunTriggersOperations(
+            self._client, self.config, self._serialize, self._deserialize)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
index 55e2b9a103bc..8ec6a5fa8fb4 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
@@ -119,6 +119,7 @@
     from .phoenix_linked_service_py3 import PhoenixLinkedService
     from .paypal_linked_service_py3 import PaypalLinkedService
     from .marketo_linked_service_py3 import MarketoLinkedService
+    from .azure_maria_db_linked_service_py3 import AzureMariaDBLinkedService
     from .maria_db_linked_service_py3 import MariaDBLinkedService
     from .magento_linked_service_py3 import MagentoLinkedService
     from .jira_linked_service_py3 import JiraLinkedService
@@ -208,6 +209,7 @@
     from .phoenix_object_dataset_py3 import PhoenixObjectDataset
     from .paypal_object_dataset_py3 import PaypalObjectDataset
     from .marketo_object_dataset_py3 import MarketoObjectDataset
+    from .azure_maria_db_table_dataset_py3 import AzureMariaDBTableDataset
     from .maria_db_table_dataset_py3 import MariaDBTableDataset
     from .magento_object_dataset_py3 import MagentoObjectDataset
     from .jira_object_dataset_py3 import JiraObjectDataset
@@ -324,6 +326,7 @@
     from .phoenix_source_py3 import PhoenixSource
     from .paypal_source_py3 import PaypalSource
     from .marketo_source_py3 import MarketoSource
+    from .azure_maria_db_source_py3 import AzureMariaDBSource
     from .maria_db_source_py3 import MariaDBSource
     from .magento_source_py3 import MagentoSource
     from .jira_source_py3 import JiraSource
@@ -624,6 +627,7 @@
     from .phoenix_linked_service import PhoenixLinkedService
     from .paypal_linked_service import PaypalLinkedService
     from .marketo_linked_service import MarketoLinkedService
+    from .azure_maria_db_linked_service import AzureMariaDBLinkedService
     from .maria_db_linked_service import MariaDBLinkedService
     from .magento_linked_service import MagentoLinkedService
     from .jira_linked_service import JiraLinkedService
@@ -713,6 +717,7 @@
     from .phoenix_object_dataset import PhoenixObjectDataset
     from .paypal_object_dataset import PaypalObjectDataset
     from .marketo_object_dataset import MarketoObjectDataset
+    from .azure_maria_db_table_dataset import AzureMariaDBTableDataset
     from .maria_db_table_dataset import MariaDBTableDataset
     from .magento_object_dataset import MagentoObjectDataset
     from .jira_object_dataset import JiraObjectDataset
@@ -829,6 +834,7 @@
     from .phoenix_source import PhoenixSource
     from .paypal_source import PaypalSource
     from .marketo_source import MarketoSource
+    from .azure_maria_db_source import AzureMariaDBSource
     from .maria_db_source import MariaDBSource
     from .magento_source import MagentoSource
     from .jira_source import JiraSource
@@ -1210,6 +1216,7 @@
     'PhoenixLinkedService',
     'PaypalLinkedService',
     'MarketoLinkedService',
+    'AzureMariaDBLinkedService',
     'MariaDBLinkedService',
     'MagentoLinkedService',
     'JiraLinkedService',
@@ -1299,6 +1306,7 @@
     'PhoenixObjectDataset',
     'PaypalObjectDataset',
     'MarketoObjectDataset',
+    'AzureMariaDBTableDataset',
     'MariaDBTableDataset',
     'MagentoObjectDataset',
     'JiraObjectDataset',
@@ -1415,6 +1423,7 @@
     'PhoenixSource',
     'PaypalSource',
     'MarketoSource',
+    'AzureMariaDBSource',
     'MariaDBSource',
     'MagentoSource',
     'JiraSource',
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service.py
new file mode 100644
index 000000000000..d2dc7db88851
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service.py
@@ -0,0 +1,69 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class AzureMariaDBLinkedService(LinkedService):
+    """Azure Database for MariaDB linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param connection_string: An ODBC connection string. Type: string,
+     SecureString or AzureKeyVaultSecretReference.
+    :type connection_string: object
+    :param pwd: The Azure Key Vault secret reference of the password in the
+     connection string.
+    :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+        'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureMariaDBLinkedService, self).__init__(**kwargs)
+        self.connection_string = kwargs.get('connection_string', None)
+        self.pwd = kwargs.get('pwd', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
+        self.type = 'AzureMariaDB'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service_py3.py
new file mode 100644
index 000000000000..c80015ed6b45
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service_py3.py
@@ -0,0 +1,69 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class AzureMariaDBLinkedService(LinkedService):
+    """Azure Database for MariaDB linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param connection_string: An ODBC connection string. Type: string,
+     SecureString or AzureKeyVaultSecretReference.
+    :type connection_string: object
+    :param pwd: The Azure Key Vault secret reference of the password in the
+     connection string.
+    :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+        'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None:
+        super(AzureMariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+        self.connection_string = connection_string
+        self.pwd = pwd
+        self.encrypted_credential = encrypted_credential
+        self.type = 'AzureMariaDB'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source.py
new file mode 100644
index 000000000000..229e6f4311e3
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source import CopySource
+
+
+class AzureMariaDBSource(CopySource):
+    """A copy activity Azure MariaDB source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param query: A query to retrieve data from source. Type: string (or
+     Expression with resultType string).
+    :type query: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureMariaDBSource, self).__init__(**kwargs)
+        self.query = kwargs.get('query', None)
+        self.type = 'AzureMariaDBSource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source_py3.py
new file mode 100644
index 000000000000..11358f899e51
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source_py3.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source_py3 import CopySource
+
+
+class AzureMariaDBSource(CopySource):
+    """A copy activity Azure MariaDB source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param query: A query to retrieve data from source. Type: string (or
+     Expression with resultType string).
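# A minimal usage sketch (illustrative, not part of the generated diff) for the
# AzureMariaDBSource model defined above. Keyword names come from the generated
# __init__ signatures; the query text and retry values are hypothetical examples.
from azure.mgmt.datafactory.models import AzureMariaDBSource

source = AzureMariaDBSource(
    query="SELECT * FROM sales",   # hypothetical query
    source_retry_count=3,
    source_retry_wait="00:00:30",  # matches the documented duration pattern
)
assert source.type == 'AzureMariaDBSource'  # discriminator set by the constructor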
+    :type query: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
+        super(AzureMariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.query = query
+        self.type = 'AzureMariaDBSource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset.py
new file mode 100644
index 000000000000..a06c722279f2
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset import Dataset
+
+
+class AzureMariaDBTableDataset(Dataset):
+    """Azure Database for MariaDB dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param table_name: The table name. Type: string (or Expression with
+     resultType string).
+    :type table_name: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureMariaDBTableDataset, self).__init__(**kwargs)
+        self.table_name = kwargs.get('table_name', None)
+        self.type = 'AzureMariaDBTable'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset_py3.py
new file mode 100644
index 000000000000..9c6fd648af20
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset_py3.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_py3 import Dataset
+
+
+class AzureMariaDBTableDataset(Dataset):
+    """Azure Database for MariaDB dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param table_name: The table name. Type: string (or Expression with
+     resultType string).
+    :type table_name: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+    }
+
+    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None:
+        super(AzureMariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+        self.table_name = table_name
+        self.type = 'AzureMariaDBTable'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py
index b34d47b55280..72ad6e8e136d 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py
@@ -21,16 +21,16 @@ class CopySource(Model):
     SalesforceMarketingCloudSource, VerticaSource, NetezzaSource, ZohoSource,
     XeroSource, SquareSource, SparkSource, ShopifySource, ServiceNowSource,
     QuickBooksSource, PrestoSource, PhoenixSource, PaypalSource, MarketoSource,
-    MariaDBSource, MagentoSource, JiraSource, ImpalaSource, HubspotSource,
-    HiveSource, HBaseSource, GreenplumSource, GoogleBigQuerySource,
-    EloquaSource, DrillSource, CouchbaseSource, ConcurSource,
-    AzurePostgreSqlSource, AmazonMWSSource, HttpSource, AzureBlobFSSource,
-    AzureDataLakeStoreSource, Office365Source, CosmosDbMongoDbApiSource,
-    MongoDbV2Source, MongoDbSource, CassandraSource, WebSource, TeradataSource,
-    OracleSource, AzureDataExplorerSource, AzureMySqlSource, HdfsSource,
-    FileSystemSource, SqlDWSource, SqlMISource, AzureSqlSource,
-    SqlServerSource, SqlSource, RestSource, SapTableSource, SapOpenHubSource,
-    SapHanaSource, SapEccSource, SapCloudForCustomerSource,
+    AzureMariaDBSource, MariaDBSource, MagentoSource, JiraSource, ImpalaSource,
+    HubspotSource, HiveSource, HBaseSource, GreenplumSource,
+    GoogleBigQuerySource, EloquaSource, DrillSource, CouchbaseSource,
+    ConcurSource, AzurePostgreSqlSource, AmazonMWSSource, HttpSource,
+    AzureBlobFSSource, AzureDataLakeStoreSource, Office365Source,
+    CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, CassandraSource,
+    WebSource, TeradataSource, OracleSource, AzureDataExplorerSource,
+    AzureMySqlSource, HdfsSource, FileSystemSource, SqlDWSource, SqlMISource,
+    AzureSqlSource, SqlServerSource, SqlSource, RestSource, SapTableSource,
+    SapOpenHubSource, SapHanaSource, SapEccSource, SapCloudForCustomerSource,
     SalesforceServiceCloudSource, SalesforceSource, ODataSource, SapBwSource,
     SybaseSource, PostgreSqlSource, MySqlSource, OdbcSource, Db2Source,
     MicrosoftAccessSource,
InformixSource, RelationalSource, @@ -71,7 +71,7 @@ class CopySource(Model): } _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'ODataSource': 'ODataSource', 'SapBwSource': 'SapBwSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'BinarySource': 'BinarySource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'AvroSource': 'AvroSource'} + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 
'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'ODataSource': 'ODataSource', 'SapBwSource': 'SapBwSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'BinarySource': 'BinarySource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'AvroSource': 'AvroSource'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py index 867d77421f95..960c56774e4c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py @@ -21,16 +21,16 @@ class CopySource(Model): SalesforceMarketingCloudSource, VerticaSource, NetezzaSource, ZohoSource, XeroSource, SquareSource, SparkSource, ShopifySource, ServiceNowSource, QuickBooksSource, PrestoSource, PhoenixSource, PaypalSource, MarketoSource, - MariaDBSource, MagentoSource, JiraSource, 
ImpalaSource, HubspotSource, - HiveSource, HBaseSource, GreenplumSource, GoogleBigQuerySource, - EloquaSource, DrillSource, CouchbaseSource, ConcurSource, - AzurePostgreSqlSource, AmazonMWSSource, HttpSource, AzureBlobFSSource, - AzureDataLakeStoreSource, Office365Source, CosmosDbMongoDbApiSource, - MongoDbV2Source, MongoDbSource, CassandraSource, WebSource, TeradataSource, - OracleSource, AzureDataExplorerSource, AzureMySqlSource, HdfsSource, - FileSystemSource, SqlDWSource, SqlMISource, AzureSqlSource, - SqlServerSource, SqlSource, RestSource, SapTableSource, SapOpenHubSource, - SapHanaSource, SapEccSource, SapCloudForCustomerSource, + AzureMariaDBSource, MariaDBSource, MagentoSource, JiraSource, ImpalaSource, + HubspotSource, HiveSource, HBaseSource, GreenplumSource, + GoogleBigQuerySource, EloquaSource, DrillSource, CouchbaseSource, + ConcurSource, AzurePostgreSqlSource, AmazonMWSSource, HttpSource, + AzureBlobFSSource, AzureDataLakeStoreSource, Office365Source, + CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, CassandraSource, + WebSource, TeradataSource, OracleSource, AzureDataExplorerSource, + AzureMySqlSource, HdfsSource, FileSystemSource, SqlDWSource, SqlMISource, + AzureSqlSource, SqlServerSource, SqlSource, RestSource, SapTableSource, + SapOpenHubSource, SapHanaSource, SapEccSource, SapCloudForCustomerSource, SalesforceServiceCloudSource, SalesforceSource, ODataSource, SapBwSource, SybaseSource, PostgreSqlSource, MySqlSource, OdbcSource, Db2Source, MicrosoftAccessSource, InformixSource, RelationalSource, @@ -71,7 +71,7 @@ class CopySource(Model): } _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 
'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'ODataSource': 'ODataSource', 'SapBwSource': 'SapBwSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'BinarySource': 'BinarySource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'AvroSource': 'AvroSource'} + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 
'SalesforceSource', 'ODataSource': 'ODataSource', 'SapBwSource': 'SapBwSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'BinarySource': 'BinarySource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'AvroSource': 'AvroSource'} } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py index ecafb9588fce..036e1912647e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py @@ -24,23 +24,23 @@ class Dataset(Model): XeroObjectDataset, SquareObjectDataset, SparkObjectDataset, ShopifyObjectDataset, ServiceNowObjectDataset, QuickBooksObjectDataset, PrestoObjectDataset, PhoenixObjectDataset, PaypalObjectDataset, - MarketoObjectDataset, MariaDBTableDataset, MagentoObjectDataset, - JiraObjectDataset, ImpalaObjectDataset, HubspotObjectDataset, - HiveObjectDataset, HBaseObjectDataset, GreenplumTableDataset, - GoogleBigQueryObjectDataset, EloquaObjectDataset, DrillTableDataset, - CouchbaseTableDataset, ConcurObjectDataset, AzurePostgreSqlTableDataset, - AmazonMWSObjectDataset, HttpDataset, AzureSearchIndexDataset, - WebTableDataset, SapTableResourceDataset, RestResourceDataset, - SqlServerTableDataset, SapOpenHubTableDataset, SapHanaTableDataset, - SapEccResourceDataset, SapCloudForCustomerResourceDataset, - SapBwCubeDataset, SybaseTableDataset, SalesforceServiceCloudObjectDataset, - SalesforceObjectDataset, MicrosoftAccessTableDataset, - PostgreSqlTableDataset, MySqlTableDataset, OdbcTableDataset, - InformixTableDataset, RelationalTableDataset, AzureMySqlTableDataset, - TeradataTableDataset, OracleTableDataset, ODataResourceDataset, - CosmosDbMongoDbApiCollectionDataset, MongoDbV2CollectionDataset, - MongoDbCollectionDataset, FileShareDataset, Office365Dataset, - AzureBlobFSDataset, AzureDataLakeStoreDataset, + MarketoObjectDataset, AzureMariaDBTableDataset, MariaDBTableDataset, + MagentoObjectDataset, JiraObjectDataset, ImpalaObjectDataset, + HubspotObjectDataset, HiveObjectDataset, HBaseObjectDataset, + GreenplumTableDataset, GoogleBigQueryObjectDataset, EloquaObjectDataset, + DrillTableDataset, CouchbaseTableDataset, ConcurObjectDataset, + AzurePostgreSqlTableDataset, AmazonMWSObjectDataset, HttpDataset, + AzureSearchIndexDataset, WebTableDataset, SapTableResourceDataset, + RestResourceDataset, SqlServerTableDataset, SapOpenHubTableDataset, + SapHanaTableDataset, SapEccResourceDataset, + SapCloudForCustomerResourceDataset, SapBwCubeDataset, SybaseTableDataset, + SalesforceServiceCloudObjectDataset, SalesforceObjectDataset, + MicrosoftAccessTableDataset, PostgreSqlTableDataset, MySqlTableDataset, + OdbcTableDataset, InformixTableDataset, RelationalTableDataset, + 
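These mirrored docstring and _subtype_map updates are what make the new discriminator value round-trip: during deserialization, msrest reads the polymorphic 'type' key from the wire payload, looks it up in the base class's _subtype_map, and instantiates the matching subclass. A sketch of that dispatch for the new copy source, assuming the AzureMariaDBSource model generated elsewhere in this patch exposes the usual query property (the query text is a placeholder):

    from msrest import Deserializer

    from azure.mgmt.datafactory import models

    # Build the class registry the generated client hands to msrest.
    client_models = {k: v for k, v in models.__dict__.items()
                     if isinstance(v, type)}
    deserialize = Deserializer(client_models)

    payload = {'type': 'AzureMariaDBSource', 'query': 'SELECT * FROM orders'}

    # Requesting the base CopySource yields the subclass that _subtype_map
    # registers under the 'AzureMariaDBSource' discriminator.
    source = deserialize('CopySource', payload)
    assert type(source).__name__ == 'AzureMariaDBSource'

The Dataset and LinkedService registries updated below follow the identical pattern with the 'AzureMariaDBTable' and 'AzureMariaDB' keys.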
AzureMySqlTableDataset, TeradataTableDataset, OracleTableDataset, + ODataResourceDataset, CosmosDbMongoDbApiCollectionDataset, + MongoDbV2CollectionDataset, MongoDbCollectionDataset, FileShareDataset, + Office365Dataset, AzureBlobFSDataset, AzureDataLakeStoreDataset, CommonDataServiceForAppsEntityDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, @@ -95,7 +95,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 
'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 
'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py index 41614b28432a..4a7ab9e3db12 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py @@ -24,23 +24,23 @@ class Dataset(Model): XeroObjectDataset, SquareObjectDataset, SparkObjectDataset, ShopifyObjectDataset, ServiceNowObjectDataset, QuickBooksObjectDataset, PrestoObjectDataset, PhoenixObjectDataset, PaypalObjectDataset, - MarketoObjectDataset, MariaDBTableDataset, MagentoObjectDataset, - JiraObjectDataset, ImpalaObjectDataset, HubspotObjectDataset, - HiveObjectDataset, HBaseObjectDataset, GreenplumTableDataset, - GoogleBigQueryObjectDataset, EloquaObjectDataset, DrillTableDataset, - CouchbaseTableDataset, ConcurObjectDataset, AzurePostgreSqlTableDataset, - AmazonMWSObjectDataset, HttpDataset, AzureSearchIndexDataset, - WebTableDataset, SapTableResourceDataset, RestResourceDataset, - SqlServerTableDataset, SapOpenHubTableDataset, SapHanaTableDataset, - SapEccResourceDataset, SapCloudForCustomerResourceDataset, - SapBwCubeDataset, SybaseTableDataset, SalesforceServiceCloudObjectDataset, - SalesforceObjectDataset, MicrosoftAccessTableDataset, - PostgreSqlTableDataset, MySqlTableDataset, OdbcTableDataset, - InformixTableDataset, RelationalTableDataset, AzureMySqlTableDataset, - TeradataTableDataset, OracleTableDataset, ODataResourceDataset, - CosmosDbMongoDbApiCollectionDataset, MongoDbV2CollectionDataset, - MongoDbCollectionDataset, FileShareDataset, Office365Dataset, - AzureBlobFSDataset, AzureDataLakeStoreDataset, + MarketoObjectDataset, AzureMariaDBTableDataset, MariaDBTableDataset, + MagentoObjectDataset, JiraObjectDataset, ImpalaObjectDataset, + HubspotObjectDataset, HiveObjectDataset, HBaseObjectDataset, + GreenplumTableDataset, GoogleBigQueryObjectDataset, EloquaObjectDataset, + DrillTableDataset, CouchbaseTableDataset, ConcurObjectDataset, + AzurePostgreSqlTableDataset, AmazonMWSObjectDataset, HttpDataset, + AzureSearchIndexDataset, WebTableDataset, SapTableResourceDataset, + RestResourceDataset, SqlServerTableDataset, SapOpenHubTableDataset, + SapHanaTableDataset, SapEccResourceDataset, + SapCloudForCustomerResourceDataset, SapBwCubeDataset, SybaseTableDataset, + SalesforceServiceCloudObjectDataset, SalesforceObjectDataset, + MicrosoftAccessTableDataset, PostgreSqlTableDataset, MySqlTableDataset, + OdbcTableDataset, InformixTableDataset, RelationalTableDataset, + AzureMySqlTableDataset, TeradataTableDataset, OracleTableDataset, + ODataResourceDataset, CosmosDbMongoDbApiCollectionDataset, + MongoDbV2CollectionDataset, MongoDbCollectionDataset, FileShareDataset, + Office365Dataset, AzureBlobFSDataset, AzureDataLakeStoreDataset, CommonDataServiceForAppsEntityDataset, DynamicsCrmEntityDataset, 
DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, @@ -95,7 +95,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 
'AzureBlobDataset', 'Binary': 'BinaryDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 
'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py index 6a11f632875d..2778a33fbb5a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py @@ -28,14 +28,14 @@ class LinkedService(Model): XeroLinkedService, SquareLinkedService, SparkLinkedService, ShopifyLinkedService, ServiceNowLinkedService, QuickBooksLinkedService, PrestoLinkedService, PhoenixLinkedService, PaypalLinkedService, - MarketoLinkedService, MariaDBLinkedService, MagentoLinkedService, - JiraLinkedService, ImpalaLinkedService, HubspotLinkedService, - HiveLinkedService, HBaseLinkedService, GreenplumLinkedService, - GoogleBigQueryLinkedService, EloquaLinkedService, DrillLinkedService, - CouchbaseLinkedService, ConcurLinkedService, AzurePostgreSqlLinkedService, - AmazonMWSLinkedService, SapHanaLinkedService, SapBWLinkedService, - SftpServerLinkedService, FtpServerLinkedService, HttpLinkedService, - AzureSearchLinkedService, CustomDataSourceLinkedService, + MarketoLinkedService, AzureMariaDBLinkedService, MariaDBLinkedService, + MagentoLinkedService, JiraLinkedService, ImpalaLinkedService, + HubspotLinkedService, HiveLinkedService, HBaseLinkedService, + GreenplumLinkedService, GoogleBigQueryLinkedService, EloquaLinkedService, + DrillLinkedService, CouchbaseLinkedService, ConcurLinkedService, + AzurePostgreSqlLinkedService, AmazonMWSLinkedService, SapHanaLinkedService, + SapBWLinkedService, SftpServerLinkedService, FtpServerLinkedService, + HttpLinkedService, AzureSearchLinkedService, CustomDataSourceLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, RestServiceLinkedService, SapOpenHubLinkedService, SapEccLinkedService, SapCloudForCustomerLinkedService, SalesforceServiceCloudLinkedService, @@ -89,7 +89,7 @@ class LinkedService(Model): } _subtype_map = { - 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 
'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 
'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py index 8fa23951db00..2b3e475c3075 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py @@ -28,14 +28,14 @@ class LinkedService(Model): XeroLinkedService, SquareLinkedService, SparkLinkedService, ShopifyLinkedService, ServiceNowLinkedService, QuickBooksLinkedService, PrestoLinkedService, PhoenixLinkedService, PaypalLinkedService, - MarketoLinkedService, MariaDBLinkedService, MagentoLinkedService, - JiraLinkedService, ImpalaLinkedService, 
HubspotLinkedService, - HiveLinkedService, HBaseLinkedService, GreenplumLinkedService, - GoogleBigQueryLinkedService, EloquaLinkedService, DrillLinkedService, - CouchbaseLinkedService, ConcurLinkedService, AzurePostgreSqlLinkedService, - AmazonMWSLinkedService, SapHanaLinkedService, SapBWLinkedService, - SftpServerLinkedService, FtpServerLinkedService, HttpLinkedService, - AzureSearchLinkedService, CustomDataSourceLinkedService, + MarketoLinkedService, AzureMariaDBLinkedService, MariaDBLinkedService, + MagentoLinkedService, JiraLinkedService, ImpalaLinkedService, + HubspotLinkedService, HiveLinkedService, HBaseLinkedService, + GreenplumLinkedService, GoogleBigQueryLinkedService, EloquaLinkedService, + DrillLinkedService, CouchbaseLinkedService, ConcurLinkedService, + AzurePostgreSqlLinkedService, AmazonMWSLinkedService, SapHanaLinkedService, + SapBWLinkedService, SftpServerLinkedService, FtpServerLinkedService, + HttpLinkedService, AzureSearchLinkedService, CustomDataSourceLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, RestServiceLinkedService, SapOpenHubLinkedService, SapEccLinkedService, SapCloudForCustomerLinkedService, SalesforceServiceCloudLinkedService, @@ -89,7 +89,7 @@ class LinkedService(Model): } _subtype_map = { - 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 
'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 
'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} } def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source.py index 8dff7a01ccaa..de19818aaa7f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source.py @@ -33,6 +33,22 @@ class Office365Source(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param allowed_groups: The groups containing all the users. Type: array of + strings (or Expression with resultType array of strings). + :type allowed_groups: object + :param user_scope_filter_uri: The user scope URI. Type: string (or + Expression with resultType string). + :type user_scope_filter_uri: object + :param date_filter_column: The column on which to apply the start time and + end time filters. Type: string (or Expression with resultType string). + :type date_filter_column: object + :param start_time: Start time of the requested range for this dataset. + Type: string (or Expression with resultType string). + :type start_time: object + :param end_time: End time of the requested range for this dataset. Type: + string (or Expression with resultType string).
+ :type end_time: object """ _validation = { @@ -45,8 +61,18 @@ class Office365Source(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'allowed_groups': {'key': 'allowedGroups', 'type': 'object'}, + 'user_scope_filter_uri': {'key': 'userScopeFilterUri', 'type': 'object'}, + 'date_filter_column': {'key': 'dateFilterColumn', 'type': 'object'}, + 'start_time': {'key': 'startTime', 'type': 'object'}, + 'end_time': {'key': 'endTime', 'type': 'object'}, } def __init__(self, **kwargs): super(Office365Source, self).__init__(**kwargs) + self.allowed_groups = kwargs.get('allowed_groups', None) + self.user_scope_filter_uri = kwargs.get('user_scope_filter_uri', None) + self.date_filter_column = kwargs.get('date_filter_column', None) + self.start_time = kwargs.get('start_time', None) + self.end_time = kwargs.get('end_time', None) self.type = 'Office365Source' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source_py3.py index 25ae6340ae01..fc2c4b095904 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source_py3.py @@ -33,6 +33,22 @@ class Office365Source(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param allowed_groups: The groups containing all the users. Type: array of + strings (or Expression with resultType array of strings). + :type allowed_groups: object + :param user_scope_filter_uri: The user scope URI. Type: string (or + Expression with resultType string). + :type user_scope_filter_uri: object + :param date_filter_column: The column on which to apply the start time and + end time filters. Type: string (or Expression with resultType string). + :type date_filter_column: object + :param start_time: Start time of the requested range for this dataset. + Type: string (or Expression with resultType string). + :type start_time: object + :param end_time: End time of the requested range for this dataset. Type: + string (or Expression with resultType string).
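Together the new properties let a copy activity scope an Office 365 extraction to specific groups and a date window on a chosen datetime column. A construction sketch using the Python 3 keyword-only signature added below; the group addresses, column, and times are placeholders:

    from azure.mgmt.datafactory.models import Office365Source

    # Restrict extraction to two groups and a one-month window on the
    # receivedDateTime column (all values are illustrative only).
    source = Office365Source(
        allowed_groups=['finance@contoso.com', 'hr@contoso.com'],
        date_filter_column='receivedDateTime',
        start_time='2019-04-01T00:00:00Z',
        end_time='2019-04-30T23:59:59Z',
    )

    assert source.type == 'Office365Source'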
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source.py index a8c7efca21e3..f32d4d67e427 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source.py @@ -33,6 +33,19 @@ class RestSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param request_method: The HTTP method used to call the RESTful API. The + default is GET. Type: string (or Expression with resultType string). + :type request_method: object + :param request_body: The HTTP request body to the RESTful API if + requestMethod is POST. Type: string (or Expression with resultType + string). + :type request_body: object + :param additional_headers: The additional HTTP headers in the request to + the RESTful API. Type: string (or Expression with resultType string). + :type additional_headers: object + :param pagination_rules: The pagination rules to compose next page + requests. Type: string (or Expression with resultType string). + :type pagination_rules: object :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:01:40.
Type: string (or Expression with @@ -54,12 +67,20 @@ class RestSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'request_method': {'key': 'requestMethod', 'type': 'object'}, + 'request_body': {'key': 'requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, + 'pagination_rules': {'key': 'paginationRules', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, 'request_interval': {'key': 'requestInterval', 'type': 'object'}, } def __init__(self, **kwargs): super(RestSource, self).__init__(**kwargs) + self.request_method = kwargs.get('request_method', None) + self.request_body = kwargs.get('request_body', None) + self.additional_headers = kwargs.get('additional_headers', None) + self.pagination_rules = kwargs.get('pagination_rules', None) self.http_request_timeout = kwargs.get('http_request_timeout', None) self.request_interval = kwargs.get('request_interval', None) self.type = 'RestSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source_py3.py index cf0878e050e0..5fcbb2f7a76d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source_py3.py @@ -33,6 +33,19 @@ class RestSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param request_method: The HTTP method used to call the RESTful API. The + default is GET. Type: string (or Expression with resultType string). + :type request_method: object + :param request_body: The HTTP request body to the RESTful API if + requestMethod is POST. Type: string (or Expression with resultType + string). + :type request_body: object + :param additional_headers: The additional HTTP headers in the request to + the RESTful API. Type: string (or Expression with resultType string). + :type additional_headers: object + :param pagination_rules: The pagination rules to compose next page + requests. Type: string (or Expression with resultType string). + :type pagination_rules: object :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string (or Expression with @@ -54,12 +67,20 @@ class RestSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'request_method': {'key': 'requestMethod', 'type': 'object'}, + 'request_body': {'key': 'requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, + 'pagination_rules': {'key': 'paginationRules', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, 'request_interval': {'key': 'requestInterval', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, http_request_timeout=None, request_interval=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, request_method=None, request_body=None, additional_headers=None, pagination_rules=None, http_request_timeout=None, request_interval=None, **kwargs) -> None: super(RestSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.request_method = request_method + self.request_body = request_body + self.additional_headers = additional_headers + self.pagination_rules = pagination_rules self.http_request_timeout = http_request_timeout self.request_interval = request_interval self.type = 'RestSource'
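A minimal sketch (not part of the generated patch) of the request-shaping properties RestSource gains above, assuming a build that includes this change; the body, header, and pagination values are illustrative only:

from azure.mgmt.datafactory.models import RestSource

# Source that POSTs a body to the RESTful API and composes next-page requests.
source = RestSource(
    request_method="POST",                      # the default is GET when omitted
    request_body='{"status": "active"}',        # used because requestMethod is POST
    additional_headers="x-example-header: 1",   # illustrative extra header
    pagination_rules="AbsoluteUrl=$.nextLink",  # illustrative next-page rule
    http_request_timeout="00:01:40",            # TimeSpan string; the documented default
)
print(source.type)  # 'RestSource'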
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source.py index ea98207a18cf..d6dcbda60e36 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source.py @@ -34,6 +34,15 @@ class SapOpenHubSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param exclude_last_request: Whether to exclude the records of the last + request. The default value is true. Type: boolean (or Expression with + resultType boolean). + :type exclude_last_request: object + :param base_request_id: The ID of the request for delta loading. Once it is + set, only data with requestId larger than the value of this property will + be retrieved. The default value is 0. Type: integer (or Expression with + resultType integer).
+ :type base_request_id: object """ _validation = { @@ -46,8 +55,12 @@ class SapOpenHubSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, + 'base_request_id': {'key': 'baseRequestId', 'type': 'object'}, } def __init__(self, **kwargs): super(SapOpenHubSource, self).__init__(**kwargs) + self.exclude_last_request = kwargs.get('exclude_last_request', None) + self.base_request_id = kwargs.get('base_request_id', None) self.type = 'SapOpenHubSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source_py3.py index 9cfa4e5243b6..752ffd8554b0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source_py3.py @@ -34,6 +34,15 @@ class SapOpenHubSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param exclude_last_request: Whether to exclude the records of the last + request. The default value is true. Type: boolean (or Expression with + resultType boolean). + :type exclude_last_request: object + :param base_request_id: The ID of the request for delta loading. Once it is + set, only data with requestId larger than the value of this property will + be retrieved. The default value is 0. Type: integer (or Expression with + resultType integer). + :type base_request_id: object """ _validation = { @@ -46,8 +55,12 @@ class SapOpenHubSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, + 'base_request_id': {'key': 'baseRequestId', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, exclude_last_request=None, base_request_id=None, **kwargs) -> None: super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.exclude_last_request = exclude_last_request + self.base_request_id = base_request_id self.type = 'SapOpenHubSource'
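A minimal sketch (not part of the generated patch) of delta loading with the two SapOpenHubSource properties introduced above, assuming a build that includes this change; the baseline request ID is illustrative:

from azure.mgmt.datafactory.models import SapOpenHubSource

# Retrieve only data whose requestId is larger than 12345, and exclude the
# records of the last (possibly still open) request.
source = SapOpenHubSource(
    exclude_last_request=True,  # matches the documented default of true
    base_request_id=12345,      # illustrative baseline for delta loading
)
print(source.type)  # 'SapOpenHubSource'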
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py index ffc98f67bed2..826179f5fb63 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py @@ -21,8 +21,8 @@ from .pipeline_runs_operations import PipelineRunsOperations from .activity_runs_operations import ActivityRunsOperations from .triggers_operations import TriggersOperations -from .rerun_triggers_operations import RerunTriggersOperations from .trigger_runs_operations import TriggerRunsOperations +from .rerun_triggers_operations import RerunTriggersOperations __all__ = [ 'Operations', @@ -37,6 +37,6 @@ 'PipelineRunsOperations', 'ActivityRunsOperations', 'TriggersOperations', - 'RerunTriggersOperations', 'TriggerRunsOperations', + 'RerunTriggersOperations', ] diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/trigger_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/trigger_runs_operations.py index 51e9b0ac37a3..e4e4774ae3bc 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/trigger_runs_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/trigger_runs_operations.py @@ -37,6 +37,65 @@ def __init__(self, client, config, serializer, deserializer): self.config = config + def rerun( + self, resource_group_name, factory_name, trigger_name, run_id, custom_headers=None, raw=False, **operation_config): + """Rerun single trigger instance by runId. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :param run_id: The pipeline run identifier. + :type run_id: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.rerun.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + 'runId': self._serialize.url("run_id", run_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + rerun.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/rerun'}
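A minimal sketch (not part of the generated patch) of invoking the new rerun operation through the management client; the credential flow is the standard service-principal pattern for this generation of the SDK, and all names are placeholders:

from azure.common.credentials import ServicePrincipalCredentials
from azure.mgmt.datafactory import DataFactoryManagementClient

credentials = ServicePrincipalCredentials(
    client_id="<client-id>", secret="<client-secret>", tenant="<tenant-id>")
client = DataFactoryManagementClient(credentials, "<subscription-id>")

# POSTs to .../triggers/{triggerName}/triggerRuns/{runId}/rerun as declared
# in rerun.metadata above; returns None on HTTP 200.
client.trigger_runs.rerun(
    resource_group_name="<resource-group>",
    factory_name="<factory-name>",
    trigger_name="<trigger-name>",
    run_id="<trigger-run-id>",
)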
+ def query_by_factory( self, resource_group_name, factory_name, filter_parameters, custom_headers=None, raw=False, **operation_config): """Query trigger runs. From 0c65fd15ce857fd3f6ad6da6bdc1bf92ab92f315 Mon Sep 17 00:00:00 2001 From: zikalino Date: Wed, 7 Aug 2019 06:50:15 +0800 Subject: [PATCH 16/30] updated release notes --- .../azure-mgmt-datafactory/HISTORY.rst | 150 ++++++++++++++++++ .../azure/mgmt/datafactory/version.py | 2 +- 2 files changed, 151 insertions(+), 1 deletion(-) diff --git a/sdk/datafactory/azure-mgmt-datafactory/HISTORY.rst b/sdk/datafactory/azure-mgmt-datafactory/HISTORY.rst index c2beec74971f..53226433c3fa 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/HISTORY.rst +++ b/sdk/datafactory/azure-mgmt-datafactory/HISTORY.rst @@ -3,6 +3,156 @@ Release History =============== +0.8.0 (2019-08-07) +++++++++++++++++++ + +**Features** + +- Model HubspotSource has a new parameter max_concurrent_connections +- Model CouchbaseSource has a new parameter max_concurrent_connections +- Model HttpSource has a new parameter max_concurrent_connections +- Model AzureDataLakeStoreSource has a new parameter max_concurrent_connections +- Model ConcurSource has a new parameter max_concurrent_connections +- Model FileShareDataset has a new parameter modified_datetime_start +- Model FileShareDataset has a new parameter modified_datetime_end +- Model SalesforceSource has a new parameter max_concurrent_connections +- Model NetezzaSource has a new parameter partition_option +- Model NetezzaSource has a new parameter max_concurrent_connections +- Model NetezzaSource has a new parameter partition_settings +- Model AzureMySqlSource has a new parameter max_concurrent_connections +- Model OdbcSink has a new parameter max_concurrent_connections +- Model ImpalaObjectDataset has a new parameter impala_object_dataset_schema +- Model ImpalaObjectDataset has a new parameter table +- Model AzureSqlDWTableDataset has a new parameter azure_sql_dw_table_dataset_schema +- Model AzureSqlDWTableDataset has a new parameter table +- Model SapEccSource has a new parameter max_concurrent_connections +- Model CopySource has a new parameter max_concurrent_connections +- Model ServiceNowSource has a new parameter max_concurrent_connections +- Model Trigger has a new parameter annotations +- Model CassandraSource has a new parameter max_concurrent_connections +- Model AzureQueueSink has a new parameter max_concurrent_connections +- Model DrillSource has a new parameter max_concurrent_connections +- Model DocumentDbCollectionSink has a new parameter write_behavior +- Model DocumentDbCollectionSink has a new parameter max_concurrent_connections +- Model SapHanaLinkedService has a new parameter connection_string +- Model SalesforceSink has a new parameter max_concurrent_connections +- Model HiveObjectDataset has a new parameter hive_object_dataset_schema +- Model HiveObjectDataset has a new parameter table +- Model GoogleBigQueryObjectDataset has a new parameter dataset +- Model GoogleBigQueryObjectDataset has a new parameter table +- Model FileSystemSource has a new parameter max_concurrent_connections +- Model SqlSink has a new parameter stored_procedure_table_type_parameter_name +- Model SqlSink has a new parameter max_concurrent_connections +- Model CopySink has a new parameter max_concurrent_connections +- Model SapCloudForCustomerSource
has a new parameter max_concurrent_connections +- Model CopyActivity has a new parameter preserve_rules +- Model CopyActivity has a new parameter preserve +- Model AmazonMWSSource has a new parameter max_concurrent_connections +- Model SqlDWSink has a new parameter max_concurrent_connections +- Model MagentoSource has a new parameter max_concurrent_connections +- Model BlobEventsTrigger has a new parameter annotations +- Model DynamicsSink has a new parameter max_concurrent_connections +- Model AzurePostgreSqlTableDataset has a new parameter table +- Model AzurePostgreSqlTableDataset has a new parameter azure_postgre_sql_table_dataset_schema +- Model SqlServerTableDataset has a new parameter sql_server_table_dataset_schema +- Model SqlServerTableDataset has a new parameter table +- Model DocumentDbCollectionSource has a new parameter max_concurrent_connections +- Model AzurePostgreSqlSource has a new parameter max_concurrent_connections +- Model BlobSource has a new parameter max_concurrent_connections +- Model VerticaTableDataset has a new parameter vertica_table_dataset_schema +- Model VerticaTableDataset has a new parameter table +- Model PhoenixObjectDataset has a new parameter phoenix_object_dataset_schema +- Model PhoenixObjectDataset has a new parameter table +- Model AzureSearchIndexSink has a new parameter max_concurrent_connections +- Model MarketoSource has a new parameter max_concurrent_connections +- Model DynamicsSource has a new parameter max_concurrent_connections +- Model SparkObjectDataset has a new parameter spark_object_dataset_schema +- Model SparkObjectDataset has a new parameter table +- Model XeroSource has a new parameter max_concurrent_connections +- Model AmazonRedshiftSource has a new parameter max_concurrent_connections +- Model CustomActivity has a new parameter retention_time_in_days +- Model WebSource has a new parameter max_concurrent_connections +- Model GreenplumTableDataset has a new parameter greenplum_table_dataset_schema +- Model GreenplumTableDataset has a new parameter table +- Model SalesforceMarketingCloudSource has a new parameter max_concurrent_connections +- Model GoogleBigQuerySource has a new parameter max_concurrent_connections +- Model JiraSource has a new parameter max_concurrent_connections +- Model MongoDbSource has a new parameter max_concurrent_connections +- Model DrillTableDataset has a new parameter drill_table_dataset_schema +- Model DrillTableDataset has a new parameter table +- Model ExecuteSSISPackageActivity has a new parameter log_location +- Model SparkSource has a new parameter max_concurrent_connections +- Model AzureTableSink has a new parameter max_concurrent_connections +- Model AzureDataLakeStoreSink has a new parameter enable_adls_single_file_parallel +- Model AzureDataLakeStoreSink has a new parameter max_concurrent_connections +- Model PrestoSource has a new parameter max_concurrent_connections +- Model RelationalSource has a new parameter max_concurrent_connections +- Model TumblingWindowTrigger has a new parameter annotations +- Model ImpalaSource has a new parameter max_concurrent_connections +- Model ScheduleTrigger has a new parameter annotations +- Model QuickBooksSource has a new parameter max_concurrent_connections +- Model PrestoObjectDataset has a new parameter presto_object_dataset_schema +- Model PrestoObjectDataset has a new parameter table +- Model OracleSink has a new parameter max_concurrent_connections +- Model HdfsSource has a new parameter max_concurrent_connections +- Model PhoenixSource has a new 
parameter max_concurrent_connections +- Model SapCloudForCustomerSink has a new parameter max_concurrent_connections +- Model SquareSource has a new parameter max_concurrent_connections +- Model OracleSource has a new parameter partition_option +- Model OracleSource has a new parameter max_concurrent_connections +- Model OracleSource has a new parameter partition_settings +- Model BlobTrigger has a new parameter annotations +- Model HDInsightOnDemandLinkedService has a new parameter virtual_network_id +- Model HDInsightOnDemandLinkedService has a new parameter subnet_name +- Model AmazonS3LinkedService has a new parameter service_url +- Model HDInsightLinkedService has a new parameter file_system +- Model MultiplePipelineTrigger has a new parameter annotations +- Model HBaseSource has a new parameter max_concurrent_connections +- Model OracleTableDataset has a new parameter oracle_table_dataset_schema +- Model OracleTableDataset has a new parameter table +- Model RerunTumblingWindowTrigger has a new parameter annotations +- Model EloquaSource has a new parameter max_concurrent_connections +- Model AzureSqlTableDataset has a new parameter azure_sql_table_dataset_schema +- Model AzureSqlTableDataset has a new parameter table +- Model BlobSink has a new parameter max_concurrent_connections +- Model HiveSource has a new parameter max_concurrent_connections +- Model SqlSource has a new parameter max_concurrent_connections +- Model PaypalSource has a new parameter max_concurrent_connections +- Model AzureBlobDataset has a new parameter modified_datetime_start +- Model AzureBlobDataset has a new parameter modified_datetime_end +- Model VerticaSource has a new parameter max_concurrent_connections +- Model AmazonS3Dataset has a new parameter modified_datetime_start +- Model AmazonS3Dataset has a new parameter modified_datetime_end +- Model PipelineRun has a new parameter run_group_id +- Model PipelineRun has a new parameter is_latest +- Model ShopifySource has a new parameter max_concurrent_connections +- Model MariaDBSource has a new parameter max_concurrent_connections +- Model TeradataLinkedService has a new parameter connection_string +- Model ODataLinkedService has a new parameter service_principal_embedded_cert +- Model ODataLinkedService has a new parameter aad_service_principal_credential_type +- Model ODataLinkedService has a new parameter service_principal_key +- Model ODataLinkedService has a new parameter service_principal_id +- Model ODataLinkedService has a new parameter aad_resource_id +- Model ODataLinkedService has a new parameter service_principal_embedded_cert_password +- Model ODataLinkedService has a new parameter tenant +- Model AzureTableSource has a new parameter max_concurrent_connections +- Model IntegrationRuntimeSsisProperties has a new parameter data_proxy_properties +- Model ZohoSource has a new parameter max_concurrent_connections +- Model ResponsysSource has a new parameter max_concurrent_connections +- Model FileSystemSink has a new parameter max_concurrent_connections +- Model SqlDWSource has a new parameter max_concurrent_connections +- Model GreenplumSource has a new parameter max_concurrent_connections +- Model AzureDatabricksLinkedService has a new parameter new_cluster_init_scripts +- Model AzureDatabricksLinkedService has a new parameter new_cluster_driver_node_type +- Model AzureDatabricksLinkedService has a new parameter new_cluster_enable_elastic_disk +- Added operation TriggerRunsOperations.rerun +- Added operation 
ExposureControlOperations.get_feature_value_by_factory + +**Breaking changes** + +- Operation PipelinesOperations.create_run has a new signature +- Operation PipelinesOperations.create_run has a new signature + 0.7.0 (2019-01-31) ++++++++++++++++++ diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py index a39916c162ce..e4f3d5055303 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py @@ -9,5 +9,5 @@ # regenerated. # -------------------------------------------------------------------------- -VERSION = "1.0.0" +VERSION = "0.8.0" From 326a827710e2109699beb8a9fc0e1f5839e4bdca Mon Sep 17 00:00:00 2001 From: zikalino Date: Wed, 7 Aug 2019 06:57:04 +0800 Subject: [PATCH 17/30] fixed duplicate row --- sdk/datafactory/azure-mgmt-datafactory/HISTORY.rst | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/sdk/datafactory/azure-mgmt-datafactory/HISTORY.rst b/sdk/datafactory/azure-mgmt-datafactory/HISTORY.rst index 53226433c3fa..b60fdfc38738 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/HISTORY.rst +++ b/sdk/datafactory/azure-mgmt-datafactory/HISTORY.rst @@ -151,8 +151,7 @@ Release History **Breaking changes** - Operation PipelinesOperations.create_run has a new signature -- Operation PipelinesOperations.create_run has a new signature - +- Model SSISPackageLocation has a new signature 0.7.0 (2019-01-31) ++++++++++++++++++ From 9f78b50360601f4b86b41af3cc162ff0fb6b8f2b Mon Sep 17 00:00:00 2001 From: zikalino Date: Wed, 7 Aug 2019 06:57:50 +0800 Subject: [PATCH 18/30] breaking changes --- sdk/datafactory/azure-mgmt-datafactory/HISTORY.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/sdk/datafactory/azure-mgmt-datafactory/HISTORY.rst b/sdk/datafactory/azure-mgmt-datafactory/HISTORY.rst index b60fdfc38738..3db402353eb7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/HISTORY.rst +++ b/sdk/datafactory/azure-mgmt-datafactory/HISTORY.rst @@ -152,6 +152,7 @@ Release History - Operation PipelinesOperations.create_run has a new signature - Model SSISPackageLocation has a new signature + 0.7.0 (2019-01-31) ++++++++++++++++++ From 6e95bc530c801b0b3812ab6d022f7ce0f3f8f844 Mon Sep 17 00:00:00 2001 From: Azure SDK for Python bot Date: Wed, 14 Aug 2019 02:21:06 -0700 Subject: [PATCH 19/30] Generated from 65a2679abd2e6a4aa56f0d4e5ef459407f105ae6 (#6774) [DataFactory]Fix typo for binary sink --- .../azure/mgmt/datafactory/__init__.py | 7 +- .../azure/mgmt/datafactory/_configuration.py | 48 + ....py => _data_factory_management_client.py} | 64 +- .../azure/mgmt/datafactory/models/__init__.py | 2987 +- ... 
_data_factory_management_client_enums.py} | 0 .../azure/mgmt/datafactory/models/_models.py | 28730 ++++++++++++++++ .../mgmt/datafactory/models/_models_py3.py | 28730 ++++++++++++++++ .../mgmt/datafactory/models/_paged_models.py | 118 + .../models/access_policy_response.py | 36 - .../models/access_policy_response_py3.py | 36 - .../azure/mgmt/datafactory/models/activity.py | 63 - .../datafactory/models/activity_dependency.py | 46 - .../models/activity_dependency_py3.py | 46 - .../datafactory/models/activity_policy.py | 59 - .../datafactory/models/activity_policy_py3.py | 59 - .../mgmt/datafactory/models/activity_py3.py | 63 - .../mgmt/datafactory/models/activity_run.py | 102 - .../datafactory/models/activity_run_py3.py | 102 - .../models/activity_runs_query_response.py | 39 - .../activity_runs_query_response_py3.py | 39 - .../models/amazon_mws_linked_service.py | 106 - .../models/amazon_mws_linked_service_py3.py | 106 - .../models/amazon_mws_object_dataset.py | 72 - .../models/amazon_mws_object_dataset_py3.py | 72 - .../datafactory/models/amazon_mws_source.py | 57 - .../models/amazon_mws_source_py3.py | 57 - .../models/amazon_redshift_linked_service.py | 86 - .../amazon_redshift_linked_service_py3.py | 86 - .../models/amazon_redshift_source.py | 65 - .../models/amazon_redshift_source_py3.py | 65 - .../datafactory/models/amazon_s3_dataset.py | 107 - .../models/amazon_s3_dataset_py3.py | 107 - .../models/amazon_s3_linked_service.py | 77 - .../models/amazon_s3_linked_service_py3.py | 77 - .../datafactory/models/amazon_s3_location.py | 55 - .../models/amazon_s3_location_py3.py | 55 - .../models/amazon_s3_read_settings.py | 78 - .../models/amazon_s3_read_settings_py3.py | 78 - .../models/append_variable_activity.py | 60 - .../models/append_variable_activity_py3.py | 60 - .../mgmt/datafactory/models/avro_dataset.py | 83 - .../datafactory/models/avro_dataset_py3.py | 83 - .../mgmt/datafactory/models/avro_format.py | 46 - .../datafactory/models/avro_format_py3.py | 46 - .../mgmt/datafactory/models/avro_sink.py | 69 - .../mgmt/datafactory/models/avro_sink_py3.py | 69 - .../mgmt/datafactory/models/avro_source.py | 56 - .../datafactory/models/avro_source_py3.py | 56 - .../datafactory/models/avro_write_settings.py | 46 - .../models/avro_write_settings_py3.py | 46 - .../models/azure_batch_linked_service.py | 88 - .../models/azure_batch_linked_service_py3.py | 88 - .../datafactory/models/azure_blob_dataset.py | 100 - .../models/azure_blob_dataset_py3.py | 100 - .../models/azure_blob_fs_dataset.py | 85 - .../models/azure_blob_fs_dataset_py3.py | 85 - .../models/azure_blob_fs_linked_service.py | 86 - .../azure_blob_fs_linked_service_py3.py | 86 - .../models/azure_blob_fs_location.py | 50 - .../models/azure_blob_fs_location_py3.py | 50 - .../models/azure_blob_fs_read_settings.py | 73 - .../models/azure_blob_fs_read_settings_py3.py | 73 - .../datafactory/models/azure_blob_fs_sink.py | 65 - .../models/azure_blob_fs_sink_py3.py | 65 - .../models/azure_blob_fs_source.py | 68 - .../models/azure_blob_fs_source_py3.py | 68 - .../models/azure_blob_fs_write_settings.py | 45 - .../azure_blob_fs_write_settings_py3.py | 45 - .../azure_blob_storage_linked_service.py | 104 - .../azure_blob_storage_linked_service_py3.py | 104 - .../models/azure_blob_storage_location.py | 50 - .../models/azure_blob_storage_location_py3.py | 50 - .../azure_blob_storage_read_settings.py | 73 - .../azure_blob_storage_read_settings_py3.py | 73 - .../azure_blob_storage_write_settings.py | 45 - .../azure_blob_storage_write_settings_py3.py | 45 - 
.../azure_data_explorer_command_activity.py | 71 - ...zure_data_explorer_command_activity_py3.py | 71 - .../azure_data_explorer_linked_service.py | 86 - .../azure_data_explorer_linked_service_py3.py | 86 - .../models/azure_data_explorer_sink.py | 76 - .../models/azure_data_explorer_sink_py3.py | 76 - .../models/azure_data_explorer_source.py | 70 - .../models/azure_data_explorer_source_py3.py | 70 - .../azure_data_explorer_table_dataset.py | 72 - .../azure_data_explorer_table_dataset_py3.py | 72 - ...zure_data_lake_analytics_linked_service.py | 99 - ..._data_lake_analytics_linked_service_py3.py | 99 - .../models/azure_data_lake_store_dataset.py | 86 - .../azure_data_lake_store_dataset_py3.py | 86 - .../azure_data_lake_store_linked_service.py | 98 - ...zure_data_lake_store_linked_service_py3.py | 98 - .../models/azure_data_lake_store_location.py | 45 - .../azure_data_lake_store_location_py3.py | 45 - .../azure_data_lake_store_read_settings.py | 73 - ...azure_data_lake_store_read_settings_py3.py | 73 - .../models/azure_data_lake_store_sink.py | 69 - .../models/azure_data_lake_store_sink_py3.py | 69 - .../models/azure_data_lake_store_source.py | 58 - .../azure_data_lake_store_source_py3.py | 58 - .../azure_data_lake_store_write_settings.py | 45 - ...zure_data_lake_store_write_settings_py3.py | 45 - .../models/azure_databricks_linked_service.py | 126 - .../azure_databricks_linked_service_py3.py | 126 - .../models/azure_function_activity.py | 85 - .../models/azure_function_activity_py3.py | 85 - .../models/azure_function_linked_service.py | 69 - .../azure_function_linked_service_py3.py | 69 - .../models/azure_key_vault_linked_service.py | 60 - .../azure_key_vault_linked_service_py3.py | 60 - .../azure_key_vault_secret_reference.py | 51 - .../azure_key_vault_secret_reference_py3.py | 51 - .../models/azure_maria_db_linked_service.py | 69 - .../azure_maria_db_linked_service_py3.py | 69 - .../models/azure_maria_db_source.py | 57 - .../models/azure_maria_db_source_py3.py | 57 - .../models/azure_maria_db_table_dataset.py | 72 - .../azure_maria_db_table_dataset_py3.py | 72 - .../azure_ml_batch_execution_activity.py | 82 - .../azure_ml_batch_execution_activity_py3.py | 82 - .../models/azure_ml_linked_service.py | 94 - .../models/azure_ml_linked_service_py3.py | 94 - .../azure_ml_update_resource_activity.py | 81 - .../azure_ml_update_resource_activity_py3.py | 81 - .../models/azure_ml_web_service_file.py | 43 - .../models/azure_ml_web_service_file_py3.py | 43 - .../models/azure_my_sql_linked_service.py | 71 - .../models/azure_my_sql_linked_service_py3.py | 71 - .../datafactory/models/azure_my_sql_source.py | 57 - .../models/azure_my_sql_source_py3.py | 57 - .../models/azure_my_sql_table_dataset.py | 72 - .../models/azure_my_sql_table_dataset_py3.py | 72 - .../azure_postgre_sql_linked_service.py | 70 - .../azure_postgre_sql_linked_service_py3.py | 70 - .../models/azure_postgre_sql_sink.py | 66 - .../models/azure_postgre_sql_sink_py3.py | 66 - .../models/azure_postgre_sql_source.py | 57 - .../models/azure_postgre_sql_source_py3.py | 57 - .../models/azure_postgre_sql_table_dataset.py | 84 - .../azure_postgre_sql_table_dataset_py3.py | 84 - .../datafactory/models/azure_queue_sink.py | 61 - .../models/azure_queue_sink_py3.py | 61 - .../models/azure_search_index_dataset.py | 73 - .../models/azure_search_index_dataset_py3.py | 73 - .../models/azure_search_index_sink.py | 67 - .../models/azure_search_index_sink_py3.py | 67 - .../models/azure_search_linked_service.py | 69 - 
.../models/azure_search_linked_service_py3.py | 69 - .../azure_sql_database_linked_service.py | 87 - .../azure_sql_database_linked_service_py3.py | 87 - .../models/azure_sql_dw_linked_service.py | 88 - .../models/azure_sql_dw_linked_service_py3.py | 88 - .../models/azure_sql_dw_table_dataset.py | 82 - .../models/azure_sql_dw_table_dataset_py3.py | 82 - .../models/azure_sql_mi_linked_service.py | 87 - .../models/azure_sql_mi_linked_service_py3.py | 87 - .../models/azure_sql_mi_table_dataset.py | 82 - .../models/azure_sql_mi_table_dataset_py3.py | 82 - .../mgmt/datafactory/models/azure_sql_sink.py | 87 - .../datafactory/models/azure_sql_sink_py3.py | 87 - .../datafactory/models/azure_sql_source.py | 73 - .../models/azure_sql_source_py3.py | 73 - .../models/azure_sql_table_dataset.py | 82 - .../models/azure_sql_table_dataset_py3.py | 82 - .../models/azure_storage_linked_service.py | 83 - .../azure_storage_linked_service_py3.py | 83 - .../datafactory/models/azure_table_dataset.py | 73 - .../models/azure_table_dataset_py3.py | 73 - .../datafactory/models/azure_table_sink.py | 81 - .../models/azure_table_sink_py3.py | 81 - .../datafactory/models/azure_table_source.py | 63 - .../models/azure_table_source_py3.py | 63 - .../azure_table_storage_linked_service.py | 83 - .../azure_table_storage_linked_service_py3.py | 83 - .../mgmt/datafactory/models/binary_dataset.py | 77 - .../datafactory/models/binary_dataset_py3.py | 77 - .../mgmt/datafactory/models/binary_sink.py | 65 - .../datafactory/models/binary_sink_py3.py | 65 - .../mgmt/datafactory/models/binary_source.py | 56 - .../datafactory/models/binary_source_py3.py | 56 - .../datafactory/models/blob_events_trigger.py | 85 - .../models/blob_events_trigger_py3.py | 85 - .../mgmt/datafactory/models/blob_sink.py | 80 - .../mgmt/datafactory/models/blob_sink_py3.py | 80 - .../mgmt/datafactory/models/blob_source.py | 68 - .../datafactory/models/blob_source_py3.py | 68 - .../mgmt/datafactory/models/blob_trigger.py | 78 - .../datafactory/models/blob_trigger_py3.py | 78 - .../models/cassandra_linked_service.py | 84 - .../models/cassandra_linked_service_py3.py | 84 - .../datafactory/models/cassandra_source.py | 70 - .../models/cassandra_source_py3.py | 70 - .../models/cassandra_table_dataset.py | 77 - .../models/cassandra_table_dataset_py3.py | 77 - ...on_data_service_for_apps_entity_dataset.py | 72 - ...ata_service_for_apps_entity_dataset_py3.py | 72 - ...on_data_service_for_apps_linked_service.py | 115 - ...ata_service_for_apps_linked_service_py3.py | 115 - .../common_data_service_for_apps_sink.py | 77 - .../common_data_service_for_apps_sink_py3.py | 77 - .../common_data_service_for_apps_source.py | 58 - ...common_data_service_for_apps_source_py3.py | 58 - .../models/concur_linked_service.py | 92 - .../models/concur_linked_service_py3.py | 92 - .../models/concur_object_dataset.py | 72 - .../models/concur_object_dataset_py3.py | 72 - .../mgmt/datafactory/models/concur_source.py | 57 - .../datafactory/models/concur_source_py3.py | 57 - .../datafactory/models/control_activity.py | 60 - .../models/control_activity_py3.py | 60 - .../mgmt/datafactory/models/copy_activity.py | 124 - .../datafactory/models/copy_activity_py3.py | 124 - .../mgmt/datafactory/models/copy_sink.py | 81 - .../mgmt/datafactory/models/copy_sink_py3.py | 81 - .../mgmt/datafactory/models/copy_source.py | 83 - .../datafactory/models/copy_source_py3.py | 83 - .../models/cosmos_db_linked_service.py | 71 - .../models/cosmos_db_linked_service_py3.py | 71 - 
...smos_db_mongo_db_api_collection_dataset.py | 73 - ..._db_mongo_db_api_collection_dataset_py3.py | 73 - .../cosmos_db_mongo_db_api_linked_service.py | 67 - ...smos_db_mongo_db_api_linked_service_py3.py | 67 - .../models/cosmos_db_mongo_db_api_sink.py | 68 - .../models/cosmos_db_mongo_db_api_sink_py3.py | 68 - .../models/cosmos_db_mongo_db_api_source.py | 71 - .../cosmos_db_mongo_db_api_source_py3.py | 71 - .../models/couchbase_linked_service.py | 70 - .../models/couchbase_linked_service_py3.py | 70 - .../datafactory/models/couchbase_source.py | 57 - .../models/couchbase_source_py3.py | 57 - .../models/couchbase_table_dataset.py | 72 - .../models/couchbase_table_dataset_py3.py | 72 - ...eate_linked_integration_runtime_request.py | 43 - ..._linked_integration_runtime_request_py3.py | 43 - .../datafactory/models/create_run_response.py | 34 - .../models/create_run_response_py3.py | 34 - .../datafactory/models/custom_activity.py | 91 - .../datafactory/models/custom_activity_py3.py | 91 - .../custom_activity_reference_object.py | 33 - .../custom_activity_reference_object_py3.py | 33 - .../custom_data_source_linked_service.py | 58 - .../custom_data_source_linked_service_py3.py | 58 - .../mgmt/datafactory/models/custom_dataset.py | 71 - .../datafactory/models/custom_dataset_py3.py | 71 - .../data_lake_analytics_usql_activity.py | 98 - .../data_lake_analytics_usql_activity_py3.py | 98 - .../models/databricks_notebook_activity.py | 76 - .../databricks_notebook_activity_py3.py | 76 - .../models/databricks_spark_jar_activity.py | 75 - .../databricks_spark_jar_activity_py3.py | 75 - .../databricks_spark_python_activity.py | 75 - .../databricks_spark_python_activity_py3.py | 75 - .../azure/mgmt/datafactory/models/dataset.py | 111 - .../models/dataset_bzip2_compression.py | 38 - .../models/dataset_bzip2_compression_py3.py | 38 - .../datafactory/models/dataset_compression.py | 47 - .../models/dataset_compression_py3.py | 47 - .../models/dataset_deflate_compression.py | 42 - .../models/dataset_deflate_compression_py3.py | 42 - .../mgmt/datafactory/models/dataset_folder.py | 29 - .../datafactory/models/dataset_folder_py3.py | 29 - .../models/dataset_gzip_compression.py | 42 - .../models/dataset_gzip_compression_py3.py | 42 - .../datafactory/models/dataset_location.py | 49 - .../models/dataset_location_py3.py | 49 - .../mgmt/datafactory/models/dataset_py3.py | 111 - .../datafactory/models/dataset_reference.py | 48 - .../models/dataset_reference_py3.py | 48 - .../datafactory/models/dataset_resource.py | 53 - .../models/dataset_resource_paged.py | 27 - .../models/dataset_resource_py3.py | 53 - .../models/dataset_storage_format.py | 57 - .../models/dataset_storage_format_py3.py | 57 - .../models/dataset_zip_deflate_compression.py | 42 - .../dataset_zip_deflate_compression_py3.py | 42 - .../datafactory/models/db2_linked_service.py | 86 - .../models/db2_linked_service_py3.py | 86 - .../mgmt/datafactory/models/db2_source.py | 57 - .../mgmt/datafactory/models/db2_source_py3.py | 57 - .../datafactory/models/delete_activity.py | 87 - .../datafactory/models/delete_activity_py3.py | 87 - .../models/delimited_text_dataset.py | 122 - .../models/delimited_text_dataset_py3.py | 122 - .../models/delimited_text_read_settings.py | 43 - .../delimited_text_read_settings_py3.py | 43 - .../datafactory/models/delimited_text_sink.py | 70 - .../models/delimited_text_sink_py3.py | 70 - .../models/delimited_text_source.py | 61 - .../models/delimited_text_source_py3.py | 61 - .../models/delimited_text_write_settings.py | 49 - 
.../delimited_text_write_settings_py3.py | 49 - .../models/dependency_reference.py | 42 - .../models/dependency_reference_py3.py | 42 - .../datafactory/models/distcp_settings.py | 49 - .../datafactory/models/distcp_settings_py3.py | 49 - .../models/document_db_collection_dataset.py | 73 - .../document_db_collection_dataset_py3.py | 73 - .../models/document_db_collection_sink.py | 71 - .../models/document_db_collection_sink_py3.py | 71 - .../models/document_db_collection_source.py | 62 - .../document_db_collection_source_py3.py | 62 - .../models/drill_linked_service.py | 69 - .../models/drill_linked_service_py3.py | 69 - .../mgmt/datafactory/models/drill_source.py | 57 - .../datafactory/models/drill_source_py3.py | 57 - .../datafactory/models/drill_table_dataset.py | 82 - .../models/drill_table_dataset_py3.py | 82 - .../models/dynamics_ax_linked_service.py | 93 - .../models/dynamics_ax_linked_service_py3.py | 93 - .../models/dynamics_ax_resource_dataset.py | 73 - .../dynamics_ax_resource_dataset_py3.py | 73 - .../datafactory/models/dynamics_ax_source.py | 57 - .../models/dynamics_ax_source_py3.py | 57 - .../models/dynamics_crm_entity_dataset.py | 72 - .../models/dynamics_crm_entity_dataset_py3.py | 72 - .../models/dynamics_crm_linked_service.py | 112 - .../models/dynamics_crm_linked_service_py3.py | 112 - .../datafactory/models/dynamics_crm_sink.py | 77 - .../models/dynamics_crm_sink_py3.py | 77 - .../datafactory/models/dynamics_crm_source.py | 58 - .../models/dynamics_crm_source_py3.py | 58 - .../models/dynamics_entity_dataset.py | 72 - .../models/dynamics_entity_dataset_py3.py | 72 - .../models/dynamics_linked_service.py | 109 - .../models/dynamics_linked_service_py3.py | 109 - .../mgmt/datafactory/models/dynamics_sink.py | 77 - .../datafactory/models/dynamics_sink_py3.py | 77 - .../datafactory/models/dynamics_source.py | 58 - .../datafactory/models/dynamics_source_py3.py | 58 - .../models/eloqua_linked_service.py | 91 - .../models/eloqua_linked_service_py3.py | 91 - .../models/eloqua_object_dataset.py | 72 - .../models/eloqua_object_dataset_py3.py | 72 - .../mgmt/datafactory/models/eloqua_source.py | 57 - .../datafactory/models/eloqua_source_py3.py | 57 - .../datafactory/models/entity_reference.py | 34 - .../models/entity_reference_py3.py | 34 - .../models/execute_pipeline_activity.py | 65 - .../models/execute_pipeline_activity_py3.py | 65 - .../models/execute_ssis_package_activity.py | 124 - .../execute_ssis_package_activity_py3.py | 124 - .../datafactory/models/execution_activity.py | 75 - .../models/execution_activity_py3.py | 75 - .../models/exposure_control_request.py | 32 - .../models/exposure_control_request_py3.py | 32 - .../models/exposure_control_response.py | 40 - .../models/exposure_control_response_py3.py | 40 - .../mgmt/datafactory/models/expression.py | 43 - .../mgmt/datafactory/models/expression_py3.py | 43 - .../azure/mgmt/datafactory/models/factory.py | 81 - .../models/factory_git_hub_configuration.py | 58 - .../factory_git_hub_configuration_py3.py | 58 - .../datafactory/models/factory_identity.py | 49 - .../models/factory_identity_py3.py | 49 - .../mgmt/datafactory/models/factory_paged.py | 27 - .../mgmt/datafactory/models/factory_py3.py | 81 - .../models/factory_repo_configuration.py | 65 - .../models/factory_repo_configuration_py3.py | 65 - .../datafactory/models/factory_repo_update.py | 33 - .../models/factory_repo_update_py3.py | 33 - .../models/factory_update_parameters.py | 32 - .../models/factory_update_parameters_py3.py | 32 - 
.../models/factory_vsts_configuration.py | 62 - .../models/factory_vsts_configuration_py3.py | 62 - .../models/file_server_linked_service.py | 74 - .../models/file_server_linked_service_py3.py | 74 - .../models/file_server_location.py | 45 - .../models/file_server_location_py3.py | 45 - .../models/file_server_read_settings.py | 73 - .../models/file_server_read_settings_py3.py | 73 - .../models/file_server_write_settings.py | 45 - .../models/file_server_write_settings_py3.py | 45 - .../datafactory/models/file_share_dataset.py | 101 - .../models/file_share_dataset_py3.py | 101 - .../datafactory/models/file_system_sink.py | 65 - .../models/file_system_sink_py3.py | 65 - .../datafactory/models/file_system_source.py | 58 - .../models/file_system_source_py3.py | 58 - .../datafactory/models/filter_activity.py | 61 - .../datafactory/models/filter_activity_py3.py | 61 - .../datafactory/models/for_each_activity.py | 73 - .../models/for_each_activity_py3.py | 73 - .../models/format_read_settings.py | 39 - .../models/format_read_settings_py3.py | 39 - .../models/format_write_settings.py | 39 - .../models/format_write_settings_py3.py | 39 - .../datafactory/models/ftp_read_settings.py | 63 - .../models/ftp_read_settings_py3.py | 63 - .../models/ftp_server_linked_service.py | 98 - .../models/ftp_server_linked_service_py3.py | 98 - .../datafactory/models/ftp_server_location.py | 45 - .../models/ftp_server_location_py3.py | 45 - .../models/get_metadata_activity.py | 67 - .../models/get_metadata_activity_py3.py | 67 - .../get_ssis_object_metadata_request.py | 28 - .../get_ssis_object_metadata_request_py3.py | 28 - .../models/git_hub_access_token_request.py | 44 - .../git_hub_access_token_request_py3.py | 44 - .../models/git_hub_access_token_response.py | 28 - .../git_hub_access_token_response_py3.py | 28 - .../models/google_ad_words_linked_service.py | 119 - .../google_ad_words_linked_service_py3.py | 119 - .../models/google_ad_words_object_dataset.py | 72 - .../google_ad_words_object_dataset_py3.py | 72 - .../models/google_ad_words_source.py | 57 - .../models/google_ad_words_source_py3.py | 57 - .../models/google_big_query_linked_service.py | 124 - .../google_big_query_linked_service_py3.py | 124 - .../models/google_big_query_object_dataset.py | 82 - .../google_big_query_object_dataset_py3.py | 82 - .../models/google_big_query_source.py | 57 - .../models/google_big_query_source_py3.py | 57 - .../models/greenplum_linked_service.py | 69 - .../models/greenplum_linked_service_py3.py | 69 - .../datafactory/models/greenplum_source.py | 57 - .../models/greenplum_source_py3.py | 57 - .../models/greenplum_table_dataset.py | 82 - .../models/greenplum_table_dataset_py3.py | 82 - .../models/hbase_linked_service.py | 114 - .../models/hbase_linked_service_py3.py | 114 - .../models/hbase_object_dataset.py | 72 - .../models/hbase_object_dataset_py3.py | 72 - .../mgmt/datafactory/models/hbase_source.py | 57 - .../datafactory/models/hbase_source_py3.py | 57 - .../models/hd_insight_hive_activity.py | 96 - .../models/hd_insight_hive_activity_py3.py | 96 - .../models/hd_insight_linked_service.py | 96 - .../models/hd_insight_linked_service_py3.py | 96 - .../models/hd_insight_map_reduce_activity.py | 99 - .../hd_insight_map_reduce_activity_py3.py | 99 - .../hd_insight_on_demand_linked_service.py | 237 - ...hd_insight_on_demand_linked_service_py3.py | 237 - .../models/hd_insight_pig_activity.py | 87 - .../models/hd_insight_pig_activity_py3.py | 87 - .../models/hd_insight_spark_activity.py | 100 - 
.../models/hd_insight_spark_activity_py3.py | 100 - .../models/hd_insight_streaming_activity.py | 122 - .../hd_insight_streaming_activity_py3.py | 122 - .../datafactory/models/hdfs_linked_service.py | 81 - .../models/hdfs_linked_service_py3.py | 81 - .../mgmt/datafactory/models/hdfs_location.py | 45 - .../datafactory/models/hdfs_location_py3.py | 45 - .../datafactory/models/hdfs_read_settings.py | 77 - .../models/hdfs_read_settings_py3.py | 77 - .../mgmt/datafactory/models/hdfs_source.py | 62 - .../datafactory/models/hdfs_source_py3.py | 62 - .../datafactory/models/hive_linked_service.py | 147 - .../models/hive_linked_service_py3.py | 147 - .../datafactory/models/hive_object_dataset.py | 82 - .../models/hive_object_dataset_py3.py | 82 - .../mgmt/datafactory/models/hive_source.py | 57 - .../datafactory/models/hive_source_py3.py | 57 - .../mgmt/datafactory/models/http_dataset.py | 99 - .../datafactory/models/http_dataset_py3.py | 99 - .../datafactory/models/http_linked_service.py | 105 - .../models/http_linked_service_py3.py | 105 - .../datafactory/models/http_read_settings.py | 63 - .../models/http_read_settings_py3.py | 63 - .../models/http_server_location.py | 50 - .../models/http_server_location_py3.py | 50 - .../mgmt/datafactory/models/http_source.py | 60 - .../datafactory/models/http_source_py3.py | 60 - .../models/hubspot_linked_service.py | 96 - .../models/hubspot_linked_service_py3.py | 96 - .../models/hubspot_object_dataset.py | 72 - .../models/hubspot_object_dataset_py3.py | 72 - .../mgmt/datafactory/models/hubspot_source.py | 57 - .../datafactory/models/hubspot_source_py3.py | 57 - .../models/if_condition_activity.py | 72 - .../models/if_condition_activity_py3.py | 72 - .../models/impala_linked_service.py | 117 - .../models/impala_linked_service_py3.py | 117 - .../models/impala_object_dataset.py | 82 - .../models/impala_object_dataset_py3.py | 82 - .../mgmt/datafactory/models/impala_source.py | 57 - .../datafactory/models/impala_source_py3.py | 57 - .../models/informix_linked_service.py | 86 - .../models/informix_linked_service_py3.py | 86 - .../mgmt/datafactory/models/informix_sink.py | 66 - .../datafactory/models/informix_sink_py3.py | 66 - .../datafactory/models/informix_source.py | 57 - .../datafactory/models/informix_source_py3.py | 57 - .../models/informix_table_dataset.py | 72 - .../models/informix_table_dataset_py3.py | 72 - .../datafactory/models/integration_runtime.py | 51 - .../models/integration_runtime_auth_keys.py | 32 - .../integration_runtime_auth_keys_py3.py | 32 - .../integration_runtime_compute_properties.py | 60 - ...egration_runtime_compute_properties_py3.py | 60 - .../integration_runtime_connection_info.py | 70 - ...integration_runtime_connection_info_py3.py | 70 - ..._runtime_custom_setup_script_properties.py | 33 - ...time_custom_setup_script_properties_py3.py | 33 - ...tegration_runtime_data_proxy_properties.py | 37 - ...ation_runtime_data_proxy_properties_py3.py | 37 - .../integration_runtime_monitoring_data.py | 33 - ...integration_runtime_monitoring_data_py3.py | 33 - .../integration_runtime_node_ip_address.py | 35 - ...integration_runtime_node_ip_address_py3.py | 35 - ...ntegration_runtime_node_monitoring_data.py | 79 - ...ration_runtime_node_monitoring_data_py3.py | 79 - .../models/integration_runtime_py3.py | 51 - .../models/integration_runtime_reference.py | 48 - .../integration_runtime_reference_py3.py | 48 - ...ation_runtime_regenerate_key_parameters.py | 30 - ...n_runtime_regenerate_key_parameters_py3.py | 30 - 
.../models/integration_runtime_resource.py | 53 - .../integration_runtime_resource_paged.py | 27 - .../integration_runtime_resource_py3.py | 53 - .../integration_runtime_ssis_catalog_info.py | 55 - ...tegration_runtime_ssis_catalog_info_py3.py | 55 - .../integration_runtime_ssis_properties.py | 59 - ...integration_runtime_ssis_properties_py3.py | 59 - .../models/integration_runtime_status.py | 64 - ...ntegration_runtime_status_list_response.py | 40 - ...ration_runtime_status_list_response_py3.py | 40 - .../models/integration_runtime_status_py3.py | 64 - .../integration_runtime_status_response.py | 42 - ...integration_runtime_status_response_py3.py | 42 - .../integration_runtime_vnet_properties.py | 38 - ...integration_runtime_vnet_properties_py3.py | 38 - .../datafactory/models/jira_linked_service.py | 98 - .../models/jira_linked_service_py3.py | 98 - .../datafactory/models/jira_object_dataset.py | 72 - .../models/jira_object_dataset_py3.py | 72 - .../mgmt/datafactory/models/jira_source.py | 57 - .../datafactory/models/jira_source_py3.py | 57 - .../mgmt/datafactory/models/json_format.py | 82 - .../datafactory/models/json_format_py3.py | 82 - .../models/linked_integration_runtime.py | 58 - ...d_integration_runtime_key_authorization.py | 39 - ...tegration_runtime_key_authorization_py3.py | 39 - .../models/linked_integration_runtime_py3.py | 58 - ..._integration_runtime_rbac_authorization.py | 41 - ...egration_runtime_rbac_authorization_py3.py | 41 - .../linked_integration_runtime_request.py | 35 - .../linked_integration_runtime_request_py3.py | 35 - .../models/linked_integration_runtime_type.py | 42 - .../linked_integration_runtime_type_py3.py | 42 - .../mgmt/datafactory/models/linked_service.py | 102 - .../datafactory/models/linked_service_py3.py | 102 - .../models/linked_service_reference.py | 48 - .../models/linked_service_reference_py3.py | 48 - .../models/linked_service_resource.py | 53 - .../models/linked_service_resource_paged.py | 27 - .../models/linked_service_resource_py3.py | 53 - .../models/log_storage_settings.py | 46 - .../models/log_storage_settings_py3.py | 46 - .../datafactory/models/lookup_activity.py | 74 - .../datafactory/models/lookup_activity_py3.py | 74 - .../models/magento_linked_service.py | 85 - .../models/magento_linked_service_py3.py | 85 - .../models/magento_object_dataset.py | 72 - .../models/magento_object_dataset_py3.py | 72 - .../mgmt/datafactory/models/magento_source.py | 57 - .../datafactory/models/magento_source_py3.py | 57 - .../models/managed_integration_runtime.py | 65 - .../managed_integration_runtime_error.py | 55 - .../managed_integration_runtime_error_py3.py | 55 - .../managed_integration_runtime_node.py | 52 - .../managed_integration_runtime_node_py3.py | 52 - ...ed_integration_runtime_operation_result.py | 65 - ...ntegration_runtime_operation_result_py3.py | 65 - .../models/managed_integration_runtime_py3.py | 65 - .../managed_integration_runtime_status.py | 78 - .../managed_integration_runtime_status_py3.py | 78 - .../models/maria_db_linked_service.py | 69 - .../models/maria_db_linked_service_py3.py | 69 - .../datafactory/models/maria_db_source.py | 57 - .../datafactory/models/maria_db_source_py3.py | 57 - .../models/maria_db_table_dataset.py | 72 - .../models/maria_db_table_dataset_py3.py | 72 - .../models/marketo_linked_service.py | 90 - .../models/marketo_linked_service_py3.py | 90 - .../models/marketo_object_dataset.py | 72 - .../models/marketo_object_dataset_py3.py | 72 - .../mgmt/datafactory/models/marketo_source.py | 57 - 
.../datafactory/models/marketo_source_py3.py | 57 -
.../models/microsoft_access_linked_service.py | 86 -
.../microsoft_access_linked_service_py3.py | 86 -
.../models/microsoft_access_sink.py | 66 -
.../models/microsoft_access_sink_py3.py | 66 -
.../models/microsoft_access_source.py | 57 -
.../models/microsoft_access_source_py3.py | 57 -
.../models/microsoft_access_table_dataset.py | 72 -
.../microsoft_access_table_dataset_py3.py | 72 -
.../models/mongo_db_collection_dataset.py | 73 -
.../models/mongo_db_collection_dataset_py3.py | 73 -
.../mongo_db_cursor_methods_properties.py | 53 -
.../mongo_db_cursor_methods_properties_py3.py | 53 -
.../models/mongo_db_linked_service.py | 109 -
.../models/mongo_db_linked_service_py3.py | 109 -
.../datafactory/models/mongo_db_source.py | 57 -
.../datafactory/models/mongo_db_source_py3.py | 57 -
.../models/mongo_db_v2_collection_dataset.py | 73 -
.../mongo_db_v2_collection_dataset_py3.py | 73 -
.../models/mongo_db_v2_linked_service.py | 66 -
.../models/mongo_db_v2_linked_service_py3.py | 66 -
.../datafactory/models/mongo_db_v2_source.py | 71 -
.../models/mongo_db_v2_source_py3.py | 71 -
.../models/multiple_pipeline_trigger.py | 68 -
.../models/multiple_pipeline_trigger_py3.py | 68 -
.../models/my_sql_linked_service.py | 70 -
.../models/my_sql_linked_service_py3.py | 70 -
.../mgmt/datafactory/models/my_sql_source.py | 57 -
.../datafactory/models/my_sql_source_py3.py | 57 -
.../models/my_sql_table_dataset.py | 72 -
.../models/my_sql_table_dataset_py3.py | 72 -
.../models/netezza_linked_service.py | 69 -
.../models/netezza_linked_service_py3.py | 69 -
.../models/netezza_partition_settings.py | 42 -
.../models/netezza_partition_settings_py3.py | 42 -
.../mgmt/datafactory/models/netezza_source.py | 70 -
.../datafactory/models/netezza_source_py3.py | 70 -
.../models/netezza_table_dataset.py | 72 -
.../models/netezza_table_dataset_py3.py | 72 -
.../models/odata_linked_service.py | 127 -
.../models/odata_linked_service_py3.py | 127 -
.../models/odata_resource_dataset.py | 72 -
.../models/odata_resource_dataset_py3.py | 72 -
.../mgmt/datafactory/models/odata_source.py | 57 -
.../datafactory/models/odata_source_py3.py | 57 -
.../datafactory/models/odbc_linked_service.py | 86 -
.../models/odbc_linked_service_py3.py | 86 -
.../mgmt/datafactory/models/odbc_sink.py | 66 -
.../mgmt/datafactory/models/odbc_sink_py3.py | 66 -
.../mgmt/datafactory/models/odbc_source.py | 57 -
.../datafactory/models/odbc_source_py3.py | 57 -
.../datafactory/models/odbc_table_dataset.py | 72 -
.../models/odbc_table_dataset_py3.py | 72 -
.../datafactory/models/office365_dataset.py | 79 -
.../models/office365_dataset_py3.py | 79 -
.../models/office365_linked_service.py | 83 -
.../models/office365_linked_service_py3.py | 83 -
.../datafactory/models/office365_source.py | 78 -
.../models/office365_source_py3.py | 78 -
.../mgmt/datafactory/models/operation.py | 41 -
.../datafactory/models/operation_display.py | 41 -
.../models/operation_display_py3.py | 41 -
.../models/operation_log_specification.py | 37 -
.../models/operation_log_specification_py3.py | 37 -
.../models/operation_metric_availability.py | 33 -
.../operation_metric_availability_py3.py | 33 -
.../models/operation_metric_dimension.py | 37 -
.../models/operation_metric_dimension_py3.py | 37 -
.../models/operation_metric_specification.py | 68 -
.../operation_metric_specification_py3.py | 68 -
.../datafactory/models/operation_paged.py | 27 -
.../mgmt/datafactory/models/operation_py3.py | 41 -
.../models/operation_service_specification.py | 34 -
.../operation_service_specification_py3.py | 34 -
.../models/oracle_linked_service.py | 71 -
.../models/oracle_linked_service_py3.py | 71 -
.../models/oracle_partition_settings.py | 46 -
.../models/oracle_partition_settings_py3.py | 46 -
.../oracle_service_cloud_linked_service.py | 95 -
...oracle_service_cloud_linked_service_py3.py | 95 -
.../oracle_service_cloud_object_dataset.py | 72 -
...oracle_service_cloud_object_dataset_py3.py | 72 -
.../models/oracle_service_cloud_source.py | 57 -
.../models/oracle_service_cloud_source_py3.py | 57 -
.../mgmt/datafactory/models/oracle_sink.py | 66 -
.../datafactory/models/oracle_sink_py3.py | 66 -
.../mgmt/datafactory/models/oracle_source.py | 76 -
.../datafactory/models/oracle_source_py3.py | 76 -
.../models/oracle_table_dataset.py | 82 -
.../models/oracle_table_dataset_py3.py | 82 -
.../mgmt/datafactory/models/orc_format.py | 46 -
.../mgmt/datafactory/models/orc_format_py3.py | 46 -
.../models/parameter_specification.py | 39 -
.../models/parameter_specification_py3.py | 39 -
.../datafactory/models/parquet_dataset.py | 76 -
.../datafactory/models/parquet_dataset_py3.py | 76 -
.../mgmt/datafactory/models/parquet_format.py | 46 -
.../datafactory/models/parquet_format_py3.py | 46 -
.../mgmt/datafactory/models/parquet_sink.py | 65 -
.../datafactory/models/parquet_sink_py3.py | 65 -
.../mgmt/datafactory/models/parquet_source.py | 56 -
.../datafactory/models/parquet_source_py3.py | 56 -
.../models/paypal_linked_service.py | 92 -
.../models/paypal_linked_service_py3.py | 92 -
.../models/paypal_object_dataset.py | 72 -
.../models/paypal_object_dataset_py3.py | 72 -
.../mgmt/datafactory/models/paypal_source.py | 57 -
.../datafactory/models/paypal_source_py3.py | 57 -
.../models/phoenix_linked_service.py | 121 -
.../models/phoenix_linked_service_py3.py | 121 -
.../models/phoenix_object_dataset.py | 82 -
.../models/phoenix_object_dataset_py3.py | 82 -
.../mgmt/datafactory/models/phoenix_source.py | 57 -
.../datafactory/models/phoenix_source_py3.py | 57 -
.../datafactory/models/pipeline_folder.py | 29 -
.../datafactory/models/pipeline_folder_py3.py | 29 -
.../datafactory/models/pipeline_reference.py | 48 -
.../models/pipeline_reference_py3.py | 48 -
.../datafactory/models/pipeline_resource.py | 84 -
.../models/pipeline_resource_paged.py | 27 -
.../models/pipeline_resource_py3.py | 84 -
.../mgmt/datafactory/models/pipeline_run.py | 99 -
.../models/pipeline_run_invoked_by.py | 45 -
.../models/pipeline_run_invoked_by_py3.py | 45 -
.../datafactory/models/pipeline_run_py3.py | 99 -
.../models/pipeline_runs_query_response.py | 39 -
.../pipeline_runs_query_response_py3.py | 39 -
.../datafactory/models/polybase_settings.py | 53 -
.../models/polybase_settings_py3.py | 53 -
.../models/postgre_sql_linked_service.py | 70 -
.../models/postgre_sql_linked_service_py3.py | 70 -
.../datafactory/models/postgre_sql_source.py | 57 -
.../models/postgre_sql_source_py3.py | 57 -
.../models/postgre_sql_table_dataset.py | 72 -
.../models/postgre_sql_table_dataset_py3.py | 72 -
.../models/presto_linked_service.py | 132 -
.../models/presto_linked_service_py3.py | 132 -
.../models/presto_object_dataset.py | 82 -
.../models/presto_object_dataset_py3.py | 82 -
.../mgmt/datafactory/models/presto_source.py | 57 -
.../datafactory/models/presto_source_py3.py | 57 -
.../models/quick_books_linked_service.py | 100 -
.../models/quick_books_linked_service_py3.py | 100 -
.../models/quick_books_object_dataset.py | 72 -
.../models/quick_books_object_dataset_py3.py | 72 -
.../datafactory/models/quick_books_source.py | 57 -
.../models/quick_books_source_py3.py | 57 -
.../datafactory/models/recurrence_schedule.py | 50 -
.../models/recurrence_schedule_occurrence.py | 38 -
.../recurrence_schedule_occurrence_py3.py | 38 -
.../models/recurrence_schedule_py3.py | 50 -
.../redirect_incompatible_row_settings.py | 47 -
.../redirect_incompatible_row_settings_py3.py | 47 -
.../models/redshift_unload_settings.py | 48 -
.../models/redshift_unload_settings_py3.py | 48 -
.../datafactory/models/relational_source.py | 57 -
.../models/relational_source_py3.py | 57 -
.../models/relational_table_dataset.py | 72 -
.../models/relational_table_dataset_py3.py | 72 -
.../models/rerun_trigger_resource.py | 54 -
.../models/rerun_trigger_resource_paged.py | 27 -
.../models/rerun_trigger_resource_py3.py | 54 -
.../models/rerun_tumbling_window_trigger.py | 78 -
...mbling_window_trigger_action_parameters.py | 47 -
...ng_window_trigger_action_parameters_py3.py | 47 -
.../rerun_tumbling_window_trigger_py3.py | 78 -
.../azure/mgmt/datafactory/models/resource.py | 58 -
.../mgmt/datafactory/models/resource_py3.py | 58 -
.../models/responsys_linked_service.py | 94 -
.../models/responsys_linked_service_py3.py | 94 -
.../models/responsys_object_dataset.py | 72 -
.../models/responsys_object_dataset_py3.py | 72 -
.../datafactory/models/responsys_source.py | 57 -
.../models/responsys_source_py3.py | 57 -
.../models/rest_resource_dataset.py | 93 -
.../models/rest_resource_dataset_py3.py | 93 -
.../models/rest_service_linked_service.py | 107 -
.../models/rest_service_linked_service_py3.py | 107 -
.../mgmt/datafactory/models/rest_source.py | 86 -
.../datafactory/models/rest_source_py3.py | 86 -
.../mgmt/datafactory/models/retry_policy.py | 38 -
.../datafactory/models/retry_policy_py3.py | 38 -
.../models/run_filter_parameters.py | 54 -
.../models/run_filter_parameters_py3.py | 54 -
.../datafactory/models/run_query_filter.py | 53 -
.../models/run_query_filter_py3.py | 53 -
.../datafactory/models/run_query_order_by.py | 46 -
.../models/run_query_order_by_py3.py | 46 -
.../models/salesforce_linked_service.py | 82 -
.../models/salesforce_linked_service_py3.py | 82 -
...lesforce_marketing_cloud_linked_service.py | 91 -
...orce_marketing_cloud_linked_service_py3.py | 91 -
...lesforce_marketing_cloud_object_dataset.py | 72 -
...orce_marketing_cloud_object_dataset_py3.py | 72 -
.../salesforce_marketing_cloud_source.py | 57 -
.../salesforce_marketing_cloud_source_py3.py | 57 -
.../models/salesforce_object_dataset.py | 72 -
.../models/salesforce_object_dataset_py3.py | 72 -
...salesforce_service_cloud_linked_service.py | 87 -
...sforce_service_cloud_linked_service_py3.py | 87 -
...salesforce_service_cloud_object_dataset.py | 72 -
...sforce_service_cloud_object_dataset_py3.py | 72 -
.../models/salesforce_service_cloud_sink.py | 84 -
.../salesforce_service_cloud_sink_py3.py | 84 -
.../models/salesforce_service_cloud_source.py | 63 -
.../salesforce_service_cloud_source_py3.py | 63 -
.../datafactory/models/salesforce_sink.py | 84 -
.../datafactory/models/salesforce_sink_py3.py | 84 -
.../datafactory/models/salesforce_source.py | 63 -
.../models/salesforce_source_py3.py | 63 -
.../datafactory/models/sap_bw_cube_dataset.py | 67 -
.../models/sap_bw_cube_dataset_py3.py | 67 -
.../models/sap_bw_linked_service.py | 88 -
.../models/sap_bw_linked_service_py3.py | 88 -
.../mgmt/datafactory/models/sap_bw_source.py | 57 -
.../datafactory/models/sap_bw_source_py3.py | 57 -
.../sap_cloud_for_customer_linked_service.py | 76 -
...p_cloud_for_customer_linked_service_py3.py | 76 -
...sap_cloud_for_customer_resource_dataset.py | 73 -
...cloud_for_customer_resource_dataset_py3.py | 73 -
.../models/sap_cloud_for_customer_sink.py | 67 -
.../models/sap_cloud_for_customer_sink_py3.py | 67 -
.../models/sap_cloud_for_customer_source.py | 57 -
.../sap_cloud_for_customer_source_py3.py | 57 -
.../models/sap_ecc_linked_service.py | 76 -
.../models/sap_ecc_linked_service_py3.py | 76 -
.../models/sap_ecc_resource_dataset.py | 73 -
.../models/sap_ecc_resource_dataset_py3.py | 73 -
.../mgmt/datafactory/models/sap_ecc_source.py | 57 -
.../datafactory/models/sap_ecc_source_py3.py | 57 -
.../models/sap_hana_linked_service.py | 85 -
.../models/sap_hana_linked_service_py3.py | 85 -
.../datafactory/models/sap_hana_source.py | 62 -
.../datafactory/models/sap_hana_source_py3.py | 62 -
.../models/sap_hana_table_dataset.py | 77 -
.../models/sap_hana_table_dataset_py3.py | 77 -
.../models/sap_open_hub_linked_service.py | 99 -
.../models/sap_open_hub_linked_service_py3.py | 99 -
.../datafactory/models/sap_open_hub_source.py | 66 -
.../models/sap_open_hub_source_py3.py | 66 -
.../models/sap_open_hub_table_dataset.py | 87 -
.../models/sap_open_hub_table_dataset_py3.py | 87 -
.../models/sap_table_linked_service.py | 140 -
.../models/sap_table_linked_service_py3.py | 140 -
.../models/sap_table_partition_settings.py | 47 -
.../sap_table_partition_settings_py3.py | 47 -
.../models/sap_table_resource_dataset.py | 73 -
.../models/sap_table_resource_dataset_py3.py | 73 -
.../datafactory/models/sap_table_source.py | 100 -
.../models/sap_table_source_py3.py | 100 -
.../datafactory/models/schedule_trigger.py | 64 -
.../models/schedule_trigger_py3.py | 64 -
.../models/schedule_trigger_recurrence.py | 54 -
.../models/schedule_trigger_recurrence_py3.py | 54 -
.../mgmt/datafactory/models/script_action.py | 49 -
.../datafactory/models/script_action_py3.py | 49 -
.../mgmt/datafactory/models/secret_base.py | 41 -
.../datafactory/models/secret_base_py3.py | 41 -
.../mgmt/datafactory/models/secure_string.py | 40 -
.../datafactory/models/secure_string_py3.py | 40 -
...dency_tumbling_window_trigger_reference.py | 46 -
...y_tumbling_window_trigger_reference_py3.py | 46 -
.../models/self_hosted_integration_runtime.py | 46 -
.../self_hosted_integration_runtime_node.py | 139 -
...elf_hosted_integration_runtime_node_py3.py | 139 -
.../self_hosted_integration_runtime_py3.py | 46 -
.../self_hosted_integration_runtime_status.py | 146 -
...f_hosted_integration_runtime_status_py3.py | 146 -
.../models/service_now_linked_service.py | 106 -
.../models/service_now_linked_service_py3.py | 106 -
.../models/service_now_object_dataset.py | 72 -
.../models/service_now_object_dataset_py3.py | 72 -
.../datafactory/models/service_now_source.py | 57 -
.../models/service_now_source_py3.py | 57 -
.../models/set_variable_activity.py | 59 -
.../models/set_variable_activity_py3.py | 59 -
.../mgmt/datafactory/models/sftp_location.py | 45 -
.../datafactory/models/sftp_location_py3.py | 45 -
.../datafactory/models/sftp_read_settings.py | 68 -
.../models/sftp_read_settings_py3.py | 68 -
.../models/sftp_server_linked_service.py | 119 -
.../models/sftp_server_linked_service_py3.py | 119 -
.../models/shopify_linked_service.py | 86 -
.../models/shopify_linked_service_py3.py | 86 -
.../models/shopify_object_dataset.py | 72 -
.../models/shopify_object_dataset_py3.py | 72 -
.../mgmt/datafactory/models/shopify_source.py | 57 -
.../datafactory/models/shopify_source_py3.py | 57 -
.../models/spark_linked_service.py | 131 -
.../models/spark_linked_service_py3.py | 131 -
.../models/spark_object_dataset.py | 82 -
.../models/spark_object_dataset_py3.py | 82 -
.../mgmt/datafactory/models/spark_source.py | 57 -
.../datafactory/models/spark_source_py3.py | 57 -
.../mgmt/datafactory/models/sql_dw_sink.py | 77 -
.../datafactory/models/sql_dw_sink_py3.py | 77 -
.../mgmt/datafactory/models/sql_dw_source.py | 70 -
.../datafactory/models/sql_dw_source_py3.py | 70 -
.../mgmt/datafactory/models/sql_mi_sink.py | 87 -
.../datafactory/models/sql_mi_sink_py3.py | 87 -
.../mgmt/datafactory/models/sql_mi_source.py | 73 -
.../datafactory/models/sql_mi_source_py3.py | 73 -
.../models/sql_server_linked_service.py | 74 -
.../models/sql_server_linked_service_py3.py | 74 -
.../datafactory/models/sql_server_sink.py | 87 -
.../datafactory/models/sql_server_sink_py3.py | 87 -
.../datafactory/models/sql_server_source.py | 73 -
.../models/sql_server_source_py3.py | 73 -
.../sql_server_stored_procedure_activity.py | 70 -
...ql_server_stored_procedure_activity_py3.py | 70 -
.../models/sql_server_table_dataset.py | 82 -
.../models/sql_server_table_dataset_py3.py | 82 -
.../azure/mgmt/datafactory/models/sql_sink.py | 87 -
.../mgmt/datafactory/models/sql_sink_py3.py | 87 -
.../mgmt/datafactory/models/sql_source.py | 69 -
.../mgmt/datafactory/models/sql_source_py3.py | 69 -
.../models/square_linked_service.py | 98 -
.../models/square_linked_service_py3.py | 98 -
.../models/square_object_dataset.py | 72 -
.../models/square_object_dataset_py3.py | 72 -
.../mgmt/datafactory/models/square_source.py | 57 -
.../datafactory/models/square_source_py3.py | 57 -
.../models/ssis_access_credential.py | 44 -
.../models/ssis_access_credential_py3.py | 44 -
.../datafactory/models/ssis_environment.py | 51 -
.../models/ssis_environment_py3.py | 51 -
.../models/ssis_environment_reference.py | 40 -
.../models/ssis_environment_reference_py3.py | 40 -
.../models/ssis_execution_credential.py | 44 -
.../models/ssis_execution_credential_py3.py | 44 -
.../models/ssis_execution_parameter.py | 35 -
.../models/ssis_execution_parameter_py3.py | 35 -
.../mgmt/datafactory/models/ssis_folder.py | 43 -
.../datafactory/models/ssis_folder_py3.py | 43 -
.../datafactory/models/ssis_log_location.py | 57 -
.../models/ssis_log_location_py3.py | 57 -
.../models/ssis_object_metadata.py | 53 -
.../ssis_object_metadata_list_response.py | 33 -
.../ssis_object_metadata_list_response_py3.py | 33 -
.../models/ssis_object_metadata_py3.py | 53 -
.../ssis_object_metadata_status_response.py | 40 -
...sis_object_metadata_status_response_py3.py | 40 -
.../mgmt/datafactory/models/ssis_package.py | 59 -
.../models/ssis_package_location.py | 54 -
.../models/ssis_package_location_py3.py | 54 -
.../datafactory/models/ssis_package_py3.py | 59 -
.../mgmt/datafactory/models/ssis_parameter.py | 72 -
.../datafactory/models/ssis_parameter_py3.py | 72 -
.../mgmt/datafactory/models/ssis_project.py | 60 -
.../datafactory/models/ssis_project_py3.py | 60 -
.../models/ssis_property_override.py | 40 -
.../models/ssis_property_override_py3.py | 40 -
.../mgmt/datafactory/models/ssis_variable.py | 52 -
.../datafactory/models/ssis_variable_py3.py | 52 -
.../datafactory/models/staging_settings.py | 51 -
.../models/staging_settings_py3.py | 51 -
.../datafactory/models/store_read_settings.py | 45 -
.../models/store_read_settings_py3.py | 45 -
.../models/store_write_settings.py | 49 -
.../models/store_write_settings_py3.py | 49 -
.../models/stored_procedure_parameter.py | 35 -
.../models/stored_procedure_parameter_py3.py | 35 -
.../mgmt/datafactory/models/sub_resource.py | 50 -
.../datafactory/models/sub_resource_py3.py | 50 -
.../models/sybase_linked_service.py | 91 -
.../models/sybase_linked_service_py3.py | 91 -
.../mgmt/datafactory/models/sybase_source.py | 57 -
.../datafactory/models/sybase_source_py3.py | 57 -
.../models/sybase_table_dataset.py | 72 -
.../models/sybase_table_dataset_py3.py | 72 -
.../models/teradata_linked_service.py | 84 -
.../models/teradata_linked_service_py3.py | 84 -
.../models/teradata_partition_settings.py | 42 -
.../models/teradata_partition_settings_py3.py | 42 -
.../datafactory/models/teradata_source.py | 70 -
.../datafactory/models/teradata_source_py3.py | 70 -
.../models/teradata_table_dataset.py | 77 -
.../models/teradata_table_dataset_py3.py | 77 -
.../mgmt/datafactory/models/text_format.py | 99 -
.../datafactory/models/text_format_py3.py | 99 -
.../azure/mgmt/datafactory/models/trigger.py | 68 -
.../models/trigger_dependency_reference.py | 46 -
.../trigger_dependency_reference_py3.py | 46 -
.../models/trigger_pipeline_reference.py | 32 -
.../models/trigger_pipeline_reference_py3.py | 32 -
.../mgmt/datafactory/models/trigger_py3.py | 68 -
.../datafactory/models/trigger_reference.py | 44 -
.../models/trigger_reference_py3.py | 44 -
.../datafactory/models/trigger_resource.py | 53 -
.../models/trigger_resource_paged.py | 27 -
.../models/trigger_resource_py3.py | 53 -
.../mgmt/datafactory/models/trigger_run.py | 78 -
.../datafactory/models/trigger_run_py3.py | 78 -
.../models/trigger_runs_query_response.py | 39 -
.../models/trigger_runs_query_response_py3.py | 39 -
.../models/tumbling_window_trigger.py | 112 -
...ing_window_trigger_dependency_reference.py | 50 -
...window_trigger_dependency_reference_py3.py | 50 -
.../models/tumbling_window_trigger_py3.py | 112 -
.../mgmt/datafactory/models/until_activity.py | 72 -
.../datafactory/models/until_activity_py3.py | 72 -
...update_integration_runtime_node_request.py | 34 -
...te_integration_runtime_node_request_py3.py | 34 -
.../update_integration_runtime_request.py | 38 -
.../update_integration_runtime_request_py3.py | 38 -
.../datafactory/models/user_access_policy.py | 51 -
.../models/user_access_policy_py3.py | 51 -
.../mgmt/datafactory/models/user_property.py | 40 -
.../datafactory/models/user_property_py3.py | 40 -
.../datafactory/models/validation_activity.py | 81 -
.../models/validation_activity_py3.py | 81 -
.../models/variable_specification.py | 39 -
.../models/variable_specification_py3.py | 39 -
.../models/vertica_linked_service.py | 69 -
.../models/vertica_linked_service_py3.py | 69 -
.../mgmt/datafactory/models/vertica_source.py | 57 -
.../datafactory/models/vertica_source_py3.py | 57 -
.../models/vertica_table_dataset.py | 82 -
.../models/vertica_table_dataset_py3.py | 82 -
.../mgmt/datafactory/models/wait_activity.py | 56 -
.../datafactory/models/wait_activity_py3.py | 56 -
.../mgmt/datafactory/models/web_activity.py | 98 -
.../models/web_activity_authentication.py | 53 -
.../models/web_activity_authentication_py3.py | 53 -
.../datafactory/models/web_activity_py3.py | 98 -
.../models/web_anonymous_authentication.py | 41 -
.../web_anonymous_authentication_py3.py | 41 -
.../models/web_basic_authentication.py | 52 -
.../models/web_basic_authentication_py3.py | 52 -
.../web_client_certificate_authentication.py | 53 -
...b_client_certificate_authentication_py3.py | 53 -
.../datafactory/models/web_hook_activity.py | 92 -
.../models/web_hook_activity_py3.py | 92 -
.../datafactory/models/web_linked_service.py | 59 -
.../models/web_linked_service_py3.py | 59 -
.../web_linked_service_type_properties.py | 50 -
.../web_linked_service_type_properties_py3.py | 50 -
.../mgmt/datafactory/models/web_source.py | 52 -
.../mgmt/datafactory/models/web_source_py3.py | 52 -
.../datafactory/models/web_table_dataset.py | 78 -
.../models/web_table_dataset_py3.py | 78 -
.../datafactory/models/xero_linked_service.py | 93 -
.../models/xero_linked_service_py3.py | 93 -
.../datafactory/models/xero_object_dataset.py | 72 -
.../models/xero_object_dataset_py3.py | 72 -
.../mgmt/datafactory/models/xero_source.py | 57 -
.../datafactory/models/xero_source_py3.py | 57 -
.../datafactory/models/zoho_linked_service.py | 85 -
.../models/zoho_linked_service_py3.py | 85 -
.../datafactory/models/zoho_object_dataset.py | 72 -
.../models/zoho_object_dataset_py3.py | 72 -
.../mgmt/datafactory/models/zoho_source.py | 57 -
.../datafactory/models/zoho_source_py3.py | 57 -
.../mgmt/datafactory/operations/__init__.py | 28 +-
...ations.py => _activity_runs_operations.py} | 3 +-
..._operations.py => _datasets_operations.py} | 18 +-
...ons.py => _exposure_control_operations.py} | 4 +-
...operations.py => _factories_operations.py} | 36 +-
... _integration_runtime_nodes_operations.py} | 5 +-
...ion_runtime_object_metadata_operations.py} | 3 +-
...py => _integration_runtimes_operations.py} | 25 +-
...ions.py => _linked_services_operations.py} | 18 +-
.../{operations.py => _operations.py} | 16 +-
...ations.py => _pipeline_runs_operations.py} | 4 +-
...operations.py => _pipelines_operations.py} | 19 +-
...tions.py => _rerun_triggers_operations.py} | 17 +-
...rations.py => _trigger_runs_operations.py} | 3 +-
..._operations.py => _triggers_operations.py} | 18 +-
1045 files changed, 59258 insertions(+), 70933 deletions(-)
create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_configuration.py
rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/{data_factory_management_client.py => _data_factory_management_client.py} (70%)
rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{data_factory_management_client_enums.py => _data_factory_management_client_enums.py} (100%)
create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py
create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py
create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_paged_models.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource_paged.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset.py
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request_py3.py delete mode 100644 
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_paged.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py delete mode 100644 
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source_py3.py delete mode 100644 
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity_py3.py delete mode 100644 
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source_py3.py delete mode 
100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data_py3.py delete mode 100644 
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource_paged.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset_py3.py delete mode 100644 
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource_paged.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source.py delete mode 100644 
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source_py3.py delete mode 100644 
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py delete mode 100644 
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension_py3.py delete mode 100644 
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_paged.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py 
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource_paged.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service.py delete mode 100644 
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource.py delete mode 100644 
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_paged.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service_py3.py delete mode 100644 
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource_paged.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source_py3.py
 rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/{activity_runs_operations.py => _activity_runs_operations.py} (97%)
 rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/{datasets_operations.py => _datasets_operations.py} (97%)
 rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/{exposure_control_operations.py => _exposure_control_operations.py} (98%)
 rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/{factories_operations.py => _factories_operations.py} (98%)
 rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/{integration_runtime_nodes_operations.py => _integration_runtime_nodes_operations.py} (99%)
 rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/{integration_runtime_object_metadata_operations.py => _integration_runtime_object_metadata_operations.py} (98%)
 rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/{integration_runtimes_operations.py => _integration_runtimes_operations.py} (99%)
 rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/{linked_services_operations.py => _linked_services_operations.py} (97%)
 rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/{operations.py => _operations.py} (90%)
 rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/{pipeline_runs_operations.py => _pipeline_runs_operations.py} (98%)
 rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/{pipelines_operations.py => _pipelines_operations.py} (98%)
 rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/{rerun_triggers_operations.py => _rerun_triggers_operations.py} (98%)
 rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/{trigger_runs_operations.py => _trigger_runs_operations.py} (98%)
 rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/{triggers_operations.py => _triggers_operations.py} (98%)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/__init__.py
index db14f5d7f4f6..5ab3f5226cb2 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/__init__.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/__init__.py
@@ -9,10 +9,11 @@
 # regenerated.
 # --------------------------------------------------------------------------
 
-from .data_factory_management_client import DataFactoryManagementClient
-from .version import VERSION
+from ._configuration import DataFactoryManagementClientConfiguration
+from ._data_factory_management_client import DataFactoryManagementClient
+__all__ = ['DataFactoryManagementClient', 'DataFactoryManagementClientConfiguration']
 
-__all__ = ['DataFactoryManagementClient']
+from .version import VERSION
 
 __version__ = VERSION
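The reworked package __init__ above keeps the public client import stable while newly re-exporting the configuration class. A minimal sketch of the resulting import surface (nothing below is part of the patch itself):

    # Both names are re-exported by azure/mgmt/datafactory/__init__.py
    # after this change; only the import path matters here.
    from azure.mgmt.datafactory import (
        DataFactoryManagementClient,
        DataFactoryManagementClientConfiguration,
    )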
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_configuration.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_configuration.py
new file mode 100644
index 000000000000..80666808edb1
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_configuration.py
@@ -0,0 +1,48 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+from msrestazure import AzureConfiguration
+
+from .version import VERSION
+
+
+class DataFactoryManagementClientConfiguration(AzureConfiguration):
+    """Configuration for DataFactoryManagementClient
+    Note that all parameters used to create this instance are saved as instance
+    attributes.
+
+    :param credentials: Credentials needed for the client to connect to Azure.
+    :type credentials: :mod:`A msrestazure Credentials
+     object<msrestazure.azure_active_directory>`
+    :param subscription_id: The subscription identifier.
+    :type subscription_id: str
+    :param str base_url: Service URL
+    """
+
+    def __init__(
+            self, credentials, subscription_id, base_url=None):
+
+        if credentials is None:
+            raise ValueError("Parameter 'credentials' must not be None.")
+        if subscription_id is None:
+            raise ValueError("Parameter 'subscription_id' must not be None.")
+        if not base_url:
+            base_url = 'https://management.azure.com'
+
+        super(DataFactoryManagementClientConfiguration, self).__init__(base_url)
+
+        # Starting Autorest.Python 4.0.64, make connection pool activated by default
+        self.keep_alive = True
+
+        self.add_user_agent('azure-mgmt-datafactory/{}'.format(VERSION))
+        self.add_user_agent('Azure-SDK-For-Python')
+
+        self.credentials = credentials
+        self.subscription_id = subscription_id
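The new _configuration.py turns connection pooling on by default (keep_alive = True) and defaults base_url to the public Azure management endpoint. A sketch of constructing it directly, assuming a ServicePrincipalCredentials object from msrestazure and placeholder IDs (none of which appear in the patch):

    from msrestazure.azure_active_directory import ServicePrincipalCredentials
    from azure.mgmt.datafactory import DataFactoryManagementClientConfiguration

    # Placeholder values; any msrestazure credentials object should work here.
    credentials = ServicePrincipalCredentials(
        client_id='<client-id>', secret='<secret>', tenant='<tenant-id>')
    config = DataFactoryManagementClientConfiguration(credentials, '<subscription-id>')
    assert config.keep_alive                                  # pooling now on by default
    assert config.base_url == 'https://management.azure.com'  # default endpoint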
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/data_factory_management_client.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py
similarity index 70%
rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/data_factory_management_client.py
rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py
index bb8a2a22fd77..14cee0777347 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/data_factory_management_client.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py
@@ -11,55 +11,23 @@
 from msrest.service_client import SDKClient
 from msrest import Serializer, Deserializer
-from msrestazure import AzureConfiguration
 
-from .version import VERSION
-from .operations.operations import Operations
-from .operations.factories_operations import FactoriesOperations
-from .operations.exposure_control_operations import ExposureControlOperations
-from .operations.integration_runtimes_operations import IntegrationRuntimesOperations
-from .operations.integration_runtime_object_metadata_operations import IntegrationRuntimeObjectMetadataOperations
-from .operations.integration_runtime_nodes_operations import IntegrationRuntimeNodesOperations
-from .operations.linked_services_operations import LinkedServicesOperations
-from .operations.datasets_operations import DatasetsOperations
-from .operations.pipelines_operations import PipelinesOperations
-from .operations.pipeline_runs_operations import PipelineRunsOperations
-from .operations.activity_runs_operations import ActivityRunsOperations
-from .operations.triggers_operations import TriggersOperations
-from .operations.trigger_runs_operations import TriggerRunsOperations
-from .operations.rerun_triggers_operations import RerunTriggersOperations
-from . import models
-
-
-class DataFactoryManagementClientConfiguration(AzureConfiguration):
-    """Configuration for DataFactoryManagementClient
-    Note that all parameters used to create this instance are saved as instance
-    attributes.
-
-    :param credentials: Credentials needed for the client to connect to Azure.
-    :type credentials: :mod:`A msrestazure Credentials
-     object<msrestazure.azure_active_directory>`
-    :param subscription_id: The subscription identifier.
-    :type subscription_id: str
-    :param str base_url: Service URL
-    """
-
-    def __init__(
-            self, credentials, subscription_id, base_url=None):
-        if credentials is None:
-            raise ValueError("Parameter 'credentials' must not be None.")
-        if subscription_id is None:
-            raise ValueError("Parameter 'subscription_id' must not be None.")
-        if not base_url:
-            base_url = 'https://management.azure.com'
-
-        super(DataFactoryManagementClientConfiguration, self).__init__(base_url)
-
-        self.add_user_agent('azure-mgmt-datafactory/{}'.format(VERSION))
-        self.add_user_agent('Azure-SDK-For-Python')
-
-        self.credentials = credentials
-        self.subscription_id = subscription_id
+from ._configuration import DataFactoryManagementClientConfiguration
+from .operations import Operations
+from .operations import FactoriesOperations
+from .operations import ExposureControlOperations
+from .operations import IntegrationRuntimesOperations
+from .operations import IntegrationRuntimeObjectMetadataOperations
+from .operations import IntegrationRuntimeNodesOperations
+from .operations import LinkedServicesOperations
+from .operations import DatasetsOperations
+from .operations import PipelinesOperations
+from .operations import PipelineRunsOperations
+from .operations import ActivityRunsOperations
+from .operations import TriggersOperations
+from .operations import TriggerRunsOperations
+from .operations import RerunTriggersOperations
+from . import models
 
 
 class DataFactoryManagementClient(SDKClient):
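The client module rename is private; the supported entry point and constructor are unchanged, and the operation groups now resolve through the re-exports in .operations. A sketch reusing the placeholder credentials above:

    from azure.mgmt.datafactory import DataFactoryManagementClient

    client = DataFactoryManagementClient(credentials, '<subscription-id>')
    # e.g. the factories group, now backed by _factories_operations.py:
    for factory in client.factories.list():
        print(factory.name)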
DatasetFolder - from .dataset_py3 import Dataset - from .dataset_resource_py3 import DatasetResource - from .activity_dependency_py3 import ActivityDependency - from .user_property_py3 import UserProperty - from .activity_py3 import Activity - from .variable_specification_py3 import VariableSpecification - from .pipeline_folder_py3 import PipelineFolder - from .pipeline_resource_py3 import PipelineResource - from .trigger_py3 import Trigger - from .trigger_resource_py3 import TriggerResource - from .create_run_response_py3 import CreateRunResponse - from .factory_vsts_configuration_py3 import FactoryVSTSConfiguration - from .factory_git_hub_configuration_py3 import FactoryGitHubConfiguration - from .factory_repo_update_py3 import FactoryRepoUpdate - from .git_hub_access_token_request_py3 import GitHubAccessTokenRequest - from .git_hub_access_token_response_py3 import GitHubAccessTokenResponse - from .user_access_policy_py3 import UserAccessPolicy - from .access_policy_response_py3 import AccessPolicyResponse - from .pipeline_reference_py3 import PipelineReference - from .trigger_pipeline_reference_py3 import TriggerPipelineReference - from .factory_update_parameters_py3 import FactoryUpdateParameters - from .dataset_reference_py3 import DatasetReference - from .run_query_filter_py3 import RunQueryFilter - from .run_query_order_by_py3 import RunQueryOrderBy - from .run_filter_parameters_py3 import RunFilterParameters - from .pipeline_run_invoked_by_py3 import PipelineRunInvokedBy - from .pipeline_run_py3 import PipelineRun - from .pipeline_runs_query_response_py3 import PipelineRunsQueryResponse - from .activity_run_py3 import ActivityRun - from .activity_runs_query_response_py3 import ActivityRunsQueryResponse - from .trigger_run_py3 import TriggerRun - from .trigger_runs_query_response_py3 import TriggerRunsQueryResponse - from .rerun_tumbling_window_trigger_action_parameters_py3 import RerunTumblingWindowTriggerActionParameters - from .rerun_tumbling_window_trigger_py3 import RerunTumblingWindowTrigger - from .rerun_trigger_resource_py3 import RerunTriggerResource - from .operation_display_py3 import OperationDisplay - from .operation_log_specification_py3 import OperationLogSpecification - from .operation_metric_availability_py3 import OperationMetricAvailability - from .operation_metric_dimension_py3 import OperationMetricDimension - from .operation_metric_specification_py3 import OperationMetricSpecification - from .operation_service_specification_py3 import OperationServiceSpecification - from .operation_py3 import Operation - from .get_ssis_object_metadata_request_py3 import GetSsisObjectMetadataRequest - from .ssis_object_metadata_status_response_py3 import SsisObjectMetadataStatusResponse - from .exposure_control_request_py3 import ExposureControlRequest - from .exposure_control_response_py3 import ExposureControlResponse - from .self_dependency_tumbling_window_trigger_reference_py3 import SelfDependencyTumblingWindowTriggerReference - from .trigger_reference_py3 import TriggerReference - from .tumbling_window_trigger_dependency_reference_py3 import TumblingWindowTriggerDependencyReference - from .trigger_dependency_reference_py3 import TriggerDependencyReference - from .dependency_reference_py3 import DependencyReference - from .retry_policy_py3 import RetryPolicy - from .tumbling_window_trigger_py3 import TumblingWindowTrigger - from .blob_events_trigger_py3 import BlobEventsTrigger - from .blob_trigger_py3 import BlobTrigger - from .recurrence_schedule_occurrence_py3 import 
RecurrenceScheduleOccurrence - from .recurrence_schedule_py3 import RecurrenceSchedule - from .schedule_trigger_recurrence_py3 import ScheduleTriggerRecurrence - from .schedule_trigger_py3 import ScheduleTrigger - from .multiple_pipeline_trigger_py3 import MultiplePipelineTrigger - from .azure_function_linked_service_py3 import AzureFunctionLinkedService - from .azure_data_explorer_linked_service_py3 import AzureDataExplorerLinkedService - from .sap_table_linked_service_py3 import SapTableLinkedService - from .google_ad_words_linked_service_py3 import GoogleAdWordsLinkedService - from .oracle_service_cloud_linked_service_py3 import OracleServiceCloudLinkedService - from .dynamics_ax_linked_service_py3 import DynamicsAXLinkedService - from .responsys_linked_service_py3 import ResponsysLinkedService - from .azure_databricks_linked_service_py3 import AzureDatabricksLinkedService - from .azure_data_lake_analytics_linked_service_py3 import AzureDataLakeAnalyticsLinkedService - from .script_action_py3 import ScriptAction - from .hd_insight_on_demand_linked_service_py3 import HDInsightOnDemandLinkedService - from .salesforce_marketing_cloud_linked_service_py3 import SalesforceMarketingCloudLinkedService - from .netezza_linked_service_py3 import NetezzaLinkedService - from .vertica_linked_service_py3 import VerticaLinkedService - from .zoho_linked_service_py3 import ZohoLinkedService - from .xero_linked_service_py3 import XeroLinkedService - from .square_linked_service_py3 import SquareLinkedService - from .spark_linked_service_py3 import SparkLinkedService - from .shopify_linked_service_py3 import ShopifyLinkedService - from .service_now_linked_service_py3 import ServiceNowLinkedService - from .quick_books_linked_service_py3 import QuickBooksLinkedService - from .presto_linked_service_py3 import PrestoLinkedService - from .phoenix_linked_service_py3 import PhoenixLinkedService - from .paypal_linked_service_py3 import PaypalLinkedService - from .marketo_linked_service_py3 import MarketoLinkedService - from .azure_maria_db_linked_service_py3 import AzureMariaDBLinkedService - from .maria_db_linked_service_py3 import MariaDBLinkedService - from .magento_linked_service_py3 import MagentoLinkedService - from .jira_linked_service_py3 import JiraLinkedService - from .impala_linked_service_py3 import ImpalaLinkedService - from .hubspot_linked_service_py3 import HubspotLinkedService - from .hive_linked_service_py3 import HiveLinkedService - from .hbase_linked_service_py3 import HBaseLinkedService - from .greenplum_linked_service_py3 import GreenplumLinkedService - from .google_big_query_linked_service_py3 import GoogleBigQueryLinkedService - from .eloqua_linked_service_py3 import EloquaLinkedService - from .drill_linked_service_py3 import DrillLinkedService - from .couchbase_linked_service_py3 import CouchbaseLinkedService - from .concur_linked_service_py3 import ConcurLinkedService - from .azure_postgre_sql_linked_service_py3 import AzurePostgreSqlLinkedService - from .amazon_mws_linked_service_py3 import AmazonMWSLinkedService - from .sap_hana_linked_service_py3 import SapHanaLinkedService - from .sap_bw_linked_service_py3 import SapBWLinkedService - from .sftp_server_linked_service_py3 import SftpServerLinkedService - from .ftp_server_linked_service_py3 import FtpServerLinkedService - from .http_linked_service_py3 import HttpLinkedService - from .azure_search_linked_service_py3 import AzureSearchLinkedService - from .custom_data_source_linked_service_py3 import CustomDataSourceLinkedService - from 
.amazon_redshift_linked_service_py3 import AmazonRedshiftLinkedService - from .amazon_s3_linked_service_py3 import AmazonS3LinkedService - from .rest_service_linked_service_py3 import RestServiceLinkedService - from .sap_open_hub_linked_service_py3 import SapOpenHubLinkedService - from .sap_ecc_linked_service_py3 import SapEccLinkedService - from .sap_cloud_for_customer_linked_service_py3 import SapCloudForCustomerLinkedService - from .salesforce_service_cloud_linked_service_py3 import SalesforceServiceCloudLinkedService - from .salesforce_linked_service_py3 import SalesforceLinkedService - from .office365_linked_service_py3 import Office365LinkedService - from .azure_blob_fs_linked_service_py3 import AzureBlobFSLinkedService - from .azure_data_lake_store_linked_service_py3 import AzureDataLakeStoreLinkedService - from .cosmos_db_mongo_db_api_linked_service_py3 import CosmosDbMongoDbApiLinkedService - from .mongo_db_v2_linked_service_py3 import MongoDbV2LinkedService - from .mongo_db_linked_service_py3 import MongoDbLinkedService - from .cassandra_linked_service_py3 import CassandraLinkedService - from .web_client_certificate_authentication_py3 import WebClientCertificateAuthentication - from .web_basic_authentication_py3 import WebBasicAuthentication - from .web_anonymous_authentication_py3 import WebAnonymousAuthentication - from .web_linked_service_type_properties_py3 import WebLinkedServiceTypeProperties - from .web_linked_service_py3 import WebLinkedService - from .odata_linked_service_py3 import ODataLinkedService - from .hdfs_linked_service_py3 import HdfsLinkedService - from .microsoft_access_linked_service_py3 import MicrosoftAccessLinkedService - from .informix_linked_service_py3 import InformixLinkedService - from .odbc_linked_service_py3 import OdbcLinkedService - from .azure_ml_linked_service_py3 import AzureMLLinkedService - from .teradata_linked_service_py3 import TeradataLinkedService - from .db2_linked_service_py3 import Db2LinkedService - from .sybase_linked_service_py3 import SybaseLinkedService - from .postgre_sql_linked_service_py3 import PostgreSqlLinkedService - from .my_sql_linked_service_py3 import MySqlLinkedService - from .azure_my_sql_linked_service_py3 import AzureMySqlLinkedService - from .oracle_linked_service_py3 import OracleLinkedService - from .file_server_linked_service_py3 import FileServerLinkedService - from .hd_insight_linked_service_py3 import HDInsightLinkedService - from .common_data_service_for_apps_linked_service_py3 import CommonDataServiceForAppsLinkedService - from .dynamics_crm_linked_service_py3 import DynamicsCrmLinkedService - from .dynamics_linked_service_py3 import DynamicsLinkedService - from .cosmos_db_linked_service_py3 import CosmosDbLinkedService - from .azure_key_vault_linked_service_py3 import AzureKeyVaultLinkedService - from .azure_batch_linked_service_py3 import AzureBatchLinkedService - from .azure_sql_mi_linked_service_py3 import AzureSqlMILinkedService - from .azure_sql_database_linked_service_py3 import AzureSqlDatabaseLinkedService - from .sql_server_linked_service_py3 import SqlServerLinkedService - from .azure_sql_dw_linked_service_py3 import AzureSqlDWLinkedService - from .azure_table_storage_linked_service_py3 import AzureTableStorageLinkedService - from .azure_blob_storage_linked_service_py3 import AzureBlobStorageLinkedService - from .azure_storage_linked_service_py3 import AzureStorageLinkedService - from .google_ad_words_object_dataset_py3 import GoogleAdWordsObjectDataset - from 
.azure_data_explorer_table_dataset_py3 import AzureDataExplorerTableDataset - from .oracle_service_cloud_object_dataset_py3 import OracleServiceCloudObjectDataset - from .dynamics_ax_resource_dataset_py3 import DynamicsAXResourceDataset - from .responsys_object_dataset_py3 import ResponsysObjectDataset - from .salesforce_marketing_cloud_object_dataset_py3 import SalesforceMarketingCloudObjectDataset - from .vertica_table_dataset_py3 import VerticaTableDataset - from .netezza_table_dataset_py3 import NetezzaTableDataset - from .zoho_object_dataset_py3 import ZohoObjectDataset - from .xero_object_dataset_py3 import XeroObjectDataset - from .square_object_dataset_py3 import SquareObjectDataset - from .spark_object_dataset_py3 import SparkObjectDataset - from .shopify_object_dataset_py3 import ShopifyObjectDataset - from .service_now_object_dataset_py3 import ServiceNowObjectDataset - from .quick_books_object_dataset_py3 import QuickBooksObjectDataset - from .presto_object_dataset_py3 import PrestoObjectDataset - from .phoenix_object_dataset_py3 import PhoenixObjectDataset - from .paypal_object_dataset_py3 import PaypalObjectDataset - from .marketo_object_dataset_py3 import MarketoObjectDataset - from .azure_maria_db_table_dataset_py3 import AzureMariaDBTableDataset - from .maria_db_table_dataset_py3 import MariaDBTableDataset - from .magento_object_dataset_py3 import MagentoObjectDataset - from .jira_object_dataset_py3 import JiraObjectDataset - from .impala_object_dataset_py3 import ImpalaObjectDataset - from .hubspot_object_dataset_py3 import HubspotObjectDataset - from .hive_object_dataset_py3 import HiveObjectDataset - from .hbase_object_dataset_py3 import HBaseObjectDataset - from .greenplum_table_dataset_py3 import GreenplumTableDataset - from .google_big_query_object_dataset_py3 import GoogleBigQueryObjectDataset - from .eloqua_object_dataset_py3 import EloquaObjectDataset - from .drill_table_dataset_py3 import DrillTableDataset - from .couchbase_table_dataset_py3 import CouchbaseTableDataset - from .concur_object_dataset_py3 import ConcurObjectDataset - from .azure_postgre_sql_table_dataset_py3 import AzurePostgreSqlTableDataset - from .amazon_mws_object_dataset_py3 import AmazonMWSObjectDataset - from .dataset_zip_deflate_compression_py3 import DatasetZipDeflateCompression - from .dataset_deflate_compression_py3 import DatasetDeflateCompression - from .dataset_gzip_compression_py3 import DatasetGZipCompression - from .dataset_bzip2_compression_py3 import DatasetBZip2Compression - from .dataset_compression_py3 import DatasetCompression - from .parquet_format_py3 import ParquetFormat - from .orc_format_py3 import OrcFormat - from .avro_format_py3 import AvroFormat - from .json_format_py3 import JsonFormat - from .text_format_py3 import TextFormat - from .dataset_storage_format_py3 import DatasetStorageFormat - from .http_dataset_py3 import HttpDataset - from .azure_search_index_dataset_py3 import AzureSearchIndexDataset - from .web_table_dataset_py3 import WebTableDataset - from .sap_table_resource_dataset_py3 import SapTableResourceDataset - from .rest_resource_dataset_py3 import RestResourceDataset - from .sql_server_table_dataset_py3 import SqlServerTableDataset - from .sap_open_hub_table_dataset_py3 import SapOpenHubTableDataset - from .sap_hana_table_dataset_py3 import SapHanaTableDataset - from .sap_ecc_resource_dataset_py3 import SapEccResourceDataset - from .sap_cloud_for_customer_resource_dataset_py3 import SapCloudForCustomerResourceDataset - from .sap_bw_cube_dataset_py3 
import SapBwCubeDataset - from .sybase_table_dataset_py3 import SybaseTableDataset - from .salesforce_service_cloud_object_dataset_py3 import SalesforceServiceCloudObjectDataset - from .salesforce_object_dataset_py3 import SalesforceObjectDataset - from .microsoft_access_table_dataset_py3 import MicrosoftAccessTableDataset - from .postgre_sql_table_dataset_py3 import PostgreSqlTableDataset - from .my_sql_table_dataset_py3 import MySqlTableDataset - from .odbc_table_dataset_py3 import OdbcTableDataset - from .informix_table_dataset_py3 import InformixTableDataset - from .relational_table_dataset_py3 import RelationalTableDataset - from .azure_my_sql_table_dataset_py3 import AzureMySqlTableDataset - from .teradata_table_dataset_py3 import TeradataTableDataset - from .oracle_table_dataset_py3 import OracleTableDataset - from .odata_resource_dataset_py3 import ODataResourceDataset - from .cosmos_db_mongo_db_api_collection_dataset_py3 import CosmosDbMongoDbApiCollectionDataset - from .mongo_db_v2_collection_dataset_py3 import MongoDbV2CollectionDataset - from .mongo_db_collection_dataset_py3 import MongoDbCollectionDataset - from .file_share_dataset_py3 import FileShareDataset - from .office365_dataset_py3 import Office365Dataset - from .azure_blob_fs_dataset_py3 import AzureBlobFSDataset - from .azure_data_lake_store_dataset_py3 import AzureDataLakeStoreDataset - from .common_data_service_for_apps_entity_dataset_py3 import CommonDataServiceForAppsEntityDataset - from .dynamics_crm_entity_dataset_py3 import DynamicsCrmEntityDataset - from .dynamics_entity_dataset_py3 import DynamicsEntityDataset - from .document_db_collection_dataset_py3 import DocumentDbCollectionDataset - from .custom_dataset_py3 import CustomDataset - from .cassandra_table_dataset_py3 import CassandraTableDataset - from .azure_sql_dw_table_dataset_py3 import AzureSqlDWTableDataset - from .azure_sql_mi_table_dataset_py3 import AzureSqlMITableDataset - from .azure_sql_table_dataset_py3 import AzureSqlTableDataset - from .azure_table_dataset_py3 import AzureTableDataset - from .azure_blob_dataset_py3 import AzureBlobDataset - from .hdfs_location_py3 import HdfsLocation - from .http_server_location_py3 import HttpServerLocation - from .sftp_location_py3 import SftpLocation - from .ftp_server_location_py3 import FtpServerLocation - from .file_server_location_py3 import FileServerLocation - from .amazon_s3_location_py3 import AmazonS3Location - from .azure_data_lake_store_location_py3 import AzureDataLakeStoreLocation - from .azure_blob_fs_location_py3 import AzureBlobFSLocation - from .azure_blob_storage_location_py3 import AzureBlobStorageLocation - from .dataset_location_py3 import DatasetLocation - from .binary_dataset_py3 import BinaryDataset - from .delimited_text_dataset_py3 import DelimitedTextDataset - from .parquet_dataset_py3 import ParquetDataset - from .avro_dataset_py3 import AvroDataset - from .amazon_s3_dataset_py3 import AmazonS3Dataset - from .activity_policy_py3 import ActivityPolicy - from .azure_function_activity_py3 import AzureFunctionActivity - from .databricks_spark_python_activity_py3 import DatabricksSparkPythonActivity - from .databricks_spark_jar_activity_py3 import DatabricksSparkJarActivity - from .databricks_notebook_activity_py3 import DatabricksNotebookActivity - from .data_lake_analytics_usql_activity_py3 import DataLakeAnalyticsUSQLActivity - from .azure_ml_update_resource_activity_py3 import AzureMLUpdateResourceActivity - from .azure_ml_web_service_file_py3 import AzureMLWebServiceFile - from 
.azure_ml_batch_execution_activity_py3 import AzureMLBatchExecutionActivity - from .get_metadata_activity_py3 import GetMetadataActivity - from .web_activity_authentication_py3 import WebActivityAuthentication - from .web_activity_py3 import WebActivity - from .redshift_unload_settings_py3 import RedshiftUnloadSettings - from .amazon_redshift_source_py3 import AmazonRedshiftSource - from .google_ad_words_source_py3 import GoogleAdWordsSource - from .oracle_service_cloud_source_py3 import OracleServiceCloudSource - from .dynamics_ax_source_py3 import DynamicsAXSource - from .responsys_source_py3 import ResponsysSource - from .salesforce_marketing_cloud_source_py3 import SalesforceMarketingCloudSource - from .vertica_source_py3 import VerticaSource - from .netezza_partition_settings_py3 import NetezzaPartitionSettings - from .netezza_source_py3 import NetezzaSource - from .zoho_source_py3 import ZohoSource - from .xero_source_py3 import XeroSource - from .square_source_py3 import SquareSource - from .spark_source_py3 import SparkSource - from .shopify_source_py3 import ShopifySource - from .service_now_source_py3 import ServiceNowSource - from .quick_books_source_py3 import QuickBooksSource - from .presto_source_py3 import PrestoSource - from .phoenix_source_py3 import PhoenixSource - from .paypal_source_py3 import PaypalSource - from .marketo_source_py3 import MarketoSource - from .azure_maria_db_source_py3 import AzureMariaDBSource - from .maria_db_source_py3 import MariaDBSource - from .magento_source_py3 import MagentoSource - from .jira_source_py3 import JiraSource - from .impala_source_py3 import ImpalaSource - from .hubspot_source_py3 import HubspotSource - from .hive_source_py3 import HiveSource - from .hbase_source_py3 import HBaseSource - from .greenplum_source_py3 import GreenplumSource - from .google_big_query_source_py3 import GoogleBigQuerySource - from .eloqua_source_py3 import EloquaSource - from .drill_source_py3 import DrillSource - from .couchbase_source_py3 import CouchbaseSource - from .concur_source_py3 import ConcurSource - from .azure_postgre_sql_source_py3 import AzurePostgreSqlSource - from .amazon_mws_source_py3 import AmazonMWSSource - from .http_source_py3 import HttpSource - from .azure_blob_fs_source_py3 import AzureBlobFSSource - from .azure_data_lake_store_source_py3 import AzureDataLakeStoreSource - from .office365_source_py3 import Office365Source - from .mongo_db_cursor_methods_properties_py3 import MongoDbCursorMethodsProperties - from .cosmos_db_mongo_db_api_source_py3 import CosmosDbMongoDbApiSource - from .mongo_db_v2_source_py3 import MongoDbV2Source - from .mongo_db_source_py3 import MongoDbSource - from .cassandra_source_py3 import CassandraSource - from .web_source_py3 import WebSource - from .teradata_partition_settings_py3 import TeradataPartitionSettings - from .teradata_source_py3 import TeradataSource - from .oracle_partition_settings_py3 import OraclePartitionSettings - from .oracle_source_py3 import OracleSource - from .azure_data_explorer_source_py3 import AzureDataExplorerSource - from .azure_my_sql_source_py3 import AzureMySqlSource - from .distcp_settings_py3 import DistcpSettings - from .hdfs_source_py3 import HdfsSource - from .file_system_source_py3 import FileSystemSource - from .sql_dw_source_py3 import SqlDWSource - from .stored_procedure_parameter_py3 import StoredProcedureParameter - from .sql_mi_source_py3 import SqlMISource - from .azure_sql_source_py3 import AzureSqlSource - from .sql_server_source_py3 import SqlServerSource - 
from .sql_source_py3 import SqlSource - from .rest_source_py3 import RestSource - from .sap_table_partition_settings_py3 import SapTablePartitionSettings - from .sap_table_source_py3 import SapTableSource - from .sap_open_hub_source_py3 import SapOpenHubSource - from .sap_hana_source_py3 import SapHanaSource - from .sap_ecc_source_py3 import SapEccSource - from .sap_cloud_for_customer_source_py3 import SapCloudForCustomerSource - from .salesforce_service_cloud_source_py3 import SalesforceServiceCloudSource - from .salesforce_source_py3 import SalesforceSource - from .odata_source_py3 import ODataSource - from .sap_bw_source_py3 import SapBwSource - from .sybase_source_py3 import SybaseSource - from .postgre_sql_source_py3 import PostgreSqlSource - from .my_sql_source_py3 import MySqlSource - from .odbc_source_py3 import OdbcSource - from .db2_source_py3 import Db2Source - from .microsoft_access_source_py3 import MicrosoftAccessSource - from .informix_source_py3 import InformixSource - from .relational_source_py3 import RelationalSource - from .common_data_service_for_apps_source_py3 import CommonDataServiceForAppsSource - from .dynamics_crm_source_py3 import DynamicsCrmSource - from .dynamics_source_py3 import DynamicsSource - from .document_db_collection_source_py3 import DocumentDbCollectionSource - from .blob_source_py3 import BlobSource - from .azure_table_source_py3 import AzureTableSource - from .hdfs_read_settings_py3 import HdfsReadSettings - from .http_read_settings_py3 import HttpReadSettings - from .sftp_read_settings_py3 import SftpReadSettings - from .ftp_read_settings_py3 import FtpReadSettings - from .file_server_read_settings_py3 import FileServerReadSettings - from .amazon_s3_read_settings_py3 import AmazonS3ReadSettings - from .azure_data_lake_store_read_settings_py3 import AzureDataLakeStoreReadSettings - from .azure_blob_fs_read_settings_py3 import AzureBlobFSReadSettings - from .azure_blob_storage_read_settings_py3 import AzureBlobStorageReadSettings - from .store_read_settings_py3 import StoreReadSettings - from .binary_source_py3 import BinarySource - from .format_read_settings_py3 import FormatReadSettings - from .delimited_text_read_settings_py3 import DelimitedTextReadSettings - from .delimited_text_source_py3 import DelimitedTextSource - from .parquet_source_py3 import ParquetSource - from .avro_source_py3 import AvroSource - from .copy_source_py3 import CopySource - from .lookup_activity_py3 import LookupActivity - from .azure_data_explorer_command_activity_py3 import AzureDataExplorerCommandActivity - from .log_storage_settings_py3 import LogStorageSettings - from .delete_activity_py3 import DeleteActivity - from .sql_server_stored_procedure_activity_py3 import SqlServerStoredProcedureActivity - from .custom_activity_reference_object_py3 import CustomActivityReferenceObject - from .custom_activity_py3 import CustomActivity - from .ssis_access_credential_py3 import SSISAccessCredential - from .ssis_log_location_py3 import SSISLogLocation - from .ssis_property_override_py3 import SSISPropertyOverride - from .ssis_execution_parameter_py3 import SSISExecutionParameter - from .ssis_execution_credential_py3 import SSISExecutionCredential - from .ssis_package_location_py3 import SSISPackageLocation - from .execute_ssis_package_activity_py3 import ExecuteSSISPackageActivity - from .hd_insight_spark_activity_py3 import HDInsightSparkActivity - from .hd_insight_streaming_activity_py3 import HDInsightStreamingActivity - from .hd_insight_map_reduce_activity_py3 import 
-    from .hd_insight_map_reduce_activity_py3 import HDInsightMapReduceActivity
-    from .hd_insight_pig_activity_py3 import HDInsightPigActivity
-    from .hd_insight_hive_activity_py3 import HDInsightHiveActivity
-    from .redirect_incompatible_row_settings_py3 import RedirectIncompatibleRowSettings
-    from .staging_settings_py3 import StagingSettings
-    from .cosmos_db_mongo_db_api_sink_py3 import CosmosDbMongoDbApiSink
-    from .salesforce_service_cloud_sink_py3 import SalesforceServiceCloudSink
-    from .salesforce_sink_py3 import SalesforceSink
-    from .azure_data_explorer_sink_py3 import AzureDataExplorerSink
-    from .common_data_service_for_apps_sink_py3 import CommonDataServiceForAppsSink
-    from .dynamics_crm_sink_py3 import DynamicsCrmSink
-    from .dynamics_sink_py3 import DynamicsSink
-    from .microsoft_access_sink_py3 import MicrosoftAccessSink
-    from .informix_sink_py3 import InformixSink
-    from .odbc_sink_py3 import OdbcSink
-    from .azure_search_index_sink_py3 import AzureSearchIndexSink
-    from .azure_blob_fs_sink_py3 import AzureBlobFSSink
-    from .azure_data_lake_store_sink_py3 import AzureDataLakeStoreSink
-    from .oracle_sink_py3 import OracleSink
-    from .polybase_settings_py3 import PolybaseSettings
-    from .sql_dw_sink_py3 import SqlDWSink
-    from .sql_mi_sink_py3 import SqlMISink
-    from .azure_sql_sink_py3 import AzureSqlSink
-    from .sql_server_sink_py3 import SqlServerSink
-    from .sql_sink_py3 import SqlSink
-    from .document_db_collection_sink_py3 import DocumentDbCollectionSink
-    from .file_system_sink_py3 import FileSystemSink
-    from .blob_sink_py3 import BlobSink
-    from .binary_sink_py3 import BinarySink
-    from .file_server_write_settings_py3 import FileServerWriteSettings
-    from .azure_data_lake_store_write_settings_py3 import AzureDataLakeStoreWriteSettings
-    from .azure_blob_fs_write_settings_py3 import AzureBlobFSWriteSettings
-    from .azure_blob_storage_write_settings_py3 import AzureBlobStorageWriteSettings
-    from .store_write_settings_py3 import StoreWriteSettings
-    from .parquet_sink_py3 import ParquetSink
-    from .delimited_text_write_settings_py3 import DelimitedTextWriteSettings
-    from .format_write_settings_py3 import FormatWriteSettings
-    from .avro_write_settings_py3 import AvroWriteSettings
-    from .avro_sink_py3 import AvroSink
-    from .azure_table_sink_py3 import AzureTableSink
-    from .azure_queue_sink_py3 import AzureQueueSink
-    from .sap_cloud_for_customer_sink_py3 import SapCloudForCustomerSink
-    from .azure_postgre_sql_sink_py3 import AzurePostgreSqlSink
-    from .delimited_text_sink_py3 import DelimitedTextSink
-    from .copy_sink_py3 import CopySink
-    from .copy_activity_py3 import CopyActivity
-    from .execution_activity_py3 import ExecutionActivity
-    from .web_hook_activity_py3 import WebHookActivity
-    from .append_variable_activity_py3 import AppendVariableActivity
-    from .set_variable_activity_py3 import SetVariableActivity
-    from .filter_activity_py3 import FilterActivity
-    from .validation_activity_py3 import ValidationActivity
-    from .until_activity_py3 import UntilActivity
-    from .wait_activity_py3 import WaitActivity
-    from .for_each_activity_py3 import ForEachActivity
-    from .if_condition_activity_py3 import IfConditionActivity
-    from .execute_pipeline_activity_py3 import ExecutePipelineActivity
-    from .control_activity_py3 import ControlActivity
-    from .linked_integration_runtime_py3 import LinkedIntegrationRuntime
-    from .self_hosted_integration_runtime_node_py3 import SelfHostedIntegrationRuntimeNode
-    from .self_hosted_integration_runtime_status_py3 import SelfHostedIntegrationRuntimeStatus
-    from .managed_integration_runtime_operation_result_py3 import ManagedIntegrationRuntimeOperationResult
-    from .managed_integration_runtime_error_py3 import ManagedIntegrationRuntimeError
-    from .managed_integration_runtime_node_py3 import ManagedIntegrationRuntimeNode
-    from .managed_integration_runtime_status_py3 import ManagedIntegrationRuntimeStatus
-    from .linked_integration_runtime_rbac_authorization_py3 import LinkedIntegrationRuntimeRbacAuthorization
-    from .linked_integration_runtime_key_authorization_py3 import LinkedIntegrationRuntimeKeyAuthorization
-    from .linked_integration_runtime_type_py3 import LinkedIntegrationRuntimeType
-    from .self_hosted_integration_runtime_py3 import SelfHostedIntegrationRuntime
-    from .entity_reference_py3 import EntityReference
-    from .integration_runtime_data_proxy_properties_py3 import IntegrationRuntimeDataProxyProperties
-    from .integration_runtime_custom_setup_script_properties_py3 import IntegrationRuntimeCustomSetupScriptProperties
-    from .integration_runtime_ssis_catalog_info_py3 import IntegrationRuntimeSsisCatalogInfo
-    from .integration_runtime_ssis_properties_py3 import IntegrationRuntimeSsisProperties
-    from .integration_runtime_vnet_properties_py3 import IntegrationRuntimeVNetProperties
-    from .integration_runtime_compute_properties_py3 import IntegrationRuntimeComputeProperties
-    from .managed_integration_runtime_py3 import ManagedIntegrationRuntime
-    from .integration_runtime_node_ip_address_py3 import IntegrationRuntimeNodeIpAddress
-    from .ssis_variable_py3 import SsisVariable
-    from .ssis_environment_py3 import SsisEnvironment
-    from .ssis_parameter_py3 import SsisParameter
-    from .ssis_package_py3 import SsisPackage
-    from .ssis_environment_reference_py3 import SsisEnvironmentReference
-    from .ssis_project_py3 import SsisProject
-    from .ssis_folder_py3 import SsisFolder
-    from .ssis_object_metadata_py3 import SsisObjectMetadata
-    from .ssis_object_metadata_list_response_py3 import SsisObjectMetadataListResponse
-    from .integration_runtime_node_monitoring_data_py3 import IntegrationRuntimeNodeMonitoringData
-    from .integration_runtime_monitoring_data_py3 import IntegrationRuntimeMonitoringData
-    from .integration_runtime_auth_keys_py3 import IntegrationRuntimeAuthKeys
-    from .integration_runtime_regenerate_key_parameters_py3 import IntegrationRuntimeRegenerateKeyParameters
-    from .integration_runtime_connection_info_py3 import IntegrationRuntimeConnectionInfo
+    from ._models_py3 import AccessPolicyResponse
+    from ._models_py3 import Activity
+    from ._models_py3 import ActivityDependency
+    from ._models_py3 import ActivityPolicy
+    from ._models_py3 import ActivityRun
+    from ._models_py3 import ActivityRunsQueryResponse
+    from ._models_py3 import AmazonMWSLinkedService
+    from ._models_py3 import AmazonMWSObjectDataset
+    from ._models_py3 import AmazonMWSSource
+    from ._models_py3 import AmazonRedshiftLinkedService
+    from ._models_py3 import AmazonRedshiftSource
+    from ._models_py3 import AmazonRedshiftTableDataset
+    from ._models_py3 import AmazonS3Dataset
+    from ._models_py3 import AmazonS3LinkedService
+    from ._models_py3 import AmazonS3Location
+    from ._models_py3 import AmazonS3ReadSettings
+    from ._models_py3 import AppendVariableActivity
+    from ._models_py3 import AvroDataset
+    from ._models_py3 import AvroFormat
+    from ._models_py3 import AvroSink
+    from ._models_py3 import AvroSource
+    from ._models_py3 import AvroWriteSettings
+    from ._models_py3 import AzureBatchLinkedService
+    from ._models_py3 import AzureBlobDataset
+    from ._models_py3 import AzureBlobFSDataset
+    from ._models_py3 import AzureBlobFSLinkedService
+    from ._models_py3 import AzureBlobFSLocation
+    from ._models_py3 import AzureBlobFSReadSettings
+    from ._models_py3 import AzureBlobFSSink
+    from ._models_py3 import AzureBlobFSSource
+    from ._models_py3 import AzureBlobFSWriteSettings
+    from ._models_py3 import AzureBlobStorageLinkedService
+    from ._models_py3 import AzureBlobStorageLocation
+    from ._models_py3 import AzureBlobStorageReadSettings
+    from ._models_py3 import AzureBlobStorageWriteSettings
+    from ._models_py3 import AzureDatabricksLinkedService
+    from ._models_py3 import AzureDataExplorerCommandActivity
+    from ._models_py3 import AzureDataExplorerLinkedService
+    from ._models_py3 import AzureDataExplorerSink
+    from ._models_py3 import AzureDataExplorerSource
+    from ._models_py3 import AzureDataExplorerTableDataset
+    from ._models_py3 import AzureDataLakeAnalyticsLinkedService
+    from ._models_py3 import AzureDataLakeStoreDataset
+    from ._models_py3 import AzureDataLakeStoreLinkedService
+    from ._models_py3 import AzureDataLakeStoreLocation
+    from ._models_py3 import AzureDataLakeStoreReadSettings
+    from ._models_py3 import AzureDataLakeStoreSink
+    from ._models_py3 import AzureDataLakeStoreSource
+    from ._models_py3 import AzureDataLakeStoreWriteSettings
+    from ._models_py3 import AzureFunctionActivity
+    from ._models_py3 import AzureFunctionLinkedService
+    from ._models_py3 import AzureKeyVaultLinkedService
+    from ._models_py3 import AzureKeyVaultSecretReference
+    from ._models_py3 import AzureMariaDBLinkedService
+    from ._models_py3 import AzureMariaDBSource
+    from ._models_py3 import AzureMariaDBTableDataset
+    from ._models_py3 import AzureMLBatchExecutionActivity
+    from ._models_py3 import AzureMLLinkedService
+    from ._models_py3 import AzureMLUpdateResourceActivity
+    from ._models_py3 import AzureMLWebServiceFile
+    from ._models_py3 import AzureMySqlLinkedService
+    from ._models_py3 import AzureMySqlSink
+    from ._models_py3 import AzureMySqlSource
+    from ._models_py3 import AzureMySqlTableDataset
+    from ._models_py3 import AzurePostgreSqlLinkedService
+    from ._models_py3 import AzurePostgreSqlSink
+    from ._models_py3 import AzurePostgreSqlSource
+    from ._models_py3 import AzurePostgreSqlTableDataset
+    from ._models_py3 import AzureQueueSink
+    from ._models_py3 import AzureSearchIndexDataset
+    from ._models_py3 import AzureSearchIndexSink
+    from ._models_py3 import AzureSearchLinkedService
+    from ._models_py3 import AzureSqlDatabaseLinkedService
+    from ._models_py3 import AzureSqlDWLinkedService
+    from ._models_py3 import AzureSqlDWTableDataset
+    from ._models_py3 import AzureSqlMILinkedService
+    from ._models_py3 import AzureSqlMITableDataset
+    from ._models_py3 import AzureSqlSink
+    from ._models_py3 import AzureSqlSource
+    from ._models_py3 import AzureSqlTableDataset
+    from ._models_py3 import AzureStorageLinkedService
+    from ._models_py3 import AzureTableDataset
+    from ._models_py3 import AzureTableSink
+    from ._models_py3 import AzureTableSource
+    from ._models_py3 import AzureTableStorageLinkedService
+    from ._models_py3 import BinaryDataset
+    from ._models_py3 import BinarySink
+    from ._models_py3 import BinarySource
+    from ._models_py3 import BlobEventsTrigger
+    from ._models_py3 import BlobSink
+    from ._models_py3 import BlobSource
+    from ._models_py3 import BlobTrigger
+    from ._models_py3 import CassandraLinkedService
+    from ._models_py3 import CassandraSource
+    from ._models_py3 import CassandraTableDataset
+    from ._models_py3 import CommonDataServiceForAppsEntityDataset
+    from ._models_py3 import CommonDataServiceForAppsLinkedService
+    from ._models_py3 import CommonDataServiceForAppsSink
+    from ._models_py3 import CommonDataServiceForAppsSource
+    from ._models_py3 import ConcurLinkedService
+    from ._models_py3 import ConcurObjectDataset
+    from ._models_py3 import ConcurSource
+    from ._models_py3 import ControlActivity
+    from ._models_py3 import CopyActivity
+    from ._models_py3 import CopySink
+    from ._models_py3 import CopySource
+    from ._models_py3 import CosmosDbLinkedService
+    from ._models_py3 import CosmosDbMongoDbApiCollectionDataset
+    from ._models_py3 import CosmosDbMongoDbApiLinkedService
+    from ._models_py3 import CosmosDbMongoDbApiSink
+    from ._models_py3 import CosmosDbMongoDbApiSource
+    from ._models_py3 import CouchbaseLinkedService
+    from ._models_py3 import CouchbaseSource
+    from ._models_py3 import CouchbaseTableDataset
+    from ._models_py3 import CreateLinkedIntegrationRuntimeRequest
+    from ._models_py3 import CreateRunResponse
+    from ._models_py3 import CustomActivity
+    from ._models_py3 import CustomActivityReferenceObject
+    from ._models_py3 import CustomDataset
+    from ._models_py3 import CustomDataSourceLinkedService
+    from ._models_py3 import DatabricksNotebookActivity
+    from ._models_py3 import DatabricksSparkJarActivity
+    from ._models_py3 import DatabricksSparkPythonActivity
+    from ._models_py3 import DataLakeAnalyticsUSQLActivity
+    from ._models_py3 import Dataset
+    from ._models_py3 import DatasetBZip2Compression
+    from ._models_py3 import DatasetCompression
+    from ._models_py3 import DatasetDeflateCompression
+    from ._models_py3 import DatasetFolder
+    from ._models_py3 import DatasetGZipCompression
+    from ._models_py3 import DatasetLocation
+    from ._models_py3 import DatasetReference
+    from ._models_py3 import DatasetResource
+    from ._models_py3 import DatasetStorageFormat
+    from ._models_py3 import DatasetZipDeflateCompression
+    from ._models_py3 import Db2LinkedService
+    from ._models_py3 import Db2Source
+    from ._models_py3 import Db2TableDataset
+    from ._models_py3 import DeleteActivity
+    from ._models_py3 import DelimitedTextDataset
+    from ._models_py3 import DelimitedTextReadSettings
+    from ._models_py3 import DelimitedTextSink
+    from ._models_py3 import DelimitedTextSource
+    from ._models_py3 import DelimitedTextWriteSettings
+    from ._models_py3 import DependencyReference
+    from ._models_py3 import DistcpSettings
+    from ._models_py3 import DocumentDbCollectionDataset
+    from ._models_py3 import DocumentDbCollectionSink
+    from ._models_py3 import DocumentDbCollectionSource
+    from ._models_py3 import DrillLinkedService
+    from ._models_py3 import DrillSource
+    from ._models_py3 import DrillTableDataset
+    from ._models_py3 import DynamicsAXLinkedService
+    from ._models_py3 import DynamicsAXResourceDataset
+    from ._models_py3 import DynamicsAXSource
+    from ._models_py3 import DynamicsCrmEntityDataset
+    from ._models_py3 import DynamicsCrmLinkedService
+    from ._models_py3 import DynamicsCrmSink
+    from ._models_py3 import DynamicsCrmSource
+    from ._models_py3 import DynamicsEntityDataset
+    from ._models_py3 import DynamicsLinkedService
+    from ._models_py3 import DynamicsSink
+    from ._models_py3 import DynamicsSource
+    from ._models_py3 import EloquaLinkedService
+    from ._models_py3 import EloquaObjectDataset
+    from ._models_py3 import EloquaSource
+    from ._models_py3 import EntityReference
+    from ._models_py3 import ExecutePipelineActivity
+    from ._models_py3 import ExecuteSSISPackageActivity
+    from ._models_py3 import ExecutionActivity
+    from ._models_py3 import ExposureControlRequest
+    from ._models_py3 import ExposureControlResponse
+    from ._models_py3 import Expression
+    from ._models_py3 import Factory
+    from ._models_py3 import FactoryGitHubConfiguration
+    from ._models_py3 import FactoryIdentity
+    from ._models_py3 import FactoryRepoConfiguration
+    from ._models_py3 import FactoryRepoUpdate
+    from ._models_py3 import FactoryUpdateParameters
+    from ._models_py3 import FactoryVSTSConfiguration
+    from ._models_py3 import FileServerLinkedService
+    from ._models_py3 import FileServerLocation
+    from ._models_py3 import FileServerReadSettings
+    from ._models_py3 import FileServerWriteSettings
+    from ._models_py3 import FileShareDataset
+    from ._models_py3 import FileSystemSink
+    from ._models_py3 import FileSystemSource
+    from ._models_py3 import FilterActivity
+    from ._models_py3 import ForEachActivity
+    from ._models_py3 import FormatReadSettings
+    from ._models_py3 import FormatWriteSettings
+    from ._models_py3 import FtpReadSettings
+    from ._models_py3 import FtpServerLinkedService
+    from ._models_py3 import FtpServerLocation
+    from ._models_py3 import GetMetadataActivity
+    from ._models_py3 import GetSsisObjectMetadataRequest
+    from ._models_py3 import GitHubAccessTokenRequest
+    from ._models_py3 import GitHubAccessTokenResponse
+    from ._models_py3 import GoogleAdWordsLinkedService
+    from ._models_py3 import GoogleAdWordsObjectDataset
+    from ._models_py3 import GoogleAdWordsSource
+    from ._models_py3 import GoogleBigQueryLinkedService
+    from ._models_py3 import GoogleBigQueryObjectDataset
+    from ._models_py3 import GoogleBigQuerySource
+    from ._models_py3 import GreenplumLinkedService
+    from ._models_py3 import GreenplumSource
+    from ._models_py3 import GreenplumTableDataset
+    from ._models_py3 import HBaseLinkedService
+    from ._models_py3 import HBaseObjectDataset
+    from ._models_py3 import HBaseSource
+    from ._models_py3 import HdfsLinkedService
+    from ._models_py3 import HdfsLocation
+    from ._models_py3 import HdfsReadSettings
+    from ._models_py3 import HdfsSource
+    from ._models_py3 import HDInsightHiveActivity
+    from ._models_py3 import HDInsightLinkedService
+    from ._models_py3 import HDInsightMapReduceActivity
+    from ._models_py3 import HDInsightOnDemandLinkedService
+    from ._models_py3 import HDInsightPigActivity
+    from ._models_py3 import HDInsightSparkActivity
+    from ._models_py3 import HDInsightStreamingActivity
+    from ._models_py3 import HiveLinkedService
+    from ._models_py3 import HiveObjectDataset
+    from ._models_py3 import HiveSource
+    from ._models_py3 import HttpDataset
+    from ._models_py3 import HttpLinkedService
+    from ._models_py3 import HttpReadSettings
+    from ._models_py3 import HttpServerLocation
+    from ._models_py3 import HttpSource
+    from ._models_py3 import HubspotLinkedService
+    from ._models_py3 import HubspotObjectDataset
+    from ._models_py3 import HubspotSource
+    from ._models_py3 import IfConditionActivity
+    from ._models_py3 import ImpalaLinkedService
+    from ._models_py3 import ImpalaObjectDataset
+    from ._models_py3 import ImpalaSource
+    from ._models_py3 import InformixLinkedService
+    from ._models_py3 import InformixSink
+    from ._models_py3 import InformixSource
+    from ._models_py3 import InformixTableDataset
+    from ._models_py3 import IntegrationRuntime
+    from ._models_py3 import IntegrationRuntimeAuthKeys
+    from ._models_py3 import IntegrationRuntimeComputeProperties
+    from ._models_py3 import IntegrationRuntimeConnectionInfo
+    from ._models_py3 import IntegrationRuntimeCustomSetupScriptProperties
+    from ._models_py3 import IntegrationRuntimeDataProxyProperties
+    from ._models_py3 import IntegrationRuntimeMonitoringData
+    from ._models_py3 import IntegrationRuntimeNodeIpAddress
+    from ._models_py3 import IntegrationRuntimeNodeMonitoringData
+    from ._models_py3 import IntegrationRuntimeReference
+    from ._models_py3 import IntegrationRuntimeRegenerateKeyParameters
+    from ._models_py3 import IntegrationRuntimeResource
+    from ._models_py3 import IntegrationRuntimeSsisCatalogInfo
+    from ._models_py3 import IntegrationRuntimeSsisProperties
+    from ._models_py3 import IntegrationRuntimeStatus
+    from ._models_py3 import IntegrationRuntimeStatusListResponse
+    from ._models_py3 import IntegrationRuntimeStatusResponse
+    from ._models_py3 import IntegrationRuntimeVNetProperties
+    from ._models_py3 import JiraLinkedService
+    from ._models_py3 import JiraObjectDataset
+    from ._models_py3 import JiraSource
+    from ._models_py3 import JsonFormat
+    from ._models_py3 import LinkedIntegrationRuntime
+    from ._models_py3 import LinkedIntegrationRuntimeKeyAuthorization
+    from ._models_py3 import LinkedIntegrationRuntimeRbacAuthorization
+    from ._models_py3 import LinkedIntegrationRuntimeRequest
+    from ._models_py3 import LinkedIntegrationRuntimeType
+    from ._models_py3 import LinkedService
+    from ._models_py3 import LinkedServiceReference
+    from ._models_py3 import LinkedServiceResource
+    from ._models_py3 import LogStorageSettings
+    from ._models_py3 import LookupActivity
+    from ._models_py3 import MagentoLinkedService
+    from ._models_py3 import MagentoObjectDataset
+    from ._models_py3 import MagentoSource
+    from ._models_py3 import ManagedIntegrationRuntime
+    from ._models_py3 import ManagedIntegrationRuntimeError
+    from ._models_py3 import ManagedIntegrationRuntimeNode
+    from ._models_py3 import ManagedIntegrationRuntimeOperationResult
+    from ._models_py3 import ManagedIntegrationRuntimeStatus
+    from ._models_py3 import MariaDBLinkedService
+    from ._models_py3 import MariaDBSource
+    from ._models_py3 import MariaDBTableDataset
+    from ._models_py3 import MarketoLinkedService
+    from ._models_py3 import MarketoObjectDataset
+    from ._models_py3 import MarketoSource
+    from ._models_py3 import MicrosoftAccessLinkedService
+    from ._models_py3 import MicrosoftAccessSink
+    from ._models_py3 import MicrosoftAccessSource
+    from ._models_py3 import MicrosoftAccessTableDataset
+    from ._models_py3 import MongoDbCollectionDataset
+    from ._models_py3 import MongoDbCursorMethodsProperties
+    from ._models_py3 import MongoDbLinkedService
+    from ._models_py3 import MongoDbSource
+    from ._models_py3 import MongoDbV2CollectionDataset
+    from ._models_py3 import MongoDbV2LinkedService
+    from ._models_py3 import MongoDbV2Source
+    from ._models_py3 import MultiplePipelineTrigger
+    from ._models_py3 import MySqlLinkedService
+    from ._models_py3 import MySqlSource
+    from ._models_py3 import MySqlTableDataset
+    from ._models_py3 import NetezzaLinkedService
+    from ._models_py3 import NetezzaPartitionSettings
+    from ._models_py3 import NetezzaSource
+    from ._models_py3 import NetezzaTableDataset
+    from ._models_py3 import ODataLinkedService
+    from ._models_py3 import ODataResourceDataset
+    from ._models_py3 import ODataSource
+    from ._models_py3 import OdbcLinkedService
+    from ._models_py3 import OdbcSink
+    from ._models_py3 import OdbcSource
+    from ._models_py3 import OdbcTableDataset
+    from ._models_py3 import Office365Dataset
+    from ._models_py3 import Office365LinkedService
+    from ._models_py3 import Office365Source
+    from ._models_py3 import Operation
+    from ._models_py3 import OperationDisplay
+    from ._models_py3 import OperationLogSpecification
+    from ._models_py3 import OperationMetricAvailability
+    from ._models_py3 import OperationMetricDimension
+    from ._models_py3 import OperationMetricSpecification
+    from ._models_py3 import OperationServiceSpecification
+    from ._models_py3 import OracleLinkedService
+    from ._models_py3 import OraclePartitionSettings
+    from ._models_py3 import OracleServiceCloudLinkedService
+    from ._models_py3 import OracleServiceCloudObjectDataset
+    from ._models_py3 import OracleServiceCloudSource
+    from ._models_py3 import OracleSink
+    from ._models_py3 import OracleSource
+    from ._models_py3 import OracleTableDataset
+    from ._models_py3 import OrcFormat
+    from ._models_py3 import ParameterSpecification
+    from ._models_py3 import ParquetDataset
+    from ._models_py3 import ParquetFormat
+    from ._models_py3 import ParquetSink
+    from ._models_py3 import ParquetSource
+    from ._models_py3 import PaypalLinkedService
+    from ._models_py3 import PaypalObjectDataset
+    from ._models_py3 import PaypalSource
+    from ._models_py3 import PhoenixLinkedService
+    from ._models_py3 import PhoenixObjectDataset
+    from ._models_py3 import PhoenixSource
+    from ._models_py3 import PipelineFolder
+    from ._models_py3 import PipelineReference
+    from ._models_py3 import PipelineResource
+    from ._models_py3 import PipelineRun
+    from ._models_py3 import PipelineRunInvokedBy
+    from ._models_py3 import PipelineRunsQueryResponse
+    from ._models_py3 import PolybaseSettings
+    from ._models_py3 import PostgreSqlLinkedService
+    from ._models_py3 import PostgreSqlSource
+    from ._models_py3 import PostgreSqlTableDataset
+    from ._models_py3 import PrestoLinkedService
+    from ._models_py3 import PrestoObjectDataset
+    from ._models_py3 import PrestoSource
+    from ._models_py3 import QuickBooksLinkedService
+    from ._models_py3 import QuickBooksObjectDataset
+    from ._models_py3 import QuickBooksSource
+    from ._models_py3 import RecurrenceSchedule
+    from ._models_py3 import RecurrenceScheduleOccurrence
+    from ._models_py3 import RedirectIncompatibleRowSettings
+    from ._models_py3 import RedshiftUnloadSettings
+    from ._models_py3 import RelationalSource
+    from ._models_py3 import RelationalTableDataset
+    from ._models_py3 import RerunTriggerResource
+    from ._models_py3 import RerunTumblingWindowTrigger
+    from ._models_py3 import RerunTumblingWindowTriggerActionParameters
+    from ._models_py3 import Resource
+    from ._models_py3 import ResponsysLinkedService
+    from ._models_py3 import ResponsysObjectDataset
+    from ._models_py3 import ResponsysSource
+    from ._models_py3 import RestResourceDataset
+    from ._models_py3 import RestServiceLinkedService
+    from ._models_py3 import RestSource
+    from ._models_py3 import RetryPolicy
+    from ._models_py3 import RunFilterParameters
+    from ._models_py3 import RunQueryFilter
+    from ._models_py3 import RunQueryOrderBy
+    from ._models_py3 import SalesforceLinkedService
+    from ._models_py3 import SalesforceMarketingCloudLinkedService
+    from ._models_py3 import SalesforceMarketingCloudObjectDataset
+    from ._models_py3 import SalesforceMarketingCloudSource
+    from ._models_py3 import SalesforceObjectDataset
+    from ._models_py3 import SalesforceServiceCloudLinkedService
+    from ._models_py3 import SalesforceServiceCloudObjectDataset
+    from ._models_py3 import SalesforceServiceCloudSink
+    from ._models_py3 import SalesforceServiceCloudSource
+    from ._models_py3 import SalesforceSink
+    from ._models_py3 import SalesforceSource
+    from ._models_py3 import SapBwCubeDataset
+    from ._models_py3 import SapBWLinkedService
+    from ._models_py3 import SapBwSource
+    from ._models_py3 import SapCloudForCustomerLinkedService
+    from ._models_py3 import SapCloudForCustomerResourceDataset
+    from ._models_py3 import SapCloudForCustomerSink
+    from ._models_py3 import SapCloudForCustomerSource
+    from ._models_py3 import SapEccLinkedService
+    from ._models_py3 import SapEccResourceDataset
+    from ._models_py3 import SapEccSource
+    from ._models_py3 import SapHanaLinkedService
+    from ._models_py3 import SapHanaSource
+    from ._models_py3 import SapHanaTableDataset
+    from ._models_py3 import SapOpenHubLinkedService
+    from ._models_py3 import SapOpenHubSource
+    from ._models_py3 import SapOpenHubTableDataset
+    from ._models_py3 import SapTableLinkedService
+    from ._models_py3 import SapTablePartitionSettings
+    from ._models_py3 import SapTableResourceDataset
+    from ._models_py3 import SapTableSource
+    from ._models_py3 import ScheduleTrigger
+    from ._models_py3 import ScheduleTriggerRecurrence
+    from ._models_py3 import ScriptAction
+    from ._models_py3 import SecretBase
+    from ._models_py3 import SecureString
+    from ._models_py3 import SelfDependencyTumblingWindowTriggerReference
+    from ._models_py3 import SelfHostedIntegrationRuntime
+    from ._models_py3 import SelfHostedIntegrationRuntimeNode
+    from ._models_py3 import SelfHostedIntegrationRuntimeStatus
+    from ._models_py3 import ServiceNowLinkedService
+    from ._models_py3 import ServiceNowObjectDataset
+    from ._models_py3 import ServiceNowSource
+    from ._models_py3 import SetVariableActivity
+    from ._models_py3 import SftpLocation
+    from ._models_py3 import SftpReadSettings
+    from ._models_py3 import SftpServerLinkedService
+    from ._models_py3 import ShopifyLinkedService
+    from ._models_py3 import ShopifyObjectDataset
+    from ._models_py3 import ShopifySource
+    from ._models_py3 import SparkLinkedService
+    from ._models_py3 import SparkObjectDataset
+    from ._models_py3 import SparkSource
+    from ._models_py3 import SqlDWSink
+    from ._models_py3 import SqlDWSource
+    from ._models_py3 import SqlMISink
+    from ._models_py3 import SqlMISource
+    from ._models_py3 import SqlServerLinkedService
+    from ._models_py3 import SqlServerSink
+    from ._models_py3 import SqlServerSource
+    from ._models_py3 import SqlServerStoredProcedureActivity
+    from ._models_py3 import SqlServerTableDataset
+    from ._models_py3 import SqlSink
+    from ._models_py3 import SqlSource
+    from ._models_py3 import SquareLinkedService
+    from ._models_py3 import SquareObjectDataset
+    from ._models_py3 import SquareSource
+    from ._models_py3 import SSISAccessCredential
+    from ._models_py3 import SsisEnvironment
+    from ._models_py3 import SsisEnvironmentReference
+    from ._models_py3 import SSISExecutionCredential
+    from ._models_py3 import SSISExecutionParameter
+    from ._models_py3 import SsisFolder
+    from ._models_py3 import SSISLogLocation
+    from ._models_py3 import SsisObjectMetadata
+    from ._models_py3 import SsisObjectMetadataListResponse
+    from ._models_py3 import SsisObjectMetadataStatusResponse
+    from ._models_py3 import SsisPackage
+    from ._models_py3 import SSISPackageLocation
+    from ._models_py3 import SsisParameter
+    from ._models_py3 import SsisProject
+    from ._models_py3 import SSISPropertyOverride
+    from ._models_py3 import SsisVariable
+    from ._models_py3 import StagingSettings
+    from ._models_py3 import StoredProcedureParameter
+    from ._models_py3 import StoreReadSettings
+    from ._models_py3 import StoreWriteSettings
+    from ._models_py3 import SubResource
+    from ._models_py3 import SybaseLinkedService
+    from ._models_py3 import SybaseSource
+    from ._models_py3 import SybaseTableDataset
+    from ._models_py3 import TeradataLinkedService
+    from ._models_py3 import TeradataPartitionSettings
+    from ._models_py3 import TeradataSource
+    from ._models_py3 import TeradataTableDataset
+    from ._models_py3 import TextFormat
+    from ._models_py3 import Trigger
+    from ._models_py3 import TriggerDependencyReference
+    from ._models_py3 import TriggerPipelineReference
+    from ._models_py3 import TriggerReference
+    from ._models_py3 import TriggerResource
+    from ._models_py3 import TriggerRun
+    from ._models_py3 import TriggerRunsQueryResponse
+    from ._models_py3 import TumblingWindowTrigger
+    from ._models_py3 import TumblingWindowTriggerDependencyReference
+    from ._models_py3 import UntilActivity
+    from ._models_py3 import UpdateIntegrationRuntimeNodeRequest
+    from ._models_py3 import UpdateIntegrationRuntimeRequest
+    from ._models_py3 import UserAccessPolicy
+    from ._models_py3 import UserProperty
+    from ._models_py3 import ValidationActivity
+    from ._models_py3 import VariableSpecification
+    from ._models_py3 import VerticaLinkedService
+    from ._models_py3 import VerticaSource
+    from ._models_py3 import VerticaTableDataset
+    from ._models_py3 import WaitActivity
+    from ._models_py3 import WebActivity
+    from ._models_py3 import WebActivityAuthentication
+    from ._models_py3 import WebAnonymousAuthentication
+    from ._models_py3 import WebBasicAuthentication
+    from ._models_py3 import WebClientCertificateAuthentication
+    from ._models_py3 import WebHookActivity
+    from ._models_py3 import WebLinkedService
+    from ._models_py3 import WebLinkedServiceTypeProperties
+    from ._models_py3 import WebSource
+    from ._models_py3 import WebTableDataset
+    from ._models_py3 import XeroLinkedService
+    from ._models_py3 import XeroObjectDataset
+    from ._models_py3 import XeroSource
+    from ._models_py3 import ZohoLinkedService
+    from ._models_py3 import ZohoObjectDataset
+    from ._models_py3 import ZohoSource
 except (SyntaxError, ImportError):
-    from .resource import Resource
-    from .sub_resource import SubResource
-    from .expression import Expression
-    from .secure_string import SecureString
-    from .linked_service_reference import LinkedServiceReference
-    from .azure_key_vault_secret_reference import AzureKeyVaultSecretReference
-    from .secret_base import SecretBase
-    from .factory_identity import FactoryIdentity
-    from .factory_repo_configuration import FactoryRepoConfiguration
-    from .factory import Factory
-    from .integration_runtime import IntegrationRuntime
-    from .integration_runtime_resource import IntegrationRuntimeResource
-    from .integration_runtime_reference import IntegrationRuntimeReference
-    from .integration_runtime_status import IntegrationRuntimeStatus
-    from .integration_runtime_status_response import IntegrationRuntimeStatusResponse
-    from .integration_runtime_status_list_response import IntegrationRuntimeStatusListResponse
-    from .update_integration_runtime_request import UpdateIntegrationRuntimeRequest
-    from .update_integration_runtime_node_request import UpdateIntegrationRuntimeNodeRequest
-    from .linked_integration_runtime_request import LinkedIntegrationRuntimeRequest
-    from .create_linked_integration_runtime_request import CreateLinkedIntegrationRuntimeRequest
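For context, the try/except split above is the standard autorest pattern for straddling Python 2 and 3: `_models_py3` uses Python 3-only syntax (type annotations in constructor signatures), so importing it under Python 2 raises SyntaxError at import time and the package falls back to the untyped `_models` module. A minimal sketch of the pattern, using one of the models touched by this patch:

    try:
        # Python 3: annotated models (py3-only syntax in the module body)
        from ._models_py3 import AzureBlobFSLinkedService
    except (SyntaxError, ImportError):
        # Python 2: the same class, generated without annotations
        from ._models import AzureBlobFSLinkedService

Either branch binds the same public name, so callers can import from the package namespace without caring which implementation was loaded.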
-    from .parameter_specification import ParameterSpecification
-    from .linked_service import LinkedService
-    from .linked_service_resource import LinkedServiceResource
-    from .dataset_folder import DatasetFolder
-    from .dataset import Dataset
-    from .dataset_resource import DatasetResource
-    from .activity_dependency import ActivityDependency
-    from .user_property import UserProperty
-    from .activity import Activity
-    from .variable_specification import VariableSpecification
-    from .pipeline_folder import PipelineFolder
-    from .pipeline_resource import PipelineResource
-    from .trigger import Trigger
-    from .trigger_resource import TriggerResource
-    from .create_run_response import CreateRunResponse
-    from .factory_vsts_configuration import FactoryVSTSConfiguration
-    from .factory_git_hub_configuration import FactoryGitHubConfiguration
-    from .factory_repo_update import FactoryRepoUpdate
-    from .git_hub_access_token_request import GitHubAccessTokenRequest
-    from .git_hub_access_token_response import GitHubAccessTokenResponse
-    from .user_access_policy import UserAccessPolicy
-    from .access_policy_response import AccessPolicyResponse
-    from .pipeline_reference import PipelineReference
-    from .trigger_pipeline_reference import TriggerPipelineReference
-    from .factory_update_parameters import FactoryUpdateParameters
-    from .dataset_reference import DatasetReference
-    from .run_query_filter import RunQueryFilter
-    from .run_query_order_by import RunQueryOrderBy
-    from .run_filter_parameters import RunFilterParameters
-    from .pipeline_run_invoked_by import PipelineRunInvokedBy
-    from .pipeline_run import PipelineRun
-    from .pipeline_runs_query_response import PipelineRunsQueryResponse
-    from .activity_run import ActivityRun
-    from .activity_runs_query_response import ActivityRunsQueryResponse
-    from .trigger_run import TriggerRun
-    from .trigger_runs_query_response import TriggerRunsQueryResponse
-    from .rerun_tumbling_window_trigger_action_parameters import RerunTumblingWindowTriggerActionParameters
-    from .rerun_tumbling_window_trigger import RerunTumblingWindowTrigger
-    from .rerun_trigger_resource import RerunTriggerResource
-    from .operation_display import OperationDisplay
-    from .operation_log_specification import OperationLogSpecification
-    from .operation_metric_availability import OperationMetricAvailability
-    from .operation_metric_dimension import OperationMetricDimension
-    from .operation_metric_specification import OperationMetricSpecification
-    from .operation_service_specification import OperationServiceSpecification
-    from .operation import Operation
-    from .get_ssis_object_metadata_request import GetSsisObjectMetadataRequest
-    from .ssis_object_metadata_status_response import SsisObjectMetadataStatusResponse
-    from .exposure_control_request import ExposureControlRequest
-    from .exposure_control_response import ExposureControlResponse
-    from .self_dependency_tumbling_window_trigger_reference import SelfDependencyTumblingWindowTriggerReference
-    from .trigger_reference import TriggerReference
-    from .tumbling_window_trigger_dependency_reference import TumblingWindowTriggerDependencyReference
-    from .trigger_dependency_reference import TriggerDependencyReference
-    from .dependency_reference import DependencyReference
-    from .retry_policy import RetryPolicy
-    from .tumbling_window_trigger import TumblingWindowTrigger
-    from .blob_events_trigger import BlobEventsTrigger
-    from .blob_trigger import BlobTrigger
-    from .recurrence_schedule_occurrence import RecurrenceScheduleOccurrence
-    from .recurrence_schedule import RecurrenceSchedule
-    from .schedule_trigger_recurrence import ScheduleTriggerRecurrence
-    from .schedule_trigger import ScheduleTrigger
-    from .multiple_pipeline_trigger import MultiplePipelineTrigger
-    from .azure_function_linked_service import AzureFunctionLinkedService
-    from .azure_data_explorer_linked_service import AzureDataExplorerLinkedService
-    from .sap_table_linked_service import SapTableLinkedService
-    from .google_ad_words_linked_service import GoogleAdWordsLinkedService
-    from .oracle_service_cloud_linked_service import OracleServiceCloudLinkedService
-    from .dynamics_ax_linked_service import DynamicsAXLinkedService
-    from .responsys_linked_service import ResponsysLinkedService
-    from .azure_databricks_linked_service import AzureDatabricksLinkedService
-    from .azure_data_lake_analytics_linked_service import AzureDataLakeAnalyticsLinkedService
-    from .script_action import ScriptAction
-    from .hd_insight_on_demand_linked_service import HDInsightOnDemandLinkedService
-    from .salesforce_marketing_cloud_linked_service import SalesforceMarketingCloudLinkedService
-    from .netezza_linked_service import NetezzaLinkedService
-    from .vertica_linked_service import VerticaLinkedService
-    from .zoho_linked_service import ZohoLinkedService
-    from .xero_linked_service import XeroLinkedService
-    from .square_linked_service import SquareLinkedService
-    from .spark_linked_service import SparkLinkedService
-    from .shopify_linked_service import ShopifyLinkedService
-    from .service_now_linked_service import ServiceNowLinkedService
-    from .quick_books_linked_service import QuickBooksLinkedService
-    from .presto_linked_service import PrestoLinkedService
-    from .phoenix_linked_service import PhoenixLinkedService
-    from .paypal_linked_service import PaypalLinkedService
-    from .marketo_linked_service import MarketoLinkedService
-    from .azure_maria_db_linked_service import AzureMariaDBLinkedService
-    from .maria_db_linked_service import MariaDBLinkedService
-    from .magento_linked_service import MagentoLinkedService
-    from .jira_linked_service import JiraLinkedService
-    from .impala_linked_service import ImpalaLinkedService
-    from .hubspot_linked_service import HubspotLinkedService
-    from .hive_linked_service import HiveLinkedService
-    from .hbase_linked_service import HBaseLinkedService
-    from .greenplum_linked_service import GreenplumLinkedService
-    from .google_big_query_linked_service import GoogleBigQueryLinkedService
-    from .eloqua_linked_service import EloquaLinkedService
-    from .drill_linked_service import DrillLinkedService
-    from .couchbase_linked_service import CouchbaseLinkedService
-    from .concur_linked_service import ConcurLinkedService
-    from .azure_postgre_sql_linked_service import AzurePostgreSqlLinkedService
-    from .amazon_mws_linked_service import AmazonMWSLinkedService
-    from .sap_hana_linked_service import SapHanaLinkedService
-    from .sap_bw_linked_service import SapBWLinkedService
-    from .sftp_server_linked_service import SftpServerLinkedService
-    from .ftp_server_linked_service import FtpServerLinkedService
-    from .http_linked_service import HttpLinkedService
-    from .azure_search_linked_service import AzureSearchLinkedService
-    from .custom_data_source_linked_service import CustomDataSourceLinkedService
-    from .amazon_redshift_linked_service import AmazonRedshiftLinkedService
-    from .amazon_s3_linked_service import AmazonS3LinkedService
-    from .rest_service_linked_service import RestServiceLinkedService
-    from .sap_open_hub_linked_service import SapOpenHubLinkedService
-    from .sap_ecc_linked_service import SapEccLinkedService
-    from .sap_cloud_for_customer_linked_service import SapCloudForCustomerLinkedService
-    from .salesforce_service_cloud_linked_service import SalesforceServiceCloudLinkedService
-    from .salesforce_linked_service import SalesforceLinkedService
-    from .office365_linked_service import Office365LinkedService
-    from .azure_blob_fs_linked_service import AzureBlobFSLinkedService
-    from .azure_data_lake_store_linked_service import AzureDataLakeStoreLinkedService
-    from .cosmos_db_mongo_db_api_linked_service import CosmosDbMongoDbApiLinkedService
-    from .mongo_db_v2_linked_service import MongoDbV2LinkedService
-    from .mongo_db_linked_service import MongoDbLinkedService
-    from .cassandra_linked_service import CassandraLinkedService
-    from .web_client_certificate_authentication import WebClientCertificateAuthentication
-    from .web_basic_authentication import WebBasicAuthentication
-    from .web_anonymous_authentication import WebAnonymousAuthentication
-    from .web_linked_service_type_properties import WebLinkedServiceTypeProperties
-    from .web_linked_service import WebLinkedService
-    from .odata_linked_service import ODataLinkedService
-    from .hdfs_linked_service import HdfsLinkedService
-    from .microsoft_access_linked_service import MicrosoftAccessLinkedService
-    from .informix_linked_service import InformixLinkedService
-    from .odbc_linked_service import OdbcLinkedService
-    from .azure_ml_linked_service import AzureMLLinkedService
-    from .teradata_linked_service import TeradataLinkedService
-    from .db2_linked_service import Db2LinkedService
-    from .sybase_linked_service import SybaseLinkedService
-    from .postgre_sql_linked_service import PostgreSqlLinkedService
-    from .my_sql_linked_service import MySqlLinkedService
-    from .azure_my_sql_linked_service import AzureMySqlLinkedService
-    from .oracle_linked_service import OracleLinkedService
-    from .file_server_linked_service import FileServerLinkedService
-    from .hd_insight_linked_service import HDInsightLinkedService
-    from .common_data_service_for_apps_linked_service import CommonDataServiceForAppsLinkedService
-    from .dynamics_crm_linked_service import DynamicsCrmLinkedService
-    from .dynamics_linked_service import DynamicsLinkedService
-    from .cosmos_db_linked_service import CosmosDbLinkedService
-    from .azure_key_vault_linked_service import AzureKeyVaultLinkedService
-    from .azure_batch_linked_service import AzureBatchLinkedService
-    from .azure_sql_mi_linked_service import AzureSqlMILinkedService
-    from .azure_sql_database_linked_service import AzureSqlDatabaseLinkedService
-    from .sql_server_linked_service import SqlServerLinkedService
-    from .azure_sql_dw_linked_service import AzureSqlDWLinkedService
-    from .azure_table_storage_linked_service import AzureTableStorageLinkedService
-    from .azure_blob_storage_linked_service import AzureBlobStorageLinkedService
-    from .azure_storage_linked_service import AzureStorageLinkedService
-    from .google_ad_words_object_dataset import GoogleAdWordsObjectDataset
-    from .azure_data_explorer_table_dataset import AzureDataExplorerTableDataset
-    from .oracle_service_cloud_object_dataset import OracleServiceCloudObjectDataset
-    from .dynamics_ax_resource_dataset import DynamicsAXResourceDataset
-    from .responsys_object_dataset import ResponsysObjectDataset
-    from .salesforce_marketing_cloud_object_dataset import SalesforceMarketingCloudObjectDataset
-    from .vertica_table_dataset import VerticaTableDataset
-    from .netezza_table_dataset import NetezzaTableDataset
-    from .zoho_object_dataset import ZohoObjectDataset
-    from .xero_object_dataset import XeroObjectDataset
-    from .square_object_dataset import SquareObjectDataset
-    from .spark_object_dataset import SparkObjectDataset
-    from .shopify_object_dataset import ShopifyObjectDataset
-    from .service_now_object_dataset import ServiceNowObjectDataset
-    from .quick_books_object_dataset import QuickBooksObjectDataset
-    from .presto_object_dataset import PrestoObjectDataset
-    from .phoenix_object_dataset import PhoenixObjectDataset
-    from .paypal_object_dataset import PaypalObjectDataset
-    from .marketo_object_dataset import MarketoObjectDataset
-    from .azure_maria_db_table_dataset import AzureMariaDBTableDataset
-    from .maria_db_table_dataset import MariaDBTableDataset
-    from .magento_object_dataset import MagentoObjectDataset
-    from .jira_object_dataset import JiraObjectDataset
-    from .impala_object_dataset import ImpalaObjectDataset
-    from .hubspot_object_dataset import HubspotObjectDataset
-    from .hive_object_dataset import HiveObjectDataset
-    from .hbase_object_dataset import HBaseObjectDataset
-    from .greenplum_table_dataset import GreenplumTableDataset
-    from .google_big_query_object_dataset import GoogleBigQueryObjectDataset
-    from .eloqua_object_dataset import EloquaObjectDataset
-    from .drill_table_dataset import DrillTableDataset
-    from .couchbase_table_dataset import CouchbaseTableDataset
-    from .concur_object_dataset import ConcurObjectDataset
-    from .azure_postgre_sql_table_dataset import AzurePostgreSqlTableDataset
-    from .amazon_mws_object_dataset import AmazonMWSObjectDataset
-    from .dataset_zip_deflate_compression import DatasetZipDeflateCompression
-    from .dataset_deflate_compression import DatasetDeflateCompression
-    from .dataset_gzip_compression import DatasetGZipCompression
-    from .dataset_bzip2_compression import DatasetBZip2Compression
-    from .dataset_compression import DatasetCompression
-    from .parquet_format import ParquetFormat
-    from .orc_format import OrcFormat
-    from .avro_format import AvroFormat
-    from .json_format import JsonFormat
-    from .text_format import TextFormat
-    from .dataset_storage_format import DatasetStorageFormat
-    from .http_dataset import HttpDataset
-    from .azure_search_index_dataset import AzureSearchIndexDataset
-    from .web_table_dataset import WebTableDataset
-    from .sap_table_resource_dataset import SapTableResourceDataset
-    from .rest_resource_dataset import RestResourceDataset
-    from .sql_server_table_dataset import SqlServerTableDataset
-    from .sap_open_hub_table_dataset import SapOpenHubTableDataset
-    from .sap_hana_table_dataset import SapHanaTableDataset
-    from .sap_ecc_resource_dataset import SapEccResourceDataset
-    from .sap_cloud_for_customer_resource_dataset import SapCloudForCustomerResourceDataset
-    from .sap_bw_cube_dataset import SapBwCubeDataset
-    from .sybase_table_dataset import SybaseTableDataset
-    from .salesforce_service_cloud_object_dataset import SalesforceServiceCloudObjectDataset
-    from .salesforce_object_dataset import SalesforceObjectDataset
-    from .microsoft_access_table_dataset import MicrosoftAccessTableDataset
-    from .postgre_sql_table_dataset import PostgreSqlTableDataset
-    from .my_sql_table_dataset import MySqlTableDataset
-    from .odbc_table_dataset import OdbcTableDataset
-    from .informix_table_dataset import InformixTableDataset
-    from .relational_table_dataset import RelationalTableDataset
-    from .azure_my_sql_table_dataset import AzureMySqlTableDataset
-    from .teradata_table_dataset import TeradataTableDataset
-    from .oracle_table_dataset import OracleTableDataset
-    from .odata_resource_dataset import ODataResourceDataset
-    from .cosmos_db_mongo_db_api_collection_dataset import CosmosDbMongoDbApiCollectionDataset
-    from .mongo_db_v2_collection_dataset import MongoDbV2CollectionDataset
-    from .mongo_db_collection_dataset import MongoDbCollectionDataset
-    from .file_share_dataset import FileShareDataset
-    from .office365_dataset import Office365Dataset
-    from .azure_blob_fs_dataset import AzureBlobFSDataset
-    from .azure_data_lake_store_dataset import AzureDataLakeStoreDataset
-    from .common_data_service_for_apps_entity_dataset import CommonDataServiceForAppsEntityDataset
-    from .dynamics_crm_entity_dataset import DynamicsCrmEntityDataset
-    from .dynamics_entity_dataset import DynamicsEntityDataset
-    from .document_db_collection_dataset import DocumentDbCollectionDataset
-    from .custom_dataset import CustomDataset
-    from .cassandra_table_dataset import CassandraTableDataset
-    from .azure_sql_dw_table_dataset import AzureSqlDWTableDataset
-    from .azure_sql_mi_table_dataset import AzureSqlMITableDataset
-    from .azure_sql_table_dataset import AzureSqlTableDataset
-    from .azure_table_dataset import AzureTableDataset
-    from .azure_blob_dataset import AzureBlobDataset
-    from .hdfs_location import HdfsLocation
-    from .http_server_location import HttpServerLocation
-    from .sftp_location import SftpLocation
-    from .ftp_server_location import FtpServerLocation
-    from .file_server_location import FileServerLocation
-    from .amazon_s3_location import AmazonS3Location
-    from .azure_data_lake_store_location import AzureDataLakeStoreLocation
-    from .azure_blob_fs_location import AzureBlobFSLocation
-    from .azure_blob_storage_location import AzureBlobStorageLocation
-    from .dataset_location import DatasetLocation
-    from .binary_dataset import BinaryDataset
-    from .delimited_text_dataset import DelimitedTextDataset
-    from .parquet_dataset import ParquetDataset
-    from .avro_dataset import AvroDataset
-    from .amazon_s3_dataset import AmazonS3Dataset
-    from .activity_policy import ActivityPolicy
-    from .azure_function_activity import AzureFunctionActivity
-    from .databricks_spark_python_activity import DatabricksSparkPythonActivity
-    from .databricks_spark_jar_activity import DatabricksSparkJarActivity
-    from .databricks_notebook_activity import DatabricksNotebookActivity
-    from .data_lake_analytics_usql_activity import DataLakeAnalyticsUSQLActivity
-    from .azure_ml_update_resource_activity import AzureMLUpdateResourceActivity
-    from .azure_ml_web_service_file import AzureMLWebServiceFile
-    from .azure_ml_batch_execution_activity import AzureMLBatchExecutionActivity
-    from .get_metadata_activity import GetMetadataActivity
-    from .web_activity_authentication import WebActivityAuthentication
-    from .web_activity import WebActivity
-    from .redshift_unload_settings import RedshiftUnloadSettings
-    from .amazon_redshift_source import AmazonRedshiftSource
-    from .google_ad_words_source import GoogleAdWordsSource
-    from .oracle_service_cloud_source import OracleServiceCloudSource
-    from .dynamics_ax_source import DynamicsAXSource
-    from .responsys_source import ResponsysSource
-    from .salesforce_marketing_cloud_source import SalesforceMarketingCloudSource
-    from .vertica_source import VerticaSource
-    from .netezza_partition_settings import NetezzaPartitionSettings
-    from .netezza_source import NetezzaSource
-    from .zoho_source import ZohoSource
-    from .xero_source import XeroSource
-    from .square_source import SquareSource
-    from .spark_source import SparkSource
-    from .shopify_source import ShopifySource
-    from .service_now_source import ServiceNowSource
-    from .quick_books_source import QuickBooksSource
-    from .presto_source import PrestoSource
-    from .phoenix_source import PhoenixSource
-    from .paypal_source import PaypalSource
-    from .marketo_source import MarketoSource
-    from .azure_maria_db_source import AzureMariaDBSource
-    from .maria_db_source import MariaDBSource
-    from .magento_source import MagentoSource
-    from .jira_source import JiraSource
-    from .impala_source import ImpalaSource
-    from .hubspot_source import HubspotSource
-    from .hive_source import HiveSource
-    from .hbase_source import HBaseSource
-    from .greenplum_source import GreenplumSource
-    from .google_big_query_source import GoogleBigQuerySource
-    from .eloqua_source import EloquaSource
-    from .drill_source import DrillSource
-    from .couchbase_source import CouchbaseSource
-    from .concur_source import ConcurSource
-    from .azure_postgre_sql_source import AzurePostgreSqlSource
-    from .amazon_mws_source import AmazonMWSSource
-    from .http_source import HttpSource
-    from .azure_blob_fs_source import AzureBlobFSSource
-    from .azure_data_lake_store_source import AzureDataLakeStoreSource
-    from .office365_source import Office365Source
-    from .mongo_db_cursor_methods_properties import MongoDbCursorMethodsProperties
-    from .cosmos_db_mongo_db_api_source import CosmosDbMongoDbApiSource
-    from .mongo_db_v2_source import MongoDbV2Source
-    from .mongo_db_source import MongoDbSource
-    from .cassandra_source import CassandraSource
-    from .web_source import WebSource
-    from .teradata_partition_settings import TeradataPartitionSettings
-    from .teradata_source import TeradataSource
-    from .oracle_partition_settings import OraclePartitionSettings
-    from .oracle_source import OracleSource
-    from .azure_data_explorer_source import AzureDataExplorerSource
-    from .azure_my_sql_source import AzureMySqlSource
-    from .distcp_settings import DistcpSettings
-    from .hdfs_source import HdfsSource
-    from .file_system_source import FileSystemSource
-    from .sql_dw_source import SqlDWSource
-    from .stored_procedure_parameter import StoredProcedureParameter
-    from .sql_mi_source import SqlMISource
-    from .azure_sql_source import AzureSqlSource
-    from .sql_server_source import SqlServerSource
-    from .sql_source import SqlSource
-    from .rest_source import RestSource
-    from .sap_table_partition_settings import SapTablePartitionSettings
-    from .sap_table_source import SapTableSource
-    from .sap_open_hub_source import SapOpenHubSource
-    from .sap_hana_source import SapHanaSource
-    from .sap_ecc_source import SapEccSource
-    from .sap_cloud_for_customer_source import SapCloudForCustomerSource
-    from .salesforce_service_cloud_source import SalesforceServiceCloudSource
-    from .salesforce_source import SalesforceSource
-    from .odata_source import ODataSource
-    from .sap_bw_source import SapBwSource
-    from .sybase_source import SybaseSource
-    from .postgre_sql_source import PostgreSqlSource
-    from .my_sql_source import MySqlSource
-    from .odbc_source import OdbcSource
-    from .db2_source import Db2Source
-    from .microsoft_access_source import MicrosoftAccessSource
-    from .informix_source import InformixSource
-    from .relational_source import RelationalSource
-    from .common_data_service_for_apps_source import CommonDataServiceForAppsSource
-    from .dynamics_crm_source import DynamicsCrmSource
-    from .dynamics_source import DynamicsSource
-    from .document_db_collection_source import DocumentDbCollectionSource
-    from .blob_source import BlobSource
-    from .azure_table_source import AzureTableSource
-    from .hdfs_read_settings import HdfsReadSettings
-    from .http_read_settings import HttpReadSettings
-    from .sftp_read_settings import SftpReadSettings
-    from .ftp_read_settings import FtpReadSettings
-    from .file_server_read_settings import FileServerReadSettings
-    from .amazon_s3_read_settings import AmazonS3ReadSettings
-    from .azure_data_lake_store_read_settings import AzureDataLakeStoreReadSettings
-    from .azure_blob_fs_read_settings import AzureBlobFSReadSettings
-    from .azure_blob_storage_read_settings import AzureBlobStorageReadSettings
-    from .store_read_settings import StoreReadSettings
-    from .binary_source import BinarySource
-    from .format_read_settings import FormatReadSettings
-    from .delimited_text_read_settings import DelimitedTextReadSettings
-    from .delimited_text_source import DelimitedTextSource
-    from .parquet_source import ParquetSource
-    from .avro_source import AvroSource
-    from .copy_source import CopySource
-    from .lookup_activity import LookupActivity
-    from .azure_data_explorer_command_activity import AzureDataExplorerCommandActivity
-    from .log_storage_settings import LogStorageSettings
-    from .delete_activity import DeleteActivity
-    from .sql_server_stored_procedure_activity import SqlServerStoredProcedureActivity
-    from .custom_activity_reference_object import CustomActivityReferenceObject
-    from .custom_activity import CustomActivity
-    from .ssis_access_credential import SSISAccessCredential
-    from .ssis_log_location import SSISLogLocation
-    from .ssis_property_override import SSISPropertyOverride
-    from .ssis_execution_parameter import SSISExecutionParameter
-    from .ssis_execution_credential import SSISExecutionCredential
-    from .ssis_package_location import SSISPackageLocation
-    from .execute_ssis_package_activity import ExecuteSSISPackageActivity
-    from .hd_insight_spark_activity import HDInsightSparkActivity
-    from .hd_insight_streaming_activity import HDInsightStreamingActivity
-    from .hd_insight_map_reduce_activity import HDInsightMapReduceActivity
-    from .hd_insight_pig_activity import HDInsightPigActivity
-    from .hd_insight_hive_activity import HDInsightHiveActivity
-    from .redirect_incompatible_row_settings import RedirectIncompatibleRowSettings
-    from .staging_settings import StagingSettings
-    from .cosmos_db_mongo_db_api_sink import CosmosDbMongoDbApiSink
-    from .salesforce_service_cloud_sink import SalesforceServiceCloudSink
-    from .salesforce_sink import SalesforceSink
-    from .azure_data_explorer_sink import AzureDataExplorerSink
-    from .common_data_service_for_apps_sink import CommonDataServiceForAppsSink
-    from .dynamics_crm_sink import DynamicsCrmSink
-    from .dynamics_sink import DynamicsSink
-    from .microsoft_access_sink import MicrosoftAccessSink
-    from .informix_sink import InformixSink
-    from .odbc_sink import OdbcSink
-    from .azure_search_index_sink import AzureSearchIndexSink
-    from .azure_blob_fs_sink import AzureBlobFSSink
-    from .azure_data_lake_store_sink import AzureDataLakeStoreSink
-    from .oracle_sink import OracleSink
-    from .polybase_settings import PolybaseSettings
-    from .sql_dw_sink import SqlDWSink
-    from .sql_mi_sink import SqlMISink
-    from .azure_sql_sink import AzureSqlSink
-    from .sql_server_sink import SqlServerSink
-    from .sql_sink import SqlSink
-    from .document_db_collection_sink import DocumentDbCollectionSink
-    from .file_system_sink import FileSystemSink
-    from .blob_sink import BlobSink
-    from .binary_sink import BinarySink
-    from .file_server_write_settings import FileServerWriteSettings
-    from .azure_data_lake_store_write_settings import AzureDataLakeStoreWriteSettings
-    from .azure_blob_fs_write_settings import AzureBlobFSWriteSettings
-    from .azure_blob_storage_write_settings import AzureBlobStorageWriteSettings
-    from .store_write_settings import StoreWriteSettings
-    from .parquet_sink import ParquetSink
-    from .delimited_text_write_settings import DelimitedTextWriteSettings
-    from .format_write_settings import FormatWriteSettings
-    from .avro_write_settings import AvroWriteSettings
-    from .avro_sink import AvroSink
-    from .azure_table_sink import AzureTableSink
-    from .azure_queue_sink import AzureQueueSink
-    from .sap_cloud_for_customer_sink import SapCloudForCustomerSink
-    from .azure_postgre_sql_sink import AzurePostgreSqlSink
-    from .delimited_text_sink import DelimitedTextSink
-    from .copy_sink import CopySink
-    from .copy_activity import CopyActivity
-    from .execution_activity import ExecutionActivity
-    from .web_hook_activity import WebHookActivity
-    from .append_variable_activity import AppendVariableActivity
-    from .set_variable_activity import SetVariableActivity
-    from .filter_activity import FilterActivity
-    from .validation_activity import ValidationActivity
-    from .until_activity import UntilActivity
-    from .wait_activity import WaitActivity
-    from .for_each_activity import ForEachActivity
-    from .if_condition_activity import IfConditionActivity
-    from .execute_pipeline_activity import ExecutePipelineActivity
-    from .control_activity import ControlActivity
-    from .linked_integration_runtime import LinkedIntegrationRuntime
-    from .self_hosted_integration_runtime_node import SelfHostedIntegrationRuntimeNode
-    from .self_hosted_integration_runtime_status import SelfHostedIntegrationRuntimeStatus
-    from .managed_integration_runtime_operation_result import ManagedIntegrationRuntimeOperationResult
-    from .managed_integration_runtime_error import ManagedIntegrationRuntimeError
-    from .managed_integration_runtime_node import ManagedIntegrationRuntimeNode
-    from .managed_integration_runtime_status import ManagedIntegrationRuntimeStatus
-    from .linked_integration_runtime_rbac_authorization import LinkedIntegrationRuntimeRbacAuthorization
-    from .linked_integration_runtime_key_authorization import LinkedIntegrationRuntimeKeyAuthorization
-    from .linked_integration_runtime_type import LinkedIntegrationRuntimeType
-    from .self_hosted_integration_runtime import SelfHostedIntegrationRuntime
-    from .entity_reference import EntityReference
-    from .integration_runtime_data_proxy_properties import IntegrationRuntimeDataProxyProperties
-    from .integration_runtime_custom_setup_script_properties import IntegrationRuntimeCustomSetupScriptProperties
-    from .integration_runtime_ssis_catalog_info import IntegrationRuntimeSsisCatalogInfo
-    from .integration_runtime_ssis_properties import IntegrationRuntimeSsisProperties
-    from .integration_runtime_vnet_properties import IntegrationRuntimeVNetProperties
-    from .integration_runtime_compute_properties import IntegrationRuntimeComputeProperties
-    from .managed_integration_runtime import ManagedIntegrationRuntime
-    from .integration_runtime_node_ip_address import IntegrationRuntimeNodeIpAddress
-    from .ssis_variable import SsisVariable
-    from .ssis_environment import SsisEnvironment
-    from .ssis_parameter import SsisParameter
-    from .ssis_package import SsisPackage
import SsisEnvironmentReference - from .ssis_project import SsisProject - from .ssis_folder import SsisFolder - from .ssis_object_metadata import SsisObjectMetadata - from .ssis_object_metadata_list_response import SsisObjectMetadataListResponse - from .integration_runtime_node_monitoring_data import IntegrationRuntimeNodeMonitoringData - from .integration_runtime_monitoring_data import IntegrationRuntimeMonitoringData - from .integration_runtime_auth_keys import IntegrationRuntimeAuthKeys - from .integration_runtime_regenerate_key_parameters import IntegrationRuntimeRegenerateKeyParameters - from .integration_runtime_connection_info import IntegrationRuntimeConnectionInfo -from .operation_paged import OperationPaged -from .factory_paged import FactoryPaged -from .integration_runtime_resource_paged import IntegrationRuntimeResourcePaged -from .linked_service_resource_paged import LinkedServiceResourcePaged -from .dataset_resource_paged import DatasetResourcePaged -from .pipeline_resource_paged import PipelineResourcePaged -from .trigger_resource_paged import TriggerResourcePaged -from .rerun_trigger_resource_paged import RerunTriggerResourcePaged -from .data_factory_management_client_enums import ( + from ._models import AccessPolicyResponse + from ._models import Activity + from ._models import ActivityDependency + from ._models import ActivityPolicy + from ._models import ActivityRun + from ._models import ActivityRunsQueryResponse + from ._models import AmazonMWSLinkedService + from ._models import AmazonMWSObjectDataset + from ._models import AmazonMWSSource + from ._models import AmazonRedshiftLinkedService + from ._models import AmazonRedshiftSource + from ._models import AmazonRedshiftTableDataset + from ._models import AmazonS3Dataset + from ._models import AmazonS3LinkedService + from ._models import AmazonS3Location + from ._models import AmazonS3ReadSettings + from ._models import AppendVariableActivity + from ._models import AvroDataset + from ._models import AvroFormat + from ._models import AvroSink + from ._models import AvroSource + from ._models import AvroWriteSettings + from ._models import AzureBatchLinkedService + from ._models import AzureBlobDataset + from ._models import AzureBlobFSDataset + from ._models import AzureBlobFSLinkedService + from ._models import AzureBlobFSLocation + from ._models import AzureBlobFSReadSettings + from ._models import AzureBlobFSSink + from ._models import AzureBlobFSSource + from ._models import AzureBlobFSWriteSettings + from ._models import AzureBlobStorageLinkedService + from ._models import AzureBlobStorageLocation + from ._models import AzureBlobStorageReadSettings + from ._models import AzureBlobStorageWriteSettings + from ._models import AzureDatabricksLinkedService + from ._models import AzureDataExplorerCommandActivity + from ._models import AzureDataExplorerLinkedService + from ._models import AzureDataExplorerSink + from ._models import AzureDataExplorerSource + from ._models import AzureDataExplorerTableDataset + from ._models import AzureDataLakeAnalyticsLinkedService + from ._models import AzureDataLakeStoreDataset + from ._models import AzureDataLakeStoreLinkedService + from ._models import AzureDataLakeStoreLocation + from ._models import AzureDataLakeStoreReadSettings + from ._models import AzureDataLakeStoreSink + from ._models import AzureDataLakeStoreSource + from ._models import AzureDataLakeStoreWriteSettings + from ._models import AzureFunctionActivity + from ._models import AzureFunctionLinkedService + from 
._models import AzureKeyVaultLinkedService + from ._models import AzureKeyVaultSecretReference + from ._models import AzureMariaDBLinkedService + from ._models import AzureMariaDBSource + from ._models import AzureMariaDBTableDataset + from ._models import AzureMLBatchExecutionActivity + from ._models import AzureMLLinkedService + from ._models import AzureMLUpdateResourceActivity + from ._models import AzureMLWebServiceFile + from ._models import AzureMySqlLinkedService + from ._models import AzureMySqlSink + from ._models import AzureMySqlSource + from ._models import AzureMySqlTableDataset + from ._models import AzurePostgreSqlLinkedService + from ._models import AzurePostgreSqlSink + from ._models import AzurePostgreSqlSource + from ._models import AzurePostgreSqlTableDataset + from ._models import AzureQueueSink + from ._models import AzureSearchIndexDataset + from ._models import AzureSearchIndexSink + from ._models import AzureSearchLinkedService + from ._models import AzureSqlDatabaseLinkedService + from ._models import AzureSqlDWLinkedService + from ._models import AzureSqlDWTableDataset + from ._models import AzureSqlMILinkedService + from ._models import AzureSqlMITableDataset + from ._models import AzureSqlSink + from ._models import AzureSqlSource + from ._models import AzureSqlTableDataset + from ._models import AzureStorageLinkedService + from ._models import AzureTableDataset + from ._models import AzureTableSink + from ._models import AzureTableSource + from ._models import AzureTableStorageLinkedService + from ._models import BinaryDataset + from ._models import BinarySink + from ._models import BinarySource + from ._models import BlobEventsTrigger + from ._models import BlobSink + from ._models import BlobSource + from ._models import BlobTrigger + from ._models import CassandraLinkedService + from ._models import CassandraSource + from ._models import CassandraTableDataset + from ._models import CommonDataServiceForAppsEntityDataset + from ._models import CommonDataServiceForAppsLinkedService + from ._models import CommonDataServiceForAppsSink + from ._models import CommonDataServiceForAppsSource + from ._models import ConcurLinkedService + from ._models import ConcurObjectDataset + from ._models import ConcurSource + from ._models import ControlActivity + from ._models import CopyActivity + from ._models import CopySink + from ._models import CopySource + from ._models import CosmosDbLinkedService + from ._models import CosmosDbMongoDbApiCollectionDataset + from ._models import CosmosDbMongoDbApiLinkedService + from ._models import CosmosDbMongoDbApiSink + from ._models import CosmosDbMongoDbApiSource + from ._models import CouchbaseLinkedService + from ._models import CouchbaseSource + from ._models import CouchbaseTableDataset + from ._models import CreateLinkedIntegrationRuntimeRequest + from ._models import CreateRunResponse + from ._models import CustomActivity + from ._models import CustomActivityReferenceObject + from ._models import CustomDataset + from ._models import CustomDataSourceLinkedService + from ._models import DatabricksNotebookActivity + from ._models import DatabricksSparkJarActivity + from ._models import DatabricksSparkPythonActivity + from ._models import DataLakeAnalyticsUSQLActivity + from ._models import Dataset + from ._models import DatasetBZip2Compression + from ._models import DatasetCompression + from ._models import DatasetDeflateCompression + from ._models import DatasetFolder + from ._models import DatasetGZipCompression + from 
._models import DatasetLocation + from ._models import DatasetReference + from ._models import DatasetResource + from ._models import DatasetStorageFormat + from ._models import DatasetZipDeflateCompression + from ._models import Db2LinkedService + from ._models import Db2Source + from ._models import Db2TableDataset + from ._models import DeleteActivity + from ._models import DelimitedTextDataset + from ._models import DelimitedTextReadSettings + from ._models import DelimitedTextSink + from ._models import DelimitedTextSource + from ._models import DelimitedTextWriteSettings + from ._models import DependencyReference + from ._models import DistcpSettings + from ._models import DocumentDbCollectionDataset + from ._models import DocumentDbCollectionSink + from ._models import DocumentDbCollectionSource + from ._models import DrillLinkedService + from ._models import DrillSource + from ._models import DrillTableDataset + from ._models import DynamicsAXLinkedService + from ._models import DynamicsAXResourceDataset + from ._models import DynamicsAXSource + from ._models import DynamicsCrmEntityDataset + from ._models import DynamicsCrmLinkedService + from ._models import DynamicsCrmSink + from ._models import DynamicsCrmSource + from ._models import DynamicsEntityDataset + from ._models import DynamicsLinkedService + from ._models import DynamicsSink + from ._models import DynamicsSource + from ._models import EloquaLinkedService + from ._models import EloquaObjectDataset + from ._models import EloquaSource + from ._models import EntityReference + from ._models import ExecutePipelineActivity + from ._models import ExecuteSSISPackageActivity + from ._models import ExecutionActivity + from ._models import ExposureControlRequest + from ._models import ExposureControlResponse + from ._models import Expression + from ._models import Factory + from ._models import FactoryGitHubConfiguration + from ._models import FactoryIdentity + from ._models import FactoryRepoConfiguration + from ._models import FactoryRepoUpdate + from ._models import FactoryUpdateParameters + from ._models import FactoryVSTSConfiguration + from ._models import FileServerLinkedService + from ._models import FileServerLocation + from ._models import FileServerReadSettings + from ._models import FileServerWriteSettings + from ._models import FileShareDataset + from ._models import FileSystemSink + from ._models import FileSystemSource + from ._models import FilterActivity + from ._models import ForEachActivity + from ._models import FormatReadSettings + from ._models import FormatWriteSettings + from ._models import FtpReadSettings + from ._models import FtpServerLinkedService + from ._models import FtpServerLocation + from ._models import GetMetadataActivity + from ._models import GetSsisObjectMetadataRequest + from ._models import GitHubAccessTokenRequest + from ._models import GitHubAccessTokenResponse + from ._models import GoogleAdWordsLinkedService + from ._models import GoogleAdWordsObjectDataset + from ._models import GoogleAdWordsSource + from ._models import GoogleBigQueryLinkedService + from ._models import GoogleBigQueryObjectDataset + from ._models import GoogleBigQuerySource + from ._models import GreenplumLinkedService + from ._models import GreenplumSource + from ._models import GreenplumTableDataset + from ._models import HBaseLinkedService + from ._models import HBaseObjectDataset + from ._models import HBaseSource + from ._models import HdfsLinkedService + from ._models import HdfsLocation + from ._models 
import HdfsReadSettings + from ._models import HdfsSource + from ._models import HDInsightHiveActivity + from ._models import HDInsightLinkedService + from ._models import HDInsightMapReduceActivity + from ._models import HDInsightOnDemandLinkedService + from ._models import HDInsightPigActivity + from ._models import HDInsightSparkActivity + from ._models import HDInsightStreamingActivity + from ._models import HiveLinkedService + from ._models import HiveObjectDataset + from ._models import HiveSource + from ._models import HttpDataset + from ._models import HttpLinkedService + from ._models import HttpReadSettings + from ._models import HttpServerLocation + from ._models import HttpSource + from ._models import HubspotLinkedService + from ._models import HubspotObjectDataset + from ._models import HubspotSource + from ._models import IfConditionActivity + from ._models import ImpalaLinkedService + from ._models import ImpalaObjectDataset + from ._models import ImpalaSource + from ._models import InformixLinkedService + from ._models import InformixSink + from ._models import InformixSource + from ._models import InformixTableDataset + from ._models import IntegrationRuntime + from ._models import IntegrationRuntimeAuthKeys + from ._models import IntegrationRuntimeComputeProperties + from ._models import IntegrationRuntimeConnectionInfo + from ._models import IntegrationRuntimeCustomSetupScriptProperties + from ._models import IntegrationRuntimeDataProxyProperties + from ._models import IntegrationRuntimeMonitoringData + from ._models import IntegrationRuntimeNodeIpAddress + from ._models import IntegrationRuntimeNodeMonitoringData + from ._models import IntegrationRuntimeReference + from ._models import IntegrationRuntimeRegenerateKeyParameters + from ._models import IntegrationRuntimeResource + from ._models import IntegrationRuntimeSsisCatalogInfo + from ._models import IntegrationRuntimeSsisProperties + from ._models import IntegrationRuntimeStatus + from ._models import IntegrationRuntimeStatusListResponse + from ._models import IntegrationRuntimeStatusResponse + from ._models import IntegrationRuntimeVNetProperties + from ._models import JiraLinkedService + from ._models import JiraObjectDataset + from ._models import JiraSource + from ._models import JsonFormat + from ._models import LinkedIntegrationRuntime + from ._models import LinkedIntegrationRuntimeKeyAuthorization + from ._models import LinkedIntegrationRuntimeRbacAuthorization + from ._models import LinkedIntegrationRuntimeRequest + from ._models import LinkedIntegrationRuntimeType + from ._models import LinkedService + from ._models import LinkedServiceReference + from ._models import LinkedServiceResource + from ._models import LogStorageSettings + from ._models import LookupActivity + from ._models import MagentoLinkedService + from ._models import MagentoObjectDataset + from ._models import MagentoSource + from ._models import ManagedIntegrationRuntime + from ._models import ManagedIntegrationRuntimeError + from ._models import ManagedIntegrationRuntimeNode + from ._models import ManagedIntegrationRuntimeOperationResult + from ._models import ManagedIntegrationRuntimeStatus + from ._models import MariaDBLinkedService + from ._models import MariaDBSource + from ._models import MariaDBTableDataset + from ._models import MarketoLinkedService + from ._models import MarketoObjectDataset + from ._models import MarketoSource + from ._models import MicrosoftAccessLinkedService + from ._models import MicrosoftAccessSink + from 
._models import MicrosoftAccessSource + from ._models import MicrosoftAccessTableDataset + from ._models import MongoDbCollectionDataset + from ._models import MongoDbCursorMethodsProperties + from ._models import MongoDbLinkedService + from ._models import MongoDbSource + from ._models import MongoDbV2CollectionDataset + from ._models import MongoDbV2LinkedService + from ._models import MongoDbV2Source + from ._models import MultiplePipelineTrigger + from ._models import MySqlLinkedService + from ._models import MySqlSource + from ._models import MySqlTableDataset + from ._models import NetezzaLinkedService + from ._models import NetezzaPartitionSettings + from ._models import NetezzaSource + from ._models import NetezzaTableDataset + from ._models import ODataLinkedService + from ._models import ODataResourceDataset + from ._models import ODataSource + from ._models import OdbcLinkedService + from ._models import OdbcSink + from ._models import OdbcSource + from ._models import OdbcTableDataset + from ._models import Office365Dataset + from ._models import Office365LinkedService + from ._models import Office365Source + from ._models import Operation + from ._models import OperationDisplay + from ._models import OperationLogSpecification + from ._models import OperationMetricAvailability + from ._models import OperationMetricDimension + from ._models import OperationMetricSpecification + from ._models import OperationServiceSpecification + from ._models import OracleLinkedService + from ._models import OraclePartitionSettings + from ._models import OracleServiceCloudLinkedService + from ._models import OracleServiceCloudObjectDataset + from ._models import OracleServiceCloudSource + from ._models import OracleSink + from ._models import OracleSource + from ._models import OracleTableDataset + from ._models import OrcFormat + from ._models import ParameterSpecification + from ._models import ParquetDataset + from ._models import ParquetFormat + from ._models import ParquetSink + from ._models import ParquetSource + from ._models import PaypalLinkedService + from ._models import PaypalObjectDataset + from ._models import PaypalSource + from ._models import PhoenixLinkedService + from ._models import PhoenixObjectDataset + from ._models import PhoenixSource + from ._models import PipelineFolder + from ._models import PipelineReference + from ._models import PipelineResource + from ._models import PipelineRun + from ._models import PipelineRunInvokedBy + from ._models import PipelineRunsQueryResponse + from ._models import PolybaseSettings + from ._models import PostgreSqlLinkedService + from ._models import PostgreSqlSource + from ._models import PostgreSqlTableDataset + from ._models import PrestoLinkedService + from ._models import PrestoObjectDataset + from ._models import PrestoSource + from ._models import QuickBooksLinkedService + from ._models import QuickBooksObjectDataset + from ._models import QuickBooksSource + from ._models import RecurrenceSchedule + from ._models import RecurrenceScheduleOccurrence + from ._models import RedirectIncompatibleRowSettings + from ._models import RedshiftUnloadSettings + from ._models import RelationalSource + from ._models import RelationalTableDataset + from ._models import RerunTriggerResource + from ._models import RerunTumblingWindowTrigger + from ._models import RerunTumblingWindowTriggerActionParameters + from ._models import Resource + from ._models import ResponsysLinkedService + from ._models import ResponsysObjectDataset + from ._models 
import ResponsysSource + from ._models import RestResourceDataset + from ._models import RestServiceLinkedService + from ._models import RestSource + from ._models import RetryPolicy + from ._models import RunFilterParameters + from ._models import RunQueryFilter + from ._models import RunQueryOrderBy + from ._models import SalesforceLinkedService + from ._models import SalesforceMarketingCloudLinkedService + from ._models import SalesforceMarketingCloudObjectDataset + from ._models import SalesforceMarketingCloudSource + from ._models import SalesforceObjectDataset + from ._models import SalesforceServiceCloudLinkedService + from ._models import SalesforceServiceCloudObjectDataset + from ._models import SalesforceServiceCloudSink + from ._models import SalesforceServiceCloudSource + from ._models import SalesforceSink + from ._models import SalesforceSource + from ._models import SapBwCubeDataset + from ._models import SapBWLinkedService + from ._models import SapBwSource + from ._models import SapCloudForCustomerLinkedService + from ._models import SapCloudForCustomerResourceDataset + from ._models import SapCloudForCustomerSink + from ._models import SapCloudForCustomerSource + from ._models import SapEccLinkedService + from ._models import SapEccResourceDataset + from ._models import SapEccSource + from ._models import SapHanaLinkedService + from ._models import SapHanaSource + from ._models import SapHanaTableDataset + from ._models import SapOpenHubLinkedService + from ._models import SapOpenHubSource + from ._models import SapOpenHubTableDataset + from ._models import SapTableLinkedService + from ._models import SapTablePartitionSettings + from ._models import SapTableResourceDataset + from ._models import SapTableSource + from ._models import ScheduleTrigger + from ._models import ScheduleTriggerRecurrence + from ._models import ScriptAction + from ._models import SecretBase + from ._models import SecureString + from ._models import SelfDependencyTumblingWindowTriggerReference + from ._models import SelfHostedIntegrationRuntime + from ._models import SelfHostedIntegrationRuntimeNode + from ._models import SelfHostedIntegrationRuntimeStatus + from ._models import ServiceNowLinkedService + from ._models import ServiceNowObjectDataset + from ._models import ServiceNowSource + from ._models import SetVariableActivity + from ._models import SftpLocation + from ._models import SftpReadSettings + from ._models import SftpServerLinkedService + from ._models import ShopifyLinkedService + from ._models import ShopifyObjectDataset + from ._models import ShopifySource + from ._models import SparkLinkedService + from ._models import SparkObjectDataset + from ._models import SparkSource + from ._models import SqlDWSink + from ._models import SqlDWSource + from ._models import SqlMISink + from ._models import SqlMISource + from ._models import SqlServerLinkedService + from ._models import SqlServerSink + from ._models import SqlServerSource + from ._models import SqlServerStoredProcedureActivity + from ._models import SqlServerTableDataset + from ._models import SqlSink + from ._models import SqlSource + from ._models import SquareLinkedService + from ._models import SquareObjectDataset + from ._models import SquareSource + from ._models import SSISAccessCredential + from ._models import SsisEnvironment + from ._models import SsisEnvironmentReference + from ._models import SSISExecutionCredential + from ._models import SSISExecutionParameter + from ._models import SsisFolder + from ._models 
import SSISLogLocation + from ._models import SsisObjectMetadata + from ._models import SsisObjectMetadataListResponse + from ._models import SsisObjectMetadataStatusResponse + from ._models import SsisPackage + from ._models import SSISPackageLocation + from ._models import SsisParameter + from ._models import SsisProject + from ._models import SSISPropertyOverride + from ._models import SsisVariable + from ._models import StagingSettings + from ._models import StoredProcedureParameter + from ._models import StoreReadSettings + from ._models import StoreWriteSettings + from ._models import SubResource + from ._models import SybaseLinkedService + from ._models import SybaseSource + from ._models import SybaseTableDataset + from ._models import TeradataLinkedService + from ._models import TeradataPartitionSettings + from ._models import TeradataSource + from ._models import TeradataTableDataset + from ._models import TextFormat + from ._models import Trigger + from ._models import TriggerDependencyReference + from ._models import TriggerPipelineReference + from ._models import TriggerReference + from ._models import TriggerResource + from ._models import TriggerRun + from ._models import TriggerRunsQueryResponse + from ._models import TumblingWindowTrigger + from ._models import TumblingWindowTriggerDependencyReference + from ._models import UntilActivity + from ._models import UpdateIntegrationRuntimeNodeRequest + from ._models import UpdateIntegrationRuntimeRequest + from ._models import UserAccessPolicy + from ._models import UserProperty + from ._models import ValidationActivity + from ._models import VariableSpecification + from ._models import VerticaLinkedService + from ._models import VerticaSource + from ._models import VerticaTableDataset + from ._models import WaitActivity + from ._models import WebActivity + from ._models import WebActivityAuthentication + from ._models import WebAnonymousAuthentication + from ._models import WebBasicAuthentication + from ._models import WebClientCertificateAuthentication + from ._models import WebHookActivity + from ._models import WebLinkedService + from ._models import WebLinkedServiceTypeProperties + from ._models import WebSource + from ._models import WebTableDataset + from ._models import XeroLinkedService + from ._models import XeroObjectDataset + from ._models import XeroSource + from ._models import ZohoLinkedService + from ._models import ZohoObjectDataset + from ._models import ZohoSource +from ._paged_models import DatasetResourcePaged +from ._paged_models import FactoryPaged +from ._paged_models import IntegrationRuntimeResourcePaged +from ._paged_models import LinkedServiceResourcePaged +from ._paged_models import OperationPaged +from ._paged_models import PipelineResourcePaged +from ._paged_models import RerunTriggerResourcePaged +from ._paged_models import TriggerResourcePaged +from ._data_factory_management_client_enums import ( IntegrationRuntimeState, IntegrationRuntimeAutoUpdate, ParameterType, @@ -1107,513 +1113,516 @@ ) __all__ = [ - 'Resource', - 'SubResource', - 'Expression', - 'SecureString', - 'LinkedServiceReference', - 'AzureKeyVaultSecretReference', - 'SecretBase', - 'FactoryIdentity', - 'FactoryRepoConfiguration', - 'Factory', - 'IntegrationRuntime', - 'IntegrationRuntimeResource', - 'IntegrationRuntimeReference', - 'IntegrationRuntimeStatus', - 'IntegrationRuntimeStatusResponse', - 'IntegrationRuntimeStatusListResponse', - 'UpdateIntegrationRuntimeRequest', - 'UpdateIntegrationRuntimeNodeRequest', - 
'LinkedIntegrationRuntimeRequest', - 'CreateLinkedIntegrationRuntimeRequest', - 'ParameterSpecification', - 'LinkedService', - 'LinkedServiceResource', - 'DatasetFolder', - 'Dataset', - 'DatasetResource', - 'ActivityDependency', - 'UserProperty', - 'Activity', - 'VariableSpecification', - 'PipelineFolder', - 'PipelineResource', - 'Trigger', - 'TriggerResource', - 'CreateRunResponse', - 'FactoryVSTSConfiguration', - 'FactoryGitHubConfiguration', - 'FactoryRepoUpdate', - 'GitHubAccessTokenRequest', - 'GitHubAccessTokenResponse', - 'UserAccessPolicy', 'AccessPolicyResponse', - 'PipelineReference', - 'TriggerPipelineReference', - 'FactoryUpdateParameters', - 'DatasetReference', - 'RunQueryFilter', - 'RunQueryOrderBy', - 'RunFilterParameters', - 'PipelineRunInvokedBy', - 'PipelineRun', - 'PipelineRunsQueryResponse', + 'Activity', + 'ActivityDependency', + 'ActivityPolicy', 'ActivityRun', 'ActivityRunsQueryResponse', - 'TriggerRun', - 'TriggerRunsQueryResponse', - 'RerunTumblingWindowTriggerActionParameters', - 'RerunTumblingWindowTrigger', - 'RerunTriggerResource', - 'OperationDisplay', - 'OperationLogSpecification', - 'OperationMetricAvailability', - 'OperationMetricDimension', - 'OperationMetricSpecification', - 'OperationServiceSpecification', - 'Operation', - 'GetSsisObjectMetadataRequest', - 'SsisObjectMetadataStatusResponse', - 'ExposureControlRequest', - 'ExposureControlResponse', - 'SelfDependencyTumblingWindowTriggerReference', - 'TriggerReference', - 'TumblingWindowTriggerDependencyReference', - 'TriggerDependencyReference', - 'DependencyReference', - 'RetryPolicy', - 'TumblingWindowTrigger', - 'BlobEventsTrigger', - 'BlobTrigger', - 'RecurrenceScheduleOccurrence', - 'RecurrenceSchedule', - 'ScheduleTriggerRecurrence', - 'ScheduleTrigger', - 'MultiplePipelineTrigger', - 'AzureFunctionLinkedService', - 'AzureDataExplorerLinkedService', - 'SapTableLinkedService', - 'GoogleAdWordsLinkedService', - 'OracleServiceCloudLinkedService', - 'DynamicsAXLinkedService', - 'ResponsysLinkedService', - 'AzureDatabricksLinkedService', - 'AzureDataLakeAnalyticsLinkedService', - 'ScriptAction', - 'HDInsightOnDemandLinkedService', - 'SalesforceMarketingCloudLinkedService', - 'NetezzaLinkedService', - 'VerticaLinkedService', - 'ZohoLinkedService', - 'XeroLinkedService', - 'SquareLinkedService', - 'SparkLinkedService', - 'ShopifyLinkedService', - 'ServiceNowLinkedService', - 'QuickBooksLinkedService', - 'PrestoLinkedService', - 'PhoenixLinkedService', - 'PaypalLinkedService', - 'MarketoLinkedService', - 'AzureMariaDBLinkedService', - 'MariaDBLinkedService', - 'MagentoLinkedService', - 'JiraLinkedService', - 'ImpalaLinkedService', - 'HubspotLinkedService', - 'HiveLinkedService', - 'HBaseLinkedService', - 'GreenplumLinkedService', - 'GoogleBigQueryLinkedService', - 'EloquaLinkedService', - 'DrillLinkedService', - 'CouchbaseLinkedService', - 'ConcurLinkedService', - 'AzurePostgreSqlLinkedService', 'AmazonMWSLinkedService', - 'SapHanaLinkedService', - 'SapBWLinkedService', - 'SftpServerLinkedService', - 'FtpServerLinkedService', - 'HttpLinkedService', - 'AzureSearchLinkedService', - 'CustomDataSourceLinkedService', + 'AmazonMWSObjectDataset', + 'AmazonMWSSource', 'AmazonRedshiftLinkedService', + 'AmazonRedshiftSource', + 'AmazonRedshiftTableDataset', + 'AmazonS3Dataset', 'AmazonS3LinkedService', - 'RestServiceLinkedService', - 'SapOpenHubLinkedService', - 'SapEccLinkedService', - 'SapCloudForCustomerLinkedService', - 'SalesforceServiceCloudLinkedService', - 'SalesforceLinkedService', - 
'Office365LinkedService', + 'AmazonS3Location', + 'AmazonS3ReadSettings', + 'AppendVariableActivity', + 'AvroDataset', + 'AvroFormat', + 'AvroSink', + 'AvroSource', + 'AvroWriteSettings', + 'AzureBatchLinkedService', + 'AzureBlobDataset', + 'AzureBlobFSDataset', 'AzureBlobFSLinkedService', + 'AzureBlobFSLocation', + 'AzureBlobFSReadSettings', + 'AzureBlobFSSink', + 'AzureBlobFSSource', + 'AzureBlobFSWriteSettings', + 'AzureBlobStorageLinkedService', + 'AzureBlobStorageLocation', + 'AzureBlobStorageReadSettings', + 'AzureBlobStorageWriteSettings', + 'AzureDatabricksLinkedService', + 'AzureDataExplorerCommandActivity', + 'AzureDataExplorerLinkedService', + 'AzureDataExplorerSink', + 'AzureDataExplorerSource', + 'AzureDataExplorerTableDataset', + 'AzureDataLakeAnalyticsLinkedService', + 'AzureDataLakeStoreDataset', 'AzureDataLakeStoreLinkedService', - 'CosmosDbMongoDbApiLinkedService', - 'MongoDbV2LinkedService', - 'MongoDbLinkedService', - 'CassandraLinkedService', - 'WebClientCertificateAuthentication', - 'WebBasicAuthentication', - 'WebAnonymousAuthentication', - 'WebLinkedServiceTypeProperties', - 'WebLinkedService', - 'ODataLinkedService', - 'HdfsLinkedService', - 'MicrosoftAccessLinkedService', - 'InformixLinkedService', - 'OdbcLinkedService', + 'AzureDataLakeStoreLocation', + 'AzureDataLakeStoreReadSettings', + 'AzureDataLakeStoreSink', + 'AzureDataLakeStoreSource', + 'AzureDataLakeStoreWriteSettings', + 'AzureFunctionActivity', + 'AzureFunctionLinkedService', + 'AzureKeyVaultLinkedService', + 'AzureKeyVaultSecretReference', + 'AzureMariaDBLinkedService', + 'AzureMariaDBSource', + 'AzureMariaDBTableDataset', + 'AzureMLBatchExecutionActivity', 'AzureMLLinkedService', - 'TeradataLinkedService', - 'Db2LinkedService', - 'SybaseLinkedService', - 'PostgreSqlLinkedService', - 'MySqlLinkedService', + 'AzureMLUpdateResourceActivity', + 'AzureMLWebServiceFile', 'AzureMySqlLinkedService', - 'OracleLinkedService', - 'FileServerLinkedService', - 'HDInsightLinkedService', - 'CommonDataServiceForAppsLinkedService', - 'DynamicsCrmLinkedService', - 'DynamicsLinkedService', - 'CosmosDbLinkedService', - 'AzureKeyVaultLinkedService', - 'AzureBatchLinkedService', - 'AzureSqlMILinkedService', + 'AzureMySqlSink', + 'AzureMySqlSource', + 'AzureMySqlTableDataset', + 'AzurePostgreSqlLinkedService', + 'AzurePostgreSqlSink', + 'AzurePostgreSqlSource', + 'AzurePostgreSqlTableDataset', + 'AzureQueueSink', + 'AzureSearchIndexDataset', + 'AzureSearchIndexSink', + 'AzureSearchLinkedService', 'AzureSqlDatabaseLinkedService', - 'SqlServerLinkedService', 'AzureSqlDWLinkedService', - 'AzureTableStorageLinkedService', - 'AzureBlobStorageLinkedService', + 'AzureSqlDWTableDataset', + 'AzureSqlMILinkedService', + 'AzureSqlMITableDataset', + 'AzureSqlSink', + 'AzureSqlSource', + 'AzureSqlTableDataset', 'AzureStorageLinkedService', - 'GoogleAdWordsObjectDataset', - 'AzureDataExplorerTableDataset', - 'OracleServiceCloudObjectDataset', - 'DynamicsAXResourceDataset', - 'ResponsysObjectDataset', - 'SalesforceMarketingCloudObjectDataset', - 'VerticaTableDataset', - 'NetezzaTableDataset', - 'ZohoObjectDataset', - 'XeroObjectDataset', - 'SquareObjectDataset', - 'SparkObjectDataset', - 'ShopifyObjectDataset', - 'ServiceNowObjectDataset', - 'QuickBooksObjectDataset', - 'PrestoObjectDataset', - 'PhoenixObjectDataset', - 'PaypalObjectDataset', - 'MarketoObjectDataset', - 'AzureMariaDBTableDataset', - 'MariaDBTableDataset', - 'MagentoObjectDataset', - 'JiraObjectDataset', - 'ImpalaObjectDataset', - 'HubspotObjectDataset', - 
'HiveObjectDataset', - 'HBaseObjectDataset', - 'GreenplumTableDataset', - 'GoogleBigQueryObjectDataset', - 'EloquaObjectDataset', - 'DrillTableDataset', - 'CouchbaseTableDataset', + 'AzureTableDataset', + 'AzureTableSink', + 'AzureTableSource', + 'AzureTableStorageLinkedService', + 'BinaryDataset', + 'BinarySink', + 'BinarySource', + 'BlobEventsTrigger', + 'BlobSink', + 'BlobSource', + 'BlobTrigger', + 'CassandraLinkedService', + 'CassandraSource', + 'CassandraTableDataset', + 'CommonDataServiceForAppsEntityDataset', + 'CommonDataServiceForAppsLinkedService', + 'CommonDataServiceForAppsSink', + 'CommonDataServiceForAppsSource', + 'ConcurLinkedService', 'ConcurObjectDataset', - 'AzurePostgreSqlTableDataset', - 'AmazonMWSObjectDataset', - 'DatasetZipDeflateCompression', - 'DatasetDeflateCompression', - 'DatasetGZipCompression', + 'ConcurSource', + 'ControlActivity', + 'CopyActivity', + 'CopySink', + 'CopySource', + 'CosmosDbLinkedService', + 'CosmosDbMongoDbApiCollectionDataset', + 'CosmosDbMongoDbApiLinkedService', + 'CosmosDbMongoDbApiSink', + 'CosmosDbMongoDbApiSource', + 'CouchbaseLinkedService', + 'CouchbaseSource', + 'CouchbaseTableDataset', + 'CreateLinkedIntegrationRuntimeRequest', + 'CreateRunResponse', + 'CustomActivity', + 'CustomActivityReferenceObject', + 'CustomDataset', + 'CustomDataSourceLinkedService', + 'DatabricksNotebookActivity', + 'DatabricksSparkJarActivity', + 'DatabricksSparkPythonActivity', + 'DataLakeAnalyticsUSQLActivity', + 'Dataset', 'DatasetBZip2Compression', 'DatasetCompression', - 'ParquetFormat', - 'OrcFormat', - 'AvroFormat', - 'JsonFormat', - 'TextFormat', + 'DatasetDeflateCompression', + 'DatasetFolder', + 'DatasetGZipCompression', + 'DatasetLocation', + 'DatasetReference', + 'DatasetResource', 'DatasetStorageFormat', - 'HttpDataset', - 'AzureSearchIndexDataset', - 'WebTableDataset', - 'SapTableResourceDataset', - 'RestResourceDataset', - 'SqlServerTableDataset', - 'SapOpenHubTableDataset', - 'SapHanaTableDataset', - 'SapEccResourceDataset', - 'SapCloudForCustomerResourceDataset', - 'SapBwCubeDataset', - 'SybaseTableDataset', - 'SalesforceServiceCloudObjectDataset', - 'SalesforceObjectDataset', - 'MicrosoftAccessTableDataset', - 'PostgreSqlTableDataset', - 'MySqlTableDataset', - 'OdbcTableDataset', - 'InformixTableDataset', - 'RelationalTableDataset', - 'AzureMySqlTableDataset', - 'TeradataTableDataset', - 'OracleTableDataset', - 'ODataResourceDataset', - 'CosmosDbMongoDbApiCollectionDataset', - 'MongoDbV2CollectionDataset', - 'MongoDbCollectionDataset', - 'FileShareDataset', - 'Office365Dataset', - 'AzureBlobFSDataset', - 'AzureDataLakeStoreDataset', - 'CommonDataServiceForAppsEntityDataset', + 'DatasetZipDeflateCompression', + 'Db2LinkedService', + 'Db2Source', + 'Db2TableDataset', + 'DeleteActivity', + 'DelimitedTextDataset', + 'DelimitedTextReadSettings', + 'DelimitedTextSink', + 'DelimitedTextSource', + 'DelimitedTextWriteSettings', + 'DependencyReference', + 'DistcpSettings', + 'DocumentDbCollectionDataset', + 'DocumentDbCollectionSink', + 'DocumentDbCollectionSource', + 'DrillLinkedService', + 'DrillSource', + 'DrillTableDataset', + 'DynamicsAXLinkedService', + 'DynamicsAXResourceDataset', + 'DynamicsAXSource', 'DynamicsCrmEntityDataset', + 'DynamicsCrmLinkedService', + 'DynamicsCrmSink', + 'DynamicsCrmSource', 'DynamicsEntityDataset', - 'DocumentDbCollectionDataset', - 'CustomDataset', - 'CassandraTableDataset', - 'AzureSqlDWTableDataset', - 'AzureSqlMITableDataset', - 'AzureSqlTableDataset', - 'AzureTableDataset', - 'AzureBlobDataset', - 
'HdfsLocation', - 'HttpServerLocation', - 'SftpLocation', - 'FtpServerLocation', + 'DynamicsLinkedService', + 'DynamicsSink', + 'DynamicsSource', + 'EloquaLinkedService', + 'EloquaObjectDataset', + 'EloquaSource', + 'EntityReference', + 'ExecutePipelineActivity', + 'ExecuteSSISPackageActivity', + 'ExecutionActivity', + 'ExposureControlRequest', + 'ExposureControlResponse', + 'Expression', + 'Factory', + 'FactoryGitHubConfiguration', + 'FactoryIdentity', + 'FactoryRepoConfiguration', + 'FactoryRepoUpdate', + 'FactoryUpdateParameters', + 'FactoryVSTSConfiguration', + 'FileServerLinkedService', 'FileServerLocation', - 'AmazonS3Location', - 'AzureDataLakeStoreLocation', - 'AzureBlobFSLocation', - 'AzureBlobStorageLocation', - 'DatasetLocation', - 'BinaryDataset', - 'DelimitedTextDataset', - 'ParquetDataset', - 'AvroDataset', - 'AmazonS3Dataset', - 'ActivityPolicy', - 'AzureFunctionActivity', - 'DatabricksSparkPythonActivity', - 'DatabricksSparkJarActivity', - 'DatabricksNotebookActivity', - 'DataLakeAnalyticsUSQLActivity', - 'AzureMLUpdateResourceActivity', - 'AzureMLWebServiceFile', - 'AzureMLBatchExecutionActivity', + 'FileServerReadSettings', + 'FileServerWriteSettings', + 'FileShareDataset', + 'FileSystemSink', + 'FileSystemSource', + 'FilterActivity', + 'ForEachActivity', + 'FormatReadSettings', + 'FormatWriteSettings', + 'FtpReadSettings', + 'FtpServerLinkedService', + 'FtpServerLocation', 'GetMetadataActivity', - 'WebActivityAuthentication', - 'WebActivity', - 'RedshiftUnloadSettings', - 'AmazonRedshiftSource', + 'GetSsisObjectMetadataRequest', + 'GitHubAccessTokenRequest', + 'GitHubAccessTokenResponse', + 'GoogleAdWordsLinkedService', + 'GoogleAdWordsObjectDataset', 'GoogleAdWordsSource', - 'OracleServiceCloudSource', - 'DynamicsAXSource', - 'ResponsysSource', - 'SalesforceMarketingCloudSource', - 'VerticaSource', - 'NetezzaPartitionSettings', - 'NetezzaSource', - 'ZohoSource', - 'XeroSource', - 'SquareSource', - 'SparkSource', - 'ShopifySource', - 'ServiceNowSource', - 'QuickBooksSource', - 'PrestoSource', - 'PhoenixSource', - 'PaypalSource', - 'MarketoSource', - 'AzureMariaDBSource', - 'MariaDBSource', - 'MagentoSource', - 'JiraSource', - 'ImpalaSource', - 'HubspotSource', - 'HiveSource', - 'HBaseSource', - 'GreenplumSource', + 'GoogleBigQueryLinkedService', + 'GoogleBigQueryObjectDataset', 'GoogleBigQuerySource', - 'EloquaSource', - 'DrillSource', - 'CouchbaseSource', - 'ConcurSource', - 'AzurePostgreSqlSource', - 'AmazonMWSSource', + 'GreenplumLinkedService', + 'GreenplumSource', + 'GreenplumTableDataset', + 'HBaseLinkedService', + 'HBaseObjectDataset', + 'HBaseSource', + 'HdfsLinkedService', + 'HdfsLocation', + 'HdfsReadSettings', + 'HdfsSource', + 'HDInsightHiveActivity', + 'HDInsightLinkedService', + 'HDInsightMapReduceActivity', + 'HDInsightOnDemandLinkedService', + 'HDInsightPigActivity', + 'HDInsightSparkActivity', + 'HDInsightStreamingActivity', + 'HiveLinkedService', + 'HiveObjectDataset', + 'HiveSource', + 'HttpDataset', + 'HttpLinkedService', + 'HttpReadSettings', + 'HttpServerLocation', 'HttpSource', - 'AzureBlobFSSource', - 'AzureDataLakeStoreSource', - 'Office365Source', + 'HubspotLinkedService', + 'HubspotObjectDataset', + 'HubspotSource', + 'IfConditionActivity', + 'ImpalaLinkedService', + 'ImpalaObjectDataset', + 'ImpalaSource', + 'InformixLinkedService', + 'InformixSink', + 'InformixSource', + 'InformixTableDataset', + 'IntegrationRuntime', + 'IntegrationRuntimeAuthKeys', + 'IntegrationRuntimeComputeProperties', + 'IntegrationRuntimeConnectionInfo', + 
'IntegrationRuntimeCustomSetupScriptProperties', + 'IntegrationRuntimeDataProxyProperties', + 'IntegrationRuntimeMonitoringData', + 'IntegrationRuntimeNodeIpAddress', + 'IntegrationRuntimeNodeMonitoringData', + 'IntegrationRuntimeReference', + 'IntegrationRuntimeRegenerateKeyParameters', + 'IntegrationRuntimeResource', + 'IntegrationRuntimeSsisCatalogInfo', + 'IntegrationRuntimeSsisProperties', + 'IntegrationRuntimeStatus', + 'IntegrationRuntimeStatusListResponse', + 'IntegrationRuntimeStatusResponse', + 'IntegrationRuntimeVNetProperties', + 'JiraLinkedService', + 'JiraObjectDataset', + 'JiraSource', + 'JsonFormat', + 'LinkedIntegrationRuntime', + 'LinkedIntegrationRuntimeKeyAuthorization', + 'LinkedIntegrationRuntimeRbacAuthorization', + 'LinkedIntegrationRuntimeRequest', + 'LinkedIntegrationRuntimeType', + 'LinkedService', + 'LinkedServiceReference', + 'LinkedServiceResource', + 'LogStorageSettings', + 'LookupActivity', + 'MagentoLinkedService', + 'MagentoObjectDataset', + 'MagentoSource', + 'ManagedIntegrationRuntime', + 'ManagedIntegrationRuntimeError', + 'ManagedIntegrationRuntimeNode', + 'ManagedIntegrationRuntimeOperationResult', + 'ManagedIntegrationRuntimeStatus', + 'MariaDBLinkedService', + 'MariaDBSource', + 'MariaDBTableDataset', + 'MarketoLinkedService', + 'MarketoObjectDataset', + 'MarketoSource', + 'MicrosoftAccessLinkedService', + 'MicrosoftAccessSink', + 'MicrosoftAccessSource', + 'MicrosoftAccessTableDataset', + 'MongoDbCollectionDataset', 'MongoDbCursorMethodsProperties', - 'CosmosDbMongoDbApiSource', - 'MongoDbV2Source', + 'MongoDbLinkedService', 'MongoDbSource', - 'CassandraSource', - 'WebSource', - 'TeradataPartitionSettings', - 'TeradataSource', + 'MongoDbV2CollectionDataset', + 'MongoDbV2LinkedService', + 'MongoDbV2Source', + 'MultiplePipelineTrigger', + 'MySqlLinkedService', + 'MySqlSource', + 'MySqlTableDataset', + 'NetezzaLinkedService', + 'NetezzaPartitionSettings', + 'NetezzaSource', + 'NetezzaTableDataset', + 'ODataLinkedService', + 'ODataResourceDataset', + 'ODataSource', + 'OdbcLinkedService', + 'OdbcSink', + 'OdbcSource', + 'OdbcTableDataset', + 'Office365Dataset', + 'Office365LinkedService', + 'Office365Source', + 'Operation', + 'OperationDisplay', + 'OperationLogSpecification', + 'OperationMetricAvailability', + 'OperationMetricDimension', + 'OperationMetricSpecification', + 'OperationServiceSpecification', + 'OracleLinkedService', 'OraclePartitionSettings', + 'OracleServiceCloudLinkedService', + 'OracleServiceCloudObjectDataset', + 'OracleServiceCloudSource', + 'OracleSink', 'OracleSource', - 'AzureDataExplorerSource', - 'AzureMySqlSource', - 'DistcpSettings', - 'HdfsSource', - 'FileSystemSource', - 'SqlDWSource', - 'StoredProcedureParameter', - 'SqlMISource', - 'AzureSqlSource', - 'SqlServerSource', - 'SqlSource', - 'RestSource', - 'SapTablePartitionSettings', - 'SapTableSource', - 'SapOpenHubSource', - 'SapHanaSource', - 'SapEccSource', - 'SapCloudForCustomerSource', + 'OracleTableDataset', + 'OrcFormat', + 'ParameterSpecification', + 'ParquetDataset', + 'ParquetFormat', + 'ParquetSink', + 'ParquetSource', + 'PaypalLinkedService', + 'PaypalObjectDataset', + 'PaypalSource', + 'PhoenixLinkedService', + 'PhoenixObjectDataset', + 'PhoenixSource', + 'PipelineFolder', + 'PipelineReference', + 'PipelineResource', + 'PipelineRun', + 'PipelineRunInvokedBy', + 'PipelineRunsQueryResponse', + 'PolybaseSettings', + 'PostgreSqlLinkedService', + 'PostgreSqlSource', + 'PostgreSqlTableDataset', + 'PrestoLinkedService', + 'PrestoObjectDataset', + 'PrestoSource', + 
'QuickBooksLinkedService', + 'QuickBooksObjectDataset', + 'QuickBooksSource', + 'RecurrenceSchedule', + 'RecurrenceScheduleOccurrence', + 'RedirectIncompatibleRowSettings', + 'RedshiftUnloadSettings', + 'RelationalSource', + 'RelationalTableDataset', + 'RerunTriggerResource', + 'RerunTumblingWindowTrigger', + 'RerunTumblingWindowTriggerActionParameters', + 'Resource', + 'ResponsysLinkedService', + 'ResponsysObjectDataset', + 'ResponsysSource', + 'RestResourceDataset', + 'RestServiceLinkedService', + 'RestSource', + 'RetryPolicy', + 'RunFilterParameters', + 'RunQueryFilter', + 'RunQueryOrderBy', + 'SalesforceLinkedService', + 'SalesforceMarketingCloudLinkedService', + 'SalesforceMarketingCloudObjectDataset', + 'SalesforceMarketingCloudSource', + 'SalesforceObjectDataset', + 'SalesforceServiceCloudLinkedService', + 'SalesforceServiceCloudObjectDataset', + 'SalesforceServiceCloudSink', 'SalesforceServiceCloudSource', + 'SalesforceSink', 'SalesforceSource', - 'ODataSource', + 'SapBwCubeDataset', + 'SapBWLinkedService', 'SapBwSource', - 'SybaseSource', - 'PostgreSqlSource', - 'MySqlSource', - 'OdbcSource', - 'Db2Source', - 'MicrosoftAccessSource', - 'InformixSource', - 'RelationalSource', - 'CommonDataServiceForAppsSource', - 'DynamicsCrmSource', - 'DynamicsSource', - 'DocumentDbCollectionSource', - 'BlobSource', - 'AzureTableSource', - 'HdfsReadSettings', - 'HttpReadSettings', + 'SapCloudForCustomerLinkedService', + 'SapCloudForCustomerResourceDataset', + 'SapCloudForCustomerSink', + 'SapCloudForCustomerSource', + 'SapEccLinkedService', + 'SapEccResourceDataset', + 'SapEccSource', + 'SapHanaLinkedService', + 'SapHanaSource', + 'SapHanaTableDataset', + 'SapOpenHubLinkedService', + 'SapOpenHubSource', + 'SapOpenHubTableDataset', + 'SapTableLinkedService', + 'SapTablePartitionSettings', + 'SapTableResourceDataset', + 'SapTableSource', + 'ScheduleTrigger', + 'ScheduleTriggerRecurrence', + 'ScriptAction', + 'SecretBase', + 'SecureString', + 'SelfDependencyTumblingWindowTriggerReference', + 'SelfHostedIntegrationRuntime', + 'SelfHostedIntegrationRuntimeNode', + 'SelfHostedIntegrationRuntimeStatus', + 'ServiceNowLinkedService', + 'ServiceNowObjectDataset', + 'ServiceNowSource', + 'SetVariableActivity', + 'SftpLocation', 'SftpReadSettings', - 'FtpReadSettings', - 'FileServerReadSettings', - 'AmazonS3ReadSettings', - 'AzureDataLakeStoreReadSettings', - 'AzureBlobFSReadSettings', - 'AzureBlobStorageReadSettings', - 'StoreReadSettings', - 'BinarySource', - 'FormatReadSettings', - 'DelimitedTextReadSettings', - 'DelimitedTextSource', - 'ParquetSource', - 'AvroSource', - 'CopySource', - 'LookupActivity', - 'AzureDataExplorerCommandActivity', - 'LogStorageSettings', - 'DeleteActivity', - 'SqlServerStoredProcedureActivity', - 'CustomActivityReferenceObject', - 'CustomActivity', - 'SSISAccessCredential', - 'SSISLogLocation', - 'SSISPropertyOverride', - 'SSISExecutionParameter', - 'SSISExecutionCredential', - 'SSISPackageLocation', - 'ExecuteSSISPackageActivity', - 'HDInsightSparkActivity', - 'HDInsightStreamingActivity', - 'HDInsightMapReduceActivity', - 'HDInsightPigActivity', - 'HDInsightHiveActivity', - 'RedirectIncompatibleRowSettings', - 'StagingSettings', - 'CosmosDbMongoDbApiSink', - 'SalesforceServiceCloudSink', - 'SalesforceSink', - 'AzureDataExplorerSink', - 'CommonDataServiceForAppsSink', - 'DynamicsCrmSink', - 'DynamicsSink', - 'MicrosoftAccessSink', - 'InformixSink', - 'OdbcSink', - 'AzureSearchIndexSink', - 'AzureBlobFSSink', - 'AzureDataLakeStoreSink', - 'OracleSink', - 'PolybaseSettings', + 
'SftpServerLinkedService', + 'ShopifyLinkedService', + 'ShopifyObjectDataset', + 'ShopifySource', + 'SparkLinkedService', + 'SparkObjectDataset', + 'SparkSource', 'SqlDWSink', + 'SqlDWSource', 'SqlMISink', - 'AzureSqlSink', + 'SqlMISource', + 'SqlServerLinkedService', 'SqlServerSink', + 'SqlServerSource', + 'SqlServerStoredProcedureActivity', + 'SqlServerTableDataset', 'SqlSink', - 'DocumentDbCollectionSink', - 'FileSystemSink', - 'BlobSink', - 'BinarySink', - 'FileServerWriteSettings', - 'AzureDataLakeStoreWriteSettings', - 'AzureBlobFSWriteSettings', - 'AzureBlobStorageWriteSettings', - 'StoreWriteSettings', - 'ParquetSink', - 'DelimitedTextWriteSettings', - 'FormatWriteSettings', - 'AvroWriteSettings', - 'AvroSink', - 'AzureTableSink', - 'AzureQueueSink', - 'SapCloudForCustomerSink', - 'AzurePostgreSqlSink', - 'DelimitedTextSink', - 'CopySink', - 'CopyActivity', - 'ExecutionActivity', - 'WebHookActivity', - 'AppendVariableActivity', - 'SetVariableActivity', - 'FilterActivity', - 'ValidationActivity', - 'UntilActivity', - 'WaitActivity', - 'ForEachActivity', - 'IfConditionActivity', - 'ExecutePipelineActivity', - 'ControlActivity', - 'LinkedIntegrationRuntime', - 'SelfHostedIntegrationRuntimeNode', - 'SelfHostedIntegrationRuntimeStatus', - 'ManagedIntegrationRuntimeOperationResult', - 'ManagedIntegrationRuntimeError', - 'ManagedIntegrationRuntimeNode', - 'ManagedIntegrationRuntimeStatus', - 'LinkedIntegrationRuntimeRbacAuthorization', - 'LinkedIntegrationRuntimeKeyAuthorization', - 'LinkedIntegrationRuntimeType', - 'SelfHostedIntegrationRuntime', - 'EntityReference', - 'IntegrationRuntimeDataProxyProperties', - 'IntegrationRuntimeCustomSetupScriptProperties', - 'IntegrationRuntimeSsisCatalogInfo', - 'IntegrationRuntimeSsisProperties', - 'IntegrationRuntimeVNetProperties', - 'IntegrationRuntimeComputeProperties', - 'ManagedIntegrationRuntime', - 'IntegrationRuntimeNodeIpAddress', - 'SsisVariable', + 'SqlSource', + 'SquareLinkedService', + 'SquareObjectDataset', + 'SquareSource', + 'SSISAccessCredential', 'SsisEnvironment', - 'SsisParameter', - 'SsisPackage', 'SsisEnvironmentReference', - 'SsisProject', + 'SSISExecutionCredential', + 'SSISExecutionParameter', 'SsisFolder', + 'SSISLogLocation', 'SsisObjectMetadata', 'SsisObjectMetadataListResponse', - 'IntegrationRuntimeNodeMonitoringData', - 'IntegrationRuntimeMonitoringData', - 'IntegrationRuntimeAuthKeys', - 'IntegrationRuntimeRegenerateKeyParameters', - 'IntegrationRuntimeConnectionInfo', + 'SsisObjectMetadataStatusResponse', + 'SsisPackage', + 'SSISPackageLocation', + 'SsisParameter', + 'SsisProject', + 'SSISPropertyOverride', + 'SsisVariable', + 'StagingSettings', + 'StoredProcedureParameter', + 'StoreReadSettings', + 'StoreWriteSettings', + 'SubResource', + 'SybaseLinkedService', + 'SybaseSource', + 'SybaseTableDataset', + 'TeradataLinkedService', + 'TeradataPartitionSettings', + 'TeradataSource', + 'TeradataTableDataset', + 'TextFormat', + 'Trigger', + 'TriggerDependencyReference', + 'TriggerPipelineReference', + 'TriggerReference', + 'TriggerResource', + 'TriggerRun', + 'TriggerRunsQueryResponse', + 'TumblingWindowTrigger', + 'TumblingWindowTriggerDependencyReference', + 'UntilActivity', + 'UpdateIntegrationRuntimeNodeRequest', + 'UpdateIntegrationRuntimeRequest', + 'UserAccessPolicy', + 'UserProperty', + 'ValidationActivity', + 'VariableSpecification', + 'VerticaLinkedService', + 'VerticaSource', + 'VerticaTableDataset', + 'WaitActivity', + 'WebActivity', + 'WebActivityAuthentication', + 'WebAnonymousAuthentication', + 
'WebBasicAuthentication', + 'WebClientCertificateAuthentication', + 'WebHookActivity', + 'WebLinkedService', + 'WebLinkedServiceTypeProperties', + 'WebSource', + 'WebTableDataset', + 'XeroLinkedService', + 'XeroObjectDataset', + 'XeroSource', + 'ZohoLinkedService', + 'ZohoObjectDataset', + 'ZohoSource', 'OperationPaged', 'FactoryPaged', 'IntegrationRuntimeResourcePaged', diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py similarity index 100% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py new file mode 100644 index 000000000000..bbb7b343f03d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py @@ -0,0 +1,28730 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model +from msrest.exceptions import HttpOperationError + + +class AccessPolicyResponse(Model): + """Get Data Plane read only token response definition. + + :param policy: The user access policy. + :type policy: ~azure.mgmt.datafactory.models.UserAccessPolicy + :param access_token: Data Plane read only access token. + :type access_token: str + :param data_plane_url: Data Plane service base URL. + :type data_plane_url: str + """ + + _attribute_map = { + 'policy': {'key': 'policy', 'type': 'UserAccessPolicy'}, + 'access_token': {'key': 'accessToken', 'type': 'str'}, + 'data_plane_url': {'key': 'dataPlaneUrl', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AccessPolicyResponse, self).__init__(**kwargs) + self.policy = kwargs.get('policy', None) + self.access_token = kwargs.get('access_token', None) + self.data_plane_url = kwargs.get('data_plane_url', None) + + +class Activity(Model): + """A pipeline activity. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ExecutionActivity, ControlActivity + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server.
+ :type type: str + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Execution': 'ExecutionActivity', 'Container': 'ControlActivity'} + } + + def __init__(self, **kwargs): + super(Activity, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.name = kwargs.get('name', None) + self.description = kwargs.get('description', None) + self.depends_on = kwargs.get('depends_on', None) + self.user_properties = kwargs.get('user_properties', None) + self.type = None + + +class ActivityDependency(Model): + """Activity dependency information. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param activity: Required. Activity name. + :type activity: str + :param dependency_conditions: Required. Match-Condition for the + dependency. + :type dependency_conditions: list[str or + ~azure.mgmt.datafactory.models.DependencyCondition] + """ + + _validation = { + 'activity': {'required': True}, + 'dependency_conditions': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'activity': {'key': 'activity', 'type': 'str'}, + 'dependency_conditions': {'key': 'dependencyConditions', 'type': '[str]'}, + } + + def __init__(self, **kwargs): + super(ActivityDependency, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.activity = kwargs.get('activity', None) + self.dependency_conditions = kwargs.get('dependency_conditions', None) + + +class ActivityPolicy(Model): + """Execution policy for an activity. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param timeout: Specifies the timeout for the activity to run. The default + timeout is 7 days. Type: string (or Expression with resultType string), + pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type timeout: object + :param retry: Maximum ordinary retry attempts. Default is 0. Type: integer + (or Expression with resultType integer), minimum: 0. + :type retry: object + :param retry_interval_in_seconds: Interval between each retry attempt (in + seconds). The default is 30 sec. + :type retry_interval_in_seconds: int + :param secure_input: When set to true, Input from activity is considered + as secure and will not be logged to monitoring. + :type secure_input: bool + :param secure_output: When set to true, Output from activity is considered + as secure and will not be logged to monitoring.
+ :type secure_output: bool + """ + + _validation = { + 'retry_interval_in_seconds': {'maximum': 86400, 'minimum': 30}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'timeout': {'key': 'timeout', 'type': 'object'}, + 'retry': {'key': 'retry', 'type': 'object'}, + 'retry_interval_in_seconds': {'key': 'retryIntervalInSeconds', 'type': 'int'}, + 'secure_input': {'key': 'secureInput', 'type': 'bool'}, + 'secure_output': {'key': 'secureOutput', 'type': 'bool'}, + } + + def __init__(self, **kwargs): + super(ActivityPolicy, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.timeout = kwargs.get('timeout', None) + self.retry = kwargs.get('retry', None) + self.retry_interval_in_seconds = kwargs.get('retry_interval_in_seconds', None) + self.secure_input = kwargs.get('secure_input', None) + self.secure_output = kwargs.get('secure_output', None) + + +class ActivityRun(Model): + """Information about an activity run in a pipeline. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :ivar pipeline_name: The name of the pipeline. + :vartype pipeline_name: str + :ivar pipeline_run_id: The id of the pipeline run. + :vartype pipeline_run_id: str + :ivar activity_name: The name of the activity. + :vartype activity_name: str + :ivar activity_type: The type of the activity. + :vartype activity_type: str + :ivar activity_run_id: The id of the activity run. + :vartype activity_run_id: str + :ivar linked_service_name: The name of the compute linked service. + :vartype linked_service_name: str + :ivar status: The status of the activity run. + :vartype status: str + :ivar activity_run_start: The start time of the activity run in 'ISO 8601' + format. + :vartype activity_run_start: datetime + :ivar activity_run_end: The end time of the activity run in 'ISO 8601' + format. + :vartype activity_run_end: datetime + :ivar duration_in_ms: The duration of the activity run. + :vartype duration_in_ms: int + :ivar input: The input for the activity. + :vartype input: object + :ivar output: The output for the activity. + :vartype output: object + :ivar error: The error if any from the activity run.
+ :vartype error: object + """ + + _validation = { + 'pipeline_name': {'readonly': True}, + 'pipeline_run_id': {'readonly': True}, + 'activity_name': {'readonly': True}, + 'activity_type': {'readonly': True}, + 'activity_run_id': {'readonly': True}, + 'linked_service_name': {'readonly': True}, + 'status': {'readonly': True}, + 'activity_run_start': {'readonly': True}, + 'activity_run_end': {'readonly': True}, + 'duration_in_ms': {'readonly': True}, + 'input': {'readonly': True}, + 'output': {'readonly': True}, + 'error': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, + 'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'}, + 'activity_name': {'key': 'activityName', 'type': 'str'}, + 'activity_type': {'key': 'activityType', 'type': 'str'}, + 'activity_run_id': {'key': 'activityRunId', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'activity_run_start': {'key': 'activityRunStart', 'type': 'iso-8601'}, + 'activity_run_end': {'key': 'activityRunEnd', 'type': 'iso-8601'}, + 'duration_in_ms': {'key': 'durationInMs', 'type': 'int'}, + 'input': {'key': 'input', 'type': 'object'}, + 'output': {'key': 'output', 'type': 'object'}, + 'error': {'key': 'error', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ActivityRun, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.pipeline_name = None + self.pipeline_run_id = None + self.activity_name = None + self.activity_type = None + self.activity_run_id = None + self.linked_service_name = None + self.status = None + self.activity_run_start = None + self.activity_run_end = None + self.duration_in_ms = None + self.input = None + self.output = None + self.error = None + + +class ActivityRunsQueryResponse(Model): + """A list activity runs. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of activity runs. + :type value: list[~azure.mgmt.datafactory.models.ActivityRun] + :param continuation_token: The continuation token for getting the next + page of results, if any remaining results exist, null otherwise. + :type continuation_token: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[ActivityRun]'}, + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ActivityRunsQueryResponse, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.continuation_token = kwargs.get('continuation_token', None) + + +class LinkedService(Model): + """The Azure Data Factory nested object which contains the information and + credential which can be used to connect with related store or compute + resource. + + You probably want to use the sub-classes and not this class directly. 
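+
+ Each sub-class fixes the ``type`` discriminator itself, which is how the
+ service and the deserializer pick the right model. An editor's sketch of
+ that behaviour (the connection string is a placeholder):
+
+ from azure.mgmt.datafactory.models import AzureMySqlLinkedService
+
+ ls = AzureMySqlLinkedService(connection_string='<connection-string>')
+ assert ls.type == 'AzureMySql' # set by the sub-class, not the caller
+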
Known + sub-classes are: AzureFunctionLinkedService, + AzureDataExplorerLinkedService, SapTableLinkedService, + GoogleAdWordsLinkedService, OracleServiceCloudLinkedService, + DynamicsAXLinkedService, ResponsysLinkedService, + AzureDatabricksLinkedService, AzureDataLakeAnalyticsLinkedService, + HDInsightOnDemandLinkedService, SalesforceMarketingCloudLinkedService, + NetezzaLinkedService, VerticaLinkedService, ZohoLinkedService, + XeroLinkedService, SquareLinkedService, SparkLinkedService, + ShopifyLinkedService, ServiceNowLinkedService, QuickBooksLinkedService, + PrestoLinkedService, PhoenixLinkedService, PaypalLinkedService, + MarketoLinkedService, AzureMariaDBLinkedService, MariaDBLinkedService, + MagentoLinkedService, JiraLinkedService, ImpalaLinkedService, + HubspotLinkedService, HiveLinkedService, HBaseLinkedService, + GreenplumLinkedService, GoogleBigQueryLinkedService, EloquaLinkedService, + DrillLinkedService, CouchbaseLinkedService, ConcurLinkedService, + AzurePostgreSqlLinkedService, AmazonMWSLinkedService, SapHanaLinkedService, + SapBWLinkedService, SftpServerLinkedService, FtpServerLinkedService, + HttpLinkedService, AzureSearchLinkedService, CustomDataSourceLinkedService, + AmazonRedshiftLinkedService, AmazonS3LinkedService, + RestServiceLinkedService, SapOpenHubLinkedService, SapEccLinkedService, + SapCloudForCustomerLinkedService, SalesforceServiceCloudLinkedService, + SalesforceLinkedService, Office365LinkedService, AzureBlobFSLinkedService, + AzureDataLakeStoreLinkedService, CosmosDbMongoDbApiLinkedService, + MongoDbV2LinkedService, MongoDbLinkedService, CassandraLinkedService, + WebLinkedService, ODataLinkedService, HdfsLinkedService, + MicrosoftAccessLinkedService, InformixLinkedService, OdbcLinkedService, + AzureMLLinkedService, TeradataLinkedService, Db2LinkedService, + SybaseLinkedService, PostgreSqlLinkedService, MySqlLinkedService, + AzureMySqlLinkedService, OracleLinkedService, FileServerLinkedService, + HDInsightLinkedService, CommonDataServiceForAppsLinkedService, + DynamicsCrmLinkedService, DynamicsLinkedService, CosmosDbLinkedService, + AzureKeyVaultLinkedService, AzureBatchLinkedService, + AzureSqlMILinkedService, AzureSqlDatabaseLinkedService, + SqlServerLinkedService, AzureSqlDWLinkedService, + AzureTableStorageLinkedService, AzureBlobStorageLinkedService, + AzureStorageLinkedService + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 
'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + } + + def __init__(self, **kwargs): + super(LinkedService, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.connect_via = kwargs.get('connect_via', None) + self.description = kwargs.get('description', None) + self.parameters = kwargs.get('parameters', None) + self.annotations = kwargs.get('annotations', None) + self.type = None + + +class AmazonMWSLinkedService(LinkedService): + """Amazon Marketplace Web Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of the Amazon MWS server, (i.e. + mws.amazonservices.com) + :type endpoint: object + :param marketplace_id: Required. The Amazon Marketplace ID you want to + retrieve data from. To retrieve data from multiple Marketplace IDs, + separate them with a comma (,). (i.e. A2EUQ1WTGCTBG2) + :type marketplace_id: object + :param seller_id: Required. The Amazon seller ID. + :type seller_id: object + :param mws_auth_token: The Amazon MWS authentication token. + :type mws_auth_token: ~azure.mgmt.datafactory.models.SecretBase + :param access_key_id: Required. The access key id used to access data. + :type access_key_id: object + :param secret_key: The secret key used to access data. + :type secret_key: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
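+
+ An illustrative construction from the editor; the endpoint and
+ marketplace ID echo the examples above, the remaining values are
+ placeholders:
+
+ from azure.mgmt.datafactory.models import (
+ AmazonMWSLinkedService, SecureString)
+
+ mws = AmazonMWSLinkedService(
+ endpoint='mws.amazonservices.com',
+ marketplace_id='A2EUQ1WTGCTBG2',
+ seller_id='<seller-id>',
+ access_key_id='<access-key-id>',
+ mws_auth_token=SecureString(value='<auth-token>'))
+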
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'marketplace_id': {'required': True}, + 'seller_id': {'required': True}, + 'access_key_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'marketplace_id': {'key': 'typeProperties.marketplaceID', 'type': 'object'}, + 'seller_id': {'key': 'typeProperties.sellerID', 'type': 'object'}, + 'mws_auth_token': {'key': 'typeProperties.mwsAuthToken', 'type': 'SecretBase'}, + 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, + 'secret_key': {'key': 'typeProperties.secretKey', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AmazonMWSLinkedService, self).__init__(**kwargs) + self.endpoint = kwargs.get('endpoint', None) + self.marketplace_id = kwargs.get('marketplace_id', None) + self.seller_id = kwargs.get('seller_id', None) + self.mws_auth_token = kwargs.get('mws_auth_token', None) + self.access_key_id = kwargs.get('access_key_id', None) + self.secret_key = kwargs.get('secret_key', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AmazonMWS' + + +class Dataset(Model): + """The Azure Data Factory nested object which identifies data within different + data stores, such as tables, files, folders, and documents. + + You probably want to use the sub-classes and not this class directly. 
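+
+ As with linked services, client code builds the concrete sub-classes; a
+ hedged sketch (editor's example, placeholder names) pairing a dataset
+ with its required linked service reference:
+
+ from azure.mgmt.datafactory.models import (
+ AmazonMWSObjectDataset, LinkedServiceReference)
+
+ ds = AmazonMWSObjectDataset(
+ linked_service_name=LinkedServiceReference(reference_name='MwsLs'),
+ table_name='Orders')
+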
Known + sub-classes are: GoogleAdWordsObjectDataset, AzureDataExplorerTableDataset, + OracleServiceCloudObjectDataset, DynamicsAXResourceDataset, + ResponsysObjectDataset, SalesforceMarketingCloudObjectDataset, + VerticaTableDataset, NetezzaTableDataset, ZohoObjectDataset, + XeroObjectDataset, SquareObjectDataset, SparkObjectDataset, + ShopifyObjectDataset, ServiceNowObjectDataset, QuickBooksObjectDataset, + PrestoObjectDataset, PhoenixObjectDataset, PaypalObjectDataset, + MarketoObjectDataset, AzureMariaDBTableDataset, MariaDBTableDataset, + MagentoObjectDataset, JiraObjectDataset, ImpalaObjectDataset, + HubspotObjectDataset, HiveObjectDataset, HBaseObjectDataset, + GreenplumTableDataset, GoogleBigQueryObjectDataset, EloquaObjectDataset, + DrillTableDataset, CouchbaseTableDataset, ConcurObjectDataset, + AzurePostgreSqlTableDataset, AmazonMWSObjectDataset, HttpDataset, + AzureSearchIndexDataset, WebTableDataset, SapTableResourceDataset, + RestResourceDataset, SqlServerTableDataset, SapOpenHubTableDataset, + SapHanaTableDataset, SapEccResourceDataset, + SapCloudForCustomerResourceDataset, SapBwCubeDataset, SybaseTableDataset, + SalesforceServiceCloudObjectDataset, SalesforceObjectDataset, + MicrosoftAccessTableDataset, PostgreSqlTableDataset, MySqlTableDataset, + OdbcTableDataset, InformixTableDataset, RelationalTableDataset, + Db2TableDataset, AmazonRedshiftTableDataset, AzureMySqlTableDataset, + TeradataTableDataset, OracleTableDataset, ODataResourceDataset, + CosmosDbMongoDbApiCollectionDataset, MongoDbV2CollectionDataset, + MongoDbCollectionDataset, FileShareDataset, Office365Dataset, + AzureBlobFSDataset, AzureDataLakeStoreDataset, + CommonDataServiceForAppsEntityDataset, DynamicsCrmEntityDataset, + DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, + CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, + AzureSqlTableDataset, AzureTableDataset, AzureBlobDataset, BinaryDataset, + DelimitedTextDataset, ParquetDataset, AvroDataset, AmazonS3Dataset + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 
'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + } + + def __init__(self, **kwargs): + super(Dataset, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.description = kwargs.get('description', None) + self.structure = kwargs.get('structure', None) + self.schema = kwargs.get('schema', None) + self.linked_service_name = kwargs.get('linked_service_name', None) + self.parameters = kwargs.get('parameters', None) + self.annotations = kwargs.get('annotations', None) + self.folder = kwargs.get('folder', None) + self.type = None + + +class AmazonMWSObjectDataset(Dataset): + """Amazon Marketplace Web Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AmazonMWSObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'AmazonMWSObject' + + +class CopySource(Model): + """A copy activity source. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AmazonRedshiftSource, GoogleAdWordsSource, + OracleServiceCloudSource, DynamicsAXSource, ResponsysSource, + SalesforceMarketingCloudSource, VerticaSource, NetezzaSource, ZohoSource, + XeroSource, SquareSource, SparkSource, ShopifySource, ServiceNowSource, + QuickBooksSource, PrestoSource, PhoenixSource, PaypalSource, MarketoSource, + AzureMariaDBSource, MariaDBSource, MagentoSource, JiraSource, ImpalaSource, + HubspotSource, HiveSource, HBaseSource, GreenplumSource, + GoogleBigQuerySource, EloquaSource, DrillSource, CouchbaseSource, + ConcurSource, AzurePostgreSqlSource, AmazonMWSSource, HttpSource, + AzureBlobFSSource, AzureDataLakeStoreSource, Office365Source, + CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, CassandraSource, + WebSource, TeradataSource, OracleSource, AzureDataExplorerSource, + AzureMySqlSource, HdfsSource, FileSystemSource, SqlDWSource, SqlMISource, + AzureSqlSource, SqlServerSource, SqlSource, RestSource, SapTableSource, + SapOpenHubSource, SapHanaSource, SapEccSource, SapCloudForCustomerSource, + SalesforceServiceCloudSource, SalesforceSource, ODataSource, SapBwSource, + SybaseSource, PostgreSqlSource, MySqlSource, OdbcSource, Db2Source, + MicrosoftAccessSource, InformixSource, RelationalSource, + CommonDataServiceForAppsSource, DynamicsCrmSource, DynamicsSource, + DocumentDbCollectionSource, BlobSource, AzureTableSource, BinarySource, + DelimitedTextSource, ParquetSource, AvroSource + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
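+
+ A short editor's sketch of a concrete source; the query and retry values
+ are placeholders:
+
+ from azure.mgmt.datafactory.models import AmazonMWSSource
+
+ source = AmazonMWSSource(
+ query='SELECT * FROM Orders',
+ source_retry_count=2,
+ source_retry_wait='00:00:30')
+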
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'ODataSource': 'ODataSource', 'SapBwSource': 'SapBwSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'BinarySource': 'BinarySource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 
'ParquetSource', 'AvroSource': 'AvroSource'} + } + + def __init__(self, **kwargs): + super(CopySource, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.source_retry_count = kwargs.get('source_retry_count', None) + self.source_retry_wait = kwargs.get('source_retry_wait', None) + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + self.type = None + + +class AmazonMWSSource(CopySource): + """A copy activity Amazon Marketplace Web Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AmazonMWSSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'AmazonMWSSource' + + +class AmazonRedshiftLinkedService(LinkedService): + """Linked service for Amazon Redshift. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. The name of the Amazon Redshift server. Type: + string (or Expression with resultType string). + :type server: object + :param username: The username of the Amazon Redshift source. Type: string + (or Expression with resultType string). + :type username: object + :param password: The password of the Amazon Redshift source. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param database: Required. The database name of the Amazon Redshift + source. Type: string (or Expression with resultType string). 
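+
+ Putting the required properties together (editor's sketch; host,
+ database and credential values are placeholders):
+
+ from azure.mgmt.datafactory.models import (
+ AmazonRedshiftLinkedService, SecureString)
+
+ redshift = AmazonRedshiftLinkedService(
+ server='<cluster>.redshift.amazonaws.com',
+ database='dev',
+ username='admin',
+ password=SecureString(value='<password>'),
+ port=5439)
+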
+ :type database: object + :param port: The TCP port number that the Amazon Redshift server uses to + listen for client connections. The default value is 5439. Type: integer + (or Expression with resultType integer). + :type port: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AmazonRedshiftLinkedService, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.database = kwargs.get('database', None) + self.port = kwargs.get('port', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AmazonRedshift' + + +class AmazonRedshiftSource(CopySource): + """A copy activity source for Amazon Redshift Source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + :param redshift_unload_settings: The Amazon S3 settings needed for the + interim Amazon S3 when copying from Amazon Redshift with unload. With + this, data from Amazon Redshift source will be unloaded into S3 first and + then copied into the targeted sink from the interim S3. 
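+
+ The staged-unload path described above, as an editor's sketch (bucket
+ and reference names are placeholders):
+
+ from azure.mgmt.datafactory.models import (
+ AmazonRedshiftSource, RedshiftUnloadSettings, LinkedServiceReference)
+
+ source = AmazonRedshiftSource(
+ query='select * from public.sales',
+ redshift_unload_settings=RedshiftUnloadSettings(
+ s3_linked_service_name=LinkedServiceReference(
+ reference_name='InterimS3'),
+ bucket_name='unload-staging'))
+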
+ :type redshift_unload_settings: + ~azure.mgmt.datafactory.models.RedshiftUnloadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, + } + + def __init__(self, **kwargs): + super(AmazonRedshiftSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.redshift_unload_settings = kwargs.get('redshift_unload_settings', None) + self.type = 'AmazonRedshiftSource' + + +class AmazonRedshiftTableDataset(Dataset): + """The Amazon Redshift table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The Amazon Redshift table name. Type: string (or Expression + with resultType string). + :type table: object + :param amazon_redshift_table_dataset_schema: The Amazon Redshift schema + name. Type: string (or Expression with resultType string). 
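+
+ Note the Python-side rename: ``amazon_redshift_table_dataset_schema``
+ serializes to ``typeProperties.schema``, as the attribute map below
+ shows. Editor's sketch with placeholder names:
+
+ from azure.mgmt.datafactory.models import (
+ AmazonRedshiftTableDataset, LinkedServiceReference)
+
+ table = AmazonRedshiftTableDataset(
+ linked_service_name=LinkedServiceReference(reference_name='RedshiftLs'),
+ table='sales',
+ amazon_redshift_table_dataset_schema='public')
+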
+ :type amazon_redshift_table_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'amazon_redshift_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AmazonRedshiftTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.amazon_redshift_table_dataset_schema = kwargs.get('amazon_redshift_table_dataset_schema', None) + self.type = 'AmazonRedshiftTable' + + +class AmazonS3Dataset(Dataset): + """A single Amazon Simple Storage Service (S3) object or a set of S3 objects. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param bucket_name: Required. The name of the Amazon S3 bucket. Type: + string (or Expression with resultType string). + :type bucket_name: object + :param key: The key of the Amazon S3 object. Type: string (or Expression + with resultType string). + :type key: object + :param prefix: The prefix filter for the S3 object name. Type: string (or + Expression with resultType string). + :type prefix: object + :param version: The version for the S3 object. Type: string (or Expression + with resultType string). + :type version: object + :param modified_datetime_start: The start of S3 object's modified + datetime. Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of S3 object's modified datetime. + Type: string (or Expression with resultType string). 
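+
+ An editor's example (bucket, key and window values are placeholders)
+ selecting a single object filtered by its modified-time window:
+
+ from azure.mgmt.datafactory.models import (
+ AmazonS3Dataset, LinkedServiceReference)
+
+ s3_ds = AmazonS3Dataset(
+ linked_service_name=LinkedServiceReference(reference_name='S3Ls'),
+ bucket_name='my-bucket',
+ key='raw/2019/06/data.csv',
+ modified_datetime_start='2019-06-01T00:00:00Z',
+ modified_datetime_end='2019-06-07T00:00:00Z')
+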
+ :type modified_datetime_end: object + :param format: The format of files. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param compression: The data compression method used for the Amazon S3 + object. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'bucket_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'bucket_name': {'key': 'typeProperties.bucketName', 'type': 'object'}, + 'key': {'key': 'typeProperties.key', 'type': 'object'}, + 'prefix': {'key': 'typeProperties.prefix', 'type': 'object'}, + 'version': {'key': 'typeProperties.version', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, **kwargs): + super(AmazonS3Dataset, self).__init__(**kwargs) + self.bucket_name = kwargs.get('bucket_name', None) + self.key = kwargs.get('key', None) + self.prefix = kwargs.get('prefix', None) + self.version = kwargs.get('version', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + self.format = kwargs.get('format', None) + self.compression = kwargs.get('compression', None) + self.type = 'AmazonS3Object' + + +class AmazonS3LinkedService(LinkedService): + """Linked service for Amazon S3. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param access_key_id: The access key identifier of the Amazon S3 Identity + and Access Management (IAM) user. Type: string (or Expression with + resultType string). + :type access_key_id: object + :param secret_access_key: The secret access key of the Amazon S3 Identity + and Access Management (IAM) user. + :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase + :param service_url: This value specifies the endpoint to access with the + S3 Connector. 
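+
+ An editor's sketch with placeholder credentials, overriding the endpoint
+ explicitly (the URL shown is the public S3 endpoint; it is an assumed
+ value, not an API default):
+
+ from azure.mgmt.datafactory.models import (
+ AmazonS3LinkedService, SecureString)
+
+ s3_ls = AmazonS3LinkedService(
+ access_key_id='<iam-access-key-id>',
+ secret_access_key=SecureString(value='<iam-secret-key>'),
+ service_url='https://s3.amazonaws.com')
+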
This is an optional property; change it only if you want to + try a different service endpoint or want to switch between https and http. + Type: string (or Expression with resultType string). + :type service_url: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, + 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, + 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AmazonS3LinkedService, self).__init__(**kwargs) + self.access_key_id = kwargs.get('access_key_id', None) + self.secret_access_key = kwargs.get('secret_access_key', None) + self.service_url = kwargs.get('service_url', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AmazonS3' + + +class DatasetLocation(Model): + """Dataset location. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DatasetLocation, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = kwargs.get('type', None) + self.folder_path = kwargs.get('folder_path', None) + self.file_name = kwargs.get('file_name', None) + + +class AmazonS3Location(DatasetLocation): + """The location of amazon S3 dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). 
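+
+ Editor's sketch (placeholder names) pinning a location to one object:
+
+ from azure.mgmt.datafactory.models import AmazonS3Location
+
+ loc = AmazonS3Location(
+ bucket_name='my-bucket',
+ folder_path='raw/2019',
+ file_name='data.csv')
+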
+ :type file_name: object
+ :param bucket_name: Specify the bucket name of the Amazon S3 bucket.
+ Type: string (or Expression with resultType string).
+ :type bucket_name: object
+ :param version: Specify the version of the Amazon S3 object. Type: string
+ (or Expression with resultType string).
+ :type version: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'folder_path': {'key': 'folderPath', 'type': 'object'},
+ 'file_name': {'key': 'fileName', 'type': 'object'},
+ 'bucket_name': {'key': 'bucketName', 'type': 'object'},
+ 'version': {'key': 'version', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(AmazonS3Location, self).__init__(**kwargs)
+ self.bucket_name = kwargs.get('bucket_name', None)
+ self.version = kwargs.get('version', None)
+
+
+class StoreReadSettings(Model):
+ """Connector read settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. The read setting type.
+ :type type: str
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(StoreReadSettings, self).__init__(**kwargs)
+ self.additional_properties = kwargs.get('additional_properties', None)
+ self.type = kwargs.get('type', None)
+ self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None)
+
+
+class AmazonS3ReadSettings(StoreReadSettings):
+ """Amazon S3 read settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. The read setting type.
+ :type type: str
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param recursive: If true, files under the folder path will be read
+ recursively. Default is true. Type: boolean (or Expression with resultType
+ boolean).
+ :type recursive: object
+ :param wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or
+ Expression with resultType string).
+ :type wildcard_folder_path: object
+ :param wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or
+ Expression with resultType string).
+ :type wildcard_file_name: object
+ :param prefix: The prefix filter for the S3 object name. Type: string (or
+ Expression with resultType string).
+ :type prefix: object
+ :param enable_partition_discovery: Indicates whether to enable partition
+ discovery.
+ :type enable_partition_discovery: bool
+ :param modified_datetime_start: The start of the file's modified datetime.
+ Type: string (or Expression with resultType string).
+ :type modified_datetime_start: object
+ :param modified_datetime_end: The end of the file's modified datetime.
+ Type: string (or Expression with resultType string).
+ :type modified_datetime_end: object
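+
+ An editor's sketch reading CSV files recursively under a folder; the
+ wildcard values are assumptions, not defaults:
+
+ from azure.mgmt.datafactory.models import AmazonS3ReadSettings
+
+ read_settings = AmazonS3ReadSettings(
+ recursive=True,
+ wildcard_folder_path='raw/2019/*',
+ wildcard_file_name='*.csv',
+ enable_partition_discovery=False)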
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'recursive': {'key': 'recursive', 'type': 'object'},
+ 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
+ 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
+ 'prefix': {'key': 'prefix', 'type': 'object'},
+ 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'},
+ 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'},
+ 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(AmazonS3ReadSettings, self).__init__(**kwargs)
+ self.recursive = kwargs.get('recursive', None)
+ self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None)
+ self.wildcard_file_name = kwargs.get('wildcard_file_name', None)
+ self.prefix = kwargs.get('prefix', None)
+ self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None)
+ self.modified_datetime_start = kwargs.get('modified_datetime_start', None)
+ self.modified_datetime_end = kwargs.get('modified_datetime_end', None)
+
+
+class ControlActivity(Activity):
+ """Base class for all control activities like IfCondition, ForEach, Until.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: WebHookActivity, AppendVariableActivity,
+ SetVariableActivity, FilterActivity, ValidationActivity, UntilActivity,
+ WaitActivity, ForEachActivity, IfConditionActivity, ExecutePipelineActivity
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
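+
+ Control flow is likewise driven through the sub-classes; an editor's
+ sketch using the ``AppendVariable`` sub-type defined below (names and
+ the expression are placeholders):
+
+ from azure.mgmt.datafactory.models import AppendVariableActivity
+
+ append = AppendVariableActivity(
+ name='CollectItem',
+ variable_name='processedFiles',
+ value='@item().name')
+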
+ :type type: str + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'WebHook': 'WebHookActivity', 'AppendVariable': 'AppendVariableActivity', 'SetVariable': 'SetVariableActivity', 'Filter': 'FilterActivity', 'Validation': 'ValidationActivity', 'Until': 'UntilActivity', 'Wait': 'WaitActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'ExecutePipeline': 'ExecutePipelineActivity'} + } + + def __init__(self, **kwargs): + super(ControlActivity, self).__init__(**kwargs) + self.type = 'Container' + + +class AppendVariableActivity(ControlActivity): + """Append value for a Variable of type Array. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param variable_name: Name of the variable whose value needs to be + appended to. + :type variable_name: str + :param value: Value to be appended. Could be a static value or Expression + :type value: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, + 'value': {'key': 'typeProperties.value', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AppendVariableActivity, self).__init__(**kwargs) + self.variable_name = kwargs.get('variable_name', None) + self.value = kwargs.get('value', None) + self.type = 'AppendVariable' + + +class AvroDataset(Dataset): + """Avro dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. 
Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the avro storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param avro_compression_codec: Possible values include: 'none', 'deflate', + 'snappy', 'xz', 'bzip2' + :type avro_compression_codec: str or + ~azure.mgmt.datafactory.models.AvroCompressionCodec + :param avro_compression_level: + :type avro_compression_level: int + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + 'avro_compression_level': {'maximum': 9, 'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'}, + 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(AvroDataset, self).__init__(**kwargs) + self.location = kwargs.get('location', None) + self.avro_compression_codec = kwargs.get('avro_compression_codec', None) + self.avro_compression_level = kwargs.get('avro_compression_level', None) + self.type = 'Avro' + + +class DatasetStorageFormat(Model): + """The format definition of a storage. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ParquetFormat, OrcFormat, AvroFormat, JsonFormat, + TextFormat + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param serializer: Serializer. Type: string (or Expression with resultType + string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with + resultType string). + :type deserializer: object + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'ParquetFormat': 'ParquetFormat', 'OrcFormat': 'OrcFormat', 'AvroFormat': 'AvroFormat', 'JsonFormat': 'JsonFormat', 'TextFormat': 'TextFormat'} + } + + def __init__(self, **kwargs): + super(DatasetStorageFormat, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.serializer = kwargs.get('serializer', None) + self.deserializer = kwargs.get('deserializer', None) + self.type = None + + +class AvroFormat(DatasetStorageFormat): + """The data stored in Avro format. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param serializer: Serializer. Type: string (or Expression with resultType + string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with + resultType string). + :type deserializer: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AvroFormat, self).__init__(**kwargs) + self.type = 'AvroFormat' + + +class CopySink(Model): + """A copy activity sink. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: CosmosDbMongoDbApiSink, SalesforceServiceCloudSink, + SalesforceSink, AzureDataExplorerSink, CommonDataServiceForAppsSink, + DynamicsCrmSink, DynamicsSink, MicrosoftAccessSink, InformixSink, OdbcSink, + AzureSearchIndexSink, AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, + SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, + DocumentDbCollectionSink, FileSystemSink, BlobSink, BinarySink, + ParquetSink, AvroSink, AzureTableSink, AzureQueueSink, + SapCloudForCustomerSink, AzureMySqlSink, AzurePostgreSqlSink, + DelimitedTextSink + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. 
Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'DelimitedTextSink': 'DelimitedTextSink'} + } + + def __init__(self, **kwargs): + super(CopySink, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.write_batch_size = kwargs.get('write_batch_size', None) + self.write_batch_timeout = kwargs.get('write_batch_timeout', None) + self.sink_retry_count = kwargs.get('sink_retry_count', None) + self.sink_retry_wait = kwargs.get('sink_retry_wait', None) + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + self.type = None + + +class AvroSink(CopySink): + """A copy activity Avro sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. 
Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Avro store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :param format_settings: Avro format settings. + :type format_settings: ~azure.mgmt.datafactory.models.AvroWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'}, + } + + def __init__(self, **kwargs): + super(AvroSink, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) + self.type = 'AvroSink' + + +class AvroSource(CopySource): + """A copy activity Avro source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Avro store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__(self, **kwargs): + super(AvroSource, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.type = 'AvroSource' + + +class FormatWriteSettings(Model): + """Format write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(FormatWriteSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = kwargs.get('type', None) + + +class AvroWriteSettings(FormatWriteSettings): + """Avro write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param record_name: Top level record name in write result, which is + required in AVRO spec. + :type record_name: str + :param record_namespace: Record namespace in the write result. + :type record_namespace: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'record_name': {'key': 'recordName', 'type': 'str'}, + 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AvroWriteSettings, self).__init__(**kwargs) + self.record_name = kwargs.get('record_name', None) + self.record_namespace = kwargs.get('record_namespace', None) + + +class AzureBatchLinkedService(LinkedService): + """Azure Batch linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param account_name: Required. The Azure Batch account name. Type: string + (or Expression with resultType string). + :type account_name: object + :param access_key: The Azure Batch account access key. + :type access_key: ~azure.mgmt.datafactory.models.SecretBase + :param batch_uri: Required. The Azure Batch URI. Type: string (or + Expression with resultType string). + :type batch_uri: object + :param pool_name: Required. The Azure Batch pool name. Type: string (or + Expression with resultType string). + :type pool_name: object + :param linked_service_name: Required. The Azure Storage linked service + reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'account_name': {'required': True}, + 'batch_uri': {'required': True}, + 'pool_name': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, + 'access_key': {'key': 'typeProperties.accessKey', 'type': 'SecretBase'}, + 'batch_uri': {'key': 'typeProperties.batchUri', 'type': 'object'}, + 'pool_name': {'key': 'typeProperties.poolName', 'type': 'object'}, + 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBatchLinkedService, self).__init__(**kwargs) + self.account_name = kwargs.get('account_name', None) + self.access_key = kwargs.get('access_key', None) + self.batch_uri = kwargs.get('batch_uri', None) + self.pool_name = kwargs.get('pool_name', None) + self.linked_service_name = kwargs.get('linked_service_name', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureBatch' + + +class AzureBlobDataset(Dataset): + """The Azure Blob storage. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param folder_path: The path of the Azure Blob storage. Type: string (or + Expression with resultType string). + :type folder_path: object + :param table_root_location: The root of blob path. Type: string (or + Expression with resultType string). + :type table_root_location: object + :param file_name: The name of the Azure Blob. Type: string (or Expression + with resultType string). + :type file_name: object + :param modified_datetime_start: The start of Azure Blob's modified + datetime. Type: string (or Expression with resultType string). 
+ :type modified_datetime_start: object + :param modified_datetime_end: The end of Azure Blob's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_end: object + :param format: The format of the Azure Blob storage. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param compression: The data compression method used for the blob storage. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'table_root_location': {'key': 'typeProperties.tableRootLocation', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, **kwargs): + super(AzureBlobDataset, self).__init__(**kwargs) + self.folder_path = kwargs.get('folder_path', None) + self.table_root_location = kwargs.get('table_root_location', None) + self.file_name = kwargs.get('file_name', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + self.format = kwargs.get('format', None) + self.compression = kwargs.get('compression', None) + self.type = 'AzureBlob' + + +class AzureBlobFSDataset(Dataset): + """The Azure Data Lake Storage Gen2 storage. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. 
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param folder_path: The path of the Azure Data Lake Storage Gen2 storage. + Type: string (or Expression with resultType string). + :type folder_path: object + :param file_name: The name of the Azure Data Lake Storage Gen2. Type: + string (or Expression with resultType string). + :type file_name: object + :param format: The format of the Azure Data Lake Storage Gen2 storage. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param compression: The data compression method used for the blob storage. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSDataset, self).__init__(**kwargs) + self.folder_path = kwargs.get('folder_path', None) + self.file_name = kwargs.get('file_name', None) + self.format = kwargs.get('format', None) + self.compression = kwargs.get('compression', None) + self.type = 'AzureBlobFSFile' + + +class AzureBlobFSLinkedService(LinkedService): + """Azure Data Lake Storage Gen2 linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. Endpoint for the Azure Data Lake Storage Gen2 + service. Type: string (or Expression with resultType string). + :type url: object + :param account_key: Account key for the Azure Data Lake Storage Gen2 + service. Type: string (or Expression with resultType string). + :type account_key: object + :param service_principal_id: The ID of the application used to + authenticate against the Azure Data Lake Storage Gen2 account. Type: + string (or Expression with resultType string). 
+ :type service_principal_id: object + :param service_principal_key: The Key of the application used to + authenticate against the Azure Data Lake Storage Gen2 account. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.account_key = kwargs.get('account_key', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureBlobFS' + + +class AzureBlobFSLocation(DatasetLocation): + """The location of azure blobFS dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param file_system: Specify the fileSystem of azure blobFS. Type: string + (or Expression with resultType string). + :type file_system: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'file_system': {'key': 'fileSystem', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSLocation, self).__init__(**kwargs) + self.file_system = kwargs.get('file_system', None) + + +class AzureBlobFSReadSettings(StoreReadSettings): + """Azure blobFS read settings. 
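+
+ A minimal construction sketch (illustrative values; the ``type``
+ discriminator is passed explicitly because this kwargs-based model does
+ not fill it in for read settings)::
+
+ read_settings = AzureBlobFSReadSettings(
+ type='AzureBlobFSReadSettings',
+ recursive=True,
+ wildcard_file_name='*.parquet')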
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Azure blobFS wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSReadSettings, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + + +class AzureBlobFSSink(CopySink): + """A copy activity Azure Data Lake Storage Gen2 sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. 
Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSSink, self).__init__(**kwargs) + self.copy_behavior = kwargs.get('copy_behavior', None) + self.type = 'AzureBlobFSSink' + + +class AzureBlobFSSource(CopySource): + """A copy activity Azure BlobFS source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param treat_empty_as_null: Treat empty as null. Type: boolean (or + Expression with resultType boolean). + :type treat_empty_as_null: object + :param skip_header_line_count: Number of header lines to skip from each + blob. Type: integer (or Expression with resultType integer). + :type skip_header_line_count: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). 
+ :type recursive: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, + 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSSource, self).__init__(**kwargs) + self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None) + self.skip_header_line_count = kwargs.get('skip_header_line_count', None) + self.recursive = kwargs.get('recursive', None) + self.type = 'AzureBlobFSSource' + + +class StoreWriteSettings(Model): + """Connector write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(StoreWriteSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = kwargs.get('type', None) + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + self.copy_behavior = kwargs.get('copy_behavior', None) + + +class AzureBlobFSWriteSettings(StoreWriteSettings): + """Azure blobFS write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSWriteSettings, self).__init__(**kwargs) + + +class AzureBlobStorageLinkedService(LinkedService): + """The azure blob storage linked service. 
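+
+ Exactly one of the connectionString, sasUri and serviceEndpoint
+ properties should be supplied, as noted below. A minimal construction
+ sketch (the connection string is a placeholder, not a working
+ credential)::
+
+ linked_service = AzureBlobStorageLinkedService(
+ connection_string='DefaultEndpointsProtocol=https;'
+ 'AccountName=<account>;AccountKey=<key>')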
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param connection_string: The connection string. It is mutually exclusive
+ with the sasUri and serviceEndpoint properties. Type: string, SecureString
+ or AzureKeyVaultSecretReference.
+ :type connection_string: object
+ :param account_key: The Azure key vault secret reference of accountKey in
+ the connection string.
+ :type account_key:
+ ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+ :param sas_uri: SAS URI of the Azure Blob Storage resource. It is mutually
+ exclusive with the connectionString and serviceEndpoint properties. Type:
+ string, SecureString or AzureKeyVaultSecretReference.
+ :type sas_uri: object
+ :param sas_token: The Azure key vault secret reference of sasToken in the
+ SAS URI.
+ :type sas_token:
+ ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+ :param service_endpoint: Blob service endpoint of the Azure Blob Storage
+ resource. It is mutually exclusive with the connectionString and sasUri
+ properties.
+ :type service_endpoint: str
+ :param service_principal_id: The ID of the service principal used to
+ authenticate against Azure Blob Storage. Type: string (or Expression
+ with resultType string).
+ :type service_principal_id: object
+ :param service_principal_key: The key of the service principal used to
+ authenticate against Azure Blob Storage.
+ :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
+ :param tenant: The name or ID of the tenant to which the service principal
+ belongs. Type: string (or Expression with resultType string).
+ :type tenant: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, + 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, + 'service_endpoint': {'key': 'typeProperties.serviceEndpoint', 'type': 'str'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AzureBlobStorageLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.account_key = kwargs.get('account_key', None) + self.sas_uri = kwargs.get('sas_uri', None) + self.sas_token = kwargs.get('sas_token', None) + self.service_endpoint = kwargs.get('service_endpoint', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureBlobStorage' + + +class AzureBlobStorageLocation(DatasetLocation): + """The location of azure blob dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param container: Specify the container of azure blob. Type: string (or + Expression with resultType string). + :type container: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'container': {'key': 'container', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobStorageLocation, self).__init__(**kwargs) + self.container = kwargs.get('container', None) + + +class AzureBlobStorageReadSettings(StoreReadSettings): + """Azure blob read settings. + + All required parameters must be populated in order to send to Azure. 
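+
+ A minimal construction sketch (wildcard values are illustrative; as with
+ the other read settings models, the caller supplies the ``type``
+ discriminator)::
+
+ read_settings = AzureBlobStorageReadSettings(
+ type='AzureBlobStorageReadSettings',
+ recursive=True,
+ wildcard_folder_path='raw/2019/*',
+ wildcard_file_name='*.csv')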
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Azure blob wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blob wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobStorageReadSettings, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + + +class AzureBlobStorageWriteSettings(StoreWriteSettings): + """Azure blob write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. 
+ :type copy_behavior: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(AzureBlobStorageWriteSettings, self).__init__(**kwargs)
+
+
+class AzureDatabricksLinkedService(LinkedService):
+ """Azure Databricks linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param domain: Required. <REGION>.azuredatabricks.net, domain name of your
+ Databricks deployment. Type: string (or Expression with resultType
+ string).
+ :type domain: object
+ :param access_token: Required. Access token for databricks REST API. Refer
+ to https://docs.azuredatabricks.net/api/latest/authentication.html. Type:
+ string (or Expression with resultType string).
+ :type access_token: ~azure.mgmt.datafactory.models.SecretBase
+ :param existing_cluster_id: The ID of an existing cluster that will be
+ used for all runs of this job. Type: string (or Expression with resultType
+ string).
+ :type existing_cluster_id: object
+ :param new_cluster_version: The Spark version of the new cluster. Type:
+ string (or Expression with resultType string).
+ :type new_cluster_version: object
+ :param new_cluster_num_of_worker: Number of worker nodes that the new
+ cluster should have. A string formatted Int32: '1' means numOfWorker is 1,
+ and '1:10' means auto-scale from a minimum of 1 to a maximum of 10. Type:
+ string (or Expression with resultType string).
+ :type new_cluster_num_of_worker: object
+ :param new_cluster_node_type: The node type of the new cluster. Type:
+ string (or Expression with resultType string).
+ :type new_cluster_node_type: object
+ :param new_cluster_spark_conf: A set of optional, user-specified Spark
+ configuration key-value pairs.
+ :type new_cluster_spark_conf: dict[str, object]
+ :param new_cluster_spark_env_vars: A set of optional, user-specified Spark
+ environment variable key-value pairs.
+ :type new_cluster_spark_env_vars: dict[str, object]
+ :param new_cluster_custom_tags: Additional tags for cluster resources.
+ :type new_cluster_custom_tags: dict[str, object]
+ :param new_cluster_driver_node_type: The driver node type for the new
+ cluster. Type: string (or Expression with resultType string).
+ :type new_cluster_driver_node_type: object
+ :param new_cluster_init_scripts: User-defined initialization scripts for
+ the new cluster. Type: array of strings (or Expression with resultType
+ array of strings).
+ :type new_cluster_init_scripts: object
+ :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new
+ cluster.
Type: boolean (or Expression with resultType boolean). + :type new_cluster_enable_elastic_disk: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'domain': {'required': True}, + 'access_token': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, + 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, + 'new_cluster_num_of_worker': {'key': 'typeProperties.newClusterNumOfWorker', 'type': 'object'}, + 'new_cluster_node_type': {'key': 'typeProperties.newClusterNodeType', 'type': 'object'}, + 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, + 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, + 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, + 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, + 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, + 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDatabricksLinkedService, self).__init__(**kwargs) + self.domain = kwargs.get('domain', None) + self.access_token = kwargs.get('access_token', None) + self.existing_cluster_id = kwargs.get('existing_cluster_id', None) + self.new_cluster_version = kwargs.get('new_cluster_version', None) + self.new_cluster_num_of_worker = kwargs.get('new_cluster_num_of_worker', None) + self.new_cluster_node_type = kwargs.get('new_cluster_node_type', None) + self.new_cluster_spark_conf = kwargs.get('new_cluster_spark_conf', None) + self.new_cluster_spark_env_vars = kwargs.get('new_cluster_spark_env_vars', None) + self.new_cluster_custom_tags = kwargs.get('new_cluster_custom_tags', None) + self.new_cluster_driver_node_type = kwargs.get('new_cluster_driver_node_type', None) + self.new_cluster_init_scripts = kwargs.get('new_cluster_init_scripts', None) + self.new_cluster_enable_elastic_disk = kwargs.get('new_cluster_enable_elastic_disk', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureDatabricks' + + +class ExecutionActivity(Activity): + """Base class for all execution activities. + + You probably want to use the sub-classes and not this class directly. 
+
+
+class ExecutionActivity(Activity):
+    """Base class for all execution activities.
+
+    You probably want to use the sub-classes and not this class directly.
+    Known sub-classes are: AzureFunctionActivity,
+    DatabricksSparkPythonActivity, DatabricksSparkJarActivity,
+    DatabricksNotebookActivity, DataLakeAnalyticsUSQLActivity,
+    AzureMLUpdateResourceActivity, AzureMLBatchExecutionActivity,
+    GetMetadataActivity, WebActivity, LookupActivity,
+    AzureDataExplorerCommandActivity, DeleteActivity,
+    SqlServerStoredProcedureActivity, CustomActivity,
+    ExecuteSSISPackageActivity, HDInsightSparkActivity,
+    HDInsightStreamingActivity, HDInsightMapReduceActivity,
+    HDInsightPigActivity, HDInsightHiveActivity, CopyActivity
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param name: Required. Activity name.
+    :type name: str
+    :param description: Activity description.
+    :type description: str
+    :param depends_on: Activity depends on condition.
+    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+    :param user_properties: Activity user properties.
+    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param linked_service_name: Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param policy: Activity policy.
+    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+    """
+
+    _validation = {
+        'name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+    }
+
+    _subtype_map = {
+        'type': {'AzureFunctionActivity': 'AzureFunctionActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'GetMetadata': 'GetMetadataActivity', 'WebActivity': 'WebActivity', 'Lookup': 'LookupActivity', 'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'Delete': 'DeleteActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'Custom': 'CustomActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'Copy': 'CopyActivity'}
+    }
+
+    def __init__(self, **kwargs):
+        super(ExecutionActivity, self).__init__(**kwargs)
+        self.linked_service_name = kwargs.get('linked_service_name', None)
+        self.policy = kwargs.get('policy', None)
+        self.type = 'Execution'
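
The _subtype_map above is what lets msrest pick the right activity subclass from the wire-side type discriminator. A hedged sketch of that lookup (the payload values are illustrative; msrest is the serializer these generated models already depend on):

    from msrest import Deserializer

    from azure.mgmt.datafactory import models

    # Registry of model classes that msrest uses to resolve type names.
    client_models = {k: v for k, v in models.__dict__.items()
                     if isinstance(v, type)}
    deserialize = Deserializer(client_models)

    payload = {
        'name': 'RunKqlCommand',
        'type': 'AzureDataExplorerCommand',  # key from _subtype_map above
        'typeProperties': {'command': '.show tables'},
    }
    activity = deserialize('ExecutionActivity', payload)
    assert isinstance(activity, models.AzureDataExplorerCommandActivity)
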
+
+
+class AzureDataExplorerCommandActivity(ExecutionActivity):
+    """Azure Data Explorer command activity.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param name: Required. Activity name.
+    :type name: str
+    :param description: Activity description.
+    :type description: str
+    :param depends_on: Activity depends on condition.
+    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+    :param user_properties: Activity user properties.
+    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param linked_service_name: Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param policy: Activity policy.
+    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+    :param command: Required. A control command, according to the Azure Data
+     Explorer command syntax. Type: string (or Expression with resultType
+     string).
+    :type command: object
+    :param command_timeout: Control command timeout. Type: string (or
+     Expression with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type command_timeout: object
+    """
+
+    _validation = {
+        'name': {'required': True},
+        'type': {'required': True},
+        'command': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+        'command': {'key': 'typeProperties.command', 'type': 'object'},
+        'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureDataExplorerCommandActivity, self).__init__(**kwargs)
+        self.command = kwargs.get('command', None)
+        self.command_timeout = kwargs.get('command_timeout', None)
+        self.type = 'AzureDataExplorerCommand'
+
+
+class AzureDataExplorerLinkedService(LinkedService):
+    """Azure Data Explorer (Kusto) linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param endpoint: Required. The endpoint of Azure Data Explorer (the
+     engine's endpoint). URL will be in the format
+     https://<clusterName>.<regionName>.kusto.windows.net. Type: string (or
+     Expression with resultType string).
+    :type endpoint: object
+    :param service_principal_id: Required. The ID of the service principal
+     used to authenticate against Azure Data Explorer. Type: string (or
+     Expression with resultType string).
+    :type service_principal_id: object
+    :param service_principal_key: Required. The key of the service principal
+     used to authenticate against Kusto.
+    :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
+    :param database: Required. Database name for connection. Type: string (or
+     Expression with resultType string).
+    :type database: object
+    :param tenant: Required. The name or ID of the tenant to which the
+     service principal belongs. Type: string (or Expression with resultType
+     string).
+    :type tenant: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'endpoint': {'required': True},
+        'service_principal_id': {'required': True},
+        'service_principal_key': {'required': True},
+        'database': {'required': True},
+        'tenant': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'},
+        'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
+        'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
+        'database': {'key': 'typeProperties.database', 'type': 'object'},
+        'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureDataExplorerLinkedService, self).__init__(**kwargs)
+        self.endpoint = kwargs.get('endpoint', None)
+        self.service_principal_id = kwargs.get('service_principal_id', None)
+        self.service_principal_key = kwargs.get('service_principal_key', None)
+        self.database = kwargs.get('database', None)
+        self.tenant = kwargs.get('tenant', None)
+        self.type = 'AzureDataExplorer'
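
A minimal construction sketch for the Kusto linked service above (the cluster URL, GUIDs, and key are placeholders; the service principal fields are required per the _validation block):

    from azure.mgmt.datafactory.models import (
        AzureDataExplorerLinkedService,
        SecureString,
    )

    adx_ls = AzureDataExplorerLinkedService(
        endpoint='https://mycluster.westeurope.kusto.windows.net',  # illustrative
        service_principal_id='00000000-0000-0000-0000-000000000000',
        service_principal_key=SecureString(value='<spn-key>'),  # placeholder
        database='TelemetryDb',
        tenant='contoso.onmicrosoft.com',
    )
    assert adx_ls.type == 'AzureDataExplorer'
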
+
+
+class AzureDataExplorerSink(CopySink):
+    """A copy activity Azure Data Explorer sink.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param write_batch_size: Write batch size. Type: integer (or Expression
+     with resultType integer), minimum: 0.
+    :type write_batch_size: object
+    :param write_batch_timeout: Write batch timeout. Type: string (or
+     Expression with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type write_batch_timeout: object
+    :param sink_retry_count: Sink retry count. Type: integer (or Expression
+     with resultType integer).
+    :type sink_retry_count: object
+    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+     resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type sink_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection
+     count for the sink data store. Type: integer (or Expression with
+     resultType integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param ingestion_mapping_name: A name of a pre-created csv mapping that
+     was defined on the target Kusto table. Type: string.
+    :type ingestion_mapping_name: object
+    :param ingestion_mapping_as_json: An explicit column mapping description
+     provided in a json format. Type: string.
+    :type ingestion_mapping_as_json: object
+    :param flush_immediately: If set to true, any aggregation will be
+     skipped. Default is false. Type: boolean.
+    :type flush_immediately: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'},
+        'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'},
+        'flush_immediately': {'key': 'flushImmediately', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureDataExplorerSink, self).__init__(**kwargs)
+        self.ingestion_mapping_name = kwargs.get('ingestion_mapping_name', None)
+        self.ingestion_mapping_as_json = kwargs.get('ingestion_mapping_as_json', None)
+        self.flush_immediately = kwargs.get('flush_immediately', None)
+        self.type = 'AzureDataExplorerSink'
+
+
+class AzureDataExplorerSource(CopySource):
+    """A copy activity Azure Data Explorer (Kusto) source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection
+     count for the source data store. Type: integer (or Expression with
+     resultType integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param query: Required. Database query. Should be a Kusto Query Language
+     (KQL) query. Type: string (or Expression with resultType string).
+    :type query: object
+    :param no_truncation: The name of the Boolean option that controls
+     whether truncation is applied to result-sets that go beyond a certain
+     row-count limit.
+    :type no_truncation: object
+    :param query_timeout: Query timeout. Type: string (or Expression with
+     resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type query_timeout: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'query': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+        'no_truncation': {'key': 'noTruncation', 'type': 'object'},
+        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureDataExplorerSource, self).__init__(**kwargs)
+        self.query = kwargs.get('query', None)
+        self.no_truncation = kwargs.get('no_truncation', None)
+        self.query_timeout = kwargs.get('query_timeout', None)
+        self.type = 'AzureDataExplorerSource'
+
+
+class AzureDataExplorerTableDataset(Dataset):
+    """The Azure Data Explorer (Kusto) dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset.
+     Type: array (or Expression with resultType array), itemType:
+     DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param table: The table name of the Azure Data Explorer database. Type:
+     string (or Expression with resultType string).
+    :type table: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'table': {'key': 'typeProperties.table', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureDataExplorerTableDataset, self).__init__(**kwargs)
+        self.table = kwargs.get('table', None)
+        self.type = 'AzureDataExplorerTable'
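
Putting the three Kusto models together: a sketch of a copy activity that reads with a KQL query and writes through the sink, assuming CopyActivity, DatasetReference, and LinkedServiceReference from elsewhere in this package (all names are illustrative):

    from azure.mgmt.datafactory.models import (
        AzureDataExplorerSink,
        AzureDataExplorerSource,
        AzureDataExplorerTableDataset,
        CopyActivity,
        DatasetReference,
        LinkedServiceReference,
    )

    # Dataset bound to a Kusto table through the linked service above.
    events = AzureDataExplorerTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='AdxLs'),
        table='Events',  # illustrative table
    )

    copy = CopyActivity(
        name='CopyFromAdx',
        inputs=[DatasetReference(reference_name='EventsIn')],
        outputs=[DatasetReference(reference_name='EventsOut')],
        source=AzureDataExplorerSource(query='Events | take 100'),
        sink=AzureDataExplorerSink(ingestion_mapping_name='EventsCsvMapping'),
    )
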
+
+
+class AzureDataLakeAnalyticsLinkedService(LinkedService):
+    """Azure Data Lake Analytics linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param account_name: Required. The Azure Data Lake Analytics account
+     name. Type: string (or Expression with resultType string).
+    :type account_name: object
+    :param service_principal_id: The ID of the application used to
+     authenticate against the Azure Data Lake Analytics account. Type: string
+     (or Expression with resultType string).
+    :type service_principal_id: object
+    :param service_principal_key: The Key of the application used to
+     authenticate against the Azure Data Lake Analytics account.
+    :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
+    :param tenant: Required. The name or ID of the tenant to which the
+     service principal belongs. Type: string (or Expression with resultType
+     string).
+    :type tenant: object
+    :param subscription_id: Data Lake Analytics account subscription ID (if
+     different from Data Factory account). Type: string (or Expression with
+     resultType string).
+    :type subscription_id: object
+    :param resource_group_name: Data Lake Analytics account resource group
+     name (if different from Data Factory account). Type: string (or
+     Expression with resultType string).
+    :type resource_group_name: object
+    :param data_lake_analytics_uri: Azure Data Lake Analytics URI. Type:
+     string (or Expression with resultType string).
+    :type data_lake_analytics_uri: object
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'account_name': {'required': True},
+        'tenant': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'account_name': {'key': 'typeProperties.accountName', 'type': 'object'},
+        'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
+        'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
+        'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
+        'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'},
+        'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'},
+        'data_lake_analytics_uri': {'key': 'typeProperties.dataLakeAnalyticsUri', 'type': 'object'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureDataLakeAnalyticsLinkedService, self).__init__(**kwargs)
+        self.account_name = kwargs.get('account_name', None)
+        self.service_principal_id = kwargs.get('service_principal_id', None)
+        self.service_principal_key = kwargs.get('service_principal_key', None)
+        self.tenant = kwargs.get('tenant', None)
+        self.subscription_id = kwargs.get('subscription_id', None)
+        self.resource_group_name = kwargs.get('resource_group_name', None)
+        self.data_lake_analytics_uri = kwargs.get('data_lake_analytics_uri', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
+        self.type = 'AzureDataLakeAnalytics'
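
A short sketch of the Data Lake Analytics linked service above with service principal authentication (account name, tenant, and GUIDs are placeholders):

    from azure.mgmt.datafactory.models import (
        AzureDataLakeAnalyticsLinkedService,
        SecureString,
    )

    adla_ls = AzureDataLakeAnalyticsLinkedService(
        account_name='myadlaaccount',  # illustrative
        tenant='00000000-0000-0000-0000-000000000000',
        service_principal_id='11111111-1111-1111-1111-111111111111',
        service_principal_key=SecureString(value='<spn-key>'),  # placeholder
    )
    assert adla_ls.type == 'AzureDataLakeAnalytics'
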
+
+
+class AzureDataLakeStoreDataset(Dataset):
+    """Azure Data Lake Store dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset.
+     Type: array (or Expression with resultType array), itemType:
+     DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param folder_path: Path to the folder in the Azure Data Lake Store.
+     Type: string (or Expression with resultType string).
+    :type folder_path: object
+    :param file_name: The name of the file in the Azure Data Lake Store.
+     Type: string (or Expression with resultType string).
+    :type file_name: object
+    :param format: The format of the Data Lake Store.
+    :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat
+    :param compression: The data compression method used for the item(s) in
+     the Azure Data Lake Store.
+    :type compression: ~azure.mgmt.datafactory.models.DatasetCompression
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'},
+        'file_name': {'key': 'typeProperties.fileName', 'type': 'object'},
+        'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'},
+        'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureDataLakeStoreDataset, self).__init__(**kwargs)
+        self.folder_path = kwargs.get('folder_path', None)
+        self.file_name = kwargs.get('file_name', None)
+        self.format = kwargs.get('format', None)
+        self.compression = kwargs.get('compression', None)
+        self.type = 'AzureDataLakeStoreFile'
+
+
+class AzureDataLakeStoreLinkedService(LinkedService):
+    """Azure Data Lake Store linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param data_lake_store_uri: Required. Data Lake Store service URI. Type:
+     string (or Expression with resultType string).
+    :type data_lake_store_uri: object
+    :param service_principal_id: The ID of the application used to
+     authenticate against the Azure Data Lake Store account. Type: string (or
+     Expression with resultType string).
+    :type service_principal_id: object
+    :param service_principal_key: The Key of the application used to
+     authenticate against the Azure Data Lake Store account.
+    :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
+    :param tenant: The name or ID of the tenant to which the service
+     principal belongs. Type: string (or Expression with resultType string).
+    :type tenant: object
+    :param account_name: Data Lake Store account name. Type: string (or
+     Expression with resultType string).
+    :type account_name: object
+    :param subscription_id: Data Lake Store account subscription ID (if
+     different from Data Factory account). Type: string (or Expression with
+     resultType string).
+    :type subscription_id: object
+    :param resource_group_name: Data Lake Store account resource group name
+     (if different from Data Factory account). Type: string (or Expression
+     with resultType string).
+    :type resource_group_name: object
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'data_lake_store_uri': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'data_lake_store_uri': {'key': 'typeProperties.dataLakeStoreUri', 'type': 'object'},
+        'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
+        'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
+        'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
+        'account_name': {'key': 'typeProperties.accountName', 'type': 'object'},
+        'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'},
+        'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureDataLakeStoreLinkedService, self).__init__(**kwargs)
+        self.data_lake_store_uri = kwargs.get('data_lake_store_uri', None)
+        self.service_principal_id = kwargs.get('service_principal_id', None)
+        self.service_principal_key = kwargs.get('service_principal_key', None)
+        self.tenant = kwargs.get('tenant', None)
+        self.account_name = kwargs.get('account_name', None)
+        self.subscription_id = kwargs.get('subscription_id', None)
+        self.resource_group_name = kwargs.get('resource_group_name', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
+        self.type = 'AzureDataLakeStore'
+
+
+class AzureDataLakeStoreLocation(DatasetLocation):
+    """The location of Azure Data Lake Store dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of dataset storage location.
+    :type type: str
+    :param folder_path: Specify the folder path of dataset. Type: string (or
+     Expression with resultType string).
+    :type folder_path: object
+    :param file_name: Specify the file name of dataset. Type: string (or
+     Expression with resultType string).
+    :type file_name: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'folder_path': {'key': 'folderPath', 'type': 'object'},
+        'file_name': {'key': 'fileName', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureDataLakeStoreLocation, self).__init__(**kwargs)
+
+
+class AzureDataLakeStoreReadSettings(StoreReadSettings):
+    """Azure data lake store read settings.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param type: Required. The read setting type.
+    :type type: str
+    :param max_concurrent_connections: The maximum concurrent connection
+     count for the source data store. Type: integer (or Expression with
+     resultType integer).
+    :type max_concurrent_connections: object
+    :param recursive: If true, files under the folder path will be read
+     recursively. Default is true. Type: boolean (or Expression with
+     resultType boolean).
+    :type recursive: object
+    :param wildcard_folder_path: ADLS wildcardFolderPath. Type: string (or
+     Expression with resultType string).
+    :type wildcard_folder_path: object
+    :param wildcard_file_name: ADLS wildcardFileName. Type: string (or
+     Expression with resultType string).
+    :type wildcard_file_name: object
+    :param enable_partition_discovery: Indicates whether to enable partition
+     discovery.
+    :type enable_partition_discovery: bool
+    :param modified_datetime_start: The start of file's modified datetime.
+     Type: string (or Expression with resultType string).
+    :type modified_datetime_start: object
+    :param modified_datetime_end: The end of file's modified datetime. Type:
+     string (or Expression with resultType string).
+    :type modified_datetime_end: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'recursive': {'key': 'recursive', 'type': 'object'},
+        'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
+        'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
+        'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'},
+        'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'},
+        'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureDataLakeStoreReadSettings, self).__init__(**kwargs)
+        self.recursive = kwargs.get('recursive', None)
+        self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None)
+        self.wildcard_file_name = kwargs.get('wildcard_file_name', None)
+        self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None)
+        self.modified_datetime_start = kwargs.get('modified_datetime_start', None)
+        self.modified_datetime_end = kwargs.get('modified_datetime_end', None)
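
A sketch of the read settings above configured for a recursive wildcard pick-up; note that, unlike the linked services, this model's type is caller-supplied rather than fixed in __init__ (paths and dates below are illustrative):

    from azure.mgmt.datafactory.models import AzureDataLakeStoreReadSettings

    read_settings = AzureDataLakeStoreReadSettings(
        type='AzureDataLakeStoreReadSettings',  # caller-supplied, per docstring
        recursive=True,
        wildcard_folder_path='landing/2019/*',  # illustrative folder
        wildcard_file_name='*.csv',
        modified_datetime_start='2019-06-01T00:00:00Z',
    )
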
+
+
+class AzureDataLakeStoreSink(CopySink):
+    """A copy activity Azure Data Lake Store sink.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param write_batch_size: Write batch size. Type: integer (or Expression
+     with resultType integer), minimum: 0.
+    :type write_batch_size: object
+    :param write_batch_timeout: Write batch timeout. Type: string (or
+     Expression with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type write_batch_timeout: object
+    :param sink_retry_count: Sink retry count. Type: integer (or Expression
+     with resultType integer).
+    :type sink_retry_count: object
+    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+     resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type sink_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection
+     count for the sink data store. Type: integer (or Expression with
+     resultType integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param copy_behavior: The type of copy behavior for copy sink.
+    :type copy_behavior: object
+    :param enable_adls_single_file_parallel: Single File Parallel.
+    :type enable_adls_single_file_parallel: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
+        'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureDataLakeStoreSink, self).__init__(**kwargs)
+        self.copy_behavior = kwargs.get('copy_behavior', None)
+        self.enable_adls_single_file_parallel = kwargs.get('enable_adls_single_file_parallel', None)
+        self.type = 'AzureDataLakeStoreSink'
+
+
+class AzureDataLakeStoreSource(CopySource):
+    """A copy activity Azure Data Lake source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection
+     count for the source data store. Type: integer (or Expression with
+     resultType integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param recursive: If true, files under the folder path will be read
+     recursively. Default is true. Type: boolean (or Expression with
+     resultType boolean).
+    :type recursive: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'recursive': {'key': 'recursive', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureDataLakeStoreSource, self).__init__(**kwargs)
+        self.recursive = kwargs.get('recursive', None)
+        self.type = 'AzureDataLakeStoreSource'
+
+
+class AzureDataLakeStoreWriteSettings(StoreWriteSettings):
+    """Azure data lake store write settings.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param type: Required. The write setting type.
+    :type type: str
+    :param max_concurrent_connections: The maximum concurrent connection
+     count for the source data store. Type: integer (or Expression with
+     resultType integer).
+    :type max_concurrent_connections: object
+    :param copy_behavior: The type of copy behavior for copy sink.
+    :type copy_behavior: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureDataLakeStoreWriteSettings, self).__init__(**kwargs)
+
+
+class AzureFunctionActivity(ExecutionActivity):
+    """Azure Function activity.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param name: Required. Activity name.
+    :type name: str
+    :param description: Activity description.
+    :type description: str
+    :param depends_on: Activity depends on condition.
+    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+    :param user_properties: Activity user properties.
+    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param linked_service_name: Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param policy: Activity policy.
+    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+    :param method: Required. REST API method for target endpoint. Possible
+     values include: 'GET', 'POST', 'PUT', 'DELETE', 'OPTIONS', 'HEAD',
+     'TRACE'
+    :type method: str or
+     ~azure.mgmt.datafactory.models.AzureFunctionActivityMethod
+    :param function_name: Required. Name of the Function that the Azure
+     Function Activity will call. Type: string (or Expression with resultType
+     string).
+    :type function_name: object
+    :param headers: Represents the headers that will be sent to the request.
+     For example, to set the language and type on a request: "headers" : {
+     "Accept-Language": "en-us", "Content-Type": "application/json" }. Type:
+     string (or Expression with resultType string).
+    :type headers: object
+    :param body: Represents the payload that will be sent to the endpoint.
+     Required for POST/PUT method, not allowed for GET method. Type: string
+     (or Expression with resultType string).
+    :type body: object
+    """
+
+    _validation = {
+        'name': {'required': True},
+        'type': {'required': True},
+        'method': {'required': True},
+        'function_name': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+        'method': {'key': 'typeProperties.method', 'type': 'str'},
+        'function_name': {'key': 'typeProperties.functionName', 'type': 'object'},
+        'headers': {'key': 'typeProperties.headers', 'type': 'object'},
+        'body': {'key': 'typeProperties.body', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureFunctionActivity, self).__init__(**kwargs)
+        self.method = kwargs.get('method', None)
+        self.function_name = kwargs.get('function_name', None)
+        self.headers = kwargs.get('headers', None)
+        self.body = kwargs.get('body', None)
+        self.type = 'AzureFunctionActivity'
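
A sketch of the activity above posting to a function (the function and header/body values are illustrative; per the docstring, body is required for POST/PUT and disallowed for GET):

    from azure.mgmt.datafactory.models import AzureFunctionActivity

    fn_activity = AzureFunctionActivity(
        name='NotifyOnCompletion',
        method='POST',
        function_name='HttpTriggerJob',  # illustrative function name
        headers={'Content-Type': 'application/json'},
        body='{"status": "done"}',
    )
    assert fn_activity.type == 'AzureFunctionActivity'
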
+
+
+class AzureFunctionLinkedService(LinkedService):
+    """Azure Function linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param function_app_url: Required. The endpoint of the Azure Function
+     App. URL will be in the format
+     https://<accountName>.azurewebsites.net.
+    :type function_app_url: object
+    :param function_key: Function or Host key for Azure Function App.
+    :type function_key: ~azure.mgmt.datafactory.models.SecretBase
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'function_app_url': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'function_app_url': {'key': 'typeProperties.functionAppUrl', 'type': 'object'},
+        'function_key': {'key': 'typeProperties.functionKey', 'type': 'SecretBase'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureFunctionLinkedService, self).__init__(**kwargs)
+        self.function_app_url = kwargs.get('function_app_url', None)
+        self.function_key = kwargs.get('function_key', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
+        self.type = 'AzureFunction'
+
+
+class AzureKeyVaultLinkedService(LinkedService):
+    """Azure Key Vault linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param base_url: Required. The base URL of the Azure Key Vault, e.g.
+     https://myakv.vault.azure.net. Type: string (or Expression with
+     resultType string).
+    :type base_url: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'base_url': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'base_url': {'key': 'typeProperties.baseUrl', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureKeyVaultLinkedService, self).__init__(**kwargs)
+        self.base_url = kwargs.get('base_url', None)
+        self.type = 'AzureKeyVault'
+
+
+class SecretBase(Model):
+    """The base definition of a secret type.
+
+    You probably want to use the sub-classes and not this class directly.
+    Known sub-classes are: SecureString, AzureKeyVaultSecretReference
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param type: Required. Constant filled by server.
+    :type type: str
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+    }
+
+    _subtype_map = {
+        'type': {'SecureString': 'SecureString', 'AzureKeyVaultSecret': 'AzureKeyVaultSecretReference'}
+    }
+
+    def __init__(self, **kwargs):
+        super(SecretBase, self).__init__(**kwargs)
+        self.type = None
+
+
+class AzureKeyVaultSecretReference(SecretBase):
+    """Azure Key Vault secret reference.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param store: Required. The Azure Key Vault linked service reference.
+    :type store: ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param secret_name: Required. The name of the secret in Azure Key Vault.
+     Type: string (or Expression with resultType string).
+    :type secret_name: object
+    :param secret_version: The version of the secret in Azure Key Vault. The
+     default value is the latest version of the secret. Type: string (or
+     Expression with resultType string).
+    :type secret_version: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'store': {'required': True},
+        'secret_name': {'required': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'store': {'key': 'store', 'type': 'LinkedServiceReference'},
+        'secret_name': {'key': 'secretName', 'type': 'object'},
+        'secret_version': {'key': 'secretVersion', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureKeyVaultSecretReference, self).__init__(**kwargs)
+        self.store = kwargs.get('store', None)
+        self.secret_name = kwargs.get('secret_name', None)
+        self.secret_version = kwargs.get('secret_version', None)
+        self.type = 'AzureKeyVaultSecret'
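
A sketch of the secret reference above used in place of an inline secret, so a key is resolved from Key Vault at runtime (the linked service and secret names are illustrative):

    from azure.mgmt.datafactory.models import (
        AzureFunctionLinkedService,
        AzureKeyVaultSecretReference,
        LinkedServiceReference,
    )

    function_key = AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='MyKeyVaultLs'),
        secret_name='function-host-key',  # illustrative secret name
    )
    func_ls = AzureFunctionLinkedService(
        function_app_url='https://myfunctions.azurewebsites.net',  # illustrative
        function_key=function_key,  # any SecretBase subclass is accepted
    )
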
+
+
+class AzureMariaDBLinkedService(LinkedService):
+    """Azure Database for MariaDB linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param connection_string: An ODBC connection string. Type: string,
+     SecureString or AzureKeyVaultSecretReference.
+    :type connection_string: object
+    :param pwd: The Azure key vault secret reference of password in
+     connection string.
+    :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+        'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureMariaDBLinkedService, self).__init__(**kwargs)
+        self.connection_string = kwargs.get('connection_string', None)
+        self.pwd = kwargs.get('pwd', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
+        self.type = 'AzureMariaDB'
+
+
+class AzureMariaDBSource(CopySource):
+    """A copy activity Azure MariaDB source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection
+     count for the source data store. Type: integer (or Expression with
+     resultType integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param query: A query to retrieve data from source. Type: string (or
+     Expression with resultType string).
+    :type query: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureMariaDBSource, self).__init__(**kwargs)
+        self.query = kwargs.get('query', None)
+        self.type = 'AzureMariaDBSource'
+
+
+class AzureMariaDBTableDataset(Dataset):
+    """Azure Database for MariaDB dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset.
+     Type: array (or Expression with resultType array), itemType:
+     DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param table_name: The table name. Type: string (or Expression with
+     resultType string).
+    :type table_name: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureMariaDBTableDataset, self).__init__(**kwargs)
+        self.table_name = kwargs.get('table_name', None)
+        self.type = 'AzureMariaDBTable'
+
+
+class AzureMLBatchExecutionActivity(ExecutionActivity):
+    """Azure ML Batch Execution activity.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param name: Required. Activity name.
+    :type name: str
+    :param description: Activity description.
+    :type description: str
+    :param depends_on: Activity depends on condition.
+    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+    :param user_properties: Activity user properties.
+    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param linked_service_name: Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param policy: Activity policy.
+    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+    :param global_parameters: Key,Value pairs to be passed to the Azure ML
+     Batch Execution Service endpoint. Keys must match the names of web
+     service parameters defined in the published Azure ML web service. Values
+     will be passed in the GlobalParameters property of the Azure ML batch
+     execution request.
+    :type global_parameters: dict[str, object]
+    :param web_service_outputs: Key,Value pairs, mapping the names of Azure
+     ML endpoint's Web Service Outputs to AzureMLWebServiceFile objects
+     specifying the output Blob locations. This information will be passed in
+     the WebServiceOutputs property of the Azure ML batch execution request.
+    :type web_service_outputs: dict[str,
+     ~azure.mgmt.datafactory.models.AzureMLWebServiceFile]
+    :param web_service_inputs: Key,Value pairs, mapping the names of Azure ML
+     endpoint's Web Service Inputs to AzureMLWebServiceFile objects specifying
+     the input Blob locations. This information will be passed in the
+     WebServiceInputs property of the Azure ML batch execution request.
+    :type web_service_inputs: dict[str,
+     ~azure.mgmt.datafactory.models.AzureMLWebServiceFile]
+    """
+
+    _validation = {
+        'name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+        'global_parameters': {'key': 'typeProperties.globalParameters', 'type': '{object}'},
+        'web_service_outputs': {'key': 'typeProperties.webServiceOutputs', 'type': '{AzureMLWebServiceFile}'},
+        'web_service_inputs': {'key': 'typeProperties.webServiceInputs', 'type': '{AzureMLWebServiceFile}'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureMLBatchExecutionActivity, self).__init__(**kwargs)
+        self.global_parameters = kwargs.get('global_parameters', None)
+        self.web_service_outputs = kwargs.get('web_service_outputs', None)
+        self.web_service_inputs = kwargs.get('web_service_inputs', None)
+        self.type = 'AzureMLBatchExecution'
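
A sketch of the batch execution activity above, mapping the web service's named inputs and outputs to Blob locations via AzureMLWebServiceFile (paths, parameter, and reference names are illustrative):

    from azure.mgmt.datafactory.models import (
        AzureMLBatchExecutionActivity,
        AzureMLWebServiceFile,
        LinkedServiceReference,
    )

    blob_ls = LinkedServiceReference(reference_name='BlobLs')  # illustrative
    score = AzureMLBatchExecutionActivity(
        name='ScoreBatch',
        global_parameters={'Threshold': '0.8'},  # illustrative parameter
        web_service_inputs={
            'input1': AzureMLWebServiceFile(
                file_path='batch/input.csv', linked_service_name=blob_ls),
        },
        web_service_outputs={
            'output1': AzureMLWebServiceFile(
                file_path='batch/scored.csv', linked_service_name=blob_ls),
        },
    )
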
+ :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'ml_endpoint': {'required': True}, + 'api_key': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'ml_endpoint': {'key': 'typeProperties.mlEndpoint', 'type': 'object'}, + 'api_key': {'key': 'typeProperties.apiKey', 'type': 'SecretBase'}, + 'update_resource_endpoint': {'key': 'typeProperties.updateResourceEndpoint', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureMLLinkedService, self).__init__(**kwargs) + self.ml_endpoint = kwargs.get('ml_endpoint', None) + self.api_key = kwargs.get('api_key', None) + self.update_resource_endpoint = kwargs.get('update_resource_endpoint', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureML' + + +class AzureMLUpdateResourceActivity(ExecutionActivity): + """Azure ML Update Resource management activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param trained_model_name: Required. Name of the Trained Model module in + the Web Service experiment to be updated. Type: string (or Expression with + resultType string). + :type trained_model_name: object + :param trained_model_linked_service_name: Required. Name of Azure Storage + linked service holding the .ilearner file that will be uploaded by the + update operation. 
+ :type trained_model_linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param trained_model_file_path: Required. The relative file path in + trainedModelLinkedService to represent the .ilearner file that will be + uploaded by the update operation. Type: string (or Expression with + resultType string). + :type trained_model_file_path: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'trained_model_name': {'required': True}, + 'trained_model_linked_service_name': {'required': True}, + 'trained_model_file_path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'trained_model_name': {'key': 'typeProperties.trainedModelName', 'type': 'object'}, + 'trained_model_linked_service_name': {'key': 'typeProperties.trainedModelLinkedServiceName', 'type': 'LinkedServiceReference'}, + 'trained_model_file_path': {'key': 'typeProperties.trainedModelFilePath', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureMLUpdateResourceActivity, self).__init__(**kwargs) + self.trained_model_name = kwargs.get('trained_model_name', None) + self.trained_model_linked_service_name = kwargs.get('trained_model_linked_service_name', None) + self.trained_model_file_path = kwargs.get('trained_model_file_path', None) + self.type = 'AzureMLUpdateResource' + + +class AzureMLWebServiceFile(Model): + """Azure ML WebService Input/Output file. + + All required parameters must be populated in order to send to Azure. + + :param file_path: Required. The relative file path, including container + name, in the Azure Blob Storage specified by the LinkedService. Type: + string (or Expression with resultType string). + :type file_path: object + :param linked_service_name: Required. Reference to an Azure Storage + LinkedService, where the Azure ML WebService Input/Output file is located. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + """ + + _validation = { + 'file_path': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'file_path': {'key': 'filePath', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + } + + def __init__(self, **kwargs): + super(AzureMLWebServiceFile, self).__init__(**kwargs) + self.file_path = kwargs.get('file_path', None) + self.linked_service_name = kwargs.get('linked_service_name', None) + + +class AzureMySqlLinkedService(LinkedService): + """Azure MySQL database linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service.
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureMySqlLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureMySql' + + +class AzureMySqlSink(CopySink): + """A copy activity Azure MySql sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). 
+ :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureMySqlSink, self).__init__(**kwargs) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.type = 'AzureMySqlSink' + + +class AzureMySqlSource(CopySource): + """A copy activity Azure MySQL source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureMySqlSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'AzureMySqlSource' + + +class AzureMySqlTableDataset(Dataset): + """The Azure MySQL database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Azure MySQL database table name. Type: string (or + Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureMySqlTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'AzureMySqlTable' + + +class AzurePostgreSqlLinkedService(LinkedService): + """Azure PostgreSQL linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzurePostgreSqlLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzurePostgreSql' + + +class AzurePostgreSqlSink(CopySink): + """A copy activity Azure PostgreSQL sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). + :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzurePostgreSqlSink, self).__init__(**kwargs) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.type = 'AzurePostgreSqlSink' + + +class AzurePostgreSqlSource(CopySource): + """A copy activity Azure PostgreSQL source. + + All required parameters must be populated in order to send to Azure. 
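For orientation, a minimal usage sketch of the Azure PostgreSQL models defined above, assuming only the models in this package; the server, database, and query names are hypothetical placeholders:

from azure.mgmt.datafactory.models import (
    AzurePostgreSqlLinkedService,
    AzurePostgreSqlSource,
    SecureString,
)

# connection_string accepts a plain string, a SecureString, or an
# AzureKeyVaultSecretReference; a SecureString keeps it out of plain text.
linked_service = AzurePostgreSqlLinkedService(
    connection_string=SecureString(
        value='host=myserver.postgres.database.azure.com;port=5432;'
              'database=mydb;uid=myadmin'),
)

# The source's query is a plain string here, but it may be any ADF
# expression object with resultType string.
source = AzurePostgreSqlSource(query='SELECT * FROM public.my_table')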
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzurePostgreSqlSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'AzurePostgreSqlSource' + + +class AzurePostgreSqlTableDataset(Dataset): + """Azure PostgreSQL dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name of the Azure PostgreSQL database which + includes both schema and table. Type: string (or Expression with + resultType string). + :type table_name: object + :param table: The table name of the Azure PostgreSQL database. Type: + string (or Expression with resultType string). + :type table: object + :param azure_postgre_sql_table_dataset_schema: The schema name of the + Azure PostgreSQL database. Type: string (or Expression with resultType + string). 
+ :type azure_postgre_sql_table_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'azure_postgre_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzurePostgreSqlTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.azure_postgre_sql_table_dataset_schema = kwargs.get('azure_postgre_sql_table_dataset_schema', None) + self.type = 'AzurePostgreSqlTable' + + +class AzureQueueSink(CopySink): + """A copy activity Azure Queue sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AzureQueueSink, self).__init__(**kwargs) + self.type = 'AzureQueueSink' + + +class AzureSearchIndexDataset(Dataset): + """The Azure Search Index. + + All required parameters must be populated in order to send to Azure. 
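Since Dataset already reserves the schema attribute for the physical column layout, the database schema name of AzurePostgreSqlTableDataset is exposed under the prefixed name azure_postgre_sql_table_dataset_schema and serialized back to typeProperties.schema, as the attribute map above shows. A brief sketch under that assumption; the linked service reference name is a placeholder:

from azure.mgmt.datafactory.models import (
    AzurePostgreSqlTableDataset,
    LinkedServiceReference,
)

dataset = AzurePostgreSqlTableDataset(
    linked_service_name=LinkedServiceReference(
        reference_name='AzurePostgreSqlLinkedService'),
    # Database schema name; distinct from the inherited 'schema' property,
    # which describes the dataset's physical column types.
    azure_postgre_sql_table_dataset_schema='public',
    table='my_table',
)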
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param index_name: Required. The name of the Azure Search Index. Type: + string (or Expression with resultType string). + :type index_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'index_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'index_name': {'key': 'typeProperties.indexName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSearchIndexDataset, self).__init__(**kwargs) + self.index_name = kwargs.get('index_name', None) + self.type = 'AzureSearchIndex' + + +class AzureSearchIndexSink(CopySink): + """A copy activity Azure Search Index sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). 
+ :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param write_behavior: Specify the write behavior when upserting documents + into Azure Search Index. Possible values include: 'Merge', 'Upload' + :type write_behavior: str or + ~azure.mgmt.datafactory.models.AzureSearchIndexWriteBehaviorType + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AzureSearchIndexSink, self).__init__(**kwargs) + self.write_behavior = kwargs.get('write_behavior', None) + self.type = 'AzureSearchIndexSink' + + +class AzureSearchLinkedService(LinkedService): + """Linked service for Windows Azure Search Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. URL for Azure Search service. Type: string (or + Expression with resultType string). + :type url: object + :param key: Admin Key for Azure Search service. + :type key: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'key': {'key': 'typeProperties.key', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSearchLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.key = kwargs.get('key', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureSearch' + + +class AzureSqlDatabaseLinkedService(LinkedService): + """Microsoft Azure SQL Database linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param service_principal_id: The ID of the service principal used to + authenticate against Azure SQL Database. Type: string (or Expression with + resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to + authenticate against Azure SQL Database. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSqlDatabaseLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureSqlDatabase' + + +class AzureSqlDWLinkedService(LinkedService): + """Azure SQL Data Warehouse linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param service_principal_id: The ID of the service principal used to + authenticate against Azure SQL Data Warehouse. Type: string (or Expression + with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to + authenticate against Azure SQL Data Warehouse. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager.
Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSqlDWLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureSqlDW' + + +class AzureSqlDWTableDataset(Dataset): + """The Azure SQL Data Warehouse dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param azure_sql_dw_table_dataset_schema: The schema name of the Azure SQL + Data Warehouse. Type: string (or Expression with resultType string). + :type azure_sql_dw_table_dataset_schema: object + :param table: The table name of the Azure SQL Data Warehouse. Type: string + (or Expression with resultType string). 
+ :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'azure_sql_dw_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSqlDWTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.azure_sql_dw_table_dataset_schema = kwargs.get('azure_sql_dw_table_dataset_schema', None) + self.table = kwargs.get('table', None) + self.type = 'AzureSqlDWTable' + + +class AzureSqlMILinkedService(LinkedService): + """Azure SQL Managed Instance linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param service_principal_id: The ID of the service principal used to + authenticate against Azure SQL Managed Instance. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to + authenticate against Azure SQL Managed Instance. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSqlMILinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureSqlMI' + + +class AzureSqlMITableDataset(Dataset): + """The Azure SQL Managed Instance dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param azure_sql_mi_table_dataset_schema: The schema name of the Azure SQL + Managed Instance. Type: string (or Expression with resultType string). + :type azure_sql_mi_table_dataset_schema: object + :param table: The table name of the Azure SQL Managed Instance dataset. + Type: string (or Expression with resultType string). 
+ :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'azure_sql_mi_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSqlMITableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.azure_sql_mi_table_dataset_schema = kwargs.get('azure_sql_mi_table_dataset_schema', None) + self.table = kwargs.get('table', None) + self.type = 'AzureSqlMITable' + + +class AzureSqlSink(CopySink): + """A copy activity Azure SQL sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). 
+ :type stored_procedure_table_type_parameter_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSqlSink, self).__init__(**kwargs) + self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) + self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.type = 'AzureSqlSink' + + +class AzureSqlSource(CopySource): + """A copy activity Azure SQL source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Database source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. 
+ :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSqlSource, self).__init__(**kwargs) + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.produce_additional_types = kwargs.get('produce_additional_types', None) + self.type = 'AzureSqlSource' + + +class AzureSqlTableDataset(Dataset): + """The Azure SQL Server database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param azure_sql_table_dataset_schema: The schema name of the Azure SQL + database. Type: string (or Expression with resultType string). + :type azure_sql_table_dataset_schema: object + :param table: The table name of the Azure SQL database. Type: string (or + Expression with resultType string). 
+    :type table: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+        'azure_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
+        'table': {'key': 'typeProperties.table', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureSqlTableDataset, self).__init__(**kwargs)
+        self.table_name = kwargs.get('table_name', None)
+        self.azure_sql_table_dataset_schema = kwargs.get('azure_sql_table_dataset_schema', None)
+        self.table = kwargs.get('table', None)
+        self.type = 'AzureSqlTable'
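# A minimal usage sketch for the dataset above: since tableName is being
# retired, schema and table are passed separately. The linked service name
# and table identifiers are illustrative placeholders.
from azure.mgmt.datafactory.models import (
    AzureSqlTableDataset, LinkedServiceReference)

dataset = AzureSqlTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='AzureSqlLinkedService'),
    azure_sql_table_dataset_schema='dbo',  # serialized as typeProperties.schema
    table='SalesOrders',                   # serialized as typeProperties.table
)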
+
+
+class AzureStorageLinkedService(LinkedService):
+    """The storage account linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param connection_string: The connection string. It is mutually exclusive
+     with sasUri property. Type: string, SecureString or
+     AzureKeyVaultSecretReference.
+    :type connection_string: object
+    :param account_key: The Azure key vault secret reference of accountKey in
+     connection string.
+    :type account_key:
+     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+    :param sas_uri: SAS URI of the Azure Storage resource. It is mutually
+     exclusive with connectionString property. Type: string, SecureString or
+     AzureKeyVaultSecretReference.
+    :type sas_uri: object
+    :param sas_token: The Azure key vault secret reference of sasToken in sas
+     uri.
+    :type sas_token:
+     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: str
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+        'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'},
+        'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'},
+        'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureStorageLinkedService, self).__init__(**kwargs)
+        self.connection_string = kwargs.get('connection_string', None)
+        self.account_key = kwargs.get('account_key', None)
+        self.sas_uri = kwargs.get('sas_uri', None)
+        self.sas_token = kwargs.get('sas_token', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
+        self.type = 'AzureStorage'
+
+
+class AzureTableDataset(Dataset):
+    """The Azure Table storage dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param table_name: Required. The table name of the Azure Table storage.
+     Type: string (or Expression with resultType string).
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'table_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'AzureTable' + + +class AzureTableSink(CopySink): + """A copy activity Azure Table sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param azure_table_default_partition_key_value: Azure Table default + partition key value. Type: string (or Expression with resultType string). + :type azure_table_default_partition_key_value: object + :param azure_table_partition_key_name: Azure Table partition key name. + Type: string (or Expression with resultType string). + :type azure_table_partition_key_name: object + :param azure_table_row_key_name: Azure Table row key name. Type: string + (or Expression with resultType string). + :type azure_table_row_key_name: object + :param azure_table_insert_type: Azure Table insert type. Type: string (or + Expression with resultType string). 
+    :type azure_table_insert_type: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'},
+        'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'},
+        'azure_table_row_key_name': {'key': 'azureTableRowKeyName', 'type': 'object'},
+        'azure_table_insert_type': {'key': 'azureTableInsertType', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureTableSink, self).__init__(**kwargs)
+        self.azure_table_default_partition_key_value = kwargs.get('azure_table_default_partition_key_value', None)
+        self.azure_table_partition_key_name = kwargs.get('azure_table_partition_key_name', None)
+        self.azure_table_row_key_name = kwargs.get('azure_table_row_key_name', None)
+        self.azure_table_insert_type = kwargs.get('azure_table_insert_type', None)
+        self.type = 'AzureTableSink'
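# A usage sketch for AzureTableSink: every typed property is optional and is
# passed as a keyword argument; the constructor pins the 'type' discriminator
# itself. Values below are illustrative.
from azure.mgmt.datafactory.models import AzureTableSink

sink = AzureTableSink(
    azure_table_partition_key_name='region',            # azureTablePartitionKeyName
    azure_table_default_partition_key_value='unknown',  # used when no key is present
    azure_table_insert_type='merge',
    write_batch_size=100,
)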
+
+
+class AzureTableSource(CopySource):
+    """A copy activity Azure Table source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param azure_table_source_query: Azure Table source query. Type: string
+     (or Expression with resultType string).
+    :type azure_table_source_query: object
+    :param azure_table_source_ignore_table_not_found: Azure Table source
+     ignore table not found. Type: boolean (or Expression with resultType
+     boolean).
+    :type azure_table_source_ignore_table_not_found: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'},
+        'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureTableSource, self).__init__(**kwargs)
+        self.azure_table_source_query = kwargs.get('azure_table_source_query', None)
+        self.azure_table_source_ignore_table_not_found = kwargs.get('azure_table_source_ignore_table_not_found', None)
+        self.type = 'AzureTableSource'
+
+
+class AzureTableStorageLinkedService(LinkedService):
+    """The Azure Table storage linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param connection_string: The connection string. It is mutually exclusive
+     with sasUri property. Type: string, SecureString or
+     AzureKeyVaultSecretReference.
+    :type connection_string: object
+    :param account_key: The Azure key vault secret reference of accountKey in
+     connection string.
+    :type account_key:
+     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+    :param sas_uri: SAS URI of the Azure Storage resource. It is mutually
+     exclusive with connectionString property. Type: string, SecureString or
+     AzureKeyVaultSecretReference.
+    :type sas_uri: object
+    :param sas_token: The Azure key vault secret reference of sasToken in sas
+     uri.
+    :type sas_token:
+     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, + 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AzureTableStorageLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.account_key = kwargs.get('account_key', None) + self.sas_uri = kwargs.get('sas_uri', None) + self.sas_token = kwargs.get('sas_token', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureTableStorage' + + +class BinaryDataset(Dataset): + """Binary dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the Binary storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param compression: The data compression method used for the binary + dataset. 
+ :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, **kwargs): + super(BinaryDataset, self).__init__(**kwargs) + self.location = kwargs.get('location', None) + self.compression = kwargs.get('compression', None) + self.type = 'Binary' + + +class BinarySink(CopySink): + """A copy activity Binary sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Binary store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + } + + def __init__(self, **kwargs): + super(BinarySink, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.type = 'BinarySink' + + +class BinarySource(CopySource): + """A copy activity Binary source. + + All required parameters must be populated in order to send to Azure. 
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param store_settings: Binary store settings.
+    :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
+    }
+
+    def __init__(self, **kwargs):
+        super(BinarySource, self).__init__(**kwargs)
+        self.store_settings = kwargs.get('store_settings', None)
+        self.type = 'BinarySource'
+
+
+class Trigger(Model):
+    """Azure data factory nested object which contains information about
+    creating a pipeline run.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: RerunTumblingWindowTrigger, TumblingWindowTrigger,
+    MultiplePipelineTrigger
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Trigger description.
+    :type description: str
+    :ivar runtime_state: Indicates if trigger is running or not. Updated when
+     Start/Stop APIs are called on the Trigger. Possible values include:
+     'Started', 'Stopped', 'Disabled'
+    :vartype runtime_state: str or
+     ~azure.mgmt.datafactory.models.TriggerRuntimeState
+    :param annotations: List of tags that can be used for describing the
+     trigger.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    """
+
+    _validation = {
+        'runtime_state': {'readonly': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'runtime_state': {'key': 'runtimeState', 'type': 'str'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+    }
+
+    _subtype_map = {
+        'type': {'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'}
+    }
+
+    def __init__(self, **kwargs):
+        super(Trigger, self).__init__(**kwargs)
+        self.additional_properties = kwargs.get('additional_properties', None)
+        self.description = kwargs.get('description', None)
+        self.runtime_state = None
+        self.annotations = kwargs.get('annotations', None)
+        self.type = None
+
+
+class MultiplePipelineTrigger(Trigger):
+    """Base class for all triggers that support a one-to-many model from
+    trigger to pipeline.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: BlobEventsTrigger, BlobTrigger, ScheduleTrigger
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Trigger description.
+    :type description: str
+    :ivar runtime_state: Indicates if trigger is running or not. Updated when
+     Start/Stop APIs are called on the Trigger. Possible values include:
+     'Started', 'Stopped', 'Disabled'
+    :vartype runtime_state: str or
+     ~azure.mgmt.datafactory.models.TriggerRuntimeState
+    :param annotations: List of tags that can be used for describing the
+     trigger.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param pipelines: Pipelines that need to be started.
+    :type pipelines:
+     list[~azure.mgmt.datafactory.models.TriggerPipelineReference]
+    """
+
+    _validation = {
+        'runtime_state': {'readonly': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'runtime_state': {'key': 'runtimeState', 'type': 'str'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'},
+    }
+
+    _subtype_map = {
+        'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'ScheduleTrigger': 'ScheduleTrigger'}
+    }
+
+    def __init__(self, **kwargs):
+        super(MultiplePipelineTrigger, self).__init__(**kwargs)
+        self.pipelines = kwargs.get('pipelines', None)
+        self.type = 'MultiplePipelineTrigger'
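# A sketch of how the _subtype_map declarations above drive polymorphic
# deserialization: msrest reads the 'type' discriminator, flattens the nested
# subtype maps (Trigger -> MultiplePipelineTrigger -> BlobTrigger, ...), and
# instantiates the concrete class. The payload here is illustrative, and the
# registry mirrors the one the generated client builds for itself.
from msrest import Deserializer

from azure.mgmt.datafactory import models

client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
deserialize = Deserializer(client_models)

trigger = deserialize('Trigger', {'type': 'BlobTrigger', 'description': 'example'})
assert isinstance(trigger, models.BlobTrigger)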
+
+
+class BlobEventsTrigger(MultiplePipelineTrigger):
+    """Trigger that runs every time a Blob event occurs.
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Trigger description.
+    :type description: str
+    :ivar runtime_state: Indicates if trigger is running or not. Updated when
+     Start/Stop APIs are called on the Trigger. Possible values include:
+     'Started', 'Stopped', 'Disabled'
+    :vartype runtime_state: str or
+     ~azure.mgmt.datafactory.models.TriggerRuntimeState
+    :param annotations: List of tags that can be used for describing the
+     trigger.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param pipelines: Pipelines that need to be started.
+    :type pipelines:
+     list[~azure.mgmt.datafactory.models.TriggerPipelineReference]
+    :param blob_path_begins_with: The blob path must begin with the pattern
+     provided for trigger to fire. For example, '/records/blobs/december/' will
+     only fire the trigger for blobs in the december folder under the records
+     container. At least one of these must be provided: blobPathBeginsWith,
+     blobPathEndsWith.
+    :type blob_path_begins_with: str
+    :param blob_path_ends_with: The blob path must end with the pattern
+     provided for trigger to fire. For example, 'december/boxes.csv' will only
+     fire the trigger for blobs named boxes in a december folder. At least one
+     of these must be provided: blobPathBeginsWith, blobPathEndsWith.
+    :type blob_path_ends_with: str
+    :param events: Required. The type of events that cause this trigger to
+     fire.
+    :type events: list[str or ~azure.mgmt.datafactory.models.BlobEventTypes]
+    :param scope: Required. The ARM resource ID of the Storage Account.
+    :type scope: str
+    """
+
+    _validation = {
+        'runtime_state': {'readonly': True},
+        'type': {'required': True},
+        'events': {'required': True},
+        'scope': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'runtime_state': {'key': 'runtimeState', 'type': 'str'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'},
+        'blob_path_begins_with': {'key': 'typeProperties.blobPathBeginsWith', 'type': 'str'},
+        'blob_path_ends_with': {'key': 'typeProperties.blobPathEndsWith', 'type': 'str'},
+        'events': {'key': 'typeProperties.events', 'type': '[str]'},
+        'scope': {'key': 'typeProperties.scope', 'type': 'str'},
+    }
+
+    def __init__(self, **kwargs):
+        super(BlobEventsTrigger, self).__init__(**kwargs)
+        self.blob_path_begins_with = kwargs.get('blob_path_begins_with', None)
+        self.blob_path_ends_with = kwargs.get('blob_path_ends_with', None)
+        self.events = kwargs.get('events', None)
+        self.scope = kwargs.get('scope', None)
+        self.type = 'BlobEventsTrigger'
+
+
+class BlobSink(CopySink):
+    """A copy activity Azure Blob sink.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param write_batch_size: Write batch size. Type: integer (or Expression
+     with resultType integer), minimum: 0.
+    :type write_batch_size: object
+    :param write_batch_timeout: Write batch timeout. Type: string (or
+     Expression with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type write_batch_timeout: object
+    :param sink_retry_count: Sink retry count. Type: integer (or Expression
+     with resultType integer).
+    :type sink_retry_count: object
+    :param sink_retry_wait: Sink retry wait.
Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param blob_writer_overwrite_files: Blob writer overwrite files. Type: + boolean (or Expression with resultType boolean). + :type blob_writer_overwrite_files: object + :param blob_writer_date_time_format: Blob writer date time format. Type: + string (or Expression with resultType string). + :type blob_writer_date_time_format: object + :param blob_writer_add_header: Blob writer add header. Type: boolean (or + Expression with resultType boolean). + :type blob_writer_add_header: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, + 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, + 'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(BlobSink, self).__init__(**kwargs) + self.blob_writer_overwrite_files = kwargs.get('blob_writer_overwrite_files', None) + self.blob_writer_date_time_format = kwargs.get('blob_writer_date_time_format', None) + self.blob_writer_add_header = kwargs.get('blob_writer_add_header', None) + self.copy_behavior = kwargs.get('copy_behavior', None) + self.type = 'BlobSink' + + +class BlobSource(CopySource): + """A copy activity Azure Blob source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param treat_empty_as_null: Treat empty as null. Type: boolean (or + Expression with resultType boolean). + :type treat_empty_as_null: object + :param skip_header_line_count: Number of header lines to skip from each + blob. Type: integer (or Expression with resultType integer). 
+ :type skip_header_line_count: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, + 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(BlobSource, self).__init__(**kwargs) + self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None) + self.skip_header_line_count = kwargs.get('skip_header_line_count', None) + self.recursive = kwargs.get('recursive', None) + self.type = 'BlobSource' + + +class BlobTrigger(MultiplePipelineTrigger): + """Trigger that runs every time the selected Blob container changes. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param pipelines: Pipelines that need to be started. + :type pipelines: + list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :param folder_path: Required. The path of the container/folder that will + trigger the pipeline. + :type folder_path: str + :param max_concurrency: Required. The max number of parallel files to + handle when it is triggered. + :type max_concurrency: int + :param linked_service: Required. The Azure Storage linked service + reference. 
+ :type linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'folder_path': {'required': True}, + 'max_concurrency': {'required': True}, + 'linked_service': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'str'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + 'linked_service': {'key': 'typeProperties.linkedService', 'type': 'LinkedServiceReference'}, + } + + def __init__(self, **kwargs): + super(BlobTrigger, self).__init__(**kwargs) + self.folder_path = kwargs.get('folder_path', None) + self.max_concurrency = kwargs.get('max_concurrency', None) + self.linked_service = kwargs.get('linked_service', None) + self.type = 'BlobTrigger' + + +class CassandraLinkedService(LinkedService): + """Linked service for Cassandra data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. Host name for connection. Type: string (or + Expression with resultType string). + :type host: object + :param authentication_type: AuthenticationType to be used for connection. + Type: string (or Expression with resultType string). + :type authentication_type: object + :param port: The port for the connection. Type: integer (or Expression + with resultType integer). + :type port: object + :param username: Username for authentication. Type: string (or Expression + with resultType string). + :type username: object + :param password: Password for authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CassandraLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.port = kwargs.get('port', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Cassandra' + + +class CassandraSource(CopySource): + """A copy activity source for a Cassandra database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Should be a SQL-92 query expression or + Cassandra Query Language (CQL) command. Type: string (or Expression with + resultType string). + :type query: object + :param consistency_level: The consistency level specifies how many + Cassandra servers must respond to a read request before returning data to + the client application. Cassandra checks the specified number of Cassandra + servers for data to satisfy the read request. Must be one of + cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is + case-insensitive. 
Possible values include: 'ALL', 'EACH_QUORUM', 'QUORUM',
+     'LOCAL_QUORUM', 'ONE', 'TWO', 'THREE', 'LOCAL_ONE', 'SERIAL',
+     'LOCAL_SERIAL'
+    :type consistency_level: str or
+     ~azure.mgmt.datafactory.models.CassandraSourceReadConsistencyLevels
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+        'consistency_level': {'key': 'consistencyLevel', 'type': 'str'},
+    }
+
+    def __init__(self, **kwargs):
+        super(CassandraSource, self).__init__(**kwargs)
+        self.query = kwargs.get('query', None)
+        self.consistency_level = kwargs.get('consistency_level', None)
+        self.type = 'CassandraSource'
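# A usage sketch for CassandraSource: the query may be SQL-92 or CQL, and
# consistency_level takes one of the CassandraSourceReadConsistencyLevels
# values listed above. Query text is illustrative.
from azure.mgmt.datafactory.models import CassandraSource

source = CassandraSource(
    query='SELECT * FROM sales.orders',
    consistency_level='LOCAL_QUORUM',
)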
+
+
+class CassandraTableDataset(Dataset):
+    """The Cassandra database dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param table_name: The table name of the Cassandra database. Type: string
+     (or Expression with resultType string).
+    :type table_name: object
+    :param keyspace: The keyspace of the Cassandra database. Type: string (or
+     Expression with resultType string).
+    :type keyspace: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+        'keyspace': {'key': 'typeProperties.keyspace', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(CassandraTableDataset, self).__init__(**kwargs)
+        self.table_name = kwargs.get('table_name', None)
+        self.keyspace = kwargs.get('keyspace', None)
+        self.type = 'CassandraTable'
+
+
+class CloudError(Model):
+    """The object that defines the structure of an Azure Data Factory error
+    response.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param code: Required. Error code.
+    :type code: str
+    :param message: Required. Error message.
+    :type message: str
+    :param target: Property name/path in request associated with error.
+    :type target: str
+    :param details: Array with additional error details.
+    :type details: list[~azure.mgmt.datafactory.models.CloudError]
+    """
+
+    _validation = {
+        'code': {'required': True},
+        'message': {'required': True},
+    }
+
+    _attribute_map = {
+        'code': {'key': 'error.code', 'type': 'str'},
+        'message': {'key': 'error.message', 'type': 'str'},
+        'target': {'key': 'error.target', 'type': 'str'},
+        'details': {'key': 'error.details', 'type': '[CloudError]'},
+    }
+
+    def __init__(self, **kwargs):
+        super(CloudError, self).__init__(**kwargs)
+        self.code = kwargs.get('code', None)
+        self.message = kwargs.get('message', None)
+        self.target = kwargs.get('target', None)
+        self.details = kwargs.get('details', None)
+
+
+class CloudErrorException(HttpOperationError):
+    """Server responded with exception of type: 'CloudError'.
+
+    :param deserialize: A deserializer
+    :param response: Server response to be deserialized.
+    """
+
+    def __init__(self, deserialize, response, *args):
+
+        super(CloudErrorException, self).__init__(deserialize, response, 'CloudError', *args)
+
+
+class CommonDataServiceForAppsEntityDataset(Dataset):
+    """The Common Data Service for Apps entity dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param entity_name: The logical name of the entity. Type: string (or + Expression with resultType string). + :type entity_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CommonDataServiceForAppsEntityDataset, self).__init__(**kwargs) + self.entity_name = kwargs.get('entity_name', None) + self.type = 'CommonDataServiceForAppsEntity' + + +class CommonDataServiceForAppsLinkedService(LinkedService): + """Common Data Service for Apps linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param deployment_type: Required. The deployment type of the Common Data + Service for Apps instance. 'Online' for Common Data Service for Apps + Online and 'OnPremisesWithIfd' for Common Data Service for Apps + on-premises with Ifd. Type: string (or Expression with resultType string). + Possible values include: 'Online', 'OnPremisesWithIfd' + :type deployment_type: str or + ~azure.mgmt.datafactory.models.DynamicsDeploymentType + :param host_name: The host name of the on-premises Common Data Service for + Apps server. The property is required for on-prem and not allowed for + online. Type: string (or Expression with resultType string). + :type host_name: object + :param port: The port of on-premises Common Data Service for Apps server. + The property is required for on-prem and not allowed for online. Default + is 443. Type: integer (or Expression with resultType integer), minimum: 0. + :type port: object + :param service_uri: The URL to the Microsoft Common Data Service for Apps + server. The property is required for on-line and not allowed for on-prem. + Type: string (or Expression with resultType string). 
+    :type service_uri: object
+    :param organization_name: The organization name of the Common Data Service
+     for Apps instance. The property is required for on-prem and required for
+     online when more than one Common Data Service for Apps instance is
+     associated with the user. Type: string (or Expression with resultType
+     string).
+    :type organization_name: object
+    :param authentication_type: Required. The authentication type to connect
+     to Common Data Service for Apps server. 'Office365' for online scenario,
+     'Ifd' for on-premises with Ifd scenario. Type: string (or Expression with
+     resultType string). Possible values include: 'Office365', 'Ifd'
+    :type authentication_type: str or
+     ~azure.mgmt.datafactory.models.DynamicsAuthenticationType
+    :param username: Required. User name to access the Common Data Service for
+     Apps instance. Type: string (or Expression with resultType string).
+    :type username: object
+    :param password: Password to access the Common Data Service for Apps
+     instance.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'deployment_type': {'required': True},
+        'authentication_type': {'required': True},
+        'username': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'},
+        'host_name': {'key': 'typeProperties.hostName', 'type': 'object'},
+        'port': {'key': 'typeProperties.port', 'type': 'object'},
+        'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'},
+        'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'},
+        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+        'username': {'key': 'typeProperties.username', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(CommonDataServiceForAppsLinkedService, self).__init__(**kwargs)
+        self.deployment_type = kwargs.get('deployment_type', None)
+        self.host_name = kwargs.get('host_name', None)
+        self.port = kwargs.get('port', None)
+        self.service_uri = kwargs.get('service_uri', None)
+        self.organization_name = kwargs.get('organization_name', None)
+        self.authentication_type = kwargs.get('authentication_type', None)
+        self.username = kwargs.get('username', None)
+        self.password = kwargs.get('password', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
+        self.type = 'CommonDataServiceForApps'
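# A usage sketch for the online scenario of the linked service above:
# deployment_type, authentication_type and username are required; host_name
# and port apply only to on-premises setups. Credentials are placeholders,
# and SecureString is used here as the simplest SecretBase implementation.
from azure.mgmt.datafactory.models import (
    CommonDataServiceForAppsLinkedService, SecureString)

linked_service = CommonDataServiceForAppsLinkedService(
    deployment_type='Online',
    authentication_type='Office365',
    username='admin@contoso.com',
    password=SecureString(value='<placeholder>'),
)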
+
+
+class CommonDataServiceForAppsSink(CopySink):
+    """A copy activity Common Data Service for Apps sink.
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param write_batch_size: Write batch size. Type: integer (or Expression
+     with resultType integer), minimum: 0.
+    :type write_batch_size: object
+    :param write_batch_timeout: Write batch timeout. Type: string (or
+     Expression with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type write_batch_timeout: object
+    :param sink_retry_count: Sink retry count. Type: integer (or Expression
+     with resultType integer).
+    :type sink_retry_count: object
+    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+     resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type sink_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the sink data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :ivar write_behavior: Required. The write behavior for the operation.
+     Default value: "Upsert".
+    :vartype write_behavior: str
+    :param ignore_null_values: The flag indicating whether to ignore null
+     values from input dataset (except key fields) during write operation.
+     Default is false. Type: boolean (or Expression with resultType boolean).
+    :type ignore_null_values: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'write_behavior': {'required': True, 'constant': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
+        'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
+    }
+
+    write_behavior = "Upsert"
+
+    def __init__(self, **kwargs):
+        super(CommonDataServiceForAppsSink, self).__init__(**kwargs)
+        self.ignore_null_values = kwargs.get('ignore_null_values', None)
+        self.type = 'CommonDataServiceForAppsSink'
+
+
+class CommonDataServiceForAppsSource(CopySource):
+    """A copy activity Common Data Service for Apps source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+ :type type: str + :param query: FetchXML is a proprietary query language that is used in + Microsoft Common Data Service for Apps (online & on-premises). Type: + string (or Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CommonDataServiceForAppsSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'CommonDataServiceForAppsSource' + + +class ConcurLinkedService(LinkedService): + """Concur Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param client_id: Required. Application client_id supplied by Concur App + Management. + :type client_id: object + :param username: Required. The user name that you use to access Concur + Service. + :type username: object + :param password: The password corresponding to the user name that you + provided in the username field. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_id': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ConcurLinkedService, self).__init__(**kwargs) + self.client_id = kwargs.get('client_id', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Concur' + + +class ConcurObjectDataset(Dataset): + """Concur Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ConcurObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'ConcurObject' + + +class ConcurSource(CopySource): + """A copy activity Concur Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ConcurSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'ConcurSource' + + +class CopyActivity(ExecutionActivity): + """Copy activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. 
+ :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param source: Required. Copy activity source. + :type source: ~azure.mgmt.datafactory.models.CopySource + :param sink: Required. Copy activity sink. + :type sink: ~azure.mgmt.datafactory.models.CopySink + :param translator: Copy activity translator. If not specified, tabular + translator is used. + :type translator: object + :param enable_staging: Specifies whether to copy data via an interim + staging. Default value is false. Type: boolean (or Expression with + resultType boolean). + :type enable_staging: object + :param staging_settings: Specifies interim staging settings when + EnableStaging is true. + :type staging_settings: ~azure.mgmt.datafactory.models.StagingSettings + :param parallel_copies: Maximum number of concurrent sessions opened on + the source or sink to avoid overloading the data store. Type: integer (or + Expression with resultType integer), minimum: 0. + :type parallel_copies: object + :param data_integration_units: Maximum number of data integration units + that can be used to perform this data movement. Type: integer (or + Expression with resultType integer), minimum: 0. + :type data_integration_units: object + :param enable_skip_incompatible_row: Whether to skip incompatible row. + Default value is false. Type: boolean (or Expression with resultType + boolean). + :type enable_skip_incompatible_row: object + :param redirect_incompatible_row_settings: Redirect incompatible row + settings when EnableSkipIncompatibleRow is true. + :type redirect_incompatible_row_settings: + ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings + :param preserve_rules: Preserve Rules. + :type preserve_rules: list[object] + :param preserve: Preserve rules. + :type preserve: list[object] + :param inputs: List of inputs for the activity. + :type inputs: list[~azure.mgmt.datafactory.models.DatasetReference] + :param outputs: List of outputs for the activity. 
+ :type outputs: list[~azure.mgmt.datafactory.models.DatasetReference]
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'source': {'required': True},
+ 'sink': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+ 'source': {'key': 'typeProperties.source', 'type': 'CopySource'},
+ 'sink': {'key': 'typeProperties.sink', 'type': 'CopySink'},
+ 'translator': {'key': 'typeProperties.translator', 'type': 'object'},
+ 'enable_staging': {'key': 'typeProperties.enableStaging', 'type': 'object'},
+ 'staging_settings': {'key': 'typeProperties.stagingSettings', 'type': 'StagingSettings'},
+ 'parallel_copies': {'key': 'typeProperties.parallelCopies', 'type': 'object'},
+ 'data_integration_units': {'key': 'typeProperties.dataIntegrationUnits', 'type': 'object'},
+ 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'},
+ 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'},
+ 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'},
+ 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'},
+ 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'},
+ 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'},
+ }
+
+ def __init__(self, **kwargs):
+ super(CopyActivity, self).__init__(**kwargs)
+ self.source = kwargs.get('source', None)
+ self.sink = kwargs.get('sink', None)
+ self.translator = kwargs.get('translator', None)
+ self.enable_staging = kwargs.get('enable_staging', None)
+ self.staging_settings = kwargs.get('staging_settings', None)
+ self.parallel_copies = kwargs.get('parallel_copies', None)
+ self.data_integration_units = kwargs.get('data_integration_units', None)
+ self.enable_skip_incompatible_row = kwargs.get('enable_skip_incompatible_row', None)
+ self.redirect_incompatible_row_settings = kwargs.get('redirect_incompatible_row_settings', None)
+ self.preserve_rules = kwargs.get('preserve_rules', None)
+ self.preserve = kwargs.get('preserve', None)
+ self.inputs = kwargs.get('inputs', None)
+ self.outputs = kwargs.get('outputs', None)
+ self.type = 'Copy'
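+
+
+# Reviewer sketch (not generated code): wiring a copy activity between two
+# blob datasets. BlobSource, BlobSink and DatasetReference are sibling
+# models in this package; the activity and dataset names are placeholders.
+def _example_copy_activity():
+ from azure.mgmt.datafactory.models import (
+ BlobSink, BlobSource, DatasetReference)
+ return CopyActivity(
+ name='CopyFromBlobToBlob',
+ source=BlobSource(),
+ sink=BlobSink(),
+ inputs=[DatasetReference(reference_name='InputBlobDataset')],
+ outputs=[DatasetReference(reference_name='OutputBlobDataset')],
+ enable_staging=False,
+ )
+
+
+class CosmosDbLinkedService(LinkedService):
+ """Microsoft Azure Cosmos Database (CosmosDB) linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.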
+ :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param account_key: The Azure key vault secret reference of accountKey in + connection string. + :type account_key: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CosmosDbLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.account_key = kwargs.get('account_key', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'CosmosDb' + + +class CosmosDbMongoDbApiCollectionDataset(Dataset): + """The CosmosDB (MongoDB API) database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection: Required. The collection name of the CosmosDB (MongoDB + API) database. Type: string (or Expression with resultType string). 
+ :type collection: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ 'collection': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'collection': {'key': 'typeProperties.collection', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(CosmosDbMongoDbApiCollectionDataset, self).__init__(**kwargs)
+ self.collection = kwargs.get('collection', None)
+ self.type = 'CosmosDbMongoDbApiCollection'
+
+
+class CosmosDbMongoDbApiLinkedService(LinkedService):
+ """Linked service for CosmosDB (MongoDB API) data source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param connection_string: Required. The CosmosDB (MongoDB API) connection
+ string. Type: string, SecureString or AzureKeyVaultSecretReference.
+ :type connection_string: object
+ :param database: Required. The name of the CosmosDB (MongoDB API) database
+ that you want to access. Type: string (or Expression with resultType
+ string).
+ :type database: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'connection_string': {'required': True},
+ 'database': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+ 'database': {'key': 'typeProperties.database', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(CosmosDbMongoDbApiLinkedService, self).__init__(**kwargs)
+ self.connection_string = kwargs.get('connection_string', None)
+ self.database = kwargs.get('database', None)
+ self.type = 'CosmosDbMongoDbApi'
+
+
+class CosmosDbMongoDbApiSink(CopySink):
+ """A copy activity sink for a CosmosDB (MongoDB API) database.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param write_batch_size: Write batch size. Type: integer (or Expression
+ with resultType integer), minimum: 0.
+ :type write_batch_size: object
+ :param write_batch_timeout: Write batch timeout. Type: string (or
+ Expression with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type write_batch_timeout: object
+ :param sink_retry_count: Sink retry count. Type: integer (or Expression
+ with resultType integer).
+ :type sink_retry_count: object
+ :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+ resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type sink_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the sink data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param write_behavior: Specifies whether a document with the same key
+ should be overwritten (upsert) rather than raise an exception (insert).
+ The default value is "insert". Type: string (or Expression with resultType
+ string).
+ :type write_behavior: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+ 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+ 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(CosmosDbMongoDbApiSink, self).__init__(**kwargs)
+ self.write_behavior = kwargs.get('write_behavior', None)
+ self.type = 'CosmosDbMongoDbApiSink'
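+
+
+# Reviewer sketch (not generated code): opting in to upsert semantics, per
+# the write_behavior docstring above; the batch size is a placeholder.
+def _example_cosmos_db_mongo_db_api_sink():
+ return CosmosDbMongoDbApiSink(
+ write_behavior='upsert',
+ write_batch_size=1000,
+ )
+
+
+class CosmosDbMongoDbApiSource(CopySource):
+ """A copy activity source for a CosmosDB (MongoDB API) database.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param filter: Specifies selection filter using query operators. To return
+ all documents in a collection, omit this parameter or pass an empty
+ document ({}). Type: string (or Expression with resultType string).
+ :type filter: object
+ :param cursor_methods: Cursor methods for MongoDB query.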
+ :type cursor_methods:
+ ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties
+ :param batch_size: Specifies the number of documents to return in each
+ batch of the response from the MongoDB instance. In most cases, modifying
+ the batch size will not affect the user or the application. This
+ property's main purpose is to avoid hitting the response size limit. Type:
+ integer (or Expression with resultType integer).
+ :type batch_size: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'filter': {'key': 'filter', 'type': 'object'},
+ 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'},
+ 'batch_size': {'key': 'batchSize', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(CosmosDbMongoDbApiSource, self).__init__(**kwargs)
+ self.filter = kwargs.get('filter', None)
+ self.cursor_methods = kwargs.get('cursor_methods', None)
+ self.batch_size = kwargs.get('batch_size', None)
+ self.type = 'CosmosDbMongoDbApiSource'
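+
+
+# Reviewer sketch (not generated code): a filtered read. The filter document
+# and batch size are placeholders; cursor_methods is optional and omitted.
+def _example_cosmos_db_mongo_db_api_source():
+ return CosmosDbMongoDbApiSource(
+ filter='{"status": "active"}',
+ batch_size=100,
+ )
+
+
+class CouchbaseLinkedService(LinkedService):
+ """Couchbase server linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param connection_string: An ODBC connection string. Type: string,
+ SecureString or AzureKeyVaultSecretReference.
+ :type connection_string: object
+ :param cred_string: The Azure key vault secret reference of credString in
+ connection string.
+ :type cred_string:
+ ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).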
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'cred_string': {'key': 'typeProperties.credString', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CouchbaseLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.cred_string = kwargs.get('cred_string', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Couchbase' + + +class CouchbaseSource(CopySource): + """A copy activity Couchbase server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CouchbaseSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'CouchbaseSource' + + +class CouchbaseTableDataset(Dataset): + """Couchbase server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. 
Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CouchbaseTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'CouchbaseTable' + + +class CreateLinkedIntegrationRuntimeRequest(Model): + """The linked integration runtime information. + + :param name: The name of the linked integration runtime. + :type name: str + :param subscription_id: The ID of the subscription that the linked + integration runtime belongs to. + :type subscription_id: str + :param data_factory_name: The name of the data factory that the linked + integration runtime belongs to. + :type data_factory_name: str + :param data_factory_location: The location of the data factory that the + linked integration runtime belongs to. + :type data_factory_location: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, + 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, + 'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(CreateLinkedIntegrationRuntimeRequest, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.subscription_id = kwargs.get('subscription_id', None) + self.data_factory_name = kwargs.get('data_factory_name', None) + self.data_factory_location = kwargs.get('data_factory_location', None) + + +class CreateRunResponse(Model): + """Response body with a run identifier. + + All required parameters must be populated in order to send to Azure. + + :param run_id: Required. Identifier of a run. + :type run_id: str + """ + + _validation = { + 'run_id': {'required': True}, + } + + _attribute_map = { + 'run_id': {'key': 'runId', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(CreateRunResponse, self).__init__(**kwargs) + self.run_id = kwargs.get('run_id', None) + + +class CustomActivity(ExecutionActivity): + """Custom activity type. + + All required parameters must be populated in order to send to Azure. 
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param linked_service_name: Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param policy: Activity policy.
+ :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+ :param command: Required. Command for custom activity. Type: string (or
+ Expression with resultType string).
+ :type command: object
+ :param resource_linked_service: Resource linked service reference.
+ :type resource_linked_service:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param folder_path: Folder path for resource files. Type: string (or
+ Expression with resultType string).
+ :type folder_path: object
+ :param reference_objects: Reference objects.
+ :type reference_objects:
+ ~azure.mgmt.datafactory.models.CustomActivityReferenceObject
+ :param extended_properties: User-defined property bag. There is no
+ restriction on the keys or values that can be used. The user-specified
+ custom activity is fully responsible for consuming and interpreting the
+ content defined.
+ :type extended_properties: dict[str, object]
+ :param retention_time_in_days: The retention time for the files submitted
+ for custom activity. Type: double (or Expression with resultType double).
+ :type retention_time_in_days: object
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'command': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+ 'command': {'key': 'typeProperties.command', 'type': 'object'},
+ 'resource_linked_service': {'key': 'typeProperties.resourceLinkedService', 'type': 'LinkedServiceReference'},
+ 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'},
+ 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'},
+ 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'},
+ 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(CustomActivity, self).__init__(**kwargs)
+ self.command = kwargs.get('command', None)
+ self.resource_linked_service = kwargs.get('resource_linked_service', None)
+ self.folder_path = kwargs.get('folder_path', None)
+ self.reference_objects = kwargs.get('reference_objects', None)
+ self.extended_properties = kwargs.get('extended_properties', None)
+ self.retention_time_in_days = kwargs.get('retention_time_in_days', None)
+ self.type = 'Custom'
+
+
+class CustomActivityReferenceObject(Model):
+ """Reference objects for custom activity.
+
+ :param linked_services: Linked service references.
+ :type linked_services:
+ list[~azure.mgmt.datafactory.models.LinkedServiceReference]
+ :param datasets: Dataset references.
+ :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference]
+ """
+
+ _attribute_map = {
+ 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceReference]'},
+ 'datasets': {'key': 'datasets', 'type': '[DatasetReference]'},
+ }
+
+ def __init__(self, **kwargs):
+ super(CustomActivityReferenceObject, self).__init__(**kwargs)
+ self.linked_services = kwargs.get('linked_services', None)
+ self.datasets = kwargs.get('datasets', None)
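+
+
+# Reviewer sketch (not generated code): a custom activity carrying a shell
+# command plus an (empty) reference object bag; the command, folder path and
+# property values are placeholders.
+def _example_custom_activity():
+ return CustomActivity(
+ name='RunCustomExe',
+ command='cmd /c echo hello',
+ folder_path='customactivity/',
+ extended_properties={'environment': 'test'},
+ reference_objects=CustomActivityReferenceObject(
+ linked_services=[], datasets=[]),
+ )
+
+
+class CustomDataset(Dataset):
+ """The custom dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset. Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.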
+ :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param type_properties: Custom dataset properties. + :type type_properties: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'type_properties': {'key': 'typeProperties', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CustomDataset, self).__init__(**kwargs) + self.type_properties = kwargs.get('type_properties', None) + self.type = 'CustomDataset' + + +class CustomDataSourceLinkedService(LinkedService): + """Custom linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param type_properties: Required. Custom linked service properties. + :type type_properties: object + """ + + _validation = { + 'type': {'required': True}, + 'type_properties': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'type_properties': {'key': 'typeProperties', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CustomDataSourceLinkedService, self).__init__(**kwargs) + self.type_properties = kwargs.get('type_properties', None) + self.type = 'CustomDataSource' + + +class DatabricksNotebookActivity(ExecutionActivity): + """DatabricksNotebook activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. 
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param linked_service_name: Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param policy: Activity policy.
+ :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+ :param notebook_path: Required. The absolute path of the notebook to be
+ run in the Databricks Workspace. This path must begin with a slash. Type:
+ string (or Expression with resultType string).
+ :type notebook_path: object
+ :param base_parameters: Base parameters to be used for each run of this
+ job. If the notebook takes a parameter that is not specified, the default
+ value from the notebook will be used.
+ :type base_parameters: dict[str, object]
+ :param libraries: A list of libraries to be installed on the cluster that
+ will execute the job.
+ :type libraries: list[dict[str, object]]
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'notebook_path': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+ 'notebook_path': {'key': 'typeProperties.notebookPath', 'type': 'object'},
+ 'base_parameters': {'key': 'typeProperties.baseParameters', 'type': '{object}'},
+ 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'},
+ }
+
+ def __init__(self, **kwargs):
+ super(DatabricksNotebookActivity, self).__init__(**kwargs)
+ self.notebook_path = kwargs.get('notebook_path', None)
+ self.base_parameters = kwargs.get('base_parameters', None)
+ self.libraries = kwargs.get('libraries', None)
+ self.type = 'DatabricksNotebook'
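+
+
+# Reviewer sketch (not generated code): running a workspace notebook with
+# per-run parameters; the activity name, notebook path (which must begin
+# with a slash) and parameter values are placeholders.
+def _example_databricks_notebook_activity():
+ return DatabricksNotebookActivity(
+ name='RunNightlyNotebook',
+ notebook_path='/Shared/nightly-refresh',
+ base_parameters={'run_date': '2019-06-07'},
+ )
+
+
+class DatabricksSparkJarActivity(ExecutionActivity):
+ """DatabricksSparkJar activity.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param linked_service_name: Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param policy: Activity policy.
+ :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+ :param main_class_name: Required. The full name of the class containing
+ the main method to be executed. This class must be contained in a JAR
+ provided as a library. Type: string (or Expression with resultType
+ string).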
+ :type main_class_name: object + :param parameters: Parameters that will be passed to the main method. + :type parameters: list[object] + :param libraries: A list of libraries to be installed on the cluster that + will execute the job. + :type libraries: list[dict[str, object]] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'main_class_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'main_class_name': {'key': 'typeProperties.mainClassName', 'type': 'object'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, + 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + } + + def __init__(self, **kwargs): + super(DatabricksSparkJarActivity, self).__init__(**kwargs) + self.main_class_name = kwargs.get('main_class_name', None) + self.parameters = kwargs.get('parameters', None) + self.libraries = kwargs.get('libraries', None) + self.type = 'DatabricksSparkJar' + + +class DatabricksSparkPythonActivity(ExecutionActivity): + """DatabricksSparkPython activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param python_file: Required. The URI of the Python file to be executed. + DBFS paths are supported. Type: string (or Expression with resultType + string). + :type python_file: object + :param parameters: Command line parameters that will be passed to the + Python file. + :type parameters: list[object] + :param libraries: A list of libraries to be installed on the cluster that + will execute the job. 
+ :type libraries: list[dict[str, object]]
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'python_file': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+ 'python_file': {'key': 'typeProperties.pythonFile', 'type': 'object'},
+ 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'},
+ 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'},
+ }
+
+ def __init__(self, **kwargs):
+ super(DatabricksSparkPythonActivity, self).__init__(**kwargs)
+ self.python_file = kwargs.get('python_file', None)
+ self.parameters = kwargs.get('parameters', None)
+ self.libraries = kwargs.get('libraries', None)
+ self.type = 'DatabricksSparkPython'
+
+
+class DataLakeAnalyticsUSQLActivity(ExecutionActivity):
+ """Data Lake Analytics U-SQL activity.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param linked_service_name: Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param policy: Activity policy.
+ :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+ :param script_path: Required. Case-sensitive path to folder that contains
+ the U-SQL script. Type: string (or Expression with resultType string).
+ :type script_path: object
+ :param script_linked_service: Required. Script linked service reference.
+ :type script_linked_service:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param degree_of_parallelism: The maximum number of nodes simultaneously
+ used to run the job. Default value is 1. Type: integer (or Expression with
+ resultType integer), minimum: 1.
+ :type degree_of_parallelism: object
+ :param priority: Determines which jobs out of all that are queued should
+ be selected to run first. The lower the number, the higher the priority.
+ Default value is 1000. Type: integer (or Expression with resultType
+ integer), minimum: 1.
+ :type priority: object
+ :param parameters: Parameters for U-SQL job request.
+ :type parameters: dict[str, object]
+ :param runtime_version: Runtime version of the U-SQL engine to use. Type:
+ string (or Expression with resultType string).
+ :type runtime_version: object
+ :param compilation_mode: Compilation mode of U-SQL. Must be one of these
+ values: Semantic, Full, or SingleBox. Type: string (or Expression with
+ resultType string).
+ :type compilation_mode: object
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'script_path': {'required': True},
+ 'script_linked_service': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+ 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'},
+ 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'},
+ 'degree_of_parallelism': {'key': 'typeProperties.degreeOfParallelism', 'type': 'object'},
+ 'priority': {'key': 'typeProperties.priority', 'type': 'object'},
+ 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'},
+ 'runtime_version': {'key': 'typeProperties.runtimeVersion', 'type': 'object'},
+ 'compilation_mode': {'key': 'typeProperties.compilationMode', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(DataLakeAnalyticsUSQLActivity, self).__init__(**kwargs)
+ self.script_path = kwargs.get('script_path', None)
+ self.script_linked_service = kwargs.get('script_linked_service', None)
+ self.degree_of_parallelism = kwargs.get('degree_of_parallelism', None)
+ self.priority = kwargs.get('priority', None)
+ self.parameters = kwargs.get('parameters', None)
+ self.runtime_version = kwargs.get('runtime_version', None)
+ self.compilation_mode = kwargs.get('compilation_mode', None)
+ self.type = 'DataLakeAnalyticsU-SQL'
+
+
+class DatasetCompression(Model):
+ """The compression method used on a dataset.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: DatasetZipDeflateCompression, DatasetDeflateCompression,
+ DatasetGZipCompression, DatasetBZip2Compression
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'type': {'ZipDeflate': 'DatasetZipDeflateCompression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'BZip2': 'DatasetBZip2Compression'}
+ }
+
+ def __init__(self, **kwargs):
+ super(DatasetCompression, self).__init__(**kwargs)
+ self.additional_properties = kwargs.get('additional_properties', None)
+ self.type = None
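+
+
+# Reviewer sketch (not generated code): the _subtype_map above drives
+# polymorphic deserialization (the wire value of 'type' selects the concrete
+# class); when building a request you instantiate a subclass directly.
+# 'Optimal' is an illustrative, assumed compression-level value.
+def _example_dataset_compression():
+ from azure.mgmt.datafactory.models import DatasetGZipCompression
+ return DatasetGZipCompression(level='Optimal')
+
+
+class DatasetBZip2Compression(DatasetCompression):
+ """The BZip2 compression method used on a dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. Constant filled by server.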
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(DatasetBZip2Compression, self).__init__(**kwargs) + self.type = 'BZip2' + + +class DatasetDeflateCompression(DatasetCompression): + """The Deflate compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param level: The Deflate compression level. + :type level: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DatasetDeflateCompression, self).__init__(**kwargs) + self.level = kwargs.get('level', None) + self.type = 'Deflate' + + +class DatasetFolder(Model): + """The folder that this Dataset is in. If not specified, Dataset will appear + at the root level. + + :param name: The name of the folder that this Dataset is in. + :type name: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(DatasetFolder, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + + +class DatasetGZipCompression(DatasetCompression): + """The GZip compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param level: The GZip compression level. + :type level: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DatasetGZipCompression, self).__init__(**kwargs) + self.level = kwargs.get('level', None) + self.type = 'GZip' + + +class DatasetReference(Model): + """Dataset reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Dataset reference type. Default value: + "DatasetReference" . + :vartype type: str + :param reference_name: Required. Reference dataset name. + :type reference_name: str + :param parameters: Arguments for dataset. 
+ :type parameters: dict[str, object] + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + type = "DatasetReference" + + def __init__(self, **kwargs): + super(DatasetReference, self).__init__(**kwargs) + self.reference_name = kwargs.get('reference_name', None) + self.parameters = kwargs.get('parameters', None) + + +class SubResource(Model): + """Azure Data Factory nested resource, which belongs to a factory. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SubResource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + self.etag = None + + +class DatasetResource(SubResource): + """Dataset resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Dataset properties. + :type properties: ~azure.mgmt.datafactory.models.Dataset + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'Dataset'}, + } + + def __init__(self, **kwargs): + super(DatasetResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + + +class DatasetZipDeflateCompression(DatasetCompression): + """The ZipDeflate compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param level: The ZipDeflate compression level. 
+ :type level: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DatasetZipDeflateCompression, self).__init__(**kwargs) + self.level = kwargs.get('level', None) + self.type = 'ZipDeflate' + + +class Db2LinkedService(LinkedService): + """Linked service for DB2 data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. Server name for connection. Type: string (or + Expression with resultType string). + :type server: object + :param database: Required. Database name for connection. Type: string (or + Expression with resultType string). + :type database: object + :param authentication_type: AuthenticationType to be used for connection. + Possible values include: 'Basic' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.Db2AuthenticationType + :param username: Username for authentication. Type: string (or Expression + with resultType string). + :type username: object + :param password: Password for authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(Db2LinkedService, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.database = kwargs.get('database', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Db2' + + +class Db2Source(CopySource): + """A copy activity source for Db2 databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(Db2Source, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'Db2Source' + + +class Db2TableDataset(Dataset): + """The Db2 table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. 
Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param db2_table_dataset_schema: The Db2 schema name. Type: string (or + Expression with resultType string). + :type db2_table_dataset_schema: object + :param table: The Db2 table name. Type: string (or Expression with + resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'db2_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(Db2TableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.db2_table_dataset_schema = kwargs.get('db2_table_dataset_schema', None) + self.table = kwargs.get('table', None) + self.type = 'Db2Table' + + +class DeleteActivity(ExecutionActivity): + """Delete activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. 
+ :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+ :param recursive: If true, files or sub-folders under the current folder
+ path will be deleted recursively. Default is false. Type: boolean (or
+ Expression with resultType boolean).
+ :type recursive: object
+ :param max_concurrent_connections: The maximum number of concurrent
+ connections used to connect to the data source at the same time.
+ :type max_concurrent_connections: int
+ :param enable_logging: Whether to record detailed logs of delete-activity
+ execution. Default value is false. Type: boolean (or Expression with
+ resultType boolean).
+ :type enable_logging: object
+ :param log_storage_settings: Log storage settings the customer needs to
+ provide when enableLogging is true.
+ :type log_storage_settings:
+ ~azure.mgmt.datafactory.models.LogStorageSettings
+ :param dataset: Required. Delete activity dataset reference.
+ :type dataset: ~azure.mgmt.datafactory.models.DatasetReference
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'max_concurrent_connections': {'minimum': 1},
+ 'dataset': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+ 'recursive': {'key': 'typeProperties.recursive', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'typeProperties.maxConcurrentConnections', 'type': 'int'},
+ 'enable_logging': {'key': 'typeProperties.enableLogging', 'type': 'object'},
+ 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'},
+ 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'},
+ }
+
+ def __init__(self, **kwargs):
+ super(DeleteActivity, self).__init__(**kwargs)
+ self.recursive = kwargs.get('recursive', None)
+ self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None)
+ self.enable_logging = kwargs.get('enable_logging', None)
+ self.log_storage_settings = kwargs.get('log_storage_settings', None)
+ self.dataset = kwargs.get('dataset', None)
+ self.type = 'Delete'
+
+
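+# A minimal usage sketch (editor's illustration, not part of the generated
+# model code). It shows how a DeleteActivity is built from keyword arguments;
+# the names 'CleanUpStaging' and 'StagingFiles' are hypothetical placeholders.
+#
+#     delete_staging = DeleteActivity(
+#         name='CleanUpStaging',
+#         dataset=DatasetReference(reference_name='StagingFiles'),
+#         recursive=True,
+#         max_concurrent_connections=2,
+#     )
+#
+# On serialization, _attribute_map nests recursive and
+# maxConcurrentConnections under typeProperties in the request body.
+
+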
+class DelimitedTextDataset(Dataset):
+ """Delimited text dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset. Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param location: Required. The location of the delimited text storage.
+ :type location: ~azure.mgmt.datafactory.models.DatasetLocation
+ :param column_delimiter: The column delimiter. Type: string (or Expression
+ with resultType string).
+ :type column_delimiter: object
+ :param row_delimiter: The row delimiter. Type: string (or Expression with
+ resultType string).
+ :type row_delimiter: object
+ :param encoding_name: The code page name of the preferred encoding. If
+ missing, the default value is UTF-8, unless the BOM denotes another
+ Unicode encoding. Refer to the name column of the table in the following
+ link to set supported values:
+ https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string
+ (or Expression with resultType string).
+ :type encoding_name: object
+ :param compression_codec: The compression codec used for the delimited
+ text files.
+ :type compression_codec: object
+ :param compression_level: The data compression level used for
+ DelimitedText.
+ :type compression_level: object
+ :param quote_char: The quote character. Type: string (or Expression with
+ resultType string).
+ :type quote_char: object
+ :param escape_char: The escape character. Type: string (or Expression with
+ resultType string).
+ :type escape_char: object
+ :param first_row_as_header: When used as input, treat the first row of
+ data as headers. When used as output, write the headers into the output as
+ the first row of data. The default value is false. Type: boolean (or
+ Expression with resultType boolean).
+ :type first_row_as_header: object
+ :param null_value: The null value string. Type: string (or Expression with
+ resultType string). 
+ :type null_value: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'}, + 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, + 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, + 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'}, + 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, + 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, + 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, + 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DelimitedTextDataset, self).__init__(**kwargs) + self.location = kwargs.get('location', None) + self.column_delimiter = kwargs.get('column_delimiter', None) + self.row_delimiter = kwargs.get('row_delimiter', None) + self.encoding_name = kwargs.get('encoding_name', None) + self.compression_codec = kwargs.get('compression_codec', None) + self.compression_level = kwargs.get('compression_level', None) + self.quote_char = kwargs.get('quote_char', None) + self.escape_char = kwargs.get('escape_char', None) + self.first_row_as_header = kwargs.get('first_row_as_header', None) + self.null_value = kwargs.get('null_value', None) + self.type = 'DelimitedText' + + +class FormatReadSettings(Model): + """Format read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(FormatReadSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = kwargs.get('type', None) + + +class DelimitedTextReadSettings(FormatReadSettings): + """Delimited text read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param skip_line_count: Indicates the number of non-empty rows to skip + when reading data from input files. Type: integer (or Expression with + resultType integer). 
+ :type skip_line_count: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DelimitedTextReadSettings, self).__init__(**kwargs) + self.skip_line_count = kwargs.get('skip_line_count', None) + + +class DelimitedTextSink(CopySink): + """A copy activity DelimitedText sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :param format_settings: DelimitedText format settings. + :type format_settings: + ~azure.mgmt.datafactory.models.DelimitedTextWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'}, + } + + def __init__(self, **kwargs): + super(DelimitedTextSink, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) + self.type = 'DelimitedTextSink' + + +class DelimitedTextSource(CopySource): + """A copy activity DelimitedText source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. 
Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :param format_settings: DelimitedText format settings. + :type format_settings: + ~azure.mgmt.datafactory.models.DelimitedTextReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'}, + } + + def __init__(self, **kwargs): + super(DelimitedTextSource, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) + self.type = 'DelimitedTextSource' + + +class DelimitedTextWriteSettings(FormatWriteSettings): + """Delimited text write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param quote_all_text: Indicates whether string values should always be + enclosed with quotes. Type: boolean (or Expression with resultType + boolean). + :type quote_all_text: object + :param file_extension: Required. The file extension used to create the + files. Type: string (or Expression with resultType string). + :type file_extension: object + """ + + _validation = { + 'type': {'required': True}, + 'file_extension': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, + 'file_extension': {'key': 'fileExtension', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DelimitedTextWriteSettings, self).__init__(**kwargs) + self.quote_all_text = kwargs.get('quote_all_text', None) + self.file_extension = kwargs.get('file_extension', None) + + +class DependencyReference(Model): + """Referenced dependency. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SelfDependencyTumblingWindowTriggerReference, + TriggerDependencyReference + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. 
+ :type type: str
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'type': {'SelfDependencyTumblingWindowTriggerReference': 'SelfDependencyTumblingWindowTriggerReference', 'TriggerDependencyReference': 'TriggerDependencyReference'}
+ }
+
+ def __init__(self, **kwargs):
+ super(DependencyReference, self).__init__(**kwargs)
+ self.type = None
+
+
+class DistcpSettings(Model):
+ """Distcp settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param resource_manager_endpoint: Required. Specifies the Yarn
+ ResourceManager endpoint. Type: string (or Expression with resultType
+ string).
+ :type resource_manager_endpoint: object
+ :param temp_script_path: Required. Specifies an existing folder path which
+ will be used to store the temp Distcp command script. The script file is
+ generated by ADF and will be removed after the copy job finishes. Type:
+ string (or Expression with resultType string).
+ :type temp_script_path: object
+ :param distcp_options: Specifies the Distcp options. Type: string (or
+ Expression with resultType string).
+ :type distcp_options: object
+ """
+
+ _validation = {
+ 'resource_manager_endpoint': {'required': True},
+ 'temp_script_path': {'required': True},
+ }
+
+ _attribute_map = {
+ 'resource_manager_endpoint': {'key': 'resourceManagerEndpoint', 'type': 'object'},
+ 'temp_script_path': {'key': 'tempScriptPath', 'type': 'object'},
+ 'distcp_options': {'key': 'distcpOptions', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(DistcpSettings, self).__init__(**kwargs)
+ self.resource_manager_endpoint = kwargs.get('resource_manager_endpoint', None)
+ self.temp_script_path = kwargs.get('temp_script_path', None)
+ self.distcp_options = kwargs.get('distcp_options', None)
+
+
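+# A minimal usage sketch (editor's illustration, not part of the generated
+# model code): DistcpSettings for staging a copy through Distcp. The endpoint
+# and paths below are hypothetical placeholders.
+#
+#     distcp = DistcpSettings(
+#         resource_manager_endpoint='http://headnode:8088',
+#         temp_script_path='/tmp/adf-distcp',
+#         distcp_options='-m 10',
+#     )
+#
+# resource_manager_endpoint and temp_script_path are required per _validation;
+# distcp_options is optional.
+
+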
+class DocumentDbCollectionDataset(Dataset):
+ """Microsoft Azure Document Database Collection dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset. Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param collection_name: Required. Document Database collection name. Type:
+ string (or Expression with resultType string).
+ :type collection_name: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ 'collection_name': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(DocumentDbCollectionDataset, self).__init__(**kwargs)
+ self.collection_name = kwargs.get('collection_name', None)
+ self.type = 'DocumentDbCollection'
+
+
+class DocumentDbCollectionSink(CopySink):
+ """A copy activity Document Database Collection sink.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param write_batch_size: Write batch size. Type: integer (or Expression
+ with resultType integer), minimum: 0.
+ :type write_batch_size: object
+ :param write_batch_timeout: Write batch timeout. Type: string (or
+ Expression with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type write_batch_timeout: object
+ :param sink_retry_count: Sink retry count. Type: integer (or Expression
+ with resultType integer).
+ :type sink_retry_count: object
+ :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+ resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type sink_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the sink data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param nesting_separator: Nested properties separator. Default is . (dot).
+ Type: string (or Expression with resultType string).
+ :type nesting_separator: object
+ :param write_behavior: Describes how to write data to Azure Cosmos DB.
+ Allowed values: insert and upsert. 
+ :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DocumentDbCollectionSink, self).__init__(**kwargs) + self.nesting_separator = kwargs.get('nesting_separator', None) + self.write_behavior = kwargs.get('write_behavior', None) + self.type = 'DocumentDbCollectionSink' + + +class DocumentDbCollectionSource(CopySource): + """A copy activity Document Database Collection source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Documents query. Type: string (or Expression with resultType + string). + :type query: object + :param nesting_separator: Nested properties separator. Type: string (or + Expression with resultType string). + :type nesting_separator: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DocumentDbCollectionSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.nesting_separator = kwargs.get('nesting_separator', None) + self.type = 'DocumentDbCollectionSource' + + +class DrillLinkedService(LinkedService): + """Drill server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DrillLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.pwd = kwargs.get('pwd', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Drill' + + +class DrillSource(CopySource): + """A copy activity Drill server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'query': {'key': 'query', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(DrillSource, self).__init__(**kwargs)
+ self.query = kwargs.get('query', None)
+ self.type = 'DrillSource'
+
+
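+# A minimal usage sketch (editor's illustration, not part of the generated
+# model code): a DrillSource with an inline query. The query text is a
+# hypothetical placeholder.
+#
+#     drill_source = DrillSource(
+#         query='SELECT * FROM dfs.`/data/orders` LIMIT 100',
+#         source_retry_count=3,
+#     )
+
+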
+class DrillTableDataset(Dataset):
+ """Drill server dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset. Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param table_name: This property will be retired. Please consider using
+ schema + table properties instead.
+ :type table_name: object
+ :param table: The table name of the Drill database. Type: string (or
+ Expression with resultType string).
+ :type table: object
+ :param drill_table_dataset_schema: The schema name of the Drill database.
+ Type: string (or Expression with resultType string).
+ :type drill_table_dataset_schema: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+ 'table': {'key': 'typeProperties.table', 'type': 'object'},
+ 'drill_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(DrillTableDataset, self).__init__(**kwargs)
+ self.table_name = kwargs.get('table_name', None)
+ self.table = kwargs.get('table', None)
+ self.drill_table_dataset_schema = kwargs.get('drill_table_dataset_schema', None)
+ self.type = 'DrillTable'
+
+
+class DynamicsAXLinkedService(LinkedService):
+ """Dynamics AX linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param url: Required. The Dynamics AX (or Dynamics 365 Finance and
+ Operations) instance OData endpoint.
+ :type url: object
+ :param service_principal_id: Required. Specify the application's client
+ ID. Type: string (or Expression with resultType string).
+ :type service_principal_id: object
+ :param service_principal_key: Required. Specify the application's key.
+ Mark this field as a SecureString to store it securely in Data Factory, or
+ reference a secret stored in Azure Key Vault. Type: string (or Expression
+ with resultType string).
+ :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
+ :param tenant: Required. Specify the tenant information (domain name or
+ tenant ID) under which your application resides. Retrieve it by hovering
+ the mouse over the top-right corner of the Azure portal. Type: string (or
+ Expression with resultType string).
+ :type tenant: object
+ :param aad_resource_id: Required. Specify the resource you are requesting
+ authorization for. Type: string (or Expression with resultType string).
+ :type aad_resource_id: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + 'tenant': {'required': True}, + 'aad_resource_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DynamicsAXLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.aad_resource_id = kwargs.get('aad_resource_id', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'DynamicsAX' + + +class DynamicsAXResourceDataset(Dataset): + """The path of the Dynamics AX OData entity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param path: Required. The path of the Dynamics AX OData entity. Type: + string (or Expression with resultType string). 
+ :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DynamicsAXResourceDataset, self).__init__(**kwargs) + self.path = kwargs.get('path', None) + self.type = 'DynamicsAXResource' + + +class DynamicsAXSource(CopySource): + """A copy activity Dynamics AX source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DynamicsAXSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'DynamicsAXSource' + + +class DynamicsCrmEntityDataset(Dataset): + """The Dynamics CRM entity dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param entity_name: The logical name of the entity. Type: string (or
+ Expression with resultType string).
+ :type entity_name: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(DynamicsCrmEntityDataset, self).__init__(**kwargs)
+ self.entity_name = kwargs.get('entity_name', None)
+ self.type = 'DynamicsCrmEntity'
+
+
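+# A minimal usage sketch (editor's illustration, not part of the generated
+# model code): a DynamicsCrmEntityDataset bound to a hypothetical linked
+# service named 'MyDynamicsCrm'.
+#
+#     accounts = DynamicsCrmEntityDataset(
+#         linked_service_name=LinkedServiceReference(
+#             reference_name='MyDynamicsCrm'),
+#         entity_name='account',
+#     )
+
+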
+ :type service_uri: object
+ :param organization_name: The organization name of the Dynamics CRM
+ instance. The property is required for on-prem and required for online
+ when more than one Dynamics CRM instance is associated with the
+ user. Type: string (or Expression with resultType string).
+ :type organization_name: object
+ :param authentication_type: Required. The authentication type to connect
+ to Dynamics CRM server. 'Office365' for online scenario, 'Ifd' for
+ on-premises with Ifd scenario. Type: string (or Expression with resultType
+ string). Possible values include: 'Office365', 'Ifd'
+ :type authentication_type: str or
+ ~azure.mgmt.datafactory.models.DynamicsAuthenticationType
+ :param username: Required. User name to access the Dynamics CRM instance.
+ Type: string (or Expression with resultType string).
+ :type username: object
+ :param password: Password to access the Dynamics CRM instance.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'deployment_type': {'required': True},
+ 'authentication_type': {'required': True},
+ 'username': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'},
+ 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'},
+ 'port': {'key': 'typeProperties.port', 'type': 'object'},
+ 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'},
+ 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'},
+ 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+ 'username': {'key': 'typeProperties.username', 'type': 'object'},
+ 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(DynamicsCrmLinkedService, self).__init__(**kwargs)
+ self.deployment_type = kwargs.get('deployment_type', None)
+ self.host_name = kwargs.get('host_name', None)
+ self.port = kwargs.get('port', None)
+ self.service_uri = kwargs.get('service_uri', None)
+ self.organization_name = kwargs.get('organization_name', None)
+ self.authentication_type = kwargs.get('authentication_type', None)
+ self.username = kwargs.get('username', None)
+ self.password = kwargs.get('password', None)
+ self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.type = 'DynamicsCrm'
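+
+
+# Editor's illustrative sketch, not generated code: constructing a Dynamics
+# CRM (online) linked service. All argument values are hypothetical
+# placeholders, and SecureString is assumed to be available from this same
+# models package.
+def _example_dynamics_crm_linked_service():
+ return DynamicsCrmLinkedService(
+ deployment_type='Online',
+ service_uri='https://contoso.crm.dynamics.com',
+ authentication_type='Office365',
+ username='user@contoso.com',
+ password=SecureString(value='<password>'),
+ )
+
+
+class DynamicsCrmSink(CopySink):
+ """A copy activity Dynamics CRM sink.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ All required parameters must be populated in order to send to Azure.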
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :ivar write_behavior: Required. The write behavior for the operation. + Default value: "Upsert" . + :vartype write_behavior: str + :param ignore_null_values: The flag indicating whether to ignore null + values from input dataset (except key fields) during write operation. + Default is false. Type: boolean (or Expression with resultType boolean). + :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + 'write_behavior': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + write_behavior = "Upsert" + + def __init__(self, **kwargs): + super(DynamicsCrmSink, self).__init__(**kwargs) + self.ignore_null_values = kwargs.get('ignore_null_values', None) + self.type = 'DynamicsCrmSink' + + +class DynamicsCrmSource(CopySource): + """A copy activity Dynamics CRM source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: FetchXML is a proprietary query language that is used in + Microsoft Dynamics CRM (online & on-premises). 
Type: string (or Expression + with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DynamicsCrmSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'DynamicsCrmSource' + + +class DynamicsEntityDataset(Dataset): + """The Dynamics entity dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param entity_name: The logical name of the entity. Type: string (or + Expression with resultType string). + :type entity_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DynamicsEntityDataset, self).__init__(**kwargs) + self.entity_name = kwargs.get('entity_name', None) + self.type = 'DynamicsEntity' + + +class DynamicsLinkedService(LinkedService): + """Dynamics linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param deployment_type: Required. The deployment type of the Dynamics
+ instance. 'Online' for Dynamics Online and 'OnPremisesWithIfd' for
+ Dynamics on-premises with Ifd. Type: string (or Expression with resultType
+ string).
+ :type deployment_type: object
+ :param host_name: The host name of the on-premises Dynamics server. The
+ property is required for on-prem and not allowed for online. Type: string
+ (or Expression with resultType string).
+ :type host_name: object
+ :param port: The port of on-premises Dynamics server. The property is
+ required for on-prem and not allowed for online. Default is 443. Type:
+ integer (or Expression with resultType integer), minimum: 0.
+ :type port: object
+ :param service_uri: The URL to the Microsoft Dynamics server. The property
+ is required for on-line and not allowed for on-prem. Type: string (or
+ Expression with resultType string).
+ :type service_uri: object
+ :param organization_name: The organization name of the Dynamics instance.
+ The property is required for on-prem and required for online when
+ more than one Dynamics instance is associated with the user. Type:
+ string (or Expression with resultType string).
+ :type organization_name: object
+ :param authentication_type: Required. The authentication type to connect
+ to Dynamics server. 'Office365' for online scenario, 'Ifd' for on-premises
+ with Ifd scenario. Type: string (or Expression with resultType string).
+ :type authentication_type: object
+ :param username: Required. User name to access the Dynamics instance.
+ Type: string (or Expression with resultType string).
+ :type username: object
+ :param password: Password to access the Dynamics instance.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'deployment_type': {'required': True},
+ 'authentication_type': {'required': True},
+ 'username': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'},
+ 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'},
+ 'port': {'key': 'typeProperties.port', 'type': 'object'},
+ 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'},
+ 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'},
+ 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'},
+ 'username': {'key': 'typeProperties.username', 'type': 'object'},
+ 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(DynamicsLinkedService, self).__init__(**kwargs)
+ self.deployment_type = kwargs.get('deployment_type', None)
+ self.host_name = kwargs.get('host_name', None)
+ self.port = kwargs.get('port', None)
+ self.service_uri = kwargs.get('service_uri', None)
+ self.organization_name = kwargs.get('organization_name', None)
+ self.authentication_type = kwargs.get('authentication_type', None)
+ self.username = kwargs.get('username', None)
+ self.password = kwargs.get('password', None)
+ self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.type = 'Dynamics'
+
+
+class DynamicsSink(CopySink):
+ """A copy activity Dynamics sink.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param write_batch_size: Write batch size. Type: integer (or Expression
+ with resultType integer), minimum: 0.
+ :type write_batch_size: object
+ :param write_batch_timeout: Write batch timeout. Type: string (or
+ Expression with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type write_batch_timeout: object
+ :param sink_retry_count: Sink retry count. Type: integer (or Expression
+ with resultType integer).
+ :type sink_retry_count: object
+ :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+ resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type sink_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the sink data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :ivar write_behavior: Required. The write behavior for the operation.
+ Default value: "Upsert" .
+ :vartype write_behavior: str
+ :param ignore_null_values: The flag indicating whether to ignore null
+ values from input dataset (except key fields) during write operation.
Default is + false. Type: boolean (or Expression with resultType boolean). + :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + 'write_behavior': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + write_behavior = "Upsert" + + def __init__(self, **kwargs): + super(DynamicsSink, self).__init__(**kwargs) + self.ignore_null_values = kwargs.get('ignore_null_values', None) + self.type = 'DynamicsSink' + + +class DynamicsSource(CopySource): + """A copy activity Dynamics source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: FetchXML is a proprietary query language that is used in + Microsoft Dynamics (online & on-premises). Type: string (or Expression + with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DynamicsSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'DynamicsSource' + + +class EloquaLinkedService(LinkedService): + """Eloqua server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. 
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param endpoint: Required. The endpoint of the Eloqua server. (e.g.
+ eloqua.example.com)
+ :type endpoint: object
+ :param username: Required. The site name and user name of your Eloqua
+ account in the form: sitename/username. (e.g. Eloqua/Alice)
+ :type username: object
+ :param password: The password corresponding to the user name.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param use_encrypted_endpoints: Specifies whether the data source
+ endpoints are encrypted using HTTPS. The default value is true.
+ :type use_encrypted_endpoints: object
+ :param use_host_verification: Specifies whether to require the host name
+ in the server's certificate to match the host name of the server when
+ connecting over SSL. The default value is true.
+ :type use_host_verification: object
+ :param use_peer_verification: Specifies whether to verify the identity of
+ the server when connecting over SSL. The default value is true.
+ :type use_peer_verification: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'endpoint': {'required': True},
+ 'username': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'},
+ 'username': {'key': 'typeProperties.username', 'type': 'object'},
+ 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+ 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
+ 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
+ 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(EloquaLinkedService, self).__init__(**kwargs)
+ self.endpoint = kwargs.get('endpoint', None)
+ self.username = kwargs.get('username', None)
+ self.password = kwargs.get('password', None)
+ self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None)
+ self.use_host_verification = kwargs.get('use_host_verification', None)
+ self.use_peer_verification = kwargs.get('use_peer_verification', None)
+ self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.type = 'Eloqua'
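+
+
+# Editor's illustrative sketch, not generated code: a minimal Eloqua linked
+# service. The endpoint, account, and password values are hypothetical, and
+# SecureString is assumed to come from this same models package.
+def _example_eloqua_linked_service():
+ return EloquaLinkedService(
+ endpoint='eloqua.example.com',
+ username='Eloqua/Alice',
+ password=SecureString(value='<password>'),
+ )
+
+
+class EloquaObjectDataset(Dataset):
+ """Eloqua server dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.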
+ :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(EloquaObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'EloquaObject' + + +class EloquaSource(CopySource): + """A copy activity Eloqua server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(EloquaSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'EloquaSource' + + +class EntityReference(Model): + """The entity reference. + + :param type: The type of this referenced entity. Possible values include: + 'IntegrationRuntimeReference', 'LinkedServiceReference' + :type type: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType + :param reference_name: The name of this referenced entity. + :type reference_name: str + """ + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(EntityReference, self).__init__(**kwargs) + self.type = kwargs.get('type', None) + self.reference_name = kwargs.get('reference_name', None) + + +class ExecutePipelineActivity(ControlActivity): + """Execute pipeline activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param pipeline: Required. Pipeline reference. + :type pipeline: ~azure.mgmt.datafactory.models.PipelineReference + :param parameters: Pipeline parameters. + :type parameters: dict[str, object] + :param wait_on_completion: Defines whether activity execution will wait + for the dependent pipeline execution to finish. Default is false. 
+ :type wait_on_completion: bool
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'pipeline': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'pipeline': {'key': 'typeProperties.pipeline', 'type': 'PipelineReference'},
+ 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'},
+ 'wait_on_completion': {'key': 'typeProperties.waitOnCompletion', 'type': 'bool'},
+ }
+
+ def __init__(self, **kwargs):
+ super(ExecutePipelineActivity, self).__init__(**kwargs)
+ self.pipeline = kwargs.get('pipeline', None)
+ self.parameters = kwargs.get('parameters', None)
+ self.wait_on_completion = kwargs.get('wait_on_completion', None)
+ self.type = 'ExecutePipeline'
+
+
+class ExecuteSSISPackageActivity(ExecutionActivity):
+ """Execute SSIS package activity.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param linked_service_name: Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param policy: Activity policy.
+ :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+ :param package_location: Required. SSIS package location.
+ :type package_location: ~azure.mgmt.datafactory.models.SSISPackageLocation
+ :param runtime: Specifies the runtime to execute the SSIS package. The
+ value should be "x86" or "x64". Type: string (or Expression with
+ resultType string).
+ :type runtime: object
+ :param logging_level: The logging level of SSIS package execution. Type:
+ string (or Expression with resultType string).
+ :type logging_level: object
+ :param environment_path: The environment path to execute the SSIS package.
+ Type: string (or Expression with resultType string).
+ :type environment_path: object
+ :param execution_credential: The package execution credential.
+ :type execution_credential:
+ ~azure.mgmt.datafactory.models.SSISExecutionCredential
+ :param connect_via: Required. The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param project_parameters: The project level parameters to execute the
+ SSIS package.
+ :type project_parameters: dict[str,
+ ~azure.mgmt.datafactory.models.SSISExecutionParameter]
+ :param package_parameters: The package level parameters to execute the
+ SSIS package.
+ :type package_parameters: dict[str,
+ ~azure.mgmt.datafactory.models.SSISExecutionParameter]
+ :param project_connection_managers: The project level connection managers
+ to execute the SSIS package.
+ :type project_connection_managers: dict[str, dict[str,
+ ~azure.mgmt.datafactory.models.SSISExecutionParameter]]
+ :param package_connection_managers: The package level connection managers
+ to execute the SSIS package.
+ :type package_connection_managers: dict[str, dict[str,
+ ~azure.mgmt.datafactory.models.SSISExecutionParameter]]
+ :param property_overrides: The property overrides to execute the SSIS
+ package.
+ :type property_overrides: dict[str,
+ ~azure.mgmt.datafactory.models.SSISPropertyOverride]
+ :param log_location: SSIS package execution log location.
+ :type log_location: ~azure.mgmt.datafactory.models.SSISLogLocation
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'package_location': {'required': True},
+ 'connect_via': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+ 'package_location': {'key': 'typeProperties.packageLocation', 'type': 'SSISPackageLocation'},
+ 'runtime': {'key': 'typeProperties.runtime', 'type': 'object'},
+ 'logging_level': {'key': 'typeProperties.loggingLevel', 'type': 'object'},
+ 'environment_path': {'key': 'typeProperties.environmentPath', 'type': 'object'},
+ 'execution_credential': {'key': 'typeProperties.executionCredential', 'type': 'SSISExecutionCredential'},
+ 'connect_via': {'key': 'typeProperties.connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'project_parameters': {'key': 'typeProperties.projectParameters', 'type': '{SSISExecutionParameter}'},
+ 'package_parameters': {'key': 'typeProperties.packageParameters', 'type': '{SSISExecutionParameter}'},
+ 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{{SSISExecutionParameter}}'},
+ 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{{SSISExecutionParameter}}'},
+ 'property_overrides': {'key': 'typeProperties.propertyOverrides', 'type': '{SSISPropertyOverride}'},
+ 'log_location': {'key': 'typeProperties.logLocation', 'type': 'SSISLogLocation'},
+ }
+
+ def __init__(self, **kwargs):
+ super(ExecuteSSISPackageActivity, self).__init__(**kwargs)
+ self.package_location = kwargs.get('package_location', None)
+ self.runtime = kwargs.get('runtime', None)
+ self.logging_level = kwargs.get('logging_level', None)
+ self.environment_path = kwargs.get('environment_path', None)
+ self.execution_credential = kwargs.get('execution_credential', None)
+ self.connect_via = kwargs.get('connect_via', None)
+ self.project_parameters = kwargs.get('project_parameters', None)
+ self.package_parameters = kwargs.get('package_parameters', None)
+ self.project_connection_managers = kwargs.get('project_connection_managers', None)
+ self.package_connection_managers = kwargs.get('package_connection_managers', None)
+ self.property_overrides = kwargs.get('property_overrides', None)
+ self.log_location = kwargs.get('log_location', None)
+ self.type = 'ExecuteSSISPackage'
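+
+
+# Editor's illustrative sketch, not generated code: a minimal SSIS package
+# execution activity. The activity name, package path, and integration
+# runtime name are hypothetical; SSISPackageLocation and
+# IntegrationRuntimeReference are assumed to come from this models package.
+def _example_execute_ssis_package_activity():
+ return ExecuteSSISPackageActivity(
+ name='RunMyPackage',
+ package_location=SSISPackageLocation(package_path='MyFolder/MyProject/MyPackage.dtsx'),
+ connect_via=IntegrationRuntimeReference(reference_name='MySsisIr'),
+ runtime='x64',
+ )
+
+
+class ExposureControlRequest(Model):
+ """The exposure control request.
+
+ :param feature_name: The feature name.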
+ :type feature_name: str + :param feature_type: The feature type. + :type feature_type: str + """ + + _attribute_map = { + 'feature_name': {'key': 'featureName', 'type': 'str'}, + 'feature_type': {'key': 'featureType', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ExposureControlRequest, self).__init__(**kwargs) + self.feature_name = kwargs.get('feature_name', None) + self.feature_type = kwargs.get('feature_type', None) + + +class ExposureControlResponse(Model): + """The exposure control response. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar feature_name: The feature name. + :vartype feature_name: str + :ivar value: The feature value. + :vartype value: str + """ + + _validation = { + 'feature_name': {'readonly': True}, + 'value': {'readonly': True}, + } + + _attribute_map = { + 'feature_name': {'key': 'featureName', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ExposureControlResponse, self).__init__(**kwargs) + self.feature_name = None + self.value = None + + +class Expression(Model): + """Azure Data Factory expression definition. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Expression type. Default value: "Expression" . + :vartype type: str + :param value: Required. Expression value. + :type value: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + type = "Expression" + + def __init__(self, **kwargs): + super(Expression, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + + +class Resource(Model): + """Azure Data Factory top-level resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :param location: The resource location. + :type location: str + :param tags: The resource tags. + :type tags: dict[str, str] + :ivar e_tag: Etag identifies change in the resource. + :vartype e_tag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'e_tag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'e_tag': {'key': 'eTag', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(Resource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + self.location = kwargs.get('location', None) + self.tags = kwargs.get('tags', None) + self.e_tag = None + + +class Factory(Resource): + """Factory resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :param location: The resource location. + :type location: str + :param tags: The resource tags. 
+ :type tags: dict[str, str] + :ivar e_tag: Etag identifies change in the resource. + :vartype e_tag: str + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param identity: Managed service identity of the factory. + :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity + :ivar provisioning_state: Factory provisioning state, example Succeeded. + :vartype provisioning_state: str + :ivar create_time: Time the factory was created in ISO8601 format. + :vartype create_time: datetime + :ivar version: Version of the factory. + :vartype version: str + :param repo_configuration: Git repo information of the factory. + :type repo_configuration: + ~azure.mgmt.datafactory.models.FactoryRepoConfiguration + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'e_tag': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'create_time': {'readonly': True}, + 'version': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'e_tag': {'key': 'eTag', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'create_time': {'key': 'properties.createTime', 'type': 'iso-8601'}, + 'version': {'key': 'properties.version', 'type': 'str'}, + 'repo_configuration': {'key': 'properties.repoConfiguration', 'type': 'FactoryRepoConfiguration'}, + } + + def __init__(self, **kwargs): + super(Factory, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.identity = kwargs.get('identity', None) + self.provisioning_state = None + self.create_time = None + self.version = None + self.repo_configuration = kwargs.get('repo_configuration', None) + + +class FactoryRepoConfiguration(Model): + """Factory's git repo information. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: FactoryVSTSConfiguration, FactoryGitHubConfiguration + + All required parameters must be populated in order to send to Azure. + + :param account_name: Required. Account name. + :type account_name: str + :param repository_name: Required. Repository name. + :type repository_name: str + :param collaboration_branch: Required. Collaboration branch. + :type collaboration_branch: str + :param root_folder: Required. Root folder. + :type root_folder: str + :param last_commit_id: Last commit id. + :type last_commit_id: str + :param type: Required. Constant filled by server. 
+ :type type: str
+ """
+
+ _validation = {
+ 'account_name': {'required': True},
+ 'repository_name': {'required': True},
+ 'collaboration_branch': {'required': True},
+ 'root_folder': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'account_name': {'key': 'accountName', 'type': 'str'},
+ 'repository_name': {'key': 'repositoryName', 'type': 'str'},
+ 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'},
+ 'root_folder': {'key': 'rootFolder', 'type': 'str'},
+ 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'type': {'FactoryVSTSConfiguration': 'FactoryVSTSConfiguration', 'FactoryGitHubConfiguration': 'FactoryGitHubConfiguration'}
+ }
+
+ def __init__(self, **kwargs):
+ super(FactoryRepoConfiguration, self).__init__(**kwargs)
+ self.account_name = kwargs.get('account_name', None)
+ self.repository_name = kwargs.get('repository_name', None)
+ self.collaboration_branch = kwargs.get('collaboration_branch', None)
+ self.root_folder = kwargs.get('root_folder', None)
+ self.last_commit_id = kwargs.get('last_commit_id', None)
+ self.type = None
+
+
+class FactoryGitHubConfiguration(FactoryRepoConfiguration):
+ """Factory's GitHub repo information.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param account_name: Required. Account name.
+ :type account_name: str
+ :param repository_name: Required. Repository name.
+ :type repository_name: str
+ :param collaboration_branch: Required. Collaboration branch.
+ :type collaboration_branch: str
+ :param root_folder: Required. Root folder.
+ :type root_folder: str
+ :param last_commit_id: Last commit id.
+ :type last_commit_id: str
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param host_name: GitHub Enterprise host name. For example:
+ https://github.mydomain.com
+ :type host_name: str
+ """
+
+ _validation = {
+ 'account_name': {'required': True},
+ 'repository_name': {'required': True},
+ 'collaboration_branch': {'required': True},
+ 'root_folder': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'account_name': {'key': 'accountName', 'type': 'str'},
+ 'repository_name': {'key': 'repositoryName', 'type': 'str'},
+ 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'},
+ 'root_folder': {'key': 'rootFolder', 'type': 'str'},
+ 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'host_name': {'key': 'hostName', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(FactoryGitHubConfiguration, self).__init__(**kwargs)
+ self.host_name = kwargs.get('host_name', None)
+ self.type = 'FactoryGitHubConfiguration'
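+
+
+# Editor's illustrative sketch, not generated code: attaching a GitHub repo
+# configuration (one of the FactoryRepoConfiguration sub-classes) to a
+# factory. The account, repository, and host values are hypothetical.
+def _example_factory_with_github_repo():
+ repo = FactoryGitHubConfiguration(
+ account_name='contoso',
+ repository_name='adf-pipelines',
+ collaboration_branch='master',
+ root_folder='/',
+ host_name='https://github.mydomain.com',
+ )
+ return Factory(location='eastus', repo_configuration=repo)
+
+
+class FactoryIdentity(Model):
+ """Identity properties of the factory resource.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar type: Required. The identity type. Currently the only supported type
+ is 'SystemAssigned'. Default value: "SystemAssigned" .
+ :vartype type: str
+ :ivar principal_id: The principal id of the identity.
+ :vartype principal_id: str
+ :ivar tenant_id: The client tenant id of the identity.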
+ :vartype tenant_id: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + } + + type = "SystemAssigned" + + def __init__(self, **kwargs): + super(FactoryIdentity, self).__init__(**kwargs) + self.principal_id = None + self.tenant_id = None + + +class FactoryRepoUpdate(Model): + """Factory's git repo information. + + :param factory_resource_id: The factory resource id. + :type factory_resource_id: str + :param repo_configuration: Git repo information of the factory. + :type repo_configuration: + ~azure.mgmt.datafactory.models.FactoryRepoConfiguration + """ + + _attribute_map = { + 'factory_resource_id': {'key': 'factoryResourceId', 'type': 'str'}, + 'repo_configuration': {'key': 'repoConfiguration', 'type': 'FactoryRepoConfiguration'}, + } + + def __init__(self, **kwargs): + super(FactoryRepoUpdate, self).__init__(**kwargs) + self.factory_resource_id = kwargs.get('factory_resource_id', None) + self.repo_configuration = kwargs.get('repo_configuration', None) + + +class FactoryUpdateParameters(Model): + """Parameters for updating a factory resource. + + :param tags: The resource tags. + :type tags: dict[str, str] + :param identity: Managed service identity of the factory. + :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity + """ + + _attribute_map = { + 'tags': {'key': 'tags', 'type': '{str}'}, + 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, + } + + def __init__(self, **kwargs): + super(FactoryUpdateParameters, self).__init__(**kwargs) + self.tags = kwargs.get('tags', None) + self.identity = kwargs.get('identity', None) + + +class FactoryVSTSConfiguration(FactoryRepoConfiguration): + """Factory's VSTS repo information. + + All required parameters must be populated in order to send to Azure. + + :param account_name: Required. Account name. + :type account_name: str + :param repository_name: Required. Repository name. + :type repository_name: str + :param collaboration_branch: Required. Collaboration branch. + :type collaboration_branch: str + :param root_folder: Required. Root folder. + :type root_folder: str + :param last_commit_id: Last commit id. + :type last_commit_id: str + :param type: Required. Constant filled by server. + :type type: str + :param project_name: Required. VSTS project name. + :type project_name: str + :param tenant_id: VSTS tenant id. 
+ :type tenant_id: str
+ """
+
+ _validation = {
+ 'account_name': {'required': True},
+ 'repository_name': {'required': True},
+ 'collaboration_branch': {'required': True},
+ 'root_folder': {'required': True},
+ 'type': {'required': True},
+ 'project_name': {'required': True},
+ }
+
+ _attribute_map = {
+ 'account_name': {'key': 'accountName', 'type': 'str'},
+ 'repository_name': {'key': 'repositoryName', 'type': 'str'},
+ 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'},
+ 'root_folder': {'key': 'rootFolder', 'type': 'str'},
+ 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'project_name': {'key': 'projectName', 'type': 'str'},
+ 'tenant_id': {'key': 'tenantId', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(FactoryVSTSConfiguration, self).__init__(**kwargs)
+ self.project_name = kwargs.get('project_name', None)
+ self.tenant_id = kwargs.get('tenant_id', None)
+ self.type = 'FactoryVSTSConfiguration'
+
+
+class FileServerLinkedService(LinkedService):
+ """File system linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param host: Required. Host name of the server. Type: string (or
+ Expression with resultType string).
+ :type host: object
+ :param user_id: User ID used to log on to the server. Type: string (or
+ Expression with resultType string).
+ :type user_id: object
+ :param password: Password used to log on to the server.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'host': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'host': {'key': 'typeProperties.host', 'type': 'object'},
+ 'user_id': {'key': 'typeProperties.userId', 'type': 'object'},
+ 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(FileServerLinkedService, self).__init__(**kwargs)
+ self.host = kwargs.get('host', None)
+ self.user_id = kwargs.get('user_id', None)
+ self.password = kwargs.get('password', None)
+ self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.type = 'FileServer'
+
+
+class FileServerLocation(DatasetLocation):
+ """The location of file server dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. Type of dataset storage location.
+ :type type: str
+ :param folder_path: Specify the folder path of dataset. Type: string (or
+ Expression with resultType string).
+ :type folder_path: object
+ :param file_name: Specify the file name of dataset. Type: string (or
+ Expression with resultType string).
+ :type file_name: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'folder_path': {'key': 'folderPath', 'type': 'object'},
+ 'file_name': {'key': 'fileName', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(FileServerLocation, self).__init__(**kwargs)
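+
+
+# Editor's illustrative sketch, not generated code: a file server linked
+# service. The host, user, and password values are hypothetical, and
+# SecureString is assumed to come from this same models package.
+def _example_file_server_linked_service():
+ return FileServerLinkedService(
+ host=r'\\fileserver01\share',
+ user_id=r'CONTOSO\svc_adf',
+ password=SecureString(value='<password>'),
+ )
+
+
+class FileServerReadSettings(StoreReadSettings):
+ """File server read settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. The read setting type.
+ :type type: str
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param recursive: If true, files under the folder path will be read
+ recursively. Default is true. Type: boolean (or Expression with resultType
+ boolean).
+ :type recursive: object
+ :param wildcard_folder_path: FileServer wildcardFolderPath. Type: string
+ (or Expression with resultType string).
+ :type wildcard_folder_path: object
+ :param wildcard_file_name: FileServer wildcardFileName. Type: string (or
+ Expression with resultType string).
+ :type wildcard_file_name: object
+ :param enable_partition_discovery: Indicates whether to enable partition
+ discovery.
+ :type enable_partition_discovery: bool
+ :param modified_datetime_start: The start of file's modified datetime.
+ Type: string (or Expression with resultType string).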
+ :type modified_datetime_start: object
+ :param modified_datetime_end: The end of file's modified datetime. Type:
+ string (or Expression with resultType string).
+ :type modified_datetime_end: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'recursive': {'key': 'recursive', 'type': 'object'},
+ 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
+ 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
+ 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'},
+ 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'},
+ 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(FileServerReadSettings, self).__init__(**kwargs)
+ self.recursive = kwargs.get('recursive', None)
+ self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None)
+ self.wildcard_file_name = kwargs.get('wildcard_file_name', None)
+ self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None)
+ self.modified_datetime_start = kwargs.get('modified_datetime_start', None)
+ self.modified_datetime_end = kwargs.get('modified_datetime_end', None)
+
+
+class FileServerWriteSettings(StoreWriteSettings):
+ """File server write settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. The write setting type.
+ :type type: str
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the sink data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param copy_behavior: The type of copy behavior for copy sink.
+ :type copy_behavior: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(FileServerWriteSettings, self).__init__(**kwargs)
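+
+
+# Editor's illustrative sketch, not generated code: read settings that pick
+# up CSV drops recursively via wildcards. The type name and the folder and
+# file patterns shown are hypothetical.
+def _example_file_server_read_settings():
+ return FileServerReadSettings(
+ type='FileServerReadSettings',
+ recursive=True,
+ wildcard_folder_path='incoming/2019/*',
+ wildcard_file_name='*.csv',
+ )
+
+
+class FileShareDataset(Dataset):
+ """An on-premises file system dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset. Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.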
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param folder_path: The path of the on-premises file system. Type: string
+ (or Expression with resultType string).
+ :type folder_path: object
+ :param file_name: The name of the file on the on-premises file system.
+ Type: string (or Expression with resultType string).
+ :type file_name: object
+ :param modified_datetime_start: The start of file's modified datetime.
+ Type: string (or Expression with resultType string).
+ :type modified_datetime_start: object
+ :param modified_datetime_end: The end of file's modified datetime. Type:
+ string (or Expression with resultType string).
+ :type modified_datetime_end: object
+ :param format: The format of the files.
+ :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat
+ :param file_filter: Specify a filter to be used to select a subset of
+ files in the folderPath rather than all files. Type: string (or Expression
+ with resultType string).
+ :type file_filter: object
+ :param compression: The data compression method used for the file system.
+ :type compression: ~azure.mgmt.datafactory.models.DatasetCompression
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'},
+ 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'},
+ 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'},
+ 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'},
+ 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'},
+ 'file_filter': {'key': 'typeProperties.fileFilter', 'type': 'object'},
+ 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'},
+ }
+
+ def __init__(self, **kwargs):
+ super(FileShareDataset, self).__init__(**kwargs)
+ self.folder_path = kwargs.get('folder_path', None)
+ self.file_name = kwargs.get('file_name', None)
+ self.modified_datetime_start = kwargs.get('modified_datetime_start', None)
+ self.modified_datetime_end = kwargs.get('modified_datetime_end', None)
+ self.format = kwargs.get('format', None)
+ self.file_filter = kwargs.get('file_filter', None)
+ self.compression = kwargs.get('compression', None)
+ self.type = 'FileShare'
+
+
+class FileSystemSink(CopySink):
+ """A copy activity file system sink.
+
+ All required parameters must be populated in order to send to Azure.
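A hypothetical sketch of the FileShareDataset just defined, pointing at a previously registered file server linked service; the reference name and paths are invented.

from azure.mgmt.datafactory.models import (
    FileShareDataset, LinkedServiceReference)

file_share_ds = FileShareDataset(
    linked_service_name=LinkedServiceReference(
        reference_name='FileServerLinkedService1'),  # required
    folder_path='exports/daily',
    file_filter='*.csv',   # narrows folderPath to a subset of files
    modified_datetime_start='2019-06-01T00:00:00Z')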
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(FileSystemSink, self).__init__(**kwargs) + self.copy_behavior = kwargs.get('copy_behavior', None) + self.type = 'FileSystemSink' + + +class FileSystemSource(CopySource): + """A copy activity file system source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). 
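A minimal sketch of the FileSystemSink above; 'PreserveHierarchy' is assumed to be an accepted copy behavior value on the service side, and the numbers are illustrative.

from azure.mgmt.datafactory.models import FileSystemSink

fs_sink = FileSystemSink(
    copy_behavior='PreserveHierarchy',
    write_batch_size=10000,
    max_concurrent_connections=4)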
+ :type recursive: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'recursive': {'key': 'recursive', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(FileSystemSource, self).__init__(**kwargs)
+ self.recursive = kwargs.get('recursive', None)
+ self.type = 'FileSystemSource'
+
+
+class FilterActivity(ControlActivity):
+ """Filter and return results from the input array based on the given
+ conditions.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param items: Required. Input array on which filter should be applied.
+ :type items: ~azure.mgmt.datafactory.models.Expression
+ :param condition: Required. Condition to be used for filtering the input.
+ :type condition: ~azure.mgmt.datafactory.models.Expression
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'items': {'required': True},
+ 'condition': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'items': {'key': 'typeProperties.items', 'type': 'Expression'},
+ 'condition': {'key': 'typeProperties.condition', 'type': 'Expression'},
+ }
+
+ def __init__(self, **kwargs):
+ super(FilterActivity, self).__init__(**kwargs)
+ self.items = kwargs.get('items', None)
+ self.condition = kwargs.get('condition', None)
+ self.type = 'Filter'
+
+
+class ForEachActivity(ControlActivity):
+ """This activity is used for iterating over a collection and executing
+ the given activities.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
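A sketch of the FilterActivity just defined, assuming a hypothetical pipeline parameter 'files' whose items carry a 'size' field; all names are illustrative.

from azure.mgmt.datafactory.models import Expression, FilterActivity

filter_activity = FilterActivity(
    name='FilterNonEmpty',                                   # required
    items=Expression(value='@pipeline().parameters.files'),  # required
    condition=Expression(value='@greater(item().size, 0)'))  # required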
+ :type type: str
+ :param is_sequential: Should the loop be executed in sequence or in
+ parallel (max 50).
+ :type is_sequential: bool
+ :param batch_count: Batch count to be used for controlling the number of
+ parallel executions (when isSequential is set to false).
+ :type batch_count: int
+ :param items: Required. Collection to iterate.
+ :type items: ~azure.mgmt.datafactory.models.Expression
+ :param activities: Required. List of activities to execute.
+ :type activities: list[~azure.mgmt.datafactory.models.Activity]
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'batch_count': {'maximum': 50},
+ 'items': {'required': True},
+ 'activities': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'is_sequential': {'key': 'typeProperties.isSequential', 'type': 'bool'},
+ 'batch_count': {'key': 'typeProperties.batchCount', 'type': 'int'},
+ 'items': {'key': 'typeProperties.items', 'type': 'Expression'},
+ 'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'},
+ }
+
+ def __init__(self, **kwargs):
+ super(ForEachActivity, self).__init__(**kwargs)
+ self.is_sequential = kwargs.get('is_sequential', None)
+ self.batch_count = kwargs.get('batch_count', None)
+ self.items = kwargs.get('items', None)
+ self.activities = kwargs.get('activities', None)
+ self.type = 'ForEach'
+
+
+class FtpReadSettings(StoreReadSettings):
+ """FTP read settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. The read setting type.
+ :type type: str
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param recursive: If true, files under the folder path will be read
+ recursively. Default is true. Type: boolean (or Expression with resultType
+ boolean).
+ :type recursive: object
+ :param wildcard_folder_path: FTP wildcardFolderPath. Type: string (or
+ Expression with resultType string).
+ :type wildcard_folder_path: object
+ :param wildcard_file_name: FTP wildcardFileName. Type: string (or
+ Expression with resultType string).
+ :type wildcard_file_name: object
+ :param use_binary_transfer: Specify whether to use binary transfer mode
+ for FTP stores.
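A sketch of the ForEachActivity above, consuming the filter's output in parallel. WaitActivity, assumed to be defined elsewhere in this module, stands in for a real inner activity; batch_count is validated against the maximum of 50.

from azure.mgmt.datafactory.models import (
    Expression, ForEachActivity, WaitActivity)

for_each = ForEachActivity(
    name='ForEachFile',
    items=Expression(value="@activity('FilterNonEmpty').output.value"),
    is_sequential=False,   # fan out instead of looping one by one
    batch_count=20,        # must not exceed 50
    activities=[WaitActivity(name='Wait1', wait_time_in_seconds=1)])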
+ :type use_binary_transfer: bool
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'recursive': {'key': 'recursive', 'type': 'object'},
+ 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
+ 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
+ 'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'},
+ }
+
+ def __init__(self, **kwargs):
+ super(FtpReadSettings, self).__init__(**kwargs)
+ self.recursive = kwargs.get('recursive', None)
+ self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None)
+ self.wildcard_file_name = kwargs.get('wildcard_file_name', None)
+ self.use_binary_transfer = kwargs.get('use_binary_transfer', None)
+
+
+class FtpServerLinkedService(LinkedService):
+ """An FTP server linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param host: Required. Host name of the FTP server. Type: string (or
+ Expression with resultType string).
+ :type host: object
+ :param port: The TCP port number that the FTP server uses to listen for
+ client connections. Default value is 21. Type: integer (or Expression with
+ resultType integer), minimum: 0.
+ :type port: object
+ :param authentication_type: The authentication type to be used to connect
+ to the FTP server. Possible values include: 'Basic', 'Anonymous'
+ :type authentication_type: str or
+ ~azure.mgmt.datafactory.models.FtpAuthenticationType
+ :param user_name: User name to log on to the FTP server. Type: string
+ (or Expression with resultType string).
+ :type user_name: object
+ :param password: Password to log on to the FTP server.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ :param enable_ssl: If true, connect to the FTP server over an SSL/TLS
+ channel. Default value is true. Type: boolean (or Expression with
+ resultType boolean).
+ :type enable_ssl: object
+ :param enable_server_certificate_validation: If true, validate the FTP
+ server SSL certificate when connecting over an SSL/TLS channel. Default
+ value is true. Type: boolean (or Expression with resultType boolean).
+ :type enable_server_certificate_validation: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(FtpServerLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.port = kwargs.get('port', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None) + self.type = 'FtpServer' + + +class FtpServerLocation(DatasetLocation): + """The location of ftp server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(FtpServerLocation, self).__init__(**kwargs) + + +class GetMetadataActivity(ExecutionActivity): + """Activity to get metadata of dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. 
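A sketch of the FtpServerLinkedService above with basic authentication over SSL/TLS; the host and credentials are placeholders, and SecureString is assumed to be the SecretBase implementation from this package.

from azure.mgmt.datafactory.models import (
    FtpServerLinkedService, SecureString)

ftp_ls = FtpServerLinkedService(
    host='ftp.contoso.com',        # required
    port=21,
    authentication_type='Basic',
    user_name='ftp-user',
    password=SecureString(value='<placeholder>'),
    enable_ssl=True,
    enable_server_certificate_validation=True)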
Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param dataset: Required. GetMetadata activity dataset reference. + :type dataset: ~azure.mgmt.datafactory.models.DatasetReference + :param field_list: Fields of metadata to get from dataset. + :type field_list: list[object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'dataset': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + 'field_list': {'key': 'typeProperties.fieldList', 'type': '[object]'}, + } + + def __init__(self, **kwargs): + super(GetMetadataActivity, self).__init__(**kwargs) + self.dataset = kwargs.get('dataset', None) + self.field_list = kwargs.get('field_list', None) + self.type = 'GetMetadata' + + +class GetSsisObjectMetadataRequest(Model): + """The request payload of get SSIS object metadata. + + :param metadata_path: Metadata path. + :type metadata_path: str + """ + + _attribute_map = { + 'metadata_path': {'key': 'metadataPath', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(GetSsisObjectMetadataRequest, self).__init__(**kwargs) + self.metadata_path = kwargs.get('metadata_path', None) + + +class GitHubAccessTokenRequest(Model): + """Get GitHub access token request definition. + + All required parameters must be populated in order to send to Azure. + + :param git_hub_access_code: Required. GitHub access code. + :type git_hub_access_code: str + :param git_hub_client_id: GitHub application client ID. + :type git_hub_client_id: str + :param git_hub_access_token_base_url: Required. GitHub access token base + URL. + :type git_hub_access_token_base_url: str + """ + + _validation = { + 'git_hub_access_code': {'required': True}, + 'git_hub_access_token_base_url': {'required': True}, + } + + _attribute_map = { + 'git_hub_access_code': {'key': 'gitHubAccessCode', 'type': 'str'}, + 'git_hub_client_id': {'key': 'gitHubClientId', 'type': 'str'}, + 'git_hub_access_token_base_url': {'key': 'gitHubAccessTokenBaseUrl', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(GitHubAccessTokenRequest, self).__init__(**kwargs) + self.git_hub_access_code = kwargs.get('git_hub_access_code', None) + self.git_hub_client_id = kwargs.get('git_hub_client_id', None) + self.git_hub_access_token_base_url = kwargs.get('git_hub_access_token_base_url', None) + + +class GitHubAccessTokenResponse(Model): + """Get GitHub access token response definition. + + :param git_hub_access_token: GitHub access token. 
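A sketch of the GetMetadataActivity just defined; the dataset reference name is invented, and the field list assumes service-side metadata field names such as 'itemName' and 'lastModified'.

from azure.mgmt.datafactory.models import (
    DatasetReference, GetMetadataActivity)

get_metadata = GetMetadataActivity(
    name='GetFileMetadata',                                   # required
    dataset=DatasetReference(reference_name='FileShareDS1'),  # required
    field_list=['itemName', 'lastModified', 'size'])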
+ :type git_hub_access_token: str + """ + + _attribute_map = { + 'git_hub_access_token': {'key': 'gitHubAccessToken', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(GitHubAccessTokenResponse, self).__init__(**kwargs) + self.git_hub_access_token = kwargs.get('git_hub_access_token', None) + + +class GoogleAdWordsLinkedService(LinkedService): + """Google AdWords service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param client_customer_id: Required. The Client customer ID of the AdWords + account that you want to fetch report data for. + :type client_customer_id: object + :param developer_token: Required. The developer token associated with the + manager account that you use to grant access to the AdWords API. + :type developer_token: ~azure.mgmt.datafactory.models.SecretBase + :param authentication_type: Required. The OAuth 2.0 authentication + mechanism used for authentication. ServiceAuthentication can only be used + on self-hosted IR. Possible values include: 'ServiceAuthentication', + 'UserAuthentication' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.GoogleAdWordsAuthenticationType + :param refresh_token: The refresh token obtained from Google for + authorizing access to AdWords for UserAuthentication. + :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase + :param client_id: The client id of the google application used to acquire + the refresh token. + :type client_id: ~azure.mgmt.datafactory.models.SecretBase + :param client_secret: The client secret of the google application used to + acquire the refresh token. + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param email: The service account email ID that is used for + ServiceAuthentication and can only be used on self-hosted IR. + :type email: object + :param key_file_path: The full path to the .p12 key file that is used to + authenticate the service account email address and can only be used on + self-hosted IR. + :type key_file_path: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
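A sketch of the GitHub token-exchange payload defined above; the OAuth code and client ID are placeholders, and public GitHub is assumed for the base URL.

from azure.mgmt.datafactory.models import GitHubAccessTokenRequest

token_request = GitHubAccessTokenRequest(
    git_hub_access_code='<oauth-code>',                  # required
    git_hub_client_id='<app-client-id>',
    git_hub_access_token_base_url='https://github.com')  # required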
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_customer_id': {'required': True}, + 'developer_token': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'client_customer_id': {'key': 'typeProperties.clientCustomerID', 'type': 'object'}, + 'developer_token': {'key': 'typeProperties.developerToken', 'type': 'SecretBase'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'SecretBase'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'email': {'key': 'typeProperties.email', 'type': 'object'}, + 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(GoogleAdWordsLinkedService, self).__init__(**kwargs) + self.client_customer_id = kwargs.get('client_customer_id', None) + self.developer_token = kwargs.get('developer_token', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.refresh_token = kwargs.get('refresh_token', None) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.email = kwargs.get('email', None) + self.key_file_path = kwargs.get('key_file_path', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'GoogleAdWords' + + +class GoogleAdWordsObjectDataset(Dataset): + """Google AdWords service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. 
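A sketch of the GoogleAdWordsLinkedService above using UserAuthentication; every ID and secret is a placeholder, and SecureString again stands in for any SecretBase.

from azure.mgmt.datafactory.models import (
    GoogleAdWordsLinkedService, SecureString)

adwords_ls = GoogleAdWordsLinkedService(
    client_customer_id='123-456-7890',                    # required
    developer_token=SecureString(value='<dev-token>'),    # required
    authentication_type='UserAuthentication',             # required
    refresh_token=SecureString(value='<refresh-token>'),
    client_id=SecureString(value='<client-id>'),
    client_secret=SecureString(value='<client-secret>'))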
If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(GoogleAdWordsObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'GoogleAdWordsObject' + + +class GoogleAdWordsSource(CopySource): + """A copy activity Google AdWords service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(GoogleAdWordsSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'GoogleAdWordsSource' + + +class GoogleBigQueryLinkedService(LinkedService): + """Google BigQuery service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param project: Required. The default BigQuery project to query against. + :type project: object + :param additional_projects: A comma-separated list of public BigQuery + projects to access. + :type additional_projects: object + :param request_google_drive_scope: Whether to request access to Google + Drive. Allowing Google Drive access enables support for federated tables + that combine BigQuery data with data from Google Drive. The default value + is false. + :type request_google_drive_scope: object + :param authentication_type: Required. The OAuth 2.0 authentication + mechanism used for authentication. ServiceAuthentication can only be used + on self-hosted IR. Possible values include: 'ServiceAuthentication', + 'UserAuthentication' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.GoogleBigQueryAuthenticationType + :param refresh_token: The refresh token obtained from Google for + authorizing access to BigQuery for UserAuthentication. + :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase + :param client_id: The client id of the google application used to acquire + the refresh token. + :type client_id: ~azure.mgmt.datafactory.models.SecretBase + :param client_secret: The client secret of the google application used to + acquire the refresh token. + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param email: The service account email ID that is used for + ServiceAuthentication and can only be used on self-hosted IR. + :type email: object + :param key_file_path: The full path to the .p12 key file that is used to + authenticate the service account email address and can only be used on + self-hosted IR. + :type key_file_path: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'project': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'project': {'key': 'typeProperties.project', 'type': 'object'}, + 'additional_projects': {'key': 'typeProperties.additionalProjects', 'type': 'object'}, + 'request_google_drive_scope': {'key': 'typeProperties.requestGoogleDriveScope', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'SecretBase'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'email': {'key': 'typeProperties.email', 'type': 'object'}, + 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(GoogleBigQueryLinkedService, self).__init__(**kwargs) + self.project = kwargs.get('project', None) + self.additional_projects = kwargs.get('additional_projects', None) + self.request_google_drive_scope = kwargs.get('request_google_drive_scope', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.refresh_token = kwargs.get('refresh_token', None) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.email = kwargs.get('email', None) + self.key_file_path = kwargs.get('key_file_path', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'GoogleBigQuery' + + +class GoogleBigQueryObjectDataset(Dataset): + """Google BigQuery service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. 
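A sketch of the GoogleBigQueryLinkedService above using ServiceAuthentication, which per the docstring is limited to a self-hosted IR; the project, email and key file path are placeholders.

from azure.mgmt.datafactory.models import GoogleBigQueryLinkedService

bigquery_ls = GoogleBigQueryLinkedService(
    project='my-bq-project',                      # required
    authentication_type='ServiceAuthentication',  # required
    email='svc@my-bq-project.iam.gserviceaccount.com',
    key_file_path='C:\\keys\\bq-service-account.p12',
    use_system_trust_store=False)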
+ :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + database + table properties instead. + :type table_name: object + :param table: The table name of the Google BigQuery. Type: string (or + Expression with resultType string). + :type table: object + :param dataset: The database name of the Google BigQuery. Type: string (or + Expression with resultType string). + :type dataset: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(GoogleBigQueryObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.dataset = kwargs.get('dataset', None) + self.type = 'GoogleBigQueryObject' + + +class GoogleBigQuerySource(CopySource): + """A copy activity Google BigQuery service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
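A sketch of the BigQuery dataset just defined; per its docstring, the newer 'dataset' + 'table' pair is preferred over the retired 'table_name' property. Reference and object names are invented.

from azure.mgmt.datafactory.models import (
    GoogleBigQueryObjectDataset, LinkedServiceReference)

bigquery_ds = GoogleBigQueryObjectDataset(
    linked_service_name=LinkedServiceReference(
        reference_name='GoogleBigQueryLS1'),  # required
    dataset='analytics',     # BigQuery database (dataset) name
    table='page_views')      # BigQuery table name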
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(GoogleBigQuerySource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'GoogleBigQuerySource' + + +class GreenplumLinkedService(LinkedService): + """Greenplum Database linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(GreenplumLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.pwd = kwargs.get('pwd', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Greenplum' + + +class GreenplumSource(CopySource): + """A copy activity Greenplum Database source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). 
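A sketch of the GreenplumLinkedService above, keeping the password in Key Vault via AzureKeyVaultSecretReference; the connection string, vault reference name and secret name are placeholders.

from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference, GreenplumLinkedService,
    LinkedServiceReference)

greenplum_ls = GreenplumLinkedService(
    connection_string='HOST=gp.contoso.com;PORT=5432;DB=warehouse;UID=loader',
    pwd=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='AKVLinkedService1'),
        secret_name='greenplum-password'))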
+ :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(GreenplumSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'GreenplumSource' + + +class GreenplumTableDataset(Dataset): + """Greenplum Database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The table name of Greenplum. Type: string (or Expression + with resultType string). + :type table: object + :param greenplum_table_dataset_schema: The schema name of Greenplum. Type: + string (or Expression with resultType string). 
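A sketch of the GreenplumSource just defined; the same single 'query' property drives the AdWords, BigQuery and HBase sources in this section as well. The query text is illustrative.

from azure.mgmt.datafactory.models import GreenplumSource

greenplum_source = GreenplumSource(
    query='SELECT * FROM public.sales_fact LIMIT 10')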
+ :type greenplum_table_dataset_schema: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+ 'table': {'key': 'typeProperties.table', 'type': 'object'},
+ 'greenplum_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(GreenplumTableDataset, self).__init__(**kwargs)
+ self.table_name = kwargs.get('table_name', None)
+ self.table = kwargs.get('table', None)
+ self.greenplum_table_dataset_schema = kwargs.get('greenplum_table_dataset_schema', None)
+ self.type = 'GreenplumTable'
+
+
+class HBaseLinkedService(LinkedService):
+ """HBase server linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param host: Required. The IP address or host name of the HBase server.
+ (e.g. 192.168.222.160).
+ :type host: object
+ :param port: The TCP port that the HBase instance uses to listen for
+ client connections. The default value is 9090.
+ :type port: object
+ :param http_path: The partial URL corresponding to the HBase server.
+ (e.g. /gateway/sandbox/hbase/version).
+ :type http_path: object
+ :param authentication_type: Required. The authentication mechanism to use
+ to connect to the HBase server. Possible values include: 'Anonymous',
+ 'Basic'
+ :type authentication_type: str or
+ ~azure.mgmt.datafactory.models.HBaseAuthenticationType
+ :param username: The user name used to connect to the HBase instance.
+ :type username: object
+ :param password: The password corresponding to the user name.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param enable_ssl: Specifies whether the connections to the server are
+ encrypted using SSL. The default value is false.
+ :type enable_ssl: object
+ :param trusted_cert_path: The full path of the .pem file containing
+ trusted CA certificates for verifying the server when connecting over SSL.
+ This property can only be set when using SSL on self-hosted IR. The
+ default value is the cacerts.pem file installed with the IR.
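A sketch of the GreenplumTableDataset above; note that, per the attribute map, the schema property is surfaced in Python as greenplum_table_dataset_schema (serialized to typeProperties.schema) to avoid clashing with the base Dataset 'schema' field. Names are invented.

from azure.mgmt.datafactory.models import (
    GreenplumTableDataset, LinkedServiceReference)

greenplum_ds = GreenplumTableDataset(
    linked_service_name=LinkedServiceReference(
        reference_name='GreenplumLS1'),       # required
    greenplum_table_dataset_schema='public',  # typeProperties.schema
    table='sales_fact')                       # preferred over table_name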
+ :type trusted_cert_path: object + :param allow_host_name_cn_mismatch: Specifies whether to require a + CA-issued SSL certificate name to match the host name of the server when + connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HBaseLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.port = kwargs.get('port', None) + self.http_path = kwargs.get('http_path', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) + self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'HBase' + + +class HBaseObjectDataset(Dataset): + """HBase server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. 
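A sketch of the HBaseLinkedService above with basic authentication over SSL; the host, path and credentials are placeholders, and SecureString is assumed for the SecretBase password.

from azure.mgmt.datafactory.models import HBaseLinkedService, SecureString

hbase_ls = HBaseLinkedService(
    host='192.168.222.160',                       # required
    port=443,
    http_path='/gateway/sandbox/hbase/version',
    authentication_type='Basic',                  # required
    username='hbase-reader',
    password=SecureString(value='<placeholder>'),
    enable_ssl=True,
    allow_host_name_cn_mismatch=False,
    allow_self_signed_server_cert=False)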
Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HBaseObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'HBaseObject' + + +class HBaseSource(CopySource): + """A copy activity HBase server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HBaseSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'HBaseSource' + + +class HdfsLinkedService(LinkedService): + """Hadoop Distributed File System (HDFS) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The URL of the HDFS service endpoint, e.g. + http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with + resultType string). + :type url: object + :param authentication_type: Type of authentication used to connect to the + HDFS. Possible values are: Anonymous and Windows. Type: string (or + Expression with resultType string). + :type authentication_type: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + :param user_name: User name for Windows authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Windows authentication. 
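+
+ Example: a minimal construction sketch, assuming Windows authentication;
+ the URL, domain account and password are placeholders::
+
+     from azure.mgmt.datafactory.models import HdfsLinkedService, SecureString
+
+     hdfs_ls = HdfsLinkedService(
+         url='http://myhostname:50070/webhdfs/v1',
+         authentication_type='Windows',
+         user_name='MYDOMAIN\\myuser',
+         password=SecureString(value='<password>'))
+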
+ :type password: ~azure.mgmt.datafactory.models.SecretBase + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + } + + def __init__(self, **kwargs): + super(HdfsLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.type = 'Hdfs' + + +class HdfsLocation(DatasetLocation): + """The location of HDFS. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HdfsLocation, self).__init__(**kwargs) + + +class HdfsReadSettings(StoreReadSettings): + """HDFS read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: HDFS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. 
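+
+ Example: an illustrative sketch of wildcard-based read settings; the paths
+ are placeholders, and the explicit type string is passed because the
+ 'type' property above is required rather than constant-filled::
+
+     from azure.mgmt.datafactory.models import HdfsReadSettings
+
+     read_settings = HdfsReadSettings(
+         type='HdfsReadSettings',
+         recursive=True,
+         wildcard_folder_path='landing/2019/*',
+         wildcard_file_name='*.csv',
+         enable_partition_discovery=False)
+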
+ :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + :param distcp_settings: Specifies Distcp-related settings. + :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, + } + + def __init__(self, **kwargs): + super(HdfsReadSettings, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + self.distcp_settings = kwargs.get('distcp_settings', None) + + +class HdfsSource(CopySource): + """A copy activity HDFS source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param distcp_settings: Specifies Distcp-related settings. 
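+
+ Example: a sketch of a source that copies via DistCp, assuming the
+ DistcpSettings resource_manager_endpoint and temp_script_path properties;
+ both values are placeholders::
+
+     from azure.mgmt.datafactory.models import DistcpSettings, HdfsSource
+
+     hdfs_source = HdfsSource(
+         recursive=True,
+         distcp_settings=DistcpSettings(
+             resource_manager_endpoint='http://myresourcemanager:8088',
+             temp_script_path='/tmp/distcp'))
+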
+ :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, + } + + def __init__(self, **kwargs): + super(HdfsSource, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.distcp_settings = kwargs.get('distcp_settings', None) + self.type = 'HdfsSource' + + +class HDInsightHiveActivity(ExecutionActivity): + """HDInsight Hive activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. + :type storage_linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: 'None', + 'Always', 'Failure' + :type get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :param script_path: Script path. Type: string (or Expression with + resultType string). + :type script_path: object + :param script_linked_service: Script linked service reference. + :type script_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param defines: Allows user to specify defines for Hive job request. + :type defines: dict[str, object] + :param variables: User specified arguments under hivevar namespace. + :type variables: list[object] + :param query_timeout: Query timeout value (in minutes). 
Effective when + the HDInsight cluster is with ESP (Enterprise Security Package) + :type query_timeout: int + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, + 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + 'variables': {'key': 'typeProperties.variables', 'type': '[object]'}, + 'query_timeout': {'key': 'typeProperties.queryTimeout', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(HDInsightHiveActivity, self).__init__(**kwargs) + self.storage_linked_services = kwargs.get('storage_linked_services', None) + self.arguments = kwargs.get('arguments', None) + self.get_debug_info = kwargs.get('get_debug_info', None) + self.script_path = kwargs.get('script_path', None) + self.script_linked_service = kwargs.get('script_linked_service', None) + self.defines = kwargs.get('defines', None) + self.variables = kwargs.get('variables', None) + self.query_timeout = kwargs.get('query_timeout', None) + self.type = 'HDInsightHive' + + +class HDInsightLinkedService(LinkedService): + """HDInsight linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param cluster_uri: Required. HDInsight cluster URI. Type: string (or + Expression with resultType string). + :type cluster_uri: object + :param user_name: HDInsight cluster user name. Type: string (or Expression + with resultType string). + :type user_name: object + :param password: HDInsight cluster password. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param linked_service_name: The Azure Storage linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param hcatalog_linked_service_name: A reference to the Azure SQL linked + service that points to the HCatalog database. 
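+
+ Example: a minimal bring-your-own-cluster sketch using the ADLS Gen2
+ main-storage property introduced below (fileSystem); the cluster URI,
+ credentials and the 'AdlsGen2LinkedService' reference name are
+ placeholders::
+
+     from azure.mgmt.datafactory.models import (
+         HDInsightLinkedService, LinkedServiceReference, SecureString)
+
+     hdi_ls = HDInsightLinkedService(
+         cluster_uri='https://mycluster.azurehdinsight.net',
+         user_name='admin',
+         password=SecureString(value='<password>'),
+         linked_service_name=LinkedServiceReference(
+             reference_name='AdlsGen2LinkedService'),
+         file_system='myfilesystem')
+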
+ :type hcatalog_linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + :param is_esp_enabled: Specify if the HDInsight is created with ESP + (Enterprise Security Package). Type: Boolean. + :type is_esp_enabled: object + :param file_system: Specify the FileSystem if the main storage for the + HDInsight is ADLS Gen2. Type: string (or Expression with resultType + string). + :type file_system: object + """ + + _validation = { + 'type': {'required': True}, + 'cluster_uri': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'cluster_uri': {'key': 'typeProperties.clusterUri', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, + 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'is_esp_enabled': {'key': 'typeProperties.isEspEnabled', 'type': 'object'}, + 'file_system': {'key': 'typeProperties.fileSystem', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HDInsightLinkedService, self).__init__(**kwargs) + self.cluster_uri = kwargs.get('cluster_uri', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.linked_service_name = kwargs.get('linked_service_name', None) + self.hcatalog_linked_service_name = kwargs.get('hcatalog_linked_service_name', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.is_esp_enabled = kwargs.get('is_esp_enabled', None) + self.file_system = kwargs.get('file_system', None) + self.type = 'HDInsight' + + +class HDInsightMapReduceActivity(ExecutionActivity): + """HDInsight MapReduce activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. 
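+
+ Example: an illustrative MapReduce activity; the class name, jar path and
+ linked service reference names are placeholders::
+
+     from azure.mgmt.datafactory.models import (
+         HDInsightMapReduceActivity, LinkedServiceReference)
+
+     mr_activity = HDInsightMapReduceActivity(
+         name='WordCountJob',
+         linked_service_name=LinkedServiceReference(
+             reference_name='HDInsightLinkedService'),
+         class_name='com.example.WordCount',
+         jar_file_path='binaries/wordcount.jar',
+         jar_linked_service=LinkedServiceReference(
+             reference_name='StorageLinkedService'))
+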
+ :type storage_linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: 'None', + 'Always', 'Failure' + :type get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :param class_name: Required. Class name. Type: string (or Expression with + resultType string). + :type class_name: object + :param jar_file_path: Required. Jar path. Type: string (or Expression with + resultType string). + :type jar_file_path: object + :param jar_linked_service: Jar linked service reference. + :type jar_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param jar_libs: Jar libs. + :type jar_libs: list[object] + :param defines: Allows user to specify defines for the MapReduce job + request. + :type defines: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'class_name': {'required': True}, + 'jar_file_path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'class_name': {'key': 'typeProperties.className', 'type': 'object'}, + 'jar_file_path': {'key': 'typeProperties.jarFilePath', 'type': 'object'}, + 'jar_linked_service': {'key': 'typeProperties.jarLinkedService', 'type': 'LinkedServiceReference'}, + 'jar_libs': {'key': 'typeProperties.jarLibs', 'type': '[object]'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + } + + def __init__(self, **kwargs): + super(HDInsightMapReduceActivity, self).__init__(**kwargs) + self.storage_linked_services = kwargs.get('storage_linked_services', None) + self.arguments = kwargs.get('arguments', None) + self.get_debug_info = kwargs.get('get_debug_info', None) + self.class_name = kwargs.get('class_name', None) + self.jar_file_path = kwargs.get('jar_file_path', None) + self.jar_linked_service = kwargs.get('jar_linked_service', None) + self.jar_libs = kwargs.get('jar_libs', None) + self.defines = kwargs.get('defines', None) + self.type = 'HDInsightMapReduce' + + +class HDInsightOnDemandLinkedService(LinkedService): + """HDInsight ondemand linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param cluster_size: Required. Number of worker/data nodes in the cluster.
+ Suggested value: 4. Type: string (or Expression with resultType string).
+ :type cluster_size: object
+ :param time_to_live: Required. The allowed idle time for the on-demand
+ HDInsight cluster. Specifies how long the on-demand HDInsight cluster
+ stays alive after completion of an activity run if there are no other
+ active jobs in the cluster. The minimum value is 5 minutes. Type: string
+ (or Expression with resultType string).
+ :type time_to_live: object
+ :param version: Required. Version of the HDInsight cluster. Type: string
+ (or Expression with resultType string).
+ :type version: object
+ :param linked_service_name: Required. Azure Storage linked service to be
+ used by the on-demand cluster for storing and processing data.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param host_subscription_id: Required. The customer’s subscription to host
+ the cluster. Type: string (or Expression with resultType string).
+ :type host_subscription_id: object
+ :param service_principal_id: The service principal id for the
+ hostSubscriptionId. Type: string (or Expression with resultType string).
+ :type service_principal_id: object
+ :param service_principal_key: The key for the service principal id.
+ :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
+ :param tenant: Required. The Tenant id/name to which the service principal
+ belongs. Type: string (or Expression with resultType string).
+ :type tenant: object
+ :param cluster_resource_group: Required. The resource group where the
+ cluster belongs. Type: string (or Expression with resultType string).
+ :type cluster_resource_group: object
+ :param cluster_name_prefix: The prefix of the cluster name; a
+ timestamp-based postfix keeps the name distinct. Type: string (or
+ Expression with resultType string).
+ :type cluster_name_prefix: object
+ :param cluster_user_name: The username to access the cluster. Type: string
+ (or Expression with resultType string).
+ :type cluster_user_name: object
+ :param cluster_password: The password to access the cluster.
+ :type cluster_password: ~azure.mgmt.datafactory.models.SecretBase
+ :param cluster_ssh_user_name: The username used to connect remotely to the
+ cluster’s node via SSH (for Linux). Type: string (or Expression with
+ resultType string).
+ :type cluster_ssh_user_name: object
+ :param cluster_ssh_password: The password used to connect remotely to the
+ cluster’s node via SSH (for Linux).
+ :type cluster_ssh_password: ~azure.mgmt.datafactory.models.SecretBase
+ :param additional_linked_service_names: Specifies additional storage
+ accounts for the HDInsight linked service so that the Data Factory service
+ can register them on your behalf.
+ :type additional_linked_service_names:
+ list[~azure.mgmt.datafactory.models.LinkedServiceReference]
+ :param hcatalog_linked_service_name: The name of the Azure SQL linked
+ service that points to the HCatalog database. The on-demand HDInsight
+ cluster is created by using the Azure SQL database as the metastore.
+ :type hcatalog_linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param cluster_type: The cluster type. 
Type: string (or Expression with + resultType string). + :type cluster_type: object + :param spark_version: The version of spark if the cluster type is 'spark'. + Type: string (or Expression with resultType string). + :type spark_version: object + :param core_configuration: Specifies the core configuration parameters (as + in core-site.xml) for the HDInsight cluster to be created. + :type core_configuration: object + :param h_base_configuration: Specifies the HBase configuration parameters + (hbase-site.xml) for the HDInsight cluster. + :type h_base_configuration: object + :param hdfs_configuration: Specifies the HDFS configuration parameters + (hdfs-site.xml) for the HDInsight cluster. + :type hdfs_configuration: object + :param hive_configuration: Specifies the hive configuration parameters + (hive-site.xml) for the HDInsight cluster. + :type hive_configuration: object + :param map_reduce_configuration: Specifies the MapReduce configuration + parameters (mapred-site.xml) for the HDInsight cluster. + :type map_reduce_configuration: object + :param oozie_configuration: Specifies the Oozie configuration parameters + (oozie-site.xml) for the HDInsight cluster. + :type oozie_configuration: object + :param storm_configuration: Specifies the Storm configuration parameters + (storm-site.xml) for the HDInsight cluster. + :type storm_configuration: object + :param yarn_configuration: Specifies the Yarn configuration parameters + (yarn-site.xml) for the HDInsight cluster. + :type yarn_configuration: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + :param head_node_size: Specifies the size of the head node for the + HDInsight cluster. + :type head_node_size: object + :param data_node_size: Specifies the size of the data node for the + HDInsight cluster. + :type data_node_size: object + :param zookeeper_node_size: Specifies the size of the Zoo Keeper node for + the HDInsight cluster. + :type zookeeper_node_size: object + :param script_actions: Custom script actions to run on HDI ondemand + cluster once it's up. Please refer to + https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. + :type script_actions: list[~azure.mgmt.datafactory.models.ScriptAction] + :param virtual_network_id: The ARM resource ID for the vNet to which the + cluster should be joined after creation. Type: string (or Expression with + resultType string). + :type virtual_network_id: object + :param subnet_name: The ARM resource ID for the subnet in the vNet. If + virtualNetworkId was specified, then this property is required. Type: + string (or Expression with resultType string). 
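+
+ Example: a sketch of an on-demand cluster joined to an existing vNet via
+ the two properties above; the subscription, tenant, resource ids and
+ secrets are all placeholders::
+
+     from azure.mgmt.datafactory.models import (
+         HDInsightOnDemandLinkedService, LinkedServiceReference, SecureString)
+
+     vnet_id = ('/subscriptions/<subscription-id>/resourceGroups/my-rg'
+                '/providers/Microsoft.Network/virtualNetworks/my-vnet')
+     on_demand_ls = HDInsightOnDemandLinkedService(
+         cluster_size=4,
+         time_to_live='00:15:00',
+         version='3.6',
+         linked_service_name=LinkedServiceReference(
+             reference_name='AzureStorageLinkedService'),
+         host_subscription_id='<subscription-id>',
+         service_principal_id='<application-id>',
+         service_principal_key=SecureString(value='<key>'),
+         tenant='<tenant-id>',
+         cluster_resource_group='my-rg',
+         virtual_network_id=vnet_id,
+         subnet_name='default')
+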
+ :type subnet_name: object + """ + + _validation = { + 'type': {'required': True}, + 'cluster_size': {'required': True}, + 'time_to_live': {'required': True}, + 'version': {'required': True}, + 'linked_service_name': {'required': True}, + 'host_subscription_id': {'required': True}, + 'tenant': {'required': True}, + 'cluster_resource_group': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'cluster_size': {'key': 'typeProperties.clusterSize', 'type': 'object'}, + 'time_to_live': {'key': 'typeProperties.timeToLive', 'type': 'object'}, + 'version': {'key': 'typeProperties.version', 'type': 'object'}, + 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, + 'host_subscription_id': {'key': 'typeProperties.hostSubscriptionId', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'cluster_resource_group': {'key': 'typeProperties.clusterResourceGroup', 'type': 'object'}, + 'cluster_name_prefix': {'key': 'typeProperties.clusterNamePrefix', 'type': 'object'}, + 'cluster_user_name': {'key': 'typeProperties.clusterUserName', 'type': 'object'}, + 'cluster_password': {'key': 'typeProperties.clusterPassword', 'type': 'SecretBase'}, + 'cluster_ssh_user_name': {'key': 'typeProperties.clusterSshUserName', 'type': 'object'}, + 'cluster_ssh_password': {'key': 'typeProperties.clusterSshPassword', 'type': 'SecretBase'}, + 'additional_linked_service_names': {'key': 'typeProperties.additionalLinkedServiceNames', 'type': '[LinkedServiceReference]'}, + 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, + 'cluster_type': {'key': 'typeProperties.clusterType', 'type': 'object'}, + 'spark_version': {'key': 'typeProperties.sparkVersion', 'type': 'object'}, + 'core_configuration': {'key': 'typeProperties.coreConfiguration', 'type': 'object'}, + 'h_base_configuration': {'key': 'typeProperties.hBaseConfiguration', 'type': 'object'}, + 'hdfs_configuration': {'key': 'typeProperties.hdfsConfiguration', 'type': 'object'}, + 'hive_configuration': {'key': 'typeProperties.hiveConfiguration', 'type': 'object'}, + 'map_reduce_configuration': {'key': 'typeProperties.mapReduceConfiguration', 'type': 'object'}, + 'oozie_configuration': {'key': 'typeProperties.oozieConfiguration', 'type': 'object'}, + 'storm_configuration': {'key': 'typeProperties.stormConfiguration', 'type': 'object'}, + 'yarn_configuration': {'key': 'typeProperties.yarnConfiguration', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'head_node_size': {'key': 'typeProperties.headNodeSize', 'type': 'object'}, + 'data_node_size': {'key': 'typeProperties.dataNodeSize', 'type': 'object'}, + 'zookeeper_node_size': {'key': 'typeProperties.zookeeperNodeSize', 'type': 'object'}, + 'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'}, + 'virtual_network_id': {'key': 
'typeProperties.virtualNetworkId', 'type': 'object'}, + 'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HDInsightOnDemandLinkedService, self).__init__(**kwargs) + self.cluster_size = kwargs.get('cluster_size', None) + self.time_to_live = kwargs.get('time_to_live', None) + self.version = kwargs.get('version', None) + self.linked_service_name = kwargs.get('linked_service_name', None) + self.host_subscription_id = kwargs.get('host_subscription_id', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.cluster_resource_group = kwargs.get('cluster_resource_group', None) + self.cluster_name_prefix = kwargs.get('cluster_name_prefix', None) + self.cluster_user_name = kwargs.get('cluster_user_name', None) + self.cluster_password = kwargs.get('cluster_password', None) + self.cluster_ssh_user_name = kwargs.get('cluster_ssh_user_name', None) + self.cluster_ssh_password = kwargs.get('cluster_ssh_password', None) + self.additional_linked_service_names = kwargs.get('additional_linked_service_names', None) + self.hcatalog_linked_service_name = kwargs.get('hcatalog_linked_service_name', None) + self.cluster_type = kwargs.get('cluster_type', None) + self.spark_version = kwargs.get('spark_version', None) + self.core_configuration = kwargs.get('core_configuration', None) + self.h_base_configuration = kwargs.get('h_base_configuration', None) + self.hdfs_configuration = kwargs.get('hdfs_configuration', None) + self.hive_configuration = kwargs.get('hive_configuration', None) + self.map_reduce_configuration = kwargs.get('map_reduce_configuration', None) + self.oozie_configuration = kwargs.get('oozie_configuration', None) + self.storm_configuration = kwargs.get('storm_configuration', None) + self.yarn_configuration = kwargs.get('yarn_configuration', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.head_node_size = kwargs.get('head_node_size', None) + self.data_node_size = kwargs.get('data_node_size', None) + self.zookeeper_node_size = kwargs.get('zookeeper_node_size', None) + self.script_actions = kwargs.get('script_actions', None) + self.virtual_network_id = kwargs.get('virtual_network_id', None) + self.subnet_name = kwargs.get('subnet_name', None) + self.type = 'HDInsightOnDemand' + + +class HDInsightPigActivity(ExecutionActivity): + """HDInsight Pig activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. 
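+
+ Example: an illustrative Pig activity; the script path and linked service
+ reference names are placeholders::
+
+     from azure.mgmt.datafactory.models import (
+         HDInsightPigActivity, LinkedServiceReference)
+
+     pig_activity = HDInsightPigActivity(
+         name='TransformLogs',
+         linked_service_name=LinkedServiceReference(
+             reference_name='HDInsightLinkedService'),
+         script_path='scripts/transform.pig',
+         script_linked_service=LinkedServiceReference(
+             reference_name='StorageLinkedService'))
+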
+ :type storage_linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: 'None', + 'Always', 'Failure' + :type get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :param script_path: Script path. Type: string (or Expression with + resultType string). + :type script_path: object + :param script_linked_service: Script linked service reference. + :type script_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param defines: Allows user to specify defines for Pig job request. + :type defines: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, + 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + } + + def __init__(self, **kwargs): + super(HDInsightPigActivity, self).__init__(**kwargs) + self.storage_linked_services = kwargs.get('storage_linked_services', None) + self.arguments = kwargs.get('arguments', None) + self.get_debug_info = kwargs.get('get_debug_info', None) + self.script_path = kwargs.get('script_path', None) + self.script_linked_service = kwargs.get('script_linked_service', None) + self.defines = kwargs.get('defines', None) + self.type = 'HDInsightPig' + + +class HDInsightSparkActivity(ExecutionActivity): + """HDInsight Spark activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param root_path: Required. The root path in 'sparkJobLinkedService' for + all the job’s files. Type: string (or Expression with resultType string). 
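+
+ Example: a minimal Spark activity sketch; the root path, entry file and
+ reference names are placeholders::
+
+     from azure.mgmt.datafactory.models import (
+         HDInsightSparkActivity, LinkedServiceReference)
+
+     spark_activity = HDInsightSparkActivity(
+         name='SparkJob',
+         linked_service_name=LinkedServiceReference(
+             reference_name='HDInsightLinkedService'),
+         root_path='adfspark',
+         entry_file_path='pyFiles/main.py',
+         spark_job_linked_service=LinkedServiceReference(
+             reference_name='StorageLinkedService'),
+         arguments=['--date', '2019-06-01'])
+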
+ :type root_path: object + :param entry_file_path: Required. The relative path to the root folder of + the code/package to be executed. Type: string (or Expression with + resultType string). + :type entry_file_path: object + :param arguments: The user-specified arguments to HDInsightSparkActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: 'None', + 'Always', 'Failure' + :type get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :param spark_job_linked_service: The storage linked service for uploading + the entry file and dependencies, and for receiving logs. + :type spark_job_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param class_name: The application's Java/Spark main class. + :type class_name: str + :param proxy_user: The user to impersonate that will execute the job. + Type: string (or Expression with resultType string). + :type proxy_user: object + :param spark_config: Spark configuration property. + :type spark_config: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'root_path': {'required': True}, + 'entry_file_path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'root_path': {'key': 'typeProperties.rootPath', 'type': 'object'}, + 'entry_file_path': {'key': 'typeProperties.entryFilePath', 'type': 'object'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'spark_job_linked_service': {'key': 'typeProperties.sparkJobLinkedService', 'type': 'LinkedServiceReference'}, + 'class_name': {'key': 'typeProperties.className', 'type': 'str'}, + 'proxy_user': {'key': 'typeProperties.proxyUser', 'type': 'object'}, + 'spark_config': {'key': 'typeProperties.sparkConfig', 'type': '{object}'}, + } + + def __init__(self, **kwargs): + super(HDInsightSparkActivity, self).__init__(**kwargs) + self.root_path = kwargs.get('root_path', None) + self.entry_file_path = kwargs.get('entry_file_path', None) + self.arguments = kwargs.get('arguments', None) + self.get_debug_info = kwargs.get('get_debug_info', None) + self.spark_job_linked_service = kwargs.get('spark_job_linked_service', None) + self.class_name = kwargs.get('class_name', None) + self.proxy_user = kwargs.get('proxy_user', None) + self.spark_config = kwargs.get('spark_config', None) + self.type = 'HDInsightSpark' + + +class HDInsightStreamingActivity(ExecutionActivity): + """HDInsight streaming activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. 
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. + :type storage_linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: 'None', + 'Always', 'Failure' + :type get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :param mapper: Required. Mapper executable name. Type: string (or + Expression with resultType string). + :type mapper: object + :param reducer: Required. Reducer executable name. Type: string (or + Expression with resultType string). + :type reducer: object + :param input: Required. Input blob path. Type: string (or Expression with + resultType string). + :type input: object + :param output: Required. Output blob path. Type: string (or Expression + with resultType string). + :type output: object + :param file_paths: Required. Paths to streaming job files. Can be + directories. + :type file_paths: list[object] + :param file_linked_service: Linked service reference where the files are + located. + :type file_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param combiner: Combiner executable name. Type: string (or Expression + with resultType string). + :type combiner: object + :param command_environment: Command line environment values. + :type command_environment: list[object] + :param defines: Allows user to specify defines for streaming job request. 
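+
+ Example: an illustrative streaming activity; the executables and blob
+ paths are placeholders::
+
+     from azure.mgmt.datafactory.models import (
+         HDInsightStreamingActivity, LinkedServiceReference)
+
+     streaming_activity = HDInsightStreamingActivity(
+         name='StreamingWordCount',
+         linked_service_name=LinkedServiceReference(
+             reference_name='HDInsightLinkedService'),
+         mapper='cat.exe',
+         reducer='wc.exe',
+         input='wasb://example@account.blob.core.windows.net/input/data.txt',
+         output='wasb://example@account.blob.core.windows.net/output/wc.txt',
+         file_paths=['binaries/cat.exe', 'binaries/wc.exe'])
+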
+ :type defines: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'mapper': {'required': True}, + 'reducer': {'required': True}, + 'input': {'required': True}, + 'output': {'required': True}, + 'file_paths': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'mapper': {'key': 'typeProperties.mapper', 'type': 'object'}, + 'reducer': {'key': 'typeProperties.reducer', 'type': 'object'}, + 'input': {'key': 'typeProperties.input', 'type': 'object'}, + 'output': {'key': 'typeProperties.output', 'type': 'object'}, + 'file_paths': {'key': 'typeProperties.filePaths', 'type': '[object]'}, + 'file_linked_service': {'key': 'typeProperties.fileLinkedService', 'type': 'LinkedServiceReference'}, + 'combiner': {'key': 'typeProperties.combiner', 'type': 'object'}, + 'command_environment': {'key': 'typeProperties.commandEnvironment', 'type': '[object]'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + } + + def __init__(self, **kwargs): + super(HDInsightStreamingActivity, self).__init__(**kwargs) + self.storage_linked_services = kwargs.get('storage_linked_services', None) + self.arguments = kwargs.get('arguments', None) + self.get_debug_info = kwargs.get('get_debug_info', None) + self.mapper = kwargs.get('mapper', None) + self.reducer = kwargs.get('reducer', None) + self.input = kwargs.get('input', None) + self.output = kwargs.get('output', None) + self.file_paths = kwargs.get('file_paths', None) + self.file_linked_service = kwargs.get('file_linked_service', None) + self.combiner = kwargs.get('combiner', None) + self.command_environment = kwargs.get('command_environment', None) + self.defines = kwargs.get('defines', None) + self.type = 'HDInsightStreaming' + + +class HiveLinkedService(LinkedService): + """Hive Server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. IP address or host name of the Hive server, + separated by ';' for multiple hosts (only when serviceDiscoveryMode is + enable). 
+ :type host: object
+ :param port: The TCP port that the Hive server uses to listen for client
+ connections.
+ :type port: object
+ :param server_type: The type of Hive server. Possible values include:
+ 'HiveServer1', 'HiveServer2', 'HiveThriftServer'
+ :type server_type: str or ~azure.mgmt.datafactory.models.HiveServerType
+ :param thrift_transport_protocol: The transport protocol to use in the
+ Thrift layer. Possible values include: 'Binary', 'SASL', 'HTTP '
+ :type thrift_transport_protocol: str or
+ ~azure.mgmt.datafactory.models.HiveThriftTransportProtocol
+ :param authentication_type: Required. The authentication method used to
+ access the Hive server. Possible values include: 'Anonymous', 'Username',
+ 'UsernameAndPassword', 'WindowsAzureHDInsightService'
+ :type authentication_type: str or
+ ~azure.mgmt.datafactory.models.HiveAuthenticationType
+ :param service_discovery_mode: true indicates that the ZooKeeper service
+ is used; false indicates that it is not.
+ :type service_discovery_mode: object
+ :param zoo_keeper_name_space: The namespace on ZooKeeper under which Hive
+ Server 2 nodes are added.
+ :type zoo_keeper_name_space: object
+ :param use_native_query: Specifies whether the driver uses native HiveQL
+ queries, or converts them into an equivalent form in HiveQL.
+ :type use_native_query: object
+ :param username: The user name that you use to access Hive Server.
+ :type username: object
+ :param password: The password corresponding to the user name that you
+ provided in the Username field.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param http_path: The partial URL corresponding to the Hive server.
+ :type http_path: object
+ :param enable_ssl: Specifies whether the connections to the server are
+ encrypted using SSL. The default value is false.
+ :type enable_ssl: object
+ :param trusted_cert_path: The full path of the .pem file containing
+ trusted CA certificates for verifying the server when connecting over SSL.
+ This property can only be set when using SSL on self-hosted IR. The
+ default value is the cacerts.pem file installed with the IR.
+ :type trusted_cert_path: object
+ :param use_system_trust_store: Specifies whether to use a CA certificate
+ from the system trust store or from a specified PEM file. The default
+ value is false.
+ :type use_system_trust_store: object
+ :param allow_host_name_cn_mismatch: Specifies whether to require a
+ CA-issued SSL certificate name to match the host name of the server when
+ connecting over SSL. The default value is false.
+ :type allow_host_name_cn_mismatch: object
+ :param allow_self_signed_server_cert: Specifies whether to allow
+ self-signed certificates from the server. The default value is false.
+ :type allow_self_signed_server_cert: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string). 
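+
+ Example: a minimal construction sketch, assuming username/password
+ authentication against HiveServer2; the host, port and credentials are
+ placeholders::
+
+     from azure.mgmt.datafactory.models import HiveLinkedService, SecureString
+
+     hive_ls = HiveLinkedService(
+         host='myhiveserver.example.com',
+         port=10000,
+         server_type='HiveServer2',
+         authentication_type='UsernameAndPassword',
+         username='myuser',
+         password=SecureString(value='<password>'),
+         enable_ssl=True)
+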
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, + 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'service_discovery_mode': {'key': 'typeProperties.serviceDiscoveryMode', 'type': 'object'}, + 'zoo_keeper_name_space': {'key': 'typeProperties.zooKeeperNameSpace', 'type': 'object'}, + 'use_native_query': {'key': 'typeProperties.useNativeQuery', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HiveLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.port = kwargs.get('port', None) + self.server_type = kwargs.get('server_type', None) + self.thrift_transport_protocol = kwargs.get('thrift_transport_protocol', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.service_discovery_mode = kwargs.get('service_discovery_mode', None) + self.zoo_keeper_name_space = kwargs.get('zoo_keeper_name_space', None) + self.use_native_query = kwargs.get('use_native_query', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.http_path = kwargs.get('http_path', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) + self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Hive' + + +class HiveObjectDataset(Dataset): + """Hive Server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. 
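+
+ Example: a dataset sketch using the table and schema properties described
+ below rather than the retiring tableName; all names are placeholders::
+
+     from azure.mgmt.datafactory.models import (
+         HiveObjectDataset, LinkedServiceReference)
+
+     hive_ds = HiveObjectDataset(
+         linked_service_name=LinkedServiceReference(
+             reference_name='HiveLinkedService'),
+         hive_object_dataset_schema='default',
+         table='trips')
+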
+ :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The table name of the Hive. Type: string (or Expression with + resultType string). + :type table: object + :param hive_object_dataset_schema: The schema name of the Hive. Type: + string (or Expression with resultType string). + :type hive_object_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'hive_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HiveObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.hive_object_dataset_schema = kwargs.get('hive_object_dataset_schema', None) + self.type = 'HiveObject' + + +class HiveSource(CopySource): + """A copy activity Hive Server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
+ :type type: str
+ :param query: A query to retrieve data from source. Type: string (or
+ Expression with resultType string).
+ :type query: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'query': {'key': 'query', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(HiveSource, self).__init__(**kwargs)
+ self.query = kwargs.get('query', None)
+ self.type = 'HiveSource'
+
+
+ class HttpDataset(Dataset):
+ """A file in an HTTP web server.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset. Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param relative_url: The relative URL, based on the URL in the
+ HttpLinkedService, that refers to an HTTP file. Type: string (or Expression
+ with resultType string).
+ :type relative_url: object
+ :param request_method: The HTTP method for the HTTP request. Type: string
+ (or Expression with resultType string).
+ :type request_method: object
+ :param request_body: The body for the HTTP request. Type: string (or
+ Expression with resultType string).
+ :type request_body: object
+ :param additional_headers: The headers for the HTTP request, e.g.
+ request-header-name-1:request-header-value-1
+ ...
+ request-header-name-n:request-header-value-n. Type: string (or Expression
+ with resultType string).
+ :type additional_headers: object
+ :param format: The format of files.
+ :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat
+ :param compression: The data compression method used on files.
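+
+ For illustration only (the linked service name and URL are placeholders,
+ and LinkedServiceReference is assumed to take a reference_name keyword)::
+
+     ds = HttpDataset(
+         linked_service_name=LinkedServiceReference(reference_name='MyHttpLS'),
+         relative_url='data/sales.csv',
+         request_method='GET',
+     )
+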
+ :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, + 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, + 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, **kwargs): + super(HttpDataset, self).__init__(**kwargs) + self.relative_url = kwargs.get('relative_url', None) + self.request_method = kwargs.get('request_method', None) + self.request_body = kwargs.get('request_body', None) + self.additional_headers = kwargs.get('additional_headers', None) + self.format = kwargs.get('format', None) + self.compression = kwargs.get('compression', None) + self.type = 'HttpFile' + + +class HttpLinkedService(LinkedService): + """Linked service for an HTTP source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The base URL of the HTTP endpoint, e.g. + http://www.microsoft.com. Type: string (or Expression with resultType + string). + :type url: object + :param authentication_type: The authentication type to be used to connect + to the HTTP server. Possible values include: 'Basic', 'Anonymous', + 'Digest', 'Windows', 'ClientCertificate' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.HttpAuthenticationType + :param user_name: User name for Basic, Digest, or Windows authentication. + Type: string (or Expression with resultType string). + :type user_name: object + :param password: Password for Basic, Digest, Windows, or ClientCertificate + with EmbeddedCertData authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param embedded_cert_data: Base64 encoded certificate data for + ClientCertificate authentication. For on-premises copy with + ClientCertificate authentication, either CertThumbprint or + EmbeddedCertData/Password should be specified. 
Type: string (or Expression
+ with resultType string).
+ :type embedded_cert_data: object
+ :param cert_thumbprint: Thumbprint of certificate for ClientCertificate
+ authentication. Only valid for on-premises copy. For on-premises copy with
+ ClientCertificate authentication, either CertThumbprint or
+ EmbeddedCertData/Password should be specified. Type: string (or Expression
+ with resultType string).
+ :type cert_thumbprint: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ :param enable_server_certificate_validation: If true, validate the HTTPS
+ server SSL certificate. Default value is true. Type: boolean (or
+ Expression with resultType boolean).
+ :type enable_server_certificate_validation: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'url': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'url': {'key': 'typeProperties.url', 'type': 'object'},
+ 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+ 'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
+ 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+ 'embedded_cert_data': {'key': 'typeProperties.embeddedCertData', 'type': 'object'},
+ 'cert_thumbprint': {'key': 'typeProperties.certThumbprint', 'type': 'object'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(HttpLinkedService, self).__init__(**kwargs)
+ self.url = kwargs.get('url', None)
+ self.authentication_type = kwargs.get('authentication_type', None)
+ self.user_name = kwargs.get('user_name', None)
+ self.password = kwargs.get('password', None)
+ self.embedded_cert_data = kwargs.get('embedded_cert_data', None)
+ self.cert_thumbprint = kwargs.get('cert_thumbprint', None)
+ self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None)
+ self.type = 'HttpServer'
+
+
+ class HttpReadSettings(StoreReadSettings):
+ """Http read settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. The read setting type.
+ :type type: str
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param request_method: The HTTP method used to call the RESTful API. The
+ default is GET. Type: string (or Expression with resultType string).
+ :type request_method: object
+ :param request_body: The HTTP request body to the RESTful API if
+ requestMethod is POST.
Type: string (or Expression with resultType + string). + :type request_body: object + :param additional_headers: The additional HTTP headers in the request to + the RESTful API. Type: string (or Expression with resultType string). + :type additional_headers: object + :param request_timeout: Specifies the timeout for a HTTP client to get + HTTP response from HTTP server. + :type request_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'request_method': {'key': 'requestMethod', 'type': 'object'}, + 'request_body': {'key': 'requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, + 'request_timeout': {'key': 'requestTimeout', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HttpReadSettings, self).__init__(**kwargs) + self.request_method = kwargs.get('request_method', None) + self.request_body = kwargs.get('request_body', None) + self.additional_headers = kwargs.get('additional_headers', None) + self.request_timeout = kwargs.get('request_timeout', None) + + +class HttpServerLocation(DatasetLocation): + """The location of http server. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param relative_url: Specify the relativeUrl of http server. Type: string + (or Expression with resultType string) + :type relative_url: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'relative_url': {'key': 'relativeUrl', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HttpServerLocation, self).__init__(**kwargs) + self.relative_url = kwargs.get('relative_url', None) + + +class HttpSource(CopySource): + """A copy activity source for an HTTP file. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
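+
+ A minimal, illustrative sketch (the timeout is an arbitrary value in the
+ timespan format used throughout these models)::
+
+     source = HttpSource(http_request_timeout='00:02:00')
+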
+ :type type: str + :param http_request_timeout: Specifies the timeout for a HTTP client to + get HTTP response from HTTP server. The default value is equivalent to + System.Net.HttpWebRequest.Timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HttpSource, self).__init__(**kwargs) + self.http_request_timeout = kwargs.get('http_request_timeout', None) + self.type = 'HttpSource' + + +class HubspotLinkedService(LinkedService): + """Hubspot Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param client_id: Required. The client ID associated with your Hubspot + application. + :type client_id: object + :param client_secret: The client secret associated with your Hubspot + application. + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param access_token: The access token obtained when initially + authenticating your OAuth integration. + :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param refresh_token: The refresh token obtained when initially + authenticating your OAuth integration. + :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
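+
+ An illustrative sketch (the client id and secret are placeholders, and
+ SecureString is assumed to be the concrete SecretBase type of this
+ package)::
+
+     ls = HubspotLinkedService(
+         client_id='<hubspot-client-id>',
+         client_secret=SecureString(value='<hubspot-client-secret>'),
+     )
+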
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HubspotLinkedService, self).__init__(**kwargs) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.access_token = kwargs.get('access_token', None) + self.refresh_token = kwargs.get('refresh_token', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Hubspot' + + +class HubspotObjectDataset(Dataset): + """Hubspot Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HubspotObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'HubspotObject' + + +class HubspotSource(CopySource): + """A copy activity Hubspot Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HubspotSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'HubspotSource' + + +class IfConditionActivity(ControlActivity): + """This activity evaluates a boolean expression and executes either the + activities under the ifTrueActivities property or the ifFalseActivities + property depending on the result of the expression. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. 
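+
+ A minimal, illustrative instance (the expression string and the
+ wait_activity object are hypothetical, and Expression is assumed to take
+ its string via a value keyword)::
+
+     activity = IfConditionActivity(
+         name='CheckFlag',
+         expression=Expression(value='@bool(pipeline().parameters.flag)'),
+         if_true_activities=[wait_activity],
+     )
+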
+ :type type: str
+ :param expression: Required. An expression that would evaluate to Boolean.
+ This is used to determine the block of activities (ifTrueActivities or
+ ifFalseActivities) that will be executed.
+ :type expression: ~azure.mgmt.datafactory.models.Expression
+ :param if_true_activities: List of activities to execute if expression is
+ evaluated to true. This is an optional property and if not provided, the
+ activity will exit without any action.
+ :type if_true_activities: list[~azure.mgmt.datafactory.models.Activity]
+ :param if_false_activities: List of activities to execute if expression is
+ evaluated to false. This is an optional property and if not provided, the
+ activity will exit without any action.
+ :type if_false_activities: list[~azure.mgmt.datafactory.models.Activity]
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'expression': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'expression': {'key': 'typeProperties.expression', 'type': 'Expression'},
+ 'if_true_activities': {'key': 'typeProperties.ifTrueActivities', 'type': '[Activity]'},
+ 'if_false_activities': {'key': 'typeProperties.ifFalseActivities', 'type': '[Activity]'},
+ }
+
+ def __init__(self, **kwargs):
+ super(IfConditionActivity, self).__init__(**kwargs)
+ self.expression = kwargs.get('expression', None)
+ self.if_true_activities = kwargs.get('if_true_activities', None)
+ self.if_false_activities = kwargs.get('if_false_activities', None)
+ self.type = 'IfCondition'
+
+
+ class ImpalaLinkedService(LinkedService):
+ """Impala server linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param host: Required. The IP address or host name of the Impala server.
+ (e.g. 192.168.222.160)
+ :type host: object
+ :param port: The TCP port that the Impala server uses to listen for client
+ connections. The default value is 21050.
+ :type port: object
+ :param authentication_type: Required. The authentication type to use.
+ Possible values include: 'Anonymous', 'SASLUsername',
+ 'UsernameAndPassword'
+ :type authentication_type: str or
+ ~azure.mgmt.datafactory.models.ImpalaAuthenticationType
+ :param username: The user name used to access the Impala server. The
+ default value is anonymous when using SASLUsername.
+ :type username: object
+ :param password: The password corresponding to the user name when using
+ UsernameAndPassword.
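+
+ For illustration (the host and user name are placeholders)::
+
+     ls = ImpalaLinkedService(
+         host='192.168.222.160',
+         authentication_type='SASLUsername',
+         username='impala',
+     )
+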
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param enable_ssl: Specifies whether the connections to the server are
+ encrypted using SSL. The default value is false.
+ :type enable_ssl: object
+ :param trusted_cert_path: The full path of the .pem file containing
+ trusted CA certificates for verifying the server when connecting over SSL.
+ This property can only be set when using SSL on self-hosted IR. The
+ default value is the cacerts.pem file installed with the IR.
+ :type trusted_cert_path: object
+ :param use_system_trust_store: Specifies whether to use a CA certificate
+ from the system trust store or from a specified PEM file. The default
+ value is false.
+ :type use_system_trust_store: object
+ :param allow_host_name_cn_mismatch: Specifies whether to require a
+ CA-issued SSL certificate name to match the host name of the server when
+ connecting over SSL. The default value is false.
+ :type allow_host_name_cn_mismatch: object
+ :param allow_self_signed_server_cert: Specifies whether to allow
+ self-signed certificates from the server. The default value is false.
+ :type allow_self_signed_server_cert: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'host': {'required': True},
+ 'authentication_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'host': {'key': 'typeProperties.host', 'type': 'object'},
+ 'port': {'key': 'typeProperties.port', 'type': 'object'},
+ 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+ 'username': {'key': 'typeProperties.username', 'type': 'object'},
+ 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+ 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'},
+ 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'},
+ 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'},
+ 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'},
+ 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(ImpalaLinkedService, self).__init__(**kwargs)
+ self.host = kwargs.get('host', None)
+ self.port = kwargs.get('port', None)
+ self.authentication_type = kwargs.get('authentication_type', None)
+ self.username = kwargs.get('username', None)
+ self.password = kwargs.get('password', None)
+ self.enable_ssl = kwargs.get('enable_ssl', None)
+ self.trusted_cert_path = kwargs.get('trusted_cert_path', None)
+ self.use_system_trust_store = kwargs.get('use_system_trust_store', None)
+ self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None)
+ self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None)
+ self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.type = 'Impala'
+
+
+ class ImpalaObjectDataset(Dataset):
+ """Impala server dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset. Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param table_name: This property will be retired. Please consider using
+ schema + table properties instead.
+ :type table_name: object
+ :param table: The table name of the Impala. Type: string (or Expression
+ with resultType string).
+ :type table: object
+ :param impala_object_dataset_schema: The schema name of the Impala. Type:
+ string (or Expression with resultType string).
+ :type impala_object_dataset_schema: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+ 'table': {'key': 'typeProperties.table', 'type': 'object'},
+ 'impala_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(ImpalaObjectDataset, self).__init__(**kwargs)
+ self.table_name = kwargs.get('table_name', None)
+ self.table = kwargs.get('table', None)
+ self.impala_object_dataset_schema = kwargs.get('impala_object_dataset_schema', None)
+ self.type = 'ImpalaObject'
+
+
+ class ImpalaSource(CopySource):
+ """A copy activity Impala server source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ImpalaSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'ImpalaSource' + + +class InformixLinkedService(LinkedService): + """Informix linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The non-access credential portion of + the connection string as well as an optional encrypted credential. Type: + string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param authentication_type: Type of authentication used to connect to the + Informix as ODBC data store. Possible values are: Anonymous and Basic. + Type: string (or Expression with resultType string). + :type authentication_type: object + :param credential: The access credential portion of the connection string + specified in driver-specific property-value format. + :type credential: ~azure.mgmt.datafactory.models.SecretBase + :param user_name: User name for Basic authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
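+
+ An illustrative sketch (the connection string and credentials are
+ placeholders; SecureString is assumed as the SecretBase implementation)::
+
+     ls = InformixLinkedService(
+         connection_string='<informix-odbc-connection-string>',
+         authentication_type='Basic',
+         user_name='informix',
+         password=SecureString(value='<password>'),
+     )
+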
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(InformixLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.credential = kwargs.get('credential', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Informix' + + +class InformixSink(CopySink): + """A copy activity Informix sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). 
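+
+ For example (the table name is a placeholder)::
+
+     sink = InformixSink(pre_copy_script='DELETE FROM staging_orders')
+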
+ :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(InformixSink, self).__init__(**kwargs) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.type = 'InformixSink' + + +class InformixSource(CopySource): + """A copy activity source for Informix. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(InformixSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'InformixSource' + + +class InformixTableDataset(Dataset): + """The Informix table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
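+
+ A minimal, illustrative construction (the names are placeholders, and the
+ reference_name keyword of LinkedServiceReference is assumed)::
+
+     ds = InformixTableDataset(
+         linked_service_name=LinkedServiceReference(reference_name='MyInformixLS'),
+         table_name='CUSTOMERS',
+     )
+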
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Informix table name. Type: string (or Expression + with resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(InformixTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'InformixTable' + + +class IntegrationRuntime(Model): + """Azure Data Factory nested object which serves as a compute resource for + activities. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SelfHostedIntegrationRuntime, ManagedIntegrationRuntime + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Integration runtime description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SelfHosted': 'SelfHostedIntegrationRuntime', 'Managed': 'ManagedIntegrationRuntime'} + } + + def __init__(self, **kwargs): + super(IntegrationRuntime, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.description = kwargs.get('description', None) + self.type = None + + +class IntegrationRuntimeAuthKeys(Model): + """The integration runtime authentication keys. + + :param auth_key1: The primary integration runtime authentication key. + :type auth_key1: str + :param auth_key2: The secondary integration runtime authentication key. + :type auth_key2: str + """ + + _attribute_map = { + 'auth_key1': {'key': 'authKey1', 'type': 'str'}, + 'auth_key2': {'key': 'authKey2', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeAuthKeys, self).__init__(**kwargs) + self.auth_key1 = kwargs.get('auth_key1', None) + self.auth_key2 = kwargs.get('auth_key2', None) + + +class IntegrationRuntimeComputeProperties(Model): + """The compute resource properties for managed integration runtime. 
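+
+ A sketch of compute properties that include the vNet settings (all values
+ are placeholders, and the v_net_id and subnet keywords of
+ IntegrationRuntimeVNetProperties are assumed)::
+
+     compute = IntegrationRuntimeComputeProperties(
+         location='East US',
+         node_size='Standard_D2_v3',
+         number_of_nodes=1,
+         max_parallel_executions_per_node=4,
+         v_net_properties=IntegrationRuntimeVNetProperties(
+             v_net_id='<vnet-resource-id>',
+             subnet='<subnet-name>',
+         ),
+     )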
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param location: The location for managed integration runtime. The + supported regions could be found on + https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement-activities + :type location: str + :param node_size: The node size requirement to managed integration + runtime. + :type node_size: str + :param number_of_nodes: The required number of nodes for managed + integration runtime. + :type number_of_nodes: int + :param max_parallel_executions_per_node: Maximum parallel executions count + per node for managed integration runtime. + :type max_parallel_executions_per_node: int + :param v_net_properties: VNet properties for managed integration runtime. + :type v_net_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeVNetProperties + """ + + _validation = { + 'number_of_nodes': {'minimum': 1}, + 'max_parallel_executions_per_node': {'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'node_size': {'key': 'nodeSize', 'type': 'str'}, + 'number_of_nodes': {'key': 'numberOfNodes', 'type': 'int'}, + 'max_parallel_executions_per_node': {'key': 'maxParallelExecutionsPerNode', 'type': 'int'}, + 'v_net_properties': {'key': 'vNetProperties', 'type': 'IntegrationRuntimeVNetProperties'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeComputeProperties, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.location = kwargs.get('location', None) + self.node_size = kwargs.get('node_size', None) + self.number_of_nodes = kwargs.get('number_of_nodes', None) + self.max_parallel_executions_per_node = kwargs.get('max_parallel_executions_per_node', None) + self.v_net_properties = kwargs.get('v_net_properties', None) + + +class IntegrationRuntimeConnectionInfo(Model): + """Connection information for encrypting the on-premises data source + credentials. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar service_token: The token generated in service. Callers use this + token to authenticate to integration runtime. + :vartype service_token: str + :ivar identity_cert_thumbprint: The integration runtime SSL certificate + thumbprint. Click-Once application uses it to do server validation. + :vartype identity_cert_thumbprint: str + :ivar host_service_uri: The on-premises integration runtime host URL. + :vartype host_service_uri: str + :ivar version: The integration runtime version. + :vartype version: str + :ivar public_key: The public key for encrypting a credential when + transferring the credential to the integration runtime. + :vartype public_key: str + :ivar is_identity_cert_exprired: Whether the identity certificate is + expired. 
+ :vartype is_identity_cert_exprired: bool + """ + + _validation = { + 'service_token': {'readonly': True}, + 'identity_cert_thumbprint': {'readonly': True}, + 'host_service_uri': {'readonly': True}, + 'version': {'readonly': True}, + 'public_key': {'readonly': True}, + 'is_identity_cert_exprired': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'service_token': {'key': 'serviceToken', 'type': 'str'}, + 'identity_cert_thumbprint': {'key': 'identityCertThumbprint', 'type': 'str'}, + 'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + 'public_key': {'key': 'publicKey', 'type': 'str'}, + 'is_identity_cert_exprired': {'key': 'isIdentityCertExprired', 'type': 'bool'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeConnectionInfo, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.service_token = None + self.identity_cert_thumbprint = None + self.host_service_uri = None + self.version = None + self.public_key = None + self.is_identity_cert_exprired = None + + +class IntegrationRuntimeCustomSetupScriptProperties(Model): + """Custom setup script properties for a managed dedicated integration runtime. + + :param blob_container_uri: The URI of the Azure blob container that + contains the custom setup script. + :type blob_container_uri: str + :param sas_token: The SAS token of the Azure blob container. + :type sas_token: ~azure.mgmt.datafactory.models.SecureString + """ + + _attribute_map = { + 'blob_container_uri': {'key': 'blobContainerUri', 'type': 'str'}, + 'sas_token': {'key': 'sasToken', 'type': 'SecureString'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeCustomSetupScriptProperties, self).__init__(**kwargs) + self.blob_container_uri = kwargs.get('blob_container_uri', None) + self.sas_token = kwargs.get('sas_token', None) + + +class IntegrationRuntimeDataProxyProperties(Model): + """Data proxy properties for a managed dedicated integration runtime. + + :param connect_via: The self-hosted integration runtime reference. + :type connect_via: ~azure.mgmt.datafactory.models.EntityReference + :param staging_linked_service: The staging linked service reference. + :type staging_linked_service: + ~azure.mgmt.datafactory.models.EntityReference + :param path: The path to contain the staged data in the Blob storage. + :type path: str + """ + + _attribute_map = { + 'connect_via': {'key': 'connectVia', 'type': 'EntityReference'}, + 'staging_linked_service': {'key': 'stagingLinkedService', 'type': 'EntityReference'}, + 'path': {'key': 'path', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeDataProxyProperties, self).__init__(**kwargs) + self.connect_via = kwargs.get('connect_via', None) + self.staging_linked_service = kwargs.get('staging_linked_service', None) + self.path = kwargs.get('path', None) + + +class IntegrationRuntimeMonitoringData(Model): + """Get monitoring data response. + + :param name: Integration runtime name. + :type name: str + :param nodes: Integration runtime node monitoring data. 
+ :type nodes: + list[~azure.mgmt.datafactory.models.IntegrationRuntimeNodeMonitoringData] + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'nodes': {'key': 'nodes', 'type': '[IntegrationRuntimeNodeMonitoringData]'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeMonitoringData, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.nodes = kwargs.get('nodes', None) + + +class IntegrationRuntimeNodeIpAddress(Model): + """The IP address of self-hosted integration runtime node. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar ip_address: The IP address of self-hosted integration runtime node. + :vartype ip_address: str + """ + + _validation = { + 'ip_address': {'readonly': True}, + } + + _attribute_map = { + 'ip_address': {'key': 'ipAddress', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeNodeIpAddress, self).__init__(**kwargs) + self.ip_address = None + + +class IntegrationRuntimeNodeMonitoringData(Model): + """Monitoring data for integration runtime node. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar node_name: Name of the integration runtime node. + :vartype node_name: str + :ivar available_memory_in_mb: Available memory (MB) on the integration + runtime node. + :vartype available_memory_in_mb: int + :ivar cpu_utilization: CPU percentage on the integration runtime node. + :vartype cpu_utilization: int + :ivar concurrent_jobs_limit: Maximum concurrent jobs on the integration + runtime node. + :vartype concurrent_jobs_limit: int + :ivar concurrent_jobs_running: The number of jobs currently running on the + integration runtime node. + :vartype concurrent_jobs_running: int + :ivar max_concurrent_jobs: The maximum concurrent jobs in this integration + runtime. + :vartype max_concurrent_jobs: int + :ivar sent_bytes: Sent bytes on the integration runtime node. + :vartype sent_bytes: float + :ivar received_bytes: Received bytes on the integration runtime node. 
+ :vartype received_bytes: float + """ + + _validation = { + 'node_name': {'readonly': True}, + 'available_memory_in_mb': {'readonly': True}, + 'cpu_utilization': {'readonly': True}, + 'concurrent_jobs_limit': {'readonly': True}, + 'concurrent_jobs_running': {'readonly': True}, + 'max_concurrent_jobs': {'readonly': True}, + 'sent_bytes': {'readonly': True}, + 'received_bytes': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'node_name': {'key': 'nodeName', 'type': 'str'}, + 'available_memory_in_mb': {'key': 'availableMemoryInMB', 'type': 'int'}, + 'cpu_utilization': {'key': 'cpuUtilization', 'type': 'int'}, + 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, + 'concurrent_jobs_running': {'key': 'concurrentJobsRunning', 'type': 'int'}, + 'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'}, + 'sent_bytes': {'key': 'sentBytes', 'type': 'float'}, + 'received_bytes': {'key': 'receivedBytes', 'type': 'float'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeNodeMonitoringData, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.node_name = None + self.available_memory_in_mb = None + self.cpu_utilization = None + self.concurrent_jobs_limit = None + self.concurrent_jobs_running = None + self.max_concurrent_jobs = None + self.sent_bytes = None + self.received_bytes = None + + +class IntegrationRuntimeReference(Model): + """Integration runtime reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Type of integration runtime. Default value: + "IntegrationRuntimeReference" . + :vartype type: str + :param reference_name: Required. Reference integration runtime name. + :type reference_name: str + :param parameters: Arguments for integration runtime. + :type parameters: dict[str, object] + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + type = "IntegrationRuntimeReference" + + def __init__(self, **kwargs): + super(IntegrationRuntimeReference, self).__init__(**kwargs) + self.reference_name = kwargs.get('reference_name', None) + self.parameters = kwargs.get('parameters', None) + + +class IntegrationRuntimeRegenerateKeyParameters(Model): + """Parameters to regenerate the authentication key. + + :param key_name: The name of the authentication key to regenerate. + Possible values include: 'authKey1', 'authKey2' + :type key_name: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeyName + """ + + _attribute_map = { + 'key_name': {'key': 'keyName', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeRegenerateKeyParameters, self).__init__(**kwargs) + self.key_name = kwargs.get('key_name', None) + + +class IntegrationRuntimeResource(SubResource): + """Integration runtime resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. 
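+
+ An illustrative sketch (the description is a placeholder;
+ SelfHostedIntegrationRuntime is one of the subtypes listed above)::
+
+     resource = IntegrationRuntimeResource(
+         properties=SelfHostedIntegrationRuntime(description='on-premises IR'),
+     )
+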
+ :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Integration runtime properties. + :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntime + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'IntegrationRuntime'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + + +class IntegrationRuntimeSsisCatalogInfo(Model): + """Catalog information for managed dedicated integration runtime. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param catalog_server_endpoint: The catalog database server URL. + :type catalog_server_endpoint: str + :param catalog_admin_user_name: The administrator user name of catalog + database. + :type catalog_admin_user_name: str + :param catalog_admin_password: The password of the administrator user + account of the catalog database. + :type catalog_admin_password: ~azure.mgmt.datafactory.models.SecureString + :param catalog_pricing_tier: The pricing tier for the catalog database. + The valid values could be found in + https://azure.microsoft.com/en-us/pricing/details/sql-database/. Possible + values include: 'Basic', 'Standard', 'Premium', 'PremiumRS' + :type catalog_pricing_tier: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogPricingTier + """ + + _validation = { + 'catalog_admin_user_name': {'max_length': 128, 'min_length': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'catalog_server_endpoint': {'key': 'catalogServerEndpoint', 'type': 'str'}, + 'catalog_admin_user_name': {'key': 'catalogAdminUserName', 'type': 'str'}, + 'catalog_admin_password': {'key': 'catalogAdminPassword', 'type': 'SecureString'}, + 'catalog_pricing_tier': {'key': 'catalogPricingTier', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeSsisCatalogInfo, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.catalog_server_endpoint = kwargs.get('catalog_server_endpoint', None) + self.catalog_admin_user_name = kwargs.get('catalog_admin_user_name', None) + self.catalog_admin_password = kwargs.get('catalog_admin_password', None) + self.catalog_pricing_tier = kwargs.get('catalog_pricing_tier', None) + + +class IntegrationRuntimeSsisProperties(Model): + """SSIS properties for managed integration runtime. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param catalog_info: Catalog information for managed dedicated integration + runtime. + :type catalog_info: + ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogInfo + :param license_type: License type for bringing your own license scenario. 
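+
+ As an editorial sketch, the catalog info defined above might be filled in
+ like this (the endpoint and credentials are placeholders)::
+
+     # Placeholder values; catalog_admin_password takes a SecureString.
+     catalog = IntegrationRuntimeSsisCatalogInfo(
+         catalog_server_endpoint='myserver.database.windows.net',
+         catalog_admin_user_name='catalogAdmin',
+         catalog_admin_password=SecureString(value='<password>'),
+         catalog_pricing_tier='Basic')
+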
+ Possible values include: 'BasePrice', 'LicenseIncluded'
+ :type license_type: str or
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeLicenseType
+ :param custom_setup_script_properties: Custom setup script properties for
+ a managed dedicated integration runtime.
+ :type custom_setup_script_properties:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeCustomSetupScriptProperties
+ :param data_proxy_properties: Data proxy properties for a managed
+ dedicated integration runtime.
+ :type data_proxy_properties:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeDataProxyProperties
+ :param edition: The edition for the SSIS Integration Runtime. Possible
+ values include: 'Standard', 'Enterprise'
+ :type edition: str or
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeEdition
+ """
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'catalog_info': {'key': 'catalogInfo', 'type': 'IntegrationRuntimeSsisCatalogInfo'},
+ 'license_type': {'key': 'licenseType', 'type': 'str'},
+ 'custom_setup_script_properties': {'key': 'customSetupScriptProperties', 'type': 'IntegrationRuntimeCustomSetupScriptProperties'},
+ 'data_proxy_properties': {'key': 'dataProxyProperties', 'type': 'IntegrationRuntimeDataProxyProperties'},
+ 'edition': {'key': 'edition', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs)
+ self.additional_properties = kwargs.get('additional_properties', None)
+ self.catalog_info = kwargs.get('catalog_info', None)
+ self.license_type = kwargs.get('license_type', None)
+ self.custom_setup_script_properties = kwargs.get('custom_setup_script_properties', None)
+ self.data_proxy_properties = kwargs.get('data_proxy_properties', None)
+ self.edition = kwargs.get('edition', None)
+
+
+class IntegrationRuntimeStatus(Model):
+ """Integration runtime status.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: SelfHostedIntegrationRuntimeStatus,
+ ManagedIntegrationRuntimeStatus
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :ivar data_factory_name: The data factory name to which the integration
+ runtime belongs.
+ :vartype data_factory_name: str
+ :ivar state: The state of integration runtime. Possible values include:
+ 'Initial', 'Stopped', 'Started', 'Starting', 'Stopping',
+ 'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied'
+ :vartype state: str or
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeState
+ :param type: Required. Constant filled by server.
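+
+ A status instance is typically obtained from the service rather than
+ constructed; an editorial sketch with placeholder names::
+
+     # get_status returns an IntegrationRuntimeStatusResponse whose
+     # .properties is one of the subclasses named above.
+     response = client.integration_runtimes.get_status(
+         'myResourceGroup', 'myFactory', 'myIntegrationRuntime')
+     print(response.properties.state)
+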
+ :type type: str + """ + + _validation = { + 'data_factory_name': {'readonly': True}, + 'state': {'readonly': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, + 'state': {'key': 'state', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SelfHosted': 'SelfHostedIntegrationRuntimeStatus', 'Managed': 'ManagedIntegrationRuntimeStatus'} + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeStatus, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.data_factory_name = None + self.state = None + self.type = None + + +class IntegrationRuntimeStatusListResponse(Model): + """A list of integration runtime status. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of integration runtime status. + :type value: + list[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse] + :param next_link: The link to the next page of results, if any remaining + results exist. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[IntegrationRuntimeStatusResponse]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeStatusListResponse, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.next_link = kwargs.get('next_link', None) + + +class IntegrationRuntimeStatusResponse(Model): + """Integration runtime status response. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar name: The integration runtime name. + :vartype name: str + :param properties: Required. Integration runtime properties. + :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatus + """ + + _validation = { + 'name': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'IntegrationRuntimeStatus'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeStatusResponse, self).__init__(**kwargs) + self.name = None + self.properties = kwargs.get('properties', None) + + +class IntegrationRuntimeVNetProperties(Model): + """VNet properties for managed integration runtime. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param v_net_id: The ID of the VNet that this integration runtime will + join. + :type v_net_id: str + :param subnet: The name of the subnet this integration runtime will join. + :type subnet: str + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'v_net_id': {'key': 'vNetId', 'type': 'str'}, + 'subnet': {'key': 'subnet', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeVNetProperties, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.v_net_id = kwargs.get('v_net_id', None) + self.subnet = kwargs.get('subnet', None) + + +class JiraLinkedService(LinkedService): + """Jira Service linked service. + + All required parameters must be populated in order to send to Azure. 
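+
+ As a rough usage sketch (editorial; the host and credentials are
+ placeholders, not values from the generated code)::
+
+     jira_ls = JiraLinkedService(
+         host='jira.example.com',
+         username='build-user',
+         # Any SecretBase subtype works here; SecureString is simplest.
+         password=SecureString(value='<password>'))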
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The IP address or host name of the Jira service. + (e.g. jira.example.com) + :type host: object + :param port: The TCP port that the Jira server uses to listen for client + connections. The default value is 443 if connecting through HTTPS, or 8080 + if connecting through HTTP. + :type port: object + :param username: Required. The user name that you use to access Jira + Service. + :type username: object + :param password: The password corresponding to the user name that you + provided in the username field. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(JiraLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.port = kwargs.get('port', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Jira' + + +class JiraObjectDataset(Dataset): + """Jira Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
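+
+ For example, a dataset bound to the linked service above (an editorial
+ sketch; the names are illustrative)::
+
+     dataset = JiraObjectDataset(
+         linked_service_name=LinkedServiceReference(
+             reference_name='JiraLinkedService'),
+         table_name='Issues')
+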
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(JiraObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'JiraObject' + + +class JiraSource(CopySource): + """A copy activity Jira Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(JiraSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'JiraSource' + + +class JsonFormat(DatasetStorageFormat): + """The data stored in JSON format. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param serializer: Serializer. Type: string (or Expression with resultType + string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with + resultType string). + :type deserializer: object + :param type: Required. Constant filled by server. + :type type: str + :param file_pattern: File pattern of JSON. To be more specific, the way of + separating a collection of JSON objects. The default value is + 'setOfObjects'. It is case-sensitive. + :type file_pattern: object + :param nesting_separator: The character used to separate nesting levels. + Default value is '.' (dot). 
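+
+ As an editorial sketch, flattening an array of records out of a JSON
+ document (the paths are placeholders)::
+
+     fmt = JsonFormat(
+         file_pattern='setOfObjects',
+         json_node_reference='$.items',
+         json_path_definition={'Column1': '$.Column1Path',
+                               'Column2': 'Column2PathInArray'})
+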
+ Type: string (or Expression with resultType
+ string).
+ :type nesting_separator: object
+ :param encoding_name: The code page name of the preferred encoding. If not
+ provided, the default value is 'utf-8', unless the byte order mark (BOM)
+ denotes another Unicode encoding. The full list of supported values can be
+ found in the 'Name' column of the table of encodings in the following
+ reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string
+ (or Expression with resultType string).
+ :type encoding_name: object
+ :param json_node_reference: The JSONPath of the JSON array element to be
+ flattened. Example: "$.ArrayPath". Type: string (or Expression with
+ resultType string).
+ :type json_node_reference: object
+ :param json_path_definition: The JSONPath definition for each column
+ mapping with a customized column name to extract data from JSON file. For
+ fields under root object, start with "$"; for fields inside the array
+ chosen by jsonNodeReference property, start from the array element.
+ Example: {"Column1": "$.Column1Path", "Column2": "Column2PathInArray"}.
+ Type: object (or Expression with resultType object).
+ :type json_path_definition: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'serializer': {'key': 'serializer', 'type': 'object'},
+ 'deserializer': {'key': 'deserializer', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'file_pattern': {'key': 'filePattern', 'type': 'object'},
+ 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'},
+ 'encoding_name': {'key': 'encodingName', 'type': 'object'},
+ 'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'},
+ 'json_path_definition': {'key': 'jsonPathDefinition', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(JsonFormat, self).__init__(**kwargs)
+ self.file_pattern = kwargs.get('file_pattern', None)
+ self.nesting_separator = kwargs.get('nesting_separator', None)
+ self.encoding_name = kwargs.get('encoding_name', None)
+ self.json_node_reference = kwargs.get('json_node_reference', None)
+ self.json_path_definition = kwargs.get('json_path_definition', None)
+ self.type = 'JsonFormat'
+
+
+class LinkedIntegrationRuntime(Model):
+ """The linked integration runtime information.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ :ivar name: The name of the linked integration runtime.
+ :vartype name: str
+ :ivar subscription_id: The subscription ID that the linked integration
+ runtime belongs to.
+ :vartype subscription_id: str
+ :ivar data_factory_name: The name of the data factory that the linked
+ integration runtime belongs to.
+ :vartype data_factory_name: str
+ :ivar data_factory_location: The location of the data factory that the
+ linked integration runtime belongs to.
+ :vartype data_factory_location: str
+ :ivar create_time: The time when the linked integration runtime was created.
+ :vartype create_time: datetime + """ + + _validation = { + 'name': {'readonly': True}, + 'subscription_id': {'readonly': True}, + 'data_factory_name': {'readonly': True}, + 'data_factory_location': {'readonly': True}, + 'create_time': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, + 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, + 'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'}, + 'create_time': {'key': 'createTime', 'type': 'iso-8601'}, + } + + def __init__(self, **kwargs): + super(LinkedIntegrationRuntime, self).__init__(**kwargs) + self.name = None + self.subscription_id = None + self.data_factory_name = None + self.data_factory_location = None + self.create_time = None + + +class LinkedIntegrationRuntimeType(Model): + """The base definition of a linked integration runtime. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: LinkedIntegrationRuntimeRbacAuthorization, + LinkedIntegrationRuntimeKeyAuthorization + + All required parameters must be populated in order to send to Azure. + + :param authorization_type: Required. Constant filled by server. + :type authorization_type: str + """ + + _validation = { + 'authorization_type': {'required': True}, + } + + _attribute_map = { + 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, + } + + _subtype_map = { + 'authorization_type': {'RBAC': 'LinkedIntegrationRuntimeRbacAuthorization', 'Key': 'LinkedIntegrationRuntimeKeyAuthorization'} + } + + def __init__(self, **kwargs): + super(LinkedIntegrationRuntimeType, self).__init__(**kwargs) + self.authorization_type = None + + +class LinkedIntegrationRuntimeKeyAuthorization(LinkedIntegrationRuntimeType): + """The key authorization type integration runtime. + + All required parameters must be populated in order to send to Azure. + + :param authorization_type: Required. Constant filled by server. + :type authorization_type: str + :param key: Required. The key used for authorization. + :type key: ~azure.mgmt.datafactory.models.SecureString + """ + + _validation = { + 'authorization_type': {'required': True}, + 'key': {'required': True}, + } + + _attribute_map = { + 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, + 'key': {'key': 'key', 'type': 'SecureString'}, + } + + def __init__(self, **kwargs): + super(LinkedIntegrationRuntimeKeyAuthorization, self).__init__(**kwargs) + self.key = kwargs.get('key', None) + self.authorization_type = 'Key' + + +class LinkedIntegrationRuntimeRbacAuthorization(LinkedIntegrationRuntimeType): + """The role based access control (RBAC) authorization type integration + runtime. + + All required parameters must be populated in order to send to Azure. + + :param authorization_type: Required. Constant filled by server. + :type authorization_type: str + :param resource_id: Required. The resource identifier of the integration + runtime to be shared. 
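+
+ An editorial sketch of the two authorization flavors (the resource ID and
+ key are placeholders)::
+
+     rbac = LinkedIntegrationRuntimeRbacAuthorization(
+         resource_id='<shared-integration-runtime-resource-id>')
+     shared_key = LinkedIntegrationRuntimeKeyAuthorization(
+         key=SecureString(value='<authKey>'))
+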
+ :type resource_id: str + """ + + _validation = { + 'authorization_type': {'required': True}, + 'resource_id': {'required': True}, + } + + _attribute_map = { + 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(LinkedIntegrationRuntimeRbacAuthorization, self).__init__(**kwargs) + self.resource_id = kwargs.get('resource_id', None) + self.authorization_type = 'RBAC' + + +class LinkedIntegrationRuntimeRequest(Model): + """Data factory name for linked integration runtime request. + + All required parameters must be populated in order to send to Azure. + + :param linked_factory_name: Required. The data factory name for linked + integration runtime. + :type linked_factory_name: str + """ + + _validation = { + 'linked_factory_name': {'required': True}, + } + + _attribute_map = { + 'linked_factory_name': {'key': 'factoryName', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(LinkedIntegrationRuntimeRequest, self).__init__(**kwargs) + self.linked_factory_name = kwargs.get('linked_factory_name', None) + + +class LinkedServiceReference(Model): + """Linked service reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Linked service reference type. Default value: + "LinkedServiceReference" . + :vartype type: str + :param reference_name: Required. Reference LinkedService name. + :type reference_name: str + :param parameters: Arguments for LinkedService. + :type parameters: dict[str, object] + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + type = "LinkedServiceReference" + + def __init__(self, **kwargs): + super(LinkedServiceReference, self).__init__(**kwargs) + self.reference_name = kwargs.get('reference_name', None) + self.parameters = kwargs.get('parameters', None) + + +class LinkedServiceResource(SubResource): + """Linked service resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Properties of linked service. + :type properties: ~azure.mgmt.datafactory.models.LinkedService + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'LinkedService'}, + } + + def __init__(self, **kwargs): + super(LinkedServiceResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + + +class LogStorageSettings(Model): + """Log storage settings. 
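+
+ A rough sketch of how these settings are supplied (editorial; the linked
+ service name and path are placeholders)::
+
+     log_settings = LogStorageSettings(
+         linked_service_name=LinkedServiceReference(
+             reference_name='StorageLinkedService'),
+         path='container/logs')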
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param linked_service_name: Required. Log storage linked service + reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param path: The path to storage for storing detailed logs of activity + execution. Type: string (or Expression with resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'path': {'key': 'path', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(LogStorageSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.linked_service_name = kwargs.get('linked_service_name', None) + self.path = kwargs.get('path', None) + + +class LookupActivity(ExecutionActivity): + """Lookup activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param source: Required. Dataset-specific source properties, same as copy + activity source. + :type source: ~azure.mgmt.datafactory.models.CopySource + :param dataset: Required. Lookup activity dataset reference. + :type dataset: ~azure.mgmt.datafactory.models.DatasetReference + :param first_row_only: Whether to return first row or all rows. Default + value is true. Type: boolean (or Expression with resultType boolean). 
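+
+ For example (an editorial sketch; the source, dataset and query are
+ placeholders)::
+
+     lookup = LookupActivity(
+         name='LookupConfig',
+         source=AzureMySqlSource(query='SELECT * FROM config'),
+         dataset=DatasetReference(reference_name='ConfigDataset'),
+         first_row_only=True)
+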
+ :type first_row_only: object
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'source': {'required': True},
+ 'dataset': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+ 'source': {'key': 'typeProperties.source', 'type': 'CopySource'},
+ 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'},
+ 'first_row_only': {'key': 'typeProperties.firstRowOnly', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(LookupActivity, self).__init__(**kwargs)
+ self.source = kwargs.get('source', None)
+ self.dataset = kwargs.get('dataset', None)
+ self.first_row_only = kwargs.get('first_row_only', None)
+ self.type = 'Lookup'
+
+
+class MagentoLinkedService(LinkedService):
+ """Magento server linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param host: Required. The URL of the Magento instance. (e.g.
+ 192.168.222.110/magento3)
+ :type host: object
+ :param access_token: The access token from Magento.
+ :type access_token: ~azure.mgmt.datafactory.models.SecretBase
+ :param use_encrypted_endpoints: Specifies whether the data source
+ endpoints are encrypted using HTTPS. The default value is true.
+ :type use_encrypted_endpoints: object
+ :param use_host_verification: Specifies whether to require the host name
+ in the server's certificate to match the host name of the server when
+ connecting over SSL. The default value is true.
+ :type use_host_verification: object
+ :param use_peer_verification: Specifies whether to verify the identity of
+ the server when connecting over SSL. The default value is true.
+ :type use_peer_verification: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MagentoLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.access_token = kwargs.get('access_token', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Magento' + + +class MagentoObjectDataset(Dataset): + """Magento server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MagentoObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'MagentoObject' + + +class MagentoSource(CopySource): + """A copy activity Magento server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MagentoSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'MagentoSource' + + +class ManagedIntegrationRuntime(IntegrationRuntime): + """Managed integration runtime, including managed elastic and managed + dedicated integration runtimes. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Integration runtime description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :ivar state: Integration runtime state, only valid for managed dedicated + integration runtime. 
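+
+ An editorial sketch tying the managed IR pieces together (all values are
+ placeholders; the models are assumed imported from
+ azure.mgmt.datafactory.models)::
+
+     ir = ManagedIntegrationRuntime(
+         compute_properties=IntegrationRuntimeComputeProperties(
+             location='East US',
+             node_size='Standard_D2_v3',
+             # Assumed field linking to IntegrationRuntimeVNetProperties.
+             v_net_properties=IntegrationRuntimeVNetProperties(
+                 v_net_id='<vnet-id>', subnet='default')),
+         ssis_properties=IntegrationRuntimeSsisProperties(
+             edition='Standard', license_type='LicenseIncluded'))
+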
Possible values include: 'Initial', 'Stopped', + 'Started', 'Starting', 'Stopping', 'NeedRegistration', 'Online', + 'Limited', 'Offline', 'AccessDenied' + :vartype state: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeState + :param compute_properties: The compute resource for managed integration + runtime. + :type compute_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeComputeProperties + :param ssis_properties: SSIS properties for managed integration runtime. + :type ssis_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisProperties + """ + + _validation = { + 'type': {'required': True}, + 'state': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'state': {'key': 'state', 'type': 'str'}, + 'compute_properties': {'key': 'typeProperties.computeProperties', 'type': 'IntegrationRuntimeComputeProperties'}, + 'ssis_properties': {'key': 'typeProperties.ssisProperties', 'type': 'IntegrationRuntimeSsisProperties'}, + } + + def __init__(self, **kwargs): + super(ManagedIntegrationRuntime, self).__init__(**kwargs) + self.state = None + self.compute_properties = kwargs.get('compute_properties', None) + self.ssis_properties = kwargs.get('ssis_properties', None) + self.type = 'Managed' + + +class ManagedIntegrationRuntimeError(Model): + """Error definition for managed integration runtime. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar time: The time when the error occurred. + :vartype time: datetime + :ivar code: Error code. + :vartype code: str + :ivar parameters: Managed integration runtime error parameters. + :vartype parameters: list[str] + :ivar message: Error message. + :vartype message: str + """ + + _validation = { + 'time': {'readonly': True}, + 'code': {'readonly': True}, + 'parameters': {'readonly': True}, + 'message': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'time': {'key': 'time', 'type': 'iso-8601'}, + 'code': {'key': 'code', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '[str]'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ManagedIntegrationRuntimeError, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.time = None + self.code = None + self.parameters = None + self.message = None + + +class ManagedIntegrationRuntimeNode(Model): + """Properties of integration runtime node. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar node_id: The managed integration runtime node id. + :vartype node_id: str + :ivar status: The managed integration runtime node status. Possible values + include: 'Starting', 'Available', 'Recycling', 'Unavailable' + :vartype status: str or + ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNodeStatus + :param errors: The errors that occurred on this integration runtime node. 
+ :type errors:
+ list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError]
+ """
+
+ _validation = {
+ 'node_id': {'readonly': True},
+ 'status': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'node_id': {'key': 'nodeId', 'type': 'str'},
+ 'status': {'key': 'status', 'type': 'str'},
+ 'errors': {'key': 'errors', 'type': '[ManagedIntegrationRuntimeError]'},
+ }
+
+ def __init__(self, **kwargs):
+ super(ManagedIntegrationRuntimeNode, self).__init__(**kwargs)
+ self.additional_properties = kwargs.get('additional_properties', None)
+ self.node_id = None
+ self.status = None
+ self.errors = kwargs.get('errors', None)
+
+
+class ManagedIntegrationRuntimeOperationResult(Model):
+ """Properties of managed integration runtime operation result.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :ivar type: The operation type. Could be start or stop.
+ :vartype type: str
+ :ivar start_time: The start time of the operation.
+ :vartype start_time: datetime
+ :ivar result: The operation result.
+ :vartype result: str
+ :ivar error_code: The error code.
+ :vartype error_code: str
+ :ivar parameters: Managed integration runtime error parameters.
+ :vartype parameters: list[str]
+ :ivar activity_id: The activity id for the operation request.
+ :vartype activity_id: str
+ """
+
+ _validation = {
+ 'type': {'readonly': True},
+ 'start_time': {'readonly': True},
+ 'result': {'readonly': True},
+ 'error_code': {'readonly': True},
+ 'parameters': {'readonly': True},
+ 'activity_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'start_time': {'key': 'startTime', 'type': 'iso-8601'},
+ 'result': {'key': 'result', 'type': 'str'},
+ 'error_code': {'key': 'errorCode', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '[str]'},
+ 'activity_id': {'key': 'activityId', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(ManagedIntegrationRuntimeOperationResult, self).__init__(**kwargs)
+ self.additional_properties = kwargs.get('additional_properties', None)
+ self.type = None
+ self.start_time = None
+ self.result = None
+ self.error_code = None
+ self.parameters = None
+ self.activity_id = None
+
+
+class ManagedIntegrationRuntimeStatus(IntegrationRuntimeStatus):
+ """Managed integration runtime status.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :ivar data_factory_name: The data factory name to which the integration
+ runtime belongs.
+ :vartype data_factory_name: str
+ :ivar state: The state of integration runtime. Possible values include:
+ 'Initial', 'Stopped', 'Started', 'Starting', 'Stopping',
+ 'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied'
+ :vartype state: str or
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeState
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :ivar create_time: The time at which the integration runtime was created,
+ in ISO8601 format.
+ :vartype create_time: datetime + :ivar nodes: The list of nodes for managed integration runtime. + :vartype nodes: + list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNode] + :ivar other_errors: The errors that occurred on this integration runtime. + :vartype other_errors: + list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError] + :ivar last_operation: The last operation result that occurred on this + integration runtime. + :vartype last_operation: + ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeOperationResult + """ + + _validation = { + 'data_factory_name': {'readonly': True}, + 'state': {'readonly': True}, + 'type': {'required': True}, + 'create_time': {'readonly': True}, + 'nodes': {'readonly': True}, + 'other_errors': {'readonly': True}, + 'last_operation': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, + 'state': {'key': 'state', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'create_time': {'key': 'typeProperties.createTime', 'type': 'iso-8601'}, + 'nodes': {'key': 'typeProperties.nodes', 'type': '[ManagedIntegrationRuntimeNode]'}, + 'other_errors': {'key': 'typeProperties.otherErrors', 'type': '[ManagedIntegrationRuntimeError]'}, + 'last_operation': {'key': 'typeProperties.lastOperation', 'type': 'ManagedIntegrationRuntimeOperationResult'}, + } + + def __init__(self, **kwargs): + super(ManagedIntegrationRuntimeStatus, self).__init__(**kwargs) + self.create_time = None + self.nodes = None + self.other_errors = None + self.last_operation = None + self.type = 'Managed' + + +class MariaDBLinkedService(LinkedService): + """MariaDB server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
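+
+ For example (an editorial sketch with placeholder values)::
+
+     mariadb_ls = MariaDBLinkedService(
+         connection_string='Server=myserver;Port=3306;Database=mydb;UID=myuser;',
+         # pwd resolves the password from Key Vault at runtime.
+         pwd=AzureKeyVaultSecretReference(
+             store=LinkedServiceReference(
+                 reference_name='KeyVaultLinkedService'),
+             secret_name='mariadb-password'))
+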
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MariaDBLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.pwd = kwargs.get('pwd', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'MariaDB' + + +class MariaDBSource(CopySource): + """A copy activity MariaDB server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MariaDBSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'MariaDBSource' + + +class MariaDBTableDataset(Dataset): + """MariaDB server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param table_name: The table name. Type: string (or Expression with
+ resultType string).
+ :type table_name: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(MariaDBTableDataset, self).__init__(**kwargs)
+ self.table_name = kwargs.get('table_name', None)
+ self.type = 'MariaDBTable'
+
+
+class MarketoLinkedService(LinkedService):
+ """Marketo server linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param endpoint: Required. The endpoint of the Marketo server. (e.g.
+ 123-ABC-321.mktorest.com)
+ :type endpoint: object
+ :param client_id: Required. The client Id of your Marketo service.
+ :type client_id: object
+ :param client_secret: The client secret of your Marketo service.
+ :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
+ :param use_encrypted_endpoints: Specifies whether the data source
+ endpoints are encrypted using HTTPS. The default value is true.
+ :type use_encrypted_endpoints: object
+ :param use_host_verification: Specifies whether to require the host name
+ in the server's certificate to match the host name of the server when
+ connecting over SSL. The default value is true.
+ :type use_host_verification: object
+ :param use_peer_verification: Specifies whether to verify the identity of
+ the server when connecting over SSL. The default value is true.
+ :type use_peer_verification: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication.
Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MarketoLinkedService, self).__init__(**kwargs) + self.endpoint = kwargs.get('endpoint', None) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Marketo' + + +class MarketoObjectDataset(Dataset): + """Marketo server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
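+# A usage sketch for MarketoLinkedService (editorial example; endpoint and
+# credentials are hypothetical, SecureString is the SDK's inline secret type):
+from azure.mgmt.datafactory.models import MarketoLinkedService, SecureString
+
+marketo_ls = MarketoLinkedService(
+    endpoint='123-ABC-321.mktorest.com',
+    client_id='my-client-id',
+    client_secret=SecureString(value='my-client-secret'))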
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MarketoObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'MarketoObject' + + +class MarketoSource(CopySource): + """A copy activity Marketo server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MarketoSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'MarketoSource' + + +class MicrosoftAccessLinkedService(LinkedService): + """Microsoft Access linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. 
The non-access credential portion of
+     the connection string as well as an optional encrypted credential. Type:
+     string, SecureString or AzureKeyVaultSecretReference.
+    :type connection_string: object
+    :param authentication_type: Type of authentication used to connect to
+     Microsoft Access as an ODBC data store. Possible values are: Anonymous
+     and Basic. Type: string (or Expression with resultType string).
+    :type authentication_type: object
+    :param credential: The access credential portion of the connection string
+     specified in driver-specific property-value format.
+    :type credential: ~azure.mgmt.datafactory.models.SecretBase
+    :param user_name: User name for Basic authentication. Type: string (or
+     Expression with resultType string).
+    :type user_name: object
+    :param password: Password for Basic authentication.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'connection_string': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'},
+        'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'},
+        'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(MicrosoftAccessLinkedService, self).__init__(**kwargs)
+        self.connection_string = kwargs.get('connection_string', None)
+        self.authentication_type = kwargs.get('authentication_type', None)
+        self.credential = kwargs.get('credential', None)
+        self.user_name = kwargs.get('user_name', None)
+        self.password = kwargs.get('password', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
+        self.type = 'MicrosoftAccess'
+
+
+class MicrosoftAccessSink(CopySink):
+    """A copy activity Microsoft Access sink.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param write_batch_size: Write batch size. Type: integer (or Expression
+     with resultType integer), minimum: 0.
+    :type write_batch_size: object
+    :param write_batch_timeout: Write batch timeout. Type: string (or
+     Expression with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type write_batch_timeout: object
+    :param sink_retry_count: Sink retry count. Type: integer (or Expression
+     with resultType integer).
+    :type sink_retry_count: object
+    :param sink_retry_wait: Sink retry wait.
Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). + :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MicrosoftAccessSink, self).__init__(**kwargs) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.type = 'MicrosoftAccessSink' + + +class MicrosoftAccessSource(CopySource): + """A copy activity source for Microsoft Access. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MicrosoftAccessSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'MicrosoftAccessSource' + + +class MicrosoftAccessTableDataset(Dataset): + """The Microsoft Access table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. 
Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param table_name: The Microsoft Access table name. Type: string (or
+     Expression with resultType string).
+    :type table_name: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(MicrosoftAccessTableDataset, self).__init__(**kwargs)
+        self.table_name = kwargs.get('table_name', None)
+        self.type = 'MicrosoftAccessTable'
+
+
+class MongoDbCollectionDataset(Dataset):
+    """The MongoDB database dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param collection_name: Required. The collection name of the MongoDB
+     database. Type: string (or Expression with resultType string).
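+# A usage sketch for the Microsoft Access models above (editorial example;
+# the DSN-less ODBC connection string and table names are hypothetical):
+from azure.mgmt.datafactory.models import (
+    MicrosoftAccessLinkedService, MicrosoftAccessSink, MicrosoftAccessSource)
+
+access_ls = MicrosoftAccessLinkedService(
+    connection_string='Driver={Microsoft Access Driver (*.mdb, *.accdb)};'
+                      'Dbq=C:\\data\\orders.accdb')
+access_source = MicrosoftAccessSource(query='SELECT * FROM Orders')
+access_sink = MicrosoftAccessSink(pre_copy_script='DELETE FROM OrdersStage')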
+    :type collection_name: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+        'collection_name': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(MongoDbCollectionDataset, self).__init__(**kwargs)
+        self.collection_name = kwargs.get('collection_name', None)
+        self.type = 'MongoDbCollection'
+
+
+class MongoDbCursorMethodsProperties(Model):
+    """Cursor methods for a MongoDB query.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param project: Specifies the fields to return in the documents that match
+     the query filter. To return all fields in the matching documents, omit
+     this parameter. Type: string (or Expression with resultType string).
+    :type project: object
+    :param sort: Specifies the order in which the query returns matching
+     documents. Type: string (or Expression with resultType string).
+    :type sort: object
+    :param skip: Specifies how many documents are skipped and where MongoDB
+     begins returning results. This approach may be useful in implementing
+     paginated results. Type: integer (or Expression with resultType integer).
+    :type skip: object
+    :param limit: Specifies the maximum number of documents the server
+     returns. limit() is analogous to the LIMIT statement in a SQL database.
+     Type: integer (or Expression with resultType integer).
+    :type limit: object
+    """
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'project': {'key': 'project', 'type': 'object'},
+        'sort': {'key': 'sort', 'type': 'object'},
+        'skip': {'key': 'skip', 'type': 'object'},
+        'limit': {'key': 'limit', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(MongoDbCursorMethodsProperties, self).__init__(**kwargs)
+        self.additional_properties = kwargs.get('additional_properties', None)
+        self.project = kwargs.get('project', None)
+        self.sort = kwargs.get('sort', None)
+        self.skip = kwargs.get('skip', None)
+        self.limit = kwargs.get('limit', None)
+
+
+class MongoDbLinkedService(LinkedService):
+    """Linked service for MongoDb data source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
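+# A usage sketch for MongoDbCursorMethodsProperties (editorial example; the
+# JSON documents below are hypothetical). Each property is an expression-
+# capable object, so plain strings and integers are accepted:
+from azure.mgmt.datafactory.models import MongoDbCursorMethodsProperties
+
+cursor = MongoDbCursorMethodsProperties(
+    project='{"name": 1, "_id": 0}',  # return only "name"
+    sort='{"name": 1}',               # ascending by "name"
+    skip=100,                         # skip the first 100 documents
+    limit=50)                         # return at most 50 documents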
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. The IP address or server name of the MongoDB + server. Type: string (or Expression with resultType string). + :type server: object + :param authentication_type: The authentication type to be used to connect + to the MongoDB database. Possible values include: 'Basic', 'Anonymous' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.MongoDbAuthenticationType + :param database_name: Required. The name of the MongoDB database that you + want to access. Type: string (or Expression with resultType string). + :type database_name: object + :param username: Username for authentication. Type: string (or Expression + with resultType string). + :type username: object + :param password: Password for authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param auth_source: Database to verify the username and password. Type: + string (or Expression with resultType string). + :type auth_source: object + :param port: The TCP port number that the MongoDB server uses to listen + for client connections. The default value is 27017. Type: integer (or + Expression with resultType integer), minimum: 0. + :type port: object + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. Type: boolean (or + Expression with resultType boolean). + :type enable_ssl: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + Type: boolean (or Expression with resultType boolean). + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'database_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'database_name': {'key': 'typeProperties.databaseName', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'auth_source': {'key': 'typeProperties.authSource', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MongoDbLinkedService, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.database_name = kwargs.get('database_name', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.auth_source = kwargs.get('auth_source', None) + self.port = kwargs.get('port', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'MongoDb' + + +class MongoDbSource(CopySource): + """A copy activity source for a MongoDB database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Should be a SQL-92 query expression. Type: + string (or Expression with resultType string). 
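+# A usage sketch for MongoDbLinkedService (editorial example; host, database
+# and credentials are hypothetical):
+from azure.mgmt.datafactory.models import MongoDbLinkedService, SecureString
+
+mongo_ls = MongoDbLinkedService(
+    server='mongo.example.com',
+    database_name='reporting',
+    authentication_type='Basic',
+    username='reader',
+    password=SecureString(value='my-password'),
+    port=27017,
+    enable_ssl=True)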
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MongoDbSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'MongoDbSource' + + +class MongoDbV2CollectionDataset(Dataset): + """The MongoDB database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection: Required. The collection name of the MongoDB database. + Type: string (or Expression with resultType string). + :type collection: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'collection': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MongoDbV2CollectionDataset, self).__init__(**kwargs) + self.collection = kwargs.get('collection', None) + self.type = 'MongoDbV2Collection' + + +class MongoDbV2LinkedService(LinkedService): + """Linked service for MongoDB data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
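+# A usage sketch pairing MongoDbSource with MongoDbV2CollectionDataset
+# (editorial example; the linked service reference name and collection are
+# hypothetical, and the source query is SQL-92 text per the docstring above):
+from azure.mgmt.datafactory.models import (
+    LinkedServiceReference, MongoDbSource, MongoDbV2CollectionDataset)
+
+legacy_source = MongoDbSource(query='SELECT * FROM orders')
+orders_dataset = MongoDbV2CollectionDataset(
+    linked_service_name=LinkedServiceReference(reference_name='MongoV2Ls'),
+    collection='orders')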
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param connection_string: Required. The MongoDB connection string. Type:
+     string, SecureString or AzureKeyVaultSecretReference.
+    :type connection_string: object
+    :param database: Required. The name of the MongoDB database that you want
+     to access. Type: string (or Expression with resultType string).
+    :type database: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'connection_string': {'required': True},
+        'database': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+        'database': {'key': 'typeProperties.database', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(MongoDbV2LinkedService, self).__init__(**kwargs)
+        self.connection_string = kwargs.get('connection_string', None)
+        self.database = kwargs.get('database', None)
+        self.type = 'MongoDbV2'
+
+
+class MongoDbV2Source(CopySource):
+    """A copy activity source for a MongoDB database.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param filter: Specifies selection filter using query operators. To return
+     all documents in a collection, omit this parameter or pass an empty
+     document ({}). Type: string (or Expression with resultType string).
+    :type filter: object
+    :param cursor_methods: Cursor methods for a MongoDB query
+    :type cursor_methods:
+     ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties
+    :param batch_size: Specifies the number of documents to return in each
+     batch of the response from the MongoDB instance. In most cases, modifying
+     the batch size will not affect the user or the application. This
+     property's main purpose is to avoid hitting the limit on response size.
+     Type: integer (or Expression with resultType integer).
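+# A usage sketch for MongoDbV2LinkedService (editorial example; the
+# connection string and database name are hypothetical):
+from azure.mgmt.datafactory.models import MongoDbV2LinkedService
+
+mongo_v2_ls = MongoDbV2LinkedService(
+    connection_string='mongodb://mongo.example.com:27017',
+    database='reporting')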
+ :type batch_size: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'filter': {'key': 'filter', 'type': 'object'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MongoDbV2Source, self).__init__(**kwargs) + self.filter = kwargs.get('filter', None) + self.cursor_methods = kwargs.get('cursor_methods', None) + self.batch_size = kwargs.get('batch_size', None) + self.type = 'MongoDbV2Source' + + +class MySqlLinkedService(LinkedService): + """Linked service for MySQL data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. + :type connection_string: ~azure.mgmt.datafactory.models.SecretBase + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'SecretBase'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MySqlLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'MySql' + + +class MySqlSource(CopySource): + """A copy activity source for MySQL databases. + + All required parameters must be populated in order to send to Azure. 
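+# A usage sketch for MongoDbV2Source combining a filter with the cursor
+# methods defined earlier (editorial example; the filter document and sizes
+# are hypothetical):
+from azure.mgmt.datafactory.models import (
+    MongoDbCursorMethodsProperties, MongoDbV2Source)
+
+v2_source = MongoDbV2Source(
+    filter='{"status": "active"}',
+    cursor_methods=MongoDbCursorMethodsProperties(sort='{"_id": 1}', limit=1000),
+    batch_size=500)  # smaller batches keep each response under the size limit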
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MySqlSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'MySqlSource' + + +class MySqlTableDataset(Dataset): + """The MySQL table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The MySQL table name. Type: string (or Expression with + resultType string). 
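+# A usage sketch for the MySQL models above (editorial example; the
+# connection string is hypothetical). Note that connection_string is a
+# SecretBase here, unlike the object-typed connection strings elsewhere:
+from azure.mgmt.datafactory.models import (
+    MySqlLinkedService, MySqlSource, SecureString)
+
+mysql_ls = MySqlLinkedService(
+    connection_string=SecureString(
+        value='Server=mysql.example.com;Port=3306;Database=mydb;UID=myuser'))
+mysql_source = MySqlSource(query='SELECT id, total FROM invoices')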
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MySqlTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'MySqlTable' + + +class NetezzaLinkedService(LinkedService): + """Netezza linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(NetezzaLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.pwd = kwargs.get('pwd', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Netezza' + + +class NetezzaPartitionSettings(Model): + """The settings that will be leveraged for Netezza source partitioning. 
+
+    :param partition_column_name: The name of the column in integer type that
+     will be used to proceed with range partitioning. Type: string (or
+     Expression with resultType string).
+    :type partition_column_name: object
+    :param partition_upper_bound: The maximum value of the column specified in
+     partitionColumnName that will be used to proceed with range partitioning.
+     Type: string (or Expression with resultType string).
+    :type partition_upper_bound: object
+    :param partition_lower_bound: The minimum value of the column specified in
+     partitionColumnName that will be used to proceed with range partitioning.
+     Type: string (or Expression with resultType string).
+    :type partition_lower_bound: object
+    """
+
+    _attribute_map = {
+        'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'},
+        'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'},
+        'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(NetezzaPartitionSettings, self).__init__(**kwargs)
+        self.partition_column_name = kwargs.get('partition_column_name', None)
+        self.partition_upper_bound = kwargs.get('partition_upper_bound', None)
+        self.partition_lower_bound = kwargs.get('partition_lower_bound', None)
+
+
+class NetezzaSource(CopySource):
+    """A copy activity Netezza source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param query: A query to retrieve data from source. Type: string (or
+     Expression with resultType string).
+    :type query: object
+    :param partition_option: The partition mechanism that will be used for
+     Netezza read in parallel. Possible values include: 'None', 'DataSlice',
+     'DynamicRange'
+    :type partition_option: str or
+     ~azure.mgmt.datafactory.models.NetezzaPartitionOption
+    :param partition_settings: The settings that will be leveraged for Netezza
+     source partitioning.
+    :type partition_settings:
+     ~azure.mgmt.datafactory.models.NetezzaPartitionSettings
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+        'partition_option': {'key': 'partitionOption', 'type': 'str'},
+        'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'},
+    }
+
+    def __init__(self, **kwargs):
+        super(NetezzaSource, self).__init__(**kwargs)
+        self.query = kwargs.get('query', None)
+        self.partition_option = kwargs.get('partition_option', None)
+        self.partition_settings = kwargs.get('partition_settings', None)
+        self.type = 'NetezzaSource'
+
+
+class NetezzaTableDataset(Dataset):
+    """Netezza dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param table_name: This property will be retired. Please consider using
+     schema + table properties instead.
+    :type table_name: object
+    :param table: The table name of the Netezza database. Type: string (or
+     Expression with resultType string).
+    :type table: object
+    :param netezza_table_dataset_schema: The schema name of the Netezza
+     database. Type: string (or Expression with resultType string).
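+# A usage sketch for a parallel Netezza read using the partition settings
+# above (editorial example; the column name and bounds are hypothetical):
+from azure.mgmt.datafactory.models import (
+    NetezzaPartitionSettings, NetezzaSource)
+
+netezza_source = NetezzaSource(
+    query='SELECT * FROM fact_sales',
+    partition_option='DynamicRange',
+    partition_settings=NetezzaPartitionSettings(
+        partition_column_name='sale_id',   # integer column
+        partition_lower_bound='1',
+        partition_upper_bound='1000000'))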
+    :type netezza_table_dataset_schema: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+        'table': {'key': 'typeProperties.table', 'type': 'object'},
+        'netezza_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(NetezzaTableDataset, self).__init__(**kwargs)
+        self.table_name = kwargs.get('table_name', None)
+        self.table = kwargs.get('table', None)
+        self.netezza_table_dataset_schema = kwargs.get('netezza_table_dataset_schema', None)
+        self.type = 'NetezzaTable'
+
+
+class ODataLinkedService(LinkedService):
+    """Open Data Protocol (OData) linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param url: Required. The URL of the OData service endpoint. Type: string
+     (or Expression with resultType string).
+    :type url: object
+    :param authentication_type: Type of authentication used to connect to the
+     OData service. Possible values include: 'Basic', 'Anonymous', 'Windows',
+     'AadServicePrincipal', 'ManagedServiceIdentity'
+    :type authentication_type: str or
+     ~azure.mgmt.datafactory.models.ODataAuthenticationType
+    :param user_name: User name of the OData service. Type: string (or
+     Expression with resultType string).
+    :type user_name: object
+    :param password: Password of the OData service.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param tenant: Specify the tenant information (domain name or tenant ID)
+     under which your application resides. Type: string (or Expression with
+     resultType string).
+    :type tenant: object
+    :param service_principal_id: Specify the application id of your
+     application registered in Azure Active Directory. Type: string (or
+     Expression with resultType string).
+    :type service_principal_id: object
+    :param aad_resource_id: Specify the resource you are requesting
+     authorization to use. Type: string (or Expression with resultType
+     string).
+    :type aad_resource_id: object
+    :param aad_service_principal_credential_type: Specify the credential type
+     (key or cert) that is used for the service principal.
Possible values include: + 'ServicePrincipalKey', 'ServicePrincipalCert' + :type aad_service_principal_credential_type: str or + ~azure.mgmt.datafactory.models.ODataAadServicePrincipalCredentialType + :param service_principal_key: Specify the secret of your application + registered in Azure Active Directory. Type: string (or Expression with + resultType string). + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param service_principal_embedded_cert: Specify the base64 encoded + certificate of your application registered in Azure Active Directory. + Type: string (or Expression with resultType string). + :type service_principal_embedded_cert: + ~azure.mgmt.datafactory.models.SecretBase + :param service_principal_embedded_cert_password: Specify the password of + your certificate if your certificate has a password and you are using + AadServicePrincipal authentication. Type: string (or Expression with + resultType string). + :type service_principal_embedded_cert_password: + ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'aad_service_principal_credential_type': {'key': 'typeProperties.aadServicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'service_principal_embedded_cert': {'key': 'typeProperties.servicePrincipalEmbeddedCert', 'type': 'SecretBase'}, + 'service_principal_embedded_cert_password': {'key': 'typeProperties.servicePrincipalEmbeddedCertPassword', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ODataLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.tenant = kwargs.get('tenant', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.aad_resource_id = kwargs.get('aad_resource_id', None) + self.aad_service_principal_credential_type = kwargs.get('aad_service_principal_credential_type', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.service_principal_embedded_cert = 
kwargs.get('service_principal_embedded_cert', None)
+        self.service_principal_embedded_cert_password = kwargs.get('service_principal_embedded_cert_password', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
+        self.type = 'OData'
+
+
+class ODataResourceDataset(Dataset):
+    """The Open Data Protocol (OData) resource dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param path: The OData resource path. Type: string (or Expression with
+     resultType string).
+    :type path: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'path': {'key': 'typeProperties.path', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(ODataResourceDataset, self).__init__(**kwargs)
+        self.path = kwargs.get('path', None)
+        self.type = 'ODataResource'
+
+
+class ODataSource(CopySource):
+    """A copy activity OData source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
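+# A usage sketch for the OData models above (editorial example; the public
+# Northwind endpoint is illustrative and the reference name is hypothetical):
+from azure.mgmt.datafactory.models import (
+    LinkedServiceReference, ODataLinkedService, ODataResourceDataset)
+
+odata_ls = ODataLinkedService(
+    url='https://services.odata.org/V4/Northwind/Northwind.svc',
+    authentication_type='Anonymous')
+odata_ds = ODataResourceDataset(
+    linked_service_name=LinkedServiceReference(reference_name='NorthwindLs'),
+    path='Customers')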
+ :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: OData query. For example, "$top=1". Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ODataSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'ODataSource' + + +class OdbcLinkedService(LinkedService): + """Open Database Connectivity (ODBC) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The non-access credential portion of + the connection string as well as an optional encrypted credential. Type: + string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param authentication_type: Type of authentication used to connect to the + ODBC data store. Possible values are: Anonymous and Basic. Type: string + (or Expression with resultType string). + :type authentication_type: object + :param credential: The access credential portion of the connection string + specified in driver-specific property-value format. + :type credential: ~azure.mgmt.datafactory.models.SecretBase + :param user_name: User name for Basic authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OdbcLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.credential = kwargs.get('credential', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Odbc' + + +class OdbcSink(CopySink): + """A copy activity ODBC sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). 
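+
+# Illustrative usage sketch: an ODBC linked service with Basic authentication.
+# The connection string and credential values are placeholders; SecureString
+# wraps the inline secret.
+from azure.mgmt.datafactory.models import OdbcLinkedService, SecureString
+odbc_ls = OdbcLinkedService(
+    connection_string='Driver={SQL Server};Server=myserver;Database=mydb;',
+    authentication_type='Basic',
+    user_name='odbc_user',
+    password=SecureString(value='<secret>'))
+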
+ :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OdbcSink, self).__init__(**kwargs) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.type = 'OdbcSink' + + +class OdbcSource(CopySource): + """A copy activity source for ODBC databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OdbcSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'OdbcSource' + + +class OdbcTableDataset(Dataset): + """The ODBC table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The ODBC table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OdbcTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'OdbcTable' + + +class Office365Dataset(Dataset): + """The Office365 account. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: Required. Name of the dataset to extract from Office + 365. Type: string (or Expression with resultType string). + :type table_name: object + :param predicate: A predicate expression that can be used to filter the + specific rows to extract from Office 365. Type: string (or Expression with + resultType string). 
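+
+# Illustrative usage sketch: an ODBC table dataset bound to a linked service
+# by name. 'OdbcLS' and the table name are hypothetical placeholders.
+from azure.mgmt.datafactory.models import OdbcTableDataset, LinkedServiceReference
+odbc_ds = OdbcTableDataset(
+    linked_service_name=LinkedServiceReference(reference_name='OdbcLS'),
+    table_name='dbo.Orders')
+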
+ :type predicate: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'table_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'predicate': {'key': 'typeProperties.predicate', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(Office365Dataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.predicate = kwargs.get('predicate', None) + self.type = 'Office365Table' + + +class Office365LinkedService(LinkedService): + """Office365 linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param office365_tenant_id: Required. Azure tenant ID to which the Office + 365 account belongs. Type: string (or Expression with resultType string). + :type office365_tenant_id: object + :param service_principal_tenant_id: Required. Specify the tenant + information under which your Azure AD web application resides. Type: + string (or Expression with resultType string). + :type service_principal_tenant_id: object + :param service_principal_id: Required. Specify the application's client + ID. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. Specify the application's key. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'office365_tenant_id': {'required': True},
+ 'service_principal_tenant_id': {'required': True},
+ 'service_principal_id': {'required': True},
+ 'service_principal_key': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'office365_tenant_id': {'key': 'typeProperties.office365TenantId', 'type': 'object'},
+ 'service_principal_tenant_id': {'key': 'typeProperties.servicePrincipalTenantId', 'type': 'object'},
+ 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
+ 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(Office365LinkedService, self).__init__(**kwargs)
+ self.office365_tenant_id = kwargs.get('office365_tenant_id', None)
+ self.service_principal_tenant_id = kwargs.get('service_principal_tenant_id', None)
+ self.service_principal_id = kwargs.get('service_principal_id', None)
+ self.service_principal_key = kwargs.get('service_principal_key', None)
+ self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.type = 'Office365'
+
+
+class Office365Source(CopySource):
+ """A copy activity source for an Office365 service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param allowed_groups: The groups containing all the users. Type: array of
+ strings (or Expression with resultType array of strings).
+ :type allowed_groups: object
+ :param user_scope_filter_uri: The user scope URI. Type: string (or
+ Expression with resultType string).
+ :type user_scope_filter_uri: object
+ :param date_filter_column: The column to apply the start time and end time
+ filters on. Type: string (or Expression with resultType string).
+ :type date_filter_column: object
+ :param start_time: Start time of the requested range for this dataset.
+ Type: string (or Expression with resultType string).
+ :type start_time: object
+ :param end_time: End time of the requested range for this dataset. Type:
+ string (or Expression with resultType string).
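+
+# Illustrative usage sketch: an Office 365 linked service plus a source that
+# reads a bounded date range. Tenant/app IDs, the filter column, and the
+# timestamps are placeholders.
+from azure.mgmt.datafactory.models import (
+    Office365LinkedService, Office365Source, SecureString)
+o365_ls = Office365LinkedService(
+    office365_tenant_id='<tenant-guid>',
+    service_principal_tenant_id='<tenant-guid>',
+    service_principal_id='<app-id>',
+    service_principal_key=SecureString(value='<app-key>'))
+o365_src = Office365Source(
+    date_filter_column='CreatedDateTime',
+    start_time='2019-01-01T00:00:00Z',
+    end_time='2019-02-01T00:00:00Z')
+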
+ :type end_time: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'allowed_groups': {'key': 'allowedGroups', 'type': 'object'}, + 'user_scope_filter_uri': {'key': 'userScopeFilterUri', 'type': 'object'}, + 'date_filter_column': {'key': 'dateFilterColumn', 'type': 'object'}, + 'start_time': {'key': 'startTime', 'type': 'object'}, + 'end_time': {'key': 'endTime', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(Office365Source, self).__init__(**kwargs) + self.allowed_groups = kwargs.get('allowed_groups', None) + self.user_scope_filter_uri = kwargs.get('user_scope_filter_uri', None) + self.date_filter_column = kwargs.get('date_filter_column', None) + self.start_time = kwargs.get('start_time', None) + self.end_time = kwargs.get('end_time', None) + self.type = 'Office365Source' + + +class Operation(Model): + """Azure Data Factory API operation definition. + + :param name: Operation name: {provider}/{resource}/{operation} + :type name: str + :param origin: The intended executor of the operation. + :type origin: str + :param display: Metadata associated with the operation. + :type display: ~azure.mgmt.datafactory.models.OperationDisplay + :param service_specification: Details about a service operation. + :type service_specification: + ~azure.mgmt.datafactory.models.OperationServiceSpecification + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'origin': {'key': 'origin', 'type': 'str'}, + 'display': {'key': 'display', 'type': 'OperationDisplay'}, + 'service_specification': {'key': 'properties.serviceSpecification', 'type': 'OperationServiceSpecification'}, + } + + def __init__(self, **kwargs): + super(Operation, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.origin = kwargs.get('origin', None) + self.display = kwargs.get('display', None) + self.service_specification = kwargs.get('service_specification', None) + + +class OperationDisplay(Model): + """Metadata associated with the operation. + + :param description: The description of the operation. + :type description: str + :param provider: The name of the provider. + :type provider: str + :param resource: The name of the resource type on which the operation is + performed. + :type resource: str + :param operation: The type of operation: get, read, delete, etc. + :type operation: str + """ + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'provider': {'key': 'provider', 'type': 'str'}, + 'resource': {'key': 'resource', 'type': 'str'}, + 'operation': {'key': 'operation', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(OperationDisplay, self).__init__(**kwargs) + self.description = kwargs.get('description', None) + self.provider = kwargs.get('provider', None) + self.resource = kwargs.get('resource', None) + self.operation = kwargs.get('operation', None) + + +class OperationLogSpecification(Model): + """Details about an operation related to logs. + + :param name: The name of the log category. + :type name: str + :param display_name: Localized display name. + :type display_name: str + :param blob_duration: Blobs created in the customer storage account, per + hour. 
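+
+# Illustrative usage sketch: Operation/OperationDisplay describe the entries
+# returned when listing the provider's API operations. 'credentials' and
+# 'subscription_id' are assumed to come from your environment; the client
+# signature reflects this msrest-era package.
+from azure.mgmt.datafactory import DataFactoryManagementClient
+client = DataFactoryManagementClient(credentials, subscription_id)
+for op in client.operations.list():
+    # op.display may be absent, so guard before dereferencing it.
+    print(op.name, op.origin, op.display.operation if op.display else None)
+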
+ :type blob_duration: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'blob_duration': {'key': 'blobDuration', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(OperationLogSpecification, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.display_name = kwargs.get('display_name', None) + self.blob_duration = kwargs.get('blob_duration', None) + + +class OperationMetricAvailability(Model): + """Defines how often data for a metric becomes available. + + :param time_grain: The granularity for the metric. + :type time_grain: str + :param blob_duration: Blob created in the customer storage account, per + hour. + :type blob_duration: str + """ + + _attribute_map = { + 'time_grain': {'key': 'timeGrain', 'type': 'str'}, + 'blob_duration': {'key': 'blobDuration', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(OperationMetricAvailability, self).__init__(**kwargs) + self.time_grain = kwargs.get('time_grain', None) + self.blob_duration = kwargs.get('blob_duration', None) + + +class OperationMetricDimension(Model): + """Defines the metric dimension. + + :param name: The name of the dimension for the metric. + :type name: str + :param display_name: The display name of the metric dimension. + :type display_name: str + :param to_be_exported_for_shoebox: Whether the dimension should be + exported to Azure Monitor. + :type to_be_exported_for_shoebox: bool + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'to_be_exported_for_shoebox': {'key': 'toBeExportedForShoebox', 'type': 'bool'}, + } + + def __init__(self, **kwargs): + super(OperationMetricDimension, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.display_name = kwargs.get('display_name', None) + self.to_be_exported_for_shoebox = kwargs.get('to_be_exported_for_shoebox', None) + + +class OperationMetricSpecification(Model): + """Details about an operation related to metrics. + + :param name: The name of the metric. + :type name: str + :param display_name: Localized display name of the metric. + :type display_name: str + :param display_description: The description of the metric. + :type display_description: str + :param unit: The unit that the metric is measured in. + :type unit: str + :param aggregation_type: The type of metric aggregation. + :type aggregation_type: str + :param enable_regional_mdm_account: Whether or not the service is using + regional MDM accounts. + :type enable_regional_mdm_account: str + :param source_mdm_account: The name of the MDM account. + :type source_mdm_account: str + :param source_mdm_namespace: The name of the MDM namespace. + :type source_mdm_namespace: str + :param availabilities: Defines how often data for metrics becomes + available. + :type availabilities: + list[~azure.mgmt.datafactory.models.OperationMetricAvailability] + :param dimensions: Defines the metric dimension. 
+ :type dimensions: + list[~azure.mgmt.datafactory.models.OperationMetricDimension] + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'display_description': {'key': 'displayDescription', 'type': 'str'}, + 'unit': {'key': 'unit', 'type': 'str'}, + 'aggregation_type': {'key': 'aggregationType', 'type': 'str'}, + 'enable_regional_mdm_account': {'key': 'enableRegionalMdmAccount', 'type': 'str'}, + 'source_mdm_account': {'key': 'sourceMdmAccount', 'type': 'str'}, + 'source_mdm_namespace': {'key': 'sourceMdmNamespace', 'type': 'str'}, + 'availabilities': {'key': 'availabilities', 'type': '[OperationMetricAvailability]'}, + 'dimensions': {'key': 'dimensions', 'type': '[OperationMetricDimension]'}, + } + + def __init__(self, **kwargs): + super(OperationMetricSpecification, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.display_name = kwargs.get('display_name', None) + self.display_description = kwargs.get('display_description', None) + self.unit = kwargs.get('unit', None) + self.aggregation_type = kwargs.get('aggregation_type', None) + self.enable_regional_mdm_account = kwargs.get('enable_regional_mdm_account', None) + self.source_mdm_account = kwargs.get('source_mdm_account', None) + self.source_mdm_namespace = kwargs.get('source_mdm_namespace', None) + self.availabilities = kwargs.get('availabilities', None) + self.dimensions = kwargs.get('dimensions', None) + + +class OperationServiceSpecification(Model): + """Details about a service operation. + + :param log_specifications: Details about operations related to logs. + :type log_specifications: + list[~azure.mgmt.datafactory.models.OperationLogSpecification] + :param metric_specifications: Details about operations related to metrics. + :type metric_specifications: + list[~azure.mgmt.datafactory.models.OperationMetricSpecification] + """ + + _attribute_map = { + 'log_specifications': {'key': 'logSpecifications', 'type': '[OperationLogSpecification]'}, + 'metric_specifications': {'key': 'metricSpecifications', 'type': '[OperationMetricSpecification]'}, + } + + def __init__(self, **kwargs): + super(OperationServiceSpecification, self).__init__(**kwargs) + self.log_specifications = kwargs.get('log_specifications', None) + self.metric_specifications = kwargs.get('metric_specifications', None) + + +class OracleLinkedService(LinkedService): + """Oracle database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. 
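+
+# Illustrative usage sketch: an Oracle linked service whose password is
+# resolved from Azure Key Vault at runtime. The connection string, vault
+# linked service name, and secret name are placeholders.
+from azure.mgmt.datafactory.models import (
+    OracleLinkedService, AzureKeyVaultSecretReference, LinkedServiceReference)
+oracle_ls = OracleLinkedService(
+    connection_string='Host=myhost;Port=1521;Sid=myservice;User Id=myuser;',
+    password=AzureKeyVaultSecretReference(
+        store=LinkedServiceReference(reference_name='MyAKV'),
+        secret_name='OraclePassword'))
+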
+ :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OracleLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Oracle' + + +class OraclePartitionSettings(Model): + """The settings that will be leveraged for Oracle source partitioning. + + :param partition_names: Names of the physical partitions of Oracle table. + :type partition_names: object + :param partition_column_name: The name of the column in integer type that + will be used for proceeding range partitioning. Type: string (or + Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_names': {'key': 'partitionNames', 'type': 'object'}, + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OraclePartitionSettings, self).__init__(**kwargs) + self.partition_names = kwargs.get('partition_names', None) + self.partition_column_name = kwargs.get('partition_column_name', None) + self.partition_upper_bound = kwargs.get('partition_upper_bound', None) + self.partition_lower_bound = kwargs.get('partition_lower_bound', None) + + +class OracleServiceCloudLinkedService(LinkedService): + """Oracle Service Cloud linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. 
+ :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The URL of the Oracle Service Cloud instance. + :type host: object + :param username: Required. The user name that you use to access Oracle + Service Cloud server. + :type username: object + :param password: Required. The password corresponding to the user name + that you provided in the username key. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. Type: boolean (or + Expression with resultType boolean). + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'username': {'required': True}, + 'password': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OracleServiceCloudLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'OracleServiceCloud' + + +class OracleServiceCloudObjectDataset(Dataset): + """Oracle Service Cloud dataset. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OracleServiceCloudObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'OracleServiceCloudObject' + + +class OracleServiceCloudSource(CopySource): + """A copy activity Oracle Service Cloud source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OracleServiceCloudSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'OracleServiceCloudSource' + + +class OracleSink(CopySink): + """A copy activity Oracle sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OracleSink, self).__init__(**kwargs) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.type = 'OracleSink' + + +class OracleSource(CopySource): + """A copy activity Oracle source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
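+
+# Illustrative usage sketch: an Oracle sink that clears the target table
+# before the copy starts; the script and batch size are placeholders.
+from azure.mgmt.datafactory.models import OracleSink
+oracle_sink = OracleSink(
+    pre_copy_script='TRUNCATE TABLE staging_orders',
+    write_batch_size=10000)
+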
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param oracle_reader_query: Oracle reader query. Type: string (or + Expression with resultType string). + :type oracle_reader_query: object + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param partition_option: The partition mechanism that will be used for + Oracle read in parallel. Possible values include: 'None', + 'PhysicalPartitionsOfTable', 'DynamicRange' + :type partition_option: str or + ~azure.mgmt.datafactory.models.OraclePartitionOption + :param partition_settings: The settings that will be leveraged for Oracle + source partitioning. + :type partition_settings: + ~azure.mgmt.datafactory.models.OraclePartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, + } + + def __init__(self, **kwargs): + super(OracleSource, self).__init__(**kwargs) + self.oracle_reader_query = kwargs.get('oracle_reader_query', None) + self.query_timeout = kwargs.get('query_timeout', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) + self.type = 'OracleSource' + + +class OracleTableDataset(Dataset): + """The on-premises Oracle database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. 
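+
+# Illustrative usage sketch: a parallel Oracle read using dynamic range
+# partitioning over a numeric column; the column name and bounds are
+# placeholders.
+from azure.mgmt.datafactory.models import OracleSource, OraclePartitionSettings
+oracle_src = OracleSource(
+    partition_option='DynamicRange',
+    partition_settings=OraclePartitionSettings(
+        partition_column_name='ORDER_ID',
+        partition_lower_bound='1',
+        partition_upper_bound='1000000'))
+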
+ :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param oracle_table_dataset_schema: The schema name of the on-premises + Oracle database. Type: string (or Expression with resultType string). + :type oracle_table_dataset_schema: object + :param table: The table name of the on-premises Oracle database. Type: + string (or Expression with resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'oracle_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OracleTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.oracle_table_dataset_schema = kwargs.get('oracle_table_dataset_schema', None) + self.table = kwargs.get('table', None) + self.type = 'OracleTable' + + +class OrcFormat(DatasetStorageFormat): + """The data stored in Optimized Row Columnar (ORC) format. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param serializer: Serializer. Type: string (or Expression with resultType + string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with + resultType string). + :type deserializer: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(OrcFormat, self).__init__(**kwargs) + self.type = 'OrcFormat' + + +class ParameterSpecification(Model): + """Definition of a single parameter for an entity. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Parameter type. Possible values include: 'Object', + 'String', 'Int', 'Float', 'Bool', 'Array', 'SecureString' + :type type: str or ~azure.mgmt.datafactory.models.ParameterType + :param default_value: Default value of parameter. 
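+
+# Illustrative usage sketch: declaring a dataset parameter; the name and
+# default are placeholders.
+from azure.mgmt.datafactory.models import ParameterSpecification
+params = {'tableName': ParameterSpecification(type='String',
+                                              default_value='dbo.Orders')}
+# A dataset built with parameters=params can then reference the value with
+# the expression "@dataset().tableName" in any expression-typed property.
+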
+ :type default_value: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ 'default_value': {'key': 'defaultValue', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(ParameterSpecification, self).__init__(**kwargs)
+ self.type = kwargs.get('type', None)
+ self.default_value = kwargs.get('default_value', None)
+
+
+class ParquetDataset(Dataset):
+ """Parquet dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset. Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param location: Required. The location of the parquet storage.
+ :type location: ~azure.mgmt.datafactory.models.DatasetLocation
+ :param compression_codec:
+ :type compression_codec: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ 'location': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'},
+ 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(ParquetDataset, self).__init__(**kwargs)
+ self.location = kwargs.get('location', None)
+ self.compression_codec = kwargs.get('compression_codec', None)
+ self.type = 'Parquet'
+
+
+class ParquetFormat(DatasetStorageFormat):
+ """The data stored in Parquet format.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param serializer: Serializer. Type: string (or Expression with resultType
+ string).
+ :type serializer: object
+ :param deserializer: Deserializer. Type: string (or Expression with
+ resultType string).
+ :type deserializer: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'serializer': {'key': 'serializer', 'type': 'object'},
+ 'deserializer': {'key': 'deserializer', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(ParquetFormat, self).__init__(**kwargs)
+ self.type = 'ParquetFormat'
+
+
+class ParquetSink(CopySink):
+ """A copy activity Parquet sink.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param write_batch_size: Write batch size. Type: integer (or Expression
+ with resultType integer), minimum: 0.
+ :type write_batch_size: object
+ :param write_batch_timeout: Write batch timeout. Type: string (or
+ Expression with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type write_batch_timeout: object
+ :param sink_retry_count: Sink retry count. Type: integer (or Expression
+ with resultType integer).
+ :type sink_retry_count: object
+ :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+ resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type sink_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the sink data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param store_settings: Parquet store settings.
+ :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+ 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+ 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
+ }
+
+ def __init__(self, **kwargs):
+ super(ParquetSink, self).__init__(**kwargs)
+ self.store_settings = kwargs.get('store_settings', None)
+ self.type = 'ParquetSink'
+
+
+class ParquetSource(CopySource):
+ """A copy activity Parquet source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param store_settings: Parquet store settings.
+ :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
+ }
+
+ def __init__(self, **kwargs):
+ super(ParquetSource, self).__init__(**kwargs)
+ self.store_settings = kwargs.get('store_settings', None)
+ self.type = 'ParquetSource'
+
+
+class PaypalLinkedService(LinkedService):
+ """Paypal Service linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param host: Required. The URL of the PayPal instance. (e.g.
+ api.sandbox.paypal.com)
+ :type host: object
+ :param client_id: Required. The client ID associated with your PayPal
+ application.
+ :type client_id: object
+ :param client_secret: The client secret associated with your PayPal
+ application.
+ :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
+ :param use_encrypted_endpoints: Specifies whether the data source
+ endpoints are encrypted using HTTPS. The default value is true.
+ :type use_encrypted_endpoints: object
+ :param use_host_verification: Specifies whether to require the host name
+ in the server's certificate to match the host name of the server when
+ connecting over SSL. The default value is true.
+ :type use_host_verification: object
+ :param use_peer_verification: Specifies whether to verify the identity of
+ the server when connecting over SSL. The default value is true.
+ :type use_peer_verification: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
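+
+# Illustrative usage sketch: a Parquet dataset plus sink. The DatasetLocation
+# keyword arguments shown (type/folder_path/file_name) are an assumption about
+# this API version, and all names and paths are placeholders.
+from azure.mgmt.datafactory.models import (
+    ParquetDataset, ParquetSink, DatasetLocation, LinkedServiceReference)
+parquet_ds = ParquetDataset(
+    linked_service_name=LinkedServiceReference(reference_name='BlobLS'),
+    location=DatasetLocation(type='AzureBlobStorageLocation',
+                             folder_path='container/output',
+                             file_name='orders.parquet'))
+parquet_sink = ParquetSink()
+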
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PaypalLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Paypal' + + +class PaypalObjectDataset(Dataset): + """Paypal Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
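A minimal sketch of constructing the PayPal linked service above; the client ID and secret shown are hypothetical placeholders:

    from azure.mgmt.datafactory.models import PaypalLinkedService, SecureString

    paypal_ls = PaypalLinkedService(
        host='api.sandbox.paypal.com',
        client_id='my-paypal-client-id',                       # hypothetical
        client_secret=SecureString(value='my-paypal-secret'),  # hypothetical
        use_encrypted_endpoints=True)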
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PaypalObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'PaypalObject' + + +class PaypalSource(CopySource): + """A copy activity Paypal Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PaypalSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'PaypalSource' + + +class PhoenixLinkedService(LinkedService): + """Phoenix server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The IP address or host name of the Phoenix server. 
+ (i.e. 192.168.222.160) + :type host: object + :param port: The TCP port that the Phoenix server uses to listen for + client connections. The default value is 8765. + :type port: object + :param http_path: The partial URL corresponding to the Phoenix server. + (i.e. /gateway/sandbox/phoenix/version). The default value is hbasephoenix + if using WindowsAzureHDInsightService. + :type http_path: object + :param authentication_type: Required. The authentication mechanism used to + connect to the Phoenix server. Possible values include: 'Anonymous', + 'UsernameAndPassword', 'WindowsAzureHDInsightService' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.PhoenixAuthenticationType + :param username: The user name used to connect to the Phoenix server. + :type username: object + :param password: The password corresponding to the user name. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a + CA-issued SSL certificate name to match the host name of the server when + connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PhoenixLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.port = kwargs.get('port', None) + self.http_path = kwargs.get('http_path', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) + self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Phoenix' + + +class PhoenixObjectDataset(Dataset): + """Phoenix server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. 
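A sketch of the Phoenix linked service over SSL, assuming a hypothetical HDInsight cluster endpoint; 'hbasephoenix' is the documented default http_path for the WindowsAzureHDInsightService authentication type:

    from azure.mgmt.datafactory.models import PhoenixLinkedService, SecureString

    phoenix_ls = PhoenixLinkedService(
        host='mycluster.azurehdinsight.net',  # hypothetical cluster
        port=443,
        http_path='hbasephoenix',
        authentication_type='WindowsAzureHDInsightService',
        username='admin',
        password=SecureString(value='my-password'),  # hypothetical
        enable_ssl=True)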
+ :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The table name of the Phoenix. Type: string (or Expression + with resultType string). + :type table: object + :param phoenix_object_dataset_schema: The schema name of the Phoenix. + Type: string (or Expression with resultType string). + :type phoenix_object_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'phoenix_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PhoenixObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.phoenix_object_dataset_schema = kwargs.get('phoenix_object_dataset_schema', None) + self.type = 'PhoenixObject' + + +class PhoenixSource(CopySource): + """A copy activity Phoenix server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PhoenixSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'PhoenixSource' + + +class PipelineFolder(Model): + """The folder that this Pipeline is in. If not specified, Pipeline will appear + at the root level. + + :param name: The name of the folder that this Pipeline is in. + :type name: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(PipelineFolder, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + + +class PipelineReference(Model): + """Pipeline reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Pipeline reference type. Default value: + "PipelineReference" . + :vartype type: str + :param reference_name: Required. Reference pipeline name. + :type reference_name: str + :param name: Reference name. + :type name: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + } + + type = "PipelineReference" + + def __init__(self, **kwargs): + super(PipelineReference, self).__init__(**kwargs) + self.reference_name = kwargs.get('reference_name', None) + self.name = kwargs.get('name', None) + + +class PipelineResource(SubResource): + """Pipeline resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: The description of the pipeline. + :type description: str + :param activities: List of activities in pipeline. + :type activities: list[~azure.mgmt.datafactory.models.Activity] + :param parameters: List of parameters for pipeline. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param variables: List of variables for pipeline. + :type variables: dict[str, + ~azure.mgmt.datafactory.models.VariableSpecification] + :param concurrency: The max number of concurrent runs for the pipeline. + :type concurrency: int + :param annotations: List of tags that can be used for describing the + Pipeline. + :type annotations: list[object] + :param folder: The folder that this Pipeline is in. If not specified, + Pipeline will appear at the root level. 
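A sketch of publishing a pipeline built from these models, assuming 'client' is an authenticated DataFactoryManagementClient and 'copy_parquet' is the copy activity sketched earlier; the resource group, factory, and pipeline names are hypothetical:

    from azure.mgmt.datafactory.models import PipelineFolder, PipelineResource

    pipeline = PipelineResource(
        description='Copies Parquet data between stores.',
        activities=[copy_parquet],
        concurrency=1,
        folder=PipelineFolder(name='examples'))
    client.pipelines.create_or_update(
        'my-resource-group', 'my-factory', 'CopyParquetPipeline', pipeline)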
+ :type folder: ~azure.mgmt.datafactory.models.PipelineFolder + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'concurrency': {'minimum': 1}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'properties.description', 'type': 'str'}, + 'activities': {'key': 'properties.activities', 'type': '[Activity]'}, + 'parameters': {'key': 'properties.parameters', 'type': '{ParameterSpecification}'}, + 'variables': {'key': 'properties.variables', 'type': '{VariableSpecification}'}, + 'concurrency': {'key': 'properties.concurrency', 'type': 'int'}, + 'annotations': {'key': 'properties.annotations', 'type': '[object]'}, + 'folder': {'key': 'properties.folder', 'type': 'PipelineFolder'}, + } + + def __init__(self, **kwargs): + super(PipelineResource, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.description = kwargs.get('description', None) + self.activities = kwargs.get('activities', None) + self.parameters = kwargs.get('parameters', None) + self.variables = kwargs.get('variables', None) + self.concurrency = kwargs.get('concurrency', None) + self.annotations = kwargs.get('annotations', None) + self.folder = kwargs.get('folder', None) + + +class PipelineRun(Model): + """Information about a pipeline run. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar run_id: Identifier of a run. + :vartype run_id: str + :ivar run_group_id: Identifier that correlates all the recovery runs of a + pipeline run. + :vartype run_group_id: str + :ivar is_latest: Indicates if the recovered pipeline run is the latest in + its group. + :vartype is_latest: bool + :ivar pipeline_name: The pipeline name. + :vartype pipeline_name: str + :ivar parameters: The full or partial list of parameter name, value pair + used in the pipeline run. + :vartype parameters: dict[str, str] + :ivar invoked_by: Entity that started the pipeline run. + :vartype invoked_by: ~azure.mgmt.datafactory.models.PipelineRunInvokedBy + :ivar last_updated: The last updated timestamp for the pipeline run event + in ISO8601 format. + :vartype last_updated: datetime + :ivar run_start: The start time of a pipeline run in ISO8601 format. + :vartype run_start: datetime + :ivar run_end: The end time of a pipeline run in ISO8601 format. + :vartype run_end: datetime + :ivar duration_in_ms: The duration of a pipeline run. + :vartype duration_in_ms: int + :ivar status: The status of a pipeline run. + :vartype status: str + :ivar message: The message from a pipeline run. 
+ :vartype message: str + """ + + _validation = { + 'run_id': {'readonly': True}, + 'run_group_id': {'readonly': True}, + 'is_latest': {'readonly': True}, + 'pipeline_name': {'readonly': True}, + 'parameters': {'readonly': True}, + 'invoked_by': {'readonly': True}, + 'last_updated': {'readonly': True}, + 'run_start': {'readonly': True}, + 'run_end': {'readonly': True}, + 'duration_in_ms': {'readonly': True}, + 'status': {'readonly': True}, + 'message': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'run_id': {'key': 'runId', 'type': 'str'}, + 'run_group_id': {'key': 'runGroupId', 'type': 'str'}, + 'is_latest': {'key': 'isLatest', 'type': 'bool'}, + 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{str}'}, + 'invoked_by': {'key': 'invokedBy', 'type': 'PipelineRunInvokedBy'}, + 'last_updated': {'key': 'lastUpdated', 'type': 'iso-8601'}, + 'run_start': {'key': 'runStart', 'type': 'iso-8601'}, + 'run_end': {'key': 'runEnd', 'type': 'iso-8601'}, + 'duration_in_ms': {'key': 'durationInMs', 'type': 'int'}, + 'status': {'key': 'status', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(PipelineRun, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.run_id = None + self.run_group_id = None + self.is_latest = None + self.pipeline_name = None + self.parameters = None + self.invoked_by = None + self.last_updated = None + self.run_start = None + self.run_end = None + self.duration_in_ms = None + self.status = None + self.message = None + + + class PipelineRunInvokedBy(Model): + """Provides the name and ID of the entity that started the pipeline run. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar name: Name of the entity that started the pipeline run. + :vartype name: str + :ivar id: The ID of the entity that started the run. + :vartype id: str + :ivar invoked_by_type: The type of the entity that started the run. + :vartype invoked_by_type: str + """ + + _validation = { + 'name': {'readonly': True}, + 'id': {'readonly': True}, + 'invoked_by_type': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'str'}, + 'invoked_by_type': {'key': 'invokedByType', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(PipelineRunInvokedBy, self).__init__(**kwargs) + self.name = None + self.id = None + self.invoked_by_type = None + + + class PipelineRunsQueryResponse(Model): + """A list of pipeline runs. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of pipeline runs. + :type value: list[~azure.mgmt.datafactory.models.PipelineRun] + :param continuation_token: The continuation token for getting the next + page of results, if any remaining results exist, null otherwise. + :type continuation_token: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[PipelineRun]'}, + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(PipelineRunsQueryResponse, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.continuation_token = kwargs.get('continuation_token', None) + + + class PolybaseSettings(Model): + """PolyBase settings.
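A sketch of how PipelineRun and PipelineRunsQueryResponse come back from a runs query, again assuming an authenticated 'client' and hypothetical resource names:

    import datetime

    from azure.mgmt.datafactory.models import RunFilterParameters

    now = datetime.datetime.utcnow()
    filters = RunFilterParameters(
        last_updated_after=now - datetime.timedelta(days=1),
        last_updated_before=now)
    response = client.pipeline_runs.query_by_factory(
        'my-resource-group', 'my-factory', filters)
    for run in response.value:  # each item is a read-only PipelineRun
        print(run.run_id, run.status, run.duration_in_ms)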
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param reject_type: Reject type. Possible values include: 'value', + 'percentage' + :type reject_type: str or + ~azure.mgmt.datafactory.models.PolybaseSettingsRejectType + :param reject_value: Specifies the value or the percentage of rows that + can be rejected before the query fails. Type: number (or Expression with + resultType number), minimum: 0. + :type reject_value: object + :param reject_sample_value: Determines the number of rows to attempt to + retrieve before the PolyBase recalculates the percentage of rejected rows. + Type: integer (or Expression with resultType integer), minimum: 0. + :type reject_sample_value: object + :param use_type_default: Specifies how to handle missing values in + delimited text files when PolyBase retrieves data from the text file. + Type: boolean (or Expression with resultType boolean). + :type use_type_default: object + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'reject_type': {'key': 'rejectType', 'type': 'str'}, + 'reject_value': {'key': 'rejectValue', 'type': 'object'}, + 'reject_sample_value': {'key': 'rejectSampleValue', 'type': 'object'}, + 'use_type_default': {'key': 'useTypeDefault', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PolybaseSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.reject_type = kwargs.get('reject_type', None) + self.reject_value = kwargs.get('reject_value', None) + self.reject_sample_value = kwargs.get('reject_sample_value', None) + self.use_type_default = kwargs.get('use_type_default', None) + + +class PostgreSqlLinkedService(LinkedService): + """Linked service for PostgreSQL data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. + :type connection_string: ~azure.mgmt.datafactory.models.SecretBase + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
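A sketch of attaching these PolyBase settings to a SQL Data Warehouse sink: reject the load once more than ten percent of the sampled rows fail to convert (values are illustrative):

    from azure.mgmt.datafactory.models import PolybaseSettings, SqlDWSink

    dw_sink = SqlDWSink(
        allow_poly_base=True,
        poly_base_settings=PolybaseSettings(
            reject_type='percentage',
            reject_value=10.0,        # percent of rows that may be rejected
            reject_sample_value=100,  # rows sampled between recalculations
            use_type_default=False))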
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'SecretBase'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PostgreSqlLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'PostgreSql' + + +class PostgreSqlSource(CopySource): + """A copy activity source for PostgreSQL databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PostgreSqlSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'PostgreSqlSource' + + +class PostgreSqlTableDataset(Dataset): + """The PostgreSQL table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. 
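A sketch of the PostgreSQL linked service with the password resolved from a hypothetical Key Vault linked service; the connection-string values are placeholders:

    from azure.mgmt.datafactory.models import (
        AzureKeyVaultSecretReference, LinkedServiceReference,
        PostgreSqlLinkedService, SecureString)

    pg_ls = PostgreSqlLinkedService(
        connection_string=SecureString(
            value='host=pg.example.com port=5432 database=sales uid=copyuser'),
        password=AzureKeyVaultSecretReference(
            store=LinkedServiceReference(reference_name='MyKeyVault'),
            secret_name='pg-password'))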
+ :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The PostgreSQL table name. Type: string (or Expression with + resultType string). + :type table: object + :param postgre_sql_table_dataset_schema: The PostgreSQL schema name. Type: + string (or Expression with resultType string). + :type postgre_sql_table_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'postgre_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PostgreSqlTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.postgre_sql_table_dataset_schema = kwargs.get('postgre_sql_table_dataset_schema', None) + self.type = 'PostgreSqlTable' + + + class PrestoLinkedService(LinkedService): + """Presto server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The IP address or host name of the Presto server. + (i.e. 192.168.222.160) + :type host: object + :param server_version: Required. The version of the Presto server. (i.e. + 0.148-t) + :type server_version: object + :param catalog: Required. The catalog context for all requests against the + server.
+ :type catalog: object + :param port: The TCP port that the Presto server uses to listen for client + connections. The default value is 8080. + :type port: object + :param authentication_type: Required. The authentication mechanism used to + connect to the Presto server. Possible values include: 'Anonymous', 'LDAP' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.PrestoAuthenticationType + :param username: The user name used to connect to the Presto server. + :type username: object + :param password: The password corresponding to the user name. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a + CA-issued SSL certificate name to match the host name of the server when + connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + :type allow_self_signed_server_cert: object + :param time_zone_id: The local time zone used by the connection. Valid + values for this option are specified in the IANA Time Zone Database. The + default value is the system time zone. + :type time_zone_id: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'server_version': {'required': True}, + 'catalog': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'server_version': {'key': 'typeProperties.serverVersion', 'type': 'object'}, + 'catalog': {'key': 'typeProperties.catalog', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'time_zone_id': {'key': 'typeProperties.timeZoneID', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PrestoLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.server_version = kwargs.get('server_version', None) + self.catalog = kwargs.get('catalog', None) + self.port = kwargs.get('port', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) + self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + self.time_zone_id = kwargs.get('time_zone_id', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Presto' + + +class PrestoObjectDataset(Dataset): + """Presto server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
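A sketch of the Presto linked service; the host, catalog, and credentials are hypothetical, and serverVersion and catalog are required alongside host and authenticationType:

    from azure.mgmt.datafactory.models import PrestoLinkedService, SecureString

    presto_ls = PrestoLinkedService(
        host='presto.example.com',  # hypothetical coordinator
        server_version='0.148-t',
        catalog='hive',
        port=8080,
        authentication_type='LDAP',
        username='etl_user',
        password=SecureString(value='my-password'),  # hypothetical
        enable_ssl=True,
        time_zone_id='UTC')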
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The table name of the Presto. Type: string (or Expression + with resultType string). + :type table: object + :param presto_object_dataset_schema: The schema name of the Presto. Type: + string (or Expression with resultType string). + :type presto_object_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'presto_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PrestoObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.presto_object_dataset_schema = kwargs.get('presto_object_dataset_schema', None) + self.type = 'PrestoObject' + + +class PrestoSource(CopySource): + """A copy activity Presto server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PrestoSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'PrestoSource' + + +class QuickBooksLinkedService(LinkedService): + """QuickBooks server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of the QuickBooks server. (i.e. + quickbooks.api.intuit.com) + :type endpoint: object + :param company_id: Required. The company ID of the QuickBooks company to + authorize. + :type company_id: object + :param consumer_key: Required. The consumer key for OAuth 1.0 + authentication. + :type consumer_key: object + :param consumer_secret: Required. The consumer secret for OAuth 1.0 + authentication. + :type consumer_secret: ~azure.mgmt.datafactory.models.SecretBase + :param access_token: Required. The access token for OAuth 1.0 + authentication. + :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param access_token_secret: Required. The access token secret for OAuth + 1.0 authentication. + :type access_token_secret: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'company_id': {'required': True}, + 'consumer_key': {'required': True}, + 'consumer_secret': {'required': True}, + 'access_token': {'required': True}, + 'access_token_secret': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'company_id': {'key': 'typeProperties.companyId', 'type': 'object'}, + 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'object'}, + 'consumer_secret': {'key': 'typeProperties.consumerSecret', 'type': 'SecretBase'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'access_token_secret': {'key': 'typeProperties.accessTokenSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(QuickBooksLinkedService, self).__init__(**kwargs) + self.endpoint = kwargs.get('endpoint', None) + self.company_id = kwargs.get('company_id', None) + self.consumer_key = kwargs.get('consumer_key', None) + self.consumer_secret = kwargs.get('consumer_secret', None) + self.access_token = kwargs.get('access_token', None) + self.access_token_secret = kwargs.get('access_token_secret', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'QuickBooks' + + +class QuickBooksObjectDataset(Dataset): + """QuickBooks server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(QuickBooksObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'QuickBooksObject' + + +class QuickBooksSource(CopySource): + """A copy activity QuickBooks server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(QuickBooksSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'QuickBooksSource' + + +class RecurrenceSchedule(Model): + """The recurrence schedule. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param minutes: The minutes. + :type minutes: list[int] + :param hours: The hours. + :type hours: list[int] + :param week_days: The days of the week. + :type week_days: list[str or ~azure.mgmt.datafactory.models.DaysOfWeek] + :param month_days: The month days. + :type month_days: list[int] + :param monthly_occurrences: The monthly occurrences. 
+ :type monthly_occurrences: + list[~azure.mgmt.datafactory.models.RecurrenceScheduleOccurrence] + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'minutes': {'key': 'minutes', 'type': '[int]'}, + 'hours': {'key': 'hours', 'type': '[int]'}, + 'week_days': {'key': 'weekDays', 'type': '[DaysOfWeek]'}, + 'month_days': {'key': 'monthDays', 'type': '[int]'}, + 'monthly_occurrences': {'key': 'monthlyOccurrences', 'type': '[RecurrenceScheduleOccurrence]'}, + } + + def __init__(self, **kwargs): + super(RecurrenceSchedule, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.minutes = kwargs.get('minutes', None) + self.hours = kwargs.get('hours', None) + self.week_days = kwargs.get('week_days', None) + self.month_days = kwargs.get('month_days', None) + self.monthly_occurrences = kwargs.get('monthly_occurrences', None) + + + class RecurrenceScheduleOccurrence(Model): + """The recurrence schedule occurrence. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param day: The day of the week. Possible values include: 'Sunday', + 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday' + :type day: str or ~azure.mgmt.datafactory.models.DayOfWeek + :param occurrence: The occurrence. + :type occurrence: int + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'day': {'key': 'day', 'type': 'DayOfWeek'}, + 'occurrence': {'key': 'occurrence', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(RecurrenceScheduleOccurrence, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.day = kwargs.get('day', None) + self.occurrence = kwargs.get('occurrence', None) + + + class RedirectIncompatibleRowSettings(Model): + """Redirect incompatible row settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param linked_service_name: Required. Name of the Azure Storage, Storage + SAS, or Azure Data Lake Store linked service used for redirecting + incompatible rows. Must be specified if redirectIncompatibleRowSettings is + specified. Type: string (or Expression with resultType string). + :type linked_service_name: object + :param path: The path for storing the redirect incompatible row data. + Type: string (or Expression with resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'object'}, + 'path': {'key': 'path', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(RedirectIncompatibleRowSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.linked_service_name = kwargs.get('linked_service_name', None) + self.path = kwargs.get('path', None) + + + class RedshiftUnloadSettings(Model): + """The Amazon S3 settings needed for the interim Amazon S3 when copying from + Amazon Redshift with unload. With this, data from the Amazon Redshift source + will first be unloaded into S3 and then copied into the targeted sink from + the interim S3.
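A sketch of the recurrence models in use on a schedule trigger: fire at 06:00 on the first Monday of each month (all values illustrative):

    from azure.mgmt.datafactory.models import (
        RecurrenceSchedule, RecurrenceScheduleOccurrence,
        ScheduleTriggerRecurrence)

    recurrence = ScheduleTriggerRecurrence(
        frequency='Month',
        interval=1,
        schedule=RecurrenceSchedule(
            hours=[6],
            minutes=[0],
            monthly_occurrences=[
                RecurrenceScheduleOccurrence(day='Monday', occurrence=1)]))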
+ + All required parameters must be populated in order to send to Azure. + + :param s3_linked_service_name: Required. The name of the Amazon S3 linked + service which will be used for the unload operation when copying from the + Amazon Redshift source. + :type s3_linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param bucket_name: Required. The bucket of the interim Amazon S3 which + will be used to store the unloaded data from Amazon Redshift source. The + bucket must be in the same region as the Amazon Redshift source. Type: + string (or Expression with resultType string). + :type bucket_name: object + """ + + _validation = { + 's3_linked_service_name': {'required': True}, + 'bucket_name': {'required': True}, + } + + _attribute_map = { + 's3_linked_service_name': {'key': 's3LinkedServiceName', 'type': 'LinkedServiceReference'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(RedshiftUnloadSettings, self).__init__(**kwargs) + self.s3_linked_service_name = kwargs.get('s3_linked_service_name', None) + self.bucket_name = kwargs.get('bucket_name', None) + + +class RelationalSource(CopySource): + """A copy activity source for various relational databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(RelationalSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'RelationalSource' + + +class RelationalTableDataset(Dataset): + """The relational table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. 
Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The relational table name. Type: string (or Expression + with resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(RelationalTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'RelationalTable' + + +class RerunTriggerResource(SubResource): + """RerunTrigger resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Properties of the rerun trigger. + :type properties: + ~azure.mgmt.datafactory.models.RerunTumblingWindowTrigger + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'RerunTumblingWindowTrigger'}, + } + + def __init__(self, **kwargs): + super(RerunTriggerResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + + +class RerunTumblingWindowTrigger(Trigger): + """Trigger that schedules pipeline reruns for all fixed time interval windows + from a requested start time to requested end time. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. 
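+
+ Example (a minimal sketch; the restatement window below is hypothetical,
+ and the datetimes are naive UTC values)::
+
+ import datetime
+
+ rerun_trigger = RerunTumblingWindowTrigger(
+ requested_start_time=datetime.datetime(2019, 6, 1),
+ requested_end_time=datetime.datetime(2019, 6, 7),
+ max_concurrency=10)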
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param parent_trigger: The parent trigger reference. + :type parent_trigger: object + :param requested_start_time: Required. The start time for the time period + for which restatement is initiated. Only UTC time is currently supported. + :type requested_start_time: datetime + :param requested_end_time: Required. The end time for the time period for + which restatement is initiated. Only UTC time is currently supported. + :type requested_end_time: datetime + :param max_concurrency: Required. The max number of parallel time windows + (ready for execution) for which a rerun is triggered. + :type max_concurrency: int + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'requested_start_time': {'required': True}, + 'requested_end_time': {'required': True}, + 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, + 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, + 'requested_end_time': {'key': 'typeProperties.requestedEndTime', 'type': 'iso-8601'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(RerunTumblingWindowTrigger, self).__init__(**kwargs) + self.parent_trigger = kwargs.get('parent_trigger', None) + self.requested_start_time = kwargs.get('requested_start_time', None) + self.requested_end_time = kwargs.get('requested_end_time', None) + self.max_concurrency = kwargs.get('max_concurrency', None) + self.type = 'RerunTumblingWindowTrigger' + + +class RerunTumblingWindowTriggerActionParameters(Model): + """Rerun tumbling window trigger Parameters. + + All required parameters must be populated in order to send to Azure. + + :param start_time: Required. The start time for the time period for which + restatement is initiated. Only UTC time is currently supported. + :type start_time: datetime + :param end_time: Required. The end time for the time period for which + restatement is initiated. Only UTC time is currently supported. + :type end_time: datetime + :param max_concurrency: Required. The max number of parallel time windows + (ready for execution) for which a rerun is triggered. 
+ :type max_concurrency: int + """ + + _validation = { + 'start_time': {'required': True}, + 'end_time': {'required': True}, + 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + } + + _attribute_map = { + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, + 'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(RerunTumblingWindowTriggerActionParameters, self).__init__(**kwargs) + self.start_time = kwargs.get('start_time', None) + self.end_time = kwargs.get('end_time', None) + self.max_concurrency = kwargs.get('max_concurrency', None) + + +class ResponsysLinkedService(LinkedService): + """Responsys linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of the Responsys server. + :type endpoint: object + :param client_id: Required. The client ID associated with the Responsys + application. Type: string (or Expression with resultType string). + :type client_id: object + :param client_secret: The client secret associated with the Responsys + application. Type: string (or Expression with resultType string). + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. Type: boolean (or + Expression with resultType boolean). + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ResponsysLinkedService, self).__init__(**kwargs) + self.endpoint = kwargs.get('endpoint', None) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Responsys' + + +class ResponsysObjectDataset(Dataset): + """Responsys dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ResponsysObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'ResponsysObject' + + +class ResponsysSource(CopySource): + """A copy activity Responsys source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ResponsysSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'ResponsysSource' + + +class RestResourceDataset(Dataset): + """A Rest service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param relative_url: The relative URL to the resource that the RESTful API + provides. Type: string (or Expression with resultType string). + :type relative_url: object + :param request_method: The HTTP method used to call the RESTful API. The + default is GET. Type: string (or Expression with resultType string). + :type request_method: object + :param request_body: The HTTP request body to the RESTful API if + requestMethod is POST. Type: string (or Expression with resultType + string). + :type request_body: object + :param additional_headers: The additional HTTP headers in the request to + the RESTful API. Type: string (or Expression with resultType string). + :type additional_headers: object + :param pagination_rules: The pagination rules to compose next page + requests. Type: string (or Expression with resultType string). + :type pagination_rules: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, + 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, + 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, + 'pagination_rules': {'key': 'typeProperties.paginationRules', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(RestResourceDataset, self).__init__(**kwargs) + self.relative_url = kwargs.get('relative_url', None) + self.request_method = kwargs.get('request_method', None) + self.request_body = kwargs.get('request_body', None) + self.additional_headers = kwargs.get('additional_headers', None) + self.pagination_rules = kwargs.get('pagination_rules', None) + self.type = 'RestResource' + + +class RestServiceLinkedService(LinkedService): + """Rest Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param url: Required. The base URL of the REST service.
+ :type url: object
+ :param enable_server_certificate_validation: Whether to validate the
+ server-side SSL certificate when connecting to the endpoint. The default
+ value is true. Type: boolean (or Expression with resultType boolean).
+ :type enable_server_certificate_validation: object
+ :param authentication_type: Required. Type of authentication used to
+ connect to the REST service. Possible values include: 'Anonymous',
+ 'Basic', 'AadServicePrincipal', 'ManagedServiceIdentity'
+ :type authentication_type: str or
+ ~azure.mgmt.datafactory.models.RestServiceAuthenticationType
+ :param user_name: The user name used in Basic authentication type.
+ :type user_name: object
+ :param password: The password used in Basic authentication type.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param service_principal_id: The application's client ID used in
+ AadServicePrincipal authentication type.
+ :type service_principal_id: object
+ :param service_principal_key: The application's key used in
+ AadServicePrincipal authentication type.
+ :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
+ :param tenant: The tenant information (domain name or tenant ID) used in
+ AadServicePrincipal authentication type under which your application
+ resides.
+ :type tenant: object
+ :param aad_resource_id: The resource you are requesting authorization to
+ use.
+ :type aad_resource_id: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(RestServiceLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.aad_resource_id = kwargs.get('aad_resource_id', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'RestService' + + +class RestSource(CopySource): + """A copy activity Rest service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param request_method: The HTTP method used to call the RESTful API. The + default is GET. Type: string (or Expression with resultType string). + :type request_method: object + :param request_body: The HTTP request body to the RESTful API if + requestMethod is POST. Type: string (or Expression with resultType + string). + :type request_body: object + :param additional_headers: The additional HTTP headers in the request to + the RESTful API. 
Type: string (or Expression with resultType string).
+ :type additional_headers: object
+ :param pagination_rules: The pagination rules to compose next page
+ requests. Type: string (or Expression with resultType string).
+ :type pagination_rules: object
+ :param http_request_timeout: The timeout (TimeSpan) to get an HTTP
+ response. It is the timeout to get a response, not the timeout to read
+ response data. Default value: 00:01:40. Type: string (or Expression with
+ resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type http_request_timeout: object
+ :param request_interval: The time to wait before sending the next page
+ request.
+ :type request_interval: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'request_method': {'key': 'requestMethod', 'type': 'object'},
+ 'request_body': {'key': 'requestBody', 'type': 'object'},
+ 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'},
+ 'pagination_rules': {'key': 'paginationRules', 'type': 'object'},
+ 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
+ 'request_interval': {'key': 'requestInterval', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(RestSource, self).__init__(**kwargs)
+ self.request_method = kwargs.get('request_method', None)
+ self.request_body = kwargs.get('request_body', None)
+ self.additional_headers = kwargs.get('additional_headers', None)
+ self.pagination_rules = kwargs.get('pagination_rules', None)
+ self.http_request_timeout = kwargs.get('http_request_timeout', None)
+ self.request_interval = kwargs.get('request_interval', None)
+ self.type = 'RestSource'
+
+
+class RetryPolicy(Model):
+ """Execution policy for an activity.
+
+ :param count: Maximum ordinary retry attempts. Default is 0. Type: integer
+ (or Expression with resultType integer), minimum: 0.
+ :type count: object
+ :param interval_in_seconds: Interval between retries in seconds. Default
+ is 30.
+ :type interval_in_seconds: int
+ """
+
+ _validation = {
+ 'interval_in_seconds': {'maximum': 86400, 'minimum': 30},
+ }
+
+ _attribute_map = {
+ 'count': {'key': 'count', 'type': 'object'},
+ 'interval_in_seconds': {'key': 'intervalInSeconds', 'type': 'int'},
+ }
+
+ def __init__(self, **kwargs):
+ super(RetryPolicy, self).__init__(**kwargs)
+ self.count = kwargs.get('count', None)
+ self.interval_in_seconds = kwargs.get('interval_in_seconds', None)
+
+
+class RunFilterParameters(Model):
+ """Query parameters for listing runs.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param continuation_token: The continuation token for getting the next
+ page of results. Null for the first page.
+ :type continuation_token: str
+ :param last_updated_after: Required. The time at or after which the run
+ event was updated in 'ISO 8601' format.
+ :type last_updated_after: datetime
+ :param last_updated_before: Required. The time at or before which the run
+ event was updated in 'ISO 8601' format.
+ :type last_updated_before: datetime
+ :param filters: List of filters.
+ :type filters: list[~azure.mgmt.datafactory.models.RunQueryFilter]
+ :param order_by: List of OrderBy options.
+ :type order_by: list[~azure.mgmt.datafactory.models.RunQueryOrderBy] + """ + + _validation = { + 'last_updated_after': {'required': True}, + 'last_updated_before': {'required': True}, + } + + _attribute_map = { + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + 'last_updated_after': {'key': 'lastUpdatedAfter', 'type': 'iso-8601'}, + 'last_updated_before': {'key': 'lastUpdatedBefore', 'type': 'iso-8601'}, + 'filters': {'key': 'filters', 'type': '[RunQueryFilter]'}, + 'order_by': {'key': 'orderBy', 'type': '[RunQueryOrderBy]'}, + } + + def __init__(self, **kwargs): + super(RunFilterParameters, self).__init__(**kwargs) + self.continuation_token = kwargs.get('continuation_token', None) + self.last_updated_after = kwargs.get('last_updated_after', None) + self.last_updated_before = kwargs.get('last_updated_before', None) + self.filters = kwargs.get('filters', None) + self.order_by = kwargs.get('order_by', None) + + +class RunQueryFilter(Model): + """Query filter option for listing runs. + + All required parameters must be populated in order to send to Azure. + + :param operand: Required. Parameter name to be used for filter. The + allowed operands to query pipeline runs are PipelineName, RunStart, RunEnd + and Status; to query activity runs are ActivityName, ActivityRunStart, + ActivityRunEnd, ActivityType and Status, and to query trigger runs are + TriggerName, TriggerRunTimestamp and Status. Possible values include: + 'PipelineName', 'Status', 'RunStart', 'RunEnd', 'ActivityName', + 'ActivityRunStart', 'ActivityRunEnd', 'ActivityType', 'TriggerName', + 'TriggerRunTimestamp', 'RunGroupId', 'LatestOnly' + :type operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand + :param operator: Required. Operator to be used for filter. Possible values + include: 'Equals', 'NotEquals', 'In', 'NotIn' + :type operator: str or + ~azure.mgmt.datafactory.models.RunQueryFilterOperator + :param values: Required. List of filter values. + :type values: list[str] + """ + + _validation = { + 'operand': {'required': True}, + 'operator': {'required': True}, + 'values': {'required': True}, + } + + _attribute_map = { + 'operand': {'key': 'operand', 'type': 'str'}, + 'operator': {'key': 'operator', 'type': 'str'}, + 'values': {'key': 'values', 'type': '[str]'}, + } + + def __init__(self, **kwargs): + super(RunQueryFilter, self).__init__(**kwargs) + self.operand = kwargs.get('operand', None) + self.operator = kwargs.get('operator', None) + self.values = kwargs.get('values', None) + + +class RunQueryOrderBy(Model): + """An object to provide order by options for listing runs. + + All required parameters must be populated in order to send to Azure. + + :param order_by: Required. Parameter name to be used for order by. The + allowed parameters to order by for pipeline runs are PipelineName, + RunStart, RunEnd and Status; for activity runs are ActivityName, + ActivityRunStart, ActivityRunEnd and Status; for trigger runs are + TriggerName, TriggerRunTimestamp and Status. Possible values include: + 'RunStart', 'RunEnd', 'PipelineName', 'Status', 'ActivityName', + 'ActivityRunStart', 'ActivityRunEnd', 'TriggerName', 'TriggerRunTimestamp' + :type order_by: str or ~azure.mgmt.datafactory.models.RunQueryOrderByField + :param order: Required. Sorting order of the parameter. 
Possible values
+ include: 'ASC', 'DESC'
+ :type order: str or ~azure.mgmt.datafactory.models.RunQueryOrder
+ """
+
+ _validation = {
+ 'order_by': {'required': True},
+ 'order': {'required': True},
+ }
+
+ _attribute_map = {
+ 'order_by': {'key': 'orderBy', 'type': 'str'},
+ 'order': {'key': 'order', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(RunQueryOrderBy, self).__init__(**kwargs)
+ self.order_by = kwargs.get('order_by', None)
+ self.order = kwargs.get('order', None)
+
+
+class SalesforceLinkedService(LinkedService):
+ """Linked service for Salesforce.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param environment_url: The URL of the Salesforce instance. Default is
+ 'https://login.salesforce.com'. To copy data from a sandbox, specify
+ 'https://test.salesforce.com'. To copy data from a custom domain, specify,
+ for example, 'https://[domain].my.salesforce.com'. Type: string (or
+ Expression with resultType string).
+ :type environment_url: object
+ :param username: The username for Basic authentication of the Salesforce
+ instance. Type: string (or Expression with resultType string).
+ :type username: object
+ :param password: The password for Basic authentication of the Salesforce
+ instance.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param security_token: The security token required to remotely access the
+ Salesforce instance.
+ :type security_token: ~azure.mgmt.datafactory.models.SecretBase
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SalesforceLinkedService, self).__init__(**kwargs) + self.environment_url = kwargs.get('environment_url', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.security_token = kwargs.get('security_token', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Salesforce' + + +class SalesforceMarketingCloudLinkedService(LinkedService): + """Salesforce Marketing Cloud linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param client_id: Required. The client ID associated with the Salesforce + Marketing Cloud application. Type: string (or Expression with resultType + string). + :type client_id: object + :param client_secret: The client secret associated with the Salesforce + Marketing Cloud application. Type: string (or Expression with resultType + string). + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. Type: boolean (or + Expression with resultType boolean). + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. 
Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SalesforceMarketingCloudLinkedService, self).__init__(**kwargs) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'SalesforceMarketingCloud' + + +class SalesforceMarketingCloudObjectDataset(Dataset): + """Salesforce Marketing Cloud dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SalesforceMarketingCloudObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'SalesforceMarketingCloudObject' + + +class SalesforceMarketingCloudSource(CopySource): + """A copy activity Salesforce Marketing Cloud source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SalesforceMarketingCloudSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'SalesforceMarketingCloudSource' + + +class SalesforceObjectDataset(Dataset): + """The Salesforce object dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param object_api_name: The Salesforce object API name. Type: string (or
+ Expression with resultType string).
+ :type object_api_name: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(SalesforceObjectDataset, self).__init__(**kwargs)
+ self.object_api_name = kwargs.get('object_api_name', None)
+ self.type = 'SalesforceObject'
+
+
+class SalesforceServiceCloudLinkedService(LinkedService):
+ """Linked service for Salesforce Service Cloud.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param environment_url: The URL of the Salesforce Service Cloud instance.
+ Default is 'https://login.salesforce.com'. To copy data from a sandbox,
+ specify 'https://test.salesforce.com'. To copy data from a custom domain,
+ specify, for example, 'https://[domain].my.salesforce.com'. Type: string
+ (or Expression with resultType string).
+ :type environment_url: object
+ :param username: The username for Basic authentication of the Salesforce
+ instance. Type: string (or Expression with resultType string).
+ :type username: object
+ :param password: The password for Basic authentication of the Salesforce
+ instance.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param security_token: The security token required to remotely access the
+ Salesforce instance.
+ :type security_token: ~azure.mgmt.datafactory.models.SecretBase
+ :param extended_properties: Extended properties appended to the connection
+ string. Type: string (or Expression with resultType string).
+ :type extended_properties: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, + 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SalesforceServiceCloudLinkedService, self).__init__(**kwargs) + self.environment_url = kwargs.get('environment_url', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.security_token = kwargs.get('security_token', None) + self.extended_properties = kwargs.get('extended_properties', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'SalesforceServiceCloud' + + +class SalesforceServiceCloudObjectDataset(Dataset): + """The Salesforce Service Cloud object dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param object_api_name: The Salesforce Service Cloud object API name. + Type: string (or Expression with resultType string). 
+ :type object_api_name: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(SalesforceServiceCloudObjectDataset, self).__init__(**kwargs)
+ self.object_api_name = kwargs.get('object_api_name', None)
+ self.type = 'SalesforceServiceCloudObject'
+
+
+class SalesforceServiceCloudSink(CopySink):
+ """A copy activity Salesforce Service Cloud sink.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param write_batch_size: Write batch size. Type: integer (or Expression
+ with resultType integer), minimum: 0.
+ :type write_batch_size: object
+ :param write_batch_timeout: Write batch timeout. Type: string (or
+ Expression with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type write_batch_timeout: object
+ :param sink_retry_count: Sink retry count. Type: integer (or Expression
+ with resultType integer).
+ :type sink_retry_count: object
+ :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+ resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type sink_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the sink data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param write_behavior: The write behavior for the operation. Default is
+ Insert. Possible values include: 'Insert', 'Upsert'
+ :type write_behavior: str or
+ ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior
+ :param external_id_field_name: The name of the external ID field for the
+ upsert operation. Default value is the 'Id' column. Type: string (or
+ Expression with resultType string).
+ :type external_id_field_name: object
+ :param ignore_null_values: The flag indicating whether or not to ignore
+ null values from the input dataset (except key fields) during a write
+ operation. Default value is false. If set to true, ADF leaves the data in
+ the destination object unchanged during an upsert/update operation and
+ inserts the defined default value during an insert operation; if set to
+ false, ADF updates the data in the destination object to NULL during an
+ upsert/update operation and inserts NULL during an insert operation.
+ Type: boolean (or Expression with resultType boolean).
+ :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SalesforceServiceCloudSink, self).__init__(**kwargs) + self.write_behavior = kwargs.get('write_behavior', None) + self.external_id_field_name = kwargs.get('external_id_field_name', None) + self.ignore_null_values = kwargs.get('ignore_null_values', None) + self.type = 'SalesforceServiceCloudSink' + + +class SalesforceServiceCloudSource(CopySource): + """A copy activity Salesforce Service Cloud source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + :param read_behavior: The read behavior for the operation. Default is + Query. Possible values include: 'Query', 'QueryAll' + :type read_behavior: str or + ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SalesforceServiceCloudSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.read_behavior = kwargs.get('read_behavior', None) + self.type = 'SalesforceServiceCloudSource' + + +class SalesforceSink(CopySink): + """A copy activity Salesforce sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. 
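The source and sink defined above slot into a copy activity; a hedged sketch in which the activity name, dataset names, and the external ID field are all assumptions:

from azure.mgmt.datafactory.models import (
    CopyActivity, DatasetReference,
    SalesforceServiceCloudSink, SalesforceServiceCloudSource)

copy_cases = CopyActivity(
    name='CopyCases',
    inputs=[DatasetReference(reference_name='StagedCases')],
    outputs=[DatasetReference(reference_name='ServiceCloudCases')],
    source=SalesforceServiceCloudSource(
        query='SELECT Id, Status FROM Case'),
    sink=SalesforceServiceCloudSink(
        write_behavior='Upsert',
        external_id_field_name='External_Id__c',  # hypothetical field
        # Leave destination fields untouched when the input value is
        # null (see ignore_null_values above).
        ignore_null_values=True,
    ),
)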
Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param write_behavior: The write behavior for the operation. Default is + Insert. Possible values include: 'Insert', 'Upsert' + :type write_behavior: str or + ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior + :param external_id_field_name: The name of the external ID field for + upsert operation. Default value is 'Id' column. Type: string (or + Expression with resultType string). + :type external_id_field_name: object + :param ignore_null_values: The flag indicating whether or not to ignore + null values from input dataset (except key fields) during write operation. + Default value is false. If set it to true, it means ADF will leave the + data in the destination object unchanged when doing upsert/update + operation and insert defined default value when doing insert operation, + versus ADF will update the data in the destination object to NULL when + doing upsert/update operation and insert NULL value when doing insert + operation. Type: boolean (or Expression with resultType boolean). + :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SalesforceSink, self).__init__(**kwargs) + self.write_behavior = kwargs.get('write_behavior', None) + self.external_id_field_name = kwargs.get('external_id_field_name', None) + self.ignore_null_values = kwargs.get('ignore_null_values', None) + self.type = 'SalesforceSink' + + +class SalesforceSource(CopySource): + """A copy activity Salesforce source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. 
Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + :param read_behavior: The read behavior for the operation. Default is + Query. Possible values include: 'Query', 'QueryAll' + :type read_behavior: str or + ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SalesforceSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.read_behavior = kwargs.get('read_behavior', None) + self.type = 'SalesforceSource' + + +class SapBwCubeDataset(Dataset): + """The SAP BW cube dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. 
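Note that 'QueryAll' drives Salesforce's queryAll endpoint, which also returns soft-deleted and archived records, while 'Query' does not; a one-line sketch:

from azure.mgmt.datafactory.models import SalesforceSource

all_accounts = SalesforceSource(
    query='SELECT Id, IsDeleted FROM Account',
    read_behavior='QueryAll',  # include soft-deleted/archived rows
)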
+ :type type: str + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SapBwCubeDataset, self).__init__(**kwargs) + self.type = 'SapBwCube' + + +class SapBWLinkedService(LinkedService): + """SAP Business Warehouse Linked Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. Host name of the SAP BW instance. Type: string + (or Expression with resultType string). + :type server: object + :param system_number: Required. System number of the BW system. (Usually a + two-digit decimal number represented as a string.) Type: string (or + Expression with resultType string). + :type system_number: object + :param client_id: Required. Client ID of the client on the BW system. + (Usually a three-digit decimal number represented as a string) Type: + string (or Expression with resultType string). + :type client_id: object + :param user_name: Username to access the SAP BW server. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password to access the SAP BW server. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'system_number': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapBWLinkedService, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.system_number = kwargs.get('system_number', None) + self.client_id = kwargs.get('client_id', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'SapBW' + + +class SapBwSource(CopySource): + """A copy activity source for SapBW server via MDX. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: MDX query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapBwSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'SapBwSource' + + +class SapCloudForCustomerLinkedService(LinkedService): + """Linked service for SAP Cloud for Customer. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
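SapBWLinkedService and SapBwSource combine as follows; a sketch with placeholder connection details and the MDX statement elided:

from azure.mgmt.datafactory.models import (
    SapBWLinkedService, SapBwSource, SecureString)

bw_linked_service = SapBWLinkedService(
    server='sapbw.example.com',
    system_number='00',   # two-digit number, passed as a string
    client_id='800',      # three-digit number, passed as a string
    user_name='bwuser',
    password=SecureString(value='<password>'),
)
bw_source = SapBwSource(query='<MDX statement>')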
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The URL of SAP Cloud for Customer OData API. For + example, '[https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1]'. Type: + string (or Expression with resultType string). + :type url: object + :param username: The username for Basic authentication. Type: string (or + Expression with resultType string). + :type username: object + :param password: The password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Either encryptedCredential or username/password must + be provided. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapCloudForCustomerLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'SapCloudForCustomer' + + +class SapCloudForCustomerResourceDataset(Dataset): + """The path of the SAP Cloud for Customer OData entity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. 
+ :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param path: Required. The path of the SAP Cloud for Customer OData + entity. Type: string (or Expression with resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapCloudForCustomerResourceDataset, self).__init__(**kwargs) + self.path = kwargs.get('path', None) + self.type = 'SapCloudForCustomerResource' + + +class SapCloudForCustomerSink(CopySink): + """A copy activity SAP Cloud for Customer sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param write_behavior: The write behavior for the operation. Default is + 'Insert'. 
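Together with the linked service above, this dataset resolves a single OData entity set; a sketch using an assumed tenant, linked-service name, and entity:

from azure.mgmt.datafactory.models import (
    LinkedServiceReference, SapCloudForCustomerLinkedService,
    SapCloudForCustomerResourceDataset, SecureString)

c4c_linked_service = SapCloudForCustomerLinkedService(
    url='https://mytenant.crm.ondemand.com/sap/c4c/odata/v1',
    username='c4c-user',
    password=SecureString(value='<password>'),
)
c4c_leads = SapCloudForCustomerResourceDataset(
    linked_service_name=LinkedServiceReference(reference_name='SapC4CLS'),
    path='LeadCollection',  # hypothetical entity set name
)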
Possible values include: 'Insert', 'Update' + :type write_behavior: str or + ~azure.mgmt.datafactory.models.SapCloudForCustomerSinkWriteBehavior + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SapCloudForCustomerSink, self).__init__(**kwargs) + self.write_behavior = kwargs.get('write_behavior', None) + self.type = 'SapCloudForCustomerSink' + + +class SapCloudForCustomerSource(CopySource): + """A copy activity source for SAP Cloud for Customer source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: SAP Cloud for Customer OData query. For example, "$top=1". + Type: string (or Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapCloudForCustomerSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'SapCloudForCustomerSource' + + +class SapEccLinkedService(LinkedService): + """Linked service for SAP ERP Central Component(SAP ECC). + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. 
Constant filled by server. + :type type: str + :param url: Required. The URL of SAP ECC OData API. For example, + '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: string (or + Expression with resultType string). + :type url: str + :param username: The username for Basic authentication. Type: string (or + Expression with resultType string). + :type username: str + :param password: The password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Either encryptedCredential or username/password must + be provided. Type: string (or Expression with resultType string). + :type encrypted_credential: str + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'str'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SapEccLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'SapEcc' + + +class SapEccResourceDataset(Dataset): + """The path of the SAP ECC OData entity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param path: Required. The path of the SAP ECC OData entity. Type: string + (or Expression with resultType string). 
+ :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapEccResourceDataset, self).__init__(**kwargs) + self.path = kwargs.get('path', None) + self.type = 'SapEccResource' + + +class SapEccSource(CopySource): + """A copy activity source for SAP ECC source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: SAP ECC OData query. For example, "$top=1". Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapEccSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'SapEccSource' + + +class SapHanaLinkedService(LinkedService): + """SAP HANA Linked Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: SAP HANA ODBC connection string. 
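Unlike most linked services in this file, SapEccLinkedService maps url, username, and encrypted_credential as 'str' rather than 'object' in its _attribute_map above; a placeholder sketch of the ECC pair:

from azure.mgmt.datafactory.models import (
    SapEccLinkedService, SapEccSource, SecureString)

ecc_linked_service = SapEccLinkedService(
    url='https://sapecc.example.com:8000/sap/opu/odata/sap/zservice/',
    username='ecc-user',
    password=SecureString(value='<password>'),
)
ecc_source = SapEccSource(query='$top=10')  # plain OData query options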
Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param server: Required. Host name of the SAP HANA server. Type: string + (or Expression with resultType string). + :type server: object + :param authentication_type: The authentication type to be used to connect + to the SAP HANA server. Possible values include: 'Basic', 'Windows' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.SapHanaAuthenticationType + :param user_name: Username to access the SAP HANA server. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password to access the SAP HANA server. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapHanaLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.server = kwargs.get('server', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'SapHana' + + +class SapHanaSource(CopySource): + """A copy activity source for SAP HANA source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: SAP HANA Sql query. Type: string (or Expression with + resultType string). + :type query: object + :param packet_size: The packet size of data read from SAP HANA. 
Type: + integer(or Expression with resultType integer). + :type packet_size: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'packet_size': {'key': 'packetSize', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapHanaSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.packet_size = kwargs.get('packet_size', None) + self.type = 'SapHanaSource' + + +class SapHanaTableDataset(Dataset): + """SAP HANA Table properties. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param sap_hana_table_dataset_schema: The schema name of SAP HANA. Type: + string (or Expression with resultType string). + :type sap_hana_table_dataset_schema: object + :param table: The table name of SAP HANA. Type: string (or Expression with + resultType string). 
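A matching pair for SAP HANA; the address, credentials, and packet size below are illustrative only:

from azure.mgmt.datafactory.models import (
    SapHanaLinkedService, SapHanaSource, SecureString)

hana_linked_service = SapHanaLinkedService(
    server='hana.example.com:30015',
    authentication_type='Basic',
    user_name='hanauser',
    password=SecureString(value='<password>'),
)
hana_source = SapHanaSource(
    query='SELECT * FROM "MYSCHEMA"."MYTABLE"',
    packet_size=2097152,  # tune per workload; see service docs for units
)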
+ :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sap_hana_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapHanaTableDataset, self).__init__(**kwargs) + self.sap_hana_table_dataset_schema = kwargs.get('sap_hana_table_dataset_schema', None) + self.table = kwargs.get('table', None) + self.type = 'SapHanaTable' + + +class SapOpenHubLinkedService(LinkedService): + """SAP Business Warehouse Open Hub Destination Linked Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. Host name of the SAP BW instance where the open + hub destination is located. Type: string (or Expression with resultType + string). + :type server: object + :param system_number: Required. System number of the BW system where the + open hub destination is located. (Usually a two-digit decimal number + represented as a string.) Type: string (or Expression with resultType + string). + :type system_number: object + :param client_id: Required. Client ID of the client on the BW system where + the open hub destination is located. (Usually a three-digit decimal number + represented as a string) Type: string (or Expression with resultType + string). + :type client_id: object + :param language: Language of the BW system where the open hub destination + is located. The default value is EN. Type: string (or Expression with + resultType string). + :type language: object + :param user_name: Username to access the SAP BW server where the open hub + destination is located. Type: string (or Expression with resultType + string). + :type user_name: object + :param password: Password to access the SAP BW server where the open hub + destination is located. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'system_number': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'language': {'key': 'typeProperties.language', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapOpenHubLinkedService, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.system_number = kwargs.get('system_number', None) + self.client_id = kwargs.get('client_id', None) + self.language = kwargs.get('language', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'SapOpenHub' + + +class SapOpenHubSource(CopySource): + """A copy activity source for SAP Business Warehouse Open Hub Destination + source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param exclude_last_request: Whether to exclude the records of the last + request. The default value is true. Type: boolean (or Expression with + resultType boolean). + :type exclude_last_request: object + :param base_request_id: The ID of request for delta loading. Once it is + set, only data with requestId larger than the value of this property will + be retrieved. The default value is 0. Type: integer (or Expression with + resultType integer ). 
+ :type base_request_id: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, + 'base_request_id': {'key': 'baseRequestId', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapOpenHubSource, self).__init__(**kwargs) + self.exclude_last_request = kwargs.get('exclude_last_request', None) + self.base_request_id = kwargs.get('base_request_id', None) + self.type = 'SapOpenHubSource' + + +class SapOpenHubTableDataset(Dataset): + """Sap Business Warehouse Open Hub Destination Table properties. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param open_hub_destination_name: Required. The name of the Open Hub + Destination with destination type as Database Table. Type: string (or + Expression with resultType string). + :type open_hub_destination_name: object + :param exclude_last_request: Whether to exclude the records of the last + request. The default value is true. Type: boolean (or Expression with + resultType boolean). + :type exclude_last_request: object + :param base_request_id: The ID of request for delta loading. Once it is + set, only data with requestId larger than the value of this property will + be retrieved. The default value is 0. Type: integer (or Expression with + resultType integer ). 
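The two knobs above enable incremental (delta) reads from an open hub destination; a short sketch with an assumed request ID watermark:

from azure.mgmt.datafactory.models import SapOpenHubSource

open_hub_source = SapOpenHubSource(
    exclude_last_request=True,  # skip records of the most recent request
    base_request_id=1000,       # only rows with requestId > 1000
)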
+ :type base_request_id: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'open_hub_destination_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'open_hub_destination_name': {'key': 'typeProperties.openHubDestinationName', 'type': 'object'}, + 'exclude_last_request': {'key': 'typeProperties.excludeLastRequest', 'type': 'object'}, + 'base_request_id': {'key': 'typeProperties.baseRequestId', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapOpenHubTableDataset, self).__init__(**kwargs) + self.open_hub_destination_name = kwargs.get('open_hub_destination_name', None) + self.exclude_last_request = kwargs.get('exclude_last_request', None) + self.base_request_id = kwargs.get('base_request_id', None) + self.type = 'SapOpenHubTable' + + +class SapTableLinkedService(LinkedService): + """SAP Table Linked Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Host name of the SAP instance where the table is located. + Type: string (or Expression with resultType string). + :type server: object + :param system_number: System number of the SAP system where the table is + located. (Usually a two-digit decimal number represented as a string.) + Type: string (or Expression with resultType string). + :type system_number: object + :param client_id: Client ID of the client on the SAP system where the + table is located. (Usually a three-digit decimal number represented as a + string) Type: string (or Expression with resultType string). + :type client_id: object + :param language: Language of the SAP system where the table is located. + The default value is EN. Type: string (or Expression with resultType + string). + :type language: object + :param system_id: SystemID of the SAP system where the table is located. + Type: string (or Expression with resultType string). + :type system_id: object + :param user_name: Username to access the SAP server where the table is + located. Type: string (or Expression with resultType string). + :type user_name: object + :param password: Password to access the SAP server where the table is + located. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param message_server: The hostname of the SAP Message Server. 
Type:
+ string (or Expression with resultType string).
+ :type message_server: object
+ :param message_server_service: The service name or port number of the
+ Message Server. Type: string (or Expression with resultType string).
+ :type message_server_service: object
+ :param snc_mode: SNC activation indicator to access the SAP server where
+ the table is located. Must be either 0 (off) or 1 (on). Type: string (or
+ Expression with resultType string).
+ :type snc_mode: object
+ :param snc_my_name: Initiator's SNC name to access the SAP server where
+ the table is located. Type: string (or Expression with resultType string).
+ :type snc_my_name: object
+ :param snc_partner_name: Communication partner's SNC name to access the
+ SAP server where the table is located. Type: string (or Expression with
+ resultType string).
+ :type snc_partner_name: object
+ :param snc_library_path: External security product's library to access the
+ SAP server where the table is located. Type: string (or Expression with
+ resultType string).
+ :type snc_library_path: object
+ :param snc_qop: SNC Quality of Protection. Allowed values include: 1, 2, 3,
+ 8, 9. Type: string (or Expression with resultType string).
+ :type snc_qop: object
+ :param logon_group: The Logon Group for the SAP System. Type: string (or
+ Expression with resultType string).
+ :type logon_group: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'server': {'key': 'typeProperties.server', 'type': 'object'},
+ 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'},
+ 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'},
+ 'language': {'key': 'typeProperties.language', 'type': 'object'},
+ 'system_id': {'key': 'typeProperties.systemId', 'type': 'object'},
+ 'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
+ 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+ 'message_server': {'key': 'typeProperties.messageServer', 'type': 'object'},
+ 'message_server_service': {'key': 'typeProperties.messageServerService', 'type': 'object'},
+ 'snc_mode': {'key': 'typeProperties.sncMode', 'type': 'object'},
+ 'snc_my_name': {'key': 'typeProperties.sncMyName', 'type': 'object'},
+ 'snc_partner_name': {'key': 'typeProperties.sncPartnerName', 'type': 'object'},
+ 'snc_library_path': {'key': 'typeProperties.sncLibraryPath', 'type': 'object'},
+ 'snc_qop': {'key': 'typeProperties.sncQop', 'type': 'object'},
+ 'logon_group': {'key': 'typeProperties.logonGroup', 'type': 'object'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(SapTableLinkedService, self).__init__(**kwargs)
+ self.server = kwargs.get('server', None)
+ self.system_number = kwargs.get('system_number', None)
+ self.client_id = kwargs.get('client_id', None)
+ self.language = kwargs.get('language', None)
+ self.system_id = kwargs.get('system_id', None)
+ self.user_name = kwargs.get('user_name', None)
+ self.password = kwargs.get('password', None)
+ self.message_server = kwargs.get('message_server', None)
+ self.message_server_service = kwargs.get('message_server_service', None)
+ self.snc_mode = kwargs.get('snc_mode', None)
+ self.snc_my_name = kwargs.get('snc_my_name', None)
+ self.snc_partner_name = kwargs.get('snc_partner_name', None)
+ self.snc_library_path = kwargs.get('snc_library_path', None)
+ self.snc_qop = kwargs.get('snc_qop', None)
+ self.logon_group = kwargs.get('logon_group', None)
+ self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.type = 'SapTable'
+
+
+class SapTablePartitionSettings(Model):
+ """The settings that will be leveraged for SAP table source partitioning.
+
+ :param partition_column_name: The name of the column that will be used for
+ range partitioning. Type: string (or Expression with resultType
+ string).
+ :type partition_column_name: object
+ :param partition_upper_bound: The maximum value of the column specified in
+ partitionColumnName that will be used for range partitioning.
+ Type: string (or Expression with resultType string).
+ :type partition_upper_bound: object
+ :param partition_lower_bound: The minimum value of the column specified in
+ partitionColumnName that will be used for range partitioning.
+ Type: string (or Expression with resultType string).
+ :type partition_lower_bound: object
+ :param max_partitions_number: The maximum number of partitions into which
+ the table will be split. Type: integer (or Expression with resultType
+ integer).
+ :type max_partitions_number: object
+ """
+
+ _attribute_map = {
+ 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'},
+ 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'},
+ 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'},
+ 'max_partitions_number': {'key': 'maxPartitionsNumber', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(SapTablePartitionSettings, self).__init__(**kwargs)
+ self.partition_column_name = kwargs.get('partition_column_name', None)
+ self.partition_upper_bound = kwargs.get('partition_upper_bound', None)
+ self.partition_lower_bound = kwargs.get('partition_lower_bound', None)
+ self.max_partitions_number = kwargs.get('max_partitions_number', None)
+
+
+class SapTableResourceDataset(Dataset):
+ """SAP Table Resource properties.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset. Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
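SapTablePartitionSettings feeds the partition_option/partition_settings pair on the SapTableSource defined just below; a sketch in which the column name, bounds, and filter are assumptions:

from azure.mgmt.datafactory.models import (
    SapTablePartitionSettings, SapTableSource)

partition_settings = SapTablePartitionSettings(
    partition_column_name='GJAHR',  # hypothetical fiscal-year column
    partition_lower_bound='2010',
    partition_upper_bound='2019',
    max_partitions_number=10,
)
table_source = SapTableSource(
    rfc_table_fields='column0, column1',
    rfc_table_options="COLUMN0 EQ 'SOMEVALUE'",  # SAP-style filter clause
    partition_option='PartitionOnCalendarYear',
    partition_settings=partition_settings,
)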
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: Required. The name of the SAP Table. Type: string (or + Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'table_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapTableResourceDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'SapTableResource' + + +class SapTableSource(CopySource): + """A copy activity source for an SAP table. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param row_count: The number of rows to be retrieved. Type: integer (or + Expression with resultType integer). + :type row_count: object + :param row_skips: The number of rows that will be skipped. Type: integer + (or Expression with resultType integer). + :type row_skips: object + :param rfc_table_fields: The fields of the SAP table that will be + retrieved. For example, column0, column1. Type: string (or Expression with + resultType string). + :type rfc_table_fields: object + :param rfc_table_options: The options for the filtering of the SAP Table. + For example, COLUMN0 EQ SOME VALUE. Type: string (or Expression with + resultType string). + :type rfc_table_options: object + :param batch_size: Specifies the maximum number of rows that will be + retrieved at a time when retrieving data from SAP Table. Type: integer (or + Expression with resultType integer). + :type batch_size: object + :param custom_rfc_read_table_function_module: Specifies the custom RFC + function module that will be used to read data from SAP Table.
Type: + string (or Expression with resultType string). + :type custom_rfc_read_table_function_module: object + :param partition_option: The partition mechanism that will be used for SAP + table read in parallel. Possible values include: 'None', 'PartitionOnInt', + 'PartitionOnCalendarYear', 'PartitionOnCalendarMonth', + 'PartitionOnCalendarDate', 'PartitionOnTime' + :type partition_option: str or + ~azure.mgmt.datafactory.models.SapTablePartitionOption + :param partition_settings: The settings that will be leveraged for SAP + table source partitioning. + :type partition_settings: + ~azure.mgmt.datafactory.models.SapTablePartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'row_count': {'key': 'rowCount', 'type': 'object'}, + 'row_skips': {'key': 'rowSkips', 'type': 'object'}, + 'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'}, + 'rfc_table_options': {'key': 'rfcTableOptions', 'type': 'object'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, + } + + def __init__(self, **kwargs): + super(SapTableSource, self).__init__(**kwargs) + self.row_count = kwargs.get('row_count', None) + self.row_skips = kwargs.get('row_skips', None) + self.rfc_table_fields = kwargs.get('rfc_table_fields', None) + self.rfc_table_options = kwargs.get('rfc_table_options', None) + self.batch_size = kwargs.get('batch_size', None) + self.custom_rfc_read_table_function_module = kwargs.get('custom_rfc_read_table_function_module', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) + self.type = 'SapTableSource' + + +class ScheduleTrigger(MultiplePipelineTrigger): + """Trigger that creates pipeline runs periodically, on schedule. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param pipelines: Pipelines that need to be started. + :type pipelines: + list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :param recurrence: Required. Recurrence schedule configuration. 
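# Reviewer note: a sketch of wiring the partition settings into the new
# SapTableSource above; partition_option uses one of the enum values listed
# in the docstring, and all literal values are hypothetical.
from azure.mgmt.datafactory.models import SapTablePartitionSettings, SapTableSource

source = SapTableSource(
    row_count=10000,                               # cap on rows retrieved
    partition_option='PartitionOnCalendarMonth',   # parallel read per month
    partition_settings=SapTablePartitionSettings(
        partition_column_name='CALMONTH',          # hypothetical column
        partition_lower_bound='201801',
        partition_upper_bound='201812',
        max_partitions_number=4,
    ),
)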
+ :type recurrence: ~azure.mgmt.datafactory.models.ScheduleTriggerRecurrence + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'recurrence': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'recurrence': {'key': 'typeProperties.recurrence', 'type': 'ScheduleTriggerRecurrence'}, + } + + def __init__(self, **kwargs): + super(ScheduleTrigger, self).__init__(**kwargs) + self.recurrence = kwargs.get('recurrence', None) + self.type = 'ScheduleTrigger' + + +class ScheduleTriggerRecurrence(Model): + """The workflow trigger recurrence. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param frequency: The frequency. Possible values include: 'NotSpecified', + 'Minute', 'Hour', 'Day', 'Week', 'Month', 'Year' + :type frequency: str or ~azure.mgmt.datafactory.models.RecurrenceFrequency + :param interval: The interval. + :type interval: int + :param start_time: The start time. + :type start_time: datetime + :param end_time: The end time. + :type end_time: datetime + :param time_zone: The time zone. + :type time_zone: str + :param schedule: The recurrence schedule. + :type schedule: ~azure.mgmt.datafactory.models.RecurrenceSchedule + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'frequency': {'key': 'frequency', 'type': 'str'}, + 'interval': {'key': 'interval', 'type': 'int'}, + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, + 'time_zone': {'key': 'timeZone', 'type': 'str'}, + 'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'}, + } + + def __init__(self, **kwargs): + super(ScheduleTriggerRecurrence, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.frequency = kwargs.get('frequency', None) + self.interval = kwargs.get('interval', None) + self.start_time = kwargs.get('start_time', None) + self.end_time = kwargs.get('end_time', None) + self.time_zone = kwargs.get('time_zone', None) + self.schedule = kwargs.get('schedule', None) + + +class ScriptAction(Model): + """Custom script action to run on HDI ondemand cluster once it's up. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. The user provided name of the script action. + :type name: str + :param uri: Required. The URI for the script action. + :type uri: str + :param roles: Required. The node types on which the script action should + be executed. + :type roles: object + :param parameters: The parameters for the script action. 
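# Reviewer note: a sketch of a 15-minute ScheduleTrigger built from the two
# models above; TriggerPipelineReference and PipelineReference come from the
# same models package, and the pipeline name is a placeholder.
from datetime import datetime
from azure.mgmt.datafactory.models import (
    PipelineReference, ScheduleTrigger, ScheduleTriggerRecurrence,
    TriggerPipelineReference)

trigger = ScheduleTrigger(
    recurrence=ScheduleTriggerRecurrence(
        frequency='Minute',                  # enum value from the docstring
        interval=15,                         # fire every 15 minutes
        start_time=datetime(2019, 6, 7),     # first eligible run
        time_zone='UTC',
    ),
    pipelines=[TriggerPipelineReference(
        pipeline_reference=PipelineReference(reference_name='MyPipeline'))],
)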
+ :type parameters: str + """ + + _validation = { + 'name': {'required': True}, + 'uri': {'required': True}, + 'roles': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'uri': {'key': 'uri', 'type': 'str'}, + 'roles': {'key': 'roles', 'type': 'object'}, + 'parameters': {'key': 'parameters', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ScriptAction, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.uri = kwargs.get('uri', None) + self.roles = kwargs.get('roles', None) + self.parameters = kwargs.get('parameters', None) + + +class SecureString(SecretBase): + """Azure Data Factory secure string definition. The string value will be + masked with asterisks '*' during Get or List API calls. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param value: Required. Value of secure string. + :type value: str + """ + + _validation = { + 'type': {'required': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SecureString, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.type = 'SecureString' + + +class SelfDependencyTumblingWindowTriggerReference(DependencyReference): + """Self referenced tumbling window trigger dependency. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param offset: Required. Timespan applied to the start time of a tumbling + window when evaluating dependency. + :type offset: str + :param size: The size of the window when evaluating the dependency. If + undefined the frequency of the tumbling window will be used. + :type size: str + """ + + _validation = { + 'type': {'required': True}, + 'offset': {'required': True, 'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'offset': {'key': 'offset', 'type': 'str'}, + 'size': {'key': 'size', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SelfDependencyTumblingWindowTriggerReference, self).__init__(**kwargs) + self.offset = kwargs.get('offset', None) + self.size = kwargs.get('size', None) + self.type = 'SelfDependencyTumblingWindowTriggerReference' + + +class SelfHostedIntegrationRuntime(IntegrationRuntime): + """Self-hosted integration runtime. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Integration runtime description. + :type description: str + :param type: Required. Constant filled by server. 
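# Reviewer note: a sketch exercising the offset/size timespan pattern that
# SelfDependencyTumblingWindowTriggerReference validates above; both values
# are hypothetical and must match ((\d+)\.)?(\d\d):(mm):(ss), 8-15 chars.
from azure.mgmt.datafactory.models import (
    SelfDependencyTumblingWindowTriggerReference)

dependency = SelfDependencyTumblingWindowTriggerReference(
    offset='01:00:00',   # shift the evaluated window start by one hour
    size='01:00:00',     # optional; if omitted, the trigger's own frequency
)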
+ :type type: str + :param linked_info: + :type linked_info: + ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeType + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_info': {'key': 'typeProperties.linkedInfo', 'type': 'LinkedIntegrationRuntimeType'}, + } + + def __init__(self, **kwargs): + super(SelfHostedIntegrationRuntime, self).__init__(**kwargs) + self.linked_info = kwargs.get('linked_info', None) + self.type = 'SelfHosted' + + +class SelfHostedIntegrationRuntimeNode(Model): + """Properties of Self-hosted integration runtime node. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar node_name: Name of the integration runtime node. + :vartype node_name: str + :ivar machine_name: Machine name of the integration runtime node. + :vartype machine_name: str + :ivar host_service_uri: URI for the host machine of the integration + runtime. + :vartype host_service_uri: str + :ivar status: Status of the integration runtime node. Possible values + include: 'NeedRegistration', 'Online', 'Limited', 'Offline', 'Upgrading', + 'Initializing', 'InitializeFailed' + :vartype status: str or + ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNodeStatus + :ivar capabilities: The integration runtime capabilities dictionary + :vartype capabilities: dict[str, str] + :ivar version_status: Status of the integration runtime node version. + :vartype version_status: str + :ivar version: Version of the integration runtime node. + :vartype version: str + :ivar register_time: The time at which the integration runtime node was + registered in ISO8601 format. + :vartype register_time: datetime + :ivar last_connect_time: The most recent time at which the integration + runtime was connected in ISO8601 format. + :vartype last_connect_time: datetime + :ivar expiry_time: The time at which the integration runtime will expire + in ISO8601 format. + :vartype expiry_time: datetime + :ivar last_start_time: The time the node last started up. + :vartype last_start_time: datetime + :ivar last_stop_time: The integration runtime node last stop time. + :vartype last_stop_time: datetime + :ivar last_update_result: The result of the last integration runtime node + update. Possible values include: 'None', 'Succeed', 'Fail' + :vartype last_update_result: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeUpdateResult + :ivar last_start_update_time: The last time for the integration runtime + node update start. + :vartype last_start_update_time: datetime + :ivar last_end_update_time: The last time for the integration runtime node + update end. + :vartype last_end_update_time: datetime + :ivar is_active_dispatcher: Indicates whether this node is the active + dispatcher for integration runtime requests. + :vartype is_active_dispatcher: bool + :ivar concurrent_jobs_limit: Maximum concurrent jobs on the integration + runtime node. + :vartype concurrent_jobs_limit: int + :ivar max_concurrent_jobs: The maximum concurrent jobs in this integration + runtime. 
+ :vartype max_concurrent_jobs: int + """ + + _validation = { + 'node_name': {'readonly': True}, + 'machine_name': {'readonly': True}, + 'host_service_uri': {'readonly': True}, + 'status': {'readonly': True}, + 'capabilities': {'readonly': True}, + 'version_status': {'readonly': True}, + 'version': {'readonly': True}, + 'register_time': {'readonly': True}, + 'last_connect_time': {'readonly': True}, + 'expiry_time': {'readonly': True}, + 'last_start_time': {'readonly': True}, + 'last_stop_time': {'readonly': True}, + 'last_update_result': {'readonly': True}, + 'last_start_update_time': {'readonly': True}, + 'last_end_update_time': {'readonly': True}, + 'is_active_dispatcher': {'readonly': True}, + 'concurrent_jobs_limit': {'readonly': True}, + 'max_concurrent_jobs': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'node_name': {'key': 'nodeName', 'type': 'str'}, + 'machine_name': {'key': 'machineName', 'type': 'str'}, + 'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'capabilities': {'key': 'capabilities', 'type': '{str}'}, + 'version_status': {'key': 'versionStatus', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + 'register_time': {'key': 'registerTime', 'type': 'iso-8601'}, + 'last_connect_time': {'key': 'lastConnectTime', 'type': 'iso-8601'}, + 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, + 'last_start_time': {'key': 'lastStartTime', 'type': 'iso-8601'}, + 'last_stop_time': {'key': 'lastStopTime', 'type': 'iso-8601'}, + 'last_update_result': {'key': 'lastUpdateResult', 'type': 'str'}, + 'last_start_update_time': {'key': 'lastStartUpdateTime', 'type': 'iso-8601'}, + 'last_end_update_time': {'key': 'lastEndUpdateTime', 'type': 'iso-8601'}, + 'is_active_dispatcher': {'key': 'isActiveDispatcher', 'type': 'bool'}, + 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, + 'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(SelfHostedIntegrationRuntimeNode, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.node_name = None + self.machine_name = None + self.host_service_uri = None + self.status = None + self.capabilities = None + self.version_status = None + self.version = None + self.register_time = None + self.last_connect_time = None + self.expiry_time = None + self.last_start_time = None + self.last_stop_time = None + self.last_update_result = None + self.last_start_update_time = None + self.last_end_update_time = None + self.is_active_dispatcher = None + self.concurrent_jobs_limit = None + self.max_concurrent_jobs = None + + +class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): + """Self-hosted integration runtime status. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :ivar data_factory_name: The data factory name which the integration + runtime belongs to. + :vartype data_factory_name: str + :ivar state: The state of the integration runtime.
Possible values include: + 'Initial', 'Stopped', 'Started', 'Starting', 'Stopping', + 'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied' + :vartype state: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeState + :param type: Required. Constant filled by server. + :type type: str + :ivar create_time: The time at which the integration runtime was created, + in ISO8601 format. + :vartype create_time: datetime + :ivar task_queue_id: The task queue id of the integration runtime. + :vartype task_queue_id: str + :ivar internal_channel_encryption: It is used to set the encryption mode + for node-node communication channel (when more than 2 self-hosted + integration runtime nodes exist). Possible values include: 'NotSet', + 'SslEncrypted', 'NotEncrypted' + :vartype internal_channel_encryption: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeInternalChannelEncryptionMode + :ivar version: Version of the integration runtime. + :vartype version: str + :param nodes: The list of nodes for this integration runtime. + :type nodes: + list[~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode] + :ivar scheduled_update_date: The date at which the integration runtime + will be scheduled to update, in ISO8601 format. + :vartype scheduled_update_date: datetime + :ivar update_delay_offset: The time in the date scheduled by service to + update the integration runtime, e.g., PT03H is 3 hours + :vartype update_delay_offset: str + :ivar local_time_zone_offset: The local time zone offset in hours. + :vartype local_time_zone_offset: str + :ivar capabilities: Object with additional information about integration + runtime capabilities. + :vartype capabilities: dict[str, str] + :ivar service_urls: The URLs for the services used in integration runtime + backend service. + :vartype service_urls: list[str] + :ivar auto_update: Whether Self-hosted integration runtime auto update has + been turned on. Possible values include: 'On', 'Off' + :vartype auto_update: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate + :ivar version_status: Status of the integration runtime version. + :vartype version_status: str + :param links: The list of linked integration runtimes that are created to + share with this integration runtime. + :type links: list[~azure.mgmt.datafactory.models.LinkedIntegrationRuntime] + :ivar pushed_version: The version that the integration runtime is going to + update to. + :vartype pushed_version: str + :ivar latest_version: The latest version on download center. + :vartype latest_version: str + :ivar auto_update_eta: The estimated time when the self-hosted integration + runtime will be updated. 
+ :vartype auto_update_eta: datetime + """ + + _validation = { + 'data_factory_name': {'readonly': True}, + 'state': {'readonly': True}, + 'type': {'required': True}, + 'create_time': {'readonly': True}, + 'task_queue_id': {'readonly': True}, + 'internal_channel_encryption': {'readonly': True}, + 'version': {'readonly': True}, + 'scheduled_update_date': {'readonly': True}, + 'update_delay_offset': {'readonly': True}, + 'local_time_zone_offset': {'readonly': True}, + 'capabilities': {'readonly': True}, + 'service_urls': {'readonly': True}, + 'auto_update': {'readonly': True}, + 'version_status': {'readonly': True}, + 'pushed_version': {'readonly': True}, + 'latest_version': {'readonly': True}, + 'auto_update_eta': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, + 'state': {'key': 'state', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'create_time': {'key': 'typeProperties.createTime', 'type': 'iso-8601'}, + 'task_queue_id': {'key': 'typeProperties.taskQueueId', 'type': 'str'}, + 'internal_channel_encryption': {'key': 'typeProperties.internalChannelEncryption', 'type': 'str'}, + 'version': {'key': 'typeProperties.version', 'type': 'str'}, + 'nodes': {'key': 'typeProperties.nodes', 'type': '[SelfHostedIntegrationRuntimeNode]'}, + 'scheduled_update_date': {'key': 'typeProperties.scheduledUpdateDate', 'type': 'iso-8601'}, + 'update_delay_offset': {'key': 'typeProperties.updateDelayOffset', 'type': 'str'}, + 'local_time_zone_offset': {'key': 'typeProperties.localTimeZoneOffset', 'type': 'str'}, + 'capabilities': {'key': 'typeProperties.capabilities', 'type': '{str}'}, + 'service_urls': {'key': 'typeProperties.serviceUrls', 'type': '[str]'}, + 'auto_update': {'key': 'typeProperties.autoUpdate', 'type': 'str'}, + 'version_status': {'key': 'typeProperties.versionStatus', 'type': 'str'}, + 'links': {'key': 'typeProperties.links', 'type': '[LinkedIntegrationRuntime]'}, + 'pushed_version': {'key': 'typeProperties.pushedVersion', 'type': 'str'}, + 'latest_version': {'key': 'typeProperties.latestVersion', 'type': 'str'}, + 'auto_update_eta': {'key': 'typeProperties.autoUpdateETA', 'type': 'iso-8601'}, + } + + def __init__(self, **kwargs): + super(SelfHostedIntegrationRuntimeStatus, self).__init__(**kwargs) + self.create_time = None + self.task_queue_id = None + self.internal_channel_encryption = None + self.version = None + self.nodes = kwargs.get('nodes', None) + self.scheduled_update_date = None + self.update_delay_offset = None + self.local_time_zone_offset = None + self.capabilities = None + self.service_urls = None + self.auto_update = None + self.version_status = None + self.links = kwargs.get('links', None) + self.pushed_version = None + self.latest_version = None + self.auto_update_eta = None + self.type = 'SelfHosted' + + +class ServiceNowLinkedService(LinkedService): + """ServiceNow server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of the ServiceNow server. (i.e. + .service-now.com) + :type endpoint: object + :param authentication_type: Required. The authentication type to use. + Possible values include: 'Basic', 'OAuth2' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.ServiceNowAuthenticationType + :param username: The user name used to connect to the ServiceNow server + for Basic and OAuth2 authentication. + :type username: object + :param password: The password corresponding to the user name for Basic and + OAuth2 authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param client_id: The client id for OAuth2 authentication. + :type client_id: object + :param client_secret: The client secret for OAuth2 authentication. + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ServiceNowLinkedService, self).__init__(**kwargs) + self.endpoint = kwargs.get('endpoint', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'ServiceNow' + + +class ServiceNowObjectDataset(Dataset): + """ServiceNow server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. 
Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ServiceNowObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'ServiceNowObject' + + +class ServiceNowSource(CopySource): + """A copy activity ServiceNow server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ServiceNowSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'ServiceNowSource' + + +class SetVariableActivity(ControlActivity): + """Set value for a Variable. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param variable_name: Name of the variable whose value needs to be set. 
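# Reviewer note: a sketch of pairing the ServiceNow models above; the linked
# service reference name and table are placeholders, and the query is left
# unset since its accepted syntax is not defined in this patch.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, ServiceNowObjectDataset, ServiceNowSource)

dataset = ServiceNowObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='ServiceNowLS'),
    table_name='incident',   # hypothetical ServiceNow table
)
source = ServiceNowSource()  # query is optional and omitted in this sketch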
+ :type variable_name: str + :param value: Value to be set. Could be a static value or Expression + :type value: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, + 'value': {'key': 'typeProperties.value', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SetVariableActivity, self).__init__(**kwargs) + self.variable_name = kwargs.get('variable_name', None) + self.value = kwargs.get('value', None) + self.type = 'SetVariable' + + +class SftpLocation(DatasetLocation): + """The location of SFTP dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SftpLocation, self).__init__(**kwargs) + + +class SftpReadSettings(StoreReadSettings): + """Sftp read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Sftp wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). 
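# Reviewer note: a sketch of the SetVariableActivity defined above; the
# activity name, variable name, and expression are placeholders.
from azure.mgmt.datafactory.models import SetVariableActivity

set_status = SetVariableActivity(
    name='SetStatus',                           # required activity name
    variable_name='status',                     # pipeline variable to set
    value="@concat('run-', pipeline().RunId)",  # static value or expression
)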
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SftpReadSettings, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + + +class SftpServerLinkedService(LinkedService): + """A linked service for an SSH File Transfer Protocol (SFTP) server. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The SFTP server host name. Type: string (or + Expression with resultType string). + :type host: object + :param port: The TCP port number that the SFTP server uses to listen for + client connections. Default value is 22. Type: integer (or Expression with + resultType integer), minimum: 0. + :type port: object + :param authentication_type: The authentication type to be used to connect + to the SFTP server. Possible values include: 'Basic', 'SshPublicKey' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.SftpAuthenticationType + :param user_name: The username used to log on to the SFTP server. Type: + string (or Expression with resultType string). + :type user_name: object + :param password: Password to log on to the SFTP server for Basic + authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + :param private_key_path: The SSH private key file path for SshPublicKey + authentication. Only valid for on-premises copy. For on-premises copy with + SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent + should be specified. SSH private key should be OpenSSH format. Type: + string (or Expression with resultType string).
+ :type private_key_path: object + :param private_key_content: Base64 encoded SSH private key content for + SshPublicKey authentication. For on-premises copy with SshPublicKey + authentication, either PrivateKeyPath or PrivateKeyContent should be + specified. SSH private key should be OpenSSH format. + :type private_key_content: ~azure.mgmt.datafactory.models.SecretBase + :param pass_phrase: The password to decrypt the SSH private key if the SSH + private key is encrypted. + :type pass_phrase: ~azure.mgmt.datafactory.models.SecretBase + :param skip_host_key_validation: If true, skip the SSH host key + validation. Default value is false. Type: boolean (or Expression with + resultType boolean). + :type skip_host_key_validation: object + :param host_key_fingerprint: The host key finger-print of the SFTP server. + When SkipHostKeyValidation is false, HostKeyFingerprint should be + specified. Type: string (or Expression with resultType string). + :type host_key_fingerprint: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'private_key_path': {'key': 'typeProperties.privateKeyPath', 'type': 'object'}, + 'private_key_content': {'key': 'typeProperties.privateKeyContent', 'type': 'SecretBase'}, + 'pass_phrase': {'key': 'typeProperties.passPhrase', 'type': 'SecretBase'}, + 'skip_host_key_validation': {'key': 'typeProperties.skipHostKeyValidation', 'type': 'object'}, + 'host_key_fingerprint': {'key': 'typeProperties.hostKeyFingerprint', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SftpServerLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.port = kwargs.get('port', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.private_key_path = kwargs.get('private_key_path', None) + self.private_key_content = kwargs.get('private_key_content', None) + self.pass_phrase = kwargs.get('pass_phrase', None) + self.skip_host_key_validation = kwargs.get('skip_host_key_validation', None) + self.host_key_fingerprint = kwargs.get('host_key_fingerprint', None) + self.type = 'Sftp' + + +class ShopifyLinkedService(LinkedService): + """Shopify Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
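# Reviewer note: a sketch of a Basic-auth SftpServerLinkedService as defined
# above, reusing the SecureString secret type from earlier in this file; the
# host and credentials are placeholders.
from azure.mgmt.datafactory.models import SecureString, SftpServerLinkedService

sftp_ls = SftpServerLinkedService(
    host='sftp.example.com',                 # required host name
    port=22,                                 # default value per docstring
    authentication_type='Basic',
    user_name='loader',
    password=SecureString(value='<password>'),
    skip_host_key_validation=False,          # keep host key checks on
    host_key_fingerprint='<fingerprint>',    # expected when validation is on
)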
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The endpoint of the Shopify server. (i.e. + mystore.myshopify.com) + :type host: object + :param access_token: The API access token that can be used to access + Shopify’s data. The token won't expire if it is in offline mode. + :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ShopifyLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.access_token = kwargs.get('access_token', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Shopify' + + +class ShopifyObjectDataset(Dataset): + """Shopify Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param description: Dataset description.
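# Reviewer note: a sketch of the token-based ShopifyLinkedService above; the
# store host and token are placeholders.
from azure.mgmt.datafactory.models import SecureString, ShopifyLinkedService

shopify_ls = ShopifyLinkedService(
    host='mystore.myshopify.com',                   # required endpoint
    access_token=SecureString(value='<api-token>'), # API access token
)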
+ :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ShopifyObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'ShopifyObject' + + +class ShopifySource(CopySource): + """A copy activity Shopify Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ShopifySource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'ShopifySource' + + +class SparkLinkedService(LinkedService): + """Spark Server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. IP address or host name of the Spark server + :type host: object + :param port: Required. The TCP port that the Spark server uses to listen + for client connections. + :type port: object + :param server_type: The type of Spark server. Possible values include: + 'SharkServer', 'SharkServer2', 'SparkThriftServer' + :type server_type: str or ~azure.mgmt.datafactory.models.SparkServerType + :param thrift_transport_protocol: The transport protocol to use in the + Thrift layer. Possible values include: 'Binary', 'SASL', 'HTTP ' + :type thrift_transport_protocol: str or + ~azure.mgmt.datafactory.models.SparkThriftTransportProtocol + :param authentication_type: Required. The authentication method used to + access the Spark server. Possible values include: 'Anonymous', 'Username', + 'UsernameAndPassword', 'WindowsAzureHDInsightService' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.SparkAuthenticationType + :param username: The user name that you use to access Spark Server. + :type username: object + :param password: The password corresponding to the user name that you + provided in the Username field + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param http_path: The partial URL corresponding to the Spark server. + :type http_path: object + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. 
+ :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a + CA-issued SSL certificate name to match the host name of the server when + connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'port': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, + 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SparkLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.port = kwargs.get('port', None) + self.server_type = kwargs.get('server_type', None) + self.thrift_transport_protocol = kwargs.get('thrift_transport_protocol', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.http_path = kwargs.get('http_path', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) + self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Spark' + + +class SparkObjectDataset(Dataset): + """Spark Server dataset. + + All required parameters must be populated in order to send to Azure. 
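# Reviewer note: a sketch of a SparkLinkedService over SSL as defined above;
# the endpoint details and credentials are placeholders, and the enum
# strings come from the docstring above.
from azure.mgmt.datafactory.models import SecureString, SparkLinkedService

spark_ls = SparkLinkedService(
    host='spark.example.com',                   # required host
    port=10001,                                 # required Thrift port
    server_type='SparkThriftServer',
    authentication_type='UsernameAndPassword',  # required
    username='analyst',
    password=SecureString(value='<password>'),
    enable_ssl=True,                            # default is false per docstring
)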
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The table name of the Spark. Type: string (or Expression + with resultType string). + :type table: object + :param spark_object_dataset_schema: The schema name of the Spark. Type: + string (or Expression with resultType string). + :type spark_object_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'spark_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SparkObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.spark_object_dataset_schema = kwargs.get('spark_object_dataset_schema', None) + self.type = 'SparkObject' + + +class SparkSource(CopySource): + """A copy activity Spark Server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SparkSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'SparkSource' + + +class SqlDWSink(CopySink): + """A copy activity SQL Data Warehouse sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param allow_poly_base: Indicates to use PolyBase to copy data into SQL + Data Warehouse when applicable. Type: boolean (or Expression with + resultType boolean). + :type allow_poly_base: object + :param poly_base_settings: Specifies PolyBase-related settings when + allowPolyBase is true. 
+ :type poly_base_settings: ~azure.mgmt.datafactory.models.PolybaseSettings
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+ 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+ 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
+ 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'},
+ 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'},
+ }
+
+ def __init__(self, **kwargs):
+ super(SqlDWSink, self).__init__(**kwargs)
+ self.pre_copy_script = kwargs.get('pre_copy_script', None)
+ self.allow_poly_base = kwargs.get('allow_poly_base', None)
+ self.poly_base_settings = kwargs.get('poly_base_settings', None)
+ self.type = 'SqlDWSink'
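+
+
+# Editor's note (illustrative sketch, not generated code): enabling PolyBase
+# on the sink above. The script and reject settings are placeholders, and the
+# PolybaseSettings field names are assumed from that model's definition
+# elsewhere in this package.
+#
+#     from azure.mgmt.datafactory import models
+#
+#     dw_sink = models.SqlDWSink(
+#         pre_copy_script='TRUNCATE TABLE dbo.Staging',
+#         allow_poly_base=True,
+#         poly_base_settings=models.PolybaseSettings(
+#             reject_type='percentage',
+#             reject_value=10.0,
+#             reject_sample_value=100,
+#             use_type_default=True,
+#         ),
+#     )
+#
+# A sink like this is attached to a CopyActivity together with a source.
+
+
+class SqlDWSource(CopySource):
+ """A copy activity SQL Data Warehouse source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or
+ Expression with resultType string).
+ :type sql_reader_query: object
+ :param sql_reader_stored_procedure_name: Name of the stored procedure for
+ a SQL Data Warehouse source. This cannot be used at the same time as
+ SqlReaderQuery. Type: string (or Expression with resultType string).
+ :type sql_reader_stored_procedure_name: object
+ :param stored_procedure_parameters: Value and type setting for stored
+ procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}".
+ Type: object (or Expression with resultType object), itemType:
+ StoredProcedureParameter.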
+ :type stored_procedure_parameters: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlDWSource, self).__init__(**kwargs) + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.type = 'SqlDWSource' + + +class SqlMISink(CopySink): + """A copy activity Azure SQL Managed Instance sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). 
+ :type stored_procedure_table_type_parameter_name: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+ 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+ 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
+ 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
+ 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
+ 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
+ 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(SqlMISink, self).__init__(**kwargs)
+ self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None)
+ self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None)
+ self.pre_copy_script = kwargs.get('pre_copy_script', None)
+ self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None)
+ self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None)
+ self.type = 'SqlMISink'
+
+
+class SqlMISource(CopySource):
+ """A copy activity Azure SQL Managed Instance source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param sql_reader_query: SQL reader query. Type: string (or Expression
+ with resultType string).
+ :type sql_reader_query: object
+ :param sql_reader_stored_procedure_name: Name of the stored procedure for
+ an Azure SQL Managed Instance source. This cannot be used at the same time
+ as SqlReaderQuery. Type: string (or Expression with resultType string).
+ :type sql_reader_stored_procedure_name: object
+ :param stored_procedure_parameters: Value and type setting for stored
+ procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}".
+ :type stored_procedure_parameters: dict[str,
+ ~azure.mgmt.datafactory.models.StoredProcedureParameter]
+ :param produce_additional_types: Which additional types to produce.
+ :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlMISource, self).__init__(**kwargs) + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.produce_additional_types = kwargs.get('produce_additional_types', None) + self.type = 'SqlMISource' + + +class SqlServerLinkedService(LinkedService): + """SQL Server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param user_name: The on-premises Windows authentication user name. Type: + string (or Expression with resultType string). + :type user_name: object + :param password: The on-premises Windows authentication password. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlServerLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'SqlServer' + + +class SqlServerSink(CopySink): + """A copy activity SQL server sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). 
+ :type stored_procedure_table_type_parameter_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlServerSink, self).__init__(**kwargs) + self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) + self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.type = 'SqlServerSink' + + +class SqlServerSource(CopySource): + """A copy activity SQL server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Database source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. 
+ :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlServerSource, self).__init__(**kwargs) + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.produce_additional_types = kwargs.get('produce_additional_types', None) + self.type = 'SqlServerSource' + + +class SqlServerStoredProcedureActivity(ExecutionActivity): + """SQL stored procedure activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param stored_procedure_name: Required. Stored procedure name. Type: + string (or Expression with resultType string). + :type stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". 
+ :type stored_procedure_parameters: dict[str,
+ ~azure.mgmt.datafactory.models.StoredProcedureParameter]
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'stored_procedure_name': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+ 'stored_procedure_name': {'key': 'typeProperties.storedProcedureName', 'type': 'object'},
+ 'stored_procedure_parameters': {'key': 'typeProperties.storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
+ }
+
+ def __init__(self, **kwargs):
+ super(SqlServerStoredProcedureActivity, self).__init__(**kwargs)
+ self.stored_procedure_name = kwargs.get('stored_procedure_name', None)
+ self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None)
+ self.type = 'SqlServerStoredProcedure'
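+
+
+# Editor's note (illustrative sketch, not generated code): invoking a stored
+# procedure from a pipeline with the activity above. The activity, linked
+# service, procedure and parameter names are placeholders;
+# StoredProcedureParameter is defined later in this module.
+#
+#     from azure.mgmt.datafactory import models
+#
+#     sp_activity = models.SqlServerStoredProcedureActivity(
+#         name='RefreshAggregates',
+#         linked_service_name=models.LinkedServiceReference(
+#             reference_name='SqlServerLS'),
+#         stored_procedure_name='dbo.usp_RefreshAggregates',
+#         stored_procedure_parameters={
+#             'RunDate': models.StoredProcedureParameter(
+#                 value='2019-06-07', type='Date'),
+#         },
+#     )
+
+
+class SqlServerTableDataset(Dataset):
+ """The on-premises SQL Server dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset. Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param table_name: This property will be retired. Please consider using
+ schema + table properties instead.
+ :type table_name: object
+ :param sql_server_table_dataset_schema: The schema name of the SQL Server
+ dataset. Type: string (or Expression with resultType string).
+ :type sql_server_table_dataset_schema: object
+ :param table: The table name of the SQL Server dataset. Type: string (or
+ Expression with resultType string).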
+ :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'sql_server_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlServerTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.sql_server_table_dataset_schema = kwargs.get('sql_server_table_dataset_schema', None) + self.table = kwargs.get('table', None) + self.type = 'SqlServerTable' + + +class SqlSink(CopySink): + """A copy activity SQL sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). 
+ :type stored_procedure_table_type_parameter_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlSink, self).__init__(**kwargs) + self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) + self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.type = 'SqlSink' + + +class SqlSource(CopySource): + """A copy activity SQL source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Database source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". 
+ :type stored_procedure_parameters: dict[str,
+ ~azure.mgmt.datafactory.models.StoredProcedureParameter]
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
+ 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'},
+ 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
+ }
+
+ def __init__(self, **kwargs):
+ super(SqlSource, self).__init__(**kwargs)
+ self.sql_reader_query = kwargs.get('sql_reader_query', None)
+ self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None)
+ self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None)
+ self.type = 'SqlSource'
+
+
+class SquareLinkedService(LinkedService):
+ """Square Service linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param host: Required. The URL of the Square instance. (e.g.
+ mystore.mysquare.com)
+ :type host: object
+ :param client_id: Required. The client ID associated with your Square
+ application.
+ :type client_id: object
+ :param client_secret: The client secret associated with your Square
+ application.
+ :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
+ :param redirect_uri: Required. The redirect URL assigned in the Square
+ application dashboard. (e.g. http://localhost:2500)
+ :type redirect_uri: object
+ :param use_encrypted_endpoints: Specifies whether the data source
+ endpoints are encrypted using HTTPS. The default value is true.
+ :type use_encrypted_endpoints: object
+ :param use_host_verification: Specifies whether to require the host name
+ in the server's certificate to match the host name of the server when
+ connecting over SSL. The default value is true.
+ :type use_host_verification: object
+ :param use_peer_verification: Specifies whether to verify the identity of
+ the server when connecting over SSL. The default value is true.
+ :type use_peer_verification: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'client_id': {'required': True}, + 'redirect_uri': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'redirect_uri': {'key': 'typeProperties.redirectUri', 'type': 'object'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SquareLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.redirect_uri = kwargs.get('redirect_uri', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Square' + + +class SquareObjectDataset(Dataset): + """Square Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(SquareObjectDataset, self).__init__(**kwargs)
+ self.table_name = kwargs.get('table_name', None)
+ self.type = 'SquareObject'
+
+
+class SquareSource(CopySource):
+ """A copy activity Square Service source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param query: A query to retrieve data from source. Type: string (or
+ Expression with resultType string).
+ :type query: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'query': {'key': 'query', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(SquareSource, self).__init__(**kwargs)
+ self.query = kwargs.get('query', None)
+ self.type = 'SquareSource'
+
+
+class SSISAccessCredential(Model):
+ """SSIS access credential.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param domain: Required. Domain for Windows authentication.
+ :type domain: object
+ :param user_name: Required. UserName for Windows authentication.
+ :type user_name: object
+ :param password: Required. Password for Windows authentication.
+ :type password: ~azure.mgmt.datafactory.models.SecureString + """ + + _validation = { + 'domain': {'required': True}, + 'user_name': {'required': True}, + 'password': {'required': True}, + } + + _attribute_map = { + 'domain': {'key': 'domain', 'type': 'object'}, + 'user_name': {'key': 'userName', 'type': 'object'}, + 'password': {'key': 'password', 'type': 'SecureString'}, + } + + def __init__(self, **kwargs): + super(SSISAccessCredential, self).__init__(**kwargs) + self.domain = kwargs.get('domain', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + + +class SsisObjectMetadata(Model): + """SSIS object metadata. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SsisEnvironment, SsisPackage, SsisProject, SsisFolder + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Environment': 'SsisEnvironment', 'Package': 'SsisPackage', 'Project': 'SsisProject', 'Folder': 'SsisFolder'} + } + + def __init__(self, **kwargs): + super(SsisObjectMetadata, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.name = kwargs.get('name', None) + self.description = kwargs.get('description', None) + self.type = None + + +class SsisEnvironment(SsisObjectMetadata): + """Ssis environment. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains environment. + :type folder_id: long + :param variables: Variable in environment + :type variables: list[~azure.mgmt.datafactory.models.SsisVariable] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'variables': {'key': 'variables', 'type': '[SsisVariable]'}, + } + + def __init__(self, **kwargs): + super(SsisEnvironment, self).__init__(**kwargs) + self.folder_id = kwargs.get('folder_id', None) + self.variables = kwargs.get('variables', None) + self.type = 'Environment' + + +class SsisEnvironmentReference(Model): + """Ssis environment reference. + + :param id: Environment reference id. + :type id: long + :param environment_folder_name: Environment folder name. + :type environment_folder_name: str + :param environment_name: Environment name. 
+ :type environment_name: str
+ :param reference_type: Reference type.
+ :type reference_type: str
+ """
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'long'},
+ 'environment_folder_name': {'key': 'environmentFolderName', 'type': 'str'},
+ 'environment_name': {'key': 'environmentName', 'type': 'str'},
+ 'reference_type': {'key': 'referenceType', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(SsisEnvironmentReference, self).__init__(**kwargs)
+ self.id = kwargs.get('id', None)
+ self.environment_folder_name = kwargs.get('environment_folder_name', None)
+ self.environment_name = kwargs.get('environment_name', None)
+ self.reference_type = kwargs.get('reference_type', None)
+
+
+class SSISExecutionCredential(Model):
+ """SSIS package execution credential.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param domain: Required. Domain for Windows authentication.
+ :type domain: object
+ :param user_name: Required. UserName for Windows authentication.
+ :type user_name: object
+ :param password: Required. Password for Windows authentication.
+ :type password: ~azure.mgmt.datafactory.models.SecureString
+ """
+
+ _validation = {
+ 'domain': {'required': True},
+ 'user_name': {'required': True},
+ 'password': {'required': True},
+ }
+
+ _attribute_map = {
+ 'domain': {'key': 'domain', 'type': 'object'},
+ 'user_name': {'key': 'userName', 'type': 'object'},
+ 'password': {'key': 'password', 'type': 'SecureString'},
+ }
+
+ def __init__(self, **kwargs):
+ super(SSISExecutionCredential, self).__init__(**kwargs)
+ self.domain = kwargs.get('domain', None)
+ self.user_name = kwargs.get('user_name', None)
+ self.password = kwargs.get('password', None)
+
+
+class SSISExecutionParameter(Model):
+ """SSIS execution parameter.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param value: Required. SSIS package execution parameter value. Type:
+ string (or Expression with resultType string).
+ :type value: object
+ """
+
+ _validation = {
+ 'value': {'required': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(SSISExecutionParameter, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+
+
+class SsisFolder(SsisObjectMetadata):
+ """Ssis folder.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param id: Metadata id.
+ :type id: long
+ :param name: Metadata name.
+ :type name: str
+ :param description: Metadata description.
+ :type description: str
+ :param type: Required. Constant filled by server.
+ :type type: str
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'long'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(SsisFolder, self).__init__(**kwargs)
+ self.type = 'Folder'
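+
+
+# Editor's note (illustrative sketch, not generated code): credentials and
+# parameters for an SSIS package run, built from the two models above. The
+# domain, user, secret and parameter names are placeholders; these objects
+# would typically be passed to an ExecuteSSISPackageActivity.
+#
+#     from azure.mgmt.datafactory import models
+#
+#     exec_credential = models.SSISExecutionCredential(
+#         domain='CONTOSO',
+#         user_name='svc-ssis',
+#         password=models.SecureString(value='<password>'),
+#     )
+#     project_parameters = {
+#         'Environment': models.SSISExecutionParameter(value='Production'),
+#     }
+
+
+class SSISLogLocation(Model):
+ """SSIS package execution log location.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param log_path: Required. The SSIS package execution log path. Type:
+ string (or Expression with resultType string).
+ :type log_path: object
+ :ivar type: Required. The type of SSIS log location. Default value:
+ "File".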
+ :vartype type: str + :param access_credential: The package execution log access credential. + :type access_credential: + ~azure.mgmt.datafactory.models.SSISAccessCredential + :param log_refresh_interval: Specifies the interval to refresh log. The + default interval is 5 minutes. Type: string (or Expression with resultType + string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type log_refresh_interval: object + """ + + _validation = { + 'log_path': {'required': True}, + 'type': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'log_path': {'key': 'logPath', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, + 'log_refresh_interval': {'key': 'typeProperties.logRefreshInterval', 'type': 'object'}, + } + + type = "File" + + def __init__(self, **kwargs): + super(SSISLogLocation, self).__init__(**kwargs) + self.log_path = kwargs.get('log_path', None) + self.access_credential = kwargs.get('access_credential', None) + self.log_refresh_interval = kwargs.get('log_refresh_interval', None) + + +class SsisObjectMetadataListResponse(Model): + """A list of SSIS object metadata. + + :param value: List of SSIS object metadata. + :type value: list[~azure.mgmt.datafactory.models.SsisObjectMetadata] + :param next_link: The link to the next page of results, if any remaining + results exist. + :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[SsisObjectMetadata]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SsisObjectMetadataListResponse, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.next_link = kwargs.get('next_link', None) + + +class SsisObjectMetadataStatusResponse(Model): + """The status of the operation. + + :param status: The status of the operation. + :type status: str + :param name: The operation name. + :type name: str + :param properties: The operation properties. + :type properties: str + :param error: The operation error message. + :type error: str + """ + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'str'}, + 'error': {'key': 'error', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SsisObjectMetadataStatusResponse, self).__init__(**kwargs) + self.status = kwargs.get('status', None) + self.name = kwargs.get('name', None) + self.properties = kwargs.get('properties', None) + self.error = kwargs.get('error', None) + + +class SsisPackage(SsisObjectMetadata): + """Ssis Package. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains package. + :type folder_id: long + :param project_version: Project version which contains package. + :type project_version: long + :param project_id: Project id which contains package. 
+ :type project_id: long
+ :param parameters: Parameters in package.
+ :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter]
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'long'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'folder_id': {'key': 'folderId', 'type': 'long'},
+ 'project_version': {'key': 'projectVersion', 'type': 'long'},
+ 'project_id': {'key': 'projectId', 'type': 'long'},
+ 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'},
+ }
+
+ def __init__(self, **kwargs):
+ super(SsisPackage, self).__init__(**kwargs)
+ self.folder_id = kwargs.get('folder_id', None)
+ self.project_version = kwargs.get('project_version', None)
+ self.project_id = kwargs.get('project_id', None)
+ self.parameters = kwargs.get('parameters', None)
+ self.type = 'Package'
+
+
+class SSISPackageLocation(Model):
+ """SSIS package location.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param package_path: Required. The SSIS package path. Type: string (or
+ Expression with resultType string).
+ :type package_path: object
+ :param type: The type of SSIS package location. Possible values include:
+ 'SSISDB', 'File'
+ :type type: str or ~azure.mgmt.datafactory.models.SsisPackageLocationType
+ :param package_password: Password of the package.
+ :type package_password: ~azure.mgmt.datafactory.models.SecureString
+ :param access_credential: The package access credential.
+ :type access_credential:
+ ~azure.mgmt.datafactory.models.SSISAccessCredential
+ :param configuration_path: The configuration file of the package
+ execution. Type: string (or Expression with resultType string).
+ :type configuration_path: object
+ """
+
+ _validation = {
+ 'package_path': {'required': True},
+ }
+
+ _attribute_map = {
+ 'package_path': {'key': 'packagePath', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'package_password': {'key': 'typeProperties.packagePassword', 'type': 'SecureString'},
+ 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'},
+ 'configuration_path': {'key': 'typeProperties.configurationPath', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(SSISPackageLocation, self).__init__(**kwargs)
+ self.package_path = kwargs.get('package_path', None)
+ self.type = kwargs.get('type', None)
+ self.package_password = kwargs.get('package_password', None)
+ self.access_credential = kwargs.get('access_credential', None)
+ self.configuration_path = kwargs.get('configuration_path', None)
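+
+
+# Editor's note (illustrative sketch, not generated code): pointing package
+# execution at a .dtsx file on a file share with the location model above.
+# The UNC path and credential values are placeholders.
+#
+#     from azure.mgmt.datafactory import models
+#
+#     package_location = models.SSISPackageLocation(
+#         package_path='\\\\fileshare\\packages\\LoadSales.dtsx',
+#         type='File',
+#         access_credential=models.SSISAccessCredential(
+#             domain='CONTOSO',
+#             user_name='svc-ssis',
+#             password=models.SecureString(value='<password>'),
+#         ),
+#     )
+
+
+class SsisParameter(Model):
+ """Ssis parameter.
+
+ :param id: Parameter id.
+ :type id: long
+ :param name: Parameter name.
+ :type name: str
+ :param description: Parameter description.
+ :type description: str
+ :param data_type: Parameter type.
+ :type data_type: str
+ :param required: Whether parameter is required.
+ :type required: bool
+ :param sensitive: Whether parameter is sensitive.
+ :type sensitive: bool
+ :param design_default_value: Design default value of parameter.
+ :type design_default_value: str
+ :param default_value: Default value of parameter.
+ :type default_value: str
+ :param sensitive_default_value: Default sensitive value of parameter.
+ :type sensitive_default_value: str
+ :param value_type: Parameter value type.
+ :type value_type: str
+ :param value_set: Parameter value set.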
+ :type value_set: bool + :param variable: Parameter reference variable. + :type variable: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'required': {'key': 'required', 'type': 'bool'}, + 'sensitive': {'key': 'sensitive', 'type': 'bool'}, + 'design_default_value': {'key': 'designDefaultValue', 'type': 'str'}, + 'default_value': {'key': 'defaultValue', 'type': 'str'}, + 'sensitive_default_value': {'key': 'sensitiveDefaultValue', 'type': 'str'}, + 'value_type': {'key': 'valueType', 'type': 'str'}, + 'value_set': {'key': 'valueSet', 'type': 'bool'}, + 'variable': {'key': 'variable', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SsisParameter, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.name = kwargs.get('name', None) + self.description = kwargs.get('description', None) + self.data_type = kwargs.get('data_type', None) + self.required = kwargs.get('required', None) + self.sensitive = kwargs.get('sensitive', None) + self.design_default_value = kwargs.get('design_default_value', None) + self.default_value = kwargs.get('default_value', None) + self.sensitive_default_value = kwargs.get('sensitive_default_value', None) + self.value_type = kwargs.get('value_type', None) + self.value_set = kwargs.get('value_set', None) + self.variable = kwargs.get('variable', None) + + +class SsisProject(SsisObjectMetadata): + """Ssis project. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains project. + :type folder_id: long + :param version: Project version. + :type version: long + :param environment_refs: Environment reference in project + :type environment_refs: + list[~azure.mgmt.datafactory.models.SsisEnvironmentReference] + :param parameters: Parameters in project + :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'version': {'key': 'version', 'type': 'long'}, + 'environment_refs': {'key': 'environmentRefs', 'type': '[SsisEnvironmentReference]'}, + 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, + } + + def __init__(self, **kwargs): + super(SsisProject, self).__init__(**kwargs) + self.folder_id = kwargs.get('folder_id', None) + self.version = kwargs.get('version', None) + self.environment_refs = kwargs.get('environment_refs', None) + self.parameters = kwargs.get('parameters', None) + self.type = 'Project' + + +class SSISPropertyOverride(Model): + """SSIS property override. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. SSIS package property override value. Type: string + (or Expression with resultType string). + :type value: object + :param is_sensitive: Whether SSIS package property override value is + sensitive data. 
Value will be encrypted in SSISDB if it is true + :type is_sensitive: bool + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': 'object'}, + 'is_sensitive': {'key': 'isSensitive', 'type': 'bool'}, + } + + def __init__(self, **kwargs): + super(SSISPropertyOverride, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.is_sensitive = kwargs.get('is_sensitive', None) + + +class SsisVariable(Model): + """Ssis variable. + + :param id: Variable id. + :type id: long + :param name: Variable name. + :type name: str + :param description: Variable description. + :type description: str + :param data_type: Variable type. + :type data_type: str + :param sensitive: Whether variable is sensitive. + :type sensitive: bool + :param value: Variable value. + :type value: str + :param sensitive_value: Variable sensitive value. + :type sensitive_value: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'sensitive': {'key': 'sensitive', 'type': 'bool'}, + 'value': {'key': 'value', 'type': 'str'}, + 'sensitive_value': {'key': 'sensitiveValue', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SsisVariable, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.name = kwargs.get('name', None) + self.description = kwargs.get('description', None) + self.data_type = kwargs.get('data_type', None) + self.sensitive = kwargs.get('sensitive', None) + self.value = kwargs.get('value', None) + self.sensitive_value = kwargs.get('sensitive_value', None) + + +class StagingSettings(Model): + """Staging settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param linked_service_name: Required. Staging linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param path: The path to storage for storing the interim data. Type: + string (or Expression with resultType string). + :type path: object + :param enable_compression: Specifies whether to use compression when + copying data via an interim staging. Default value is false. Type: boolean + (or Expression with resultType boolean). + :type enable_compression: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'path': {'key': 'path', 'type': 'object'}, + 'enable_compression': {'key': 'enableCompression', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(StagingSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.linked_service_name = kwargs.get('linked_service_name', None) + self.path = kwargs.get('path', None) + self.enable_compression = kwargs.get('enable_compression', None) + + +class StoredProcedureParameter(Model): + """SQL stored procedure parameter. + + :param value: Stored procedure parameter value. Type: string (or + Expression with resultType string). + :type value: object + :param type: Stored procedure parameter type. 
Possible values include: + 'String', 'Int', 'Int64', 'Decimal', 'Guid', 'Boolean', 'Date' + :type type: str or + ~azure.mgmt.datafactory.models.StoredProcedureParameterType + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(StoredProcedureParameter, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.type = kwargs.get('type', None) + + +class SybaseLinkedService(LinkedService): + """Linked service for Sybase data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. Server name for connection. Type: string (or + Expression with resultType string). + :type server: object + :param database: Required. Database name for connection. Type: string (or + Expression with resultType string). + :type database: object + :param schema: Schema name for connection. Type: string (or Expression + with resultType string). + :type schema: object + :param authentication_type: AuthenticationType to be used for connection. + Possible values include: 'Basic', 'Windows' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.SybaseAuthenticationType + :param username: Username for authentication. Type: string (or Expression + with resultType string). + :type username: object + :param password: Password for authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SybaseLinkedService, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.database = kwargs.get('database', None) + self.schema = kwargs.get('schema', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Sybase' + + +class SybaseSource(CopySource): + """A copy activity source for Sybase databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SybaseSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'SybaseSource' + + +class SybaseTableDataset(Dataset): + """The Sybase table dataset. + + All required parameters must be populated in order to send to Azure. 
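+
+ As an illustrative sketch only (the linked service reference name and
+ table below are placeholders, not part of the generated API), a dataset
+ of this type can be constructed like so::
+
+     from azure.mgmt.datafactory.models import (
+         LinkedServiceReference, SybaseTableDataset)
+
+     dataset = SybaseTableDataset(
+         linked_service_name=LinkedServiceReference(
+             reference_name='MySybaseLinkedService'),
+         table_name='dbo.Orders')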
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Sybase table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SybaseTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'SybaseTable' + + +class TeradataLinkedService(LinkedService): + """Linked service for Teradata data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Teradata ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param server: Server name for connection. Type: string (or Expression + with resultType string). + :type server: object + :param authentication_type: AuthenticationType to be used for connection. 
+ Possible values include: 'Basic', 'Windows' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.TeradataAuthenticationType + :param username: Username for authentication. Type: string (or Expression + with resultType string). + :type username: object + :param password: Password for authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(TeradataLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.server = kwargs.get('server', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Teradata' + + +class TeradataPartitionSettings(Model): + """The settings that will be leveraged for teradata source partitioning. + + :param partition_column_name: The name of the column that will be used for + proceeding range or hash partitioning. Type: string (or Expression with + resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(TeradataPartitionSettings, self).__init__(**kwargs) + self.partition_column_name = kwargs.get('partition_column_name', None) + self.partition_upper_bound = kwargs.get('partition_upper_bound', None) + self.partition_lower_bound = kwargs.get('partition_lower_bound', None) + + +class TeradataSource(CopySource): + """A copy activity Teradata source. + + All required parameters must be populated in order to send to Azure. 
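+
+ As an illustrative sketch only (the query, column name and bounds below
+ are placeholders, not part of the generated API), a dynamic-range
+ partitioned read can be configured like so::
+
+     from azure.mgmt.datafactory.models import (
+         TeradataPartitionSettings, TeradataSource)
+
+     source = TeradataSource(
+         query='SELECT * FROM Orders',
+         partition_option='DynamicRange',
+         partition_settings=TeradataPartitionSettings(
+             partition_column_name='OrderId',
+             partition_lower_bound='1',
+             partition_upper_bound='100000'))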
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Teradata query. Type: string (or Expression with resultType + string). + :type query: object + :param partition_option: The partition mechanism that will be used for + teradata read in parallel. Possible values include: 'None', 'Hash', + 'DynamicRange' + :type partition_option: str or + ~azure.mgmt.datafactory.models.TeradataPartitionOption + :param partition_settings: The settings that will be leveraged for + teradata source partitioning. + :type partition_settings: + ~azure.mgmt.datafactory.models.TeradataPartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, + } + + def __init__(self, **kwargs): + super(TeradataSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) + self.type = 'TeradataSource' + + +class TeradataTableDataset(Dataset): + """The Teradata database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. 
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param database: The database name of Teradata. Type: string (or
+ Expression with resultType string).
+ :type database: object
+ :param table: The table name of Teradata. Type: string (or Expression with
+ resultType string).
+ :type table: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'database': {'key': 'typeProperties.database', 'type': 'object'},
+ 'table': {'key': 'typeProperties.table', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(TeradataTableDataset, self).__init__(**kwargs)
+ self.database = kwargs.get('database', None)
+ self.table = kwargs.get('table', None)
+ self.type = 'TeradataTable'
+
+
+class TextFormat(DatasetStorageFormat):
+ """The data stored in text format.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param serializer: Serializer. Type: string (or Expression with resultType
+ string).
+ :type serializer: object
+ :param deserializer: Deserializer. Type: string (or Expression with
+ resultType string).
+ :type deserializer: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param column_delimiter: The column delimiter. Type: string (or Expression
+ with resultType string).
+ :type column_delimiter: object
+ :param row_delimiter: The row delimiter. Type: string (or Expression with
+ resultType string).
+ :type row_delimiter: object
+ :param escape_char: The escape character. Type: string (or Expression with
+ resultType string).
+ :type escape_char: object
+ :param quote_char: The quote character. Type: string (or Expression with
+ resultType string).
+ :type quote_char: object
+ :param null_value: The null value string. Type: string (or Expression with
+ resultType string).
+ :type null_value: object
+ :param encoding_name: The code page name of the preferred encoding. If
+ not specified, the default value is "utf-8", unless BOM denotes another
+ Unicode encoding. Refer to the "Name" column of the table in the following
+ link to set supported values:
+ https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string
+ (or Expression with resultType string).
+ :type encoding_name: object
+ :param treat_empty_as_null: Treat empty column values in the text file as
+ null. The default value is true. Type: boolean (or Expression with
+ resultType boolean).
+ :type treat_empty_as_null: object
+ :param skip_line_count: The number of lines/rows to be skipped when
+ parsing text files. The default value is 0. Type: integer (or Expression
+ with resultType integer).
+ :type skip_line_count: object
+ :param first_row_as_header: When used as input, treat the first row of
+ data as headers. When used as output, write the headers into the output as
+ the first row of data. The default value is false. Type: boolean (or
+ Expression with resultType boolean).
+ :type first_row_as_header: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'serializer': {'key': 'serializer', 'type': 'object'},
+ 'deserializer': {'key': 'deserializer', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'column_delimiter': {'key': 'columnDelimiter', 'type': 'object'},
+ 'row_delimiter': {'key': 'rowDelimiter', 'type': 'object'},
+ 'escape_char': {'key': 'escapeChar', 'type': 'object'},
+ 'quote_char': {'key': 'quoteChar', 'type': 'object'},
+ 'null_value': {'key': 'nullValue', 'type': 'object'},
+ 'encoding_name': {'key': 'encodingName', 'type': 'object'},
+ 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'},
+ 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'},
+ 'first_row_as_header': {'key': 'firstRowAsHeader', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(TextFormat, self).__init__(**kwargs)
+ self.column_delimiter = kwargs.get('column_delimiter', None)
+ self.row_delimiter = kwargs.get('row_delimiter', None)
+ self.escape_char = kwargs.get('escape_char', None)
+ self.quote_char = kwargs.get('quote_char', None)
+ self.null_value = kwargs.get('null_value', None)
+ self.encoding_name = kwargs.get('encoding_name', None)
+ self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None)
+ self.skip_line_count = kwargs.get('skip_line_count', None)
+ self.first_row_as_header = kwargs.get('first_row_as_header', None)
+ self.type = 'TextFormat'
+
+
+class TriggerDependencyReference(DependencyReference):
+ """Trigger referenced dependency.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: TumblingWindowTriggerDependencyReference
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param reference_trigger: Required. Referenced trigger.
+ :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'reference_trigger': {'required': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'},
+ }
+
+ _subtype_map = {
+ 'type': {'TumblingWindowTriggerDependencyReference': 'TumblingWindowTriggerDependencyReference'}
+ }
+
+ def __init__(self, **kwargs):
+ super(TriggerDependencyReference, self).__init__(**kwargs)
+ self.reference_trigger = kwargs.get('reference_trigger', None)
+ self.type = 'TriggerDependencyReference'
+
+
+class TriggerPipelineReference(Model):
+ """Pipeline that needs to be triggered with the given parameters.
+
+ :param pipeline_reference: Pipeline reference.
+ :type pipeline_reference: ~azure.mgmt.datafactory.models.PipelineReference
+ :param parameters: Pipeline parameters.
+ :type parameters: dict[str, object] + """ + + _attribute_map = { + 'pipeline_reference': {'key': 'pipelineReference', 'type': 'PipelineReference'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + def __init__(self, **kwargs): + super(TriggerPipelineReference, self).__init__(**kwargs) + self.pipeline_reference = kwargs.get('pipeline_reference', None) + self.parameters = kwargs.get('parameters', None) + + +class TriggerReference(Model): + """Trigger reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Trigger reference type. Default value: + "TriggerReference" . + :vartype type: str + :param reference_name: Required. Reference trigger name. + :type reference_name: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + type = "TriggerReference" + + def __init__(self, **kwargs): + super(TriggerReference, self).__init__(**kwargs) + self.reference_name = kwargs.get('reference_name', None) + + +class TriggerResource(SubResource): + """Trigger resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Properties of the trigger. + :type properties: ~azure.mgmt.datafactory.models.Trigger + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'Trigger'}, + } + + def __init__(self, **kwargs): + super(TriggerResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + + +class TriggerRun(Model): + """Trigger runs. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar trigger_run_id: Trigger run id. + :vartype trigger_run_id: str + :ivar trigger_name: Trigger name. + :vartype trigger_name: str + :ivar trigger_type: Trigger type. + :vartype trigger_type: str + :ivar trigger_run_timestamp: Trigger run start time. + :vartype trigger_run_timestamp: datetime + :ivar status: Trigger run status. Possible values include: 'Succeeded', + 'Failed', 'Inprogress' + :vartype status: str or ~azure.mgmt.datafactory.models.TriggerRunStatus + :ivar message: Trigger error message. + :vartype message: str + :ivar properties: List of property name and value related to trigger run. + Name, value pair depends on type of trigger. + :vartype properties: dict[str, str] + :ivar triggered_pipelines: List of pipeline name and run Id triggered by + the trigger run. 
+ :vartype triggered_pipelines: dict[str, str] + """ + + _validation = { + 'trigger_run_id': {'readonly': True}, + 'trigger_name': {'readonly': True}, + 'trigger_type': {'readonly': True}, + 'trigger_run_timestamp': {'readonly': True}, + 'status': {'readonly': True}, + 'message': {'readonly': True}, + 'properties': {'readonly': True}, + 'triggered_pipelines': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'trigger_run_id': {'key': 'triggerRunId', 'type': 'str'}, + 'trigger_name': {'key': 'triggerName', 'type': 'str'}, + 'trigger_type': {'key': 'triggerType', 'type': 'str'}, + 'trigger_run_timestamp': {'key': 'triggerRunTimestamp', 'type': 'iso-8601'}, + 'status': {'key': 'status', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'triggered_pipelines': {'key': 'triggeredPipelines', 'type': '{str}'}, + } + + def __init__(self, **kwargs): + super(TriggerRun, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.trigger_run_id = None + self.trigger_name = None + self.trigger_type = None + self.trigger_run_timestamp = None + self.status = None + self.message = None + self.properties = None + self.triggered_pipelines = None + + +class TriggerRunsQueryResponse(Model): + """A list of trigger runs. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of trigger runs. + :type value: list[~azure.mgmt.datafactory.models.TriggerRun] + :param continuation_token: The continuation token for getting the next + page of results, if any remaining results exist, null otherwise. + :type continuation_token: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[TriggerRun]'}, + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(TriggerRunsQueryResponse, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.continuation_token = kwargs.get('continuation_token', None) + + +class TumblingWindowTrigger(Trigger): + """Trigger that schedules pipeline runs for all fixed time interval windows + from a start time without gaps and also supports backfill scenarios (when + start time is in the past). + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param pipeline: Required. Pipeline for which runs are created when an + event is fired for trigger window that is ready. + :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference + :param frequency: Required. The frequency of the time windows. 
Possible + values include: 'Minute', 'Hour' + :type frequency: str or + ~azure.mgmt.datafactory.models.TumblingWindowFrequency + :param interval: Required. The interval of the time windows. The minimum + interval allowed is 15 Minutes. + :type interval: int + :param start_time: Required. The start time for the time period for the + trigger during which events are fired for windows that are ready. Only UTC + time is currently supported. + :type start_time: datetime + :param end_time: The end time for the time period for the trigger during + which events are fired for windows that are ready. Only UTC time is + currently supported. + :type end_time: datetime + :param delay: Specifies how long the trigger waits past due time before + triggering new run. It doesn't alter window start and end time. The + default is 0. Type: string (or Expression with resultType string), + pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type delay: object + :param max_concurrency: Required. The max number of parallel time windows + (ready for execution) for which a new run is triggered. + :type max_concurrency: int + :param retry_policy: Retry policy that will be applied for failed pipeline + runs. + :type retry_policy: ~azure.mgmt.datafactory.models.RetryPolicy + :param depends_on: Triggers that this trigger depends on. Only tumbling + window triggers are supported. + :type depends_on: list[~azure.mgmt.datafactory.models.DependencyReference] + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'pipeline': {'required': True}, + 'frequency': {'required': True}, + 'interval': {'required': True}, + 'start_time': {'required': True}, + 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, + 'frequency': {'key': 'typeProperties.frequency', 'type': 'str'}, + 'interval': {'key': 'typeProperties.interval', 'type': 'int'}, + 'start_time': {'key': 'typeProperties.startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'typeProperties.endTime', 'type': 'iso-8601'}, + 'delay': {'key': 'typeProperties.delay', 'type': 'object'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + 'retry_policy': {'key': 'typeProperties.retryPolicy', 'type': 'RetryPolicy'}, + 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[DependencyReference]'}, + } + + def __init__(self, **kwargs): + super(TumblingWindowTrigger, self).__init__(**kwargs) + self.pipeline = kwargs.get('pipeline', None) + self.frequency = kwargs.get('frequency', None) + self.interval = kwargs.get('interval', None) + self.start_time = kwargs.get('start_time', None) + self.end_time = kwargs.get('end_time', None) + self.delay = kwargs.get('delay', None) + self.max_concurrency = kwargs.get('max_concurrency', None) + self.retry_policy = kwargs.get('retry_policy', None) + self.depends_on = kwargs.get('depends_on', None) + self.type = 'TumblingWindowTrigger' + + +class TumblingWindowTriggerDependencyReference(TriggerDependencyReference): + """Referenced tumbling window trigger dependency. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. 
+ Constant filled by server.
+ :type type: str
+ :param reference_trigger: Required. Referenced trigger.
+ :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference
+ :param offset: Timespan applied to the start time of a tumbling window
+ when evaluating dependency.
+ :type offset: str
+ :param size: The size of the window when evaluating the dependency. If
+ undefined, the frequency of the tumbling window will be used.
+ :type size: str
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'reference_trigger': {'required': True},
+ 'offset': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'},
+ 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'},
+ 'offset': {'key': 'offset', 'type': 'str'},
+ 'size': {'key': 'size', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(TumblingWindowTriggerDependencyReference, self).__init__(**kwargs)
+ self.offset = kwargs.get('offset', None)
+ self.size = kwargs.get('size', None)
+ self.type = 'TumblingWindowTriggerDependencyReference'
+
+
+class UntilActivity(ControlActivity):
+ """This activity executes inner activities until the specified boolean
+ expression evaluates to true or timeout is reached, whichever is earlier.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param expression: Required. An expression that would evaluate to Boolean.
+ The loop will continue until this expression evaluates to true.
+ :type expression: ~azure.mgmt.datafactory.models.Expression
+ :param timeout: Specifies the timeout for the activity to run. If there is
+ no value specified, it takes the value of TimeSpan.FromDays(7) which is 1
+ week as default. Type: string (or Expression with resultType string),
+ pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type timeout: object
+ :param activities: Required. List of activities to execute.
+ :type activities: list[~azure.mgmt.datafactory.models.Activity] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'expression': {'required': True}, + 'activities': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'expression': {'key': 'typeProperties.expression', 'type': 'Expression'}, + 'timeout': {'key': 'typeProperties.timeout', 'type': 'object'}, + 'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'}, + } + + def __init__(self, **kwargs): + super(UntilActivity, self).__init__(**kwargs) + self.expression = kwargs.get('expression', None) + self.timeout = kwargs.get('timeout', None) + self.activities = kwargs.get('activities', None) + self.type = 'Until' + + +class UpdateIntegrationRuntimeNodeRequest(Model): + """Update integration runtime node request. + + :param concurrent_jobs_limit: The number of concurrent jobs permitted to + run on the integration runtime node. Values between 1 and + maxConcurrentJobs(inclusive) are allowed. + :type concurrent_jobs_limit: int + """ + + _validation = { + 'concurrent_jobs_limit': {'minimum': 1}, + } + + _attribute_map = { + 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(UpdateIntegrationRuntimeNodeRequest, self).__init__(**kwargs) + self.concurrent_jobs_limit = kwargs.get('concurrent_jobs_limit', None) + + +class UpdateIntegrationRuntimeRequest(Model): + """Update integration runtime request. + + :param auto_update: Enables or disables the auto-update feature of the + self-hosted integration runtime. See + https://go.microsoft.com/fwlink/?linkid=854189. Possible values include: + 'On', 'Off' + :type auto_update: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate + :param update_delay_offset: The time offset (in hours) in the day, e.g., + PT03H is 3 hours. The integration runtime auto update will happen on that + time. + :type update_delay_offset: str + """ + + _attribute_map = { + 'auto_update': {'key': 'autoUpdate', 'type': 'str'}, + 'update_delay_offset': {'key': 'updateDelayOffset', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(UpdateIntegrationRuntimeRequest, self).__init__(**kwargs) + self.auto_update = kwargs.get('auto_update', None) + self.update_delay_offset = kwargs.get('update_delay_offset', None) + + +class UserAccessPolicy(Model): + """Get Data Plane read only token request definition. + + :param permissions: The string with permissions for Data Plane access. + Currently only 'r' is supported which grants read only access. + :type permissions: str + :param access_resource_path: The resource path to get access relative to + factory. Currently only empty string is supported which corresponds to the + factory resource. + :type access_resource_path: str + :param profile_name: The name of the profile. Currently only the default + is supported. The default value is DefaultProfile. + :type profile_name: str + :param start_time: Start time for the token. If not specified the current + time will be used. + :type start_time: str + :param expire_time: Expiration time for the token. 
Maximum duration for + the token is eight hours and by default the token will expire in eight + hours. + :type expire_time: str + """ + + _attribute_map = { + 'permissions': {'key': 'permissions', 'type': 'str'}, + 'access_resource_path': {'key': 'accessResourcePath', 'type': 'str'}, + 'profile_name': {'key': 'profileName', 'type': 'str'}, + 'start_time': {'key': 'startTime', 'type': 'str'}, + 'expire_time': {'key': 'expireTime', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(UserAccessPolicy, self).__init__(**kwargs) + self.permissions = kwargs.get('permissions', None) + self.access_resource_path = kwargs.get('access_resource_path', None) + self.profile_name = kwargs.get('profile_name', None) + self.start_time = kwargs.get('start_time', None) + self.expire_time = kwargs.get('expire_time', None) + + +class UserProperty(Model): + """User property. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. User property name. + :type name: str + :param value: Required. User property value. Type: string (or Expression + with resultType string). + :type value: object + """ + + _validation = { + 'name': {'required': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(UserProperty, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.value = kwargs.get('value', None) + + +class ValidationActivity(ControlActivity): + """This activity verifies that an external resource exists. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param timeout: Specifies the timeout for the activity to run. If there is + no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 + week as default. Type: string (or Expression with resultType string), + pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type timeout: object + :param sleep: A delay in seconds between validation attempts. If no value + is specified, 10 seconds will be used as the default. Type: integer (or + Expression with resultType integer). + :type sleep: object + :param minimum_size: Can be used if dataset points to a file. The file + must be greater than or equal in size to the value specified. Type: + integer (or Expression with resultType integer). + :type minimum_size: object + :param child_items: Can be used if dataset points to a folder. If set to + true, the folder must have at least one file. If set to false, the folder + must be empty. Type: boolean (or Expression with resultType boolean). + :type child_items: object + :param dataset: Required. Validation activity dataset reference. 
+ :type dataset: ~azure.mgmt.datafactory.models.DatasetReference + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'dataset': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'timeout': {'key': 'typeProperties.timeout', 'type': 'object'}, + 'sleep': {'key': 'typeProperties.sleep', 'type': 'object'}, + 'minimum_size': {'key': 'typeProperties.minimumSize', 'type': 'object'}, + 'child_items': {'key': 'typeProperties.childItems', 'type': 'object'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + } + + def __init__(self, **kwargs): + super(ValidationActivity, self).__init__(**kwargs) + self.timeout = kwargs.get('timeout', None) + self.sleep = kwargs.get('sleep', None) + self.minimum_size = kwargs.get('minimum_size', None) + self.child_items = kwargs.get('child_items', None) + self.dataset = kwargs.get('dataset', None) + self.type = 'Validation' + + +class VariableSpecification(Model): + """Definition of a single variable for a Pipeline. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Variable type. Possible values include: 'String', + 'Bool', 'Array' + :type type: str or ~azure.mgmt.datafactory.models.VariableType + :param default_value: Default value of variable. + :type default_value: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'default_value': {'key': 'defaultValue', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(VariableSpecification, self).__init__(**kwargs) + self.type = kwargs.get('type', None) + self.default_value = kwargs.get('default_value', None) + + +class VerticaLinkedService(LinkedService): + """Vertica linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(VerticaLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.pwd = kwargs.get('pwd', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Vertica' + + +class VerticaSource(CopySource): + """A copy activity Vertica source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(VerticaSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'VerticaSource' + + +class VerticaTableDataset(Dataset): + """Vertica dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param table_name: This property will be retired. Please consider using
+ schema + table properties instead.
+ :type table_name: object
+ :param table: The table name in Vertica. Type: string (or Expression
+ with resultType string).
+ :type table: object
+ :param vertica_table_dataset_schema: The schema name in Vertica. Type:
+ string (or Expression with resultType string).
+ :type vertica_table_dataset_schema: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+ 'table': {'key': 'typeProperties.table', 'type': 'object'},
+ 'vertica_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(VerticaTableDataset, self).__init__(**kwargs)
+ self.table_name = kwargs.get('table_name', None)
+ self.table = kwargs.get('table', None)
+ self.vertica_table_dataset_schema = kwargs.get('vertica_table_dataset_schema', None)
+ self.type = 'VerticaTable'
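
# A minimal usage sketch, not part of the generated patch: a
# VerticaTableDataset addressed through the new schema + table properties
# rather than the retiring table_name. "VerticaLS", "public" and "orders" are
# illustrative, and LinkedServiceReference(reference_name=...) is assumed from
# elsewhere in this SDK.
from azure.mgmt.datafactory.models import LinkedServiceReference, VerticaTableDataset

orders = VerticaTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='VerticaLS'),
    vertica_table_dataset_schema='public',
    table='orders',
)

+
+
+class WaitActivity(ControlActivity):
+ """This activity suspends pipeline execution for the specified interval.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param wait_time_in_seconds: Required. Duration in seconds.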
+ :type wait_time_in_seconds: int + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'wait_time_in_seconds': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'wait_time_in_seconds': {'key': 'typeProperties.waitTimeInSeconds', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(WaitActivity, self).__init__(**kwargs) + self.wait_time_in_seconds = kwargs.get('wait_time_in_seconds', None) + self.type = 'Wait' + + +class WebActivity(ExecutionActivity): + """Web activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param method: Required. Rest API method for target endpoint. Possible + values include: 'GET', 'POST', 'PUT', 'DELETE' + :type method: str or ~azure.mgmt.datafactory.models.WebActivityMethod + :param url: Required. Web activity target endpoint and path. Type: string + (or Expression with resultType string). + :type url: object + :param headers: Represents the headers that will be sent to the request. + For example, to set the language and type on a request: "headers" : { + "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: + string (or Expression with resultType string). + :type headers: object + :param body: Represents the payload that will be sent to the endpoint. + Required for POST/PUT method, not allowed for GET method Type: string (or + Expression with resultType string). + :type body: object + :param authentication: Authentication method used for calling the + endpoint. + :type authentication: + ~azure.mgmt.datafactory.models.WebActivityAuthentication + :param datasets: List of datasets passed to web endpoint. + :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference] + :param linked_services: List of linked services passed to web endpoint. 
+ :type linked_services:
+ list[~azure.mgmt.datafactory.models.LinkedServiceReference]
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'method': {'required': True},
+ 'url': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+ 'method': {'key': 'typeProperties.method', 'type': 'str'},
+ 'url': {'key': 'typeProperties.url', 'type': 'object'},
+ 'headers': {'key': 'typeProperties.headers', 'type': 'object'},
+ 'body': {'key': 'typeProperties.body', 'type': 'object'},
+ 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'},
+ 'datasets': {'key': 'typeProperties.datasets', 'type': '[DatasetReference]'},
+ 'linked_services': {'key': 'typeProperties.linkedServices', 'type': '[LinkedServiceReference]'},
+ }
+
+ def __init__(self, **kwargs):
+ super(WebActivity, self).__init__(**kwargs)
+ self.method = kwargs.get('method', None)
+ self.url = kwargs.get('url', None)
+ self.headers = kwargs.get('headers', None)
+ self.body = kwargs.get('body', None)
+ self.authentication = kwargs.get('authentication', None)
+ self.datasets = kwargs.get('datasets', None)
+ self.linked_services = kwargs.get('linked_services', None)
+ self.type = 'WebActivity'
+
+
+class WebActivityAuthentication(Model):
+ """Web activity authentication properties.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param type: Required. Web activity authentication
+ (Basic/ClientCertificate/MSI).
+ :type type: str
+ :param pfx: Base64-encoded contents of a PFX file.
+ :type pfx: ~azure.mgmt.datafactory.models.SecureString
+ :param username: Web activity authentication user name for basic
+ authentication.
+ :type username: str
+ :param password: Password for the PFX file or basic authentication.
+ :type password: ~azure.mgmt.datafactory.models.SecureString
+ :param resource: Resource for which Azure Auth token will be requested
+ when using MSI Authentication.
+ :type resource: str
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ 'pfx': {'key': 'pfx', 'type': 'SecureString'},
+ 'username': {'key': 'username', 'type': 'str'},
+ 'password': {'key': 'password', 'type': 'SecureString'},
+ 'resource': {'key': 'resource', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(WebActivityAuthentication, self).__init__(**kwargs)
+ self.type = kwargs.get('type', None)
+ self.pfx = kwargs.get('pfx', None)
+ self.username = kwargs.get('username', None)
+ self.password = kwargs.get('password', None)
+ self.resource = kwargs.get('resource', None)
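
# A minimal usage sketch, not part of the generated patch: a WebActivity that
# GETs an endpoint with MSI authentication. The URL and resource values are
# illustrative; 'MSI' is one of the authentication kinds named in the
# WebActivityAuthentication docstring above.
from azure.mgmt.datafactory.models import WebActivity, WebActivityAuthentication

health_check = WebActivity(
    name='CheckServiceHealth',
    method='GET',
    url='https://example.com/api/health',
    authentication=WebActivityAuthentication(
        type='MSI',
        resource='https://management.azure.com/',
    ),
)

+
+
+class WebLinkedServiceTypeProperties(Model):
+ """Base definition of WebLinkedServiceTypeProperties; this typeProperties is
+ polymorphic based on authenticationType, so not flattened in SDK models.
+
+ You probably want to use the sub-classes and not this class directly.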
Known + sub-classes are: WebClientCertificateAuthentication, + WebBasicAuthentication, WebAnonymousAuthentication + + All required parameters must be populated in order to send to Azure. + + :param url: Required. The URL of the web service endpoint, e.g. + http://www.microsoft.com . Type: string (or Expression with resultType + string). + :type url: object + :param authentication_type: Required. Constant filled by server. + :type authentication_type: str + """ + + _validation = { + 'url': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'url': {'key': 'url', 'type': 'object'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + } + + _subtype_map = { + 'authentication_type': {'ClientCertificate': 'WebClientCertificateAuthentication', 'Basic': 'WebBasicAuthentication', 'Anonymous': 'WebAnonymousAuthentication'} + } + + def __init__(self, **kwargs): + super(WebLinkedServiceTypeProperties, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.authentication_type = None + + +class WebAnonymousAuthentication(WebLinkedServiceTypeProperties): + """A WebLinkedService that uses anonymous authentication to communicate with + an HTTP endpoint. + + All required parameters must be populated in order to send to Azure. + + :param url: Required. The URL of the web service endpoint, e.g. + http://www.microsoft.com . Type: string (or Expression with resultType + string). + :type url: object + :param authentication_type: Required. Constant filled by server. + :type authentication_type: str + """ + + _validation = { + 'url': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'url': {'key': 'url', 'type': 'object'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(WebAnonymousAuthentication, self).__init__(**kwargs) + self.authentication_type = 'Anonymous' + + +class WebBasicAuthentication(WebLinkedServiceTypeProperties): + """A WebLinkedService that uses basic authentication to communicate with an + HTTP endpoint. + + All required parameters must be populated in order to send to Azure. + + :param url: Required. The URL of the web service endpoint, e.g. + http://www.microsoft.com . Type: string (or Expression with resultType + string). + :type url: object + :param authentication_type: Required. Constant filled by server. + :type authentication_type: str + :param username: Required. User name for Basic authentication. Type: + string (or Expression with resultType string). + :type username: object + :param password: Required. The password for Basic authentication. 
+ :type password: ~azure.mgmt.datafactory.models.SecretBase + """ + + _validation = { + 'url': {'required': True}, + 'authentication_type': {'required': True}, + 'username': {'required': True}, + 'password': {'required': True}, + } + + _attribute_map = { + 'url': {'key': 'url', 'type': 'object'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'username': {'key': 'username', 'type': 'object'}, + 'password': {'key': 'password', 'type': 'SecretBase'}, + } + + def __init__(self, **kwargs): + super(WebBasicAuthentication, self).__init__(**kwargs) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.authentication_type = 'Basic' + + +class WebClientCertificateAuthentication(WebLinkedServiceTypeProperties): + """A WebLinkedService that uses client certificate based authentication to + communicate with an HTTP endpoint. This scheme follows mutual + authentication; the server must also provide valid credentials to the + client. + + All required parameters must be populated in order to send to Azure. + + :param url: Required. The URL of the web service endpoint, e.g. + http://www.microsoft.com . Type: string (or Expression with resultType + string). + :type url: object + :param authentication_type: Required. Constant filled by server. + :type authentication_type: str + :param pfx: Required. Base64-encoded contents of a PFX file. + :type pfx: ~azure.mgmt.datafactory.models.SecretBase + :param password: Required. Password for the PFX file. + :type password: ~azure.mgmt.datafactory.models.SecretBase + """ + + _validation = { + 'url': {'required': True}, + 'authentication_type': {'required': True}, + 'pfx': {'required': True}, + 'password': {'required': True}, + } + + _attribute_map = { + 'url': {'key': 'url', 'type': 'object'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'pfx': {'key': 'pfx', 'type': 'SecretBase'}, + 'password': {'key': 'password', 'type': 'SecretBase'}, + } + + def __init__(self, **kwargs): + super(WebClientCertificateAuthentication, self).__init__(**kwargs) + self.pfx = kwargs.get('pfx', None) + self.password = kwargs.get('password', None) + self.authentication_type = 'ClientCertificate' + + +class WebHookActivity(ControlActivity): + """WebHook activity. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :ivar method: Required. Rest API method for target endpoint. Default + value: "POST" . + :vartype method: str + :param url: Required. WebHook activity target endpoint and path. Type: + string (or Expression with resultType string). + :type url: object + :param timeout: The timeout within which the webhook should be called + back. If there is no value specified, it defaults to 10 minutes. Type: + string. Pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type timeout: str + :param headers: Represents the headers that will be sent to the request. + For example, to set the language and type on a request: "headers" : { + "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: + string (or Expression with resultType string). + :type headers: object + :param body: Represents the payload that will be sent to the endpoint. + Required for POST/PUT method, not allowed for GET method Type: string (or + Expression with resultType string). + :type body: object + :param authentication: Authentication method used for calling the + endpoint. + :type authentication: + ~azure.mgmt.datafactory.models.WebActivityAuthentication + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'method': {'required': True, 'constant': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'method': {'key': 'typeProperties.method', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'timeout': {'key': 'typeProperties.timeout', 'type': 'str'}, + 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, + 'body': {'key': 'typeProperties.body', 'type': 'object'}, + 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'}, + } + + method = "POST" + + def __init__(self, **kwargs): + super(WebHookActivity, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.timeout = kwargs.get('timeout', None) + self.headers = kwargs.get('headers', None) + self.body = kwargs.get('body', None) + self.authentication = kwargs.get('authentication', None) + self.type = 'WebHook' + + +class WebLinkedService(LinkedService): + """Web linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param type_properties: Required. Web linked service properties. 
+ :type type_properties: + ~azure.mgmt.datafactory.models.WebLinkedServiceTypeProperties + """ + + _validation = { + 'type': {'required': True}, + 'type_properties': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'type_properties': {'key': 'typeProperties', 'type': 'WebLinkedServiceTypeProperties'}, + } + + def __init__(self, **kwargs): + super(WebLinkedService, self).__init__(**kwargs) + self.type_properties = kwargs.get('type_properties', None) + self.type = 'Web' + + +class WebSource(CopySource): + """A copy activity source for web page table. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(WebSource, self).__init__(**kwargs) + self.type = 'WebSource' + + +class WebTableDataset(Dataset): + """The dataset points to a HTML table in the web page. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. 
If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param index: Required. The zero-based index of the table in the web page.
+ Type: integer (or Expression with resultType integer), minimum: 0.
+ :type index: object
+ :param path: The relative URL to the web page from the linked service URL.
+ Type: string (or Expression with resultType string).
+ :type path: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ 'index': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'index': {'key': 'typeProperties.index', 'type': 'object'},
+ 'path': {'key': 'typeProperties.path', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(WebTableDataset, self).__init__(**kwargs)
+ self.index = kwargs.get('index', None)
+ self.path = kwargs.get('path', None)
+ self.type = 'WebTable'
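
# A minimal usage sketch, not part of the generated patch: the first HTML
# table (zero-based index 0) on a page, with a path relative to the web linked
# service URL. "WebLS" and the path are illustrative names.
from azure.mgmt.datafactory.models import LinkedServiceReference, WebTableDataset

first_table = WebTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='WebLS'),
    index=0,
    path='reports/summary-table',
)

+
+
+class XeroLinkedService(LinkedService):
+ """Xero Service linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param host: Required. The endpoint of the Xero server. (i.e.
+ api.xero.com)
+ :type host: object
+ :param consumer_key: The consumer key associated with the Xero
+ application.
+ :type consumer_key: ~azure.mgmt.datafactory.models.SecretBase
+ :param private_key: The private key from the .pem file that was generated
+ for your Xero private application. You must include all the text from the
+ .pem file, including the Unix line endings (\n).
+ :type private_key: ~azure.mgmt.datafactory.models.SecretBase
+ :param use_encrypted_endpoints: Specifies whether the data source
+ endpoints are encrypted using HTTPS. The default value is true.
+ :type use_encrypted_endpoints: object
+ :param use_host_verification: Specifies whether to require the host name
+ in the server's certificate to match the host name of the server when
+ connecting over SSL. The default value is true.
+ :type use_host_verification: object
+ :param use_peer_verification: Specifies whether to verify the identity of
+ the server when connecting over SSL. The default value is true.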
+ :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'SecretBase'}, + 'private_key': {'key': 'typeProperties.privateKey', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(XeroLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.consumer_key = kwargs.get('consumer_key', None) + self.private_key = kwargs.get('private_key', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Xero' + + +class XeroObjectDataset(Dataset): + """Xero Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(XeroObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'XeroObject' + + +class XeroSource(CopySource): + """A copy activity Xero Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(XeroSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'XeroSource' + + +class ZohoLinkedService(LinkedService): + """Zoho server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of the Zoho server. (i.e. 
+ crm.zoho.com/crm/private)
+ :type endpoint: object
+ :param access_token: The access token for Zoho authentication.
+ :type access_token: ~azure.mgmt.datafactory.models.SecretBase
+ :param use_encrypted_endpoints: Specifies whether the data source
+ endpoints are encrypted using HTTPS. The default value is true.
+ :type use_encrypted_endpoints: object
+ :param use_host_verification: Specifies whether to require the host name
+ in the server's certificate to match the host name of the server when
+ connecting over SSL. The default value is true.
+ :type use_host_verification: object
+ :param use_peer_verification: Specifies whether to verify the identity of
+ the server when connecting over SSL. The default value is true.
+ :type use_peer_verification: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'endpoint': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'},
+ 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'},
+ 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
+ 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
+ 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(ZohoLinkedService, self).__init__(**kwargs)
+ self.endpoint = kwargs.get('endpoint', None)
+ self.access_token = kwargs.get('access_token', None)
+ self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None)
+ self.use_host_verification = kwargs.get('use_host_verification', None)
+ self.use_peer_verification = kwargs.get('use_peer_verification', None)
+ self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.type = 'Zoho'
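
# A minimal usage sketch, not part of the generated patch: a Zoho linked
# service with an inline SecureString access token. The endpoint matches the
# docstring's example; the token value is a placeholder.
from azure.mgmt.datafactory.models import SecureString, ZohoLinkedService

zoho = ZohoLinkedService(
    endpoint='crm.zoho.com/crm/private',
    access_token=SecureString(value='<zoho-access-token>'),
)

+
+
+class ZohoObjectDataset(Dataset):
+ """Zoho server dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset. Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.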
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ZohoObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'ZohoObject' + + +class ZohoSource(CopySource): + """A copy activity Zoho server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'query': {'key': 'query', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(ZohoSource, self).__init__(**kwargs)
+ self.query = kwargs.get('query', None)
+ self.type = 'ZohoSource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py
new file mode 100644
index 000000000000..fb632f37b204
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py
@@ -0,0 +1,28730 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+from msrest.exceptions import HttpOperationError
+
+
+class AccessPolicyResponse(Model):
+ """Get Data Plane read only token response definition.
+
+ :param policy: The user access policy.
+ :type policy: ~azure.mgmt.datafactory.models.UserAccessPolicy
+ :param access_token: Data Plane read only access token.
+ :type access_token: str
+ :param data_plane_url: Data Plane service base URL.
+ :type data_plane_url: str
+ """
+
+ _attribute_map = {
+ 'policy': {'key': 'policy', 'type': 'UserAccessPolicy'},
+ 'access_token': {'key': 'accessToken', 'type': 'str'},
+ 'data_plane_url': {'key': 'dataPlaneUrl', 'type': 'str'},
+ }
+
+ def __init__(self, *, policy=None, access_token: str=None, data_plane_url: str=None, **kwargs) -> None:
+ super(AccessPolicyResponse, self).__init__(**kwargs)
+ self.policy = policy
+ self.access_token = access_token
+ self.data_plane_url = data_plane_url
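
# A minimal sketch, not part of the generated patch, of what the new
# _models_py3 variants change: constructors declare explicit, annotated
# keyword parameters instead of reading **kwargs, so required arguments are
# enforced by Python itself. Values here are placeholders.
from azure.mgmt.datafactory.models import AccessPolicyResponse

response = AccessPolicyResponse(
    access_token='<data-plane-token>',
    data_plane_url='https://adf.azure.com',
)

+
+
+class Activity(Model):
+ """A pipeline activity.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: ExecutionActivity, ControlActivity
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.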
+ :type type: str + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Execution': 'ExecutionActivity', 'Container': 'ControlActivity'} + } + + def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, **kwargs) -> None: + super(Activity, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.name = name + self.description = description + self.depends_on = depends_on + self.user_properties = user_properties + self.type = None + + +class ActivityDependency(Model): + """Activity dependency information. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param activity: Required. Activity name. + :type activity: str + :param dependency_conditions: Required. Match-Condition for the + dependency. + :type dependency_conditions: list[str or + ~azure.mgmt.datafactory.models.DependencyCondition] + """ + + _validation = { + 'activity': {'required': True}, + 'dependency_conditions': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'activity': {'key': 'activity', 'type': 'str'}, + 'dependency_conditions': {'key': 'dependencyConditions', 'type': '[str]'}, + } + + def __init__(self, *, activity: str, dependency_conditions, additional_properties=None, **kwargs) -> None: + super(ActivityDependency, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.activity = activity + self.dependency_conditions = dependency_conditions + + +class ActivityPolicy(Model): + """Execution policy for an activity. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param timeout: Specifies the timeout for the activity to run. The default + timeout is 7 days. Type: string (or Expression with resultType string), + pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type timeout: object + :param retry: Maximum ordinary retry attempts. Default is 0. Type: integer + (or Expression with resultType integer), minimum: 0. + :type retry: object + :param retry_interval_in_seconds: Interval between each retry attempt (in + seconds). The default is 30 sec. + :type retry_interval_in_seconds: int + :param secure_input: When set to true, Input from activity is considered + as secure and will not be logged to monitoring. + :type secure_input: bool + :param secure_output: When set to true, Output from activity is considered + as secure and will not be logged to monitoring. 
+ :type secure_output: bool + """ + + _validation = { + 'retry_interval_in_seconds': {'maximum': 86400, 'minimum': 30}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'timeout': {'key': 'timeout', 'type': 'object'}, + 'retry': {'key': 'retry', 'type': 'object'}, + 'retry_interval_in_seconds': {'key': 'retryIntervalInSeconds', 'type': 'int'}, + 'secure_input': {'key': 'secureInput', 'type': 'bool'}, + 'secure_output': {'key': 'secureOutput', 'type': 'bool'}, + } + + def __init__(self, *, additional_properties=None, timeout=None, retry=None, retry_interval_in_seconds: int=None, secure_input: bool=None, secure_output: bool=None, **kwargs) -> None: + super(ActivityPolicy, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.timeout = timeout + self.retry = retry + self.retry_interval_in_seconds = retry_interval_in_seconds + self.secure_input = secure_input + self.secure_output = secure_output + + +class ActivityRun(Model): + """Information about an activity run in a pipeline. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar pipeline_name: The name of the pipeline. + :vartype pipeline_name: str + :ivar pipeline_run_id: The id of the pipeline run. + :vartype pipeline_run_id: str + :ivar activity_name: The name of the activity. + :vartype activity_name: str + :ivar activity_type: The type of the activity. + :vartype activity_type: str + :ivar activity_run_id: The id of the activity run. + :vartype activity_run_id: str + :ivar linked_service_name: The name of the compute linked service. + :vartype linked_service_name: str + :ivar status: The status of the activity run. + :vartype status: str + :ivar activity_run_start: The start time of the activity run in 'ISO 8601' + format. + :vartype activity_run_start: datetime + :ivar activity_run_end: The end time of the activity run in 'ISO 8601' + format. + :vartype activity_run_end: datetime + :ivar duration_in_ms: The duration of the activity run. + :vartype duration_in_ms: int + :ivar input: The input for the activity. + :vartype input: object + :ivar output: The output for the activity. + :vartype output: object + :ivar error: The error if any from the activity run. 
+ :vartype error: object + """ + + _validation = { + 'pipeline_name': {'readonly': True}, + 'pipeline_run_id': {'readonly': True}, + 'activity_name': {'readonly': True}, + 'activity_type': {'readonly': True}, + 'activity_run_id': {'readonly': True}, + 'linked_service_name': {'readonly': True}, + 'status': {'readonly': True}, + 'activity_run_start': {'readonly': True}, + 'activity_run_end': {'readonly': True}, + 'duration_in_ms': {'readonly': True}, + 'input': {'readonly': True}, + 'output': {'readonly': True}, + 'error': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, + 'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'}, + 'activity_name': {'key': 'activityName', 'type': 'str'}, + 'activity_type': {'key': 'activityType', 'type': 'str'}, + 'activity_run_id': {'key': 'activityRunId', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'activity_run_start': {'key': 'activityRunStart', 'type': 'iso-8601'}, + 'activity_run_end': {'key': 'activityRunEnd', 'type': 'iso-8601'}, + 'duration_in_ms': {'key': 'durationInMs', 'type': 'int'}, + 'input': {'key': 'input', 'type': 'object'}, + 'output': {'key': 'output', 'type': 'object'}, + 'error': {'key': 'error', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(ActivityRun, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.pipeline_name = None + self.pipeline_run_id = None + self.activity_name = None + self.activity_type = None + self.activity_run_id = None + self.linked_service_name = None + self.status = None + self.activity_run_start = None + self.activity_run_end = None + self.duration_in_ms = None + self.input = None + self.output = None + self.error = None + + +class ActivityRunsQueryResponse(Model): + """A list activity runs. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of activity runs. + :type value: list[~azure.mgmt.datafactory.models.ActivityRun] + :param continuation_token: The continuation token for getting the next + page of results, if any remaining results exist, null otherwise. + :type continuation_token: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[ActivityRun]'}, + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + } + + def __init__(self, *, value, continuation_token: str=None, **kwargs) -> None: + super(ActivityRunsQueryResponse, self).__init__(**kwargs) + self.value = value + self.continuation_token = continuation_token + + +class LinkedService(Model): + """The Azure Data Factory nested object which contains the information and + credential which can be used to connect with related store or compute + resource. + + You probably want to use the sub-classes and not this class directly. 
Known + sub-classes are: AzureFunctionLinkedService, + AzureDataExplorerLinkedService, SapTableLinkedService, + GoogleAdWordsLinkedService, OracleServiceCloudLinkedService, + DynamicsAXLinkedService, ResponsysLinkedService, + AzureDatabricksLinkedService, AzureDataLakeAnalyticsLinkedService, + HDInsightOnDemandLinkedService, SalesforceMarketingCloudLinkedService, + NetezzaLinkedService, VerticaLinkedService, ZohoLinkedService, + XeroLinkedService, SquareLinkedService, SparkLinkedService, + ShopifyLinkedService, ServiceNowLinkedService, QuickBooksLinkedService, + PrestoLinkedService, PhoenixLinkedService, PaypalLinkedService, + MarketoLinkedService, AzureMariaDBLinkedService, MariaDBLinkedService, + MagentoLinkedService, JiraLinkedService, ImpalaLinkedService, + HubspotLinkedService, HiveLinkedService, HBaseLinkedService, + GreenplumLinkedService, GoogleBigQueryLinkedService, EloquaLinkedService, + DrillLinkedService, CouchbaseLinkedService, ConcurLinkedService, + AzurePostgreSqlLinkedService, AmazonMWSLinkedService, SapHanaLinkedService, + SapBWLinkedService, SftpServerLinkedService, FtpServerLinkedService, + HttpLinkedService, AzureSearchLinkedService, CustomDataSourceLinkedService, + AmazonRedshiftLinkedService, AmazonS3LinkedService, + RestServiceLinkedService, SapOpenHubLinkedService, SapEccLinkedService, + SapCloudForCustomerLinkedService, SalesforceServiceCloudLinkedService, + SalesforceLinkedService, Office365LinkedService, AzureBlobFSLinkedService, + AzureDataLakeStoreLinkedService, CosmosDbMongoDbApiLinkedService, + MongoDbV2LinkedService, MongoDbLinkedService, CassandraLinkedService, + WebLinkedService, ODataLinkedService, HdfsLinkedService, + MicrosoftAccessLinkedService, InformixLinkedService, OdbcLinkedService, + AzureMLLinkedService, TeradataLinkedService, Db2LinkedService, + SybaseLinkedService, PostgreSqlLinkedService, MySqlLinkedService, + AzureMySqlLinkedService, OracleLinkedService, FileServerLinkedService, + HDInsightLinkedService, CommonDataServiceForAppsLinkedService, + DynamicsCrmLinkedService, DynamicsLinkedService, CosmosDbLinkedService, + AzureKeyVaultLinkedService, AzureBatchLinkedService, + AzureSqlMILinkedService, AzureSqlDatabaseLinkedService, + SqlServerLinkedService, AzureSqlDWLinkedService, + AzureTableStorageLinkedService, AzureBlobStorageLinkedService, + AzureStorageLinkedService + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 
'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: + super(LinkedService, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.connect_via = connect_via + self.description = description + self.parameters = parameters + self.annotations = annotations + self.type = None + + +class AmazonMWSLinkedService(LinkedService): + """Amazon Marketplace Web Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of the Amazon MWS server (e.g. + mws.amazonservices.com). + :type endpoint: object + :param marketplace_id: Required. The Amazon Marketplace ID you want to + retrieve data from. To retrieve data from multiple Marketplace IDs, + separate them with a comma (,) (e.g. A2EUQ1WTGCTBG2). + :type marketplace_id: object + :param seller_id: Required. The Amazon seller ID. + :type seller_id: object + :param mws_auth_token: The Amazon MWS authentication token. + :type mws_auth_token: ~azure.mgmt.datafactory.models.SecretBase + :param access_key_id: Required. The access key ID used to access data. + :type access_key_id: object + :param secret_key: The secret key used to access data. + :type secret_key: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'marketplace_id': {'required': True}, + 'seller_id': {'required': True}, + 'access_key_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'marketplace_id': {'key': 'typeProperties.marketplaceID', 'type': 'object'}, + 'seller_id': {'key': 'typeProperties.sellerID', 'type': 'object'}, + 'mws_auth_token': {'key': 'typeProperties.mwsAuthToken', 'type': 'SecretBase'}, + 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, + 'secret_key': {'key': 'typeProperties.secretKey', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, endpoint, marketplace_id, seller_id, access_key_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, mws_auth_token=None, secret_key=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(AmazonMWSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.endpoint = endpoint + self.marketplace_id = marketplace_id + self.seller_id = seller_id + self.mws_auth_token = mws_auth_token + self.access_key_id = access_key_id + self.secret_key = secret_key + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'AmazonMWS' + + +class Dataset(Model): + """The Azure Data Factory nested object which identifies data within different + data stores, such as tables, files, folders, and documents. + + You probably want to use the sub-classes and not this class directly. 
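+
+# --- Editor's illustrative sketch (not part of the generated code) ---
+# A minimal example of constructing the AmazonMWSLinkedService defined
+# above. The placeholder values are hypothetical; SecureString is one of
+# the SecretBase implementations this package ships for inline secrets.
+#
+#     from azure.mgmt.datafactory.models import (
+#         AmazonMWSLinkedService, SecureString)
+#
+#     mws = AmazonMWSLinkedService(
+#         endpoint='mws.amazonservices.com',
+#         marketplace_id='A2EUQ1WTGCTBG2',
+#         seller_id='<seller-id>',
+#         access_key_id='<access-key-id>',
+#         mws_auth_token=SecureString(value='<auth-token>'),
+#     )
+#     assert mws.type == 'AmazonMWS'  # discriminator set by the subclass
+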
Known + sub-classes are: GoogleAdWordsObjectDataset, AzureDataExplorerTableDataset, + OracleServiceCloudObjectDataset, DynamicsAXResourceDataset, + ResponsysObjectDataset, SalesforceMarketingCloudObjectDataset, + VerticaTableDataset, NetezzaTableDataset, ZohoObjectDataset, + XeroObjectDataset, SquareObjectDataset, SparkObjectDataset, + ShopifyObjectDataset, ServiceNowObjectDataset, QuickBooksObjectDataset, + PrestoObjectDataset, PhoenixObjectDataset, PaypalObjectDataset, + MarketoObjectDataset, AzureMariaDBTableDataset, MariaDBTableDataset, + MagentoObjectDataset, JiraObjectDataset, ImpalaObjectDataset, + HubspotObjectDataset, HiveObjectDataset, HBaseObjectDataset, + GreenplumTableDataset, GoogleBigQueryObjectDataset, EloquaObjectDataset, + DrillTableDataset, CouchbaseTableDataset, ConcurObjectDataset, + AzurePostgreSqlTableDataset, AmazonMWSObjectDataset, HttpDataset, + AzureSearchIndexDataset, WebTableDataset, SapTableResourceDataset, + RestResourceDataset, SqlServerTableDataset, SapOpenHubTableDataset, + SapHanaTableDataset, SapEccResourceDataset, + SapCloudForCustomerResourceDataset, SapBwCubeDataset, SybaseTableDataset, + SalesforceServiceCloudObjectDataset, SalesforceObjectDataset, + MicrosoftAccessTableDataset, PostgreSqlTableDataset, MySqlTableDataset, + OdbcTableDataset, InformixTableDataset, RelationalTableDataset, + Db2TableDataset, AmazonRedshiftTableDataset, AzureMySqlTableDataset, + TeradataTableDataset, OracleTableDataset, ODataResourceDataset, + CosmosDbMongoDbApiCollectionDataset, MongoDbV2CollectionDataset, + MongoDbCollectionDataset, FileShareDataset, Office365Dataset, + AzureBlobFSDataset, AzureDataLakeStoreDataset, + CommonDataServiceForAppsEntityDataset, DynamicsCrmEntityDataset, + DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, + CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, + AzureSqlTableDataset, AzureTableDataset, AzureBlobDataset, BinaryDataset, + DelimitedTextDataset, ParquetDataset, AvroDataset, AmazonS3Dataset + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 
'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(Dataset, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.description = description + self.structure = structure + self.schema = schema + self.linked_service_name = linked_service_name + self.parameters = parameters + self.annotations = annotations + self.folder = folder + self.type = None + + +class AmazonMWSObjectDataset(Dataset): + """Amazon Marketplace Web Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(AmazonMWSObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'AmazonMWSObject' + + +class CopySource(Model): + """A copy activity source. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AmazonRedshiftSource, GoogleAdWordsSource, + OracleServiceCloudSource, DynamicsAXSource, ResponsysSource, + SalesforceMarketingCloudSource, VerticaSource, NetezzaSource, ZohoSource, + XeroSource, SquareSource, SparkSource, ShopifySource, ServiceNowSource, + QuickBooksSource, PrestoSource, PhoenixSource, PaypalSource, MarketoSource, + AzureMariaDBSource, MariaDBSource, MagentoSource, JiraSource, ImpalaSource, + HubspotSource, HiveSource, HBaseSource, GreenplumSource, + GoogleBigQuerySource, EloquaSource, DrillSource, CouchbaseSource, + ConcurSource, AzurePostgreSqlSource, AmazonMWSSource, HttpSource, + AzureBlobFSSource, AzureDataLakeStoreSource, Office365Source, + CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, CassandraSource, + WebSource, TeradataSource, OracleSource, AzureDataExplorerSource, + AzureMySqlSource, HdfsSource, FileSystemSource, SqlDWSource, SqlMISource, + AzureSqlSource, SqlServerSource, SqlSource, RestSource, SapTableSource, + SapOpenHubSource, SapHanaSource, SapEccSource, SapCloudForCustomerSource, + SalesforceServiceCloudSource, SalesforceSource, ODataSource, SapBwSource, + SybaseSource, PostgreSqlSource, MySqlSource, OdbcSource, Db2Source, + MicrosoftAccessSource, InformixSource, RelationalSource, + CommonDataServiceForAppsSource, DynamicsCrmSource, DynamicsSource, + DocumentDbCollectionSource, BlobSource, AzureTableSource, BinarySource, + DelimitedTextSource, ParquetSource, AvroSource + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'ODataSource': 'ODataSource', 'SapBwSource': 'SapBwSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 
'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'BinarySource': 'BinarySource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'AvroSource': 'AvroSource'} + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: + super(CopySource, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.source_retry_count = source_retry_count + self.source_retry_wait = source_retry_wait + self.max_concurrent_connections = max_concurrent_connections + self.type = None + + +class AmazonMWSSource(CopySource): + """A copy activity Amazon Marketplace Web Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(AmazonMWSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'AmazonMWSSource' + + +class AmazonRedshiftLinkedService(LinkedService): + """Linked service for Amazon Redshift. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. 
+ :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. The name of the Amazon Redshift server. Type: + string (or Expression with resultType string). + :type server: object + :param username: The username of the Amazon Redshift source. Type: string + (or Expression with resultType string). + :type username: object + :param password: The password of the Amazon Redshift source. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param database: Required. The database name of the Amazon Redshift + source. Type: string (or Expression with resultType string). + :type database: object + :param port: The TCP port number that the Amazon Redshift server uses to + listen for client connections. The default value is 5439. Type: integer + (or Expression with resultType integer). + :type port: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, server, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, username=None, password=None, port=None, encrypted_credential=None, **kwargs) -> None: + super(AmazonRedshiftLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.server = server + self.username = username + self.password = password + self.database = database + self.port = port + self.encrypted_credential = encrypted_credential + self.type = 'AmazonRedshift' + + +class AmazonRedshiftSource(CopySource): + """A copy activity source for Amazon Redshift. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store.
Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + :param redshift_unload_settings: The Amazon S3 settings needed for the + interim Amazon S3 when copying from Amazon Redshift with unload. With + this, data from Amazon Redshift source will be unloaded into S3 first and + then copied into the targeted sink from the interim S3. + :type redshift_unload_settings: + ~azure.mgmt.datafactory.models.RedshiftUnloadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, redshift_unload_settings=None, **kwargs) -> None: + super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.redshift_unload_settings = redshift_unload_settings + self.type = 'AmazonRedshiftSource' + + +class AmazonRedshiftTableDataset(Dataset): + """The Amazon Redshift table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The Amazon Redshift table name. Type: string (or Expression + with resultType string). + :type table: object + :param amazon_redshift_table_dataset_schema: The Amazon Redshift schema + name. Type: string (or Expression with resultType string). 
+ :type amazon_redshift_table_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'amazon_redshift_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, amazon_redshift_table_dataset_schema=None, **kwargs) -> None: + super(AmazonRedshiftTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.table = table + self.amazon_redshift_table_dataset_schema = amazon_redshift_table_dataset_schema + self.type = 'AmazonRedshiftTable' + + +class AmazonS3Dataset(Dataset): + """A single Amazon Simple Storage Service (S3) object or a set of S3 objects. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param bucket_name: Required. The name of the Amazon S3 bucket. Type: + string (or Expression with resultType string). + :type bucket_name: object + :param key: The key of the Amazon S3 object. Type: string (or Expression + with resultType string). + :type key: object + :param prefix: The prefix filter for the S3 object name. Type: string (or + Expression with resultType string). + :type prefix: object + :param version: The version for the S3 object. 
Type: string (or Expression + with resultType string). + :type version: object + :param modified_datetime_start: The start of S3 object's modified + datetime. Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of S3 object's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_end: object + :param format: The format of files. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param compression: The data compression method used for the Amazon S3 + object. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'bucket_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'bucket_name': {'key': 'typeProperties.bucketName', 'type': 'object'}, + 'key': {'key': 'typeProperties.key', 'type': 'object'}, + 'prefix': {'key': 'typeProperties.prefix', 'type': 'object'}, + 'version': {'key': 'typeProperties.version', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, *, linked_service_name, bucket_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, key=None, prefix=None, version=None, modified_datetime_start=None, modified_datetime_end=None, format=None, compression=None, **kwargs) -> None: + super(AmazonS3Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.bucket_name = bucket_name + self.key = key + self.prefix = prefix + self.version = version + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + self.format = format + self.compression = compression + self.type = 'AmazonS3Object' + + +class AmazonS3LinkedService(LinkedService): + """Linked service for Amazon S3. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param access_key_id: The access key identifier of the Amazon S3 Identity + and Access Management (IAM) user. Type: string (or Expression with + resultType string). + :type access_key_id: object + :param secret_access_key: The secret access key of the Amazon S3 Identity + and Access Management (IAM) user. + :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase + :param service_url: This value specifies the endpoint to access with the + S3 Connector. This is an optional property; change it only if you want to + try a different service endpoint or want to switch between https and http. + Type: string (or Expression with resultType string). + :type service_url: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, + 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, + 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_key_id=None, secret_access_key=None, service_url=None, encrypted_credential=None, **kwargs) -> None: + super(AmazonS3LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.access_key_id = access_key_id + self.secret_access_key = secret_access_key + self.service_url = service_url + self.encrypted_credential = encrypted_credential + self.type = 'AmazonS3' + + +class DatasetLocation(Model): + """Dataset location. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). 
+ :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(DatasetLocation, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type + self.folder_path = folder_path + self.file_name = file_name + + +class AmazonS3Location(DatasetLocation): + """The location of an Amazon S3 dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param bucket_name: Specify the bucketName of Amazon S3. Type: string (or + Expression with resultType string). + :type bucket_name: object + :param version: Specify the version of Amazon S3. Type: string (or + Expression with resultType string). + :type version: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + 'version': {'key': 'version', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, bucket_name=None, version=None, **kwargs) -> None: + super(AmazonS3Location, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) + self.bucket_name = bucket_name + self.version = version + + +class StoreReadSettings(Model): + """Connector read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer).
+ :type max_concurrent_connections: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, **kwargs) -> None: + super(StoreReadSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type + self.max_concurrent_connections = max_concurrent_connections + + +class AmazonS3ReadSettings(StoreReadSettings): + """Amazon S3 read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Amazon S3 wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Amazon S3 wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param prefix: The prefix filter for the S3 object name. Type: string (or + Expression with resultType string). + :type prefix: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string).
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'prefix': {'key': 'prefix', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(AmazonS3ReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.prefix = prefix + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + + +class ControlActivity(Activity): + """Base class for all control activities like IfCondition, ForEach, Until. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: WebHookActivity, AppendVariableActivity, + SetVariableActivity, FilterActivity, ValidationActivity, UntilActivity, + WaitActivity, ForEachActivity, IfConditionActivity, ExecutePipelineActivity + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server.
+ :type type: str + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'WebHook': 'WebHookActivity', 'AppendVariable': 'AppendVariableActivity', 'SetVariable': 'SetVariableActivity', 'Filter': 'FilterActivity', 'Validation': 'ValidationActivity', 'Until': 'UntilActivity', 'Wait': 'WaitActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'ExecutePipeline': 'ExecutePipelineActivity'} + } + + def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, **kwargs) -> None: + super(ControlActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.type = 'Container' + + +class AppendVariableActivity(ControlActivity): + """Append value for a Variable of type Array. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param variable_name: Name of the variable whose value needs to be + appended to. + :type variable_name: str + :param value: Value to be appended. Could be a static value or Expression + :type value: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, + 'value': {'key': 'typeProperties.value', 'type': 'object'}, + } + + def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, variable_name: str=None, value=None, **kwargs) -> None: + super(AppendVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.variable_name = variable_name + self.value = value + self.type = 'AppendVariable' + + +class AvroDataset(Dataset): + """Avro dataset. + + All required parameters must be populated in order to send to Azure. 
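+
+# --- Editor's illustrative sketch (not part of the generated code) ---
+# How the AppendVariableActivity above might be instantiated; the activity
+# name, variable name, and value are hypothetical placeholders.
+#
+#     from azure.mgmt.datafactory.models import AppendVariableActivity
+#
+#     append = AppendVariableActivity(
+#         name='RecordRunId',
+#         variable_name='runIds',        # an Array-type pipeline variable
+#         value='@pipeline().RunId',     # static value or expression
+#     )
+#     assert append.type == 'AppendVariable'
+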
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the avro storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param avro_compression_codec: Possible values include: 'none', 'deflate', + 'snappy', 'xz', 'bzip2' + :type avro_compression_codec: str or + ~azure.mgmt.datafactory.models.AvroCompressionCodec + :param avro_compression_level: + :type avro_compression_level: int + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + 'avro_compression_level': {'maximum': 9, 'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'}, + 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, + } + + def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, avro_compression_codec=None, avro_compression_level: int=None, **kwargs) -> None: + super(AvroDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.location = location + self.avro_compression_codec = avro_compression_codec + self.avro_compression_level = avro_compression_level + self.type = 'Avro' + + +class DatasetStorageFormat(Model): + """The format definition of a storage. + + You probably want to use the sub-classes and not this class directly. 
Known + sub-classes are: ParquetFormat, OrcFormat, AvroFormat, JsonFormat, + TextFormat + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param serializer: Serializer. Type: string (or Expression with resultType + string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with + resultType string). + :type deserializer: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'ParquetFormat': 'ParquetFormat', 'OrcFormat': 'OrcFormat', 'AvroFormat': 'AvroFormat', 'JsonFormat': 'JsonFormat', 'TextFormat': 'TextFormat'} + } + + def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, **kwargs) -> None: + super(DatasetStorageFormat, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.serializer = serializer + self.deserializer = deserializer + self.type = None + + +class AvroFormat(DatasetStorageFormat): + """The data stored in Avro format. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param serializer: Serializer. Type: string (or Expression with resultType + string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with + resultType string). + :type deserializer: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, **kwargs) -> None: + super(AvroFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) + self.type = 'AvroFormat' + + +class CopySink(Model): + """A copy activity sink. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: CosmosDbMongoDbApiSink, SalesforceServiceCloudSink, + SalesforceSink, AzureDataExplorerSink, CommonDataServiceForAppsSink, + DynamicsCrmSink, DynamicsSink, MicrosoftAccessSink, InformixSink, OdbcSink, + AzureSearchIndexSink, AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, + SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, + DocumentDbCollectionSink, FileSystemSink, BlobSink, BinarySink, + ParquetSink, AvroSink, AzureTableSink, AzureQueueSink, + SapCloudForCustomerSink, AzureMySqlSink, AzurePostgreSqlSink, + DelimitedTextSink + + All required parameters must be populated in order to send to Azure. 
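+
+ Since ``type`` is the polymorphic discriminator, a concrete sub-class
+ fills it in for you; a brief sketch using one of the sinks defined in
+ this module::
+
+     from azure.mgmt.datafactory.models import AvroSink
+
+     sink = AvroSink(write_batch_size=10000, sink_retry_count=3)
+     assert sink.type == 'AvroSink'  # constant set by the sub-class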
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'DelimitedTextSink': 'DelimitedTextSink'} + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: + super(CopySink, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.write_batch_size = write_batch_size + self.write_batch_timeout = write_batch_timeout + self.sink_retry_count = sink_retry_count + self.sink_retry_wait = sink_retry_wait + self.max_concurrent_connections = max_concurrent_connections + self.type = None + + +class AvroSink(CopySink): + """A copy activity Avro sink. 
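+
+ A construction sketch (hedged: the record name and namespace are
+ illustrative placeholders)::
+
+     from azure.mgmt.datafactory.models import (
+         AvroSink, AvroWriteSettings, AzureBlobFSWriteSettings)
+
+     sink = AvroSink(
+         store_settings=AzureBlobFSWriteSettings(
+             type='AzureBlobFSWriteSettings'),
+         format_settings=AvroWriteSettings(
+             type='AvroWriteSettings',
+             record_name='root', record_namespace='com.example'))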
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Avro store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :param format_settings: Avro format settings. + :type format_settings: ~azure.mgmt.datafactory.models.AvroWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: + super(AvroSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.format_settings = format_settings + self.type = 'AvroSink' + + +class AvroSource(CopySource): + """A copy activity Avro source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. 
Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Avro store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + super(AvroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.type = 'AvroSource' + + +class FormatWriteSettings(Model): + """Format write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, type: str, additional_properties=None, **kwargs) -> None: + super(FormatWriteSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type + + +class AvroWriteSettings(FormatWriteSettings): + """Avro write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param record_name: Top level record name in write result, which is + required in AVRO spec. + :type record_name: str + :param record_namespace: Record namespace in the write result. + :type record_namespace: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'record_name': {'key': 'recordName', 'type': 'str'}, + 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, + } + + def __init__(self, *, type: str, additional_properties=None, record_name: str=None, record_namespace: str=None, **kwargs) -> None: + super(AvroWriteSettings, self).__init__(additional_properties=additional_properties, type=type, **kwargs) + self.record_name = record_name + self.record_namespace = record_namespace + + +class AzureBatchLinkedService(LinkedService): + """Azure Batch linked service. + + All required parameters must be populated in order to send to Azure. 
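+
+ A minimal sketch (hedged: the account, URI, pool and secret values are
+ placeholders; ``SecureString`` is the plain-text ``SecretBase``
+ implementation from this package)::
+
+     from azure.mgmt.datafactory.models import (
+         AzureBatchLinkedService, LinkedServiceReference, SecureString)
+
+     batch_ls = AzureBatchLinkedService(
+         account_name='mybatchaccount',
+         batch_uri='https://mybatchaccount.westus2.batch.azure.com',
+         pool_name='mypool',
+         linked_service_name=LinkedServiceReference(
+             reference_name='MyStorageLinkedService'),
+         access_key=SecureString(value='<placeholder>'))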
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param account_name: Required. The Azure Batch account name. Type: string + (or Expression with resultType string). + :type account_name: object + :param access_key: The Azure Batch account access key. + :type access_key: ~azure.mgmt.datafactory.models.SecretBase + :param batch_uri: Required. The Azure Batch URI. Type: string (or + Expression with resultType string). + :type batch_uri: object + :param pool_name: Required. The Azure Batch pool name. Type: string (or + Expression with resultType string). + :type pool_name: object + :param linked_service_name: Required. The Azure Storage linked service + reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'account_name': {'required': True}, + 'batch_uri': {'required': True}, + 'pool_name': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, + 'access_key': {'key': 'typeProperties.accessKey', 'type': 'SecretBase'}, + 'batch_uri': {'key': 'typeProperties.batchUri', 'type': 'object'}, + 'pool_name': {'key': 'typeProperties.poolName', 'type': 'object'}, + 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, account_name, batch_uri, pool_name, linked_service_name, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_key=None, encrypted_credential=None, **kwargs) -> None: + super(AzureBatchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.account_name = account_name + self.access_key = access_key + self.batch_uri = batch_uri + self.pool_name = pool_name + self.linked_service_name = linked_service_name + self.encrypted_credential = encrypted_credential + self.type = 'AzureBatch' + + +class AzureBlobDataset(Dataset): + """The Azure Blob storage. 
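+
+ A minimal sketch (hedged: the linked-service name and blob path are
+ placeholders; ``format`` and ``compression`` may additionally reference
+ the storage-format models defined above)::
+
+     from azure.mgmt.datafactory.models import (
+         AzureBlobDataset, LinkedServiceReference)
+
+     blob_ds = AzureBlobDataset(
+         linked_service_name=LinkedServiceReference(
+             reference_name='MyBlobLinkedService'),
+         folder_path='container/inputs', file_name='events.avro')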
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param folder_path: The path of the Azure Blob storage. Type: string (or + Expression with resultType string). + :type folder_path: object + :param table_root_location: The root of blob path. Type: string (or + Expression with resultType string). + :type table_root_location: object + :param file_name: The name of the Azure Blob. Type: string (or Expression + with resultType string). + :type file_name: object + :param modified_datetime_start: The start of Azure Blob's modified + datetime. Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of Azure Blob's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_end: object + :param format: The format of the Azure Blob storage. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param compression: The data compression method used for the blob storage. 
+ :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'table_root_location': {'key': 'typeProperties.tableRootLocation', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, table_root_location=None, file_name=None, modified_datetime_start=None, modified_datetime_end=None, format=None, compression=None, **kwargs) -> None: + super(AzureBlobDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.folder_path = folder_path + self.table_root_location = table_root_location + self.file_name = file_name + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + self.format = format + self.compression = compression + self.type = 'AzureBlob' + + +class AzureBlobFSDataset(Dataset): + """The Azure Data Lake Storage Gen2 storage. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. 
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param folder_path: The path of the Azure Data Lake Storage Gen2 storage. + Type: string (or Expression with resultType string). + :type folder_path: object + :param file_name: The name of the Azure Data Lake Storage Gen2. Type: + string (or Expression with resultType string). + :type file_name: object + :param format: The format of the Azure Data Lake Storage Gen2 storage. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param compression: The data compression method used for the blob storage. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, format=None, compression=None, **kwargs) -> None: + super(AzureBlobFSDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.folder_path = folder_path + self.file_name = file_name + self.format = format + self.compression = compression + self.type = 'AzureBlobFSFile' + + +class AzureBlobFSLinkedService(LinkedService): + """Azure Data Lake Storage Gen2 linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. Endpoint for the Azure Data Lake Storage Gen2 + service. Type: string (or Expression with resultType string). + :type url: object + :param account_key: Account key for the Azure Data Lake Storage Gen2 + service. Type: string (or Expression with resultType string). 
+ :type account_key: object + :param service_principal_id: The ID of the application used to + authenticate against the Azure Data Lake Storage Gen2 account. Type: + string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The Key of the application used to + authenticate against the Azure Data Lake Storage Gen2 account. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, account_key=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: + super(AzureBlobFSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.account_key = account_key + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential + self.type = 'AzureBlobFS' + + +class AzureBlobFSLocation(DatasetLocation): + """The location of azure blobFS dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param file_system: Specify the fileSystem of azure blobFS. Type: string + (or Expression with resultType string). 
+ :type file_system: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'file_system': {'key': 'fileSystem', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, file_system=None, **kwargs) -> None: + super(AzureBlobFSLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) + self.file_system = file_system + + +class AzureBlobFSReadSettings(StoreReadSettings): + """Azure blobFS read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Azure blobFS wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). 
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(AzureBlobFSReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + + +class AzureBlobFSSink(CopySink): + """A copy activity Azure Data Lake Storage Gen2 sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param copy_behavior: The type of copy behavior for copy sink. 
+ :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(AzureBlobFSSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.copy_behavior = copy_behavior + self.type = 'AzureBlobFSSink' + + +class AzureBlobFSSource(CopySource): + """A copy activity Azure BlobFS source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param treat_empty_as_null: Treat empty as null. Type: boolean (or + Expression with resultType boolean). + :type treat_empty_as_null: object + :param skip_header_line_count: Number of header lines to skip from each + blob. Type: integer (or Expression with resultType integer). + :type skip_header_line_count: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). 
+ :type recursive: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, + 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, treat_empty_as_null=None, skip_header_line_count=None, recursive=None, **kwargs) -> None: + super(AzureBlobFSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.treat_empty_as_null = treat_empty_as_null + self.skip_header_line_count = skip_header_line_count + self.recursive = recursive + self.type = 'AzureBlobFSSource' + + +class StoreWriteSettings(Model): + """Connector write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(StoreWriteSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type + self.max_concurrent_connections = max_concurrent_connections + self.copy_behavior = copy_behavior + + +class AzureBlobFSWriteSettings(StoreWriteSettings): + """Azure blobFS write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. 
+ :type copy_behavior: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
+ }
+
+ def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None:
+ super(AzureBlobFSWriteSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs)
+
+
+class AzureBlobStorageLinkedService(LinkedService):
+ """The azure blob storage linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param connection_string: The connection string. It is mutually exclusive
+ with sasUri, serviceEndpoint property. Type: string, SecureString or
+ AzureKeyVaultSecretReference.
+ :type connection_string: object
+ :param account_key: The Azure key vault secret reference of accountKey in
+ connection string.
+ :type account_key:
+ ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+ :param sas_uri: SAS URI of the Azure Blob Storage resource. It is mutually
+ exclusive with connectionString, serviceEndpoint property. Type: string,
+ SecureString or AzureKeyVaultSecretReference.
+ :type sas_uri: object
+ :param sas_token: The Azure key vault secret reference of sasToken in sas
+ uri.
+ :type sas_token:
+ ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+ :param service_endpoint: Blob service endpoint of the Azure Blob Storage
+ resource. It is mutually exclusive with connectionString, sasUri property.
+ :type service_endpoint: str
+ :param service_principal_id: The ID of the service principal used to
+ authenticate against Azure Blob Storage. Type: string (or Expression
+ with resultType string).
+ :type service_principal_id: object
+ :param service_principal_key: The key of the service principal used to
+ authenticate against Azure Blob Storage.
+ :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
+ :param tenant: The name or ID of the tenant to which the service principal
+ belongs. Type: string (or Expression with resultType string).
+ :type tenant: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, + 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, + 'service_endpoint': {'key': 'typeProperties.serviceEndpoint', 'type': 'str'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, account_key=None, sas_uri=None, sas_token=None, service_endpoint: str=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential: str=None, **kwargs) -> None: + super(AzureBlobStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.account_key = account_key + self.sas_uri = sas_uri + self.sas_token = sas_token + self.service_endpoint = service_endpoint + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential + self.type = 'AzureBlobStorage' + + +class AzureBlobStorageLocation(DatasetLocation): + """The location of azure blob dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param container: Specify the container of azure blob. Type: string (or + Expression with resultType string). 
+ :type container: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'container': {'key': 'container', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, container=None, **kwargs) -> None: + super(AzureBlobStorageLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) + self.container = container + + +class AzureBlobStorageReadSettings(StoreReadSettings): + """Azure blob read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Azure blob wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blob wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). 
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(AzureBlobStorageReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + + +class AzureBlobStorageWriteSettings(StoreWriteSettings): + """Azure blob write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + + +class AzureDatabricksLinkedService(LinkedService): + """Azure Databricks linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param domain: Required. <REGION>.azuredatabricks.net, domain name of your
+ Databricks deployment. Type: string (or Expression with resultType
+ string).
+ :type domain: object
+ :param access_token: Required. Access token for databricks REST API. Refer
+ to https://docs.azuredatabricks.net/api/latest/authentication.html. Type:
+ string (or Expression with resultType string).
+ :type access_token: ~azure.mgmt.datafactory.models.SecretBase
+ :param existing_cluster_id: The id of an existing cluster that will be
+ used for all runs of this job. Type: string (or Expression with resultType
+ string).
+ :type existing_cluster_id: object
+ :param new_cluster_version: The Spark version of new cluster. Type: string
+ (or Expression with resultType string).
+ :type new_cluster_version: object
+ :param new_cluster_num_of_worker: Number of worker nodes that new cluster
+ should have. A string formatted Int32, like '1' means numOfWorker is 1 or
+ '1:10' means auto-scale from 1 as min and 10 as max. Type: string (or
+ Expression with resultType string).
+ :type new_cluster_num_of_worker: object
+ :param new_cluster_node_type: The node types of new cluster. Type: string
+ (or Expression with resultType string).
+ :type new_cluster_node_type: object
+ :param new_cluster_spark_conf: A set of optional, user-specified Spark
+ configuration key-value pairs.
+ :type new_cluster_spark_conf: dict[str, object]
+ :param new_cluster_spark_env_vars: A set of optional, user-specified Spark
+ environment variables key-value pairs.
+ :type new_cluster_spark_env_vars: dict[str, object]
+ :param new_cluster_custom_tags: Additional tags for cluster resources.
+ :type new_cluster_custom_tags: dict[str, object]
+ :param new_cluster_driver_node_type: The driver node type for the new
+ cluster. Type: string (or Expression with resultType string).
+ :type new_cluster_driver_node_type: object
+ :param new_cluster_init_scripts: User-defined initialization scripts for
+ the new cluster. Type: array of strings (or Expression with resultType
+ array of strings).
+ :type new_cluster_init_scripts: object
+ :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new
+ cluster. Type: boolean (or Expression with resultType boolean).
+ :type new_cluster_enable_elastic_disk: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'domain': {'required': True}, + 'access_token': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, + 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, + 'new_cluster_num_of_worker': {'key': 'typeProperties.newClusterNumOfWorker', 'type': 'object'}, + 'new_cluster_node_type': {'key': 'typeProperties.newClusterNodeType', 'type': 'object'}, + 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, + 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, + 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, + 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, + 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, + 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, domain, access_token, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, existing_cluster_id=None, new_cluster_version=None, new_cluster_num_of_worker=None, new_cluster_node_type=None, new_cluster_spark_conf=None, new_cluster_spark_env_vars=None, new_cluster_custom_tags=None, new_cluster_driver_node_type=None, new_cluster_init_scripts=None, new_cluster_enable_elastic_disk=None, encrypted_credential=None, **kwargs) -> None: + super(AzureDatabricksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.domain = domain + self.access_token = access_token + self.existing_cluster_id = existing_cluster_id + self.new_cluster_version = new_cluster_version + self.new_cluster_num_of_worker = new_cluster_num_of_worker + self.new_cluster_node_type = new_cluster_node_type + self.new_cluster_spark_conf = new_cluster_spark_conf + self.new_cluster_spark_env_vars = new_cluster_spark_env_vars + self.new_cluster_custom_tags = new_cluster_custom_tags + self.new_cluster_driver_node_type = new_cluster_driver_node_type + self.new_cluster_init_scripts = new_cluster_init_scripts + self.new_cluster_enable_elastic_disk = new_cluster_enable_elastic_disk + self.encrypted_credential = encrypted_credential + self.type = 'AzureDatabricks' + + +class ExecutionActivity(Activity): + """Base class for all execution activities. + + You probably want to use the sub-classes and not this class directly. 
Known + sub-classes are: AzureFunctionActivity, DatabricksSparkPythonActivity, + DatabricksSparkJarActivity, DatabricksNotebookActivity, + DataLakeAnalyticsUSQLActivity, AzureMLUpdateResourceActivity, + AzureMLBatchExecutionActivity, GetMetadataActivity, WebActivity, + LookupActivity, AzureDataExplorerCommandActivity, DeleteActivity, + SqlServerStoredProcedureActivity, CustomActivity, + ExecuteSSISPackageActivity, HDInsightSparkActivity, + HDInsightStreamingActivity, HDInsightMapReduceActivity, + HDInsightPigActivity, HDInsightHiveActivity, CopyActivity + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + } + + _subtype_map = { + 'type': {'AzureFunctionActivity': 'AzureFunctionActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'GetMetadata': 'GetMetadataActivity', 'WebActivity': 'WebActivity', 'Lookup': 'LookupActivity', 'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'Delete': 'DeleteActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'Custom': 'CustomActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'Copy': 'CopyActivity'} + } + + def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, **kwargs) -> None: + super(ExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.linked_service_name = linked_service_name + self.policy = policy + self.type = 
'Execution'
+
+
+class AzureDataExplorerCommandActivity(ExecutionActivity):
+ """Azure Data Explorer command activity.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param linked_service_name: Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param policy: Activity policy.
+ :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+ :param command: Required. A control command, according to the Azure Data
+ Explorer command syntax. Type: string (or Expression with resultType
+ string).
+ :type command: object
+ :param command_timeout: Control command timeout. Type: string (or
+ Expression with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type command_timeout: object
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'command': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+ 'command': {'key': 'typeProperties.command', 'type': 'object'},
+ 'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'},
+ }
+
+ def __init__(self, *, name: str, command, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, command_timeout=None, **kwargs) -> None:
+ super(AzureDataExplorerCommandActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
+ self.command = command
+ self.command_timeout = command_timeout
+ self.type = 'AzureDataExplorerCommand'
+
+
+class AzureDataExplorerLinkedService(LinkedService):
+ """Azure Data Explorer (Kusto) linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
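A usage sketch for the AzureDataExplorerCommandActivity just defined; the activity name and control command are illustrative, and the timeout string follows the pattern documented above:

    from azure.mgmt.datafactory.models import AzureDataExplorerCommandActivity

    command_activity = AzureDataExplorerCommandActivity(
        name='ShowDatabaseSchema',        # illustrative activity name
        command='.show database schema',  # illustrative Kusto control command
        command_timeout='00:10:00',       # hh:mm:ss, per the documented pattern
    )
    assert command_activity.type == 'AzureDataExplorerCommand'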
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param endpoint: Required. The endpoint of Azure Data Explorer (the
+ engine's endpoint). URL will be in the format
+ https://<clusterName>.<regionName>.kusto.windows.net. Type: string (or
+ Expression with resultType string).
+ :type endpoint: object
+ :param service_principal_id: Required. The ID of the service principal
+ used to authenticate against Azure Data Explorer. Type: string (or
+ Expression with resultType string).
+ :type service_principal_id: object
+ :param service_principal_key: Required. The key of the service principal
+ used to authenticate against Kusto.
+ :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
+ :param database: Required. Database name for connection. Type: string (or
+ Expression with resultType string).
+ :type database: object
+ :param tenant: Required. The name or ID of the tenant to which the service
+ principal belongs. Type: string (or Expression with resultType string).
+ :type tenant: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'endpoint': {'required': True},
+ 'service_principal_id': {'required': True},
+ 'service_principal_key': {'required': True},
+ 'database': {'required': True},
+ 'tenant': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'},
+ 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
+ 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
+ 'database': {'key': 'typeProperties.database', 'type': 'object'},
+ 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
+ }
+
+ def __init__(self, *, endpoint, service_principal_id, service_principal_key, database, tenant, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None:
+ super(AzureDataExplorerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+ self.endpoint = endpoint
+ self.service_principal_id = service_principal_id
+ self.service_principal_key = service_principal_key
+ self.database = database
+ self.tenant = tenant
+ self.type = 'AzureDataExplorer'
+
+
+class AzureDataExplorerSink(CopySink):
+ """A copy activity Azure Data Explorer sink.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param write_batch_size: Write batch size. Type: integer (or Expression
+ with resultType integer), minimum: 0.
+ :type write_batch_size: object
+ :param write_batch_timeout: Write batch timeout. Type: string (or
+ Expression with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
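A construction sketch for AzureDataExplorerLinkedService with all five required type properties; the endpoint, identifiers, and database name are illustrative placeholders:

    from azure.mgmt.datafactory.models import (
        AzureDataExplorerLinkedService, SecureString)

    kusto_service = AzureDataExplorerLinkedService(
        endpoint='https://mycluster.westus2.kusto.windows.net',  # engine endpoint
        service_principal_id='<application-id>',
        service_principal_key=SecureString(value='<application-key>'),
        database='mydatabase',
        tenant='<tenant-id>',
    )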
+ :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param ingestion_mapping_name: A name of a pre-created csv mapping that + was defined on the target Kusto table. Type: string. + :type ingestion_mapping_name: object + :param ingestion_mapping_as_json: An explicit column mapping description + provided in a json format. Type: string. + :type ingestion_mapping_as_json: object + :param flush_immediately: If set to true, any aggregation will be skipped. + Default is false. Type: boolean. + :type flush_immediately: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'}, + 'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'}, + 'flush_immediately': {'key': 'flushImmediately', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ingestion_mapping_name=None, ingestion_mapping_as_json=None, flush_immediately=None, **kwargs) -> None: + super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.ingestion_mapping_name = ingestion_mapping_name + self.ingestion_mapping_as_json = ingestion_mapping_as_json + self.flush_immediately = flush_immediately + self.type = 'AzureDataExplorerSink' + + +class AzureDataExplorerSource(CopySource): + """A copy activity Azure Data Explorer (Kusto) source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). 
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param query: Required. Database query. Should be a Kusto Query Language
+ (KQL) query. Type: string (or Expression with resultType string).
+ :type query: object
+ :param no_truncation: The name of the Boolean option that controls whether
+ truncation is applied to result-sets that go beyond a certain row-count
+ limit.
+ :type no_truncation: object
+ :param query_timeout: Query timeout. Type: string (or Expression with
+ resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type query_timeout: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'query': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'query': {'key': 'query', 'type': 'object'},
+ 'no_truncation': {'key': 'noTruncation', 'type': 'object'},
+ 'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
+ }
+
+ def __init__(self, *, query, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, no_truncation=None, query_timeout=None, **kwargs) -> None:
+ super(AzureDataExplorerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.query = query
+ self.no_truncation = no_truncation
+ self.query_timeout = query_timeout
+ self.type = 'AzureDataExplorerSource'
+
+
+class AzureDataExplorerTableDataset(Dataset):
+ """The Azure Data Explorer (Kusto) dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset. Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param table: The table name of the Azure Data Explorer database. Type:
+ string (or Expression with resultType string).
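A sketch of an AzureDataExplorerSource as it might appear inside a copy activity; the KQL text is illustrative:

    from azure.mgmt.datafactory.models import AzureDataExplorerSource

    adx_source = AzureDataExplorerSource(
        query='MyTable | where Timestamp > ago(1d)',  # illustrative KQL query
        no_truncation=True,                           # lift the row-count limit
        query_timeout='00:20:00',                     # hh:mm:ss, per the pattern above
    )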
+ :type table: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'table': {'key': 'typeProperties.table', 'type': 'object'},
+ }
+
+ def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table=None, **kwargs) -> None:
+ super(AzureDataExplorerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+ self.table = table
+ self.type = 'AzureDataExplorerTable'
+
+
+class AzureDataLakeAnalyticsLinkedService(LinkedService):
+ """Azure Data Lake Analytics linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param account_name: Required. The Azure Data Lake Analytics account name.
+ Type: string (or Expression with resultType string).
+ :type account_name: object
+ :param service_principal_id: The ID of the application used to
+ authenticate against the Azure Data Lake Analytics account. Type: string
+ (or Expression with resultType string).
+ :type service_principal_id: object
+ :param service_principal_key: The Key of the application used to
+ authenticate against the Azure Data Lake Analytics account.
+ :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
+ :param tenant: Required. The name or ID of the tenant to which the service
+ principal belongs. Type: string (or Expression with resultType string).
+ :type tenant: object
+ :param subscription_id: Data Lake Analytics account subscription ID (if
+ different from Data Factory account). Type: string (or Expression with
+ resultType string).
+ :type subscription_id: object
+ :param resource_group_name: Data Lake Analytics account resource group
+ name (if different from Data Factory account). Type: string (or Expression
+ with resultType string).
+ :type resource_group_name: object
+ :param data_lake_analytics_uri: Azure Data Lake Analytics URI. Type:
+ string (or Expression with resultType string).
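A sketch pairing the AzureDataExplorerTableDataset above with a linked service reference; the reference name and table are illustrative, and LinkedServiceReference is assumed to take reference_name as elsewhere in this SDK:

    from azure.mgmt.datafactory.models import (
        AzureDataExplorerTableDataset, LinkedServiceReference)

    adx_dataset = AzureDataExplorerTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='KustoService'),
        table='MyTable',   # table name inside the Azure Data Explorer database
    )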
+ :type data_lake_analytics_uri: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'account_name': {'required': True}, + 'tenant': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, + 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, + 'data_lake_analytics_uri': {'key': 'typeProperties.dataLakeAnalyticsUri', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, account_name, tenant, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, subscription_id=None, resource_group_name=None, data_lake_analytics_uri=None, encrypted_credential=None, **kwargs) -> None: + super(AzureDataLakeAnalyticsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.account_name = account_name + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.subscription_id = subscription_id + self.resource_group_name = resource_group_name + self.data_lake_analytics_uri = data_lake_analytics_uri + self.encrypted_credential = encrypted_credential + self.type = 'AzureDataLakeAnalytics' + + +class AzureDataLakeStoreDataset(Dataset): + """Azure Data Lake Store dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
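One detail worth calling out: _attribute_map keys such as 'typeProperties.accountName' mean the flat Python attributes are nested under typeProperties on the wire. For the AzureDataLakeAnalyticsLinkedService above, the serialized payload should therefore look roughly like this sketch (all values illustrative):

    expected_shape = {
        'type': 'AzureDataLakeAnalytics',
        'typeProperties': {
            'accountName': 'myadlaaccount',   # from account_name
            'tenant': '<tenant-id>',          # from tenant
            'subscriptionId': '<sub-id>',     # from subscription_id
        },
    }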
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param folder_path: Path to the folder in the Azure Data Lake Store. Type: + string (or Expression with resultType string). + :type folder_path: object + :param file_name: The name of the file in the Azure Data Lake Store. Type: + string (or Expression with resultType string). + :type file_name: object + :param format: The format of the Data Lake Store. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param compression: The data compression method used for the item(s) in + the Azure Data Lake Store. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, format=None, compression=None, **kwargs) -> None: + super(AzureDataLakeStoreDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.folder_path = folder_path + self.file_name = file_name + self.format = format + self.compression = compression + self.type = 'AzureDataLakeStoreFile' + + +class AzureDataLakeStoreLinkedService(LinkedService): + """Azure Data Lake Store linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. 
Constant filled by server. + :type type: str + :param data_lake_store_uri: Required. Data Lake Store service URI. Type: + string (or Expression with resultType string). + :type data_lake_store_uri: object + :param service_principal_id: The ID of the application used to + authenticate against the Azure Data Lake Store account. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The Key of the application used to + authenticate against the Azure Data Lake Store account. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param account_name: Data Lake Store account name. Type: string (or + Expression with resultType string). + :type account_name: object + :param subscription_id: Data Lake Store account subscription ID (if + different from Data Factory account). Type: string (or Expression with + resultType string). + :type subscription_id: object + :param resource_group_name: Data Lake Store account resource group name + (if different from Data Factory account). Type: string (or Expression with + resultType string). + :type resource_group_name: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'data_lake_store_uri': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'data_lake_store_uri': {'key': 'typeProperties.dataLakeStoreUri', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, + 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, + 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, data_lake_store_uri, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, tenant=None, account_name=None, subscription_id=None, resource_group_name=None, encrypted_credential=None, **kwargs) -> None: + super(AzureDataLakeStoreLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.data_lake_store_uri = data_lake_store_uri + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.account_name = account_name + self.subscription_id = subscription_id + 
self.resource_group_name = resource_group_name
+ self.encrypted_credential = encrypted_credential
+ self.type = 'AzureDataLakeStore'
+
+
+class AzureDataLakeStoreLocation(DatasetLocation):
+ """The location of Azure Data Lake Store dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. Type of dataset storage location.
+ :type type: str
+ :param folder_path: Specify the folder path of dataset. Type: string (or
+ Expression with resultType string).
+ :type folder_path: object
+ :param file_name: Specify the file name of dataset. Type: string (or
+ Expression with resultType string).
+ :type file_name: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'folder_path': {'key': 'folderPath', 'type': 'object'},
+ 'file_name': {'key': 'fileName', 'type': 'object'},
+ }
+
+ def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None:
+ super(AzureDataLakeStoreLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs)
+
+
+class AzureDataLakeStoreReadSettings(StoreReadSettings):
+ """Azure Data Lake Store read settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. The read setting type.
+ :type type: str
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param recursive: If true, files under the folder path will be read
+ recursively. Default is true. Type: boolean (or Expression with resultType
+ boolean).
+ :type recursive: object
+ :param wildcard_folder_path: ADLS wildcardFolderPath. Type: string (or
+ Expression with resultType string).
+ :type wildcard_folder_path: object
+ :param wildcard_file_name: ADLS wildcardFileName. Type: string (or
+ Expression with resultType string).
+ :type wildcard_file_name: object
+ :param enable_partition_discovery: Indicates whether to enable partition
+ discovery.
+ :type enable_partition_discovery: bool
+ :param modified_datetime_start: The start of file's modified datetime.
+ Type: string (or Expression with resultType string).
+ :type modified_datetime_start: object
+ :param modified_datetime_end: The end of file's modified datetime. Type:
+ string (or Expression with resultType string).
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(AzureDataLakeStoreReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + + +class AzureDataLakeStoreSink(CopySink): + """A copy activity Azure Data Lake Store sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param enable_adls_single_file_parallel: Single File Parallel. 
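Unlike the discriminated models above, AzureDataLakeStoreLocation and AzureDataLakeStoreReadSettings take type as an explicit required argument rather than filling in a constant, so the caller supplies it directly. A sketch (the paths are illustrative and the conventional type strings are assumptions):

    from azure.mgmt.datafactory.models import (
        AzureDataLakeStoreLocation, AzureDataLakeStoreReadSettings)

    location = AzureDataLakeStoreLocation(
        type='AzureDataLakeStoreLocation',      # caller-supplied, no server constant
        folder_path='raw/events/2019',          # illustrative folder
    )
    read_settings = AzureDataLakeStoreReadSettings(
        type='AzureDataLakeStoreReadSettings',  # caller-supplied as well
        recursive=True,
        wildcard_file_name='*.csv',             # illustrative wildcard
        enable_partition_discovery=True,
    )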
+ :type enable_adls_single_file_parallel: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, enable_adls_single_file_parallel=None, **kwargs) -> None: + super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.copy_behavior = copy_behavior + self.enable_adls_single_file_parallel = enable_adls_single_file_parallel + self.type = 'AzureDataLakeStoreSink' + + +class AzureDataLakeStoreSource(CopySource): + """A copy activity Azure Data Lake source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). 
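A short sketch of the sink above; 'PreserveHierarchy' is one of the copy-behavior strings Data Factory accepts, used here purely as an illustrative value:

    from azure.mgmt.datafactory.models import AzureDataLakeStoreSink

    adls_sink = AzureDataLakeStoreSink(
        copy_behavior='PreserveHierarchy',      # illustrative copy behavior
        enable_adls_single_file_parallel=True,  # opt in to single-file parallelism
    )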
+ :type recursive: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'recursive': {'key': 'recursive', 'type': 'object'},
+ }
+
+ def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, **kwargs) -> None:
+ super(AzureDataLakeStoreSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.recursive = recursive
+ self.type = 'AzureDataLakeStoreSource'
+
+
+class AzureDataLakeStoreWriteSettings(StoreWriteSettings):
+ """Azure Data Lake Store write settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. The write setting type.
+ :type type: str
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the sink data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param copy_behavior: The type of copy behavior for copy sink.
+ :type copy_behavior: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
+ }
+
+ def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None:
+ super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs)
+
+
+class AzureFunctionActivity(ExecutionActivity):
+ """Azure Function activity.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param linked_service_name: Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param policy: Activity policy.
+ :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+ :param method: Required. REST API method for target endpoint.
Possible
+ values include: 'GET', 'POST', 'PUT', 'DELETE', 'OPTIONS', 'HEAD', 'TRACE'
+ :type method: str or
+ ~azure.mgmt.datafactory.models.AzureFunctionActivityMethod
+ :param function_name: Required. Name of the Function that the Azure
+ Function Activity will call. Type: string (or Expression with resultType
+ string).
+ :type function_name: object
+ :param headers: Represents the headers that will be sent to the request.
+ For example, to set the language and type on a request: "headers" : {
+ "Accept-Language": "en-us", "Content-Type": "application/json" }. Type:
+ string (or Expression with resultType string).
+ :type headers: object
+ :param body: Represents the payload that will be sent to the endpoint.
+ Required for POST/PUT method, not allowed for GET method. Type: string (or
+ Expression with resultType string).
+ :type body: object
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'method': {'required': True},
+ 'function_name': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+ 'method': {'key': 'typeProperties.method', 'type': 'str'},
+ 'function_name': {'key': 'typeProperties.functionName', 'type': 'object'},
+ 'headers': {'key': 'typeProperties.headers', 'type': 'object'},
+ 'body': {'key': 'typeProperties.body', 'type': 'object'},
+ }
+
+ def __init__(self, *, name: str, method, function_name, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, headers=None, body=None, **kwargs) -> None:
+ super(AzureFunctionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
+ self.method = method
+ self.function_name = function_name
+ self.headers = headers
+ self.body = body
+ self.type = 'AzureFunctionActivity'
+
+
+class AzureFunctionLinkedService(LinkedService):
+ """Azure Function linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param function_app_url: Required. The endpoint of the Azure Function App.
+ URL will be in the format https://<accountName>.azurewebsites.net.
+ :type function_app_url: object
+ :param function_key: Function or Host key for Azure Function App.
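A usage sketch for AzureFunctionActivity; the activity and function names are illustrative, and a body is included because the docstring requires one for POST:

    from azure.mgmt.datafactory.models import AzureFunctionActivity

    function_activity = AzureFunctionActivity(
        name='NotifyCompletion',                       # illustrative activity name
        method='POST',                                 # an AzureFunctionActivityMethod value
        function_name='HttpTriggerNotify',             # illustrative function name
        headers={'Content-Type': 'application/json'},
        body='{"status": "done"}',                     # required for POST/PUT
    )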
+ :type function_key: ~azure.mgmt.datafactory.models.SecretBase
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'function_app_url': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'function_app_url': {'key': 'typeProperties.functionAppUrl', 'type': 'object'},
+ 'function_key': {'key': 'typeProperties.functionKey', 'type': 'SecretBase'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, *, function_app_url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, function_key=None, encrypted_credential=None, **kwargs) -> None:
+ super(AzureFunctionLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+ self.function_app_url = function_app_url
+ self.function_key = function_key
+ self.encrypted_credential = encrypted_credential
+ self.type = 'AzureFunction'
+
+
+class AzureKeyVaultLinkedService(LinkedService):
+ """Azure Key Vault linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param base_url: Required. The base URL of the Azure Key Vault. e.g.
+ https://myakv.vault.azure.net. Type: string (or Expression with resultType
+ string).
+ :type base_url: object + """ + + _validation = { + 'type': {'required': True}, + 'base_url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'base_url': {'key': 'typeProperties.baseUrl', 'type': 'object'}, + } + + def __init__(self, *, base_url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: + super(AzureKeyVaultLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.base_url = base_url + self.type = 'AzureKeyVault' + + +class SecretBase(Model): + """The base definition of a secret type. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SecureString, AzureKeyVaultSecretReference + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SecureString': 'SecureString', 'AzureKeyVaultSecret': 'AzureKeyVaultSecretReference'} + } + + def __init__(self, **kwargs) -> None: + super(SecretBase, self).__init__(**kwargs) + self.type = None + + +class AzureKeyVaultSecretReference(SecretBase): + """Azure Key Vault secret reference. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param store: Required. The Azure Key Vault linked service reference. + :type store: ~azure.mgmt.datafactory.models.LinkedServiceReference + :param secret_name: Required. The name of the secret in Azure Key Vault. + Type: string (or Expression with resultType string). + :type secret_name: object + :param secret_version: The version of the secret in Azure Key Vault. The + default value is the latest version of the secret. Type: string (or + Expression with resultType string). + :type secret_version: object + """ + + _validation = { + 'type': {'required': True}, + 'store': {'required': True}, + 'secret_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'store': {'key': 'store', 'type': 'LinkedServiceReference'}, + 'secret_name': {'key': 'secretName', 'type': 'object'}, + 'secret_version': {'key': 'secretVersion', 'type': 'object'}, + } + + def __init__(self, *, store, secret_name, secret_version=None, **kwargs) -> None: + super(AzureKeyVaultSecretReference, self).__init__(**kwargs) + self.store = store + self.secret_name = secret_name + self.secret_version = secret_version + self.type = 'AzureKeyVaultSecret' + + +class AzureMariaDBLinkedService(LinkedService): + """Azure Database for MariaDB linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
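The two classes above combine naturally: a secret stored in Key Vault is referenced through the Key Vault linked service. A sketch (the vault URL, reference name, and secret name are illustrative):

    from azure.mgmt.datafactory.models import (
        AzureKeyVaultLinkedService, AzureKeyVaultSecretReference,
        LinkedServiceReference)

    key_vault = AzureKeyVaultLinkedService(
        base_url='https://myakv.vault.azure.net')   # illustrative vault URL
    password_ref = AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='MyKeyVaultService'),
        secret_name='db-password',
        # secret_version omitted: defaults to the latest version, per the docstring
    )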
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: + super(AzureMariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.pwd = pwd + self.encrypted_credential = encrypted_credential + self.type = 'AzureMariaDB' + + +class AzureMariaDBSource(CopySource): + """A copy activity Azure MariaDB source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
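Continuing the previous sketch, the pwd property of AzureMariaDBLinkedService is exactly such an AzureKeyVaultSecretReference; the ODBC connection string here is an illustrative placeholder:

    from azure.mgmt.datafactory.models import AzureMariaDBLinkedService

    mariadb_service = AzureMariaDBLinkedService(
        connection_string='Server=myserver.mariadb.database.azure.com;'
                          'Port=3306;Database=mydb;Uid=myadmin;',
        pwd=password_ref,   # AzureKeyVaultSecretReference from the previous sketch
    )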
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(AzureMariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'AzureMariaDBSource' + + +class AzureMariaDBTableDataset(Dataset): + """Azure Database for MariaDB dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+ }
+
+ def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None:
+ super(AzureMariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+ self.table_name = table_name
+ self.type = 'AzureMariaDBTable'
+
+
+ class AzureMLBatchExecutionActivity(ExecutionActivity):
+ """Azure ML Batch Execution activity.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param linked_service_name: Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param policy: Activity policy.
+ :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+ :param global_parameters: Key,Value pairs to be passed to the Azure ML
+ Batch Execution Service endpoint. Keys must match the names of web service
+ parameters defined in the published Azure ML web service. Values will be
+ passed in the GlobalParameters property of the Azure ML batch execution
+ request.
+ :type global_parameters: dict[str, object]
+ :param web_service_outputs: Key,Value pairs, mapping the names of Azure ML
+ endpoint's Web Service Outputs to AzureMLWebServiceFile objects specifying
+ the output Blob locations. This information will be passed in the
+ WebServiceOutputs property of the Azure ML batch execution request.
+ :type web_service_outputs: dict[str,
+ ~azure.mgmt.datafactory.models.AzureMLWebServiceFile]
+ :param web_service_inputs: Key,Value pairs, mapping the names of Azure ML
+ endpoint's Web Service Inputs to AzureMLWebServiceFile objects specifying
+ the input Blob locations. This information will be passed in the
+ WebServiceInputs property of the Azure ML batch execution request.
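+
+ Illustrative example (editorial sketch, not part of the generated model;
+ the activity, linked service, container and file names are hypothetical
+ placeholders)::
+
+     from azure.mgmt.datafactory.models import (
+         AzureMLBatchExecutionActivity, AzureMLWebServiceFile,
+         LinkedServiceReference)
+
+     # Storage linked service holding the web service input/output blobs.
+     storage = LinkedServiceReference(reference_name='MyStorageLinkedService')
+     activity = AzureMLBatchExecutionActivity(
+         name='ScoreBatch',
+         global_parameters={'Threshold': '0.8'},
+         web_service_inputs={'input1': AzureMLWebServiceFile(
+             file_path='mycontainer/input.csv', linked_service_name=storage)},
+         web_service_outputs={'output1': AzureMLWebServiceFile(
+             file_path='mycontainer/scored.csv', linked_service_name=storage)})
+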
+ :type web_service_inputs: dict[str, + ~azure.mgmt.datafactory.models.AzureMLWebServiceFile] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'global_parameters': {'key': 'typeProperties.globalParameters', 'type': '{object}'}, + 'web_service_outputs': {'key': 'typeProperties.webServiceOutputs', 'type': '{AzureMLWebServiceFile}'}, + 'web_service_inputs': {'key': 'typeProperties.webServiceInputs', 'type': '{AzureMLWebServiceFile}'}, + } + + def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, global_parameters=None, web_service_outputs=None, web_service_inputs=None, **kwargs) -> None: + super(AzureMLBatchExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.global_parameters = global_parameters + self.web_service_outputs = web_service_outputs + self.web_service_inputs = web_service_inputs + self.type = 'AzureMLBatchExecution' + + +class AzureMLLinkedService(LinkedService): + """Azure ML Web Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param ml_endpoint: Required. The Batch Execution REST URL for an Azure ML + Web Service endpoint. Type: string (or Expression with resultType string). + :type ml_endpoint: object + :param api_key: Required. The API key for accessing the Azure ML model + endpoint. + :type api_key: ~azure.mgmt.datafactory.models.SecretBase + :param update_resource_endpoint: The Update Resource REST URL for an Azure + ML Web Service endpoint. Type: string (or Expression with resultType + string). + :type update_resource_endpoint: object + :param service_principal_id: The ID of the service principal used to + authenticate against the ARM-based updateResourceEndpoint of an Azure ML + web service. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to + authenticate against the ARM-based updateResourceEndpoint of an Azure ML + web service. 
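+
+ Illustrative example (editorial sketch; the endpoint URL and key are
+ hypothetical placeholders, and ``SecureString`` is assumed to be the
+ plain-text ``SecretBase`` implementation from this models package)::
+
+     from azure.mgmt.datafactory.models import (
+         AzureMLLinkedService, SecureString)
+
+     # Linked service pointing at a published Azure ML batch endpoint.
+     ls = AzureMLLinkedService(
+         ml_endpoint='https://<region>.services.azureml.net/workspaces/'
+                     '<workspace-id>/services/<service-id>/jobs',
+         api_key=SecureString(value='<api-key>'))
+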
+ :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'ml_endpoint': {'required': True}, + 'api_key': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'ml_endpoint': {'key': 'typeProperties.mlEndpoint', 'type': 'object'}, + 'api_key': {'key': 'typeProperties.apiKey', 'type': 'SecretBase'}, + 'update_resource_endpoint': {'key': 'typeProperties.updateResourceEndpoint', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, ml_endpoint, api_key, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, update_resource_endpoint=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: + super(AzureMLLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.ml_endpoint = ml_endpoint + self.api_key = api_key + self.update_resource_endpoint = update_resource_endpoint + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential + self.type = 'AzureML' + + +class AzureMLUpdateResourceActivity(ExecutionActivity): + """Azure ML Update Resource management activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param trained_model_name: Required. 
Name of the Trained Model module in
+ the Web Service experiment to be updated. Type: string (or Expression with
+ resultType string).
+ :type trained_model_name: object
+ :param trained_model_linked_service_name: Required. Name of Azure Storage
+ linked service holding the .ilearner file that will be uploaded by the
+ update operation.
+ :type trained_model_linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param trained_model_file_path: Required. The relative file path in
+ trainedModelLinkedService to represent the .ilearner file that will be
+ uploaded by the update operation. Type: string (or Expression with
+ resultType string).
+ :type trained_model_file_path: object
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'trained_model_name': {'required': True},
+ 'trained_model_linked_service_name': {'required': True},
+ 'trained_model_file_path': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+ 'trained_model_name': {'key': 'typeProperties.trainedModelName', 'type': 'object'},
+ 'trained_model_linked_service_name': {'key': 'typeProperties.trainedModelLinkedServiceName', 'type': 'LinkedServiceReference'},
+ 'trained_model_file_path': {'key': 'typeProperties.trainedModelFilePath', 'type': 'object'},
+ }
+
+ def __init__(self, *, name: str, trained_model_name, trained_model_linked_service_name, trained_model_file_path, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, **kwargs) -> None:
+ super(AzureMLUpdateResourceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
+ self.trained_model_name = trained_model_name
+ self.trained_model_linked_service_name = trained_model_linked_service_name
+ self.trained_model_file_path = trained_model_file_path
+ self.type = 'AzureMLUpdateResource'
+
+
+ class AzureMLWebServiceFile(Model):
+ """Azure ML WebService Input/Output file.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param file_path: Required. The relative file path, including container
+ name, in the Azure Blob Storage specified by the LinkedService. Type:
+ string (or Expression with resultType string).
+ :type file_path: object
+ :param linked_service_name: Required. Reference to an Azure Storage
+ LinkedService, where the Azure ML WebService Input/Output file is located.
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + """ + + _validation = { + 'file_path': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'file_path': {'key': 'filePath', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + } + + def __init__(self, *, file_path, linked_service_name, **kwargs) -> None: + super(AzureMLWebServiceFile, self).__init__(**kwargs) + self.file_path = file_path + self.linked_service_name = linked_service_name + + +class AzureMySqlLinkedService(LinkedService): + """Azure MySQL database linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(AzureMySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'AzureMySql' + + +class AzureMySqlSink(CopySink): + """A copy activity Azure MySql sink. + + All required parameters must be populated in order to send to Azure. 
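+
+ Illustrative example (editorial sketch; the pre-copy script is a
+ hypothetical placeholder)::
+
+     from azure.mgmt.datafactory.models import AzureMySqlSink
+
+     # Truncate the staging table before the copy activity writes to it.
+     sink = AzureMySqlSink(pre_copy_script='TRUNCATE TABLE staging_orders')
+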
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). + :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(AzureMySqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.pre_copy_script = pre_copy_script + self.type = 'AzureMySqlSink' + + +class AzureMySqlSource(CopySource): + """A copy activity Azure MySQL source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'AzureMySqlSource' + + +class AzureMySqlTableDataset(Dataset): + """The Azure MySQL database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Azure MySQL database table name. Type: string (or + Expression with resultType string). 
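+
+ Illustrative example (editorial sketch; the linked service and table
+ names are hypothetical placeholders)::
+
+     from azure.mgmt.datafactory.models import (
+         AzureMySqlTableDataset, LinkedServiceReference)
+
+     # Dataset bound to a table in an existing Azure MySQL linked service.
+     dataset = AzureMySqlTableDataset(
+         linked_service_name=LinkedServiceReference(
+             reference_name='MyAzureMySqlLinkedService'),
+         table_name='orders')
+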
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(AzureMySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'AzureMySqlTable' + + +class AzurePostgreSqlLinkedService(LinkedService): + """Azure PostgreSQL linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
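+
+ Illustrative example (editorial sketch; the connection string, linked
+ service and secret names are hypothetical placeholders)::
+
+     from azure.mgmt.datafactory.models import (
+         AzureKeyVaultSecretReference, AzurePostgreSqlLinkedService,
+         LinkedServiceReference)
+
+     # Keep the password out of the connection string by referencing a
+     # secret in an Azure Key Vault linked service.
+     ls = AzurePostgreSqlLinkedService(
+         connection_string='Server=<server>;Database=<db>;Port=5432;UID=<user>;',
+         password=AzureKeyVaultSecretReference(
+             store=LinkedServiceReference(reference_name='MyKeyVault'),
+             secret_name='postgres-password'))
+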
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(AzurePostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'AzurePostgreSql' + + +class AzurePostgreSqlSink(CopySink): + """A copy activity Azure PostgreSQL sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). 
+ :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.pre_copy_script = pre_copy_script + self.type = 'AzurePostgreSqlSink' + + +class AzurePostgreSqlSource(CopySource): + """A copy activity Azure PostgreSQL source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'AzurePostgreSqlSource' + + +class AzurePostgreSqlTableDataset(Dataset): + """Azure PostgreSQL dataset. + + All required parameters must be populated in order to send to Azure. 
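+
+ Illustrative example (editorial sketch; names are hypothetical
+ placeholders) showing the separate table and schema type properties::
+
+     from azure.mgmt.datafactory.models import (
+         AzurePostgreSqlTableDataset, LinkedServiceReference)
+
+     # Prefer the split table/schema properties over the legacy tableName.
+     dataset = AzurePostgreSqlTableDataset(
+         linked_service_name=LinkedServiceReference(
+             reference_name='MyAzurePostgreSqlLinkedService'),
+         table='orders',
+         azure_postgre_sql_table_dataset_schema='public')
+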
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name of the Azure PostgreSQL database which + includes both schema and table. Type: string (or Expression with + resultType string). + :type table_name: object + :param table: The table name of the Azure PostgreSQL database. Type: + string (or Expression with resultType string). + :type table: object + :param azure_postgre_sql_table_dataset_schema: The schema name of the + Azure PostgreSQL database. Type: string (or Expression with resultType + string). + :type azure_postgre_sql_table_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'azure_postgre_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, azure_postgre_sql_table_dataset_schema=None, **kwargs) -> None: + super(AzurePostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.table = table + self.azure_postgre_sql_table_dataset_schema = azure_postgre_sql_table_dataset_schema + self.type = 'AzurePostgreSqlTable' + + +class AzureQueueSink(CopySink): + """A copy activity Azure Queue sink. + + All required parameters must be populated in order to send to Azure. 
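+
+ Illustrative example (editorial sketch; this sink has no type-specific
+ properties, so only the shared copy-sink settings apply)::
+
+     from azure.mgmt.datafactory.models import AzureQueueSink
+
+     # A queue sink configured with a single concurrent connection.
+     sink = AzureQueueSink(max_concurrent_connections=1)
+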
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: + super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureQueueSink' + + +class AzureSearchIndexDataset(Dataset): + """The Azure Search Index. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. 
Constant filled by server. + :type type: str + :param index_name: Required. The name of the Azure Search Index. Type: + string (or Expression with resultType string). + :type index_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'index_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'index_name': {'key': 'typeProperties.indexName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, index_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(AzureSearchIndexDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.index_name = index_name + self.type = 'AzureSearchIndex' + + +class AzureSearchIndexSink(CopySink): + """A copy activity Azure Search Index sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param write_behavior: Specify the write behavior when upserting documents + into Azure Search Index. 
Possible values include: 'Merge', 'Upload'
+ :type write_behavior: str or
+ ~azure.mgmt.datafactory.models.AzureSearchIndexWriteBehaviorType
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+ 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+ 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
+ }
+
+ def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None:
+ super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.write_behavior = write_behavior
+ self.type = 'AzureSearchIndexSink'
+
+
+ class AzureSearchLinkedService(LinkedService):
+ """Linked service for Windows Azure Search Service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param url: Required. URL for Azure Search service. Type: string (or
+ Expression with resultType string).
+ :type url: object
+ :param key: Admin Key for Azure Search service.
+ :type key: ~azure.mgmt.datafactory.models.SecretBase
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
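+
+ Illustrative example (editorial sketch; the service URL and key are
+ hypothetical placeholders, and ``SecureString`` is assumed to be the
+ plain-text ``SecretBase`` implementation from this models package)::
+
+     from azure.mgmt.datafactory.models import (
+         AzureSearchLinkedService, SecureString)
+
+     # Linked service authenticated with the search service admin key.
+     ls = AzureSearchLinkedService(
+         url='https://<search-service>.search.windows.net',
+         key=SecureString(value='<admin-key>'))
+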
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'key': {'key': 'typeProperties.key', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, key=None, encrypted_credential=None, **kwargs) -> None: + super(AzureSearchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.key = key + self.encrypted_credential = encrypted_credential + self.type = 'AzureSearch' + + +class AzureSqlDatabaseLinkedService(LinkedService): + """Microsoft Azure SQL Database linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param service_principal_id: The ID of the service principal used to + authenticate against Azure SQL Database. Type: string (or Expression with + resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to + authenticate against Azure SQL Database. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
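+
+ Illustrative example (editorial sketch; identifiers and the secret are
+ hypothetical placeholders) using service principal authentication::
+
+     from azure.mgmt.datafactory.models import (
+         AzureSqlDatabaseLinkedService, SecureString)
+
+     # Service principal credentials instead of a password in the
+     # connection string.
+     ls = AzureSqlDatabaseLinkedService(
+         connection_string='Server=tcp:<server>.database.windows.net,1433;'
+                           'Database=<db>;',
+         service_principal_id='<application-id>',
+         service_principal_key=SecureString(value='<application-secret>'),
+         tenant='<tenant-id>')
+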
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'connection_string': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+ 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'},
+ 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
+ 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
+ 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None:
+ super(AzureSqlDatabaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+ self.connection_string = connection_string
+ self.password = password
+ self.service_principal_id = service_principal_id
+ self.service_principal_key = service_principal_key
+ self.tenant = tenant
+ self.encrypted_credential = encrypted_credential
+ self.type = 'AzureSqlDatabase'
+
+
+ class AzureSqlDWLinkedService(LinkedService):
+ """Azure SQL Data Warehouse linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param connection_string: Required. The connection string. Type: string,
+ SecureString or AzureKeyVaultSecretReference.
+ :type connection_string: object
+ :param password: The Azure key vault secret reference of password in
+ connection string.
+ :type password:
+ ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+ :param service_principal_id: The ID of the service principal used to
+ authenticate against Azure SQL Data Warehouse. Type: string (or Expression
+ with resultType string).
+ :type service_principal_id: object
+ :param service_principal_key: The key of the service principal used to
+ authenticate against Azure SQL Data Warehouse.
+ :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: + super(AzureSqlDWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.password = password + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential + self.type = 'AzureSqlDW' + + +class AzureSqlDWTableDataset(Dataset): + """The Azure SQL Data Warehouse dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. 
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param azure_sql_dw_table_dataset_schema: The schema name of the Azure SQL + Data Warehouse. Type: string (or Expression with resultType string). + :type azure_sql_dw_table_dataset_schema: object + :param table: The table name of the Azure SQL Data Warehouse. Type: string + (or Expression with resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'azure_sql_dw_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, azure_sql_dw_table_dataset_schema=None, table=None, **kwargs) -> None: + super(AzureSqlDWTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.azure_sql_dw_table_dataset_schema = azure_sql_dw_table_dataset_schema + self.table = table + self.type = 'AzureSqlDWTable' + + +class AzureSqlMILinkedService(LinkedService): + """Azure SQL Managed Instance linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param service_principal_id: The ID of the service principal used to + authenticate against Azure SQL Managed Instance. 
Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to + authenticate against Azure SQL Managed Instance. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: + super(AzureSqlMILinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.password = password + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential + self.type = 'AzureSqlMI' + + +class AzureSqlMITableDataset(Dataset): + """The Azure SQL Managed Instance dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param azure_sql_mi_table_dataset_schema: The schema name of the Azure SQL + Managed Instance. Type: string (or Expression with resultType string). + :type azure_sql_mi_table_dataset_schema: object + :param table: The table name of the Azure SQL Managed Instance dataset. + Type: string (or Expression with resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'azure_sql_mi_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, azure_sql_mi_table_dataset_schema=None, table=None, **kwargs) -> None: + super(AzureSqlMITableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.azure_sql_mi_table_dataset_schema = azure_sql_mi_table_dataset_schema + self.table = table + self.type = 'AzureSqlMITable' + + +class AzureSqlSink(CopySink): + """A copy activity Azure SQL sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None: + super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name + self.sql_writer_table_type = sql_writer_table_type + self.pre_copy_script = pre_copy_script + self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.type = 'AzureSqlSink' + + +class AzureSqlSource(CopySource): + """A copy activity Azure SQL source. + + All required parameters must be populated in order to send to Azure. 
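
# Sketch for the sink above (procedure, table-type and parameter names are
# placeholders), assuming StoredProcedureParameter from this same package.
from azure.mgmt.datafactory.models import (
    AzureSqlSink, StoredProcedureParameter)

sql_sink = AzureSqlSink(
    sql_writer_stored_procedure_name='spOverwriteFactSales',
    sql_writer_table_type='FactSalesType',
    stored_procedure_table_type_parameter_name='FactSales',
    stored_procedure_parameters={
        'batchId': StoredProcedureParameter(value='1', type='Int')})
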
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Database source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. + :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: + super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.produce_additional_types = produce_additional_types + self.type = 'AzureSqlSource' + + +class AzureSqlTableDataset(Dataset): + """The Azure SQL Server database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. 
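
# Sketch: sql_reader_query and sql_reader_stored_procedure_name are mutually
# exclusive per the docstring above, so this example sets only the query
# (the query text is a placeholder).
from azure.mgmt.datafactory.models import AzureSqlSource

sql_source = AzureSqlSource(
    sql_reader_query='SELECT TOP 10 * FROM dbo.FactSales')
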
Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param azure_sql_table_dataset_schema: The schema name of the Azure SQL + database. Type: string (or Expression with resultType string). + :type azure_sql_table_dataset_schema: object + :param table: The table name of the Azure SQL database. Type: string (or + Expression with resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'azure_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, azure_sql_table_dataset_schema=None, table=None, **kwargs) -> None: + super(AzureSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.azure_sql_table_dataset_schema = azure_sql_table_dataset_schema + self.table = table + self.type = 'AzureSqlTable' + + +class AzureStorageLinkedService(LinkedService): + """The storage account linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. 
+ :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: The connection string. It is mutually exclusive + with sasUri property. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param account_key: The Azure key vault secret reference of accountKey in + connection string. + :type account_key: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param sas_uri: SAS URI of the Azure Storage resource. It is mutually + exclusive with connectionString property. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type sas_uri: object + :param sas_token: The Azure key vault secret reference of sasToken in sas + uri. + :type sas_token: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, + 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, account_key=None, sas_uri=None, sas_token=None, encrypted_credential: str=None, **kwargs) -> None: + super(AzureStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.account_key = account_key + self.sas_uri = sas_uri + self.sas_token = sas_token + self.encrypted_credential = encrypted_credential + self.type = 'AzureStorage' + + +class AzureTableDataset(Dataset): + """The Azure Table storage dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. 
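
# Sketch (account and secret names are placeholders): connectionString and
# sasUri are mutually exclusive, so only one is supplied; the accountKey is
# resolved from Key Vault via AzureKeyVaultSecretReference.
from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference, AzureStorageLinkedService,
    LinkedServiceReference)

storage_ls = AzureStorageLinkedService(
    connection_string='DefaultEndpointsProtocol=https;AccountName=myaccount;',
    account_key=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='MyAzureKeyVault'),
        secret_name='storage-account-key'))
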
Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: Required. The table name of the Azure Table storage. + Type: string (or Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'table_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(AzureTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'AzureTable' + + +class AzureTableSink(CopySink): + """A copy activity Azure Table sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
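
# Sketch: unlike the SQL table datasets above, table_name is required here
# (names are placeholders).
from azure.mgmt.datafactory.models import (
    AzureTableDataset, LinkedServiceReference)

table_dataset = AzureTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='MyTableStorage'),
    table_name='DeviceReadings')
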
+ :type type: str + :param azure_table_default_partition_key_value: Azure Table default + partition key value. Type: string (or Expression with resultType string). + :type azure_table_default_partition_key_value: object + :param azure_table_partition_key_name: Azure Table partition key name. + Type: string (or Expression with resultType string). + :type azure_table_partition_key_name: object + :param azure_table_row_key_name: Azure Table row key name. Type: string + (or Expression with resultType string). + :type azure_table_row_key_name: object + :param azure_table_insert_type: Azure Table insert type. Type: string (or + Expression with resultType string). + :type azure_table_insert_type: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'}, + 'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'}, + 'azure_table_row_key_name': {'key': 'azureTableRowKeyName', 'type': 'object'}, + 'azure_table_insert_type': {'key': 'azureTableInsertType', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, azure_table_default_partition_key_value=None, azure_table_partition_key_name=None, azure_table_row_key_name=None, azure_table_insert_type=None, **kwargs) -> None: + super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.azure_table_default_partition_key_value = azure_table_default_partition_key_value + self.azure_table_partition_key_name = azure_table_partition_key_name + self.azure_table_row_key_name = azure_table_row_key_name + self.azure_table_insert_type = azure_table_insert_type + self.type = 'AzureTableSink' + + +class AzureTableSource(CopySource): + """A copy activity Azure Table source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
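
# Sketch for AzureTableSink above; the key names are placeholders, and
# 'merge' as the insert type is an assumption about the accepted values.
from azure.mgmt.datafactory.models import AzureTableSink

table_sink = AzureTableSink(
    azure_table_partition_key_name='DeviceId',
    azure_table_row_key_name='ReadingId',
    azure_table_insert_type='merge')
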
+ :type type: str
+ :param azure_table_source_query: Azure Table source query. Type: string
+ (or Expression with resultType string).
+ :type azure_table_source_query: object
+ :param azure_table_source_ignore_table_not_found: Azure Table source
+ ignore table not found. Type: boolean (or Expression with resultType
+ boolean).
+ :type azure_table_source_ignore_table_not_found: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'},
+ 'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'},
+ }
+
+ def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, azure_table_source_query=None, azure_table_source_ignore_table_not_found=None, **kwargs) -> None:
+ super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.azure_table_source_query = azure_table_source_query
+ self.azure_table_source_ignore_table_not_found = azure_table_source_ignore_table_not_found
+ self.type = 'AzureTableSource'
+
+
+class AzureTableStorageLinkedService(LinkedService):
+ """The Azure Table storage linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param connection_string: The connection string. It is mutually exclusive
+ with sasUri property. Type: string, SecureString or
+ AzureKeyVaultSecretReference.
+ :type connection_string: object
+ :param account_key: The Azure key vault secret reference of accountKey in
+ connection string.
+ :type account_key:
+ ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+ :param sas_uri: SAS URI of the Azure Storage resource. It is mutually
+ exclusive with connectionString property. Type: string, SecureString or
+ AzureKeyVaultSecretReference.
+ :type sas_uri: object
+ :param sas_token: The Azure key vault secret reference of sasToken in sas
+ uri.
+ :type sas_token:
+ ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, + 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, account_key=None, sas_uri=None, sas_token=None, encrypted_credential: str=None, **kwargs) -> None: + super(AzureTableStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.account_key = account_key + self.sas_uri = sas_uri + self.sas_token = sas_token + self.encrypted_credential = encrypted_credential + self.type = 'AzureTableStorage' + + +class BinaryDataset(Dataset): + """Binary dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the Binary storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param compression: The data compression method used for the binary + dataset. 
+ :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, compression=None, **kwargs) -> None: + super(BinaryDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.location = location + self.compression = compression + self.type = 'Binary' + + +class BinarySink(CopySink): + """A copy activity Binary sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Binary store settings. 
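
# Sketch assuming the DatasetLocation model defined elsewhere in this
# package; the 'AzureBlobStorageLocation' discriminator value and the paths
# are placeholders/assumptions.
from azure.mgmt.datafactory.models import (
    BinaryDataset, DatasetLocation, LinkedServiceReference)

binary_dataset = BinaryDataset(
    linked_service_name=LinkedServiceReference(reference_name='MyBlobStorage'),
    location=DatasetLocation(
        type='AzureBlobStorageLocation',   # assumed discriminator value
        folder_path='container/incoming',
        file_name='payload.bin'))
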
+ :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+ 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+ 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
+ }
+
+ def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None:
+ super(BinarySink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.store_settings = store_settings
+ self.type = 'BinarySink'
+
+
+class BinarySource(CopySource):
+ """A copy activity Binary source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param store_settings: Binary store settings.
+ :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
+ }
+
+ def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None:
+ super(BinarySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.store_settings = store_settings
+ self.type = 'BinarySource'
+
+
+class Trigger(Model):
+ """Azure data factory nested object which contains information about
+ creating a pipeline run.
+
+ You probably want to use the sub-classes and not this class directly.
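
# Sketch showing where sources and sinks plug in: the two Binary classes
# above wired into a CopyActivity (CopyActivity and DatasetReference are
# defined elsewhere in this package; dataset names are placeholders).
from azure.mgmt.datafactory.models import (
    BinarySink, BinarySource, CopyActivity, DatasetReference)

copy_binary = CopyActivity(
    name='CopyBinaryPayload',
    inputs=[DatasetReference(reference_name='SourceBinaryDataset')],
    outputs=[DatasetReference(reference_name='SinkBinaryDataset')],
    source=BinarySource(),
    sink=BinarySink())
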
Known + sub-classes are: RerunTumblingWindowTrigger, TumblingWindowTrigger, + MultiplePipelineTrigger + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} + } + + def __init__(self, *, additional_properties=None, description: str=None, annotations=None, **kwargs) -> None: + super(Trigger, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.description = description + self.runtime_state = None + self.annotations = annotations + self.type = None + + +class MultiplePipelineTrigger(Trigger): + """Base class for all triggers that support one to many model for trigger to + pipeline. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: BlobEventsTrigger, BlobTrigger, ScheduleTrigger + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param pipelines: Pipelines that need to be started. 
+ :type pipelines: + list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + } + + _subtype_map = { + 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} + } + + def __init__(self, *, additional_properties=None, description: str=None, annotations=None, pipelines=None, **kwargs) -> None: + super(MultiplePipelineTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) + self.pipelines = pipelines + self.type = 'MultiplePipelineTrigger' + + +class BlobEventsTrigger(MultiplePipelineTrigger): + """Trigger that runs every time a Blob event occurs. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param pipelines: Pipelines that need to be started. + :type pipelines: + list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :param blob_path_begins_with: The blob path must begin with the pattern + provided for trigger to fire. For example, '/records/blobs/december/' will + only fire the trigger for blobs in the december folder under the records + container. At least one of these must be provided: blobPathBeginsWith, + blobPathEndsWith. + :type blob_path_begins_with: str + :param blob_path_ends_with: The blob path must end with the pattern + provided for trigger to fire. For example, 'december/boxes.csv' will only + fire the trigger for blobs named boxes in a december folder. At least one + of these must be provided: blobPathBeginsWith, blobPathEndsWith. + :type blob_path_ends_with: str + :param events: Required. The type of events that cause this trigger to + fire. + :type events: list[str or ~azure.mgmt.datafactory.models.BlobEventTypes] + :param scope: Required. The ARM resource ID of the Storage Account. 
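
# Sketch: the _subtype_map above is what lets msrest resolve the concrete
# trigger class from the 'type' discriminator when deserializing (payload
# values are placeholders; Model.deserialize is inherited from msrest).
from azure.mgmt.datafactory.models import BlobEventsTrigger, Trigger

payload = {
    'type': 'BlobEventsTrigger',
    'typeProperties': {
        'events': ['Microsoft.Storage.BlobCreated'],
        'scope': '/subscriptions/00000000-0000-0000-0000-000000000000'
                 '/resourceGroups/myRg/providers/Microsoft.Storage'
                 '/storageAccounts/myaccount',
    },
}
trigger = Trigger.deserialize(payload)
assert isinstance(trigger, BlobEventsTrigger)
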
+ :type scope: str + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'events': {'required': True}, + 'scope': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'blob_path_begins_with': {'key': 'typeProperties.blobPathBeginsWith', 'type': 'str'}, + 'blob_path_ends_with': {'key': 'typeProperties.blobPathEndsWith', 'type': 'str'}, + 'events': {'key': 'typeProperties.events', 'type': '[str]'}, + 'scope': {'key': 'typeProperties.scope', 'type': 'str'}, + } + + def __init__(self, *, events, scope: str, additional_properties=None, description: str=None, annotations=None, pipelines=None, blob_path_begins_with: str=None, blob_path_ends_with: str=None, **kwargs) -> None: + super(BlobEventsTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) + self.blob_path_begins_with = blob_path_begins_with + self.blob_path_ends_with = blob_path_ends_with + self.events = events + self.scope = scope + self.type = 'BlobEventsTrigger' + + +class BlobSink(CopySink): + """A copy activity Azure Blob sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param blob_writer_overwrite_files: Blob writer overwrite files. Type: + boolean (or Expression with resultType boolean). + :type blob_writer_overwrite_files: object + :param blob_writer_date_time_format: Blob writer date time format. Type: + string (or Expression with resultType string). + :type blob_writer_date_time_format: object + :param blob_writer_add_header: Blob writer add header. Type: boolean (or + Expression with resultType boolean). + :type blob_writer_add_header: object + :param copy_behavior: The type of copy behavior for copy sink. 
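
# Construction sketch for the trigger above (pipeline name and scope are
# placeholders); TriggerPipelineReference and PipelineReference are defined
# elsewhere in this package.
from azure.mgmt.datafactory.models import (
    BlobEventsTrigger, PipelineReference, TriggerPipelineReference)

blob_events_trigger = BlobEventsTrigger(
    events=['Microsoft.Storage.BlobCreated'],
    scope='/subscriptions/00000000-0000-0000-0000-000000000000'
          '/resourceGroups/myRg/providers/Microsoft.Storage'
          '/storageAccounts/myaccount',
    blob_path_begins_with='/records/blobs/december/',
    pipelines=[TriggerPipelineReference(
        pipeline_reference=PipelineReference(reference_name='ProcessBlobs'))])
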
+ :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, + 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, + 'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, blob_writer_overwrite_files=None, blob_writer_date_time_format=None, blob_writer_add_header=None, copy_behavior=None, **kwargs) -> None: + super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.blob_writer_overwrite_files = blob_writer_overwrite_files + self.blob_writer_date_time_format = blob_writer_date_time_format + self.blob_writer_add_header = blob_writer_add_header + self.copy_behavior = copy_behavior + self.type = 'BlobSink' + + +class BlobSource(CopySource): + """A copy activity Azure Blob source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param treat_empty_as_null: Treat empty as null. Type: boolean (or + Expression with resultType boolean). + :type treat_empty_as_null: object + :param skip_header_line_count: Number of header lines to skip from each + blob. Type: integer (or Expression with resultType integer). + :type skip_header_line_count: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). 
+ :type recursive: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, + 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, treat_empty_as_null=None, skip_header_line_count=None, recursive=None, **kwargs) -> None: + super(BlobSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.treat_empty_as_null = treat_empty_as_null + self.skip_header_line_count = skip_header_line_count + self.recursive = recursive + self.type = 'BlobSource' + + +class BlobTrigger(MultiplePipelineTrigger): + """Trigger that runs every time the selected Blob container changes. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param pipelines: Pipelines that need to be started. + :type pipelines: + list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :param folder_path: Required. The path of the container/folder that will + trigger the pipeline. + :type folder_path: str + :param max_concurrency: Required. The max number of parallel files to + handle when it is triggered. + :type max_concurrency: int + :param linked_service: Required. The Azure Storage linked service + reference. 
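
# Minimal sketch of the source above with the documented default for
# 'recursive' made explicit.
from azure.mgmt.datafactory.models import BlobSource

blob_source = BlobSource(
    recursive=True,             # default is true per the docstring above
    skip_header_line_count=1,   # skip one header row in each blob
    treat_empty_as_null=True)
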
+ :type linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'folder_path': {'required': True}, + 'max_concurrency': {'required': True}, + 'linked_service': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'str'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + 'linked_service': {'key': 'typeProperties.linkedService', 'type': 'LinkedServiceReference'}, + } + + def __init__(self, *, folder_path: str, max_concurrency: int, linked_service, additional_properties=None, description: str=None, annotations=None, pipelines=None, **kwargs) -> None: + super(BlobTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) + self.folder_path = folder_path + self.max_concurrency = max_concurrency + self.linked_service = linked_service + self.type = 'BlobTrigger' + + +class CassandraLinkedService(LinkedService): + """Linked service for Cassandra data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. Host name for connection. Type: string (or + Expression with resultType string). + :type host: object + :param authentication_type: AuthenticationType to be used for connection. + Type: string (or Expression with resultType string). + :type authentication_type: object + :param port: The port for the connection. Type: integer (or Expression + with resultType integer). + :type port: object + :param username: Username for authentication. Type: string (or Expression + with resultType string). + :type username: object + :param password: Password for authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, port=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(CassandraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.authentication_type = authentication_type + self.port = port + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'Cassandra' + + +class CassandraSource(CopySource): + """A copy activity source for a Cassandra database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Should be a SQL-92 query expression or + Cassandra Query Language (CQL) command. Type: string (or Expression with + resultType string). + :type query: object + :param consistency_level: The consistency level specifies how many + Cassandra servers must respond to a read request before returning data to + the client application. Cassandra checks the specified number of Cassandra + servers for data to satisfy the read request. Must be one of + cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is + case-insensitive. 
Possible values include: 'ALL', 'EACH_QUORUM', 'QUORUM', + 'LOCAL_QUORUM', 'ONE', 'TWO', 'THREE', 'LOCAL_ONE', 'SERIAL', + 'LOCAL_SERIAL' + :type consistency_level: str or + ~azure.mgmt.datafactory.models.CassandraSourceReadConsistencyLevels + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, consistency_level=None, **kwargs) -> None: + super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.consistency_level = consistency_level + self.type = 'CassandraSource' + + +class CassandraTableDataset(Dataset): + """The Cassandra database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name of the Cassandra database. Type: string + (or Expression with resultType string). + :type table_name: object + :param keyspace: The keyspace of the Cassandra database. Type: string (or + Expression with resultType string). 
+ :type keyspace: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+ 'keyspace': {'key': 'typeProperties.keyspace', 'type': 'object'},
+ }
+
+ def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, keyspace=None, **kwargs) -> None:
+ super(CassandraTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+ self.table_name = table_name
+ self.keyspace = keyspace
+ self.type = 'CassandraTable'
+
+
+class CloudError(Model):
+ """The object that defines the structure of an Azure Data Factory error
+ response.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param code: Required. Error code.
+ :type code: str
+ :param message: Required. Error message.
+ :type message: str
+ :param target: Property name/path in request associated with error.
+ :type target: str
+ :param details: Array with additional error details.
+ :type details: list[~azure.mgmt.datafactory.models.CloudError]
+ """
+
+ _validation = {
+ 'code': {'required': True},
+ 'message': {'required': True},
+ }
+
+ _attribute_map = {
+ 'code': {'key': 'error.code', 'type': 'str'},
+ 'message': {'key': 'error.message', 'type': 'str'},
+ 'target': {'key': 'error.target', 'type': 'str'},
+ 'details': {'key': 'error.details', 'type': '[CloudError]'},
+ }
+
+ def __init__(self, *, code: str, message: str, target: str=None, details=None, **kwargs) -> None:
+ super(CloudError, self).__init__(**kwargs)
+ self.code = code
+ self.message = message
+ self.target = target
+ self.details = details
+
+
+class CloudErrorException(HttpOperationError):
+ """Server responded with exception of type: 'CloudError'.
+
+ :param deserialize: A deserializer.
+ :param response: Server response to be deserialized.
+ """
+
+ def __init__(self, deserialize, response, *args):
+
+ super(CloudErrorException, self).__init__(deserialize, response, 'CloudError', *args)
+
+
+class CommonDataServiceForAppsEntityDataset(Dataset):
+ """The Common Data Service for Apps entity dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset.
Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param entity_name: The logical name of the entity. Type: string (or + Expression with resultType string). + :type entity_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, entity_name=None, **kwargs) -> None: + super(CommonDataServiceForAppsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.entity_name = entity_name + self.type = 'CommonDataServiceForAppsEntity' + + +class CommonDataServiceForAppsLinkedService(LinkedService): + """Common Data Service for Apps linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param deployment_type: Required. The deployment type of the Common Data + Service for Apps instance. 'Online' for Common Data Service for Apps + Online and 'OnPremisesWithIfd' for Common Data Service for Apps + on-premises with Ifd. Type: string (or Expression with resultType string). 
+ Possible values include: 'Online', 'OnPremisesWithIfd'
+ :type deployment_type: str or
+ ~azure.mgmt.datafactory.models.DynamicsDeploymentType
+ :param host_name: The host name of the on-premises Common Data Service for
+ Apps server. The property is required for on-prem and not allowed for
+ online. Type: string (or Expression with resultType string).
+ :type host_name: object
+ :param port: The port of the on-premises Common Data Service for Apps
+ server. The property is required for on-prem and not allowed for online.
+ Default is 443. Type: integer (or Expression with resultType integer),
+ minimum: 0.
+ :type port: object
+ :param service_uri: The URL to the Microsoft Common Data Service for Apps
+ server. The property is required for online and not allowed for on-prem.
+ Type: string (or Expression with resultType string).
+ :type service_uri: object
+ :param organization_name: The organization name of the Common Data Service
+ for Apps instance. The property is required for on-prem, and required for
+ online when more than one Common Data Service for Apps instance is
+ associated with the user. Type: string (or Expression with resultType
+ string).
+ :type organization_name: object
+ :param authentication_type: Required. The authentication type to connect
+ to the Common Data Service for Apps server. 'Office365' for online
+ scenario, 'Ifd' for on-premises with Ifd scenario. Type: string (or
+ Expression with resultType string). Possible values include: 'Office365',
+ 'Ifd'
+ :type authentication_type: str or
+ ~azure.mgmt.datafactory.models.DynamicsAuthenticationType
+ :param username: Required. User name to access the Common Data Service for
+ Apps instance. Type: string (or Expression with resultType string).
+ :type username: object
+ :param password: Password to access the Common Data Service for Apps
+ instance.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, deployment_type, authentication_type, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(CommonDataServiceForAppsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.deployment_type = deployment_type + self.host_name = host_name + self.port = port + self.service_uri = service_uri + self.organization_name = organization_name + self.authentication_type = authentication_type + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'CommonDataServiceForApps' + + +class CommonDataServiceForAppsSink(CopySink): + """A copy activity Common Data Service for Apps sink. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. 
Constant filled by server. + :type type: str + :ivar write_behavior: Required. The write behavior for the operation. + Default value: "Upsert" . + :vartype write_behavior: str + :param ignore_null_values: The flag indicating whether to ignore null + values from input dataset (except key fields) during write operation. + Default is false. Type: boolean (or Expression with resultType boolean). + :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + 'write_behavior': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + write_behavior = "Upsert" + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None: + super(CommonDataServiceForAppsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.ignore_null_values = ignore_null_values + self.type = 'CommonDataServiceForAppsSink' + + +class CommonDataServiceForAppsSource(CopySource): + """A copy activity Common Data Service for Apps source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: FetchXML is a proprietary query language that is used in + Microsoft Common Data Service for Apps (online & on-premises). Type: + string (or Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(CommonDataServiceForAppsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'CommonDataServiceForAppsSource' + + +class ConcurLinkedService(LinkedService): + """Concur Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param client_id: Required. Application client_id supplied by Concur App + Management. + :type client_id: object + :param username: Required. The user name that you use to access Concur + Service. + :type username: object + :param password: The password corresponding to the user name that you + provided in the username field. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_id': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, client_id, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(ConcurLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.client_id = client_id + self.username = username + self.password = password + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Concur' + + +class ConcurObjectDataset(Dataset): + """Concur Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(ConcurObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'ConcurObject' + + +class ConcurSource(CopySource): + """A copy activity Concur Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'ConcurSource' + + +class CopyActivity(ExecutionActivity): + """Copy activity. + + All required parameters must be populated in order to send to Azure. 
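
The copy source and sink models in this patch ultimately plug into this class. A minimal sketch, assuming the BlobSource defined above and the BlobSink defined elsewhere in this file; the activity and dataset names are illustrative, not part of this patch:

    from azure.mgmt.datafactory.models import (
        BlobSink, BlobSource, CopyActivity, DatasetReference)

    # Copy one existing blob dataset to another; both dataset names are
    # hypothetical and must already be registered in the factory.
    copy = CopyActivity(
        name='CopyBlobToBlob',
        source=BlobSource(),
        sink=BlobSink(),
        inputs=[DatasetReference(reference_name='InBlobDataset')],
        outputs=[DatasetReference(reference_name='OutBlobDataset')])
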
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param source: Required. Copy activity source. + :type source: ~azure.mgmt.datafactory.models.CopySource + :param sink: Required. Copy activity sink. + :type sink: ~azure.mgmt.datafactory.models.CopySink + :param translator: Copy activity translator. If not specified, tabular + translator is used. + :type translator: object + :param enable_staging: Specifies whether to copy data via an interim + staging. Default value is false. Type: boolean (or Expression with + resultType boolean). + :type enable_staging: object + :param staging_settings: Specifies interim staging settings when + EnableStaging is true. + :type staging_settings: ~azure.mgmt.datafactory.models.StagingSettings + :param parallel_copies: Maximum number of concurrent sessions opened on + the source or sink to avoid overloading the data store. Type: integer (or + Expression with resultType integer), minimum: 0. + :type parallel_copies: object + :param data_integration_units: Maximum number of data integration units + that can be used to perform this data movement. Type: integer (or + Expression with resultType integer), minimum: 0. + :type data_integration_units: object + :param enable_skip_incompatible_row: Whether to skip incompatible row. + Default value is false. Type: boolean (or Expression with resultType + boolean). + :type enable_skip_incompatible_row: object + :param redirect_incompatible_row_settings: Redirect incompatible row + settings when EnableSkipIncompatibleRow is true. + :type redirect_incompatible_row_settings: + ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings + :param preserve_rules: Preserve Rules. + :type preserve_rules: list[object] + :param preserve: Preserve rules. + :type preserve: list[object] + :param inputs: List of inputs for the activity. + :type inputs: list[~azure.mgmt.datafactory.models.DatasetReference] + :param outputs: List of outputs for the activity. 
+ :type outputs: list[~azure.mgmt.datafactory.models.DatasetReference] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'source': {'required': True}, + 'sink': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, + 'sink': {'key': 'typeProperties.sink', 'type': 'CopySink'}, + 'translator': {'key': 'typeProperties.translator', 'type': 'object'}, + 'enable_staging': {'key': 'typeProperties.enableStaging', 'type': 'object'}, + 'staging_settings': {'key': 'typeProperties.stagingSettings', 'type': 'StagingSettings'}, + 'parallel_copies': {'key': 'typeProperties.parallelCopies', 'type': 'object'}, + 'data_integration_units': {'key': 'typeProperties.dataIntegrationUnits', 'type': 'object'}, + 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, + 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, + 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, + 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, + 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, + 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, + } + + def __init__(self, *, name: str, source, sink, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, translator=None, enable_staging=None, staging_settings=None, parallel_copies=None, data_integration_units=None, enable_skip_incompatible_row=None, redirect_incompatible_row_settings=None, preserve_rules=None, preserve=None, inputs=None, outputs=None, **kwargs) -> None: + super(CopyActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.source = source + self.sink = sink + self.translator = translator + self.enable_staging = enable_staging + self.staging_settings = staging_settings + self.parallel_copies = parallel_copies + self.data_integration_units = data_integration_units + self.enable_skip_incompatible_row = enable_skip_incompatible_row + self.redirect_incompatible_row_settings = redirect_incompatible_row_settings + self.preserve_rules = preserve_rules + self.preserve = preserve + self.inputs = inputs + self.outputs = outputs + self.type = 'Copy' + + +class CosmosDbLinkedService(LinkedService): + """Microsoft Azure Cosmos Database (CosmosDB) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param account_key: The Azure key vault secret reference of accountKey in + connection string. + :type account_key: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, account_key=None, encrypted_credential=None, **kwargs) -> None: + super(CosmosDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.account_key = account_key + self.encrypted_credential = encrypted_credential + self.type = 'CosmosDb' + + +class CosmosDbMongoDbApiCollectionDataset(Dataset): + """The CosmosDB (MongoDB API) database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param collection: Required. The collection name of the CosmosDB (MongoDB
+ API) database. Type: string (or Expression with resultType string).
+ :type collection: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ 'collection': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'collection': {'key': 'typeProperties.collection', 'type': 'object'},
+ }
+
+ def __init__(self, *, linked_service_name, collection, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None:
+ super(CosmosDbMongoDbApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+ self.collection = collection
+ self.type = 'CosmosDbMongoDbApiCollection'
+
+
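A minimal sketch of constructing this dataset; the linked service name refers to an instance of the linked service type defined next, and both names here are illustrative:

    from azure.mgmt.datafactory.models import (
        CosmosDbMongoDbApiCollectionDataset, LinkedServiceReference)

    dataset = CosmosDbMongoDbApiCollectionDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='CosmosDbMongoApiLs'),  # hypothetical name
        collection='orders')  # hypothetical collection name
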
+class CosmosDbMongoDbApiLinkedService(LinkedService):
+ """Linked service for CosmosDB (MongoDB API) data source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param connection_string: Required. The CosmosDB (MongoDB API) connection
+ string. Type: string, SecureString or AzureKeyVaultSecretReference.
+ :type connection_string: object
+ :param database: Required. The name of the CosmosDB (MongoDB API) database
+ that you want to access. Type: string (or Expression with resultType
+ string).
+ :type database: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'connection_string': {'required': True},
+ 'database': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+ 'database': {'key': 'typeProperties.database', 'type': 'object'},
+ }
+
+ def __init__(self, *, connection_string, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None:
+ super(CosmosDbMongoDbApiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+ self.connection_string = connection_string
+ self.database = database
+ self.type = 'CosmosDbMongoDbApi'
+
+
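A matching sketch for the linked service itself, assuming the connection string is passed as a SecureString; the account, key and database name are placeholders:

    from azure.mgmt.datafactory.models import (
        CosmosDbMongoDbApiLinkedService, SecureString)

    linked_service = CosmosDbMongoDbApiLinkedService(
        connection_string=SecureString(
            value='mongodb://<account>:<key>@<account>.documents.azure.com:10255/?ssl=true'),
        database='mydb')  # placeholder database name
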
+class CosmosDbMongoDbApiSink(CopySink):
+ """A copy activity sink for a CosmosDB (MongoDB API) database.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param write_batch_size: Write batch size. Type: integer (or Expression
+ with resultType integer), minimum: 0.
+ :type write_batch_size: object
+ :param write_batch_timeout: Write batch timeout. Type: string (or
+ Expression with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type write_batch_timeout: object
+ :param sink_retry_count: Sink retry count. Type: integer (or Expression
+ with resultType integer).
+ :type sink_retry_count: object
+ :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+ resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type sink_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the sink data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param write_behavior: Specifies whether the document with the same key
+ should be overwritten (upsert) rather than raise an exception (insert).
+ The default value is "insert". Type: string (or Expression with resultType
+ string).
+ :type write_behavior: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+ 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+ 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ }
+
+ def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None:
+ super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.write_behavior = write_behavior
+ self.type = 'CosmosDbMongoDbApiSink'
+
+
+class CosmosDbMongoDbApiSource(CopySource):
+ """A copy activity source for a CosmosDB (MongoDB API) database.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param filter: Specifies selection filter using query operators. To return
+ all documents in a collection, omit this parameter or pass an empty
+ document ({}). Type: string (or Expression with resultType string).
+ :type filter: object
+ :param cursor_methods: Cursor methods for MongoDB query.
+ :type cursor_methods:
+ ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties
+ :param batch_size: Specifies the number of documents to return in each
+ batch of the response from the MongoDB instance. In most cases, modifying
+ the batch size will not affect the user or the application. This
+ property's main purpose is to avoid hitting the limitation of response
+ size. Type: integer (or Expression with resultType integer).
+ :type batch_size: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'filter': {'key': 'filter', 'type': 'object'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None, **kwargs) -> None: + super(CosmosDbMongoDbApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.filter = filter + self.cursor_methods = cursor_methods + self.batch_size = batch_size + self.type = 'CosmosDbMongoDbApiSource' + + +class CouchbaseLinkedService(LinkedService): + """Couchbase server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param cred_string: The Azure key vault secret reference of credString in + connection string. + :type cred_string: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'cred_string': {'key': 'typeProperties.credString', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, cred_string=None, encrypted_credential=None, **kwargs) -> None: + super(CouchbaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.cred_string = cred_string + self.encrypted_credential = encrypted_credential + self.type = 'Couchbase' + + +class CouchbaseSource(CopySource): + """A copy activity Couchbase server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'CouchbaseSource' + + +class CouchbaseTableDataset(Dataset): + """Couchbase server dataset. + + All required parameters must be populated in order to send to Azure. 
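
A brief sketch of how the two Couchbase models above combine on the source side of a copy; the connection string and query are illustrative:

    from azure.mgmt.datafactory.models import (
        CouchbaseLinkedService, CouchbaseSource, SecureString)

    couchbase_ls = CouchbaseLinkedService(
        connection_string=SecureString(value='Server=<host>;Port=8093'))  # placeholder
    source = CouchbaseSource(query='SELECT * FROM `travel-sample` LIMIT 10')
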
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(CouchbaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'CouchbaseTable' + + +class CreateLinkedIntegrationRuntimeRequest(Model): + """The linked integration runtime information. + + :param name: The name of the linked integration runtime. + :type name: str + :param subscription_id: The ID of the subscription that the linked + integration runtime belongs to. + :type subscription_id: str + :param data_factory_name: The name of the data factory that the linked + integration runtime belongs to. + :type data_factory_name: str + :param data_factory_location: The location of the data factory that the + linked integration runtime belongs to. 
+ :type data_factory_location: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
+ 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'},
+ 'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'},
+ }
+
+ def __init__(self, *, name: str=None, subscription_id: str=None, data_factory_name: str=None, data_factory_location: str=None, **kwargs) -> None:
+ super(CreateLinkedIntegrationRuntimeRequest, self).__init__(**kwargs)
+ self.name = name
+ self.subscription_id = subscription_id
+ self.data_factory_name = data_factory_name
+ self.data_factory_location = data_factory_location
+
+
+class CreateRunResponse(Model):
+ """Response body with a run identifier.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param run_id: Required. Identifier of a run.
+ :type run_id: str
+ """
+
+ _validation = {
+ 'run_id': {'required': True},
+ }
+
+ _attribute_map = {
+ 'run_id': {'key': 'runId', 'type': 'str'},
+ }
+
+ def __init__(self, *, run_id: str, **kwargs) -> None:
+ super(CreateRunResponse, self).__init__(**kwargs)
+ self.run_id = run_id
+
+
+class CustomActivity(ExecutionActivity):
+ """Custom activity type.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param linked_service_name: Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param policy: Activity policy.
+ :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+ :param command: Required. Command for custom activity. Type: string (or
+ Expression with resultType string).
+ :type command: object
+ :param resource_linked_service: Resource linked service reference.
+ :type resource_linked_service:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param folder_path: Folder path for resource files. Type: string (or
+ Expression with resultType string).
+ :type folder_path: object
+ :param reference_objects: Reference objects.
+ :type reference_objects:
+ ~azure.mgmt.datafactory.models.CustomActivityReferenceObject
+ :param extended_properties: User defined property bag. There is no
+ restriction on the keys or values that can be used. The user specified
+ custom activity has the full responsibility to consume and interpret the
+ content defined.
+ :type extended_properties: dict[str, object]
+ :param retention_time_in_days: The retention time for the files submitted
+ for custom activity. Type: double (or Expression with resultType double).
+ :type retention_time_in_days: object
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'command': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+ 'command': {'key': 'typeProperties.command', 'type': 'object'},
+ 'resource_linked_service': {'key': 'typeProperties.resourceLinkedService', 'type': 'LinkedServiceReference'},
+ 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'},
+ 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'},
+ 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'},
+ 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'},
+ }
+
+ def __init__(self, *, name: str, command, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, resource_linked_service=None, folder_path=None, reference_objects=None, extended_properties=None, retention_time_in_days=None, **kwargs) -> None:
+ super(CustomActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
+ self.command = command
+ self.resource_linked_service = resource_linked_service
+ self.folder_path = folder_path
+ self.reference_objects = reference_objects
+ self.extended_properties = extended_properties
+ self.retention_time_in_days = retention_time_in_days
+ self.type = 'Custom'
+
+
+class CustomActivityReferenceObject(Model):
+ """Reference objects for custom activity.
+
+ :param linked_services: Linked service references.
+ :type linked_services:
+ list[~azure.mgmt.datafactory.models.LinkedServiceReference]
+ :param datasets: Dataset references.
+ :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference]
+ """
+
+ _attribute_map = {
+ 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceReference]'},
+ 'datasets': {'key': 'datasets', 'type': '[DatasetReference]'},
+ }
+
+ def __init__(self, *, linked_services=None, datasets=None, **kwargs) -> None:
+ super(CustomActivityReferenceObject, self).__init__(**kwargs)
+ self.linked_services = linked_services
+ self.datasets = datasets
+
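A minimal usage sketch for the CustomActivity and CustomActivityReferenceObject models above (illustrative only, not part of the generated patch; the linked service and dataset names are invented, and LinkedServiceReference/DatasetReference are assumed from this same models package):

    from azure.mgmt.datafactory.models import (
        CustomActivity, CustomActivityReferenceObject, DatasetReference,
        LinkedServiceReference)

    # Batch pool where the command runs, and storage holding the resource files.
    batch_ls = LinkedServiceReference(reference_name='AzureBatchLS')
    storage_ls = LinkedServiceReference(reference_name='AzureStorageLS')

    activity = CustomActivity(
        name='RunLegacyExe',
        linked_service_name=batch_ls,
        command='legacy.exe --mode full',      # required property
        resource_linked_service=storage_ls,
        folder_path='containername/binaries',
        reference_objects=CustomActivityReferenceObject(
            datasets=[DatasetReference(reference_name='InputDataset')]),
        extended_properties={'jobTag': 'nightly'})  # free-form property bag
    assert activity.type == 'Custom'           # discriminator set by the model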
+
+class CustomDataset(Dataset):
+ """The custom dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset. Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param type_properties: Custom dataset properties.
+ :type type_properties: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'type_properties': {'key': 'typeProperties', 'type': 'object'},
+ }
+
+ def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, type_properties=None, **kwargs) -> None:
+ super(CustomDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+ self.type_properties = type_properties
+ self.type = 'CustomDataset'
+
+
+class CustomDataSourceLinkedService(LinkedService):
+ """Custom linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param type_properties: Required. Custom linked service properties.
+ :type type_properties: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'type_properties': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'type_properties': {'key': 'typeProperties', 'type': 'object'},
+ }
+
+ def __init__(self, *, type_properties, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None:
+ super(CustomDataSourceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+ self.type_properties = type_properties
+ self.type = 'CustomDataSource'
+
+
+class DatabricksNotebookActivity(ExecutionActivity):
+ """DatabricksNotebook activity.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param linked_service_name: Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param policy: Activity policy.
+ :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+ :param notebook_path: Required. The absolute path of the notebook to be
+ run in the Databricks Workspace. This path must begin with a slash. Type:
+ string (or Expression with resultType string).
+ :type notebook_path: object
+ :param base_parameters: Base parameters to be used for each run of this
+ job. If the notebook takes a parameter that is not specified, the default
+ value from the notebook will be used.
+ :type base_parameters: dict[str, object]
+ :param libraries: A list of libraries to be installed on the cluster that
+ will execute the job.
+ :type libraries: list[dict[str, object]]
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'notebook_path': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+ 'notebook_path': {'key': 'typeProperties.notebookPath', 'type': 'object'},
+ 'base_parameters': {'key': 'typeProperties.baseParameters', 'type': '{object}'},
+ 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'},
+ }
+
+ def __init__(self, *, name: str, notebook_path, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, base_parameters=None, libraries=None, **kwargs) -> None:
+ super(DatabricksNotebookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
+ self.notebook_path = notebook_path
+ self.base_parameters = base_parameters
+ self.libraries = libraries
+ self.type = 'DatabricksNotebook'
+
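A minimal usage sketch for DatabricksNotebookActivity (illustrative only, not part of the generated patch; the linked service name and notebook path are invented, and the pypi library spec follows the Databricks REST library format):

    from azure.mgmt.datafactory.models import (
        DatabricksNotebookActivity, LinkedServiceReference)

    activity = DatabricksNotebookActivity(
        name='ScoreModel',
        linked_service_name=LinkedServiceReference(
            reference_name='AzureDatabricksLS'),
        notebook_path='/Shared/score',              # must begin with a slash
        base_parameters={'runDate': '2019-06-07'},  # overrides notebook defaults
        libraries=[{'pypi': {'package': 'simplejson'}}])  # one dict per library
    assert activity.type == 'DatabricksNotebook'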
+
+class DatabricksSparkJarActivity(ExecutionActivity):
+ """DatabricksSparkJar activity.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param linked_service_name: Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param policy: Activity policy.
+ :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+ :param main_class_name: Required. The full name of the class containing
+ the main method to be executed. This class must be contained in a JAR
+ provided as a library. Type: string (or Expression with resultType
+ string).
+ :type main_class_name: object
+ :param parameters: Parameters that will be passed to the main method.
+ :type parameters: list[object]
+ :param libraries: A list of libraries to be installed on the cluster that
+ will execute the job.
+ :type libraries: list[dict[str, object]]
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'main_class_name': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+ 'main_class_name': {'key': 'typeProperties.mainClassName', 'type': 'object'},
+ 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'},
+ 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'},
+ }
+
+ def __init__(self, *, name: str, main_class_name, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, parameters=None, libraries=None, **kwargs) -> None:
+ super(DatabricksSparkJarActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
+ self.main_class_name = main_class_name
+ self.parameters = parameters
+ self.libraries = libraries
+ self.type = 'DatabricksSparkJar'
+
+
+class DatabricksSparkPythonActivity(ExecutionActivity):
+ """DatabricksSparkPython activity.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param linked_service_name: Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param policy: Activity policy.
+ :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+ :param python_file: Required. The URI of the Python file to be executed.
+ DBFS paths are supported. Type: string (or Expression with resultType
+ string).
+ :type python_file: object
+ :param parameters: Command line parameters that will be passed to the
+ Python file.
+ :type parameters: list[object]
+ :param libraries: A list of libraries to be installed on the cluster that
+ will execute the job.
+ :type libraries: list[dict[str, object]] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'python_file': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'python_file': {'key': 'typeProperties.pythonFile', 'type': 'object'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, + 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + } + + def __init__(self, *, name: str, python_file, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, parameters=None, libraries=None, **kwargs) -> None: + super(DatabricksSparkPythonActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.python_file = python_file + self.parameters = parameters + self.libraries = libraries + self.type = 'DatabricksSparkPython' + + +class DataLakeAnalyticsUSQLActivity(ExecutionActivity): + """Data Lake Analytics U-SQL activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param script_path: Required. Case-sensitive path to folder that contains + the U-SQL script. Type: string (or Expression with resultType string). + :type script_path: object + :param script_linked_service: Required. Script linked service reference. + :type script_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param degree_of_parallelism: The maximum number of nodes simultaneously + used to run the job. Default value is 1. Type: integer (or Expression with + resultType integer), minimum: 1. + :type degree_of_parallelism: object + :param priority: Determines which jobs out of all that are queued should + be selected to run first. The lower the number, the higher the priority. + Default value is 1000. Type: integer (or Expression with resultType + integer), minimum: 1. + :type priority: object + :param parameters: Parameters for U-SQL job request. + :type parameters: dict[str, object] + :param runtime_version: Runtime version of the U-SQL engine to use. 
Type:
+ string (or Expression with resultType string).
+ :type runtime_version: object
+ :param compilation_mode: Compilation mode of U-SQL. Must be one of these
+ values: Semantic, Full, or SingleBox. Type: string (or Expression with
+ resultType string).
+ :type compilation_mode: object
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'script_path': {'required': True},
+ 'script_linked_service': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+ 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'},
+ 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'},
+ 'degree_of_parallelism': {'key': 'typeProperties.degreeOfParallelism', 'type': 'object'},
+ 'priority': {'key': 'typeProperties.priority', 'type': 'object'},
+ 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'},
+ 'runtime_version': {'key': 'typeProperties.runtimeVersion', 'type': 'object'},
+ 'compilation_mode': {'key': 'typeProperties.compilationMode', 'type': 'object'},
+ }
+
+ def __init__(self, *, name: str, script_path, script_linked_service, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, degree_of_parallelism=None, priority=None, parameters=None, runtime_version=None, compilation_mode=None, **kwargs) -> None:
+ super(DataLakeAnalyticsUSQLActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
+ self.script_path = script_path
+ self.script_linked_service = script_linked_service
+ self.degree_of_parallelism = degree_of_parallelism
+ self.priority = priority
+ self.parameters = parameters
+ self.runtime_version = runtime_version
+ self.compilation_mode = compilation_mode
+ self.type = 'DataLakeAnalyticsU-SQL'
+
+
+class DatasetCompression(Model):
+ """The compression method used on a dataset.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: DatasetZipDeflateCompression, DatasetDeflateCompression,
+ DatasetGZipCompression, DatasetBZip2Compression
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. Constant filled by server.
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'ZipDeflate': 'DatasetZipDeflateCompression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'BZip2': 'DatasetBZip2Compression'} + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(DatasetCompression, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = None + + +class DatasetBZip2Compression(DatasetCompression): + """The BZip2 compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(DatasetBZip2Compression, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'BZip2' + + +class DatasetDeflateCompression(DatasetCompression): + """The Deflate compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param level: The Deflate compression level. + :type level: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None: + super(DatasetDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs) + self.level = level + self.type = 'Deflate' + + +class DatasetFolder(Model): + """The folder that this Dataset is in. If not specified, Dataset will appear + at the root level. + + :param name: The name of the folder that this Dataset is in. + :type name: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__(self, *, name: str=None, **kwargs) -> None: + super(DatasetFolder, self).__init__(**kwargs) + self.name = name + + +class DatasetGZipCompression(DatasetCompression): + """The GZip compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param level: The GZip compression level. 
+ :type level: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None: + super(DatasetGZipCompression, self).__init__(additional_properties=additional_properties, **kwargs) + self.level = level + self.type = 'GZip' + + +class DatasetReference(Model): + """Dataset reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Dataset reference type. Default value: + "DatasetReference" . + :vartype type: str + :param reference_name: Required. Reference dataset name. + :type reference_name: str + :param parameters: Arguments for dataset. + :type parameters: dict[str, object] + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + type = "DatasetReference" + + def __init__(self, *, reference_name: str, parameters=None, **kwargs) -> None: + super(DatasetReference, self).__init__(**kwargs) + self.reference_name = reference_name + self.parameters = parameters + + +class SubResource(Model): + """Azure Data Factory nested resource, which belongs to a factory. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(SubResource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + self.etag = None + + +class DatasetResource(SubResource): + """Dataset resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Dataset properties. 
+ :type properties: ~azure.mgmt.datafactory.models.Dataset + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'Dataset'}, + } + + def __init__(self, *, properties, **kwargs) -> None: + super(DatasetResource, self).__init__(**kwargs) + self.properties = properties + + +class DatasetZipDeflateCompression(DatasetCompression): + """The ZipDeflate compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param level: The ZipDeflate compression level. + :type level: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None: + super(DatasetZipDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs) + self.level = level + self.type = 'ZipDeflate' + + +class Db2LinkedService(LinkedService): + """Linked service for DB2 data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. Server name for connection. Type: string (or + Expression with resultType string). + :type server: object + :param database: Required. Database name for connection. Type: string (or + Expression with resultType string). + :type database: object + :param authentication_type: AuthenticationType to be used for connection. + Possible values include: 'Basic' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.Db2AuthenticationType + :param username: Username for authentication. Type: string (or Expression + with resultType string). + :type username: object + :param password: Password for authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, server, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(Db2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.server = server + self.database = database + self.authentication_type = authentication_type + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'Db2' + + +class Db2Source(CopySource): + """A copy activity source for Db2 databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
+ :type query: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'query': {'key': 'query', 'type': 'object'},
+ }
+
+ def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
+ super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.query = query
+ self.type = 'Db2Source'
+
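A minimal usage sketch pairing the Db2LinkedService defined above with Db2Source (illustrative only, not part of the generated patch; the server, database, and credentials are placeholders, and SecureString is assumed from this same models package):

    from azure.mgmt.datafactory.models import (
        Db2LinkedService, Db2Source, SecureString)

    db2_ls = Db2LinkedService(
        server='db2.example.com',          # required
        database='SAMPLE',                 # required
        authentication_type='Basic',       # the only documented value
        username='db2admin',
        password=SecureString(value='<placeholder>'))

    source = Db2Source(
        query='SELECT * FROM ORDERS',      # plain string or ADF expression
        source_retry_count=2)
    assert source.type == 'Db2Source'      # discriminator set by the model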
+
+class Db2TableDataset(Dataset):
+ """The Db2 table dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset. Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param table_name: This property will be retired. Please consider using
+ schema + table properties instead.
+ :type table_name: object
+ :param db2_table_dataset_schema: The Db2 schema name. Type: string (or
+ Expression with resultType string).
+ :type db2_table_dataset_schema: object
+ :param table: The Db2 table name. Type: string (or Expression with
+ resultType string).
+ :type table: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+ 'db2_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
+ 'table': {'key': 'typeProperties.table', 'type': 'object'},
+ }
+
+ def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, db2_table_dataset_schema=None, table=None, **kwargs) -> None:
+ super(Db2TableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+ self.table_name = table_name
+ self.db2_table_dataset_schema = db2_table_dataset_schema
+ self.table = table
+ self.type = 'Db2Table'
+
+
+class DeleteActivity(ExecutionActivity):
+ """Delete activity.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param linked_service_name: Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param policy: Activity policy.
+ :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+ :param recursive: If true, files or sub-folders under the current folder
+ path will be deleted recursively. Default is false. Type: boolean (or
+ Expression with resultType boolean).
+ :type recursive: object
+ :param max_concurrent_connections: The maximum number of concurrent
+ connections used to connect to the data source at the same time.
+ :type max_concurrent_connections: int
+ :param enable_logging: Whether to record detailed logs of delete-activity
+ execution. Default value is false. Type: boolean (or Expression with
+ resultType boolean).
+ :type enable_logging: object
+ :param log_storage_settings: Log storage settings the customer needs to
+ provide when enableLogging is true.
+ :type log_storage_settings:
+ ~azure.mgmt.datafactory.models.LogStorageSettings
+ :param dataset: Required. Delete activity dataset reference.
+ :type dataset: ~azure.mgmt.datafactory.models.DatasetReference
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'max_concurrent_connections': {'minimum': 1},
+ 'dataset': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+ 'recursive': {'key': 'typeProperties.recursive', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'typeProperties.maxConcurrentConnections', 'type': 'int'},
+ 'enable_logging': {'key': 'typeProperties.enableLogging', 'type': 'object'},
+ 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'},
+ 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'},
+ }
+
+ def __init__(self, *, name: str, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, recursive=None, max_concurrent_connections: int=None, enable_logging=None, log_storage_settings=None, **kwargs) -> None:
+ super(DeleteActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
+ self.recursive = recursive
+ self.max_concurrent_connections = max_concurrent_connections
+ self.enable_logging = enable_logging
+ self.log_storage_settings = log_storage_settings
+ self.dataset = dataset
+ self.type = 'Delete'
+
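A minimal usage sketch for DeleteActivity (illustrative only, not part of the generated patch; the dataset name is a placeholder, and logging is left disabled so no LogStorageSettings value is needed):

    from azure.mgmt.datafactory.models import DatasetReference, DeleteActivity

    activity = DeleteActivity(
        name='PurgeStaging',
        dataset=DatasetReference(reference_name='StagingFolderDataset'),
        recursive=True,                # also delete sub-folders
        max_concurrent_connections=4,  # int, validated against minimum: 1
        enable_logging=False)
    assert activity.type == 'Delete'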
+
+class DelimitedTextDataset(Dataset):
+ """Delimited text dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset. Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param location: Required. The location of the delimited text storage.
+ :type location: ~azure.mgmt.datafactory.models.DatasetLocation
+ :param column_delimiter: The column delimiter. Type: string (or Expression
+ with resultType string).
+ :type column_delimiter: object
+ :param row_delimiter: The row delimiter. Type: string (or Expression with
+ resultType string).
+ :type row_delimiter: object
+ :param encoding_name: The code page name of the preferred encoding. If
+ not specified, the default value is UTF-8, unless BOM denotes another
+ Unicode encoding. Refer to the name column of the table in the following
+ link to set supported values:
+ https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string
+ (or Expression with resultType string).
+ :type encoding_name: object
+ :param compression_codec:
+ :type compression_codec: object
+ :param compression_level: The data compression method used for
+ DelimitedText.
+ :type compression_level: object
+ :param quote_char: The quote character. Type: string (or Expression with
+ resultType string).
+ :type quote_char: object
+ :param escape_char: The escape character. Type: string (or Expression with
+ resultType string).
+ :type escape_char: object
+ :param first_row_as_header: When used as input, treat the first row of
+ data as headers. When used as output, write the headers into the output as
+ the first row of data. The default value is false. Type: boolean (or
+ Expression with resultType boolean).
+ :type first_row_as_header: object
+ :param null_value: The null value string. Type: string (or Expression with
+ resultType string).
+ :type null_value: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ 'location': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'},
+ 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'},
+ 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'},
+ 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'},
+ 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'},
+ 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'},
+ 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'},
+ 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'},
+ 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'},
+ 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'},
+ }
+
+ def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, column_delimiter=None, row_delimiter=None, encoding_name=None, compression_codec=None, compression_level=None, quote_char=None, escape_char=None, first_row_as_header=None, null_value=None, **kwargs) -> None:
+ super(DelimitedTextDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name,
parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.location = location + self.column_delimiter = column_delimiter + self.row_delimiter = row_delimiter + self.encoding_name = encoding_name + self.compression_codec = compression_codec + self.compression_level = compression_level + self.quote_char = quote_char + self.escape_char = escape_char + self.first_row_as_header = first_row_as_header + self.null_value = null_value + self.type = 'DelimitedText' + + +class FormatReadSettings(Model): + """Format read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, type: str, additional_properties=None, **kwargs) -> None: + super(FormatReadSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type + + +class DelimitedTextReadSettings(FormatReadSettings): + """Delimited text read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param skip_line_count: Indicates the number of non-empty rows to skip + when reading data from input files. Type: integer (or Expression with + resultType integer). + :type skip_line_count: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, skip_line_count=None, **kwargs) -> None: + super(DelimitedTextReadSettings, self).__init__(additional_properties=additional_properties, type=type, **kwargs) + self.skip_line_count = skip_line_count + + +class DelimitedTextSink(CopySink): + """A copy activity DelimitedText sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). 
+ :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :param format_settings: DelimitedText format settings. + :type format_settings: + ~azure.mgmt.datafactory.models.DelimitedTextWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: + super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.format_settings = format_settings + self.type = 'DelimitedTextSink' + + +class DelimitedTextSource(CopySource): + """A copy activity DelimitedText source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :param format_settings: DelimitedText format settings. 
+ :type format_settings:
+ ~azure.mgmt.datafactory.models.DelimitedTextReadSettings
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
+ 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'},
+ }
+
+ def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None:
+ super(DelimitedTextSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.store_settings = store_settings
+ self.format_settings = format_settings
+ self.type = 'DelimitedTextSource'
+
+
+class DelimitedTextWriteSettings(FormatWriteSettings):
+ """Delimited text write settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. The write setting type.
+ :type type: str
+ :param quote_all_text: Indicates whether string values should always be
+ enclosed with quotes. Type: boolean (or Expression with resultType
+ boolean).
+ :type quote_all_text: object
+ :param file_extension: Required. The file extension used to create the
+ files. Type: string (or Expression with resultType string).
+ :type file_extension: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'file_extension': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'},
+ 'file_extension': {'key': 'fileExtension', 'type': 'object'},
+ }
+
+ def __init__(self, *, type: str, file_extension, additional_properties=None, quote_all_text=None, **kwargs) -> None:
+ super(DelimitedTextWriteSettings, self).__init__(additional_properties=additional_properties, type=type, **kwargs)
+ self.quote_all_text = quote_all_text
+ self.file_extension = file_extension
+
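A minimal usage sketch wiring the DelimitedText source/sink models above to their format settings (illustrative only, not part of the generated patch; store settings are omitted to keep the sketch self-contained, and the type discriminator strings are assumed to match the class names):

    from azure.mgmt.datafactory.models import (
        DelimitedTextReadSettings, DelimitedTextSink, DelimitedTextSource,
        DelimitedTextWriteSettings)

    source = DelimitedTextSource(
        format_settings=DelimitedTextReadSettings(
            type='DelimitedTextReadSettings',  # assumed discriminator value
            skip_line_count=1))                # skip one header row on read

    sink = DelimitedTextSink(
        format_settings=DelimitedTextWriteSettings(
            type='DelimitedTextWriteSettings',  # assumed discriminator value
            file_extension='.txt',              # required by the model
            quote_all_text=True))
    assert (source.type, sink.type) == ('DelimitedTextSource', 'DelimitedTextSink')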
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param resource_manager_endpoint: Required. Specifies the Yarn
+ ResourceManager endpoint. Type: string (or Expression with resultType
+ string).
+ :type resource_manager_endpoint: object
+ :param temp_script_path: Required. Specifies an existing folder path which
+ will be used to store the temp Distcp command script. The script file is
+ generated by ADF and will be removed after the copy job has finished.
+ Type: string (or Expression with resultType string).
+ :type temp_script_path: object
+ :param distcp_options: Specifies the Distcp options. Type: string (or
+ Expression with resultType string).
+ :type distcp_options: object
+ """
+
+ _validation = {
+ 'resource_manager_endpoint': {'required': True},
+ 'temp_script_path': {'required': True},
+ }
+
+ _attribute_map = {
+ 'resource_manager_endpoint': {'key': 'resourceManagerEndpoint', 'type': 'object'},
+ 'temp_script_path': {'key': 'tempScriptPath', 'type': 'object'},
+ 'distcp_options': {'key': 'distcpOptions', 'type': 'object'},
+ }
+
+ def __init__(self, *, resource_manager_endpoint, temp_script_path, distcp_options=None, **kwargs) -> None:
+ super(DistcpSettings, self).__init__(**kwargs)
+ self.resource_manager_endpoint = resource_manager_endpoint
+ self.temp_script_path = temp_script_path
+ self.distcp_options = distcp_options
+
+
+class DocumentDbCollectionDataset(Dataset):
+ """Microsoft Azure Document Database Collection dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset. Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param collection_name: Required. Document Database collection name. Type:
+ string (or Expression with resultType string).
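# ---------------------------------------------------------------------------
# Editorial illustration (not part of the generated diff): constructing the
# DistcpSettings model defined above. Endpoint and path values are
# hypothetical; only the first two arguments are required per _validation.
from azure.mgmt.datafactory.models import DistcpSettings

distcp = DistcpSettings(
    resource_manager_endpoint='http://headnode.example.com:8088',
    temp_script_path='/tmp/adf-distcp',        # existing folder for the script
    distcp_options='-m 10 -strategy dynamic')  # optional extra Distcp flags
# ---------------------------------------------------------------------------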
+ :type collection_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'collection_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, collection_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(DocumentDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.collection_name = collection_name + self.type = 'DocumentDbCollection' + + +class DocumentDbCollectionSink(CopySink): + """A copy activity Document Database Collection sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param nesting_separator: Nested properties separator. Default is . (dot). + Type: string (or Expression with resultType string). + :type nesting_separator: object + :param write_behavior: Describes how to write data to Azure Cosmos DB. + Allowed values: insert and upsert. 
+ :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, nesting_separator=None, write_behavior=None, **kwargs) -> None: + super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.nesting_separator = nesting_separator + self.write_behavior = write_behavior + self.type = 'DocumentDbCollectionSink' + + +class DocumentDbCollectionSource(CopySource): + """A copy activity Document Database Collection source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Documents query. Type: string (or Expression with resultType + string). + :type query: object + :param nesting_separator: Nested properties separator. Type: string (or + Expression with resultType string). 
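# ---------------------------------------------------------------------------
# Editorial illustration (not part of the generated diff): the sink defined
# above performs no client-side validation of write_behavior, so the string
# should be one of the documented values ('insert' or 'upsert').
from azure.mgmt.datafactory.models import DocumentDbCollectionSink

docdb_sink = DocumentDbCollectionSink(
    nesting_separator='|',    # overrides the default '.' separator
    write_behavior='upsert')  # allowed values per the docstring
# ---------------------------------------------------------------------------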
+ :type nesting_separator: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, nesting_separator=None, **kwargs) -> None: + super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.nesting_separator = nesting_separator + self.type = 'DocumentDbCollectionSource' + + +class DrillLinkedService(LinkedService): + """Drill server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: + super(DrillLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.pwd = pwd + self.encrypted_credential = encrypted_credential + self.type = 'Drill' + + +class DrillSource(CopySource): + """A copy activity Drill server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'DrillSource' + + +class DrillTableDataset(Dataset): + """Drill server dataset. + + All required parameters must be populated in order to send to Azure. 
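# ---------------------------------------------------------------------------
# Editorial illustration (not part of the generated diff): a sketch of the
# Drill linked service and source defined above, assuming the usual
# AzureKeyVaultSecretReference(store=..., secret_name=...) and
# LinkedServiceReference(reference_name=...) constructors from this package.
# All names and the query are hypothetical.
from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference, DrillLinkedService, DrillSource,
    LinkedServiceReference)

drill_ls = DrillLinkedService(
    connection_string='<ODBC connection string>',  # placeholder
    pwd=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='MyKeyVault'),
        secret_name='drill-password'))

drill_source = DrillSource(query='SELECT * FROM dfs.tmp.`orders`')
# ---------------------------------------------------------------------------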
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The table name of the Drill. Type: string (or Expression + with resultType string). + :type table: object + :param drill_table_dataset_schema: The schema name of the Drill. Type: + string (or Expression with resultType string). + :type drill_table_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'drill_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, drill_table_dataset_schema=None, **kwargs) -> None: + super(DrillTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.table = table + self.drill_table_dataset_schema = drill_table_dataset_schema + self.type = 'DrillTable' + + +class DynamicsAXLinkedService(LinkedService): + """Dynamics AX linked service. + + All required parameters must be populated in order to send to Azure. 
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param url: Required. The Dynamics AX (or Dynamics 365 Finance and
+ Operations) instance OData endpoint.
+ :type url: object
+ :param service_principal_id: Required. Specify the application's client
+ ID. Type: string (or Expression with resultType string).
+ :type service_principal_id: object
+ :param service_principal_key: Required. Specify the application's key.
+ Mark this field as a SecureString to store it securely in Data Factory, or
+ reference a secret stored in Azure Key Vault. Type: string (or Expression
+ with resultType string).
+ :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
+ :param tenant: Required. Specify the tenant information (domain name or
+ tenant ID) under which your application resides. Retrieve it by hovering
+ the mouse over the top-right corner of the Azure portal. Type: string (or
+ Expression with resultType string).
+ :type tenant: object
+ :param aad_resource_id: Required. Specify the resource for which you are
+ requesting authorization. Type: string (or Expression with resultType
+ string).
+ :type aad_resource_id: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + 'tenant': {'required': True}, + 'aad_resource_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, url, service_principal_id, service_principal_key, tenant, aad_resource_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, encrypted_credential=None, **kwargs) -> None: + super(DynamicsAXLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.aad_resource_id = aad_resource_id + self.encrypted_credential = encrypted_credential + self.type = 'DynamicsAX' + + +class DynamicsAXResourceDataset(Dataset): + """The path of the Dynamics AX OData entity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param path: Required. The path of the Dynamics AX OData entity. Type: + string (or Expression with resultType string). 
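# ---------------------------------------------------------------------------
# Editorial illustration (not part of the generated diff): the five required
# service-principal properties of DynamicsAXLinkedService, populated with
# hypothetical values; SecureString is assumed to be the usual SecretBase
# subclass from this package.
from azure.mgmt.datafactory.models import DynamicsAXLinkedService, SecureString

dynamics_ax_ls = DynamicsAXLinkedService(
    url='https://contoso.operations.dynamics.com/data',
    service_principal_id='<application-client-id>',
    service_principal_key=SecureString(value='<application-key>'),
    tenant='contoso.onmicrosoft.com',
    aad_resource_id='https://contoso.operations.dynamics.com')
# ---------------------------------------------------------------------------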
+ :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, path, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(DynamicsAXResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.path = path + self.type = 'DynamicsAXResource' + + +class DynamicsAXSource(CopySource): + """A copy activity Dynamics AX source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(DynamicsAXSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'DynamicsAXSource' + + +class DynamicsCrmEntityDataset(Dataset): + """The Dynamics CRM entity dataset. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param entity_name: The logical name of the entity. Type: string (or + Expression with resultType string). + :type entity_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, entity_name=None, **kwargs) -> None: + super(DynamicsCrmEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.entity_name = entity_name + self.type = 'DynamicsCrmEntity' + + +class DynamicsCrmLinkedService(LinkedService): + """Dynamics CRM linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. 
+ :type type: str
+ :param deployment_type: Required. The deployment type of the Dynamics CRM
+ instance. 'Online' for Dynamics CRM Online and 'OnPremisesWithIfd' for
+ Dynamics CRM on-premises with Ifd. Type: string (or Expression with
+ resultType string). Possible values include: 'Online', 'OnPremisesWithIfd'
+ :type deployment_type: str or
+ ~azure.mgmt.datafactory.models.DynamicsDeploymentType
+ :param host_name: The host name of the on-premises Dynamics CRM server.
+ The property is required for on-prem and not allowed for online. Type:
+ string (or Expression with resultType string).
+ :type host_name: object
+ :param port: The port of the on-premises Dynamics CRM server. The property
+ is required for on-prem and not allowed for online. Default is 443. Type:
+ integer (or Expression with resultType integer), minimum: 0.
+ :type port: object
+ :param service_uri: The URL to the Microsoft Dynamics CRM server. The
+ property is required for online and not allowed for on-prem. Type: string
+ (or Expression with resultType string).
+ :type service_uri: object
+ :param organization_name: The organization name of the Dynamics CRM
+ instance. The property is required for on-prem and required for online
+ when more than one Dynamics CRM instance is associated with the user.
+ Type: string (or Expression with resultType string).
+ :type organization_name: object
+ :param authentication_type: Required. The authentication type to connect
+ to the Dynamics CRM server. 'Office365' for the online scenario, 'Ifd' for
+ the on-premises with Ifd scenario. Type: string (or Expression with
+ resultType string). Possible values include: 'Office365', 'Ifd'
+ :type authentication_type: str or
+ ~azure.mgmt.datafactory.models.DynamicsAuthenticationType
+ :param username: Required. User name to access the Dynamics CRM instance.
+ Type: string (or Expression with resultType string).
+ :type username: object
+ :param password: Password to access the Dynamics CRM instance.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, deployment_type, authentication_type, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(DynamicsCrmLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.deployment_type = deployment_type + self.host_name = host_name + self.port = port + self.service_uri = service_uri + self.organization_name = organization_name + self.authentication_type = authentication_type + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'DynamicsCrm' + + +class DynamicsCrmSink(CopySink): + """A copy activity Dynamics CRM sink. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
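# ---------------------------------------------------------------------------
# Editorial illustration (not part of the generated diff): an online-flavoured
# DynamicsCrmLinkedService per the docstring above (service_uri is required
# for online; host_name/port are for on-prem). Values are hypothetical.
from azure.mgmt.datafactory.models import DynamicsCrmLinkedService, SecureString

crm_ls = DynamicsCrmLinkedService(
    deployment_type='Online',          # DynamicsDeploymentType value
    authentication_type='Office365',   # DynamicsAuthenticationType value
    username='alice@contoso.com',
    password=SecureString(value='<password>'),
    service_uri='https://contoso.crm.dynamics.com')
# ---------------------------------------------------------------------------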
+ :type type: str
+ :ivar write_behavior: Required. The write behavior for the operation.
+ Default value: "Upsert".
+ :vartype write_behavior: str
+ :param ignore_null_values: The flag indicating whether to ignore null
+ values from the input dataset (except key fields) during the write
+ operation. Default is false. Type: boolean (or Expression with resultType
+ boolean).
+ :type ignore_null_values: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'write_behavior': {'required': True, 'constant': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+ 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+ 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
+ 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
+ }
+
+ write_behavior = "Upsert"
+
+ def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None:
+ super(DynamicsCrmSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.ignore_null_values = ignore_null_values
+ self.type = 'DynamicsCrmSink'
+
+
+class DynamicsCrmSource(CopySource):
+ """A copy activity Dynamics CRM source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param query: FetchXML is a proprietary query language that is used in
+ Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression
+ with resultType string).
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(DynamicsCrmSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'DynamicsCrmSource' + + +class DynamicsEntityDataset(Dataset): + """The Dynamics entity dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param entity_name: The logical name of the entity. Type: string (or + Expression with resultType string). 
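# ---------------------------------------------------------------------------
# Editorial illustration (not part of the generated diff): the query on
# DynamicsCrmSource is FetchXML, passed through as an opaque string. The
# query below is a hypothetical example.
from azure.mgmt.datafactory.models import DynamicsCrmSource

fetch_xml = ("<fetch top='10'><entity name='account'>"
             "<attribute name='name' /></entity></fetch>")
crm_source = DynamicsCrmSource(query=fetch_xml)
# ---------------------------------------------------------------------------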
+ :type entity_name: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'},
+ }
+
+ def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, entity_name=None, **kwargs) -> None:
+ super(DynamicsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+ self.entity_name = entity_name
+ self.type = 'DynamicsEntity'
+
+
+class DynamicsLinkedService(LinkedService):
+ """Dynamics linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param deployment_type: Required. The deployment type of the Dynamics
+ instance. 'Online' for Dynamics Online and 'OnPremisesWithIfd' for
+ Dynamics on-premises with Ifd. Type: string (or Expression with resultType
+ string).
+ :type deployment_type: object
+ :param host_name: The host name of the on-premises Dynamics server. The
+ property is required for on-prem and not allowed for online. Type: string
+ (or Expression with resultType string).
+ :type host_name: object
+ :param port: The port of the on-premises Dynamics server. The property is
+ required for on-prem and not allowed for online. Default is 443. Type:
+ integer (or Expression with resultType integer), minimum: 0.
+ :type port: object
+ :param service_uri: The URL to the Microsoft Dynamics server. The property
+ is required for online and not allowed for on-prem. Type: string (or
+ Expression with resultType string).
+ :type service_uri: object
+ :param organization_name: The organization name of the Dynamics instance.
+ The property is required for on-prem and required for online when more
+ than one Dynamics instance is associated with the user. Type: string (or
+ Expression with resultType string).
+ :type organization_name: object
+ :param authentication_type: Required. The authentication type to connect
+ to the Dynamics server.
'Office365' for online scenario, 'Ifd' for on-premises + with Ifd scenario. Type: string (or Expression with resultType string). + :type authentication_type: object + :param username: Required. User name to access the Dynamics instance. + Type: string (or Expression with resultType string). + :type username: object + :param password: Password to access the Dynamics instance. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, deployment_type, authentication_type, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(DynamicsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.deployment_type = deployment_type + self.host_name = host_name + self.port = port + self.service_uri = service_uri + self.organization_name = organization_name + self.authentication_type = authentication_type + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'Dynamics' + + +class DynamicsSink(CopySink): + """A copy activity Dynamics sink. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type write_batch_timeout: object
+ :param sink_retry_count: Sink retry count. Type: integer (or Expression
+ with resultType integer).
+ :type sink_retry_count: object
+ :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+ resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type sink_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the sink data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :ivar write_behavior: Required. The write behavior for the operation.
+ Default value: "Upsert".
+ :vartype write_behavior: str
+ :param ignore_null_values: The flag indicating whether to ignore null
+ values from the input dataset (except key fields) during the write
+ operation. Default is false. Type: boolean (or Expression with resultType
+ boolean).
+ :type ignore_null_values: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'write_behavior': {'required': True, 'constant': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+ 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+ 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
+ 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
+ }
+
+ write_behavior = "Upsert"
+
+ def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None:
+ super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.ignore_null_values = ignore_null_values
+ self.type = 'DynamicsSink'
+
+
+class DynamicsSource(CopySource):
+ """A copy activity Dynamics source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param query: FetchXML is a proprietary query language that is used in
+ Microsoft Dynamics (online & on-premises). Type: string (or Expression
+ with resultType string).
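# ---------------------------------------------------------------------------
# Editorial illustration (not part of the generated diff): write_behavior on
# DynamicsSink is a class-level constant ("Upsert"), so it is not accepted by
# __init__; only the optional flags are settable.
from azure.mgmt.datafactory.models import DynamicsSink

dyn_sink = DynamicsSink(ignore_null_values=True)
assert DynamicsSink.write_behavior == 'Upsert'  # fixed by the model
# ---------------------------------------------------------------------------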
+ :type query: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'query': {'key': 'query', 'type': 'object'},
+ }
+
+ def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
+ super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.query = query
+ self.type = 'DynamicsSource'
+
+
+class EloquaLinkedService(LinkedService):
+ """Eloqua server linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param endpoint: Required. The endpoint of the Eloqua server. (e.g.
+ eloqua.example.com)
+ :type endpoint: object
+ :param username: Required. The site name and user name of your Eloqua
+ account in the form: sitename/username. (e.g. Eloqua/Alice)
+ :type username: object
+ :param password: The password corresponding to the user name.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param use_encrypted_endpoints: Specifies whether the data source
+ endpoints are encrypted using HTTPS. The default value is true.
+ :type use_encrypted_endpoints: object
+ :param use_host_verification: Specifies whether to require the host name
+ in the server's certificate to match the host name of the server when
+ connecting over SSL. The default value is true.
+ :type use_host_verification: object
+ :param use_peer_verification: Specifies whether to verify the identity of
+ the server when connecting over SSL. The default value is true.
+ :type use_peer_verification: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, endpoint, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(EloquaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.endpoint = endpoint + self.username = username + self.password = password + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Eloqua' + + +class EloquaObjectDataset(Dataset): + """Eloqua server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
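# ---------------------------------------------------------------------------
# Editorial illustration (not part of the generated diff): the two required
# Eloqua properties, using the endpoint and sitename/username forms from the
# docstring above. Values are hypothetical.
from azure.mgmt.datafactory.models import EloquaLinkedService, SecureString

eloqua_ls = EloquaLinkedService(
    endpoint='eloqua.example.com',
    username='Eloqua/Alice',
    password=SecureString(value='<password>'))
# ---------------------------------------------------------------------------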
+ :type table_name: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+ }
+
+ def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None:
+ super(EloquaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+ self.table_name = table_name
+ self.type = 'EloquaObject'
+
+
+ class EloquaSource(CopySource):
+ """A copy activity Eloqua server source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param query: A query to retrieve data from source. Type: string (or
+ Expression with resultType string).
+ :type query: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'query': {'key': 'query', 'type': 'object'},
+ }
+
+ def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
+ super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.query = query
+ self.type = 'EloquaSource'
+
+
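+ # -- Editorial usage sketch (not generator output) ------------------------
+ # The three Eloqua models above are typically used together: the linked
+ # service describes the connection, the dataset binds an Eloqua object to
+ # that linked service, and the source is placed in a copy activity. All
+ # literal values below (endpoint, names, query) are hypothetical
+ # placeholders; LinkedServiceReference is defined elsewhere in this module.
+ def _example_eloqua_models():
+     linked_service = EloquaLinkedService(
+         endpoint='eloqua.example.com',
+         username='Eloqua/Alice')
+     dataset = EloquaObjectDataset(
+         linked_service_name=LinkedServiceReference(
+             reference_name='ExampleEloquaLinkedService'),
+         table_name='Accounts')
+     source = EloquaSource(query='SELECT * FROM Accounts')
+     # Each subclass fills in its 'type' discriminator automatically.
+     return linked_service, dataset, source
+
+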
+ class EntityReference(Model):
+ """The entity reference.
+
+ :param type: The type of this referenced entity. Possible values include:
+ 'IntegrationRuntimeReference', 'LinkedServiceReference'
+ :type type: str or
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType
+ :param reference_name: The name of this referenced entity.
+ :type reference_name: str
+ """
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ 'reference_name': {'key': 'referenceName', 'type': 'str'},
+ }
+
+ def __init__(self, *, type=None, reference_name: str=None, **kwargs) -> None:
+ super(EntityReference, self).__init__(**kwargs)
+ self.type = type
+ self.reference_name = reference_name
+
+
+ class ExecutePipelineActivity(ControlActivity):
+ """Execute pipeline activity.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param pipeline: Required. Pipeline reference.
+ :type pipeline: ~azure.mgmt.datafactory.models.PipelineReference
+ :param parameters: Pipeline parameters.
+ :type parameters: dict[str, object]
+ :param wait_on_completion: Defines whether activity execution will wait
+ for the dependent pipeline execution to finish. Default is false.
+ :type wait_on_completion: bool
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'pipeline': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'pipeline': {'key': 'typeProperties.pipeline', 'type': 'PipelineReference'},
+ 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'},
+ 'wait_on_completion': {'key': 'typeProperties.waitOnCompletion', 'type': 'bool'},
+ }
+
+ def __init__(self, *, name: str, pipeline, additional_properties=None, description: str=None, depends_on=None, user_properties=None, parameters=None, wait_on_completion: bool=None, **kwargs) -> None:
+ super(ExecutePipelineActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs)
+ self.pipeline = pipeline
+ self.parameters = parameters
+ self.wait_on_completion = wait_on_completion
+ self.type = 'ExecutePipeline'
+
+
+ class ExecuteSSISPackageActivity(ExecutionActivity):
+ """Execute SSIS package activity.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param linked_service_name: Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param policy: Activity policy.
+ :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+ :param package_location: Required. SSIS package location.
+ :type package_location: ~azure.mgmt.datafactory.models.SSISPackageLocation
+ :param runtime: Specifies the runtime to execute the SSIS package. The
+ value should be "x86" or "x64". Type: string (or Expression with
+ resultType string).
+ :type runtime: object
+ :param logging_level: The logging level of SSIS package execution. Type:
+ string (or Expression with resultType string).
+ :type logging_level: object
+ :param environment_path: The environment path to execute the SSIS package.
+ Type: string (or Expression with resultType string).
+ :type environment_path: object
+ :param execution_credential: The package execution credential.
+ :type execution_credential:
+ ~azure.mgmt.datafactory.models.SSISExecutionCredential
+ :param connect_via: Required. The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param project_parameters: The project level parameters to execute the
+ SSIS package.
+ :type project_parameters: dict[str,
+ ~azure.mgmt.datafactory.models.SSISExecutionParameter]
+ :param package_parameters: The package level parameters to execute the
+ SSIS package.
+ :type package_parameters: dict[str,
+ ~azure.mgmt.datafactory.models.SSISExecutionParameter]
+ :param project_connection_managers: The project level connection managers
+ to execute the SSIS package.
+ :type project_connection_managers: dict[str, dict[str,
+ ~azure.mgmt.datafactory.models.SSISExecutionParameter]]
+ :param package_connection_managers: The package level connection managers
+ to execute the SSIS package.
+ :type package_connection_managers: dict[str, dict[str,
+ ~azure.mgmt.datafactory.models.SSISExecutionParameter]]
+ :param property_overrides: The property overrides to execute the SSIS
+ package.
+ :type property_overrides: dict[str,
+ ~azure.mgmt.datafactory.models.SSISPropertyOverride]
+ :param log_location: SSIS package execution log location.
+ :type log_location: ~azure.mgmt.datafactory.models.SSISLogLocation + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'package_location': {'required': True}, + 'connect_via': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'package_location': {'key': 'typeProperties.packageLocation', 'type': 'SSISPackageLocation'}, + 'runtime': {'key': 'typeProperties.runtime', 'type': 'object'}, + 'logging_level': {'key': 'typeProperties.loggingLevel', 'type': 'object'}, + 'environment_path': {'key': 'typeProperties.environmentPath', 'type': 'object'}, + 'execution_credential': {'key': 'typeProperties.executionCredential', 'type': 'SSISExecutionCredential'}, + 'connect_via': {'key': 'typeProperties.connectVia', 'type': 'IntegrationRuntimeReference'}, + 'project_parameters': {'key': 'typeProperties.projectParameters', 'type': '{SSISExecutionParameter}'}, + 'package_parameters': {'key': 'typeProperties.packageParameters', 'type': '{SSISExecutionParameter}'}, + 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, + 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, + 'property_overrides': {'key': 'typeProperties.propertyOverrides', 'type': '{SSISPropertyOverride}'}, + 'log_location': {'key': 'typeProperties.logLocation', 'type': 'SSISLogLocation'}, + } + + def __init__(self, *, name: str, package_location, connect_via, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, runtime=None, logging_level=None, environment_path=None, execution_credential=None, project_parameters=None, package_parameters=None, project_connection_managers=None, package_connection_managers=None, property_overrides=None, log_location=None, **kwargs) -> None: + super(ExecuteSSISPackageActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.package_location = package_location + self.runtime = runtime + self.logging_level = logging_level + self.environment_path = environment_path + self.execution_credential = execution_credential + self.connect_via = connect_via + self.project_parameters = project_parameters + self.package_parameters = package_parameters + self.project_connection_managers = project_connection_managers + self.package_connection_managers = package_connection_managers + self.property_overrides = property_overrides + self.log_location = log_location + self.type = 'ExecuteSSISPackage' + + +class ExposureControlRequest(Model): + """The exposure control request. + + :param feature_name: The feature name. + :type feature_name: str + :param feature_type: The feature type. 
+ :type feature_type: str + """ + + _attribute_map = { + 'feature_name': {'key': 'featureName', 'type': 'str'}, + 'feature_type': {'key': 'featureType', 'type': 'str'}, + } + + def __init__(self, *, feature_name: str=None, feature_type: str=None, **kwargs) -> None: + super(ExposureControlRequest, self).__init__(**kwargs) + self.feature_name = feature_name + self.feature_type = feature_type + + +class ExposureControlResponse(Model): + """The exposure control response. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar feature_name: The feature name. + :vartype feature_name: str + :ivar value: The feature value. + :vartype value: str + """ + + _validation = { + 'feature_name': {'readonly': True}, + 'value': {'readonly': True}, + } + + _attribute_map = { + 'feature_name': {'key': 'featureName', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(ExposureControlResponse, self).__init__(**kwargs) + self.feature_name = None + self.value = None + + +class Expression(Model): + """Azure Data Factory expression definition. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Expression type. Default value: "Expression" . + :vartype type: str + :param value: Required. Expression value. + :type value: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + type = "Expression" + + def __init__(self, *, value: str, **kwargs) -> None: + super(Expression, self).__init__(**kwargs) + self.value = value + + +class Resource(Model): + """Azure Data Factory top-level resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :param location: The resource location. + :type location: str + :param tags: The resource tags. + :type tags: dict[str, str] + :ivar e_tag: Etag identifies change in the resource. + :vartype e_tag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'e_tag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'e_tag': {'key': 'eTag', 'type': 'str'}, + } + + def __init__(self, *, location: str=None, tags=None, **kwargs) -> None: + super(Resource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + self.location = location + self.tags = tags + self.e_tag = None + + +class Factory(Resource): + """Factory resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :param location: The resource location. + :type location: str + :param tags: The resource tags. + :type tags: dict[str, str] + :ivar e_tag: Etag identifies change in the resource. 
+ :vartype e_tag: str
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param identity: Managed service identity of the factory.
+ :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity
+ :ivar provisioning_state: Factory provisioning state, for example
+ Succeeded.
+ :vartype provisioning_state: str
+ :ivar create_time: Time the factory was created in ISO8601 format.
+ :vartype create_time: datetime
+ :ivar version: Version of the factory.
+ :vartype version: str
+ :param repo_configuration: Git repo information of the factory.
+ :type repo_configuration:
+ ~azure.mgmt.datafactory.models.FactoryRepoConfiguration
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'e_tag': {'readonly': True},
+ 'provisioning_state': {'readonly': True},
+ 'create_time': {'readonly': True},
+ 'version': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'e_tag': {'key': 'eTag', 'type': 'str'},
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'identity': {'key': 'identity', 'type': 'FactoryIdentity'},
+ 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
+ 'create_time': {'key': 'properties.createTime', 'type': 'iso-8601'},
+ 'version': {'key': 'properties.version', 'type': 'str'},
+ 'repo_configuration': {'key': 'properties.repoConfiguration', 'type': 'FactoryRepoConfiguration'},
+ }
+
+ def __init__(self, *, location: str=None, tags=None, additional_properties=None, identity=None, repo_configuration=None, **kwargs) -> None:
+ super(Factory, self).__init__(location=location, tags=tags, **kwargs)
+ self.additional_properties = additional_properties
+ self.identity = identity
+ self.provisioning_state = None
+ self.create_time = None
+ self.version = None
+ self.repo_configuration = repo_configuration
+
+
+ class FactoryRepoConfiguration(Model):
+ """Factory's git repo information.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: FactoryVSTSConfiguration, FactoryGitHubConfiguration
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param account_name: Required. Account name.
+ :type account_name: str
+ :param repository_name: Required. Repository name.
+ :type repository_name: str
+ :param collaboration_branch: Required. Collaboration branch.
+ :type collaboration_branch: str
+ :param root_folder: Required. Root folder.
+ :type root_folder: str
+ :param last_commit_id: Last commit id.
+ :type last_commit_id: str
+ :param type: Required. Constant filled by server.
+ :type type: str + """ + + _validation = { + 'account_name': {'required': True}, + 'repository_name': {'required': True}, + 'collaboration_branch': {'required': True}, + 'root_folder': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'repository_name': {'key': 'repositoryName', 'type': 'str'}, + 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, + 'root_folder': {'key': 'rootFolder', 'type': 'str'}, + 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'FactoryVSTSConfiguration': 'FactoryVSTSConfiguration', 'FactoryGitHubConfiguration': 'FactoryGitHubConfiguration'} + } + + def __init__(self, *, account_name: str, repository_name: str, collaboration_branch: str, root_folder: str, last_commit_id: str=None, **kwargs) -> None: + super(FactoryRepoConfiguration, self).__init__(**kwargs) + self.account_name = account_name + self.repository_name = repository_name + self.collaboration_branch = collaboration_branch + self.root_folder = root_folder + self.last_commit_id = last_commit_id + self.type = None + + +class FactoryGitHubConfiguration(FactoryRepoConfiguration): + """Factory's GitHub repo information. + + All required parameters must be populated in order to send to Azure. + + :param account_name: Required. Account name. + :type account_name: str + :param repository_name: Required. Repository name. + :type repository_name: str + :param collaboration_branch: Required. Collaboration branch. + :type collaboration_branch: str + :param root_folder: Required. Root folder. + :type root_folder: str + :param last_commit_id: Last commit id. + :type last_commit_id: str + :param type: Required. Constant filled by server. + :type type: str + :param host_name: GitHub Enterprise host name. For example: + https://github.mydomain.com + :type host_name: str + """ + + _validation = { + 'account_name': {'required': True}, + 'repository_name': {'required': True}, + 'collaboration_branch': {'required': True}, + 'root_folder': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'repository_name': {'key': 'repositoryName', 'type': 'str'}, + 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, + 'root_folder': {'key': 'rootFolder', 'type': 'str'}, + 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host_name': {'key': 'hostName', 'type': 'str'}, + } + + def __init__(self, *, account_name: str, repository_name: str, collaboration_branch: str, root_folder: str, last_commit_id: str=None, host_name: str=None, **kwargs) -> None: + super(FactoryGitHubConfiguration, self).__init__(account_name=account_name, repository_name=repository_name, collaboration_branch=collaboration_branch, root_folder=root_folder, last_commit_id=last_commit_id, **kwargs) + self.host_name = host_name + self.type = 'FactoryGitHubConfiguration' + + +class FactoryIdentity(Model): + """Identity properties of the factory resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. The identity type. Currently the only supported type + is 'SystemAssigned'. Default value: "SystemAssigned" . + :vartype type: str + :ivar principal_id: The principal id of the identity. 
+ :vartype principal_id: str + :ivar tenant_id: The client tenant id of the identity. + :vartype tenant_id: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + } + + type = "SystemAssigned" + + def __init__(self, **kwargs) -> None: + super(FactoryIdentity, self).__init__(**kwargs) + self.principal_id = None + self.tenant_id = None + + +class FactoryRepoUpdate(Model): + """Factory's git repo information. + + :param factory_resource_id: The factory resource id. + :type factory_resource_id: str + :param repo_configuration: Git repo information of the factory. + :type repo_configuration: + ~azure.mgmt.datafactory.models.FactoryRepoConfiguration + """ + + _attribute_map = { + 'factory_resource_id': {'key': 'factoryResourceId', 'type': 'str'}, + 'repo_configuration': {'key': 'repoConfiguration', 'type': 'FactoryRepoConfiguration'}, + } + + def __init__(self, *, factory_resource_id: str=None, repo_configuration=None, **kwargs) -> None: + super(FactoryRepoUpdate, self).__init__(**kwargs) + self.factory_resource_id = factory_resource_id + self.repo_configuration = repo_configuration + + +class FactoryUpdateParameters(Model): + """Parameters for updating a factory resource. + + :param tags: The resource tags. + :type tags: dict[str, str] + :param identity: Managed service identity of the factory. + :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity + """ + + _attribute_map = { + 'tags': {'key': 'tags', 'type': '{str}'}, + 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, + } + + def __init__(self, *, tags=None, identity=None, **kwargs) -> None: + super(FactoryUpdateParameters, self).__init__(**kwargs) + self.tags = tags + self.identity = identity + + +class FactoryVSTSConfiguration(FactoryRepoConfiguration): + """Factory's VSTS repo information. + + All required parameters must be populated in order to send to Azure. + + :param account_name: Required. Account name. + :type account_name: str + :param repository_name: Required. Repository name. + :type repository_name: str + :param collaboration_branch: Required. Collaboration branch. + :type collaboration_branch: str + :param root_folder: Required. Root folder. + :type root_folder: str + :param last_commit_id: Last commit id. + :type last_commit_id: str + :param type: Required. Constant filled by server. + :type type: str + :param project_name: Required. VSTS project name. + :type project_name: str + :param tenant_id: VSTS tenant id. 
+ :type tenant_id: str
+ """
+
+ _validation = {
+ 'account_name': {'required': True},
+ 'repository_name': {'required': True},
+ 'collaboration_branch': {'required': True},
+ 'root_folder': {'required': True},
+ 'type': {'required': True},
+ 'project_name': {'required': True},
+ }
+
+ _attribute_map = {
+ 'account_name': {'key': 'accountName', 'type': 'str'},
+ 'repository_name': {'key': 'repositoryName', 'type': 'str'},
+ 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'},
+ 'root_folder': {'key': 'rootFolder', 'type': 'str'},
+ 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'project_name': {'key': 'projectName', 'type': 'str'},
+ 'tenant_id': {'key': 'tenantId', 'type': 'str'},
+ }
+
+ def __init__(self, *, account_name: str, repository_name: str, collaboration_branch: str, root_folder: str, project_name: str, last_commit_id: str=None, tenant_id: str=None, **kwargs) -> None:
+ super(FactoryVSTSConfiguration, self).__init__(account_name=account_name, repository_name=repository_name, collaboration_branch=collaboration_branch, root_folder=root_folder, last_commit_id=last_commit_id, **kwargs)
+ self.project_name = project_name
+ self.tenant_id = tenant_id
+ self.type = 'FactoryVSTSConfiguration'
+
+
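+ # -- Editorial usage sketch (not generator output) ------------------------
+ # FactoryRepoConfiguration is polymorphic: construct one of its concrete
+ # subclasses and the 'type' discriminator is filled in automatically. The
+ # sketch below attaches an Azure DevOps (VSTS) repo to a factory via
+ # FactoryRepoUpdate, defined earlier in this module; every literal value is
+ # a hypothetical placeholder.
+ def _example_factory_repo_update():
+     repo = FactoryVSTSConfiguration(
+         account_name='contoso',
+         repository_name='adf-pipelines',
+         collaboration_branch='master',
+         root_folder='/',
+         project_name='DataPlatform')
+     return FactoryRepoUpdate(
+         factory_resource_id='<factory resource id>',
+         repo_configuration=repo)
+
+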
+ class FileServerLinkedService(LinkedService):
+ """File system linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param host: Required. Host name of the server. Type: string (or
+ Expression with resultType string).
+ :type host: object
+ :param user_id: User ID used to log on to the server. Type: string (or
+ Expression with resultType string).
+ :type user_id: object
+ :param password: Password used to log on to the server.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'host': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'host': {'key': 'typeProperties.host', 'type': 'object'},
+ 'user_id': {'key': 'typeProperties.userId', 'type': 'object'},
+ 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_id=None, password=None, encrypted_credential=None, **kwargs) -> None:
+ super(FileServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+ self.host = host
+ self.user_id = user_id
+ self.password = password
+ self.encrypted_credential = encrypted_credential
+ self.type = 'FileServer'
+
+
+ class FileServerLocation(DatasetLocation):
+ """The location of the file server dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. Type of dataset storage location.
+ :type type: str
+ :param folder_path: Specify the folder path of the dataset. Type: string
+ (or Expression with resultType string).
+ :type folder_path: object
+ :param file_name: Specify the file name of the dataset. Type: string (or
+ Expression with resultType string).
+ :type file_name: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'folder_path': {'key': 'folderPath', 'type': 'object'},
+ 'file_name': {'key': 'fileName', 'type': 'object'},
+ }
+
+ def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None:
+ super(FileServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs)
+
+
+ class FileServerReadSettings(StoreReadSettings):
+ """File server read settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. The read setting type.
+ :type type: str
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param recursive: If true, files under the folder path will be read
+ recursively. Default is true. Type: boolean (or Expression with resultType
+ boolean).
+ :type recursive: object
+ :param wildcard_folder_path: FileServer wildcardFolderPath. Type: string
+ (or Expression with resultType string).
+ :type wildcard_folder_path: object
+ :param wildcard_file_name: FileServer wildcardFileName. Type: string (or
+ Expression with resultType string).
+ :type wildcard_file_name: object
+ :param enable_partition_discovery: Indicates whether to enable partition
+ discovery.
+ :type enable_partition_discovery: bool
+ :param modified_datetime_start: The start of file's modified datetime.
+ Type: string (or Expression with resultType string).
+ :type modified_datetime_start: object
+ :param modified_datetime_end: The end of file's modified datetime. Type:
+ string (or Expression with resultType string).
+ :type modified_datetime_end: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'recursive': {'key': 'recursive', 'type': 'object'},
+ 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
+ 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
+ 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'},
+ 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'},
+ 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'},
+ }
+
+ def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None:
+ super(FileServerReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.recursive = recursive
+ self.wildcard_folder_path = wildcard_folder_path
+ self.wildcard_file_name = wildcard_file_name
+ self.enable_partition_discovery = enable_partition_discovery
+ self.modified_datetime_start = modified_datetime_start
+ self.modified_datetime_end = modified_datetime_end
+
+
+ class FileServerWriteSettings(StoreWriteSettings):
+ """File server write settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. The write setting type.
+ :type type: str
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the sink data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param copy_behavior: The type of copy behavior for copy sink.
+ :type copy_behavior: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
+ }
+
+ def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None:
+ super(FileServerWriteSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs)
+
+
+ class FileShareDataset(Dataset):
+ """An on-premises file system dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset. Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param folder_path: The path of the on-premises file system. Type: string
+ (or Expression with resultType string).
+ :type folder_path: object
+ :param file_name: The name of the file on the on-premises file system.
+ Type: string (or Expression with resultType string).
+ :type file_name: object
+ :param modified_datetime_start: The start of file's modified datetime.
+ Type: string (or Expression with resultType string).
+ :type modified_datetime_start: object
+ :param modified_datetime_end: The end of file's modified datetime. Type:
+ string (or Expression with resultType string).
+ :type modified_datetime_end: object
+ :param format: The format of the files.
+ :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat
+ :param file_filter: Specify a filter to be used to select a subset of
+ files in the folderPath rather than all files. Type: string (or Expression
+ with resultType string).
+ :type file_filter: object
+ :param compression: The data compression method used for the file system.
+ :type compression: ~azure.mgmt.datafactory.models.DatasetCompression
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'},
+ 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'},
+ 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'},
+ 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'},
+ 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'},
+ 'file_filter': {'key': 'typeProperties.fileFilter', 'type': 'object'},
+ 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'},
+ }
+
+ def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, modified_datetime_start=None, modified_datetime_end=None, format=None, file_filter=None, compression=None, **kwargs) -> None:
+ super(FileShareDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+ self.folder_path = folder_path
+ self.file_name = file_name
+ self.modified_datetime_start = modified_datetime_start
+ self.modified_datetime_end = modified_datetime_end
+ self.format = format
+ self.file_filter = file_filter
+ self.compression = compression
+ self.type = 'FileShare'
+
+
+ class FileSystemSink(CopySink):
+ """A copy activity file system sink.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param write_batch_size: Write batch size. Type: integer (or Expression
+ with resultType integer), minimum: 0.
+ :type write_batch_size: object
+ :param write_batch_timeout: Write batch timeout. Type: string (or
+ Expression with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type write_batch_timeout: object
+ :param sink_retry_count: Sink retry count. Type: integer (or Expression
+ with resultType integer).
+ :type sink_retry_count: object
+ :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+ resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type sink_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the sink data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param copy_behavior: The type of copy behavior for copy sink.
+ :type copy_behavior: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+ 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+ 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
+ }
+
+ def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None:
+ super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.copy_behavior = copy_behavior
+ self.type = 'FileSystemSink'
+
+
+ class FileSystemSource(CopySource):
+ """A copy activity file system source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param recursive: If true, files under the folder path will be read
+ recursively. Default is true. Type: boolean (or Expression with resultType
+ boolean).
+ :type recursive: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'recursive': {'key': 'recursive', 'type': 'object'},
+ }
+
+ def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, **kwargs) -> None:
+ super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.recursive = recursive
+ self.type = 'FileSystemSource'
+
+
+ class FilterActivity(ControlActivity):
+ """Filter and return results from the input array based on the conditions.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param items: Required. Input array on which filter should be applied.
+ :type items: ~azure.mgmt.datafactory.models.Expression
+ :param condition: Required. Condition to be used for filtering the input.
+ :type condition: ~azure.mgmt.datafactory.models.Expression
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'items': {'required': True},
+ 'condition': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'items': {'key': 'typeProperties.items', 'type': 'Expression'},
+ 'condition': {'key': 'typeProperties.condition', 'type': 'Expression'},
+ }
+
+ def __init__(self, *, name: str, items, condition, additional_properties=None, description: str=None, depends_on=None, user_properties=None, **kwargs) -> None:
+ super(FilterActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs)
+ self.items = items
+ self.condition = condition
+ self.type = 'Filter'
+
+
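+ # -- Editorial usage sketch (not generator output) ------------------------
+ # Both required inputs of a Filter activity are Expression models (defined
+ # earlier in this module). The pipeline parameter and predicate below are
+ # hypothetical placeholders.
+ def _example_filter_activity():
+     return FilterActivity(
+         name='FilterActiveRows',
+         items=Expression(value='@pipeline().parameters.inputRows'),
+         condition=Expression(value="@equals(item().status, 'active')"))
+
+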
+ class ForEachActivity(ControlActivity):
+ """This activity is used for iterating over a collection and executing the
+ given activities.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param is_sequential: Should the loop be executed in sequence or in
+ parallel (max 50).
+ :type is_sequential: bool
+ :param batch_count: Batch count to be used for controlling the number of
+ parallel executions (when isSequential is set to false).
+ :type batch_count: int
+ :param items: Required. Collection to iterate.
+ :type items: ~azure.mgmt.datafactory.models.Expression
+ :param activities: Required. List of activities to execute.
+ :type activities: list[~azure.mgmt.datafactory.models.Activity]
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'batch_count': {'maximum': 50},
+ 'items': {'required': True},
+ 'activities': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'is_sequential': {'key': 'typeProperties.isSequential', 'type': 'bool'},
+ 'batch_count': {'key': 'typeProperties.batchCount', 'type': 'int'},
+ 'items': {'key': 'typeProperties.items', 'type': 'Expression'},
+ 'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'},
+ }
+
+ def __init__(self, *, name: str, items, activities, additional_properties=None, description: str=None, depends_on=None, user_properties=None, is_sequential: bool=None, batch_count: int=None, **kwargs) -> None:
+ super(ForEachActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs)
+ self.is_sequential = is_sequential
+ self.batch_count = batch_count
+ self.items = items
+ self.activities = activities
+ self.type = 'ForEach'
+
+
+ class FtpReadSettings(StoreReadSettings):
+ """FTP read settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. The read setting type.
+ :type type: str
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param recursive: If true, files under the folder path will be read
+ recursively. Default is true. Type: boolean (or Expression with resultType
+ boolean).
+ :type recursive: object
+ :param wildcard_folder_path: Ftp wildcardFolderPath. Type: string (or
+ Expression with resultType string).
+ :type wildcard_folder_path: object
+ :param wildcard_file_name: Ftp wildcardFileName. Type: string (or
+ Expression with resultType string).
+ :type wildcard_file_name: object
+ :param use_binary_transfer: Specify whether to use binary transfer mode
+ for FTP stores.
+ :type use_binary_transfer: bool
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'recursive': {'key': 'recursive', 'type': 'object'},
+ 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
+ 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
+ 'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'},
+ }
+
+ def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, use_binary_transfer: bool=None, **kwargs) -> None:
+ super(FtpReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.recursive = recursive
+ self.wildcard_folder_path = wildcard_folder_path
+ self.wildcard_file_name = wildcard_file_name
+ self.use_binary_transfer = use_binary_transfer
+
+
+ class FtpServerLinkedService(LinkedService):
+ """An FTP server linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param host: Required. Host name of the FTP server. Type: string (or
+ Expression with resultType string).
+ :type host: object
+ :param port: The TCP port number that the FTP server uses to listen for
+ client connections. Default value is 21. Type: integer (or Expression with
+ resultType integer), minimum: 0.
+ :type port: object
+ :param authentication_type: The authentication type to be used to connect
+ to the FTP server. Possible values include: 'Basic', 'Anonymous'
+ :type authentication_type: str or
+ ~azure.mgmt.datafactory.models.FtpAuthenticationType
+ :param user_name: User name used to log on to the FTP server. Type:
+ string (or Expression with resultType string).
+ :type user_name: object
+ :param password: Password used to log on to the FTP server.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ :param enable_ssl: If true, connect to the FTP server over an SSL/TLS
+ channel. Default value is true. Type: boolean (or Expression with
+ resultType boolean).
+ :type enable_ssl: object
+ :param enable_server_certificate_validation: If true, validate the FTP
+ server SSL certificate when connecting over an SSL/TLS channel. Default
+ value is true. Type: boolean (or Expression with resultType boolean).
+ :type enable_server_certificate_validation: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'host': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'host': {'key': 'typeProperties.host', 'type': 'object'},
+ 'port': {'key': 'typeProperties.port', 'type': 'object'},
+ 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+ 'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
+ 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'},
+ 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'},
+ }
+
+ def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, enable_ssl=None, enable_server_certificate_validation=None, **kwargs) -> None:
+ super(FtpServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+ self.host = host
+ self.port = port
+ self.authentication_type = authentication_type
+ self.user_name = user_name
+ self.password = password
+ self.encrypted_credential = encrypted_credential
+ self.enable_ssl = enable_ssl
+ self.enable_server_certificate_validation = enable_server_certificate_validation
+ self.type = 'FtpServer'
+
+
+ class FtpServerLocation(DatasetLocation):
+ """The location of the FTP server dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. Type of dataset storage location.
+ :type type: str
+ :param folder_path: Specify the folder path of the dataset. Type: string
+ (or Expression with resultType string).
+ :type folder_path: object
+ :param file_name: Specify the file name of the dataset. Type: string (or
+ Expression with resultType string).
+ :type file_name: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'folder_path': {'key': 'folderPath', 'type': 'object'},
+ 'file_name': {'key': 'fileName', 'type': 'object'},
+ }
+
+ def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None:
+ super(FtpServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs)
+
+
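+ # -- Editorial usage sketch (not generator output) ------------------------
+ # A basic-auth FTP linked service plus read settings selecting files by
+ # wildcard. Host, credentials and wildcard are hypothetical placeholders;
+ # SecureString is defined elsewhere in this module. Note that the read
+ # settings take their discriminator through the required 'type' argument.
+ def _example_ftp_models():
+     linked_service = FtpServerLinkedService(
+         host='ftp.example.com',
+         port=21,
+         authentication_type='Basic',
+         user_name='exampleuser',
+         password=SecureString(value='<password>'),
+         enable_ssl=True)
+     read_settings = FtpReadSettings(
+         type='FtpReadSettings',
+         recursive=True,
+         wildcard_file_name='*.csv',
+         use_binary_transfer=True)
+     return linked_service, read_settings
+
+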
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param dataset: Required. GetMetadata activity dataset reference. + :type dataset: ~azure.mgmt.datafactory.models.DatasetReference + :param field_list: Fields of metadata to get from dataset. + :type field_list: list[object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'dataset': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + 'field_list': {'key': 'typeProperties.fieldList', 'type': '[object]'}, + } + + def __init__(self, *, name: str, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, field_list=None, **kwargs) -> None: + super(GetMetadataActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.dataset = dataset + self.field_list = field_list + self.type = 'GetMetadata' + + +class GetSsisObjectMetadataRequest(Model): + """The request payload of get SSIS object metadata. + + :param metadata_path: Metadata path. + :type metadata_path: str + """ + + _attribute_map = { + 'metadata_path': {'key': 'metadataPath', 'type': 'str'}, + } + + def __init__(self, *, metadata_path: str=None, **kwargs) -> None: + super(GetSsisObjectMetadataRequest, self).__init__(**kwargs) + self.metadata_path = metadata_path + + +class GitHubAccessTokenRequest(Model): + """Get GitHub access token request definition. + + All required parameters must be populated in order to send to Azure. + + :param git_hub_access_code: Required. GitHub access code. + :type git_hub_access_code: str + :param git_hub_client_id: GitHub application client ID. + :type git_hub_client_id: str + :param git_hub_access_token_base_url: Required. GitHub access token base + URL. 
+ :type git_hub_access_token_base_url: str + """ + + _validation = { + 'git_hub_access_code': {'required': True}, + 'git_hub_access_token_base_url': {'required': True}, + } + + _attribute_map = { + 'git_hub_access_code': {'key': 'gitHubAccessCode', 'type': 'str'}, + 'git_hub_client_id': {'key': 'gitHubClientId', 'type': 'str'}, + 'git_hub_access_token_base_url': {'key': 'gitHubAccessTokenBaseUrl', 'type': 'str'}, + } + + def __init__(self, *, git_hub_access_code: str, git_hub_access_token_base_url: str, git_hub_client_id: str=None, **kwargs) -> None: + super(GitHubAccessTokenRequest, self).__init__(**kwargs) + self.git_hub_access_code = git_hub_access_code + self.git_hub_client_id = git_hub_client_id + self.git_hub_access_token_base_url = git_hub_access_token_base_url + + +class GitHubAccessTokenResponse(Model): + """Get GitHub access token response definition. + + :param git_hub_access_token: GitHub access token. + :type git_hub_access_token: str + """ + + _attribute_map = { + 'git_hub_access_token': {'key': 'gitHubAccessToken', 'type': 'str'}, + } + + def __init__(self, *, git_hub_access_token: str=None, **kwargs) -> None: + super(GitHubAccessTokenResponse, self).__init__(**kwargs) + self.git_hub_access_token = git_hub_access_token + + +class GoogleAdWordsLinkedService(LinkedService): + """Google AdWords service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param client_customer_id: Required. The Client customer ID of the AdWords + account that you want to fetch report data for. + :type client_customer_id: object + :param developer_token: Required. The developer token associated with the + manager account that you use to grant access to the AdWords API. + :type developer_token: ~azure.mgmt.datafactory.models.SecretBase + :param authentication_type: Required. The OAuth 2.0 authentication + mechanism used for authentication. ServiceAuthentication can only be used + on self-hosted IR. Possible values include: 'ServiceAuthentication', + 'UserAuthentication' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.GoogleAdWordsAuthenticationType + :param refresh_token: The refresh token obtained from Google for + authorizing access to AdWords for UserAuthentication. + :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase + :param client_id: The client id of the google application used to acquire + the refresh token. + :type client_id: ~azure.mgmt.datafactory.models.SecretBase + :param client_secret: The client secret of the google application used to + acquire the refresh token. + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param email: The service account email ID that is used for + ServiceAuthentication and can only be used on self-hosted IR. 
+ :type email: object + :param key_file_path: The full path to the .p12 key file that is used to + authenticate the service account email address and can only be used on + self-hosted IR. + :type key_file_path: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_customer_id': {'required': True}, + 'developer_token': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'client_customer_id': {'key': 'typeProperties.clientCustomerID', 'type': 'object'}, + 'developer_token': {'key': 'typeProperties.developerToken', 'type': 'SecretBase'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'SecretBase'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'email': {'key': 'typeProperties.email', 'type': 'object'}, + 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, client_customer_id, developer_token, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, refresh_token=None, client_id=None, client_secret=None, email=None, key_file_path=None, trusted_cert_path=None, use_system_trust_store=None, encrypted_credential=None, **kwargs) -> None: + super(GoogleAdWordsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.client_customer_id = client_customer_id + self.developer_token = developer_token + self.authentication_type = authentication_type + self.refresh_token = refresh_token + self.client_id = client_id + self.client_secret = client_secret + self.email = email + self.key_file_path = key_file_path + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.encrypted_credential = encrypted_credential + self.type = 'GoogleAdWords' + + +class 
GoogleAdWordsObjectDataset(Dataset): + """Google AdWords service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(GoogleAdWordsObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'GoogleAdWordsObject' + + +class GoogleAdWordsSource(CopySource): + """A copy activity Google AdWords service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'GoogleAdWordsSource' + + +class GoogleBigQueryLinkedService(LinkedService): + """Google BigQuery service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param project: Required. The default BigQuery project to query against. + :type project: object + :param additional_projects: A comma-separated list of public BigQuery + projects to access. + :type additional_projects: object + :param request_google_drive_scope: Whether to request access to Google + Drive. Allowing Google Drive access enables support for federated tables + that combine BigQuery data with data from Google Drive. The default value + is false. + :type request_google_drive_scope: object + :param authentication_type: Required. The OAuth 2.0 authentication + mechanism used for authentication. ServiceAuthentication can only be used + on self-hosted IR. Possible values include: 'ServiceAuthentication', + 'UserAuthentication' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.GoogleBigQueryAuthenticationType + :param refresh_token: The refresh token obtained from Google for + authorizing access to BigQuery for UserAuthentication. + :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase + :param client_id: The client id of the google application used to acquire + the refresh token. + :type client_id: ~azure.mgmt.datafactory.models.SecretBase + :param client_secret: The client secret of the google application used to + acquire the refresh token. 
+ :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param email: The service account email ID that is used for + ServiceAuthentication and can only be used on self-hosted IR. + :type email: object + :param key_file_path: The full path to the .p12 key file that is used to + authenticate the service account email address and can only be used on + self-hosted IR. + :type key_file_path: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'project': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'project': {'key': 'typeProperties.project', 'type': 'object'}, + 'additional_projects': {'key': 'typeProperties.additionalProjects', 'type': 'object'}, + 'request_google_drive_scope': {'key': 'typeProperties.requestGoogleDriveScope', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'SecretBase'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'email': {'key': 'typeProperties.email', 'type': 'object'}, + 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, project, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, additional_projects=None, request_google_drive_scope=None, refresh_token=None, client_id=None, client_secret=None, email=None, key_file_path=None, trusted_cert_path=None, use_system_trust_store=None, encrypted_credential=None, **kwargs) -> None: + super(GoogleBigQueryLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.project = project + self.additional_projects = additional_projects + self.request_google_drive_scope = request_google_drive_scope + self.authentication_type = authentication_type + self.refresh_token = refresh_token + self.client_id = 
client_id + self.client_secret = client_secret + self.email = email + self.key_file_path = key_file_path + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.encrypted_credential = encrypted_credential + self.type = 'GoogleBigQuery' + + +class GoogleBigQueryObjectDataset(Dataset): + """Google BigQuery service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + database + table properties instead. + :type table_name: object + :param table: The table name of the Google BigQuery. Type: string (or + Expression with resultType string). + :type table: object + :param dataset: The database name of the Google BigQuery. Type: string (or + Expression with resultType string). 
+ :type dataset: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, dataset=None, **kwargs) -> None: + super(GoogleBigQueryObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.table = table + self.dataset = dataset + self.type = 'GoogleBigQueryObject' + + +class GoogleBigQuerySource(CopySource): + """A copy activity Google BigQuery service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'GoogleBigQuerySource' + + +class GreenplumLinkedService(LinkedService): + """Greenplum Database linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: + super(GreenplumLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.pwd = pwd + self.encrypted_credential = encrypted_credential + self.type = 'Greenplum' + + +class GreenplumSource(CopySource): + """A copy activity Greenplum Database source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'GreenplumSource' + + +class GreenplumTableDataset(Dataset): + """Greenplum Database dataset. + + All required parameters must be populated in order to send to Azure. 
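+
+ Example (editor's illustrative sketch, not generated code; the linked
+ service name 'MyGreenplum' and the table/schema values are hypothetical)::
+
+     dataset = GreenplumTableDataset(
+         linked_service_name=LinkedServiceReference(reference_name='MyGreenplum'),
+         greenplum_table_dataset_schema='public',
+         table='sales')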
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The table name of Greenplum. Type: string (or Expression + with resultType string). + :type table: object + :param greenplum_table_dataset_schema: The schema name of Greenplum. Type: + string (or Expression with resultType string). + :type greenplum_table_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'greenplum_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, greenplum_table_dataset_schema=None, **kwargs) -> None: + super(GreenplumTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.table = table + self.greenplum_table_dataset_schema = greenplum_table_dataset_schema + self.type = 'GreenplumTable' + + +class HBaseLinkedService(LinkedService): + """HBase server linked service. + + All required parameters must be populated in order to send to Azure. 
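+
+ Example (editor's illustrative sketch, not generated code; the host,
+ user name, and password values are placeholders)::
+
+     linked_service = HBaseLinkedService(
+         host='192.168.222.160',
+         authentication_type='Basic',
+         username='hbaseuser',
+         password=SecureString(value='<password>'),
+         enable_ssl=True)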
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param host: Required. The IP address or host name of the HBase server.
+ (e.g. 192.168.222.160)
+ :type host: object
+ :param port: The TCP port that the HBase instance uses to listen for
+ client connections. The default value is 9090.
+ :type port: object
+ :param http_path: The partial URL corresponding to the HBase server. (e.g.
+ /gateway/sandbox/hbase/version)
+ :type http_path: object
+ :param authentication_type: Required. The authentication mechanism to use
+ to connect to the HBase server. Possible values include: 'Anonymous',
+ 'Basic'
+ :type authentication_type: str or
+ ~azure.mgmt.datafactory.models.HBaseAuthenticationType
+ :param username: The user name used to connect to the HBase instance.
+ :type username: object
+ :param password: The password corresponding to the user name.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param enable_ssl: Specifies whether the connections to the server are
+ encrypted using SSL. The default value is false.
+ :type enable_ssl: object
+ :param trusted_cert_path: The full path of the .pem file containing
+ trusted CA certificates for verifying the server when connecting over SSL.
+ This property can only be set when using SSL on self-hosted IR. The
+ default value is the cacerts.pem file installed with the IR.
+ :type trusted_cert_path: object
+ :param allow_host_name_cn_mismatch: Specifies whether to require a
+ CA-issued SSL certificate name to match the host name of the server when
+ connecting over SSL. The default value is false.
+ :type allow_host_name_cn_mismatch: object
+ :param allow_self_signed_server_cert: Specifies whether to allow
+ self-signed certificates from the server. The default value is false.
+ :type allow_self_signed_server_cert: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, http_path=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: + super(HBaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.port = port + self.http_path = http_path + self.authentication_type = authentication_type + self.username = username + self.password = password + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.encrypted_credential = encrypted_credential + self.type = 'HBase' + + +class HBaseObjectDataset(Dataset): + """HBase server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(HBaseObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'HBaseObject' + + +class HBaseSource(CopySource): + """A copy activity HBase server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'HBaseSource' + + +class HdfsLinkedService(LinkedService): + """Hadoop Distributed File System (HDFS) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The URL of the HDFS service endpoint, e.g. + http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with + resultType string). + :type url: object + :param authentication_type: Type of authentication used to connect to the + HDFS. Possible values are: Anonymous and Windows. Type: string (or + Expression with resultType string). + :type authentication_type: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + :param user_name: User name for Windows authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Windows authentication. 
+ :type password: ~azure.mgmt.datafactory.models.SecretBase + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + } + + def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, encrypted_credential=None, user_name=None, password=None, **kwargs) -> None: + super(HdfsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.authentication_type = authentication_type + self.encrypted_credential = encrypted_credential + self.user_name = user_name + self.password = password + self.type = 'Hdfs' + + +class HdfsLocation(DatasetLocation): + """The location of HDFS. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(HdfsLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) + + +class HdfsReadSettings(StoreReadSettings): + """HDFS read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). 
+ :type recursive: object + :param wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: HDFS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + :param distcp_settings: Specifies Distcp-related settings. + :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, distcp_settings=None, **kwargs) -> None: + super(HdfsReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + self.distcp_settings = distcp_settings + + +class HdfsSource(CopySource): + """A copy activity HDFS source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param recursive: If true, files under the folder path will be read + recursively. Default is true. 
Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param distcp_settings: Specifies Distcp-related settings. + :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, distcp_settings=None, **kwargs) -> None: + super(HdfsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.distcp_settings = distcp_settings + self.type = 'HdfsSource' + + +class HDInsightHiveActivity(ExecutionActivity): + """HDInsight Hive activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. + :type storage_linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: 'None', + 'Always', 'Failure' + :type get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :param script_path: Script path. Type: string (or Expression with + resultType string). + :type script_path: object + :param script_linked_service: Script linked service reference. + :type script_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param defines: Allows user to specify defines for Hive job request. + :type defines: dict[str, object] + :param variables: User specified arguments under hivevar namespace. + :type variables: list[object] + :param query_timeout: Query timeout value (in minutes). 
Takes effect only when
+ the HDInsight cluster uses ESP (Enterprise Security Package).
+ :type query_timeout: int
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+ 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'},
+ 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'},
+ 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'},
+ 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'},
+ 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'},
+ 'defines': {'key': 'typeProperties.defines', 'type': '{object}'},
+ 'variables': {'key': 'typeProperties.variables', 'type': '[object]'},
+ 'query_timeout': {'key': 'typeProperties.queryTimeout', 'type': 'int'},
+ }
+
+ def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, script_path=None, script_linked_service=None, defines=None, variables=None, query_timeout: int=None, **kwargs) -> None:
+ super(HDInsightHiveActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
+ self.storage_linked_services = storage_linked_services
+ self.arguments = arguments
+ self.get_debug_info = get_debug_info
+ self.script_path = script_path
+ self.script_linked_service = script_linked_service
+ self.defines = defines
+ self.variables = variables
+ self.query_timeout = query_timeout
+ self.type = 'HDInsightHive'
+
+
+class HDInsightLinkedService(LinkedService):
+ """HDInsight linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param cluster_uri: Required. HDInsight cluster URI. Type: string (or
+ Expression with resultType string).
+ :type cluster_uri: object
+ :param user_name: HDInsight cluster user name. Type: string (or Expression
+ with resultType string).
+ :type user_name: object
+ :param password: HDInsight cluster password.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase + :param linked_service_name: The Azure Storage linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param hcatalog_linked_service_name: A reference to the Azure SQL linked + service that points to the HCatalog database. + :type hcatalog_linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + :param is_esp_enabled: Specify if the HDInsight is created with ESP + (Enterprise Security Package). Type: Boolean. + :type is_esp_enabled: object + :param file_system: Specify the FileSystem if the main storage for the + HDInsight is ADLS Gen2. Type: string (or Expression with resultType + string). + :type file_system: object + """ + + _validation = { + 'type': {'required': True}, + 'cluster_uri': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'cluster_uri': {'key': 'typeProperties.clusterUri', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, + 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'is_esp_enabled': {'key': 'typeProperties.isEspEnabled', 'type': 'object'}, + 'file_system': {'key': 'typeProperties.fileSystem', 'type': 'object'}, + } + + def __init__(self, *, cluster_uri, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_name=None, password=None, linked_service_name=None, hcatalog_linked_service_name=None, encrypted_credential=None, is_esp_enabled=None, file_system=None, **kwargs) -> None: + super(HDInsightLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.cluster_uri = cluster_uri + self.user_name = user_name + self.password = password + self.linked_service_name = linked_service_name + self.hcatalog_linked_service_name = hcatalog_linked_service_name + self.encrypted_credential = encrypted_credential + self.is_esp_enabled = is_esp_enabled + self.file_system = file_system + self.type = 'HDInsight' + + +class HDInsightMapReduceActivity(ExecutionActivity): + """HDInsight MapReduce activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. 
+ :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. + :type storage_linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: 'None', + 'Always', 'Failure' + :type get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :param class_name: Required. Class name. Type: string (or Expression with + resultType string). + :type class_name: object + :param jar_file_path: Required. Jar path. Type: string (or Expression with + resultType string). + :type jar_file_path: object + :param jar_linked_service: Jar linked service reference. + :type jar_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param jar_libs: Jar libs. + :type jar_libs: list[object] + :param defines: Allows user to specify defines for the MapReduce job + request. + :type defines: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'class_name': {'required': True}, + 'jar_file_path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'class_name': {'key': 'typeProperties.className', 'type': 'object'}, + 'jar_file_path': {'key': 'typeProperties.jarFilePath', 'type': 'object'}, + 'jar_linked_service': {'key': 'typeProperties.jarLinkedService', 'type': 'LinkedServiceReference'}, + 'jar_libs': {'key': 'typeProperties.jarLibs', 'type': '[object]'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + } + + def __init__(self, *, name: str, class_name, jar_file_path, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, jar_linked_service=None, jar_libs=None, defines=None, **kwargs) -> None: + super(HDInsightMapReduceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + 
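+        # The super() call above populates the shared ExecutionActivity
+        # envelope (name, dependsOn, policy, ...); the assignments below map
+        # one-to-one onto the 'typeProperties.*' keys declared in
+        # _attribute_map.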
self.storage_linked_services = storage_linked_services
+        self.arguments = arguments
+        self.get_debug_info = get_debug_info
+        self.class_name = class_name
+        self.jar_file_path = jar_file_path
+        self.jar_linked_service = jar_linked_service
+        self.jar_libs = jar_libs
+        self.defines = defines
+        self.type = 'HDInsightMapReduce'
+
+
+class HDInsightOnDemandLinkedService(LinkedService):
+    """HDInsight on-demand linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param cluster_size: Required. Number of worker/data nodes in the
+     cluster. Suggestion value: 4. Type: string (or Expression with
+     resultType string).
+    :type cluster_size: object
+    :param time_to_live: Required. The allowed idle time for the on-demand
+     HDInsight cluster. Specifies how long the on-demand HDInsight cluster
+     stays alive after completion of an activity run if there are no other
+     active jobs in the cluster. The minimum value is 5 mins. Type: string
+     (or Expression with resultType string).
+    :type time_to_live: object
+    :param version: Required. Version of the HDInsight cluster. Type: string
+     (or Expression with resultType string).
+    :type version: object
+    :param linked_service_name: Required. Azure Storage linked service to be
+     used by the on-demand cluster for storing and processing data.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param host_subscription_id: Required. The customer’s subscription to
+     host the cluster. Type: string (or Expression with resultType string).
+    :type host_subscription_id: object
+    :param service_principal_id: The service principal id for the
+     hostSubscriptionId. Type: string (or Expression with resultType string).
+    :type service_principal_id: object
+    :param service_principal_key: The key for the service principal id.
+    :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
+    :param tenant: Required. The Tenant id/name to which the service
+     principal belongs. Type: string (or Expression with resultType string).
+    :type tenant: object
+    :param cluster_resource_group: Required. The resource group where the
+     cluster belongs. Type: string (or Expression with resultType string).
+    :type cluster_resource_group: object
+    :param cluster_name_prefix: The prefix of the cluster name; a
+     timestamp-based postfix keeps the name distinct. Type: string (or
+     Expression with resultType string).
+    :type cluster_name_prefix: object
+    :param cluster_user_name: The username to access the cluster. Type:
+     string (or Expression with resultType string).
+    :type cluster_user_name: object
+    :param cluster_password: The password to access the cluster.
+    :type cluster_password: ~azure.mgmt.datafactory.models.SecretBase
+    :param cluster_ssh_user_name: The username used to remotely connect to
+     the cluster’s node via SSH (for Linux). Type: string (or Expression
+     with resultType string).
+    :type cluster_ssh_user_name: object
+    :param cluster_ssh_password: The password used to remotely connect to
+     the cluster’s node via SSH (for Linux).
+    :type cluster_ssh_password: ~azure.mgmt.datafactory.models.SecretBase
+    :param additional_linked_service_names: Specifies additional storage
+     accounts for the HDInsight linked service so that the Data Factory
+     service can register them on your behalf.
+    :type additional_linked_service_names:
+     list[~azure.mgmt.datafactory.models.LinkedServiceReference]
+    :param hcatalog_linked_service_name: The name of the Azure SQL linked
+     service that points to the HCatalog database. The on-demand HDInsight
+     cluster is created by using the Azure SQL database as the metastore.
+    :type hcatalog_linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param cluster_type: The cluster type. Type: string (or Expression with
+     resultType string).
+    :type cluster_type: object
+    :param spark_version: The version of Spark if the cluster type is
+     'spark'. Type: string (or Expression with resultType string).
+    :type spark_version: object
+    :param core_configuration: Specifies the core configuration parameters
+     (as in core-site.xml) for the HDInsight cluster to be created.
+    :type core_configuration: object
+    :param h_base_configuration: Specifies the HBase configuration
+     parameters (hbase-site.xml) for the HDInsight cluster.
+    :type h_base_configuration: object
+    :param hdfs_configuration: Specifies the HDFS configuration parameters
+     (hdfs-site.xml) for the HDInsight cluster.
+    :type hdfs_configuration: object
+    :param hive_configuration: Specifies the Hive configuration parameters
+     (hive-site.xml) for the HDInsight cluster.
+    :type hive_configuration: object
+    :param map_reduce_configuration: Specifies the MapReduce configuration
+     parameters (mapred-site.xml) for the HDInsight cluster.
+    :type map_reduce_configuration: object
+    :param oozie_configuration: Specifies the Oozie configuration parameters
+     (oozie-site.xml) for the HDInsight cluster.
+    :type oozie_configuration: object
+    :param storm_configuration: Specifies the Storm configuration parameters
+     (storm-site.xml) for the HDInsight cluster.
+    :type storm_configuration: object
+    :param yarn_configuration: Specifies the Yarn configuration parameters
+     (yarn-site.xml) for the HDInsight cluster.
+    :type yarn_configuration: object
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    :param head_node_size: Specifies the size of the head node for the
+     HDInsight cluster.
+    :type head_node_size: object
+    :param data_node_size: Specifies the size of the data node for the
+     HDInsight cluster.
+    :type data_node_size: object
+    :param zookeeper_node_size: Specifies the size of the ZooKeeper node for
+     the HDInsight cluster.
+    :type zookeeper_node_size: object
+    :param script_actions: Custom script actions to run on the HDI on-demand
+     cluster once it's up. Please refer to
+     https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions.
+ :type script_actions: list[~azure.mgmt.datafactory.models.ScriptAction] + :param virtual_network_id: The ARM resource ID for the vNet to which the + cluster should be joined after creation. Type: string (or Expression with + resultType string). + :type virtual_network_id: object + :param subnet_name: The ARM resource ID for the subnet in the vNet. If + virtualNetworkId was specified, then this property is required. Type: + string (or Expression with resultType string). + :type subnet_name: object + """ + + _validation = { + 'type': {'required': True}, + 'cluster_size': {'required': True}, + 'time_to_live': {'required': True}, + 'version': {'required': True}, + 'linked_service_name': {'required': True}, + 'host_subscription_id': {'required': True}, + 'tenant': {'required': True}, + 'cluster_resource_group': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'cluster_size': {'key': 'typeProperties.clusterSize', 'type': 'object'}, + 'time_to_live': {'key': 'typeProperties.timeToLive', 'type': 'object'}, + 'version': {'key': 'typeProperties.version', 'type': 'object'}, + 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, + 'host_subscription_id': {'key': 'typeProperties.hostSubscriptionId', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'cluster_resource_group': {'key': 'typeProperties.clusterResourceGroup', 'type': 'object'}, + 'cluster_name_prefix': {'key': 'typeProperties.clusterNamePrefix', 'type': 'object'}, + 'cluster_user_name': {'key': 'typeProperties.clusterUserName', 'type': 'object'}, + 'cluster_password': {'key': 'typeProperties.clusterPassword', 'type': 'SecretBase'}, + 'cluster_ssh_user_name': {'key': 'typeProperties.clusterSshUserName', 'type': 'object'}, + 'cluster_ssh_password': {'key': 'typeProperties.clusterSshPassword', 'type': 'SecretBase'}, + 'additional_linked_service_names': {'key': 'typeProperties.additionalLinkedServiceNames', 'type': '[LinkedServiceReference]'}, + 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, + 'cluster_type': {'key': 'typeProperties.clusterType', 'type': 'object'}, + 'spark_version': {'key': 'typeProperties.sparkVersion', 'type': 'object'}, + 'core_configuration': {'key': 'typeProperties.coreConfiguration', 'type': 'object'}, + 'h_base_configuration': {'key': 'typeProperties.hBaseConfiguration', 'type': 'object'}, + 'hdfs_configuration': {'key': 'typeProperties.hdfsConfiguration', 'type': 'object'}, + 'hive_configuration': {'key': 'typeProperties.hiveConfiguration', 'type': 'object'}, + 'map_reduce_configuration': {'key': 'typeProperties.mapReduceConfiguration', 'type': 'object'}, + 'oozie_configuration': {'key': 'typeProperties.oozieConfiguration', 'type': 'object'}, + 'storm_configuration': {'key': 'typeProperties.stormConfiguration', 'type': 'object'}, + 'yarn_configuration': {'key': 'typeProperties.yarnConfiguration', 'type': 'object'}, + 
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'head_node_size': {'key': 'typeProperties.headNodeSize', 'type': 'object'}, + 'data_node_size': {'key': 'typeProperties.dataNodeSize', 'type': 'object'}, + 'zookeeper_node_size': {'key': 'typeProperties.zookeeperNodeSize', 'type': 'object'}, + 'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'}, + 'virtual_network_id': {'key': 'typeProperties.virtualNetworkId', 'type': 'object'}, + 'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'}, + } + + def __init__(self, *, cluster_size, time_to_live, version, linked_service_name, host_subscription_id, tenant, cluster_resource_group, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, cluster_name_prefix=None, cluster_user_name=None, cluster_password=None, cluster_ssh_user_name=None, cluster_ssh_password=None, additional_linked_service_names=None, hcatalog_linked_service_name=None, cluster_type=None, spark_version=None, core_configuration=None, h_base_configuration=None, hdfs_configuration=None, hive_configuration=None, map_reduce_configuration=None, oozie_configuration=None, storm_configuration=None, yarn_configuration=None, encrypted_credential=None, head_node_size=None, data_node_size=None, zookeeper_node_size=None, script_actions=None, virtual_network_id=None, subnet_name=None, **kwargs) -> None: + super(HDInsightOnDemandLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.cluster_size = cluster_size + self.time_to_live = time_to_live + self.version = version + self.linked_service_name = linked_service_name + self.host_subscription_id = host_subscription_id + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.cluster_resource_group = cluster_resource_group + self.cluster_name_prefix = cluster_name_prefix + self.cluster_user_name = cluster_user_name + self.cluster_password = cluster_password + self.cluster_ssh_user_name = cluster_ssh_user_name + self.cluster_ssh_password = cluster_ssh_password + self.additional_linked_service_names = additional_linked_service_names + self.hcatalog_linked_service_name = hcatalog_linked_service_name + self.cluster_type = cluster_type + self.spark_version = spark_version + self.core_configuration = core_configuration + self.h_base_configuration = h_base_configuration + self.hdfs_configuration = hdfs_configuration + self.hive_configuration = hive_configuration + self.map_reduce_configuration = map_reduce_configuration + self.oozie_configuration = oozie_configuration + self.storm_configuration = storm_configuration + self.yarn_configuration = yarn_configuration + self.encrypted_credential = encrypted_credential + self.head_node_size = head_node_size + self.data_node_size = data_node_size + self.zookeeper_node_size = zookeeper_node_size + self.script_actions = script_actions + self.virtual_network_id = virtual_network_id + self.subnet_name = subnet_name + self.type = 'HDInsightOnDemand' + + +class HDInsightPigActivity(ExecutionActivity): + """HDInsight Pig activity type. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. + :type storage_linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: 'None', + 'Always', 'Failure' + :type get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :param script_path: Script path. Type: string (or Expression with + resultType string). + :type script_path: object + :param script_linked_service: Script linked service reference. + :type script_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param defines: Allows user to specify defines for Pig job request. + :type defines: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, + 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + } + + def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, script_path=None, script_linked_service=None, defines=None, **kwargs) -> None: + super(HDInsightPigActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.storage_linked_services = storage_linked_services + self.arguments = arguments + self.get_debug_info = get_debug_info + self.script_path = script_path + self.script_linked_service = 
script_linked_service + self.defines = defines + self.type = 'HDInsightPig' + + +class HDInsightSparkActivity(ExecutionActivity): + """HDInsight Spark activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param root_path: Required. The root path in 'sparkJobLinkedService' for + all the job’s files. Type: string (or Expression with resultType string). + :type root_path: object + :param entry_file_path: Required. The relative path to the root folder of + the code/package to be executed. Type: string (or Expression with + resultType string). + :type entry_file_path: object + :param arguments: The user-specified arguments to HDInsightSparkActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: 'None', + 'Always', 'Failure' + :type get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :param spark_job_linked_service: The storage linked service for uploading + the entry file and dependencies, and for receiving logs. + :type spark_job_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param class_name: The application's Java/Spark main class. + :type class_name: str + :param proxy_user: The user to impersonate that will execute the job. + Type: string (or Expression with resultType string). + :type proxy_user: object + :param spark_config: Spark configuration property. 
+ :type spark_config: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'root_path': {'required': True}, + 'entry_file_path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'root_path': {'key': 'typeProperties.rootPath', 'type': 'object'}, + 'entry_file_path': {'key': 'typeProperties.entryFilePath', 'type': 'object'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'spark_job_linked_service': {'key': 'typeProperties.sparkJobLinkedService', 'type': 'LinkedServiceReference'}, + 'class_name': {'key': 'typeProperties.className', 'type': 'str'}, + 'proxy_user': {'key': 'typeProperties.proxyUser', 'type': 'object'}, + 'spark_config': {'key': 'typeProperties.sparkConfig', 'type': '{object}'}, + } + + def __init__(self, *, name: str, root_path, entry_file_path, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, arguments=None, get_debug_info=None, spark_job_linked_service=None, class_name: str=None, proxy_user=None, spark_config=None, **kwargs) -> None: + super(HDInsightSparkActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.root_path = root_path + self.entry_file_path = entry_file_path + self.arguments = arguments + self.get_debug_info = get_debug_info + self.spark_job_linked_service = spark_job_linked_service + self.class_name = class_name + self.proxy_user = proxy_user + self.spark_config = spark_config + self.type = 'HDInsightSpark' + + +class HDInsightStreamingActivity(ExecutionActivity): + """HDInsight streaming activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. + :type storage_linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. 
+ :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: 'None', + 'Always', 'Failure' + :type get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :param mapper: Required. Mapper executable name. Type: string (or + Expression with resultType string). + :type mapper: object + :param reducer: Required. Reducer executable name. Type: string (or + Expression with resultType string). + :type reducer: object + :param input: Required. Input blob path. Type: string (or Expression with + resultType string). + :type input: object + :param output: Required. Output blob path. Type: string (or Expression + with resultType string). + :type output: object + :param file_paths: Required. Paths to streaming job files. Can be + directories. + :type file_paths: list[object] + :param file_linked_service: Linked service reference where the files are + located. + :type file_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param combiner: Combiner executable name. Type: string (or Expression + with resultType string). + :type combiner: object + :param command_environment: Command line environment values. + :type command_environment: list[object] + :param defines: Allows user to specify defines for streaming job request. + :type defines: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'mapper': {'required': True}, + 'reducer': {'required': True}, + 'input': {'required': True}, + 'output': {'required': True}, + 'file_paths': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'mapper': {'key': 'typeProperties.mapper', 'type': 'object'}, + 'reducer': {'key': 'typeProperties.reducer', 'type': 'object'}, + 'input': {'key': 'typeProperties.input', 'type': 'object'}, + 'output': {'key': 'typeProperties.output', 'type': 'object'}, + 'file_paths': {'key': 'typeProperties.filePaths', 'type': '[object]'}, + 'file_linked_service': {'key': 'typeProperties.fileLinkedService', 'type': 'LinkedServiceReference'}, + 'combiner': {'key': 'typeProperties.combiner', 'type': 'object'}, + 'command_environment': {'key': 'typeProperties.commandEnvironment', 'type': '[object]'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + } + + def __init__(self, *, name: str, mapper, reducer, input, output, file_paths, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, file_linked_service=None, combiner=None, command_environment=None, defines=None, **kwargs) -> None: + super(HDInsightStreamingActivity, self).__init__(additional_properties=additional_properties, name=name, 
description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
+        self.storage_linked_services = storage_linked_services
+        self.arguments = arguments
+        self.get_debug_info = get_debug_info
+        self.mapper = mapper
+        self.reducer = reducer
+        self.input = input
+        self.output = output
+        self.file_paths = file_paths
+        self.file_linked_service = file_linked_service
+        self.combiner = combiner
+        self.command_environment = command_environment
+        self.defines = defines
+        self.type = 'HDInsightStreaming'
+
+
+class HiveLinkedService(LinkedService):
+    """Hive Server linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param host: Required. IP address or host name of the Hive server,
+     separated by ';' for multiple hosts (only when serviceDiscoveryMode is
+     enabled).
+    :type host: object
+    :param port: The TCP port that the Hive server uses to listen for client
+     connections.
+    :type port: object
+    :param server_type: The type of Hive server. Possible values include:
+     'HiveServer1', 'HiveServer2', 'HiveThriftServer'
+    :type server_type: str or ~azure.mgmt.datafactory.models.HiveServerType
+    :param thrift_transport_protocol: The transport protocol to use in the
+     Thrift layer. Possible values include: 'Binary', 'SASL', 'HTTP '
+    :type thrift_transport_protocol: str or
+     ~azure.mgmt.datafactory.models.HiveThriftTransportProtocol
+    :param authentication_type: Required. The authentication method used to
+     access the Hive server. Possible values include: 'Anonymous',
+     'Username', 'UsernameAndPassword', 'WindowsAzureHDInsightService'
+    :type authentication_type: str or
+     ~azure.mgmt.datafactory.models.HiveAuthenticationType
+    :param service_discovery_mode: true to indicate using the ZooKeeper
+     service; false otherwise.
+    :type service_discovery_mode: object
+    :param zoo_keeper_name_space: The namespace on ZooKeeper under which
+     Hive Server 2 nodes are added.
+    :type zoo_keeper_name_space: object
+    :param use_native_query: Specifies whether the driver uses native HiveQL
+     queries, or converts them into an equivalent form in HiveQL.
+    :type use_native_query: object
+    :param username: The user name that you use to access Hive Server.
+    :type username: object
+    :param password: The password corresponding to the user name that you
+     provided in the Username field.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param http_path: The partial URL corresponding to the Hive server.
+    :type http_path: object
+    :param enable_ssl: Specifies whether the connections to the server are
+     encrypted using SSL. The default value is false.
+    :type enable_ssl: object
+    :param trusted_cert_path: The full path of the .pem file containing
+     trusted CA certificates for verifying the server when connecting over SSL.
+ This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a + CA-issued SSL certificate name to match the host name of the server when + connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, + 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'service_discovery_mode': {'key': 'typeProperties.serviceDiscoveryMode', 'type': 'object'}, + 'zoo_keeper_name_space': {'key': 'typeProperties.zooKeeperNameSpace', 'type': 'object'}, + 'use_native_query': {'key': 'typeProperties.useNativeQuery', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, server_type=None, thrift_transport_protocol=None, service_discovery_mode=None, zoo_keeper_name_space=None, use_native_query=None, username=None, password=None, http_path=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: + super(HiveLinkedService, 
self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.port = port + self.server_type = server_type + self.thrift_transport_protocol = thrift_transport_protocol + self.authentication_type = authentication_type + self.service_discovery_mode = service_discovery_mode + self.zoo_keeper_name_space = zoo_keeper_name_space + self.use_native_query = use_native_query + self.username = username + self.password = password + self.http_path = http_path + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.encrypted_credential = encrypted_credential + self.type = 'Hive' + + +class HiveObjectDataset(Dataset): + """Hive Server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The table name of the Hive. Type: string (or Expression with + resultType string). + :type table: object + :param hive_object_dataset_schema: The schema name of the Hive. Type: + string (or Expression with resultType string). 
+ :type hive_object_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'hive_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, hive_object_dataset_schema=None, **kwargs) -> None: + super(HiveObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.table = table + self.hive_object_dataset_schema = hive_object_dataset_schema + self.type = 'HiveObject' + + +class HiveSource(CopySource): + """A copy activity Hive Server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+    :type query: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
+        super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.query = query
+        self.type = 'HiveSource'
+
+
+class HttpDataset(Dataset):
+    """A file in an HTTP web server.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset.
+     Type: array (or Expression with resultType array), itemType:
+     DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param relative_url: The relative URL, based on the URL in the
+     HttpLinkedService, that refers to an HTTP file. Type: string (or
+     Expression with resultType string).
+    :type relative_url: object
+    :param request_method: The HTTP method for the HTTP request. Type:
+     string (or Expression with resultType string).
+    :type request_method: object
+    :param request_body: The body for the HTTP request. Type: string (or
+     Expression with resultType string).
+    :type request_body: object
+    :param additional_headers: The headers for the HTTP Request. e.g.
+     request-header-name-1:request-header-value-1
+     ...
+     request-header-name-n:request-header-value-n Type: string (or
+     Expression with resultType string).
+    :type additional_headers: object
+    :param format: The format of files.
+    :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat
+    :param compression: The data compression method used on files.
+ :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, + 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, + 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, relative_url=None, request_method=None, request_body=None, additional_headers=None, format=None, compression=None, **kwargs) -> None: + super(HttpDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.relative_url = relative_url + self.request_method = request_method + self.request_body = request_body + self.additional_headers = additional_headers + self.format = format + self.compression = compression + self.type = 'HttpFile' + + +class HttpLinkedService(LinkedService): + """Linked service for an HTTP source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The base URL of the HTTP endpoint, e.g. + http://www.microsoft.com. Type: string (or Expression with resultType + string). + :type url: object + :param authentication_type: The authentication type to be used to connect + to the HTTP server. Possible values include: 'Basic', 'Anonymous', + 'Digest', 'Windows', 'ClientCertificate' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.HttpAuthenticationType + :param user_name: User name for Basic, Digest, or Windows authentication. + Type: string (or Expression with resultType string). 
+    :type user_name: object
+    :param password: Password for Basic, Digest, Windows, or
+     ClientCertificate with EmbeddedCertData authentication.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param embedded_cert_data: Base64 encoded certificate data for
+     ClientCertificate authentication. For on-premises copy with
+     ClientCertificate authentication, either CertThumbprint or
+     EmbeddedCertData/Password should be specified. Type: string (or
+     Expression with resultType string).
+    :type embedded_cert_data: object
+    :param cert_thumbprint: Thumbprint of certificate for ClientCertificate
+     authentication. Only valid for on-premises copy. For on-premises copy
+     with ClientCertificate authentication, either CertThumbprint or
+     EmbeddedCertData/Password should be specified. Type: string (or
+     Expression with resultType string).
+    :type cert_thumbprint: object
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    :param enable_server_certificate_validation: If true, validate the HTTPS
+     server SSL certificate. Default value is true. Type: boolean (or
+     Expression with resultType boolean).
+    :type enable_server_certificate_validation: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'url': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'url': {'key': 'typeProperties.url', 'type': 'object'},
+        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+        'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+        'embedded_cert_data': {'key': 'typeProperties.embeddedCertData', 'type': 'object'},
+        'cert_thumbprint': {'key': 'typeProperties.certThumbprint', 'type': 'object'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+        'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'},
+    }
+
+    def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, user_name=None, password=None, embedded_cert_data=None, cert_thumbprint=None, encrypted_credential=None, enable_server_certificate_validation=None, **kwargs) -> None:
+        super(HttpLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+        self.url = url
+        self.authentication_type = authentication_type
+        self.user_name = user_name
+        self.password = password
+        self.embedded_cert_data = embedded_cert_data
+        self.cert_thumbprint = cert_thumbprint
+        self.encrypted_credential = encrypted_credential
+        self.enable_server_certificate_validation = enable_server_certificate_validation
+        self.type = 'HttpServer'
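+
+# A minimal construction sketch (the URL and credentials are illustrative
+# assumptions; SecureString is the SecretBase variant defined elsewhere in
+# this models package):
+#
+#     http_ls = HttpLinkedService(
+#         url='https://example.com/feed',
+#         authentication_type='Basic',
+#         user_name='reader',
+#         password=SecureString(value='<password>'))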
+
+
+class HttpReadSettings(StoreReadSettings):
+    """Http read settings.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param type: Required. The read setting type.
+    :type type: str
+    :param max_concurrent_connections: The maximum concurrent connection
+     count for the source data store. Type: integer (or Expression with
+     resultType integer).
+    :type max_concurrent_connections: object
+    :param request_method: The HTTP method used to call the RESTful API. The
+     default is GET. Type: string (or Expression with resultType string).
+    :type request_method: object
+    :param request_body: The HTTP request body to the RESTful API if
+     requestMethod is POST. Type: string (or Expression with resultType
+     string).
+    :type request_body: object
+    :param additional_headers: The additional HTTP headers in the request to
+     the RESTful API. Type: string (or Expression with resultType string).
+    :type additional_headers: object
+    :param request_timeout: Specifies the timeout for an HTTP client to get
+     an HTTP response from the HTTP server.
+    :type request_timeout: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'request_method': {'key': 'requestMethod', 'type': 'object'},
+        'request_body': {'key': 'requestBody', 'type': 'object'},
+        'additional_headers': {'key': 'additionalHeaders', 'type': 'object'},
+        'request_timeout': {'key': 'requestTimeout', 'type': 'object'},
+    }
+
+    def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, request_method=None, request_body=None, additional_headers=None, request_timeout=None, **kwargs) -> None:
+        super(HttpReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.request_method = request_method
+        self.request_body = request_body
+        self.additional_headers = additional_headers
+        self.request_timeout = request_timeout
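+
+# Read-settings sketch (the values are illustrative, and the
+# 'HttpReadSettings' type string is an assumption based on the class name,
+# since the discriminator is passed through as-is):
+#
+#     read_settings = HttpReadSettings(
+#         type='HttpReadSettings',
+#         request_method='GET',
+#         request_timeout='00:01:40')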
Type: string + (or Expression with resultType string) + :type relative_url: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'relative_url': {'key': 'relativeUrl', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, relative_url=None, **kwargs) -> None: + super(HttpServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) + self.relative_url = relative_url + + +class HttpSource(CopySource): + """A copy activity source for an HTTP file. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param http_request_timeout: Specifies the timeout for a HTTP client to + get HTTP response from HTTP server. The default value is equivalent to + System.Net.HttpWebRequest.Timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, http_request_timeout=None, **kwargs) -> None: + super(HttpSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.http_request_timeout = http_request_timeout + self.type = 'HttpSource' + + +class HubspotLinkedService(LinkedService): + """Hubspot Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. 
+ :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param client_id: Required. The client ID associated with your Hubspot + application. + :type client_id: object + :param client_secret: The client secret associated with your Hubspot + application. + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param access_token: The access token obtained when initially + authenticating your OAuth integration. + :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param refresh_token: The refresh token obtained when initially + authenticating your OAuth integration. + :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, access_token=None, refresh_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(HubspotLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.client_id = client_id + 
self.client_secret = client_secret + self.access_token = access_token + self.refresh_token = refresh_token + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Hubspot' + + +class HubspotObjectDataset(Dataset): + """Hubspot Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(HubspotObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'HubspotObject' + + +class HubspotSource(CopySource): + """A copy activity Hubspot Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). 
+ :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'HubspotSource' + + +class IfConditionActivity(ControlActivity): + """This activity evaluates a boolean expression and executes either the + activities under the ifTrueActivities property or the ifFalseActivities + property depending on the result of the expression. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param expression: Required. An expression that would evaluate to Boolean. + This is used to determine the block of activities (ifTrueActivities or + ifFalseActivities) that will be executed. + :type expression: ~azure.mgmt.datafactory.models.Expression + :param if_true_activities: List of activities to execute if expression is + evaluated to true. This is an optional property and if not provided, the + activity will exit without any action. + :type if_true_activities: list[~azure.mgmt.datafactory.models.Activity] + :param if_false_activities: List of activities to execute if expression is + evaluated to false. This is an optional property and if not provided, the + activity will exit without any action. 
+    :type if_false_activities: list[~azure.mgmt.datafactory.models.Activity]
+    """
+
+    _validation = {
+        'name': {'required': True},
+        'type': {'required': True},
+        'expression': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'expression': {'key': 'typeProperties.expression', 'type': 'Expression'},
+        'if_true_activities': {'key': 'typeProperties.ifTrueActivities', 'type': '[Activity]'},
+        'if_false_activities': {'key': 'typeProperties.ifFalseActivities', 'type': '[Activity]'},
+    }
+
+    def __init__(self, *, name: str, expression, additional_properties=None, description: str=None, depends_on=None, user_properties=None, if_true_activities=None, if_false_activities=None, **kwargs) -> None:
+        super(IfConditionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs)
+        self.expression = expression
+        self.if_true_activities = if_true_activities
+        self.if_false_activities = if_false_activities
+        self.type = 'IfCondition'
+
+
+class ImpalaLinkedService(LinkedService):
+    """Impala server linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param host: Required. The IP address or host name of the Impala server.
+     (e.g. 192.168.222.160)
+    :type host: object
+    :param port: The TCP port that the Impala server uses to listen for client
+     connections. The default value is 21050.
+    :type port: object
+    :param authentication_type: Required. The authentication type to use.
+     Possible values include: 'Anonymous', 'SASLUsername',
+     'UsernameAndPassword'
+    :type authentication_type: str or
+     ~azure.mgmt.datafactory.models.ImpalaAuthenticationType
+    :param username: The user name used to access the Impala server. The
+     default value is anonymous when using SASLUsername.
+    :type username: object
+    :param password: The password corresponding to the user name when using
+     UsernameAndPassword.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param enable_ssl: Specifies whether the connections to the server are
+     encrypted using SSL. The default value is false.
+    :type enable_ssl: object
+    :param trusted_cert_path: The full path of the .pem file containing
+     trusted CA certificates for verifying the server when connecting over SSL.
+     This property can only be set when using SSL on self-hosted IR. The
+     default value is the cacerts.pem file installed with the IR.
+ :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a + CA-issued SSL certificate name to match the host name of the server when + connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: + super(ImpalaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.port = port + self.authentication_type = authentication_type + self.username = username + self.password = password + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.encrypted_credential = encrypted_credential + self.type = 'Impala' + + +class ImpalaObjectDataset(Dataset): + """Impala server dataset. + + All required parameters must be populated in order to send to Azure. 
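+
+    A minimal construction sketch; because the tableName property is being
+    retired, this uses the schema + table properties instead (all values are
+    illustrative)::
+
+        dataset = ImpalaObjectDataset(
+            linked_service_name=LinkedServiceReference(
+                reference_name='<impala linked service name>'),
+            impala_object_dataset_schema='default',
+            table='events')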
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The table name of the Impala. Type: string (or Expression + with resultType string). + :type table: object + :param impala_object_dataset_schema: The schema name of the Impala. Type: + string (or Expression with resultType string). + :type impala_object_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'impala_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, impala_object_dataset_schema=None, **kwargs) -> None: + super(ImpalaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.table = table + self.impala_object_dataset_schema = impala_object_dataset_schema + self.type = 'ImpalaObject' + + +class ImpalaSource(CopySource): + """A copy activity Impala server source. + + All required parameters must be populated in order to send to Azure. 
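+
+    A minimal construction sketch (the query text is illustrative)::
+
+        source = ImpalaSource(query='SELECT * FROM default.events')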
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'ImpalaSource' + + +class InformixLinkedService(LinkedService): + """Informix linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The non-access credential portion of + the connection string as well as an optional encrypted credential. Type: + string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param authentication_type: Type of authentication used to connect to the + Informix as ODBC data store. Possible values are: Anonymous and Basic. + Type: string (or Expression with resultType string). + :type authentication_type: object + :param credential: The access credential portion of the connection string + specified in driver-specific property-value format. + :type credential: ~azure.mgmt.datafactory.models.SecretBase + :param user_name: User name for Basic authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Basic authentication. 
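+     Concrete SecretBase values include, for example, SecureString and
+     AzureKeyVaultSecretReference.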
+ :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, credential=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(InformixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.authentication_type = authentication_type + self.credential = credential + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'Informix' + + +class InformixSink(CopySink): + """A copy activity Informix sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). 
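+     For example, a cleanup statement such as 'DELETE FROM target_table',
+     where the table name is illustrative.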
+ :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(InformixSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.pre_copy_script = pre_copy_script + self.type = 'InformixSink' + + +class InformixSource(CopySource): + """A copy activity source for Informix. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'InformixSource' + + +class InformixTableDataset(Dataset): + """The Informix table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. 
+ :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Informix table name. Type: string (or Expression + with resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(InformixTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'InformixTable' + + +class IntegrationRuntime(Model): + """Azure Data Factory nested object which serves as a compute resource for + activities. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SelfHostedIntegrationRuntime, ManagedIntegrationRuntime + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Integration runtime description. + :type description: str + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SelfHosted': 'SelfHostedIntegrationRuntime', 'Managed': 'ManagedIntegrationRuntime'} + } + + def __init__(self, *, additional_properties=None, description: str=None, **kwargs) -> None: + super(IntegrationRuntime, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.description = description + self.type = None + + +class IntegrationRuntimeAuthKeys(Model): + """The integration runtime authentication keys. + + :param auth_key1: The primary integration runtime authentication key. + :type auth_key1: str + :param auth_key2: The secondary integration runtime authentication key. + :type auth_key2: str + """ + + _attribute_map = { + 'auth_key1': {'key': 'authKey1', 'type': 'str'}, + 'auth_key2': {'key': 'authKey2', 'type': 'str'}, + } + + def __init__(self, *, auth_key1: str=None, auth_key2: str=None, **kwargs) -> None: + super(IntegrationRuntimeAuthKeys, self).__init__(**kwargs) + self.auth_key1 = auth_key1 + self.auth_key2 = auth_key2 + + +class IntegrationRuntimeComputeProperties(Model): + """The compute resource properties for managed integration runtime. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param location: The location for managed integration runtime. The + supported regions could be found on + https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement-activities + :type location: str + :param node_size: The node size requirement to managed integration + runtime. + :type node_size: str + :param number_of_nodes: The required number of nodes for managed + integration runtime. + :type number_of_nodes: int + :param max_parallel_executions_per_node: Maximum parallel executions count + per node for managed integration runtime. + :type max_parallel_executions_per_node: int + :param v_net_properties: VNet properties for managed integration runtime. 
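+     A construction sketch (the resource IDs are illustrative)::
+
+         IntegrationRuntimeVNetProperties(
+             v_net_id='<vnet resource id>', subnet='<subnet name>')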
+ :type v_net_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeVNetProperties + """ + + _validation = { + 'number_of_nodes': {'minimum': 1}, + 'max_parallel_executions_per_node': {'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'node_size': {'key': 'nodeSize', 'type': 'str'}, + 'number_of_nodes': {'key': 'numberOfNodes', 'type': 'int'}, + 'max_parallel_executions_per_node': {'key': 'maxParallelExecutionsPerNode', 'type': 'int'}, + 'v_net_properties': {'key': 'vNetProperties', 'type': 'IntegrationRuntimeVNetProperties'}, + } + + def __init__(self, *, additional_properties=None, location: str=None, node_size: str=None, number_of_nodes: int=None, max_parallel_executions_per_node: int=None, v_net_properties=None, **kwargs) -> None: + super(IntegrationRuntimeComputeProperties, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.location = location + self.node_size = node_size + self.number_of_nodes = number_of_nodes + self.max_parallel_executions_per_node = max_parallel_executions_per_node + self.v_net_properties = v_net_properties + + +class IntegrationRuntimeConnectionInfo(Model): + """Connection information for encrypting the on-premises data source + credentials. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar service_token: The token generated in service. Callers use this + token to authenticate to integration runtime. + :vartype service_token: str + :ivar identity_cert_thumbprint: The integration runtime SSL certificate + thumbprint. Click-Once application uses it to do server validation. + :vartype identity_cert_thumbprint: str + :ivar host_service_uri: The on-premises integration runtime host URL. + :vartype host_service_uri: str + :ivar version: The integration runtime version. + :vartype version: str + :ivar public_key: The public key for encrypting a credential when + transferring the credential to the integration runtime. + :vartype public_key: str + :ivar is_identity_cert_exprired: Whether the identity certificate is + expired. 
+ :vartype is_identity_cert_exprired: bool + """ + + _validation = { + 'service_token': {'readonly': True}, + 'identity_cert_thumbprint': {'readonly': True}, + 'host_service_uri': {'readonly': True}, + 'version': {'readonly': True}, + 'public_key': {'readonly': True}, + 'is_identity_cert_exprired': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'service_token': {'key': 'serviceToken', 'type': 'str'}, + 'identity_cert_thumbprint': {'key': 'identityCertThumbprint', 'type': 'str'}, + 'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + 'public_key': {'key': 'publicKey', 'type': 'str'}, + 'is_identity_cert_exprired': {'key': 'isIdentityCertExprired', 'type': 'bool'}, + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(IntegrationRuntimeConnectionInfo, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.service_token = None + self.identity_cert_thumbprint = None + self.host_service_uri = None + self.version = None + self.public_key = None + self.is_identity_cert_exprired = None + + +class IntegrationRuntimeCustomSetupScriptProperties(Model): + """Custom setup script properties for a managed dedicated integration runtime. + + :param blob_container_uri: The URI of the Azure blob container that + contains the custom setup script. + :type blob_container_uri: str + :param sas_token: The SAS token of the Azure blob container. + :type sas_token: ~azure.mgmt.datafactory.models.SecureString + """ + + _attribute_map = { + 'blob_container_uri': {'key': 'blobContainerUri', 'type': 'str'}, + 'sas_token': {'key': 'sasToken', 'type': 'SecureString'}, + } + + def __init__(self, *, blob_container_uri: str=None, sas_token=None, **kwargs) -> None: + super(IntegrationRuntimeCustomSetupScriptProperties, self).__init__(**kwargs) + self.blob_container_uri = blob_container_uri + self.sas_token = sas_token + + +class IntegrationRuntimeDataProxyProperties(Model): + """Data proxy properties for a managed dedicated integration runtime. + + :param connect_via: The self-hosted integration runtime reference. + :type connect_via: ~azure.mgmt.datafactory.models.EntityReference + :param staging_linked_service: The staging linked service reference. + :type staging_linked_service: + ~azure.mgmt.datafactory.models.EntityReference + :param path: The path to contain the staged data in the Blob storage. + :type path: str + """ + + _attribute_map = { + 'connect_via': {'key': 'connectVia', 'type': 'EntityReference'}, + 'staging_linked_service': {'key': 'stagingLinkedService', 'type': 'EntityReference'}, + 'path': {'key': 'path', 'type': 'str'}, + } + + def __init__(self, *, connect_via=None, staging_linked_service=None, path: str=None, **kwargs) -> None: + super(IntegrationRuntimeDataProxyProperties, self).__init__(**kwargs) + self.connect_via = connect_via + self.staging_linked_service = staging_linked_service + self.path = path + + +class IntegrationRuntimeMonitoringData(Model): + """Get monitoring data response. + + :param name: Integration runtime name. + :type name: str + :param nodes: Integration runtime node monitoring data. 
+ :type nodes: + list[~azure.mgmt.datafactory.models.IntegrationRuntimeNodeMonitoringData] + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'nodes': {'key': 'nodes', 'type': '[IntegrationRuntimeNodeMonitoringData]'}, + } + + def __init__(self, *, name: str=None, nodes=None, **kwargs) -> None: + super(IntegrationRuntimeMonitoringData, self).__init__(**kwargs) + self.name = name + self.nodes = nodes + + +class IntegrationRuntimeNodeIpAddress(Model): + """The IP address of self-hosted integration runtime node. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar ip_address: The IP address of self-hosted integration runtime node. + :vartype ip_address: str + """ + + _validation = { + 'ip_address': {'readonly': True}, + } + + _attribute_map = { + 'ip_address': {'key': 'ipAddress', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(IntegrationRuntimeNodeIpAddress, self).__init__(**kwargs) + self.ip_address = None + + +class IntegrationRuntimeNodeMonitoringData(Model): + """Monitoring data for integration runtime node. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar node_name: Name of the integration runtime node. + :vartype node_name: str + :ivar available_memory_in_mb: Available memory (MB) on the integration + runtime node. + :vartype available_memory_in_mb: int + :ivar cpu_utilization: CPU percentage on the integration runtime node. + :vartype cpu_utilization: int + :ivar concurrent_jobs_limit: Maximum concurrent jobs on the integration + runtime node. + :vartype concurrent_jobs_limit: int + :ivar concurrent_jobs_running: The number of jobs currently running on the + integration runtime node. + :vartype concurrent_jobs_running: int + :ivar max_concurrent_jobs: The maximum concurrent jobs in this integration + runtime. + :vartype max_concurrent_jobs: int + :ivar sent_bytes: Sent bytes on the integration runtime node. + :vartype sent_bytes: float + :ivar received_bytes: Received bytes on the integration runtime node. 
+ :vartype received_bytes: float + """ + + _validation = { + 'node_name': {'readonly': True}, + 'available_memory_in_mb': {'readonly': True}, + 'cpu_utilization': {'readonly': True}, + 'concurrent_jobs_limit': {'readonly': True}, + 'concurrent_jobs_running': {'readonly': True}, + 'max_concurrent_jobs': {'readonly': True}, + 'sent_bytes': {'readonly': True}, + 'received_bytes': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'node_name': {'key': 'nodeName', 'type': 'str'}, + 'available_memory_in_mb': {'key': 'availableMemoryInMB', 'type': 'int'}, + 'cpu_utilization': {'key': 'cpuUtilization', 'type': 'int'}, + 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, + 'concurrent_jobs_running': {'key': 'concurrentJobsRunning', 'type': 'int'}, + 'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'}, + 'sent_bytes': {'key': 'sentBytes', 'type': 'float'}, + 'received_bytes': {'key': 'receivedBytes', 'type': 'float'}, + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(IntegrationRuntimeNodeMonitoringData, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.node_name = None + self.available_memory_in_mb = None + self.cpu_utilization = None + self.concurrent_jobs_limit = None + self.concurrent_jobs_running = None + self.max_concurrent_jobs = None + self.sent_bytes = None + self.received_bytes = None + + +class IntegrationRuntimeReference(Model): + """Integration runtime reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Type of integration runtime. Default value: + "IntegrationRuntimeReference" . + :vartype type: str + :param reference_name: Required. Reference integration runtime name. + :type reference_name: str + :param parameters: Arguments for integration runtime. + :type parameters: dict[str, object] + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + type = "IntegrationRuntimeReference" + + def __init__(self, *, reference_name: str, parameters=None, **kwargs) -> None: + super(IntegrationRuntimeReference, self).__init__(**kwargs) + self.reference_name = reference_name + self.parameters = parameters + + +class IntegrationRuntimeRegenerateKeyParameters(Model): + """Parameters to regenerate the authentication key. + + :param key_name: The name of the authentication key to regenerate. + Possible values include: 'authKey1', 'authKey2' + :type key_name: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeyName + """ + + _attribute_map = { + 'key_name': {'key': 'keyName', 'type': 'str'}, + } + + def __init__(self, *, key_name=None, **kwargs) -> None: + super(IntegrationRuntimeRegenerateKeyParameters, self).__init__(**kwargs) + self.key_name = key_name + + +class IntegrationRuntimeResource(SubResource): + """Integration runtime resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. 
+ :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Integration runtime properties. + :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntime + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'IntegrationRuntime'}, + } + + def __init__(self, *, properties, **kwargs) -> None: + super(IntegrationRuntimeResource, self).__init__(**kwargs) + self.properties = properties + + +class IntegrationRuntimeSsisCatalogInfo(Model): + """Catalog information for managed dedicated integration runtime. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param catalog_server_endpoint: The catalog database server URL. + :type catalog_server_endpoint: str + :param catalog_admin_user_name: The administrator user name of catalog + database. + :type catalog_admin_user_name: str + :param catalog_admin_password: The password of the administrator user + account of the catalog database. + :type catalog_admin_password: ~azure.mgmt.datafactory.models.SecureString + :param catalog_pricing_tier: The pricing tier for the catalog database. + The valid values could be found in + https://azure.microsoft.com/en-us/pricing/details/sql-database/. Possible + values include: 'Basic', 'Standard', 'Premium', 'PremiumRS' + :type catalog_pricing_tier: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogPricingTier + """ + + _validation = { + 'catalog_admin_user_name': {'max_length': 128, 'min_length': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'catalog_server_endpoint': {'key': 'catalogServerEndpoint', 'type': 'str'}, + 'catalog_admin_user_name': {'key': 'catalogAdminUserName', 'type': 'str'}, + 'catalog_admin_password': {'key': 'catalogAdminPassword', 'type': 'SecureString'}, + 'catalog_pricing_tier': {'key': 'catalogPricingTier', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, catalog_server_endpoint: str=None, catalog_admin_user_name: str=None, catalog_admin_password=None, catalog_pricing_tier=None, **kwargs) -> None: + super(IntegrationRuntimeSsisCatalogInfo, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.catalog_server_endpoint = catalog_server_endpoint + self.catalog_admin_user_name = catalog_admin_user_name + self.catalog_admin_password = catalog_admin_password + self.catalog_pricing_tier = catalog_pricing_tier + + +class IntegrationRuntimeSsisProperties(Model): + """SSIS properties for managed integration runtime. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param catalog_info: Catalog information for managed dedicated integration + runtime. + :type catalog_info: + ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogInfo + :param license_type: License type for bringing your own license scenario. 
+     Possible values include: 'BasePrice', 'LicenseIncluded'
+    :type license_type: str or
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeLicenseType
+    :param custom_setup_script_properties: Custom setup script properties for
+     a managed dedicated integration runtime.
+    :type custom_setup_script_properties:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeCustomSetupScriptProperties
+    :param data_proxy_properties: Data proxy properties for a managed
+     dedicated integration runtime.
+    :type data_proxy_properties:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeDataProxyProperties
+    :param edition: The edition for the SSIS Integration Runtime. Possible
+     values include: 'Standard', 'Enterprise'
+    :type edition: str or
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeEdition
+    """
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'catalog_info': {'key': 'catalogInfo', 'type': 'IntegrationRuntimeSsisCatalogInfo'},
+        'license_type': {'key': 'licenseType', 'type': 'str'},
+        'custom_setup_script_properties': {'key': 'customSetupScriptProperties', 'type': 'IntegrationRuntimeCustomSetupScriptProperties'},
+        'data_proxy_properties': {'key': 'dataProxyProperties', 'type': 'IntegrationRuntimeDataProxyProperties'},
+        'edition': {'key': 'edition', 'type': 'str'},
+    }
+
+    def __init__(self, *, additional_properties=None, catalog_info=None, license_type=None, custom_setup_script_properties=None, data_proxy_properties=None, edition=None, **kwargs) -> None:
+        super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs)
+        self.additional_properties = additional_properties
+        self.catalog_info = catalog_info
+        self.license_type = license_type
+        self.custom_setup_script_properties = custom_setup_script_properties
+        self.data_proxy_properties = data_proxy_properties
+        self.edition = edition
+
+
+class IntegrationRuntimeStatus(Model):
+    """Integration runtime status.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: SelfHostedIntegrationRuntimeStatus,
+    ManagedIntegrationRuntimeStatus
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :ivar data_factory_name: The data factory name which the integration
+     runtime belongs to.
+    :vartype data_factory_name: str
+    :ivar state: The state of integration runtime. Possible values include:
+     'Initial', 'Stopped', 'Started', 'Starting', 'Stopping',
+     'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied'
+    :vartype state: str or
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeState
+    :param type: Required. Constant filled by server.
+ :type type: str + """ + + _validation = { + 'data_factory_name': {'readonly': True}, + 'state': {'readonly': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, + 'state': {'key': 'state', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SelfHosted': 'SelfHostedIntegrationRuntimeStatus', 'Managed': 'ManagedIntegrationRuntimeStatus'} + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(IntegrationRuntimeStatus, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.data_factory_name = None + self.state = None + self.type = None + + +class IntegrationRuntimeStatusListResponse(Model): + """A list of integration runtime status. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of integration runtime status. + :type value: + list[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse] + :param next_link: The link to the next page of results, if any remaining + results exist. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[IntegrationRuntimeStatusResponse]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__(self, *, value, next_link: str=None, **kwargs) -> None: + super(IntegrationRuntimeStatusListResponse, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class IntegrationRuntimeStatusResponse(Model): + """Integration runtime status response. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar name: The integration runtime name. + :vartype name: str + :param properties: Required. Integration runtime properties. + :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatus + """ + + _validation = { + 'name': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'IntegrationRuntimeStatus'}, + } + + def __init__(self, *, properties, **kwargs) -> None: + super(IntegrationRuntimeStatusResponse, self).__init__(**kwargs) + self.name = None + self.properties = properties + + +class IntegrationRuntimeVNetProperties(Model): + """VNet properties for managed integration runtime. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param v_net_id: The ID of the VNet that this integration runtime will + join. + :type v_net_id: str + :param subnet: The name of the subnet this integration runtime will join. + :type subnet: str + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'v_net_id': {'key': 'vNetId', 'type': 'str'}, + 'subnet': {'key': 'subnet', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, v_net_id: str=None, subnet: str=None, **kwargs) -> None: + super(IntegrationRuntimeVNetProperties, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.v_net_id = v_net_id + self.subnet = subnet + + +class JiraLinkedService(LinkedService): + """Jira Service linked service. 
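+
+    A minimal construction sketch (illustrative only: the host, user name,
+    and password below are placeholders, and SecureString is one of the
+    SecretBase implementations in this package):
+
+    .. code-block:: python
+
+        from azure.mgmt.datafactory.models import (
+            JiraLinkedService, SecureString)
+
+        jira_ls = JiraLinkedService(
+            host='jira.example.com',  # required
+            username='jira-user',     # required
+            password=SecureString(value='<password>'),
+            port=443,  # optional; defaults to 443 for HTTPS, 8080 for HTTP
+        )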
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The IP address or host name of the Jira service. + (e.g. jira.example.com) + :type host: object + :param port: The TCP port that the Jira server uses to listen for client + connections. The default value is 443 if connecting through HTTPS, or 8080 + if connecting through HTTP. + :type port: object + :param username: Required. The user name that you use to access Jira + Service. + :type username: object + :param password: The password corresponding to the user name that you + provided in the username field. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, password=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(JiraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.port = port + self.username = username + self.password = password + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Jira' + + +class JiraObjectDataset(Dataset): + """Jira Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(JiraObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'JiraObject' + + +class JiraSource(CopySource): + """A copy activity Jira Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'JiraSource' + + +class JsonFormat(DatasetStorageFormat): + """The data stored in JSON format. + + All required parameters must be populated in order to send to Azure. 
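+
+    A usage sketch (illustrative; the property values are placeholders drawn
+    from the field descriptions below):
+
+    .. code-block:: python
+
+        from azure.mgmt.datafactory.models import JsonFormat
+
+        fmt = JsonFormat(
+            file_pattern='setOfObjects',
+            nesting_separator='.',
+            encoding_name='utf-8',
+            json_node_reference='$.ArrayPath',
+            json_path_definition={'Column1': '$.Column1Path',
+                                  'Column2': 'Column2PathInArray'},
+        )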
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param serializer: Serializer. Type: string (or Expression with resultType + string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with + resultType string). + :type deserializer: object + :param type: Required. Constant filled by server. + :type type: str + :param file_pattern: File pattern of JSON. To be more specific, the way of + separating a collection of JSON objects. The default value is + 'setOfObjects'. It is case-sensitive. + :type file_pattern: object + :param nesting_separator: The character used to separate nesting levels. + Default value is '.' (dot). Type: string (or Expression with resultType + string). + :type nesting_separator: object + :param encoding_name: The code page name of the preferred encoding. If not + provided, the default value is 'utf-8', unless the byte order mark (BOM) + denotes another Unicode encoding. The full list of supported values can be + found in the 'Name' column of the table of encodings in the following + reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string + (or Expression with resultType string). + :type encoding_name: object + :param json_node_reference: The JSONPath of the JSON array element to be + flattened. Example: "$.ArrayPath". Type: string (or Expression with + resultType string). + :type json_node_reference: object + :param json_path_definition: The JSONPath definition for each column + mapping with a customized column name to extract data from JSON file. For + fields under root object, start with "$"; for fields inside the array + chosen by jsonNodeReference property, start from the array element. + Example: {"Column1": "$.Column1Path", "Column2": "Column2PathInArray"}. + Type: object (or Expression with resultType object). + :type json_path_definition: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'file_pattern': {'key': 'filePattern', 'type': 'object'}, + 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + 'encoding_name': {'key': 'encodingName', 'type': 'object'}, + 'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'}, + 'json_path_definition': {'key': 'jsonPathDefinition', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, file_pattern=None, nesting_separator=None, encoding_name=None, json_node_reference=None, json_path_definition=None, **kwargs) -> None: + super(JsonFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) + self.file_pattern = file_pattern + self.nesting_separator = nesting_separator + self.encoding_name = encoding_name + self.json_node_reference = json_node_reference + self.json_path_definition = json_path_definition + self.type = 'JsonFormat' + + +class LinkedIntegrationRuntime(Model): + """The linked integration runtime information. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar name: The name of the linked integration runtime. 
+    :vartype name: str
+    :ivar subscription_id: The subscription ID that the linked integration
+     runtime belongs to.
+    :vartype subscription_id: str
+    :ivar data_factory_name: The name of the data factory that the linked
+     integration runtime belongs to.
+    :vartype data_factory_name: str
+    :ivar data_factory_location: The location of the data factory that the
+     linked integration runtime belongs to.
+    :vartype data_factory_location: str
+    :ivar create_time: The time when the linked integration runtime was
+     created.
+    :vartype create_time: datetime
+    """
+
+    _validation = {
+        'name': {'readonly': True},
+        'subscription_id': {'readonly': True},
+        'data_factory_name': {'readonly': True},
+        'data_factory_location': {'readonly': True},
+        'create_time': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'name': {'key': 'name', 'type': 'str'},
+        'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
+        'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'},
+        'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'},
+        'create_time': {'key': 'createTime', 'type': 'iso-8601'},
+    }
+
+    def __init__(self, **kwargs) -> None:
+        super(LinkedIntegrationRuntime, self).__init__(**kwargs)
+        self.name = None
+        self.subscription_id = None
+        self.data_factory_name = None
+        self.data_factory_location = None
+        self.create_time = None
+
+
+class LinkedIntegrationRuntimeType(Model):
+    """The base definition of a linked integration runtime.
+
+    You probably want to use the sub-classes and not this class directly.
+    Known sub-classes are: LinkedIntegrationRuntimeRbacAuthorization,
+    LinkedIntegrationRuntimeKeyAuthorization
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param authorization_type: Required. Constant filled by server.
+    :type authorization_type: str
+    """
+
+    _validation = {
+        'authorization_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'authorization_type': {'key': 'authorizationType', 'type': 'str'},
+    }
+
+    _subtype_map = {
+        'authorization_type': {'RBAC': 'LinkedIntegrationRuntimeRbacAuthorization', 'Key': 'LinkedIntegrationRuntimeKeyAuthorization'}
+    }
+
+    def __init__(self, **kwargs) -> None:
+        super(LinkedIntegrationRuntimeType, self).__init__(**kwargs)
+        self.authorization_type = None
+
+
+class LinkedIntegrationRuntimeKeyAuthorization(LinkedIntegrationRuntimeType):
+    """The key authorization type for a linked integration runtime.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param authorization_type: Required. Constant filled by server.
+    :type authorization_type: str
+    :param key: Required. The key used for authorization.
+    :type key: ~azure.mgmt.datafactory.models.SecureString
+    """
+
+    _validation = {
+        'authorization_type': {'required': True},
+        'key': {'required': True},
+    }
+
+    _attribute_map = {
+        'authorization_type': {'key': 'authorizationType', 'type': 'str'},
+        'key': {'key': 'key', 'type': 'SecureString'},
+    }
+
+    def __init__(self, *, key, **kwargs) -> None:
+        super(LinkedIntegrationRuntimeKeyAuthorization, self).__init__(**kwargs)
+        self.key = key
+        self.authorization_type = 'Key'
+
+
+class LinkedIntegrationRuntimeRbacAuthorization(LinkedIntegrationRuntimeType):
+    """The role-based access control (RBAC) authorization type for a linked
+    integration runtime.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param authorization_type: Required. Constant filled by server.
+    :type authorization_type: str
+    :param resource_id: Required.
The resource identifier of the integration + runtime to be shared. + :type resource_id: str + """ + + _validation = { + 'authorization_type': {'required': True}, + 'resource_id': {'required': True}, + } + + _attribute_map = { + 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + } + + def __init__(self, *, resource_id: str, **kwargs) -> None: + super(LinkedIntegrationRuntimeRbacAuthorization, self).__init__(**kwargs) + self.resource_id = resource_id + self.authorization_type = 'RBAC' + + +class LinkedIntegrationRuntimeRequest(Model): + """Data factory name for linked integration runtime request. + + All required parameters must be populated in order to send to Azure. + + :param linked_factory_name: Required. The data factory name for linked + integration runtime. + :type linked_factory_name: str + """ + + _validation = { + 'linked_factory_name': {'required': True}, + } + + _attribute_map = { + 'linked_factory_name': {'key': 'factoryName', 'type': 'str'}, + } + + def __init__(self, *, linked_factory_name: str, **kwargs) -> None: + super(LinkedIntegrationRuntimeRequest, self).__init__(**kwargs) + self.linked_factory_name = linked_factory_name + + +class LinkedServiceReference(Model): + """Linked service reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Linked service reference type. Default value: + "LinkedServiceReference" . + :vartype type: str + :param reference_name: Required. Reference LinkedService name. + :type reference_name: str + :param parameters: Arguments for LinkedService. + :type parameters: dict[str, object] + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + type = "LinkedServiceReference" + + def __init__(self, *, reference_name: str, parameters=None, **kwargs) -> None: + super(LinkedServiceReference, self).__init__(**kwargs) + self.reference_name = reference_name + self.parameters = parameters + + +class LinkedServiceResource(SubResource): + """Linked service resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Properties of linked service. 
+ :type properties: ~azure.mgmt.datafactory.models.LinkedService + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'LinkedService'}, + } + + def __init__(self, *, properties, **kwargs) -> None: + super(LinkedServiceResource, self).__init__(**kwargs) + self.properties = properties + + +class LogStorageSettings(Model): + """Log storage settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param linked_service_name: Required. Log storage linked service + reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param path: The path to storage for storing detailed logs of activity + execution. Type: string (or Expression with resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'path': {'key': 'path', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, path=None, **kwargs) -> None: + super(LogStorageSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.linked_service_name = linked_service_name + self.path = path + + +class LookupActivity(ExecutionActivity): + """Lookup activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param source: Required. Dataset-specific source properties, same as copy + activity source. + :type source: ~azure.mgmt.datafactory.models.CopySource + :param dataset: Required. Lookup activity dataset reference. + :type dataset: ~azure.mgmt.datafactory.models.DatasetReference + :param first_row_only: Whether to return first row or all rows. Default + value is true. Type: boolean (or Expression with resultType boolean). 
+    :type first_row_only: object
+    """
+
+    _validation = {
+        'name': {'required': True},
+        'type': {'required': True},
+        'source': {'required': True},
+        'dataset': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+        'source': {'key': 'typeProperties.source', 'type': 'CopySource'},
+        'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'},
+        'first_row_only': {'key': 'typeProperties.firstRowOnly', 'type': 'object'},
+    }
+
+    def __init__(self, *, name: str, source, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, first_row_only=None, **kwargs) -> None:
+        super(LookupActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
+        self.source = source
+        self.dataset = dataset
+        self.first_row_only = first_row_only
+        self.type = 'Lookup'
+
+
+class MagentoLinkedService(LinkedService):
+    """Magento server linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param host: Required. The URL of the Magento instance. (e.g.
+     192.168.222.110/magento3)
+    :type host: object
+    :param access_token: The access token from Magento.
+    :type access_token: ~azure.mgmt.datafactory.models.SecretBase
+    :param use_encrypted_endpoints: Specifies whether the data source
+     endpoints are encrypted using HTTPS. The default value is true.
+    :type use_encrypted_endpoints: object
+    :param use_host_verification: Specifies whether to require the host name
+     in the server's certificate to match the host name of the server when
+     connecting over SSL. The default value is true.
+    :type use_host_verification: object
+    :param use_peer_verification: Specifies whether to verify the identity of
+     the server when connecting over SSL. The default value is true.
+    :type use_peer_verification: object
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(MagentoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.access_token = access_token + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Magento' + + +class MagentoObjectDataset(Dataset): + """Magento server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(MagentoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'MagentoObject' + + +class MagentoSource(CopySource): + """A copy activity Magento server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'MagentoSource' + + +class ManagedIntegrationRuntime(IntegrationRuntime): + """Managed integration runtime, including managed elastic and managed + dedicated integration runtimes. + + Variables are only populated by the server, and will be ignored when + sending a request. 
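+
+    A construction sketch (illustrative; the location, node size, and counts
+    are placeholder values, not defaults):
+
+    .. code-block:: python
+
+        from azure.mgmt.datafactory.models import (
+            IntegrationRuntimeComputeProperties, ManagedIntegrationRuntime)
+
+        managed_ir = ManagedIntegrationRuntime(
+            description='Managed IR for running SSIS packages',
+            compute_properties=IntegrationRuntimeComputeProperties(
+                location='West US',
+                node_size='Standard_D2_v3',
+                number_of_nodes=1,
+                max_parallel_executions_per_node=1,
+            ),
+        )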
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Integration runtime description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :ivar state: Integration runtime state, only valid for managed dedicated + integration runtime. Possible values include: 'Initial', 'Stopped', + 'Started', 'Starting', 'Stopping', 'NeedRegistration', 'Online', + 'Limited', 'Offline', 'AccessDenied' + :vartype state: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeState + :param compute_properties: The compute resource for managed integration + runtime. + :type compute_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeComputeProperties + :param ssis_properties: SSIS properties for managed integration runtime. + :type ssis_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisProperties + """ + + _validation = { + 'type': {'required': True}, + 'state': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'state': {'key': 'state', 'type': 'str'}, + 'compute_properties': {'key': 'typeProperties.computeProperties', 'type': 'IntegrationRuntimeComputeProperties'}, + 'ssis_properties': {'key': 'typeProperties.ssisProperties', 'type': 'IntegrationRuntimeSsisProperties'}, + } + + def __init__(self, *, additional_properties=None, description: str=None, compute_properties=None, ssis_properties=None, **kwargs) -> None: + super(ManagedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description, **kwargs) + self.state = None + self.compute_properties = compute_properties + self.ssis_properties = ssis_properties + self.type = 'Managed' + + +class ManagedIntegrationRuntimeError(Model): + """Error definition for managed integration runtime. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar time: The time when the error occurred. + :vartype time: datetime + :ivar code: Error code. + :vartype code: str + :ivar parameters: Managed integration runtime error parameters. + :vartype parameters: list[str] + :ivar message: Error message. + :vartype message: str + """ + + _validation = { + 'time': {'readonly': True}, + 'code': {'readonly': True}, + 'parameters': {'readonly': True}, + 'message': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'time': {'key': 'time', 'type': 'iso-8601'}, + 'code': {'key': 'code', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '[str]'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(ManagedIntegrationRuntimeError, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.time = None + self.code = None + self.parameters = None + self.message = None + + +class ManagedIntegrationRuntimeNode(Model): + """Properties of integration runtime node. + + Variables are only populated by the server, and will be ignored when + sending a request. 
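+
+    Instances are returned by the service rather than constructed locally. A
+    reading sketch (illustrative; assumes client is a
+    DataFactoryManagementClient and the resource names are placeholders):
+
+    .. code-block:: python
+
+        status = client.integration_runtimes.get_status(
+            'my-resource-group', 'my-factory', 'my-managed-ir')
+        for node in status.properties.nodes or []:
+            print(node.node_id, node.status)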
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar node_id: The managed integration runtime node id. + :vartype node_id: str + :ivar status: The managed integration runtime node status. Possible values + include: 'Starting', 'Available', 'Recycling', 'Unavailable' + :vartype status: str or + ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNodeStatus + :param errors: The errors that occurred on this integration runtime node. + :type errors: + list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError] + """ + + _validation = { + 'node_id': {'readonly': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'node_id': {'key': 'nodeId', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'errors': {'key': 'errors', 'type': '[ManagedIntegrationRuntimeError]'}, + } + + def __init__(self, *, additional_properties=None, errors=None, **kwargs) -> None: + super(ManagedIntegrationRuntimeNode, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.node_id = None + self.status = None + self.errors = errors + + +class ManagedIntegrationRuntimeOperationResult(Model): + """Properties of managed integration runtime operation result. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar type: The operation type. Could be start or stop. + :vartype type: str + :ivar start_time: The start time of the operation. + :vartype start_time: datetime + :ivar result: The operation result. + :vartype result: str + :ivar error_code: The error code. + :vartype error_code: str + :ivar parameters: Managed integration runtime error parameters. + :vartype parameters: list[str] + :ivar activity_id: The activity id for the operation request. + :vartype activity_id: str + """ + + _validation = { + 'type': {'readonly': True}, + 'start_time': {'readonly': True}, + 'result': {'readonly': True}, + 'error_code': {'readonly': True}, + 'parameters': {'readonly': True}, + 'activity_id': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'result': {'key': 'result', 'type': 'str'}, + 'error_code': {'key': 'errorCode', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '[str]'}, + 'activity_id': {'key': 'activityId', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(ManagedIntegrationRuntimeOperationResult, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = None + self.start_time = None + self.result = None + self.error_code = None + self.parameters = None + self.activity_id = None + + +class ManagedIntegrationRuntimeStatus(IntegrationRuntimeStatus): + """Managed integration runtime status. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. 
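+
+    A reading sketch (illustrative; assumes status was obtained from
+    integration_runtimes.get_status as in the node example above and
+    describes a managed runtime):
+
+    .. code-block:: python
+
+        props = status.properties  # ManagedIntegrationRuntimeStatus
+        print(props.state, props.create_time)
+        if props.last_operation is not None:
+            print(props.last_operation.result)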
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :ivar data_factory_name: The name of the data factory to which the
+     integration runtime belongs.
+    :vartype data_factory_name: str
+    :ivar state: The state of the integration runtime. Possible values
+     include: 'Initial', 'Stopped', 'Started', 'Starting', 'Stopping',
+     'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied'
+    :vartype state: str or
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeState
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :ivar create_time: The time at which the integration runtime was created,
+     in ISO 8601 format.
+    :vartype create_time: datetime
+    :ivar nodes: The list of nodes for managed integration runtime.
+    :vartype nodes:
+     list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNode]
+    :ivar other_errors: The errors that occurred on this integration runtime.
+    :vartype other_errors:
+     list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError]
+    :ivar last_operation: The last operation result that occurred on this
+     integration runtime.
+    :vartype last_operation:
+     ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeOperationResult
+    """
+
+    _validation = {
+        'data_factory_name': {'readonly': True},
+        'state': {'readonly': True},
+        'type': {'required': True},
+        'create_time': {'readonly': True},
+        'nodes': {'readonly': True},
+        'other_errors': {'readonly': True},
+        'last_operation': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'},
+        'state': {'key': 'state', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'create_time': {'key': 'typeProperties.createTime', 'type': 'iso-8601'},
+        'nodes': {'key': 'typeProperties.nodes', 'type': '[ManagedIntegrationRuntimeNode]'},
+        'other_errors': {'key': 'typeProperties.otherErrors', 'type': '[ManagedIntegrationRuntimeError]'},
+        'last_operation': {'key': 'typeProperties.lastOperation', 'type': 'ManagedIntegrationRuntimeOperationResult'},
+    }
+
+    def __init__(self, *, additional_properties=None, **kwargs) -> None:
+        super(ManagedIntegrationRuntimeStatus, self).__init__(additional_properties=additional_properties, **kwargs)
+        self.create_time = None
+        self.nodes = None
+        self.other_errors = None
+        self.last_operation = None
+        self.type = 'Managed'
+
+
+class MariaDBLinkedService(LinkedService):
+    """MariaDB server linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param connection_string: An ODBC connection string. Type: string,
+     SecureString or AzureKeyVaultSecretReference.
+ :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: + super(MariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.pwd = pwd + self.encrypted_credential = encrypted_credential + self.type = 'MariaDB' + + +class MariaDBSource(CopySource): + """A copy activity MariaDB server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'MariaDBSource' + + +class MariaDBTableDataset(Dataset): + """MariaDB server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+    :type table_name: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+    }
+
+    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None:
+        super(MariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+        self.table_name = table_name
+        self.type = 'MariaDBTable'
+
+
+class MarketoLinkedService(LinkedService):
+    """Marketo server linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param endpoint: Required. The endpoint of the Marketo server. (e.g.
+     123-ABC-321.mktorest.com)
+    :type endpoint: object
+    :param client_id: Required. The client ID of your Marketo service.
+    :type client_id: object
+    :param client_secret: The client secret of your Marketo service.
+    :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
+    :param use_encrypted_endpoints: Specifies whether the data source
+     endpoints are encrypted using HTTPS. The default value is true.
+    :type use_encrypted_endpoints: object
+    :param use_host_verification: Specifies whether to require the host name
+     in the server's certificate to match the host name of the server when
+     connecting over SSL. The default value is true.
+    :type use_host_verification: object
+    :param use_peer_verification: Specifies whether to verify the identity of
+     the server when connecting over SSL. The default value is true.
+    :type use_peer_verification: object
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, endpoint, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(MarketoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.endpoint = endpoint + self.client_id = client_id + self.client_secret = client_secret + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Marketo' + + +class MarketoObjectDataset(Dataset): + """Marketo server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+
+
+class MarketoObjectDataset(Dataset):
+    """Marketo server dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param table_name: The table name. Type: string (or Expression with
+     resultType string).
+    :type table_name: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+    }
+
+    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None:
+        super(MarketoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+        self.table_name = table_name
+        self.type = 'MarketoObject'
+
+
+class MarketoSource(CopySource):
+    """A copy activity Marketo server source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param query: A query to retrieve data from source. Type: string (or
+     Expression with resultType string).
+    :type query: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
+        super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.query = query
+        self.type = 'MarketoSource'
+
+
+class MicrosoftAccessLinkedService(LinkedService):
+    """Microsoft Access linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param connection_string: Required. The non-access credential portion of
+     the connection string as well as an optional encrypted credential. Type:
+     string, SecureString or AzureKeyVaultSecretReference.
+    :type connection_string: object
+    :param authentication_type: Type of authentication used to connect to
+     Microsoft Access as an ODBC data store. Possible values are: Anonymous
+     and Basic. Type: string (or Expression with resultType string).
+    :type authentication_type: object
+    :param credential: The access credential portion of the connection string
+     specified in driver-specific property-value format.
+    :type credential: ~azure.mgmt.datafactory.models.SecretBase
+    :param user_name: User name for Basic authentication. Type: string (or
+     Expression with resultType string).
+    :type user_name: object
+    :param password: Password for Basic authentication.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, credential=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(MicrosoftAccessLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.authentication_type = authentication_type + self.credential = credential + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'MicrosoftAccess' + + +class MicrosoftAccessSink(CopySink): + """A copy activity Microsoft Access sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). 
+ :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.pre_copy_script = pre_copy_script + self.type = 'MicrosoftAccessSink' + + +class MicrosoftAccessSource(CopySource): + """A copy activity source for Microsoft Access. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MicrosoftAccessSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'MicrosoftAccessSource' + + +class MicrosoftAccessTableDataset(Dataset): + """The Microsoft Access table dataset. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Microsoft Access table name. Type: string (or + Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(MicrosoftAccessTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'MicrosoftAccessTable' + + +class MongoDbCollectionDataset(Dataset): + """The MongoDB database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param collection_name: Required. The collection name of the MongoDB
+     database. Type: string (or Expression with resultType string).
+    :type collection_name: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+        'collection_name': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'},
+    }
+
+    def __init__(self, *, linked_service_name, collection_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None:
+        super(MongoDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+        self.collection_name = collection_name
+        self.type = 'MongoDbCollection'
+
+
+class MongoDbCursorMethodsProperties(Model):
+    """Cursor methods for MongoDB query.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param project: Specifies the fields to return in the documents that match
+     the query filter. To return all fields in the matching documents, omit
+     this parameter. Type: string (or Expression with resultType string).
+    :type project: object
+    :param sort: Specifies the order in which the query returns matching
+     documents. Type: string (or Expression with resultType string).
+    :type sort: object
+    :param skip: Specifies how many documents to skip and where MongoDB
+     begins returning results. This approach may be useful in implementing
+     paginated results. Type: integer (or Expression with resultType integer).
+    :type skip: object
+    :param limit: Specifies the maximum number of documents the server
+     returns. limit() is analogous to the LIMIT statement in a SQL database.
+     Type: integer (or Expression with resultType integer).
+ :type limit: object + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'project': {'key': 'project', 'type': 'object'}, + 'sort': {'key': 'sort', 'type': 'object'}, + 'skip': {'key': 'skip', 'type': 'object'}, + 'limit': {'key': 'limit', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, project=None, sort=None, skip=None, limit=None, **kwargs) -> None: + super(MongoDbCursorMethodsProperties, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.project = project + self.sort = sort + self.skip = skip + self.limit = limit + + +class MongoDbLinkedService(LinkedService): + """Linked service for MongoDb data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. The IP address or server name of the MongoDB + server. Type: string (or Expression with resultType string). + :type server: object + :param authentication_type: The authentication type to be used to connect + to the MongoDB database. Possible values include: 'Basic', 'Anonymous' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.MongoDbAuthenticationType + :param database_name: Required. The name of the MongoDB database that you + want to access. Type: string (or Expression with resultType string). + :type database_name: object + :param username: Username for authentication. Type: string (or Expression + with resultType string). + :type username: object + :param password: Password for authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param auth_source: Database to verify the username and password. Type: + string (or Expression with resultType string). + :type auth_source: object + :param port: The TCP port number that the MongoDB server uses to listen + for client connections. The default value is 27017. Type: integer (or + Expression with resultType integer), minimum: 0. + :type port: object + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. Type: boolean (or + Expression with resultType boolean). + :type enable_ssl: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + Type: boolean (or Expression with resultType boolean). + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'database_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'database_name': {'key': 'typeProperties.databaseName', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'auth_source': {'key': 'typeProperties.authSource', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, server, database_name, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, username=None, password=None, auth_source=None, port=None, enable_ssl=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: + super(MongoDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.server = server + self.authentication_type = authentication_type + self.database_name = database_name + self.username = username + self.password = password + self.auth_source = auth_source + self.port = port + self.enable_ssl = enable_ssl + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.encrypted_credential = encrypted_credential + self.type = 'MongoDb' + + +class MongoDbSource(CopySource): + """A copy activity source for a MongoDB database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Should be a SQL-92 query expression. Type: + string (or Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'MongoDbSource' + + +class MongoDbV2CollectionDataset(Dataset): + """The MongoDB database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection: Required. The collection name of the MongoDB database. + Type: string (or Expression with resultType string). 
+    :type collection: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+        'collection': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'collection': {'key': 'typeProperties.collection', 'type': 'object'},
+    }
+
+    def __init__(self, *, linked_service_name, collection, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None:
+        super(MongoDbV2CollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+        self.collection = collection
+        self.type = 'MongoDbV2Collection'
+
+
+class MongoDbV2LinkedService(LinkedService):
+    """Linked service for MongoDB data source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param connection_string: Required. The MongoDB connection string. Type:
+     string, SecureString or AzureKeyVaultSecretReference.
+    :type connection_string: object
+    :param database: Required. The name of the MongoDB database that you want
+     to access. Type: string (or Expression with resultType string).
+    :type database: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'connection_string': {'required': True},
+        'database': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+        'database': {'key': 'typeProperties.database', 'type': 'object'},
+    }
+
+    def __init__(self, *, connection_string, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None:
+        super(MongoDbV2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+        self.connection_string = connection_string
+        self.database = database
+        self.type = 'MongoDbV2'
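A minimal sketch pairing the dataset and linked service above, assuming the generated package is importable; the connection string, database, collection, and reference name are placeholders:

```python
from azure.mgmt.datafactory.models import (
    LinkedServiceReference,
    MongoDbV2CollectionDataset,
    MongoDbV2LinkedService,
)

# Placeholder connection values, for illustration only.
mongo_ls = MongoDbV2LinkedService(
    connection_string='mongodb://<host>:27017',
    database='mydb',
)
# The dataset references the linked service by its factory-level name.
orders = MongoDbV2CollectionDataset(
    linked_service_name=LinkedServiceReference(reference_name='MongoLinkedService'),
    collection='orders',
)
```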
+
+
+class MongoDbV2Source(CopySource):
+    """A copy activity source for a MongoDB database.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param filter: Specifies selection filter using query operators. To return
+     all documents in a collection, omit this parameter or pass an empty
+     document ({}). Type: string (or Expression with resultType string).
+    :type filter: object
+    :param cursor_methods: Cursor methods for MongoDB query
+    :type cursor_methods:
+     ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties
+    :param batch_size: Specifies the number of documents to return in each
+     batch of the response from the MongoDB instance. In most cases, modifying
+     the batch size will not affect the user or the application. This
+     property's main purpose is to avoid hitting the limitation on response
+     size. Type: integer (or Expression with resultType integer).
+    :type batch_size: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'filter': {'key': 'filter', 'type': 'object'},
+        'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'},
+        'batch_size': {'key': 'batchSize', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None, **kwargs) -> None:
+        super(MongoDbV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.filter = filter
+        self.cursor_methods = cursor_methods
+        self.batch_size = batch_size
+        self.type = 'MongoDbV2Source'
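A sketch of combining a selection filter with the cursor methods model defined earlier; the filter and sort documents and the numeric values are illustrative placeholders:

```python
from azure.mgmt.datafactory.models import (
    MongoDbCursorMethodsProperties,
    MongoDbV2Source,
)

# Select only active documents, newest first, in pages of 1000.
source = MongoDbV2Source(
    filter='{ "status": "active" }',
    cursor_methods=MongoDbCursorMethodsProperties(
        sort='{ "created": -1 }',  # descending by creation time
        skip=0,
        limit=1000,
    ),
    batch_size=100,  # documents per response batch from the server
)
```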
+
+
+class MySqlLinkedService(LinkedService):
+    """Linked service for MySQL data source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param connection_string: Required. The connection string.
+    :type connection_string: ~azure.mgmt.datafactory.models.SecretBase
+    :param password: The Azure key vault secret reference of password in
+     connection string.
+    :type password:
+     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'SecretBase'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(MySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'MySql' + + +class MySqlSource(CopySource): + """A copy activity source for MySQL databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'MySqlSource' + + +class MySqlTableDataset(Dataset): + """The MySQL table dataset. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The MySQL table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(MySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'MySqlTable' + + +class NetezzaLinkedService(LinkedService): + """Netezza linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. 
Type: string,
+     SecureString or AzureKeyVaultSecretReference.
+    :type connection_string: object
+    :param pwd: The Azure key vault secret reference of password in connection
+     string.
+    :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+        'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None:
+        super(NetezzaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+        self.connection_string = connection_string
+        self.pwd = pwd
+        self.encrypted_credential = encrypted_credential
+        self.type = 'Netezza'
+
+
+class NetezzaPartitionSettings(Model):
+    """The settings that will be leveraged for Netezza source partitioning.
+
+    :param partition_column_name: The name of the column in integer type that
+     will be used for range partitioning. Type: string (or Expression with
+     resultType string).
+    :type partition_column_name: object
+    :param partition_upper_bound: The maximum value of the column specified in
+     partitionColumnName that will be used for range partitioning. Type:
+     string (or Expression with resultType string).
+    :type partition_upper_bound: object
+    :param partition_lower_bound: The minimum value of the column specified in
+     partitionColumnName that will be used for range partitioning. Type:
+     string (or Expression with resultType string).
+    :type partition_lower_bound: object
+    """
+
+    _attribute_map = {
+        'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'},
+        'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'},
+        'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'},
+    }
+
+    def __init__(self, *, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None:
+        super(NetezzaPartitionSettings, self).__init__(**kwargs)
+        self.partition_column_name = partition_column_name
+        self.partition_upper_bound = partition_upper_bound
+        self.partition_lower_bound = partition_lower_bound
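A sketch of a Netezza linked service that keeps the password out of the ODBC connection string by pointing `pwd` at a Key Vault secret; the driver string, Key Vault linked service name, and secret name are placeholders:

```python
from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference,
    LinkedServiceReference,
    NetezzaLinkedService,
)

# Connection string without a password; the password is resolved from Key Vault.
netezza = NetezzaLinkedService(
    connection_string='Driver={NetezzaSQL};Server=<server>;Database=<db>;',
    pwd=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='MyKeyVault'),
        secret_name='netezza-password',
    ),
)
```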
+
+
+class NetezzaSource(CopySource):
+    """A copy activity Netezza source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param query: A query to retrieve data from source. Type: string (or
+     Expression with resultType string).
+    :type query: object
+    :param partition_option: The partition mechanism that will be used to read
+     from Netezza in parallel. Possible values include: 'None', 'DataSlice',
+     'DynamicRange'
+    :type partition_option: str or
+     ~azure.mgmt.datafactory.models.NetezzaPartitionOption
+    :param partition_settings: The settings that will be leveraged for Netezza
+     source partitioning.
+    :type partition_settings:
+     ~azure.mgmt.datafactory.models.NetezzaPartitionSettings
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+        'partition_option': {'key': 'partitionOption', 'type': 'str'},
+        'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'},
+    }
+
+    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, partition_option=None, partition_settings=None, **kwargs) -> None:
+        super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.query = query
+        self.partition_option = partition_option
+        self.partition_settings = partition_settings
+        self.type = 'NetezzaSource'
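A sketch of how the partition settings above plug into the source for parallel reads; the query, column name, and bounds are placeholders:

```python
from azure.mgmt.datafactory.models import (
    NetezzaPartitionSettings,
    NetezzaSource,
)

# Read in parallel over an integer column, splitting [1, 1000000] into ranges.
source = NetezzaSource(
    query='SELECT * FROM sales',
    partition_option='DynamicRange',
    partition_settings=NetezzaPartitionSettings(
        partition_column_name='sale_id',
        partition_lower_bound='1',
        partition_upper_bound='1000000',
    ),
)
```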
+
+
+class NetezzaTableDataset(Dataset):
+    """Netezza dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param table_name: This property will be retired. Please consider using
+     schema + table properties instead.
+    :type table_name: object
+    :param table: The table name of the Netezza database. Type: string (or
+     Expression with resultType string).
+    :type table: object
+    :param netezza_table_dataset_schema: The schema name of the Netezza
+     database. Type: string (or Expression with resultType string).
+    :type netezza_table_dataset_schema: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+        'table': {'key': 'typeProperties.table', 'type': 'object'},
+        'netezza_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
+    }
+
+    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, netezza_table_dataset_schema=None, **kwargs) -> None:
+        super(NetezzaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+        self.table_name = table_name
+        self.table = table
+        self.netezza_table_dataset_schema = netezza_table_dataset_schema
+        self.type = 'NetezzaTable'
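A sketch of the dataset above using the split `table` and `netezza_table_dataset_schema` properties rather than the retired `table_name`; the reference name, schema, and table are placeholders:

```python
from azure.mgmt.datafactory.models import (
    LinkedServiceReference,
    NetezzaTableDataset,
)

# Prefer the schema/table pair over the retired table_name property.
dataset = NetezzaTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='NetezzaLinkedService'),
    table='SALES',
    netezza_table_dataset_schema='ADMIN',
)
```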
+
+
+class ODataLinkedService(LinkedService):
+    """Open Data Protocol (OData) linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param url: Required. The URL of the OData service endpoint. Type: string
+     (or Expression with resultType string).
+    :type url: object
+    :param authentication_type: Type of authentication used to connect to the
+     OData service. Possible values include: 'Basic', 'Anonymous', 'Windows',
+     'AadServicePrincipal', 'ManagedServiceIdentity'
+    :type authentication_type: str or
+     ~azure.mgmt.datafactory.models.ODataAuthenticationType
+    :param user_name: User name of the OData service. Type: string (or
+     Expression with resultType string).
+    :type user_name: object
+    :param password: Password of the OData service.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param tenant: Specify the tenant information (domain name or tenant ID)
+     under which your application resides. Type: string (or Expression with
+     resultType string).
+    :type tenant: object
+    :param service_principal_id: Specify the application id of your
+     application registered in Azure Active Directory. Type: string (or
+     Expression with resultType string).
+    :type service_principal_id: object
+    :param aad_resource_id: Specify the resource you are requesting
+     authorization to use. Type: string (or Expression with resultType
+     string).
+    :type aad_resource_id: object
+    :param aad_service_principal_credential_type: Specify the credential type
+     (key or cert) to be used for the service principal. Possible values
+     include: 'ServicePrincipalKey', 'ServicePrincipalCert'
+    :type aad_service_principal_credential_type: str or
+     ~azure.mgmt.datafactory.models.ODataAadServicePrincipalCredentialType
+    :param service_principal_key: Specify the secret of your application
+     registered in Azure Active Directory. Type: string (or Expression with
+     resultType string).
+    :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
+    :param service_principal_embedded_cert: Specify the base64 encoded
+     certificate of your application registered in Azure Active Directory.
+     Type: string (or Expression with resultType string).
+    :type service_principal_embedded_cert:
+     ~azure.mgmt.datafactory.models.SecretBase
+    :param service_principal_embedded_cert_password: Specify the password of
+     your certificate if your certificate has a password and you are using
+     AadServicePrincipal authentication. Type: string (or Expression with
+     resultType string).
+    :type service_principal_embedded_cert_password:
+     ~azure.mgmt.datafactory.models.SecretBase
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'aad_service_principal_credential_type': {'key': 'typeProperties.aadServicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'service_principal_embedded_cert': {'key': 'typeProperties.servicePrincipalEmbeddedCert', 'type': 'SecretBase'}, + 'service_principal_embedded_cert_password': {'key': 'typeProperties.servicePrincipalEmbeddedCertPassword', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, user_name=None, password=None, tenant=None, service_principal_id=None, aad_resource_id=None, aad_service_principal_credential_type=None, service_principal_key=None, service_principal_embedded_cert=None, service_principal_embedded_cert_password=None, encrypted_credential=None, **kwargs) -> None: + super(ODataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.authentication_type = authentication_type + self.user_name = user_name + self.password = password + self.tenant = tenant + self.service_principal_id = service_principal_id + self.aad_resource_id = aad_resource_id + self.aad_service_principal_credential_type = aad_service_principal_credential_type + self.service_principal_key = service_principal_key + self.service_principal_embedded_cert = service_principal_embedded_cert + self.service_principal_embedded_cert_password = service_principal_embedded_cert_password + self.encrypted_credential = encrypted_credential + self.type = 'OData' + + +class ODataResourceDataset(Dataset): + """The Open Data Protocol (OData) resource dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. 
Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param path: The OData resource path. Type: string (or Expression with + resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, path=None, **kwargs) -> None: + super(ODataResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.path = path + self.type = 'ODataResource' + + +class ODataSource(CopySource): + """A copy activity source for OData source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: OData query. For example, "$top=1". Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ODataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'ODataSource' + + +class OdbcLinkedService(LinkedService): + """Open Database Connectivity (ODBC) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The non-access credential portion of + the connection string as well as an optional encrypted credential. Type: + string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param authentication_type: Type of authentication used to connect to the + ODBC data store. Possible values are: Anonymous and Basic. Type: string + (or Expression with resultType string). + :type authentication_type: object + :param credential: The access credential portion of the connection string + specified in driver-specific property-value format. + :type credential: ~azure.mgmt.datafactory.models.SecretBase + :param user_name: User name for Basic authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, credential=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(OdbcLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.authentication_type = authentication_type + self.credential = credential + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'Odbc' + + +class OdbcSink(CopySink): + """A copy activity ODBC sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). 
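Usage sketch (reviewer note): an OdbcLinkedService that keeps the access credential out of the plain connection string, per the connectionString/credential split documented above. Driver, server, and credential values are placeholders:

    from azure.mgmt.datafactory.models import OdbcLinkedService, SecureString

    odbc_ls = OdbcLinkedService(
        # non-access-credential portion of the connection string
        connection_string='Driver={ODBC Driver 17 for SQL Server};Server=myserver;Database=mydb;',
        # access credential portion, in driver-specific property-value format
        credential=SecureString(value='UID=db_user;PWD=<password>'),
    )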
+ :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.pre_copy_script = pre_copy_script + self.type = 'OdbcSink' + + +class OdbcSource(CopySource): + """A copy activity source for ODBC databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'OdbcSource' + + +class OdbcTableDataset(Dataset): + """The ODBC table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. 
+ :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The ODBC table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(OdbcTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'OdbcTable' + + +class Office365Dataset(Dataset): + """The Office365 account. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. 
+ :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: Required. Name of the dataset to extract from Office + 365. Type: string (or Expression with resultType string). + :type table_name: object + :param predicate: A predicate expression that can be used to filter the + specific rows to extract from Office 365. Type: string (or Expression with + resultType string). + :type predicate: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'table_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'predicate': {'key': 'typeProperties.predicate', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, predicate=None, **kwargs) -> None: + super(Office365Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.predicate = predicate + self.type = 'Office365Table' + + +class Office365LinkedService(LinkedService): + """Office365 linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param office365_tenant_id: Required. Azure tenant ID to which the Office + 365 account belongs. Type: string (or Expression with resultType string). + :type office365_tenant_id: object + :param service_principal_tenant_id: Required. Specify the tenant + information under which your Azure AD web application resides. Type: + string (or Expression with resultType string). + :type service_principal_tenant_id: object + :param service_principal_id: Required. Specify the application's client + ID. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. 
Specify the application's key. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'office365_tenant_id': {'required': True}, + 'service_principal_tenant_id': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'office365_tenant_id': {'key': 'typeProperties.office365TenantId', 'type': 'object'}, + 'service_principal_tenant_id': {'key': 'typeProperties.servicePrincipalTenantId', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, office365_tenant_id, service_principal_tenant_id, service_principal_id, service_principal_key, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, encrypted_credential=None, **kwargs) -> None: + super(Office365LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.office365_tenant_id = office365_tenant_id + self.service_principal_tenant_id = service_principal_tenant_id + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.encrypted_credential = encrypted_credential + self.type = 'Office365' + + +class Office365Source(CopySource): + """A copy activity source for an Office365 service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param allowed_groups: The groups containing all the users. Type: array of + strings (or Expression with resultType array of strings). + :type allowed_groups: object + :param user_scope_filter_uri: The user scope uri. Type: string (or + Expression with resultType string). 
+ :type user_scope_filter_uri: object + :param date_filter_column: The column to apply the start and end time + filters on. Type: string (or Expression with resultType string). + :type date_filter_column: object + :param start_time: Start time of the requested range for this dataset. + Type: string (or Expression with resultType string). + :type start_time: object + :param end_time: End time of the requested range for this dataset. Type: + string (or Expression with resultType string). + :type end_time: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'allowed_groups': {'key': 'allowedGroups', 'type': 'object'}, + 'user_scope_filter_uri': {'key': 'userScopeFilterUri', 'type': 'object'}, + 'date_filter_column': {'key': 'dateFilterColumn', 'type': 'object'}, + 'start_time': {'key': 'startTime', 'type': 'object'}, + 'end_time': {'key': 'endTime', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, allowed_groups=None, user_scope_filter_uri=None, date_filter_column=None, start_time=None, end_time=None, **kwargs) -> None: + super(Office365Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.allowed_groups = allowed_groups + self.user_scope_filter_uri = user_scope_filter_uri + self.date_filter_column = date_filter_column + self.start_time = start_time + self.end_time = end_time + self.type = 'Office365Source' + + +class Operation(Model): + """Azure Data Factory API operation definition. + + :param name: Operation name: {provider}/{resource}/{operation} + :type name: str + :param origin: The intended executor of the operation. + :type origin: str + :param display: Metadata associated with the operation. + :type display: ~azure.mgmt.datafactory.models.OperationDisplay + :param service_specification: Details about a service operation. + :type service_specification: + ~azure.mgmt.datafactory.models.OperationServiceSpecification + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'origin': {'key': 'origin', 'type': 'str'}, + 'display': {'key': 'display', 'type': 'OperationDisplay'}, + 'service_specification': {'key': 'properties.serviceSpecification', 'type': 'OperationServiceSpecification'}, + } + + def __init__(self, *, name: str=None, origin: str=None, display=None, service_specification=None, **kwargs) -> None: + super(Operation, self).__init__(**kwargs) + self.name = name + self.origin = origin + self.display = display + self.service_specification = service_specification + + +class OperationDisplay(Model): + """Metadata associated with the operation. + + :param description: The description of the operation. + :type description: str + :param provider: The name of the provider. + :type provider: str + :param resource: The name of the resource type on which the operation is + performed. + :type resource: str + :param operation: The type of operation: get, read, delete, etc.
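Usage sketch (reviewer note): wiring the Office365 models together. Office365LinkedService requires all four service principal properties; Office365Source then narrows the extraction by a date column and time window. All IDs, the secret, and the column name are placeholders:

    from azure.mgmt.datafactory.models import (
        Office365LinkedService, Office365Source, SecureString)

    o365_ls = Office365LinkedService(
        office365_tenant_id='<o365-tenant-guid>',          # placeholder
        service_principal_tenant_id='<aad-tenant-guid>',   # placeholder
        service_principal_id='<app-id>',                   # placeholder
        service_principal_key=SecureString(value='<app-secret>'),
    )

    o365_source = Office365Source(
        date_filter_column='CreatedDateTime',  # placeholder date column
        start_time='2019-04-01T00:00:00Z',
        end_time='2019-05-01T00:00:00Z',
    )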
+ :type operation: str + """ + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'provider': {'key': 'provider', 'type': 'str'}, + 'resource': {'key': 'resource', 'type': 'str'}, + 'operation': {'key': 'operation', 'type': 'str'}, + } + + def __init__(self, *, description: str=None, provider: str=None, resource: str=None, operation: str=None, **kwargs) -> None: + super(OperationDisplay, self).__init__(**kwargs) + self.description = description + self.provider = provider + self.resource = resource + self.operation = operation + + +class OperationLogSpecification(Model): + """Details about an operation related to logs. + + :param name: The name of the log category. + :type name: str + :param display_name: Localized display name. + :type display_name: str + :param blob_duration: Blobs created in the customer storage account, per + hour. + :type blob_duration: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'blob_duration': {'key': 'blobDuration', 'type': 'str'}, + } + + def __init__(self, *, name: str=None, display_name: str=None, blob_duration: str=None, **kwargs) -> None: + super(OperationLogSpecification, self).__init__(**kwargs) + self.name = name + self.display_name = display_name + self.blob_duration = blob_duration + + +class OperationMetricAvailability(Model): + """Defines how often data for a metric becomes available. + + :param time_grain: The granularity for the metric. + :type time_grain: str + :param blob_duration: Blob created in the customer storage account, per + hour. + :type blob_duration: str + """ + + _attribute_map = { + 'time_grain': {'key': 'timeGrain', 'type': 'str'}, + 'blob_duration': {'key': 'blobDuration', 'type': 'str'}, + } + + def __init__(self, *, time_grain: str=None, blob_duration: str=None, **kwargs) -> None: + super(OperationMetricAvailability, self).__init__(**kwargs) + self.time_grain = time_grain + self.blob_duration = blob_duration + + +class OperationMetricDimension(Model): + """Defines the metric dimension. + + :param name: The name of the dimension for the metric. + :type name: str + :param display_name: The display name of the metric dimension. + :type display_name: str + :param to_be_exported_for_shoebox: Whether the dimension should be + exported to Azure Monitor. + :type to_be_exported_for_shoebox: bool + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'to_be_exported_for_shoebox': {'key': 'toBeExportedForShoebox', 'type': 'bool'}, + } + + def __init__(self, *, name: str=None, display_name: str=None, to_be_exported_for_shoebox: bool=None, **kwargs) -> None: + super(OperationMetricDimension, self).__init__(**kwargs) + self.name = name + self.display_name = display_name + self.to_be_exported_for_shoebox = to_be_exported_for_shoebox + + +class OperationMetricSpecification(Model): + """Details about an operation related to metrics. + + :param name: The name of the metric. + :type name: str + :param display_name: Localized display name of the metric. + :type display_name: str + :param display_description: The description of the metric. + :type display_description: str + :param unit: The unit that the metric is measured in. + :type unit: str + :param aggregation_type: The type of metric aggregation. + :type aggregation_type: str + :param enable_regional_mdm_account: Whether or not the service is using + regional MDM accounts. 
+ :type enable_regional_mdm_account: str + :param source_mdm_account: The name of the MDM account. + :type source_mdm_account: str + :param source_mdm_namespace: The name of the MDM namespace. + :type source_mdm_namespace: str + :param availabilities: Defines how often data for metrics becomes + available. + :type availabilities: + list[~azure.mgmt.datafactory.models.OperationMetricAvailability] + :param dimensions: Defines the metric dimension. + :type dimensions: + list[~azure.mgmt.datafactory.models.OperationMetricDimension] + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'display_description': {'key': 'displayDescription', 'type': 'str'}, + 'unit': {'key': 'unit', 'type': 'str'}, + 'aggregation_type': {'key': 'aggregationType', 'type': 'str'}, + 'enable_regional_mdm_account': {'key': 'enableRegionalMdmAccount', 'type': 'str'}, + 'source_mdm_account': {'key': 'sourceMdmAccount', 'type': 'str'}, + 'source_mdm_namespace': {'key': 'sourceMdmNamespace', 'type': 'str'}, + 'availabilities': {'key': 'availabilities', 'type': '[OperationMetricAvailability]'}, + 'dimensions': {'key': 'dimensions', 'type': '[OperationMetricDimension]'}, + } + + def __init__(self, *, name: str=None, display_name: str=None, display_description: str=None, unit: str=None, aggregation_type: str=None, enable_regional_mdm_account: str=None, source_mdm_account: str=None, source_mdm_namespace: str=None, availabilities=None, dimensions=None, **kwargs) -> None: + super(OperationMetricSpecification, self).__init__(**kwargs) + self.name = name + self.display_name = display_name + self.display_description = display_description + self.unit = unit + self.aggregation_type = aggregation_type + self.enable_regional_mdm_account = enable_regional_mdm_account + self.source_mdm_account = source_mdm_account + self.source_mdm_namespace = source_mdm_namespace + self.availabilities = availabilities + self.dimensions = dimensions + + +class OperationServiceSpecification(Model): + """Details about a service operation. + + :param log_specifications: Details about operations related to logs. + :type log_specifications: + list[~azure.mgmt.datafactory.models.OperationLogSpecification] + :param metric_specifications: Details about operations related to metrics. + :type metric_specifications: + list[~azure.mgmt.datafactory.models.OperationMetricSpecification] + """ + + _attribute_map = { + 'log_specifications': {'key': 'logSpecifications', 'type': '[OperationLogSpecification]'}, + 'metric_specifications': {'key': 'metricSpecifications', 'type': '[OperationMetricSpecification]'}, + } + + def __init__(self, *, log_specifications=None, metric_specifications=None, **kwargs) -> None: + super(OperationServiceSpecification, self).__init__(**kwargs) + self.log_specifications = log_specifications + self.metric_specifications = metric_specifications + + +class OracleLinkedService(LinkedService): + """Oracle database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(OracleLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'Oracle' + + +class OraclePartitionSettings(Model): + """The settings that will be leveraged for Oracle source partitioning. + + :param partition_names: Names of the physical partitions of Oracle table. + :type partition_names: object + :param partition_column_name: The name of the column in integer type that + will be used for proceeding range partitioning. Type: string (or + Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). 
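Usage sketch (reviewer note): an OracleLinkedService whose password is pulled from Key Vault via the AzureKeyVaultSecretReference referenced in the docstring above. The connection string, Key Vault linked service name, and secret name are placeholders:

    from azure.mgmt.datafactory.models import (
        OracleLinkedService, AzureKeyVaultSecretReference, LinkedServiceReference)

    oracle_ls = OracleLinkedService(
        connection_string='Host=oracle01;Port=1521;Sid=ORCL;User Id=scott;',  # placeholder
        password=AzureKeyVaultSecretReference(
            store=LinkedServiceReference(reference_name='AzureKeyVault1'),  # placeholder AKV linked service
            secret_name='oracle-password',  # placeholder secret name
        ),
    )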
+ :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_names': {'key': 'partitionNames', 'type': 'object'}, + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__(self, *, partition_names=None, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None: + super(OraclePartitionSettings, self).__init__(**kwargs) + self.partition_names = partition_names + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound + + +class OracleServiceCloudLinkedService(LinkedService): + """Oracle Service Cloud linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The URL of the Oracle Service Cloud instance. + :type host: object + :param username: Required. The user name that you use to access Oracle + Service Cloud server. + :type username: object + :param password: Required. The password corresponding to the user name + that you provided in the username key. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. Type: boolean (or + Expression with resultType boolean). + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'username': {'required': True}, + 'password': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, username, password, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(OracleServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.username = username + self.password = password + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'OracleServiceCloud' + + +class OracleServiceCloudObjectDataset(Dataset): + """Oracle Service Cloud dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
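Usage sketch (reviewer note): a minimal OracleServiceCloudLinkedService. Host, user, and password are placeholders; the three use*Verification flags are left unset since they default to true per the docstrings above:

    from azure.mgmt.datafactory.models import (
        OracleServiceCloudLinkedService, SecureString)

    osc_ls = OracleServiceCloudLinkedService(
        host='https://mysite.example.com',  # placeholder instance URL
        username='integration.user',        # placeholder user
        password=SecureString(value='<password>'),
    )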
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(OracleServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'OracleServiceCloudObject' + + +class OracleServiceCloudSource(CopySource): + """A copy activity Oracle Service Cloud source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'OracleServiceCloudSource' + + +class OracleSink(CopySink): + """A copy activity Oracle sink. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.pre_copy_script = pre_copy_script + self.type = 'OracleSink' + + +class OracleSource(CopySource): + """A copy activity Oracle source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param oracle_reader_query: Oracle reader query. Type: string (or + Expression with resultType string). + :type oracle_reader_query: object + :param query_timeout: Query timeout. 
Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param partition_option: The partition mechanism that will be used for + Oracle read in parallel. Possible values include: 'None', + 'PhysicalPartitionsOfTable', 'DynamicRange' + :type partition_option: str or + ~azure.mgmt.datafactory.models.OraclePartitionOption + :param partition_settings: The settings that will be leveraged for Oracle + source partitioning. + :type partition_settings: + ~azure.mgmt.datafactory.models.OraclePartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, oracle_reader_query=None, query_timeout=None, partition_option=None, partition_settings=None, **kwargs) -> None: + super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.oracle_reader_query = oracle_reader_query + self.query_timeout = query_timeout + self.partition_option = partition_option + self.partition_settings = partition_settings + self.type = 'OracleSource' + + +class OracleTableDataset(Dataset): + """The on-premises Oracle database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. 
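Usage sketch (reviewer note): an OracleSource that reads in parallel with dynamic range partitioning, combining the partitionOption value with the OraclePartitionSettings model defined earlier in this patch. The column name and bounds are placeholders; bounds are passed as strings because the properties are expression-typed:

    from azure.mgmt.datafactory.models import OracleSource, OraclePartitionSettings

    oracle_source = OracleSource(
        partition_option='DynamicRange',
        partition_settings=OraclePartitionSettings(
            partition_column_name='ORDER_ID',  # placeholder integer column
            partition_lower_bound='1',
            partition_upper_bound='1000000',
        ),
        query_timeout='02:00:00',  # matches the documented timespan pattern
    )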
+ :type table_name: object + :param oracle_table_dataset_schema: The schema name of the on-premises + Oracle database. Type: string (or Expression with resultType string). + :type oracle_table_dataset_schema: object + :param table: The table name of the on-premises Oracle database. Type: + string (or Expression with resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'oracle_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, oracle_table_dataset_schema=None, table=None, **kwargs) -> None: + super(OracleTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.oracle_table_dataset_schema = oracle_table_dataset_schema + self.table = table + self.type = 'OracleTable' + + +class OrcFormat(DatasetStorageFormat): + """The data stored in Optimized Row Columnar (ORC) format. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param serializer: Serializer. Type: string (or Expression with resultType + string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with + resultType string). + :type deserializer: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, **kwargs) -> None: + super(OrcFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) + self.type = 'OrcFormat' + + +class ParameterSpecification(Model): + """Definition of a single parameter for an entity. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Parameter type. 
Possible values include: 'Object', + 'String', 'Int', 'Float', 'Bool', 'Array', 'SecureString' + :type type: str or ~azure.mgmt.datafactory.models.ParameterType + :param default_value: Default value of parameter. + :type default_value: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'default_value': {'key': 'defaultValue', 'type': 'object'}, + } + + def __init__(self, *, type, default_value=None, **kwargs) -> None: + super(ParameterSpecification, self).__init__(**kwargs) + self.type = type + self.default_value = default_value + + +class ParquetDataset(Dataset): + """Parquet dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the parquet storage. 
+    :type location: ~azure.mgmt.datafactory.models.DatasetLocation
+    :param compression_codec: The compression codec to use.
+    :type compression_codec: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+        'location': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'},
+        'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'},
+    }
+
+    def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, compression_codec=None, **kwargs) -> None:
+        super(ParquetDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+        self.location = location
+        self.compression_codec = compression_codec
+        self.type = 'Parquet'
+
+
+class ParquetFormat(DatasetStorageFormat):
+    """The data stored in Parquet format.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param serializer: Serializer. Type: string (or Expression with resultType
+     string).
+    :type serializer: object
+    :param deserializer: Deserializer. Type: string (or Expression with
+     resultType string).
+    :type deserializer: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'serializer': {'key': 'serializer', 'type': 'object'},
+        'deserializer': {'key': 'deserializer', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+    }
+
+    def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, **kwargs) -> None:
+        super(ParquetFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs)
+        self.type = 'ParquetFormat'
+
+
+class ParquetSink(CopySink):
+    """A copy activity Parquet sink.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param write_batch_size: Write batch size. Type: integer (or Expression
+     with resultType integer), minimum: 0.
+    :type write_batch_size: object
+    :param write_batch_timeout: Write batch timeout. Type: string (or
+     Expression with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type write_batch_timeout: object
+    :param sink_retry_count: Sink retry count. Type: integer (or Expression
+     with resultType integer).
+ :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Parquet store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.type = 'ParquetSink' + + +class ParquetSource(CopySource): + """A copy activity Parquet source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Parquet store settings. 
+ :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + super(ParquetSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.type = 'ParquetSource' + + +class PaypalLinkedService(LinkedService): + """Paypal Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The URL of the PayPal instance. (i.e. + api.sandbox.paypal.com) + :type host: object + :param client_id: Required. The client ID associated with your PayPal + application. + :type client_id: object + :param client_secret: The client secret associated with your PayPal + application. + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(PaypalLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.client_id = client_id + self.client_secret = client_secret + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Paypal' + + +class PaypalObjectDataset(Dataset): + """Paypal Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(PaypalObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'PaypalObject' + + +class PaypalSource(CopySource): + """A copy activity Paypal Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'PaypalSource' + + +class PhoenixLinkedService(LinkedService): + """Phoenix server linked service. + + All required parameters must be populated in order to send to Azure. 
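+
+    Example (editorial sketch, not produced by the code generator; the host,
+    user name and password below are placeholders)::
+
+        from azure.mgmt.datafactory.models import (
+            PhoenixLinkedService, SecureString)
+
+        # Minimal Phoenix linked service using username/password
+        # authentication over SSL.
+        phoenix_ls = PhoenixLinkedService(
+            host='192.168.222.160',
+            authentication_type='UsernameAndPassword',
+            username='phoenix-user',
+            password=SecureString(value='<password>'),
+            enable_ssl=True)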
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The IP address or host name of the Phoenix server. + (i.e. 192.168.222.160) + :type host: object + :param port: The TCP port that the Phoenix server uses to listen for + client connections. The default value is 8765. + :type port: object + :param http_path: The partial URL corresponding to the Phoenix server. + (i.e. /gateway/sandbox/phoenix/version). The default value is hbasephoenix + if using WindowsAzureHDInsightService. + :type http_path: object + :param authentication_type: Required. The authentication mechanism used to + connect to the Phoenix server. Possible values include: 'Anonymous', + 'UsernameAndPassword', 'WindowsAzureHDInsightService' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.PhoenixAuthenticationType + :param username: The user name used to connect to the Phoenix server. + :type username: object + :param password: The password corresponding to the user name. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a + CA-issued SSL certificate name to match the host name of the server when + connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, http_path=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: + super(PhoenixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.port = port + self.http_path = http_path + self.authentication_type = authentication_type + self.username = username + self.password = password + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.encrypted_credential = encrypted_credential + self.type = 'Phoenix' + + +class PhoenixObjectDataset(Dataset): + """Phoenix server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param table_name: This property will be retired. Please consider using
+     schema + table properties instead.
+    :type table_name: object
+    :param table: The table name of the Phoenix server. Type: string (or
+     Expression with resultType string).
+    :type table: object
+    :param phoenix_object_dataset_schema: The schema name of the Phoenix
+     server. Type: string (or Expression with resultType string).
+    :type phoenix_object_dataset_schema: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+        'table': {'key': 'typeProperties.table', 'type': 'object'},
+        'phoenix_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
+    }
+
+    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, phoenix_object_dataset_schema=None, **kwargs) -> None:
+        super(PhoenixObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+        self.table_name = table_name
+        self.table = table
+        self.phoenix_object_dataset_schema = phoenix_object_dataset_schema
+        self.type = 'PhoenixObject'
+
+
+class PhoenixSource(CopySource):
+    """A copy activity Phoenix server source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+ :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'PhoenixSource' + + +class PipelineFolder(Model): + """The folder that this Pipeline is in. If not specified, Pipeline will appear + at the root level. + + :param name: The name of the folder that this Pipeline is in. + :type name: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__(self, *, name: str=None, **kwargs) -> None: + super(PipelineFolder, self).__init__(**kwargs) + self.name = name + + +class PipelineReference(Model): + """Pipeline reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Pipeline reference type. Default value: + "PipelineReference" . + :vartype type: str + :param reference_name: Required. Reference pipeline name. + :type reference_name: str + :param name: Reference name. + :type name: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + } + + type = "PipelineReference" + + def __init__(self, *, reference_name: str, name: str=None, **kwargs) -> None: + super(PipelineReference, self).__init__(**kwargs) + self.reference_name = reference_name + self.name = name + + +class PipelineResource(SubResource): + """Pipeline resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: The description of the pipeline. + :type description: str + :param activities: List of activities in pipeline. + :type activities: list[~azure.mgmt.datafactory.models.Activity] + :param parameters: List of parameters for pipeline. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param variables: List of variables for pipeline. 
+ :type variables: dict[str, + ~azure.mgmt.datafactory.models.VariableSpecification] + :param concurrency: The max number of concurrent runs for the pipeline. + :type concurrency: int + :param annotations: List of tags that can be used for describing the + Pipeline. + :type annotations: list[object] + :param folder: The folder that this Pipeline is in. If not specified, + Pipeline will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.PipelineFolder + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'concurrency': {'minimum': 1}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'properties.description', 'type': 'str'}, + 'activities': {'key': 'properties.activities', 'type': '[Activity]'}, + 'parameters': {'key': 'properties.parameters', 'type': '{ParameterSpecification}'}, + 'variables': {'key': 'properties.variables', 'type': '{VariableSpecification}'}, + 'concurrency': {'key': 'properties.concurrency', 'type': 'int'}, + 'annotations': {'key': 'properties.annotations', 'type': '[object]'}, + 'folder': {'key': 'properties.folder', 'type': 'PipelineFolder'}, + } + + def __init__(self, *, additional_properties=None, description: str=None, activities=None, parameters=None, variables=None, concurrency: int=None, annotations=None, folder=None, **kwargs) -> None: + super(PipelineResource, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.description = description + self.activities = activities + self.parameters = parameters + self.variables = variables + self.concurrency = concurrency + self.annotations = annotations + self.folder = folder + + +class PipelineRun(Model): + """Information about a pipeline run. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar run_id: Identifier of a run. + :vartype run_id: str + :ivar run_group_id: Identifier that correlates all the recovery runs of a + pipeline run. + :vartype run_group_id: str + :ivar is_latest: Indicates if the recovered pipeline run is the latest in + its group. + :vartype is_latest: bool + :ivar pipeline_name: The pipeline name. + :vartype pipeline_name: str + :ivar parameters: The full or partial list of parameter name, value pair + used in the pipeline run. + :vartype parameters: dict[str, str] + :ivar invoked_by: Entity that started the pipeline run. + :vartype invoked_by: ~azure.mgmt.datafactory.models.PipelineRunInvokedBy + :ivar last_updated: The last updated timestamp for the pipeline run event + in ISO8601 format. + :vartype last_updated: datetime + :ivar run_start: The start time of a pipeline run in ISO8601 format. + :vartype run_start: datetime + :ivar run_end: The end time of a pipeline run in ISO8601 format. + :vartype run_end: datetime + :ivar duration_in_ms: The duration of a pipeline run. + :vartype duration_in_ms: int + :ivar status: The status of a pipeline run. + :vartype status: str + :ivar message: The message from a pipeline run. 
+    :vartype message: str
+    """
+
+    _validation = {
+        'run_id': {'readonly': True},
+        'run_group_id': {'readonly': True},
+        'is_latest': {'readonly': True},
+        'pipeline_name': {'readonly': True},
+        'parameters': {'readonly': True},
+        'invoked_by': {'readonly': True},
+        'last_updated': {'readonly': True},
+        'run_start': {'readonly': True},
+        'run_end': {'readonly': True},
+        'duration_in_ms': {'readonly': True},
+        'status': {'readonly': True},
+        'message': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'run_id': {'key': 'runId', 'type': 'str'},
+        'run_group_id': {'key': 'runGroupId', 'type': 'str'},
+        'is_latest': {'key': 'isLatest', 'type': 'bool'},
+        'pipeline_name': {'key': 'pipelineName', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{str}'},
+        'invoked_by': {'key': 'invokedBy', 'type': 'PipelineRunInvokedBy'},
+        'last_updated': {'key': 'lastUpdated', 'type': 'iso-8601'},
+        'run_start': {'key': 'runStart', 'type': 'iso-8601'},
+        'run_end': {'key': 'runEnd', 'type': 'iso-8601'},
+        'duration_in_ms': {'key': 'durationInMs', 'type': 'int'},
+        'status': {'key': 'status', 'type': 'str'},
+        'message': {'key': 'message', 'type': 'str'},
+    }
+
+    def __init__(self, *, additional_properties=None, **kwargs) -> None:
+        super(PipelineRun, self).__init__(**kwargs)
+        self.additional_properties = additional_properties
+        self.run_id = None
+        self.run_group_id = None
+        self.is_latest = None
+        self.pipeline_name = None
+        self.parameters = None
+        self.invoked_by = None
+        self.last_updated = None
+        self.run_start = None
+        self.run_end = None
+        self.duration_in_ms = None
+        self.status = None
+        self.message = None
+
+
+class PipelineRunInvokedBy(Model):
+    """Provides entity name and id that started the pipeline run.
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    :ivar name: Name of the entity that started the pipeline run.
+    :vartype name: str
+    :ivar id: The ID of the entity that started the run.
+    :vartype id: str
+    :ivar invoked_by_type: The type of the entity that started the run.
+    :vartype invoked_by_type: str
+    """
+
+    _validation = {
+        'name': {'readonly': True},
+        'id': {'readonly': True},
+        'invoked_by_type': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'name': {'key': 'name', 'type': 'str'},
+        'id': {'key': 'id', 'type': 'str'},
+        'invoked_by_type': {'key': 'invokedByType', 'type': 'str'},
+    }
+
+    def __init__(self, **kwargs) -> None:
+        super(PipelineRunInvokedBy, self).__init__(**kwargs)
+        self.name = None
+        self.id = None
+        self.invoked_by_type = None
+
+
+class PipelineRunsQueryResponse(Model):
+    """A list of pipeline runs.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param value: Required. List of pipeline runs.
+    :type value: list[~azure.mgmt.datafactory.models.PipelineRun]
+    :param continuation_token: The continuation token for getting the next
+     page of results, if any remaining results exist, null otherwise.
+    :type continuation_token: str
+    """
+
+    _validation = {
+        'value': {'required': True},
+    }
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[PipelineRun]'},
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+    }
+
+    def __init__(self, *, value, continuation_token: str=None, **kwargs) -> None:
+        super(PipelineRunsQueryResponse, self).__init__(**kwargs)
+        self.value = value
+        self.continuation_token = continuation_token
+
+
+class PolybaseSettings(Model):
+    """PolyBase settings.
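+
+    Example (editorial sketch, not produced by the code generator; the
+    thresholds shown are illustrative)::
+
+        from azure.mgmt.datafactory.models import PolybaseSettings
+
+        # Fail the load once more than 10 percent of the sampled rows
+        # are rejected.
+        polybase_settings = PolybaseSettings(
+            reject_type='percentage',
+            reject_value=10.0,
+            reject_sample_value=100,
+            use_type_default=True)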
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param reject_type: Reject type. Possible values include: 'value', + 'percentage' + :type reject_type: str or + ~azure.mgmt.datafactory.models.PolybaseSettingsRejectType + :param reject_value: Specifies the value or the percentage of rows that + can be rejected before the query fails. Type: number (or Expression with + resultType number), minimum: 0. + :type reject_value: object + :param reject_sample_value: Determines the number of rows to attempt to + retrieve before the PolyBase recalculates the percentage of rejected rows. + Type: integer (or Expression with resultType integer), minimum: 0. + :type reject_sample_value: object + :param use_type_default: Specifies how to handle missing values in + delimited text files when PolyBase retrieves data from the text file. + Type: boolean (or Expression with resultType boolean). + :type use_type_default: object + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'reject_type': {'key': 'rejectType', 'type': 'str'}, + 'reject_value': {'key': 'rejectValue', 'type': 'object'}, + 'reject_sample_value': {'key': 'rejectSampleValue', 'type': 'object'}, + 'use_type_default': {'key': 'useTypeDefault', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, reject_type=None, reject_value=None, reject_sample_value=None, use_type_default=None, **kwargs) -> None: + super(PolybaseSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.reject_type = reject_type + self.reject_value = reject_value + self.reject_sample_value = reject_sample_value + self.use_type_default = use_type_default + + +class PostgreSqlLinkedService(LinkedService): + """Linked service for PostgreSQL data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. + :type connection_string: ~azure.mgmt.datafactory.models.SecretBase + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'SecretBase'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(PostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'PostgreSql' + + +class PostgreSqlSource(CopySource): + """A copy activity source for PostgreSQL databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'PostgreSqlSource' + + +class PostgreSqlTableDataset(Dataset): + """The PostgreSQL table dataset. + + All required parameters must be populated in order to send to Azure. 
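+
+    Example (editorial sketch, not produced by the code generator; the linked
+    service name, schema and table below are placeholders)::
+
+        from azure.mgmt.datafactory.models import (
+            LinkedServiceReference, PostgreSqlTableDataset)
+
+        # Dataset pointing at public.orders through an existing
+        # PostgreSQL linked service.
+        dataset = PostgreSqlTableDataset(
+            linked_service_name=LinkedServiceReference(
+                reference_name='MyPostgreSqlLinkedService'),
+            postgre_sql_table_dataset_schema='public',
+            table='orders')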
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The PostgreSQL table name. Type: string (or Expression with + resultType string). + :type table: object + :param postgre_sql_table_dataset_schema: The PostgreSQL schema name. Type: + string (or Expression with resultType string). + :type postgre_sql_table_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'postgre_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, postgre_sql_table_dataset_schema=None, **kwargs) -> None: + super(PostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.table = table + self.postgre_sql_table_dataset_schema = postgre_sql_table_dataset_schema + self.type = 'PostgreSqlTable' + + +class PrestoLinkedService(LinkedService): + """Presto server linked service. + + All required parameters must be populated in order to send to Azure. 
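+
+    Example (editorial sketch, not produced by the code generator; the host
+    and catalog below are placeholders)::
+
+        from azure.mgmt.datafactory.models import PrestoLinkedService
+
+        # Anonymous access to a Presto coordinator, reading from the
+        # 'hive' catalog.
+        presto_ls = PrestoLinkedService(
+            host='192.168.222.160',
+            server_version='0.148-t',
+            catalog='hive',
+            authentication_type='Anonymous')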
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param host: Required. The IP address or host name of the Presto server.
+     (i.e. 192.168.222.160)
+    :type host: object
+    :param server_version: Required. The version of the Presto server. (i.e.
+     0.148-t)
+    :type server_version: object
+    :param catalog: Required. The catalog context for all requests against the
+     server.
+    :type catalog: object
+    :param port: The TCP port that the Presto server uses to listen for client
+     connections. The default value is 8080.
+    :type port: object
+    :param authentication_type: Required. The authentication mechanism used to
+     connect to the Presto server. Possible values include: 'Anonymous', 'LDAP'
+    :type authentication_type: str or
+     ~azure.mgmt.datafactory.models.PrestoAuthenticationType
+    :param username: The user name used to connect to the Presto server.
+    :type username: object
+    :param password: The password corresponding to the user name.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param enable_ssl: Specifies whether the connections to the server are
+     encrypted using SSL. The default value is false.
+    :type enable_ssl: object
+    :param trusted_cert_path: The full path of the .pem file containing
+     trusted CA certificates for verifying the server when connecting over SSL.
+     This property can only be set when using SSL on self-hosted IR. The
+     default value is the cacerts.pem file installed with the IR.
+    :type trusted_cert_path: object
+    :param use_system_trust_store: Specifies whether to use a CA certificate
+     from the system trust store or from a specified PEM file. The default
+     value is false.
+    :type use_system_trust_store: object
+    :param allow_host_name_cn_mismatch: Specifies whether to require a
+     CA-issued SSL certificate name to match the host name of the server when
+     connecting over SSL. The default value is false.
+    :type allow_host_name_cn_mismatch: object
+    :param allow_self_signed_server_cert: Specifies whether to allow
+     self-signed certificates from the server. The default value is false.
+    :type allow_self_signed_server_cert: object
+    :param time_zone_id: The local time zone used by the connection. Valid
+     values for this option are specified in the IANA Time Zone Database. The
+     default value is the system time zone.
+    :type time_zone_id: object
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'server_version': {'required': True}, + 'catalog': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'server_version': {'key': 'typeProperties.serverVersion', 'type': 'object'}, + 'catalog': {'key': 'typeProperties.catalog', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'time_zone_id': {'key': 'typeProperties.timeZoneID', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, server_version, catalog, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, time_zone_id=None, encrypted_credential=None, **kwargs) -> None: + super(PrestoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.server_version = server_version + self.catalog = catalog + self.port = port + self.authentication_type = authentication_type + self.username = username + self.password = password + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.time_zone_id = time_zone_id + self.encrypted_credential = encrypted_credential + self.type = 'Presto' + + +class PrestoObjectDataset(Dataset): + """Presto server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. 
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param table_name: This property will be retired. Please consider using
+     schema + table properties instead.
+    :type table_name: object
+    :param table: The table name of the Presto server. Type: string (or
+     Expression with resultType string).
+    :type table: object
+    :param presto_object_dataset_schema: The schema name of the Presto
+     server. Type: string (or Expression with resultType string).
+    :type presto_object_dataset_schema: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+        'table': {'key': 'typeProperties.table', 'type': 'object'},
+        'presto_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
+    }
+
+    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, presto_object_dataset_schema=None, **kwargs) -> None:
+        super(PrestoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+        self.table_name = table_name
+        self.table = table
+        self.presto_object_dataset_schema = presto_object_dataset_schema
+        self.type = 'PrestoObject'
+
+
+class PrestoSource(CopySource):
+    """A copy activity Presto server source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'PrestoSource' + + +class QuickBooksLinkedService(LinkedService): + """QuickBooks server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of the QuickBooks server. (i.e. + quickbooks.api.intuit.com) + :type endpoint: object + :param company_id: Required. The company ID of the QuickBooks company to + authorize. + :type company_id: object + :param consumer_key: Required. The consumer key for OAuth 1.0 + authentication. + :type consumer_key: object + :param consumer_secret: Required. The consumer secret for OAuth 1.0 + authentication. + :type consumer_secret: ~azure.mgmt.datafactory.models.SecretBase + :param access_token: Required. The access token for OAuth 1.0 + authentication. + :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param access_token_secret: Required. The access token secret for OAuth + 1.0 authentication. + :type access_token_secret: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
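PrestoSource only adds a query on top of the common CopySource knobs; a sketch with a hypothetical query, as it would be handed to a copy activity's source:

    from azure.mgmt.datafactory.models import PrestoSource

    # query is an expression object; a literal string is the simplest case.
    presto_source = PrestoSource(query='SELECT * FROM hive.default.orders LIMIT 10')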
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'company_id': {'required': True}, + 'consumer_key': {'required': True}, + 'consumer_secret': {'required': True}, + 'access_token': {'required': True}, + 'access_token_secret': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'company_id': {'key': 'typeProperties.companyId', 'type': 'object'}, + 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'object'}, + 'consumer_secret': {'key': 'typeProperties.consumerSecret', 'type': 'SecretBase'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'access_token_secret': {'key': 'typeProperties.accessTokenSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, endpoint, company_id, consumer_key, consumer_secret, access_token, access_token_secret, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, use_encrypted_endpoints=None, encrypted_credential=None, **kwargs) -> None: + super(QuickBooksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.endpoint = endpoint + self.company_id = company_id + self.consumer_key = consumer_key + self.consumer_secret = consumer_secret + self.access_token = access_token + self.access_token_secret = access_token_secret + self.use_encrypted_endpoints = use_encrypted_endpoints + self.encrypted_credential = encrypted_credential + self.type = 'QuickBooks' + + +class QuickBooksObjectDataset(Dataset): + """QuickBooks server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. 
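The QuickBooks linked service requires all six OAuth 1.0 values per its _validation map; a sketch with placeholder credentials:

    from azure.mgmt.datafactory.models import QuickBooksLinkedService, SecureString

    qb_ls = QuickBooksLinkedService(
        endpoint='quickbooks.api.intuit.com',
        company_id='<company-id>',
        consumer_key='<consumer-key>',
        consumer_secret=SecureString(value='<consumer-secret>'),
        access_token=SecureString(value='<access-token>'),
        access_token_secret=SecureString(value='<access-token-secret>'),
    )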
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(QuickBooksObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'QuickBooksObject' + + +class QuickBooksSource(CopySource): + """A copy activity QuickBooks server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'QuickBooksSource' + + +class RecurrenceSchedule(Model): + """The recurrence schedule. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param minutes: The minutes. + :type minutes: list[int] + :param hours: The hours. + :type hours: list[int] + :param week_days: The days of the week. + :type week_days: list[str or ~azure.mgmt.datafactory.models.DaysOfWeek] + :param month_days: The month days. + :type month_days: list[int] + :param monthly_occurrences: The monthly occurrences. + :type monthly_occurrences: + list[~azure.mgmt.datafactory.models.RecurrenceScheduleOccurrence] + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'minutes': {'key': 'minutes', 'type': '[int]'}, + 'hours': {'key': 'hours', 'type': '[int]'}, + 'week_days': {'key': 'weekDays', 'type': '[DaysOfWeek]'}, + 'month_days': {'key': 'monthDays', 'type': '[int]'}, + 'monthly_occurrences': {'key': 'monthlyOccurrences', 'type': '[RecurrenceScheduleOccurrence]'}, + } + + def __init__(self, *, additional_properties=None, minutes=None, hours=None, week_days=None, month_days=None, monthly_occurrences=None, **kwargs) -> None: + super(RecurrenceSchedule, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.minutes = minutes + self.hours = hours + self.week_days = week_days + self.month_days = month_days + self.monthly_occurrences = monthly_occurrences + + +class RecurrenceScheduleOccurrence(Model): + """The recurrence schedule occurrence. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param day: The day of the week. Possible values include: 'Sunday', + 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday' + :type day: str or ~azure.mgmt.datafactory.models.DayOfWeek + :param occurrence: The occurrence. + :type occurrence: int + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'day': {'key': 'day', 'type': 'DayOfWeek'}, + 'occurrence': {'key': 'occurrence', 'type': 'int'}, + } + + def __init__(self, *, additional_properties=None, day=None, occurrence: int=None, **kwargs) -> None: + super(RecurrenceScheduleOccurrence, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.day = day + self.occurrence = occurrence + + +class RedirectIncompatibleRowSettings(Model): + """Redirect incompatible row settings. 
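The two recurrence models above compose like this (a sketch; the schedule is normally attached to a ScheduleTriggerRecurrence from the same package):

    from azure.mgmt.datafactory.models import (
        RecurrenceSchedule,
        RecurrenceScheduleOccurrence,
    )

    # Fire at 08:30 on the first Monday of every month.
    schedule = RecurrenceSchedule(
        minutes=[30],
        hours=[8],
        monthly_occurrences=[
            RecurrenceScheduleOccurrence(day='Monday', occurrence=1),
        ],
    )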
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param linked_service_name: Required. Name of the Azure Storage, Storage + SAS, or Azure Data Lake Store linked service used for redirecting + incompatible row. Must be specified if redirectIncompatibleRowSettings is + specified. Type: string (or Expression with resultType string). + :type linked_service_name: object + :param path: The path for storing the redirect incompatible row data. + Type: string (or Expression with resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'object'}, + 'path': {'key': 'path', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, path=None, **kwargs) -> None: + super(RedirectIncompatibleRowSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.linked_service_name = linked_service_name + self.path = path + + +class RedshiftUnloadSettings(Model): + """The Amazon S3 settings needed for the interim Amazon S3 when copying from + Amazon Redshift with unload. With this, data from Amazon Redshift source + will be unloaded into S3 first and then copied into the targeted sink from + the interim S3. + + All required parameters must be populated in order to send to Azure. + + :param s3_linked_service_name: Required. The name of the Amazon S3 linked + service which will be used for the unload operation when copying from the + Amazon Redshift source. + :type s3_linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param bucket_name: Required. The bucket of the interim Amazon S3 which + will be used to store the unloaded data from Amazon Redshift source. The + bucket must be in the same region as the Amazon Redshift source. Type: + string (or Expression with resultType string). + :type bucket_name: object + """ + + _validation = { + 's3_linked_service_name': {'required': True}, + 'bucket_name': {'required': True}, + } + + _attribute_map = { + 's3_linked_service_name': {'key': 's3LinkedServiceName', 'type': 'LinkedServiceReference'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + } + + def __init__(self, *, s3_linked_service_name, bucket_name, **kwargs) -> None: + super(RedshiftUnloadSettings, self).__init__(**kwargs) + self.s3_linked_service_name = s3_linked_service_name + self.bucket_name = bucket_name + + +class RelationalSource(CopySource): + """A copy activity source for various relational databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. 
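A sketch of RedshiftUnloadSettings in use, assuming the AmazonRedshiftSource model from this same package accepts it; linked-service and bucket names are placeholders:

    from azure.mgmt.datafactory.models import (
        AmazonRedshiftSource,
        LinkedServiceReference,
        RedshiftUnloadSettings,
    )

    redshift_source = AmazonRedshiftSource(
        query='select * from public.sales',
        redshift_unload_settings=RedshiftUnloadSettings(
            s3_linked_service_name=LinkedServiceReference(reference_name='InterimS3'),
            bucket_name='interim-unload-bucket',  # same region as the Redshift source
        ),
    )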
Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'RelationalSource' + + +class RelationalTableDataset(Dataset): + """The relational table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The relational table name. Type: string (or Expression + with resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(RelationalTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'RelationalTable' + + +class RerunTriggerResource(SubResource): + """RerunTrigger resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Properties of the rerun trigger. + :type properties: + ~azure.mgmt.datafactory.models.RerunTumblingWindowTrigger + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'RerunTumblingWindowTrigger'}, + } + + def __init__(self, *, properties, **kwargs) -> None: + super(RerunTriggerResource, self).__init__(**kwargs) + self.properties = properties + + +class RerunTumblingWindowTrigger(Trigger): + """Trigger that schedules pipeline reruns for all fixed time interval windows + from a requested start time to requested end time. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. 
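A sketch pairing the generic relational models above; the ODBC-style linked service name is a placeholder:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        RelationalSource,
        RelationalTableDataset,
    )

    rel_ds = RelationalTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='OdbcLinkedService'),
        table_name='dbo.customers',
    )
    rel_source = RelationalSource(query='select * from dbo.customers')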
+ :type type: str + :param parent_trigger: The parent trigger reference. + :type parent_trigger: object + :param requested_start_time: Required. The start time for the time period + for which restatement is initiated. Only UTC time is currently supported. + :type requested_start_time: datetime + :param requested_end_time: Required. The end time for the time period for + which restatement is initiated. Only UTC time is currently supported. + :type requested_end_time: datetime + :param max_concurrency: Required. The max number of parallel time windows + (ready for execution) for which a rerun is triggered. + :type max_concurrency: int + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'requested_start_time': {'required': True}, + 'requested_end_time': {'required': True}, + 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, + 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, + 'requested_end_time': {'key': 'typeProperties.requestedEndTime', 'type': 'iso-8601'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + } + + def __init__(self, *, requested_start_time, requested_end_time, max_concurrency: int, additional_properties=None, description: str=None, annotations=None, parent_trigger=None, **kwargs) -> None: + super(RerunTumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) + self.parent_trigger = parent_trigger + self.requested_start_time = requested_start_time + self.requested_end_time = requested_end_time + self.max_concurrency = max_concurrency + self.type = 'RerunTumblingWindowTrigger' + + +class RerunTumblingWindowTriggerActionParameters(Model): + """Rerun tumbling window trigger Parameters. + + All required parameters must be populated in order to send to Azure. + + :param start_time: Required. The start time for the time period for which + restatement is initiated. Only UTC time is currently supported. + :type start_time: datetime + :param end_time: Required. The end time for the time period for which + restatement is initiated. Only UTC time is currently supported. + :type end_time: datetime + :param max_concurrency: Required. The max number of parallel time windows + (ready for execution) for which a rerun is triggered. + :type max_concurrency: int + """ + + _validation = { + 'start_time': {'required': True}, + 'end_time': {'required': True}, + 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + } + + _attribute_map = { + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, + 'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'}, + } + + def __init__(self, *, start_time, end_time, max_concurrency: int, **kwargs) -> None: + super(RerunTumblingWindowTriggerActionParameters, self).__init__(**kwargs) + self.start_time = start_time + self.end_time = end_time + self.max_concurrency = max_concurrency + + +class ResponsysLinkedService(LinkedService): + """Responsys linked service. 
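A construction sketch for the rerun trigger above. Since parent_trigger is untyped (object), the reference payload shown is illustrative, not a documented shape:

    from datetime import datetime
    from azure.mgmt.datafactory.models import RerunTumblingWindowTrigger

    rerun_trigger = RerunTumblingWindowTrigger(
        parent_trigger={'referenceName': 'DailyWindowTrigger',  # hypothetical
                        'type': 'TriggerReference'},
        requested_start_time=datetime(2019, 6, 1),  # UTC only
        requested_end_time=datetime(2019, 6, 2),    # UTC only
        max_concurrency=10,                         # validated to 1..50
    )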
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of the Responsys server. + :type endpoint: object + :param client_id: Required. The client ID associated with the Responsys + application. Type: string (or Expression with resultType string). + :type client_id: object + :param client_secret: The client secret associated with the Responsys + application. Type: string (or Expression with resultType string). + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. Type: boolean (or + Expression with resultType boolean). + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, endpoint, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(ResponsysLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.endpoint = endpoint + self.client_id = client_id + self.client_secret = client_secret + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Responsys' + + +class ResponsysObjectDataset(Dataset): + """Responsys dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(ResponsysObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'ResponsysObject' + + +class ResponsysSource(CopySource): + """A copy activity Responsys source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'ResponsysSource' + + +class RestResourceDataset(Dataset): + """A Rest service dataset. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param relative_url: The relative URL to the resource that the RESTful API + provides. Type: string (or Expression with resultType string). + :type relative_url: object + :param request_method: The HTTP method used to call the RESTful API. The + default is GET. Type: string (or Expression with resultType string). + :type request_method: object + :param request_body: The HTTP request body to the RESTful API if + requestMethod is POST. Type: string (or Expression with resultType + string). + :type request_body: object + :param additional_headers: The additional HTTP headers in the request to + the RESTful API. Type: string (or Expression with resultType string). + :type additional_headers: object + :param pagination_rules: The pagination rules to compose next page + requests. Type: string (or Expression with resultType string). 
+ :type pagination_rules: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'},
+ 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'},
+ 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'},
+ 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'},
+ 'pagination_rules': {'key': 'typeProperties.paginationRules', 'type': 'object'},
+ }
+
+ def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, relative_url=None, request_method=None, request_body=None, additional_headers=None, pagination_rules=None, **kwargs) -> None:
+ super(RestResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+ self.relative_url = relative_url
+ self.request_method = request_method
+ self.request_body = request_body
+ self.additional_headers = additional_headers
+ self.pagination_rules = pagination_rules
+ self.type = 'RestResource'
+
+
+class RestServiceLinkedService(LinkedService):
+ """Rest Service linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param url: Required. The base URL of the REST service.
+ :type url: object
+ :param enable_server_certificate_validation: Whether to validate the
+ server-side SSL certificate when connecting to the endpoint. The default
+ value is true. Type: boolean (or Expression with resultType boolean).
+ :type enable_server_certificate_validation: object
+ :param authentication_type: Required. Type of authentication used to
+ connect to the REST service. Possible values include: 'Anonymous',
+ 'Basic', 'AadServicePrincipal', 'ManagedServiceIdentity'
+ :type authentication_type: str or
+ ~azure.mgmt.datafactory.models.RestServiceAuthenticationType
+ :param user_name: The user name used in Basic authentication type.
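A sketch of the REST dataset above. The pagination rule shown is an illustrative AbsoluteUrl rule, since pagination_rules is untyped here; names are placeholders:

    from azure.mgmt.datafactory.models import LinkedServiceReference, RestResourceDataset

    rest_ds = RestResourceDataset(
        linked_service_name=LinkedServiceReference(reference_name='RestServiceLinkedService'),
        relative_url='/v1/orders',
        request_method='GET',
        pagination_rules={'AbsoluteUrl': '$.nextLink'},  # hypothetical rule
    )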
+ :type user_name: object + :param password: The password used in Basic authentication type. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param service_principal_id: The application's client ID used in + AadServicePrincipal authentication type. + :type service_principal_id: object + :param service_principal_key: The application's key used in + AadServicePrincipal authentication type. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The tenant information (domain name or tenant ID) used in + AadServicePrincipal authentication type under which your application + resides. + :type tenant: object + :param aad_resource_id: The resource you are requesting authorization to + use. + :type aad_resource_id: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, url, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, enable_server_certificate_validation=None, user_name=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, aad_resource_id=None, encrypted_credential=None, **kwargs) -> None: + super(RestServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.enable_server_certificate_validation = enable_server_certificate_validation + self.authentication_type = authentication_type + self.user_name = user_name + self.password = password + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.aad_resource_id = aad_resource_id + self.encrypted_credential = encrypted_credential + self.type = 'RestService' + + +class RestSource(CopySource): + """A copy activity Rest service source. + + All required parameters must be populated in order to send to Azure. 
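A sketch of the REST linked service above using AadServicePrincipal authentication; all identifiers are placeholders:

    from azure.mgmt.datafactory.models import RestServiceLinkedService, SecureString

    rest_ls = RestServiceLinkedService(
        url='https://api.contoso.com',
        authentication_type='AadServicePrincipal',
        service_principal_id='<application-id>',
        service_principal_key=SecureString(value='<application-key>'),
        tenant='contoso.onmicrosoft.com',
        aad_resource_id='https://api.contoso.com',
    )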
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param request_method: The HTTP method used to call the RESTful API. The
+ default is GET. Type: string (or Expression with resultType string).
+ :type request_method: object
+ :param request_body: The HTTP request body to the RESTful API if
+ requestMethod is POST. Type: string (or Expression with resultType
+ string).
+ :type request_body: object
+ :param additional_headers: The additional HTTP headers in the request to
+ the RESTful API. Type: string (or Expression with resultType string).
+ :type additional_headers: object
+ :param pagination_rules: The pagination rules to compose next page
+ requests. Type: string (or Expression with resultType string).
+ :type pagination_rules: object
+ :param http_request_timeout: The timeout (TimeSpan) to get an HTTP
+ response. It is the timeout to get a response, not the timeout to read
+ response data. Default value: 00:01:40. Type: string (or Expression with
+ resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type http_request_timeout: object
+ :param request_interval: The time to wait before sending the next page
+ request.
+ :type request_interval: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'request_method': {'key': 'requestMethod', 'type': 'object'}, + 'request_body': {'key': 'requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, + 'pagination_rules': {'key': 'paginationRules', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + 'request_interval': {'key': 'requestInterval', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, request_method=None, request_body=None, additional_headers=None, pagination_rules=None, http_request_timeout=None, request_interval=None, **kwargs) -> None: + super(RestSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.request_method = request_method + self.request_body = request_body + self.additional_headers = additional_headers + self.pagination_rules = pagination_rules + self.http_request_timeout = http_request_timeout + self.request_interval = request_interval + self.type = 'RestSource' + + +class RetryPolicy(Model): + """Execution policy for an activity. + + :param count: Maximum ordinary retry attempts. Default is 0. Type: integer + (or Expression with resultType integer), minimum: 0. + :type count: object + :param interval_in_seconds: Interval between retries in seconds. Default + is 30. + :type interval_in_seconds: int + """ + + _validation = { + 'interval_in_seconds': {'maximum': 86400, 'minimum': 30}, + } + + _attribute_map = { + 'count': {'key': 'count', 'type': 'object'}, + 'interval_in_seconds': {'key': 'intervalInSeconds', 'type': 'int'}, + } + + def __init__(self, *, count=None, interval_in_seconds: int=None, **kwargs) -> None: + super(RetryPolicy, self).__init__(**kwargs) + self.count = count + self.interval_in_seconds = interval_in_seconds + + +class RunFilterParameters(Model): + """Query parameters for listing runs. + + All required parameters must be populated in order to send to Azure. + + :param continuation_token: The continuation token for getting the next + page of results. Null for first page. + :type continuation_token: str + :param last_updated_after: Required. The time at or after which the run + event was updated in 'ISO 8601' format. + :type last_updated_after: datetime + :param last_updated_before: Required. The time at or before which the run + event was updated in 'ISO 8601' format. + :type last_updated_before: datetime + :param filters: List of filters. + :type filters: list[~azure.mgmt.datafactory.models.RunQueryFilter] + :param order_by: List of OrderBy option. 
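A RestSource sketch for a paginated POST API; body and headers are placeholder values, and the timespans follow the pattern documented above:

    from azure.mgmt.datafactory.models import RestSource

    rest_source = RestSource(
        request_method='POST',
        request_body='{"from": "2019-06-01"}',
        additional_headers='Content-Type: application/json',
        http_request_timeout='00:02:00',   # response timeout, not read timeout
        request_interval='00:00:01',       # pause before each next-page request
    )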
+ :type order_by: list[~azure.mgmt.datafactory.models.RunQueryOrderBy] + """ + + _validation = { + 'last_updated_after': {'required': True}, + 'last_updated_before': {'required': True}, + } + + _attribute_map = { + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + 'last_updated_after': {'key': 'lastUpdatedAfter', 'type': 'iso-8601'}, + 'last_updated_before': {'key': 'lastUpdatedBefore', 'type': 'iso-8601'}, + 'filters': {'key': 'filters', 'type': '[RunQueryFilter]'}, + 'order_by': {'key': 'orderBy', 'type': '[RunQueryOrderBy]'}, + } + + def __init__(self, *, last_updated_after, last_updated_before, continuation_token: str=None, filters=None, order_by=None, **kwargs) -> None: + super(RunFilterParameters, self).__init__(**kwargs) + self.continuation_token = continuation_token + self.last_updated_after = last_updated_after + self.last_updated_before = last_updated_before + self.filters = filters + self.order_by = order_by + + +class RunQueryFilter(Model): + """Query filter option for listing runs. + + All required parameters must be populated in order to send to Azure. + + :param operand: Required. Parameter name to be used for filter. The + allowed operands to query pipeline runs are PipelineName, RunStart, RunEnd + and Status; to query activity runs are ActivityName, ActivityRunStart, + ActivityRunEnd, ActivityType and Status, and to query trigger runs are + TriggerName, TriggerRunTimestamp and Status. Possible values include: + 'PipelineName', 'Status', 'RunStart', 'RunEnd', 'ActivityName', + 'ActivityRunStart', 'ActivityRunEnd', 'ActivityType', 'TriggerName', + 'TriggerRunTimestamp', 'RunGroupId', 'LatestOnly' + :type operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand + :param operator: Required. Operator to be used for filter. Possible values + include: 'Equals', 'NotEquals', 'In', 'NotIn' + :type operator: str or + ~azure.mgmt.datafactory.models.RunQueryFilterOperator + :param values: Required. List of filter values. + :type values: list[str] + """ + + _validation = { + 'operand': {'required': True}, + 'operator': {'required': True}, + 'values': {'required': True}, + } + + _attribute_map = { + 'operand': {'key': 'operand', 'type': 'str'}, + 'operator': {'key': 'operator', 'type': 'str'}, + 'values': {'key': 'values', 'type': '[str]'}, + } + + def __init__(self, *, operand, operator, values, **kwargs) -> None: + super(RunQueryFilter, self).__init__(**kwargs) + self.operand = operand + self.operator = operator + self.values = values + + +class RunQueryOrderBy(Model): + """An object to provide order by options for listing runs. + + All required parameters must be populated in order to send to Azure. + + :param order_by: Required. Parameter name to be used for order by. The + allowed parameters to order by for pipeline runs are PipelineName, + RunStart, RunEnd and Status; for activity runs are ActivityName, + ActivityRunStart, ActivityRunEnd and Status; for trigger runs are + TriggerName, TriggerRunTimestamp and Status. Possible values include: + 'RunStart', 'RunEnd', 'PipelineName', 'Status', 'ActivityName', + 'ActivityRunStart', 'ActivityRunEnd', 'TriggerName', 'TriggerRunTimestamp' + :type order_by: str or ~azure.mgmt.datafactory.models.RunQueryOrderByField + :param order: Required. Sorting order of the parameter. 
Possible values
+ include: 'ASC', 'DESC'
+ :type order: str or ~azure.mgmt.datafactory.models.RunQueryOrder
+ """
+
+ _validation = {
+ 'order_by': {'required': True},
+ 'order': {'required': True},
+ }
+
+ _attribute_map = {
+ 'order_by': {'key': 'orderBy', 'type': 'str'},
+ 'order': {'key': 'order', 'type': 'str'},
+ }
+
+ def __init__(self, *, order_by, order, **kwargs) -> None:
+ super(RunQueryOrderBy, self).__init__(**kwargs)
+ self.order_by = order_by
+ self.order = order
+
+
+class SalesforceLinkedService(LinkedService):
+ """Linked service for Salesforce.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param environment_url: The URL of the Salesforce instance. Default is
+ 'https://login.salesforce.com'. To copy data from a sandbox, specify
+ 'https://test.salesforce.com'. To copy data from a custom domain, specify,
+ for example, 'https://[domain].my.salesforce.com'. Type: string (or
+ Expression with resultType string).
+ :type environment_url: object
+ :param username: The username for Basic authentication of the Salesforce
+ instance. Type: string (or Expression with resultType string).
+ :type username: object
+ :param password: The password for Basic authentication of the Salesforce
+ instance.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param security_token: The security token required to remotely access the
+ Salesforce instance.
+ :type security_token: ~azure.mgmt.datafactory.models.SecretBase
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
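The three run-query models above combine as follows. The client call in the trailing comment assumes a DataFactoryManagementClient named adf_client and placeholder resource names:

    from datetime import datetime, timedelta
    from azure.mgmt.datafactory.models import (
        RunFilterParameters,
        RunQueryFilter,
        RunQueryOrderBy,
    )

    # Failed pipeline runs from the last 24 hours, newest first.
    filter_params = RunFilterParameters(
        last_updated_after=datetime.utcnow() - timedelta(days=1),
        last_updated_before=datetime.utcnow(),
        filters=[RunQueryFilter(operand='Status', operator='Equals', values=['Failed'])],
        order_by=[RunQueryOrderBy(order_by='RunStart', order='DESC')],
    )
    # runs = adf_client.pipeline_runs.query_by_factory('<rg>', '<factory>', filter_params)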
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, environment_url=None, username=None, password=None, security_token=None, encrypted_credential=None, **kwargs) -> None: + super(SalesforceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.environment_url = environment_url + self.username = username + self.password = password + self.security_token = security_token + self.encrypted_credential = encrypted_credential + self.type = 'Salesforce' + + +class SalesforceMarketingCloudLinkedService(LinkedService): + """Salesforce Marketing Cloud linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param client_id: Required. The client ID associated with the Salesforce + Marketing Cloud application. Type: string (or Expression with resultType + string). + :type client_id: object + :param client_secret: The client secret associated with the Salesforce + Marketing Cloud application. Type: string (or Expression with resultType + string). + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. Type: boolean (or + Expression with resultType boolean). + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. 
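A Salesforce linked service sketch against the sandbox URL; credentials are placeholders, and the security token pairs with the password for remote API access:

    from azure.mgmt.datafactory.models import SalesforceLinkedService, SecureString

    sf_ls = SalesforceLinkedService(
        environment_url='https://test.salesforce.com',  # sandbox; default is login.salesforce.com
        username='integration.user@contoso.com',
        password=SecureString(value='<password>'),
        security_token=SecureString(value='<security-token>'),
    )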
Type: + boolean (or Expression with resultType boolean). + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(SalesforceMarketingCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.client_id = client_id + self.client_secret = client_secret + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'SalesforceMarketingCloud' + + +class SalesforceMarketingCloudObjectDataset(Dataset): + """Salesforce Marketing Cloud dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. 
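# --- Editor's note: illustrative sketch only. Datasets reference their linked
# service by name; 'SfmcLinkedService' and the object name are hypothetical.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, SalesforceMarketingCloudObjectDataset)

sfmc_ds = SalesforceMarketingCloudObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='SfmcLinkedService'),
    table_name='Campaign',  # hypothetical Marketing Cloud object
)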
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(SalesforceMarketingCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'SalesforceMarketingCloudObject' + + +class SalesforceMarketingCloudSource(CopySource): + """A copy activity Salesforce Marketing Cloud source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'SalesforceMarketingCloudSource' + + +class SalesforceObjectDataset(Dataset): + """The Salesforce object dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param object_api_name: The Salesforce object API name. Type: string (or + Expression with resultType string). 
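# --- Editor's note: illustrative sketch only. Unlike the Marketing Cloud
# dataset, the Salesforce dataset is keyed by the object API name (custom
# objects carry the '__c' suffix); 'SalesforceLS' is a hypothetical resource.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, SalesforceObjectDataset)

sf_ds = SalesforceObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='SalesforceLS'),
    object_api_name='Account',
)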
+ :type object_api_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, object_api_name=None, **kwargs) -> None: + super(SalesforceObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.object_api_name = object_api_name + self.type = 'SalesforceObject' + + +class SalesforceServiceCloudLinkedService(LinkedService): + """Linked service for Salesforce Service Cloud. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param environment_url: The URL of Salesforce Service Cloud instance. + Default is 'https://login.salesforce.com'. To copy data from sandbox, + specify 'https://test.salesforce.com'. To copy data from custom domain, + specify, for example, 'https://[domain].my.salesforce.com'. Type: string + (or Expression with resultType string). + :type environment_url: object + :param username: The username for Basic authentication of the Salesforce + instance. Type: string (or Expression with resultType string). + :type username: object + :param password: The password for Basic authentication of the Salesforce + instance. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param security_token: The security token is required to remotely access + Salesforce instance. + :type security_token: ~azure.mgmt.datafactory.models.SecretBase + :param extended_properties: Extended properties appended to the connection + string. Type: string (or Expression with resultType string). + :type extended_properties: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
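# --- Editor's note: illustrative sketch only. The Service Cloud variant
# mirrors SalesforceLinkedService but adds extended_properties, appended to
# the connection string; every value here is a hypothetical placeholder.
from azure.mgmt.datafactory.models import (
    SalesforceServiceCloudLinkedService, SecureString)

sf_svc_ls = SalesforceServiceCloudLinkedService(
    environment_url='https://login.salesforce.com',
    username='svc.user@example.com',
    password=SecureString(value='<password>'),
    security_token=SecureString(value='<security-token>'),
    extended_properties='apiversion=46.0',  # hypothetical extra setting
)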
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, + 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, environment_url=None, username=None, password=None, security_token=None, extended_properties=None, encrypted_credential=None, **kwargs) -> None: + super(SalesforceServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.environment_url = environment_url + self.username = username + self.password = password + self.security_token = security_token + self.extended_properties = extended_properties + self.encrypted_credential = encrypted_credential + self.type = 'SalesforceServiceCloud' + + +class SalesforceServiceCloudObjectDataset(Dataset): + """The Salesforce Service Cloud object dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param object_api_name: The Salesforce Service Cloud object API name. + Type: string (or Expression with resultType string). 
+ :type object_api_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, object_api_name=None, **kwargs) -> None: + super(SalesforceServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.object_api_name = object_api_name + self.type = 'SalesforceServiceCloudObject' + + +class SalesforceServiceCloudSink(CopySink): + """A copy activity Salesforce Service Cloud sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param write_behavior: The write behavior for the operation. Default is + Insert. Possible values include: 'Insert', 'Upsert' + :type write_behavior: str or + ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior + :param external_id_field_name: The name of the external ID field for + upsert operation. Default value is 'Id' column. Type: string (or + Expression with resultType string). + :type external_id_field_name: object + :param ignore_null_values: The flag indicating whether or not to ignore + null values from input dataset (except key fields) during write operation. + Default value is false. 
If set it to true, it means ADF will leave the + data in the destination object unchanged when doing upsert/update + operation and insert defined default value when doing insert operation, + versus ADF will update the data in the destination object to NULL when + doing upsert/update operation and insert NULL value when doing insert + operation. Type: boolean (or Expression with resultType boolean). + :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None: + super(SalesforceServiceCloudSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.write_behavior = write_behavior + self.external_id_field_name = external_id_field_name + self.ignore_null_values = ignore_null_values + self.type = 'SalesforceServiceCloudSink' + + +class SalesforceServiceCloudSource(CopySource): + """A copy activity Salesforce Service Cloud source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + :param read_behavior: The read behavior for the operation. Default is + Query. 
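# --- Editor's note: illustrative sketch only. An upsert configuration for the
# SalesforceServiceCloudSink defined above; 'External_Id__c' is a hypothetical
# external-ID field, and ignore_null_values=True keeps existing target values
# when the incoming row carries NULLs.
from azure.mgmt.datafactory.models import SalesforceServiceCloudSink

sf_sink = SalesforceServiceCloudSink(
    write_behavior='Upsert',
    external_id_field_name='External_Id__c',
    ignore_null_values=True,
)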
Possible values include: 'Query', 'QueryAll' + :type read_behavior: str or + ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, read_behavior=None, **kwargs) -> None: + super(SalesforceServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.read_behavior = read_behavior + self.type = 'SalesforceServiceCloudSource' + + +class SalesforceSink(CopySink): + """A copy activity Salesforce sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param write_behavior: The write behavior for the operation. Default is + Insert. Possible values include: 'Insert', 'Upsert' + :type write_behavior: str or + ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior + :param external_id_field_name: The name of the external ID field for + upsert operation. Default value is 'Id' column. Type: string (or + Expression with resultType string). + :type external_id_field_name: object + :param ignore_null_values: The flag indicating whether or not to ignore + null values from input dataset (except key fields) during write operation. + Default value is false. If set it to true, it means ADF will leave the + data in the destination object unchanged when doing upsert/update + operation and insert defined default value when doing insert operation, + versus ADF will update the data in the destination object to NULL when + doing upsert/update operation and insert NULL value when doing insert + operation. Type: boolean (or Expression with resultType boolean). 
+ :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None: + super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.write_behavior = write_behavior + self.external_id_field_name = external_id_field_name + self.ignore_null_values = ignore_null_values + self.type = 'SalesforceSink' + + +class SalesforceSource(CopySource): + """A copy activity Salesforce source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + :param read_behavior: The read behavior for the operation. Default is + Query. 
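# --- Editor's note: illustrative sketch only, wiring the SalesforceSource
# defined here into a copy activity. read_behavior='QueryAll' also returns
# soft-deleted rows; the dataset names and SOQL text are hypothetical.
from azure.mgmt.datafactory.models import (
    CopyActivity, DatasetReference, SalesforceSink, SalesforceSource)

copy_accounts = CopyActivity(
    name='CopySalesforceAccounts',
    inputs=[DatasetReference(reference_name='SalesforceAccountDataset')],
    outputs=[DatasetReference(reference_name='SalesforceAccountArchive')],
    source=SalesforceSource(
        query='SELECT Id, Name FROM Account',
        read_behavior='QueryAll',
    ),
    sink=SalesforceSink(write_behavior='Insert'),
)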
Possible values include: 'Query', 'QueryAll' + :type read_behavior: str or + ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, read_behavior=None, **kwargs) -> None: + super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.read_behavior = read_behavior + self.type = 'SalesforceSource' + + +class SapBwCubeDataset(Dataset): + """The SAP BW cube dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(SapBwCubeDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'SapBwCube' + + +class SapBWLinkedService(LinkedService): + """SAP Business Warehouse Linked Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. Host name of the SAP BW instance. Type: string + (or Expression with resultType string). + :type server: object + :param system_number: Required. System number of the BW system. (Usually a + two-digit decimal number represented as a string.) Type: string (or + Expression with resultType string). + :type system_number: object + :param client_id: Required. Client ID of the client on the BW system. + (Usually a three-digit decimal number represented as a string) Type: + string (or Expression with resultType string). + :type client_id: object + :param user_name: Username to access the SAP BW server. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password to access the SAP BW server. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
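# --- Editor's note: illustrative sketch only. The three required SAP BW type
# properties are expression-capable strings, so plain literals work; the
# host, system number and client below are hypothetical.
from azure.mgmt.datafactory.models import SapBWLinkedService, SecureString

sap_bw_ls = SapBWLinkedService(
    server='sapbw.example.com',
    system_number='00',  # two-digit decimal, passed as a string
    client_id='100',     # three-digit decimal, passed as a string
    user_name='BW_READER',
    password=SecureString(value='<password>'),
)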
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'system_number': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, server, system_number, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(SapBWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.server = server + self.system_number = system_number + self.client_id = client_id + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'SapBW' + + +class SapBwSource(CopySource): + """A copy activity source for SapBW server via MDX. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: MDX query. Type: string (or Expression with resultType + string). 
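# --- Editor's note: illustrative sketch only. SapBwCubeDataset carries no
# extra type properties, so the cube to read is selected by the MDX text on
# the source; both names below are hypothetical.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, SapBwCubeDataset, SapBwSource)

bw_cube = SapBwCubeDataset(
    linked_service_name=LinkedServiceReference(reference_name='SapBWLS'),
)
bw_source = SapBwSource(query='<MDX statement>')  # MDX, not SQL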
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'SapBwSource' + + +class SapCloudForCustomerLinkedService(LinkedService): + """Linked service for SAP Cloud for Customer. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The URL of SAP Cloud for Customer OData API. For + example, '[https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1]'. Type: + string (or Expression with resultType string). + :type url: object + :param username: The username for Basic authentication. Type: string (or + Expression with resultType string). + :type username: object + :param password: The password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Either encryptedCredential or username/password must + be provided. Type: string (or Expression with resultType string). 
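# --- Editor's note: illustrative sketch only. Per the docstring above, either
# username/password or encryptedCredential must be supplied; this sketch uses
# Basic credentials against a hypothetical tenant URL.
from azure.mgmt.datafactory.models import (
    SapCloudForCustomerLinkedService, SecureString)

c4c_ls = SapCloudForCustomerLinkedService(
    url='https://mytenant.crm.ondemand.com/sap/c4c/odata/v1',
    username='C4C_USER',
    password=SecureString(value='<password>'),
)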
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(SapCloudForCustomerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'SapCloudForCustomer' + + +class SapCloudForCustomerResourceDataset(Dataset): + """The path of the SAP Cloud for Customer OData entity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param path: Required. The path of the SAP Cloud for Customer OData + entity. Type: string (or Expression with resultType string). 
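# --- Editor's note: illustrative sketch only. Both linked_service_name and
# path are required on this dataset; 'LeadCollection' is a hypothetical
# C4C OData entity path.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, SapCloudForCustomerResourceDataset)

c4c_ds = SapCloudForCustomerResourceDataset(
    linked_service_name=LinkedServiceReference(reference_name='C4CLinkedService'),
    path='LeadCollection',
)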
+ :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, path, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(SapCloudForCustomerResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.path = path + self.type = 'SapCloudForCustomerResource' + + +class SapCloudForCustomerSink(CopySink): + """A copy activity SAP Cloud for Customer sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param write_behavior: The write behavior for the operation. Default is + 'Insert'. 
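# --- Editor's note: illustrative sketch only. Switching the sink from its
# documented default of 'Insert' to 'Update'; the inherited CopySink
# properties keep their defaults.
from azure.mgmt.datafactory.models import SapCloudForCustomerSink

c4c_sink = SapCloudForCustomerSink(write_behavior='Update')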
Possible values include: 'Insert', 'Update'
+ :type write_behavior: str or
+ ~azure.mgmt.datafactory.models.SapCloudForCustomerSinkWriteBehavior
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+ 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+ 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
+ }
+
+ def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None:
+ super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.write_behavior = write_behavior
+ self.type = 'SapCloudForCustomerSink'
+
+
+class SapCloudForCustomerSource(CopySource):
+ """A copy activity source for SAP Cloud for Customer.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param query: SAP Cloud for Customer OData query. For example, "$top=1".
+ Type: string (or Expression with resultType string).
+ :type query: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'query': {'key': 'query', 'type': 'object'},
+ }
+
+ def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
+ super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.query = query
+ self.type = 'SapCloudForCustomerSource'
+
+
+class SapEccLinkedService(LinkedService):
+ """Linked service for SAP ERP Central Component (SAP ECC).
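# --- Editor's note: illustrative sketch only. Note that, unlike most linked
# services in this module, the SAP ECC type properties are mapped as plain
# strings rather than expression-capable objects; the URL is hypothetical.
from azure.mgmt.datafactory.models import SapEccLinkedService, SecureString

sap_ecc_ls = SapEccLinkedService(
    url='https://sapecc.example.com:8443/sap/opu/odata/sap/zservice/',
    username='ECC_USER',
    password=SecureString(value='<password>'),
)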
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The URL of SAP ECC OData API. For example, + '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: string (or + Expression with resultType string). + :type url: str + :param username: The username for Basic authentication. Type: string (or + Expression with resultType string). + :type username: str + :param password: The password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Either encryptedCredential or username/password must + be provided. Type: string (or Expression with resultType string). + :type encrypted_credential: str + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'str'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + } + + def __init__(self, *, url: str, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, username: str=None, password=None, encrypted_credential: str=None, **kwargs) -> None: + super(SapEccLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'SapEcc' + + +class SapEccResourceDataset(Dataset): + """The path of the SAP ECC OData entity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. 
Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param path: Required. The path of the SAP ECC OData entity. Type: string
+ (or Expression with resultType string).
+ :type path: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ 'path': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'path': {'key': 'typeProperties.path', 'type': 'object'},
+ }
+
+ def __init__(self, *, linked_service_name, path, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None:
+ super(SapEccResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+ self.path = path
+ self.type = 'SapEccResource'
+
+
+class SapEccSource(CopySource):
+ """A copy activity source for SAP ECC.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param query: SAP ECC OData query. For example, "$top=1". Type: string (or
+ Expression with resultType string).
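# --- Editor's note: illustrative sketch only. Pairing the SAP ECC resource
# dataset with a source-side OData query option; the entity path and query
# are hypothetical.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, SapEccResourceDataset, SapEccSource)

ecc_ds = SapEccResourceDataset(
    linked_service_name=LinkedServiceReference(reference_name='SapEccLS'),
    path='Products',
)
ecc_source = SapEccSource(query='$top=100')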
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'SapEccSource' + + +class SapHanaLinkedService(LinkedService): + """SAP HANA Linked Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: SAP HANA ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param server: Required. Host name of the SAP HANA server. Type: string + (or Expression with resultType string). + :type server: object + :param authentication_type: The authentication type to be used to connect + to the SAP HANA server. Possible values include: 'Basic', 'Windows' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.SapHanaAuthenticationType + :param user_name: Username to access the SAP HANA server. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password to access the SAP HANA server. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
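# --- Editor's note: illustrative sketch only. SAP HANA accepts either a full
# ODBC connection string or the discrete properties used here; this sketch
# uses Basic authentication against a hypothetical host.
from azure.mgmt.datafactory.models import SapHanaLinkedService, SecureString

sap_hana_ls = SapHanaLinkedService(
    server='hana.example.com:30015',
    authentication_type='Basic',
    user_name='HANA_READER',
    password=SecureString(value='<password>'),
)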
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'server': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+ 'server': {'key': 'typeProperties.server', 'type': 'object'},
+ 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+ 'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
+ 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, *, server, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None:
+ super(SapHanaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+ self.connection_string = connection_string
+ self.server = server
+ self.authentication_type = authentication_type
+ self.user_name = user_name
+ self.password = password
+ self.encrypted_credential = encrypted_credential
+ self.type = 'SapHana'
+
+
+class SapHanaSource(CopySource):
+ """A copy activity source for SAP HANA source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param query: SAP HANA SQL query. Type: string (or Expression with
+ resultType string).
+ :type query: object
+ :param packet_size: The packet size of data read from SAP HANA. Type:
+ integer (or Expression with resultType integer).
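A construction sketch for the SapHanaLinkedService above (host and credentials are hypothetical; SecureString is the secret wrapper defined later in this module):

```python
from azure.mgmt.datafactory.models import SapHanaLinkedService, SecureString

# Basic authentication against a HANA instance; 'server' is required.
hana_ls = SapHanaLinkedService(
    server='myhanaserver:30015',          # hypothetical host:port
    authentication_type='Basic',
    user_name='myuser',
    password=SecureString(value='<password>'),
)
```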
+ :type packet_size: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'packet_size': {'key': 'packetSize', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, packet_size=None, **kwargs) -> None: + super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.packet_size = packet_size + self.type = 'SapHanaSource' + + +class SapHanaTableDataset(Dataset): + """SAP HANA Table properties. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param sap_hana_table_dataset_schema: The schema name of SAP HANA. Type: + string (or Expression with resultType string). + :type sap_hana_table_dataset_schema: object + :param table: The table name of SAP HANA. Type: string (or Expression with + resultType string). 
+ :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sap_hana_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, sap_hana_table_dataset_schema=None, table=None, **kwargs) -> None: + super(SapHanaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.sap_hana_table_dataset_schema = sap_hana_table_dataset_schema + self.table = table + self.type = 'SapHanaTable' + + +class SapOpenHubLinkedService(LinkedService): + """SAP Business Warehouse Open Hub Destination Linked Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. Host name of the SAP BW instance where the open + hub destination is located. Type: string (or Expression with resultType + string). + :type server: object + :param system_number: Required. System number of the BW system where the + open hub destination is located. (Usually a two-digit decimal number + represented as a string.) Type: string (or Expression with resultType + string). + :type system_number: object + :param client_id: Required. Client ID of the client on the BW system where + the open hub destination is located. (Usually a three-digit decimal number + represented as a string) Type: string (or Expression with resultType + string). + :type client_id: object + :param language: Language of the BW system where the open hub destination + is located. The default value is EN. Type: string (or Expression with + resultType string). + :type language: object + :param user_name: Username to access the SAP BW server where the open hub + destination is located. Type: string (or Expression with resultType + string). + :type user_name: object + :param password: Password to access the SAP BW server where the open hub + destination is located. 
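A usage sketch combining the SAP HANA dataset and source above (names are hypothetical; note that the dataset's schema is exposed as `sap_hana_table_dataset_schema` because `schema` is already taken by the base Dataset):

```python
from azure.mgmt.datafactory.models import (
    LinkedServiceReference,
    SapHanaSource,
    SapHanaTableDataset,
)

dataset = SapHanaTableDataset(
    linked_service_name=LinkedServiceReference(
        reference_name='SapHanaLinkedService'),
    sap_hana_table_dataset_schema='MYSCHEMA',  # maps to typeProperties.schema
    table='MYTABLE',
)

# Either read the table named by the dataset, or push down a SQL query;
# packet_size tunes how much data each read request returns.
source = SapHanaSource(
    query='SELECT * FROM MYSCHEMA.MYTABLE',
    packet_size=2097152,
)
```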
+ :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'system_number': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'language': {'key': 'typeProperties.language', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, server, system_number, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, language=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(SapOpenHubLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.server = server + self.system_number = system_number + self.client_id = client_id + self.language = language + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'SapOpenHub' + + +class SapOpenHubSource(CopySource): + """A copy activity source for SAP Business Warehouse Open Hub Destination + source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param exclude_last_request: Whether to exclude the records of the last + request. The default value is true. Type: boolean (or Expression with + resultType boolean). + :type exclude_last_request: object + :param base_request_id: The ID of request for delta loading. Once it is + set, only data with requestId larger than the value of this property will + be retrieved. The default value is 0. Type: integer (or Expression with + resultType integer ). 
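A construction sketch for the SapOpenHubLinkedService above (all BW system details are hypothetical):

```python
from azure.mgmt.datafactory.models import SapOpenHubLinkedService, SecureString

# server, system_number and client_id are the three required properties:
# a host name, a two-digit system number and a three-digit client ID,
# the latter two passed as strings.
open_hub_ls = SapOpenHubLinkedService(
    server='mybwserver',
    system_number='01',
    client_id='800',
    language='EN',
    user_name='myuser',
    password=SecureString(value='<password>'),
)
```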
+ :type base_request_id: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, + 'base_request_id': {'key': 'baseRequestId', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, exclude_last_request=None, base_request_id=None, **kwargs) -> None: + super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.exclude_last_request = exclude_last_request + self.base_request_id = base_request_id + self.type = 'SapOpenHubSource' + + +class SapOpenHubTableDataset(Dataset): + """Sap Business Warehouse Open Hub Destination Table properties. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param open_hub_destination_name: Required. The name of the Open Hub + Destination with destination type as Database Table. Type: string (or + Expression with resultType string). + :type open_hub_destination_name: object + :param exclude_last_request: Whether to exclude the records of the last + request. The default value is true. Type: boolean (or Expression with + resultType boolean). + :type exclude_last_request: object + :param base_request_id: The ID of request for delta loading. Once it is + set, only data with requestId larger than the value of this property will + be retrieved. The default value is 0. Type: integer (or Expression with + resultType integer ). 
+ :type base_request_id: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'open_hub_destination_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'open_hub_destination_name': {'key': 'typeProperties.openHubDestinationName', 'type': 'object'}, + 'exclude_last_request': {'key': 'typeProperties.excludeLastRequest', 'type': 'object'}, + 'base_request_id': {'key': 'typeProperties.baseRequestId', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, open_hub_destination_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, exclude_last_request=None, base_request_id=None, **kwargs) -> None: + super(SapOpenHubTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.open_hub_destination_name = open_hub_destination_name + self.exclude_last_request = exclude_last_request + self.base_request_id = base_request_id + self.type = 'SapOpenHubTable' + + +class SapTableLinkedService(LinkedService): + """SAP Table Linked Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Host name of the SAP instance where the table is located. + Type: string (or Expression with resultType string). + :type server: object + :param system_number: System number of the SAP system where the table is + located. (Usually a two-digit decimal number represented as a string.) + Type: string (or Expression with resultType string). + :type system_number: object + :param client_id: Client ID of the client on the SAP system where the + table is located. (Usually a three-digit decimal number represented as a + string) Type: string (or Expression with resultType string). + :type client_id: object + :param language: Language of the SAP system where the table is located. + The default value is EN. Type: string (or Expression with resultType + string). + :type language: object + :param system_id: SystemID of the SAP system where the table is located. + Type: string (or Expression with resultType string). 
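A delta-loading sketch for the Open Hub models above (the destination name and request ID are hypothetical):

```python
from azure.mgmt.datafactory.models import (
    LinkedServiceReference,
    SapOpenHubSource,
    SapOpenHubTableDataset,
)

dataset = SapOpenHubTableDataset(
    linked_service_name=LinkedServiceReference(
        reference_name='SapOpenHubLinkedService'),
    open_hub_destination_name='MY_OHD',  # hypothetical Open Hub Destination
)

# Delta load: skip the records of the last (possibly in-flight) request and
# only read requests whose ID is larger than baseRequestId.
source = SapOpenHubSource(exclude_last_request=True, base_request_id=40)
```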
+ :type system_id: object
+ :param user_name: Username to access the SAP server where the table is
+ located. Type: string (or Expression with resultType string).
+ :type user_name: object
+ :param password: Password to access the SAP server where the table is
+ located.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param message_server: The hostname of the SAP Message Server. Type:
+ string (or Expression with resultType string).
+ :type message_server: object
+ :param message_server_service: The service name or port number of the
+ Message Server. Type: string (or Expression with resultType string).
+ :type message_server_service: object
+ :param snc_mode: SNC activation indicator to access the SAP server where
+ the table is located. Must be either 0 (off) or 1 (on). Type: string (or
+ Expression with resultType string).
+ :type snc_mode: object
+ :param snc_my_name: Initiator's SNC name to access the SAP server where
+ the table is located. Type: string (or Expression with resultType string).
+ :type snc_my_name: object
+ :param snc_partner_name: Communication partner's SNC name to access the
+ SAP server where the table is located. Type: string (or Expression with
+ resultType string).
+ :type snc_partner_name: object
+ :param snc_library_path: External security product's library to access the
+ SAP server where the table is located. Type: string (or Expression with
+ resultType string).
+ :type snc_library_path: object
+ :param snc_qop: SNC Quality of Protection. Allowed values include: 1, 2, 3,
+ 8, 9. Type: string (or Expression with resultType string).
+ :type snc_qop: object
+ :param logon_group: The Logon Group for the SAP System. Type: string (or
+ Expression with resultType string).
+ :type logon_group: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'language': {'key': 'typeProperties.language', 'type': 'object'}, + 'system_id': {'key': 'typeProperties.systemId', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'message_server': {'key': 'typeProperties.messageServer', 'type': 'object'}, + 'message_server_service': {'key': 'typeProperties.messageServerService', 'type': 'object'}, + 'snc_mode': {'key': 'typeProperties.sncMode', 'type': 'object'}, + 'snc_my_name': {'key': 'typeProperties.sncMyName', 'type': 'object'}, + 'snc_partner_name': {'key': 'typeProperties.sncPartnerName', 'type': 'object'}, + 'snc_library_path': {'key': 'typeProperties.sncLibraryPath', 'type': 'object'}, + 'snc_qop': {'key': 'typeProperties.sncQop', 'type': 'object'}, + 'logon_group': {'key': 'typeProperties.logonGroup', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, server=None, system_number=None, client_id=None, language=None, system_id=None, user_name=None, password=None, message_server=None, message_server_service=None, snc_mode=None, snc_my_name=None, snc_partner_name=None, snc_library_path=None, snc_qop=None, logon_group=None, encrypted_credential=None, **kwargs) -> None: + super(SapTableLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.server = server + self.system_number = system_number + self.client_id = client_id + self.language = language + self.system_id = system_id + self.user_name = user_name + self.password = password + self.message_server = message_server + self.message_server_service = message_server_service + self.snc_mode = snc_mode + self.snc_my_name = snc_my_name + self.snc_partner_name = snc_partner_name + self.snc_library_path = snc_library_path + self.snc_qop = snc_qop + self.logon_group = logon_group + self.encrypted_credential = encrypted_credential + self.type = 'SapTable' + + +class SapTablePartitionSettings(Model): + """The settings that will be leveraged for SAP table source partitioning. + + :param partition_column_name: The name of the column that will be used for + proceeding range partitioning. Type: string (or Expression with resultType + string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). 
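Two construction sketches for the SapTableLinkedService above (all connection values are hypothetical). Since no type property is individually required, the class supports both a direct application-server connection and a message-server connection with a logon group:

```python
from azure.mgmt.datafactory.models import SapTableLinkedService, SecureString

# Direct connection to a single application server.
sap_table_ls = SapTableLinkedService(
    server='mysapserver',
    system_number='00',
    client_id='100',
    user_name='myuser',
    password=SecureString(value='<password>'),
)

# Alternatively, connect through a message server with a logon group.
sap_table_ls_msg = SapTableLinkedService(
    message_server='mymessageserver',
    message_server_service='3901',
    logon_group='PUBLIC',
    system_id='PRD',
    client_id='100',
    user_name='myuser',
    password=SecureString(value='<password>'),
)
```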
+ :type partition_upper_bound: object
+ :param partition_lower_bound: The minimum value of column specified in
+ partitionColumnName that will be used for proceeding range partitioning.
+ Type: string (or Expression with resultType string).
+ :type partition_lower_bound: object
+ :param max_partitions_number: The maximum number of partitions the table
+ will be split into. Type: integer (or Expression with resultType string).
+ :type max_partitions_number: object
+ """
+
+ _attribute_map = {
+ 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'},
+ 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'},
+ 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'},
+ 'max_partitions_number': {'key': 'maxPartitionsNumber', 'type': 'object'},
+ }
+
+ def __init__(self, *, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, max_partitions_number=None, **kwargs) -> None:
+ super(SapTablePartitionSettings, self).__init__(**kwargs)
+ self.partition_column_name = partition_column_name
+ self.partition_upper_bound = partition_upper_bound
+ self.partition_lower_bound = partition_lower_bound
+ self.max_partitions_number = max_partitions_number
+
+
+class SapTableResourceDataset(Dataset):
+ """SAP Table Resource properties.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset. Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param table_name: Required. The name of the SAP Table. Type: string (or
+ Expression with resultType string).
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'table_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(SapTableResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'SapTableResource' + + +class SapTableSource(CopySource): + """A copy activity source for SAP Table source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param row_count: The number of rows to be retrieved. Type: integer(or + Expression with resultType integer). + :type row_count: object + :param row_skips: The number of rows that will be skipped. Type: integer + (or Expression with resultType integer). + :type row_skips: object + :param rfc_table_fields: The fields of the SAP table that will be + retrieved. For example, column0, column1. Type: string (or Expression with + resultType string). + :type rfc_table_fields: object + :param rfc_table_options: The options for the filtering of the SAP Table. + For example, COLUMN0 EQ SOME VALUE. Type: string (or Expression with + resultType string). + :type rfc_table_options: object + :param batch_size: Specifies the maximum number of rows that will be + retrieved at a time when retrieving data from SAP Table. Type: integer (or + Expression with resultType integer). + :type batch_size: object + :param custom_rfc_read_table_function_module: Specifies the custom RFC + function module that will be used to read data from SAP Table. Type: + string (or Expression with resultType string). + :type custom_rfc_read_table_function_module: object + :param partition_option: The partition mechanism that will be used for SAP + table read in parallel. 
Possible values include: 'None', 'PartitionOnInt', + 'PartitionOnCalendarYear', 'PartitionOnCalendarMonth', + 'PartitionOnCalendarDate', 'PartitionOnTime' + :type partition_option: str or + ~azure.mgmt.datafactory.models.SapTablePartitionOption + :param partition_settings: The settings that will be leveraged for SAP + table source partitioning. + :type partition_settings: + ~azure.mgmt.datafactory.models.SapTablePartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'row_count': {'key': 'rowCount', 'type': 'object'}, + 'row_skips': {'key': 'rowSkips', 'type': 'object'}, + 'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'}, + 'rfc_table_options': {'key': 'rfcTableOptions', 'type': 'object'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, row_count=None, row_skips=None, rfc_table_fields=None, rfc_table_options=None, batch_size=None, custom_rfc_read_table_function_module=None, partition_option=None, partition_settings=None, **kwargs) -> None: + super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.row_count = row_count + self.row_skips = row_skips + self.rfc_table_fields = rfc_table_fields + self.rfc_table_options = rfc_table_options + self.batch_size = batch_size + self.custom_rfc_read_table_function_module = custom_rfc_read_table_function_module + self.partition_option = partition_option + self.partition_settings = partition_settings + self.type = 'SapTableSource' + + +class ScheduleTrigger(MultiplePipelineTrigger): + """Trigger that creates pipeline runs periodically, on schedule. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param pipelines: Pipelines that need to be started. + :type pipelines: + list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :param recurrence: Required. Recurrence schedule configuration. 
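A partitioned-read sketch for the SapTableSource above, using the SapTablePartitionSettings model (column name, bounds and filter are hypothetical):

```python
from azure.mgmt.datafactory.models import (
    SapTablePartitionSettings,
    SapTableSource,
)

# Split the read on a date column into at most 10 parallel partitions.
source = SapTableSource(
    rfc_table_options="BUDAT GE '20190101'",  # hypothetical row filter
    partition_option='PartitionOnCalendarDate',
    partition_settings=SapTablePartitionSettings(
        partition_column_name='BUDAT',
        partition_lower_bound='20190101',
        partition_upper_bound='20191231',
        max_partitions_number=10,
    ),
)
```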
+ :type recurrence: ~azure.mgmt.datafactory.models.ScheduleTriggerRecurrence
+ """
+
+ _validation = {
+ 'runtime_state': {'readonly': True},
+ 'type': {'required': True},
+ 'recurrence': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'runtime_state': {'key': 'runtimeState', 'type': 'str'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'},
+ 'recurrence': {'key': 'typeProperties.recurrence', 'type': 'ScheduleTriggerRecurrence'},
+ }
+
+ def __init__(self, *, recurrence, additional_properties=None, description: str=None, annotations=None, pipelines=None, **kwargs) -> None:
+ super(ScheduleTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs)
+ self.recurrence = recurrence
+ self.type = 'ScheduleTrigger'
+
+
+class ScheduleTriggerRecurrence(Model):
+ """The workflow trigger recurrence.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param frequency: The frequency. Possible values include: 'NotSpecified',
+ 'Minute', 'Hour', 'Day', 'Week', 'Month', 'Year'
+ :type frequency: str or ~azure.mgmt.datafactory.models.RecurrenceFrequency
+ :param interval: The interval.
+ :type interval: int
+ :param start_time: The start time.
+ :type start_time: datetime
+ :param end_time: The end time.
+ :type end_time: datetime
+ :param time_zone: The time zone.
+ :type time_zone: str
+ :param schedule: The recurrence schedule.
+ :type schedule: ~azure.mgmt.datafactory.models.RecurrenceSchedule
+ """
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'frequency': {'key': 'frequency', 'type': 'str'},
+ 'interval': {'key': 'interval', 'type': 'int'},
+ 'start_time': {'key': 'startTime', 'type': 'iso-8601'},
+ 'end_time': {'key': 'endTime', 'type': 'iso-8601'},
+ 'time_zone': {'key': 'timeZone', 'type': 'str'},
+ 'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'},
+ }
+
+ def __init__(self, *, additional_properties=None, frequency=None, interval: int=None, start_time=None, end_time=None, time_zone: str=None, schedule=None, **kwargs) -> None:
+ super(ScheduleTriggerRecurrence, self).__init__(**kwargs)
+ self.additional_properties = additional_properties
+ self.frequency = frequency
+ self.interval = interval
+ self.start_time = start_time
+ self.end_time = end_time
+ self.time_zone = time_zone
+ self.schedule = schedule
+
+
+class ScriptAction(Model):
+ """Custom script action to run on the HDInsight on-demand cluster once it's
+ up.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param name: Required. The user-provided name of the script action.
+ :type name: str
+ :param uri: Required. The URI for the script action.
+ :type uri: str
+ :param roles: Required. The node types on which the script action should
+ be executed.
+ :type roles: object
+ :param parameters: The parameters for the script action.
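A sketch wiring the ScheduleTrigger and ScheduleTriggerRecurrence above to a pipeline (pipeline name and parameters are hypothetical; PipelineReference is assumed to be the plain name-reference model from elsewhere in this package):

```python
from datetime import datetime

from azure.mgmt.datafactory.models import (
    PipelineReference,
    ScheduleTrigger,
    ScheduleTriggerRecurrence,
    TriggerPipelineReference,
)

# Run 'MyPipeline' every 15 minutes from a fixed UTC start time.
trigger = ScheduleTrigger(
    recurrence=ScheduleTriggerRecurrence(
        frequency='Minute',
        interval=15,
        start_time=datetime(2019, 6, 1, 0, 0, 0),
        time_zone='UTC',
    ),
    pipelines=[TriggerPipelineReference(
        pipeline_reference=PipelineReference(reference_name='MyPipeline'),
        parameters={'windowSize': '15m'},
    )],
)
```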
+ :type parameters: str + """ + + _validation = { + 'name': {'required': True}, + 'uri': {'required': True}, + 'roles': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'uri': {'key': 'uri', 'type': 'str'}, + 'roles': {'key': 'roles', 'type': 'object'}, + 'parameters': {'key': 'parameters', 'type': 'str'}, + } + + def __init__(self, *, name: str, uri: str, roles, parameters: str=None, **kwargs) -> None: + super(ScriptAction, self).__init__(**kwargs) + self.name = name + self.uri = uri + self.roles = roles + self.parameters = parameters + + +class SecureString(SecretBase): + """Azure Data Factory secure string definition. The string value will be + masked with asterisks '*' during Get or List API calls. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param value: Required. Value of secure string. + :type value: str + """ + + _validation = { + 'type': {'required': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__(self, *, value: str, **kwargs) -> None: + super(SecureString, self).__init__(**kwargs) + self.value = value + self.type = 'SecureString' + + +class SelfDependencyTumblingWindowTriggerReference(DependencyReference): + """Self referenced tumbling window trigger dependency. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param offset: Required. Timespan applied to the start time of a tumbling + window when evaluating dependency. + :type offset: str + :param size: The size of the window when evaluating the dependency. If + undefined the frequency of the tumbling window will be used. + :type size: str + """ + + _validation = { + 'type': {'required': True}, + 'offset': {'required': True, 'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'offset': {'key': 'offset', 'type': 'str'}, + 'size': {'key': 'size', 'type': 'str'}, + } + + def __init__(self, *, offset: str, size: str=None, **kwargs) -> None: + super(SelfDependencyTumblingWindowTriggerReference, self).__init__(**kwargs) + self.offset = offset + self.size = size + self.type = 'SelfDependencyTumblingWindowTriggerReference' + + +class SelfHostedIntegrationRuntime(IntegrationRuntime): + """Self-hosted integration runtime. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Integration runtime description. + :type description: str + :param type: Required. Constant filled by server. 
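Construction sketches for ScriptAction and SecureString above (script name, URI and secret are hypothetical):

```python
from azure.mgmt.datafactory.models import ScriptAction, SecureString

# Script action executed on the on-demand HDInsight cluster after start-up;
# 'roles' names the node types the script runs on.
action = ScriptAction(
    name='installPackages',
    uri='https://mystorage.blob.core.windows.net/scripts/install.sh',
    roles='workernode',
    parameters='-v 1.0',
)

# SecureString wraps a secret value; the service masks it with asterisks
# on Get/List calls.
secret = SecureString(value='<password>')
```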
+ :type type: str + :param linked_info: + :type linked_info: + ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeType + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_info': {'key': 'typeProperties.linkedInfo', 'type': 'LinkedIntegrationRuntimeType'}, + } + + def __init__(self, *, additional_properties=None, description: str=None, linked_info=None, **kwargs) -> None: + super(SelfHostedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description, **kwargs) + self.linked_info = linked_info + self.type = 'SelfHosted' + + +class SelfHostedIntegrationRuntimeNode(Model): + """Properties of Self-hosted integration runtime node. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar node_name: Name of the integration runtime node. + :vartype node_name: str + :ivar machine_name: Machine name of the integration runtime node. + :vartype machine_name: str + :ivar host_service_uri: URI for the host machine of the integration + runtime. + :vartype host_service_uri: str + :ivar status: Status of the integration runtime node. Possible values + include: 'NeedRegistration', 'Online', 'Limited', 'Offline', 'Upgrading', + 'Initializing', 'InitializeFailed' + :vartype status: str or + ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNodeStatus + :ivar capabilities: The integration runtime capabilities dictionary + :vartype capabilities: dict[str, str] + :ivar version_status: Status of the integration runtime node version. + :vartype version_status: str + :ivar version: Version of the integration runtime node. + :vartype version: str + :ivar register_time: The time at which the integration runtime node was + registered in ISO8601 format. + :vartype register_time: datetime + :ivar last_connect_time: The most recent time at which the integration + runtime was connected in ISO8601 format. + :vartype last_connect_time: datetime + :ivar expiry_time: The time at which the integration runtime will expire + in ISO8601 format. + :vartype expiry_time: datetime + :ivar last_start_time: The time the node last started up. + :vartype last_start_time: datetime + :ivar last_stop_time: The integration runtime node last stop time. + :vartype last_stop_time: datetime + :ivar last_update_result: The result of the last integration runtime node + update. Possible values include: 'None', 'Succeed', 'Fail' + :vartype last_update_result: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeUpdateResult + :ivar last_start_update_time: The last time for the integration runtime + node update start. + :vartype last_start_update_time: datetime + :ivar last_end_update_time: The last time for the integration runtime node + update end. + :vartype last_end_update_time: datetime + :ivar is_active_dispatcher: Indicates whether this node is the active + dispatcher for integration runtime requests. + :vartype is_active_dispatcher: bool + :ivar concurrent_jobs_limit: Maximum concurrent jobs on the integration + runtime node. + :vartype concurrent_jobs_limit: int + :ivar max_concurrent_jobs: The maximum concurrent jobs in this integration + runtime. 
+ :vartype max_concurrent_jobs: int + """ + + _validation = { + 'node_name': {'readonly': True}, + 'machine_name': {'readonly': True}, + 'host_service_uri': {'readonly': True}, + 'status': {'readonly': True}, + 'capabilities': {'readonly': True}, + 'version_status': {'readonly': True}, + 'version': {'readonly': True}, + 'register_time': {'readonly': True}, + 'last_connect_time': {'readonly': True}, + 'expiry_time': {'readonly': True}, + 'last_start_time': {'readonly': True}, + 'last_stop_time': {'readonly': True}, + 'last_update_result': {'readonly': True}, + 'last_start_update_time': {'readonly': True}, + 'last_end_update_time': {'readonly': True}, + 'is_active_dispatcher': {'readonly': True}, + 'concurrent_jobs_limit': {'readonly': True}, + 'max_concurrent_jobs': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'node_name': {'key': 'nodeName', 'type': 'str'}, + 'machine_name': {'key': 'machineName', 'type': 'str'}, + 'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'capabilities': {'key': 'capabilities', 'type': '{str}'}, + 'version_status': {'key': 'versionStatus', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + 'register_time': {'key': 'registerTime', 'type': 'iso-8601'}, + 'last_connect_time': {'key': 'lastConnectTime', 'type': 'iso-8601'}, + 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, + 'last_start_time': {'key': 'lastStartTime', 'type': 'iso-8601'}, + 'last_stop_time': {'key': 'lastStopTime', 'type': 'iso-8601'}, + 'last_update_result': {'key': 'lastUpdateResult', 'type': 'str'}, + 'last_start_update_time': {'key': 'lastStartUpdateTime', 'type': 'iso-8601'}, + 'last_end_update_time': {'key': 'lastEndUpdateTime', 'type': 'iso-8601'}, + 'is_active_dispatcher': {'key': 'isActiveDispatcher', 'type': 'bool'}, + 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, + 'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'}, + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(SelfHostedIntegrationRuntimeNode, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.node_name = None + self.machine_name = None + self.host_service_uri = None + self.status = None + self.capabilities = None + self.version_status = None + self.version = None + self.register_time = None + self.last_connect_time = None + self.expiry_time = None + self.last_start_time = None + self.last_stop_time = None + self.last_update_result = None + self.last_start_update_time = None + self.last_end_update_time = None + self.is_active_dispatcher = None + self.concurrent_jobs_limit = None + self.max_concurrent_jobs = None + + +class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): + """Self-hosted integration runtime status. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar data_factory_name: The data factory name which the integration + runtime belong to. + :vartype data_factory_name: str + :ivar state: The state of integration runtime. 
Possible values include:
+ 'Initial', 'Stopped', 'Started', 'Starting', 'Stopping',
+ 'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied'
+ :vartype state: str or
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeState
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :ivar create_time: The time at which the integration runtime was created,
+ in ISO8601 format.
+ :vartype create_time: datetime
+ :ivar task_queue_id: The task queue ID of the integration runtime.
+ :vartype task_queue_id: str
+ :ivar internal_channel_encryption: Used to set the encryption mode for the
+ node-to-node communication channel (when more than two self-hosted
+ integration runtime nodes exist). Possible values include: 'NotSet',
+ 'SslEncrypted', 'NotEncrypted'
+ :vartype internal_channel_encryption: str or
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeInternalChannelEncryptionMode
+ :ivar version: Version of the integration runtime.
+ :vartype version: str
+ :param nodes: The list of nodes for this integration runtime.
+ :type nodes:
+ list[~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode]
+ :ivar scheduled_update_date: The date on which the integration runtime is
+ scheduled to update, in ISO8601 format.
+ :vartype scheduled_update_date: datetime
+ :ivar update_delay_offset: The time of day, on the scheduled update date,
+ at which the service will update the integration runtime, e.g., PT03H is 3
+ hours.
+ :vartype update_delay_offset: str
+ :ivar local_time_zone_offset: The local time zone offset in hours.
+ :vartype local_time_zone_offset: str
+ :ivar capabilities: Object with additional information about integration
+ runtime capabilities.
+ :vartype capabilities: dict[str, str]
+ :ivar service_urls: The URLs of the services used by the integration
+ runtime backend service.
+ :vartype service_urls: list[str]
+ :ivar auto_update: Whether auto-update has been turned on for the
+ self-hosted integration runtime. Possible values include: 'On', 'Off'
+ :vartype auto_update: str or
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate
+ :ivar version_status: Status of the integration runtime version.
+ :vartype version_status: str
+ :param links: The list of linked integration runtimes that are created to
+ share with this integration runtime.
+ :type links: list[~azure.mgmt.datafactory.models.LinkedIntegrationRuntime]
+ :ivar pushed_version: The version that the integration runtime is going to
+ update to.
+ :vartype pushed_version: str
+ :ivar latest_version: The latest version on download center.
+ :vartype latest_version: str
+ :ivar auto_update_eta: The estimated time when the self-hosted integration
+ runtime will be updated.
+ :vartype auto_update_eta: datetime + """ + + _validation = { + 'data_factory_name': {'readonly': True}, + 'state': {'readonly': True}, + 'type': {'required': True}, + 'create_time': {'readonly': True}, + 'task_queue_id': {'readonly': True}, + 'internal_channel_encryption': {'readonly': True}, + 'version': {'readonly': True}, + 'scheduled_update_date': {'readonly': True}, + 'update_delay_offset': {'readonly': True}, + 'local_time_zone_offset': {'readonly': True}, + 'capabilities': {'readonly': True}, + 'service_urls': {'readonly': True}, + 'auto_update': {'readonly': True}, + 'version_status': {'readonly': True}, + 'pushed_version': {'readonly': True}, + 'latest_version': {'readonly': True}, + 'auto_update_eta': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, + 'state': {'key': 'state', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'create_time': {'key': 'typeProperties.createTime', 'type': 'iso-8601'}, + 'task_queue_id': {'key': 'typeProperties.taskQueueId', 'type': 'str'}, + 'internal_channel_encryption': {'key': 'typeProperties.internalChannelEncryption', 'type': 'str'}, + 'version': {'key': 'typeProperties.version', 'type': 'str'}, + 'nodes': {'key': 'typeProperties.nodes', 'type': '[SelfHostedIntegrationRuntimeNode]'}, + 'scheduled_update_date': {'key': 'typeProperties.scheduledUpdateDate', 'type': 'iso-8601'}, + 'update_delay_offset': {'key': 'typeProperties.updateDelayOffset', 'type': 'str'}, + 'local_time_zone_offset': {'key': 'typeProperties.localTimeZoneOffset', 'type': 'str'}, + 'capabilities': {'key': 'typeProperties.capabilities', 'type': '{str}'}, + 'service_urls': {'key': 'typeProperties.serviceUrls', 'type': '[str]'}, + 'auto_update': {'key': 'typeProperties.autoUpdate', 'type': 'str'}, + 'version_status': {'key': 'typeProperties.versionStatus', 'type': 'str'}, + 'links': {'key': 'typeProperties.links', 'type': '[LinkedIntegrationRuntime]'}, + 'pushed_version': {'key': 'typeProperties.pushedVersion', 'type': 'str'}, + 'latest_version': {'key': 'typeProperties.latestVersion', 'type': 'str'}, + 'auto_update_eta': {'key': 'typeProperties.autoUpdateETA', 'type': 'iso-8601'}, + } + + def __init__(self, *, additional_properties=None, nodes=None, links=None, **kwargs) -> None: + super(SelfHostedIntegrationRuntimeStatus, self).__init__(additional_properties=additional_properties, **kwargs) + self.create_time = None + self.task_queue_id = None + self.internal_channel_encryption = None + self.version = None + self.nodes = nodes + self.scheduled_update_date = None + self.update_delay_offset = None + self.local_time_zone_offset = None + self.capabilities = None + self.service_urls = None + self.auto_update = None + self.version_status = None + self.links = links + self.pushed_version = None + self.latest_version = None + self.auto_update_eta = None + self.type = 'SelfHosted' + + +class ServiceNowLinkedService(LinkedService): + """ServiceNow server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
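A read-side sketch for the two status models above; since nearly every field is server-populated and read-only, they are consumed rather than constructed. This assumes the management client from this package, its `integration_runtimes.get_status` operation, and a 2019-era service-principal credential flow; all resource names are hypothetical:

```python
from azure.common.credentials import ServicePrincipalCredentials
from azure.mgmt.datafactory import DataFactoryManagementClient

credentials = ServicePrincipalCredentials(
    client_id='<appId>', secret='<secret>', tenant='<tenantId>')
client = DataFactoryManagementClient(credentials, '<subscriptionId>')

# For a self-hosted IR, the response properties deserialize to
# SelfHostedIntegrationRuntimeStatus.
response = client.integration_runtimes.get_status(
    'myResourceGroup', 'myFactory', 'mySelfHostedIr')
status = response.properties

print(status.state, status.version, status.auto_update)
for node in status.nodes or []:
    # Node fields are read-only, populated by the service.
    print(node.node_name, node.status, node.is_active_dispatcher)
```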
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param endpoint: Required. The endpoint of the ServiceNow server. (i.e.
+ <instance>.service-now.com)
+ :type endpoint: object
+ :param authentication_type: Required. The authentication type to use.
+ Possible values include: 'Basic', 'OAuth2'
+ :type authentication_type: str or
+ ~azure.mgmt.datafactory.models.ServiceNowAuthenticationType
+ :param username: The user name used to connect to the ServiceNow server
+ for Basic and OAuth2 authentication.
+ :type username: object
+ :param password: The password corresponding to the user name for Basic and
+ OAuth2 authentication.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param client_id: The client ID for OAuth2 authentication.
+ :type client_id: object
+ :param client_secret: The client secret for OAuth2 authentication.
+ :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
+ :param use_encrypted_endpoints: Specifies whether the data source
+ endpoints are encrypted using HTTPS. The default value is true.
+ :type use_encrypted_endpoints: object
+ :param use_host_verification: Specifies whether to require the host name
+ in the server's certificate to match the host name of the server when
+ connecting over SSL. The default value is true.
+ :type use_host_verification: object
+ :param use_peer_verification: Specifies whether to verify the identity of
+ the server when connecting over SSL. The default value is true.
+ :type use_peer_verification: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, endpoint, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, username=None, password=None, client_id=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(ServiceNowLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.endpoint = endpoint + self.authentication_type = authentication_type + self.username = username + self.password = password + self.client_id = client_id + self.client_secret = client_secret + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'ServiceNow' + + +class ServiceNowObjectDataset(Dataset): + """ServiceNow server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. 
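A construction sketch for the ServiceNowLinkedService above (instance name and credentials are hypothetical):

```python
from azure.mgmt.datafactory.models import ServiceNowLinkedService, SecureString

# Basic authentication; endpoint and authentication_type are required.
servicenow_ls = ServiceNowLinkedService(
    endpoint='myinstance.service-now.com',
    authentication_type='Basic',
    username='myuser',
    password=SecureString(value='<password>'),
)
```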
+ :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(ServiceNowObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'ServiceNowObject' + + +class ServiceNowSource(CopySource): + """A copy activity ServiceNow server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'ServiceNowSource' + + +class SetVariableActivity(ControlActivity): + """Set value for a Variable. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param variable_name: Name of the variable whose value needs to be set. + :type variable_name: str + :param value: Value to be set. Could be a static value or Expression + :type value: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, + 'value': {'key': 'typeProperties.value', 'type': 'object'}, + } + + def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, variable_name: str=None, value=None, **kwargs) -> None: + super(SetVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.variable_name = variable_name + self.value = value + self.type = 'SetVariable' + + +class SftpLocation(DatasetLocation): + """The location of SFTP dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. 
Type: string (or Expression with resultType string).
+    :type file_name: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'folder_path': {'key': 'folderPath', 'type': 'object'},
+        'file_name': {'key': 'fileName', 'type': 'object'},
+    }
+
+    def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None:
+        super(SftpLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs)
+
+
+class SftpReadSettings(StoreReadSettings):
+    """SFTP read settings.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. The read setting type.
+    :type type: str
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param recursive: If true, files under the folder path will be read
+     recursively. Default is true. Type: boolean (or Expression with resultType
+     boolean).
+    :type recursive: object
+    :param wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or
+     Expression with resultType string).
+    :type wildcard_folder_path: object
+    :param wildcard_file_name: Sftp wildcardFileName. Type: string (or
+     Expression with resultType string).
+    :type wildcard_file_name: object
+    :param modified_datetime_start: The start of file's modified datetime.
+     Type: string (or Expression with resultType string).
+    :type modified_datetime_start: object
+    :param modified_datetime_end: The end of file's modified datetime. Type:
+     string (or Expression with resultType string).
+    :type modified_datetime_end: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'recursive': {'key': 'recursive', 'type': 'object'},
+        'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
+        'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
+        'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'},
+        'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'},
+    }
+
+    def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None:
+        super(SftpReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.recursive = recursive
+        self.wildcard_folder_path = wildcard_folder_path
+        self.wildcard_file_name = wildcard_file_name
+        self.modified_datetime_start = modified_datetime_start
+        self.modified_datetime_end = modified_datetime_end
+
+
+class SftpServerLinkedService(LinkedService):
+    """A linked service for an SSH File Transfer Protocol (SFTP) server.
+
+    All required parameters must be populated in order to send to Azure.
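+
+    Example: a minimal construction sketch for SshPublicKey authentication.
+    The host, user name, key content and fingerprint below are illustrative
+    placeholder values, not defaults::
+
+        sftp_service = SftpServerLinkedService(
+            host='sftp.example.com',
+            authentication_type='SshPublicKey',
+            user_name='loader',
+            private_key_content=SecureString(value='<base64 OpenSSH key>'),
+            host_key_fingerprint='<server host key fingerprint>')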
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection.
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param host: Required. The SFTP server host name. Type: string (or
+     Expression with resultType string).
+    :type host: object
+    :param port: The TCP port number that the SFTP server uses to listen for
+     client connections. Default value is 22. Type: integer (or Expression with
+     resultType integer), minimum: 0.
+    :type port: object
+    :param authentication_type: The authentication type to be used to connect
+     to the SFTP server. Possible values include: 'Basic', 'SshPublicKey'
+    :type authentication_type: str or
+     ~azure.mgmt.datafactory.models.SftpAuthenticationType
+    :param user_name: The username used to log on to the SFTP server. Type:
+     string (or Expression with resultType string).
+    :type user_name: object
+    :param password: Password to log on to the SFTP server for Basic
+     authentication.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    :param private_key_path: The SSH private key file path for SshPublicKey
+     authentication. Only valid for on-premises copy. For on-premises copy with
+     SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent
+     should be specified. The SSH private key should be in OpenSSH format.
+     Type: string (or Expression with resultType string).
+    :type private_key_path: object
+    :param private_key_content: Base64 encoded SSH private key content for
+     SshPublicKey authentication. For on-premises copy with SshPublicKey
+     authentication, either PrivateKeyPath or PrivateKeyContent should be
+     specified. The SSH private key should be in OpenSSH format.
+    :type private_key_content: ~azure.mgmt.datafactory.models.SecretBase
+    :param pass_phrase: The password to decrypt the SSH private key if the SSH
+     private key is encrypted.
+    :type pass_phrase: ~azure.mgmt.datafactory.models.SecretBase
+    :param skip_host_key_validation: If true, skip the SSH host key
+     validation. Default value is false. Type: boolean (or Expression with
+     resultType boolean).
+    :type skip_host_key_validation: object
+    :param host_key_fingerprint: The host key fingerprint of the SFTP server.
+     When SkipHostKeyValidation is false, HostKeyFingerprint should be
+     specified. Type: string (or Expression with resultType string).
+    :type host_key_fingerprint: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'host': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'host': {'key': 'typeProperties.host', 'type': 'object'},
+        'port': {'key': 'typeProperties.port', 'type': 'object'},
+        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+        'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+        'private_key_path': {'key': 'typeProperties.privateKeyPath', 'type': 'object'},
+        'private_key_content': {'key': 'typeProperties.privateKeyContent', 'type': 'SecretBase'},
+        'pass_phrase': {'key': 'typeProperties.passPhrase', 'type': 'SecretBase'},
+        'skip_host_key_validation': {'key': 'typeProperties.skipHostKeyValidation', 'type': 'object'},
+        'host_key_fingerprint': {'key': 'typeProperties.hostKeyFingerprint', 'type': 'object'},
+    }
+
+    def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, private_key_path=None, private_key_content=None, pass_phrase=None, skip_host_key_validation=None, host_key_fingerprint=None, **kwargs) -> None:
+        super(SftpServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+        self.host = host
+        self.port = port
+        self.authentication_type = authentication_type
+        self.user_name = user_name
+        self.password = password
+        self.encrypted_credential = encrypted_credential
+        self.private_key_path = private_key_path
+        self.private_key_content = private_key_content
+        self.pass_phrase = pass_phrase
+        self.skip_host_key_validation = skip_host_key_validation
+        self.host_key_fingerprint = host_key_fingerprint
+        self.type = 'Sftp'
+
+
+class ShopifyLinkedService(LinkedService):
+    """Shopify Service linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection.
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param host: Required. The endpoint of the Shopify server. (i.e.
+     mystore.myshopify.com)
+    :type host: object
+    :param access_token: The API access token that can be used to access
+     Shopify’s data. The token won't expire if it is in offline mode.
+ :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(ShopifyLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.access_token = access_token + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Shopify' + + +class ShopifyObjectDataset(Dataset): + """Shopify Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(ShopifyObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'ShopifyObject' + + +class ShopifySource(CopySource): + """A copy activity Shopify Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'ShopifySource' + + +class SparkLinkedService(LinkedService): + """Spark Server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. IP address or host name of the Spark server + :type host: object + :param port: Required. The TCP port that the Spark server uses to listen + for client connections. + :type port: object + :param server_type: The type of Spark server. Possible values include: + 'SharkServer', 'SharkServer2', 'SparkThriftServer' + :type server_type: str or ~azure.mgmt.datafactory.models.SparkServerType + :param thrift_transport_protocol: The transport protocol to use in the + Thrift layer. Possible values include: 'Binary', 'SASL', 'HTTP ' + :type thrift_transport_protocol: str or + ~azure.mgmt.datafactory.models.SparkThriftTransportProtocol + :param authentication_type: Required. The authentication method used to + access the Spark server. Possible values include: 'Anonymous', 'Username', + 'UsernameAndPassword', 'WindowsAzureHDInsightService' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.SparkAuthenticationType + :param username: The user name that you use to access Spark Server. + :type username: object + :param password: The password corresponding to the user name that you + provided in the Username field + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param http_path: The partial URL corresponding to the Spark server. + :type http_path: object + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. 
+ :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a + CA-issued SSL certificate name to match the host name of the server when + connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'port': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, + 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, port, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, server_type=None, thrift_transport_protocol=None, username=None, password=None, http_path=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: + super(SparkLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.port = port + self.server_type = server_type + self.thrift_transport_protocol = thrift_transport_protocol + self.authentication_type = authentication_type + self.username = username + self.password = password + self.http_path = http_path + self.enable_ssl = enable_ssl + 
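# The next four assignments are trust settings that only take effect
+        # when enable_ssl is true: a custom PEM bundle versus the system
+        # trust store, and tolerance for host-name CN mismatches and
+        # self-signed server certificates.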
+        self.trusted_cert_path = trusted_cert_path
+        self.use_system_trust_store = use_system_trust_store
+        self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch
+        self.allow_self_signed_server_cert = allow_self_signed_server_cert
+        self.encrypted_credential = encrypted_credential
+        self.type = 'Spark'
+
+
+class SparkObjectDataset(Dataset):
+    """Spark Server dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection.
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param table_name: This property will be retired. Please consider using
+     schema + table properties instead.
+    :type table_name: object
+    :param table: The table name of the Spark server. Type: string (or
+     Expression with resultType string).
+    :type table: object
+    :param spark_object_dataset_schema: The schema name of the Spark server.
+     Type: string (or Expression with resultType string).
+ :type spark_object_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'spark_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, spark_object_dataset_schema=None, **kwargs) -> None: + super(SparkObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.table = table + self.spark_object_dataset_schema = spark_object_dataset_schema + self.type = 'SparkObject' + + +class SparkSource(CopySource): + """A copy activity Spark Server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'SparkSource' + + +class SqlDWSink(CopySink): + """A copy activity SQL Data Warehouse sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param allow_poly_base: Indicates to use PolyBase to copy data into SQL + Data Warehouse when applicable. Type: boolean (or Expression with + resultType boolean). + :type allow_poly_base: object + :param poly_base_settings: Specifies PolyBase-related settings when + allowPolyBase is true. 
+ :type poly_base_settings: ~azure.mgmt.datafactory.models.PolybaseSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, + 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, allow_poly_base=None, poly_base_settings=None, **kwargs) -> None: + super(SqlDWSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.pre_copy_script = pre_copy_script + self.allow_poly_base = allow_poly_base + self.poly_base_settings = poly_base_settings + self.type = 'SqlDWSink' + + +class SqlDWSource(CopySource): + """A copy activity SQL Data Warehouse source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or + Expression with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Data Warehouse source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + Type: object (or Expression with resultType object), itemType: + StoredProcedureParameter. 
+ :type stored_procedure_parameters: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: + super(SqlDWSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.type = 'SqlDWSource' + + +class SqlMISink(CopySink): + """A copy activity Azure SQL Managed Instance sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). 
+    :type stored_procedure_table_type_parameter_name: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
+        'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
+        'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
+        'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
+        'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None:
+        super(SqlMISink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name
+        self.sql_writer_table_type = sql_writer_table_type
+        self.pre_copy_script = pre_copy_script
+        self.stored_procedure_parameters = stored_procedure_parameters
+        self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name
+        self.type = 'SqlMISink'
+
+
+class SqlMISource(CopySource):
+    """A copy activity Azure SQL Managed Instance source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection.
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param sql_reader_query: SQL reader query. Type: string (or Expression
+     with resultType string).
+    :type sql_reader_query: object
+    :param sql_reader_stored_procedure_name: Name of the stored procedure for
+     an Azure SQL Managed Instance source. This cannot be used at the same time
+     as SqlReaderQuery. Type: string (or Expression with resultType string).
+    :type sql_reader_stored_procedure_name: object
+    :param stored_procedure_parameters: Value and type setting for stored
+     procedure parameters.
Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. + :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: + super(SqlMISource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.produce_additional_types = produce_additional_types + self.type = 'SqlMISource' + + +class SqlServerLinkedService(LinkedService): + """SQL Server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param user_name: The on-premises Windows authentication user name. Type: + string (or Expression with resultType string). + :type user_name: object + :param password: The on-premises Windows authentication password. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(SqlServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'SqlServer' + + +class SqlServerSink(CopySink): + """A copy activity SQL server sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). 
+ :type stored_procedure_table_type_parameter_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None: + super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name + self.sql_writer_table_type = sql_writer_table_type + self.pre_copy_script = pre_copy_script + self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.type = 'SqlServerSink' + + +class SqlServerSource(CopySource): + """A copy activity SQL server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Database source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. 
Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. + :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: + super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.produce_additional_types = produce_additional_types + self.type = 'SqlServerSource' + + +class SqlServerStoredProcedureActivity(ExecutionActivity): + """SQL stored procedure activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param stored_procedure_name: Required. Stored procedure name. Type: + string (or Expression with resultType string). + :type stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". 
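A hedged sketch of the copy pattern these two classes support: a SqlServerSource reading via a stored procedure, mirroring the parameter example above, feeding a SqlServerSink inside a CopyActivity. The dataset and procedure names are illustrative.

    from azure.mgmt.datafactory.models import (
        CopyActivity, DatasetReference, SqlServerSink, SqlServerSource,
        StoredProcedureParameter)

    source = SqlServerSource(
        sql_reader_stored_procedure_name='[dbo].[uspGetOrders]',
        stored_procedure_parameters={
            'Region': StoredProcedureParameter(value='EMEA', type='String')})
    # Clear the staging table before each run via pre_copy_script.
    sink = SqlServerSink(pre_copy_script='TRUNCATE TABLE [dbo].[OrdersStage]')

    copy = CopyActivity(
        name='CopyOrders',
        inputs=[DatasetReference(reference_name='SqlServerInputDataset')],
        outputs=[DatasetReference(reference_name='SqlServerOutputDataset')],
        source=source,
        sink=sink)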
+ :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'stored_procedure_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'stored_procedure_name': {'key': 'typeProperties.storedProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'typeProperties.storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + } + + def __init__(self, *, name: str, stored_procedure_name, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, stored_procedure_parameters=None, **kwargs) -> None: + super(SqlServerStoredProcedureActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.stored_procedure_name = stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.type = 'SqlServerStoredProcedure' + + +class SqlServerTableDataset(Dataset): + """The on-premises SQL Server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param sql_server_table_dataset_schema: The schema name of the SQL Server + dataset. Type: string (or Expression with resultType string). + :type sql_server_table_dataset_schema: object + :param table: The table name of the SQL Server dataset. Type: string (or + Expression with resultType string). 
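A minimal sketch of invoking a stored procedure through this activity; the linked service and procedure names are illustrative.

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, SqlServerStoredProcedureActivity,
        StoredProcedureParameter)

    sp_activity = SqlServerStoredProcedureActivity(
        name='RefreshAggregates',
        linked_service_name=LinkedServiceReference(
            reference_name='SqlServerLinkedService1'),
        stored_procedure_name='[dbo].[uspRefreshAggregates]',
        stored_procedure_parameters={
            'BatchId': StoredProcedureParameter(value='1', type='Int')})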
+ :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'sql_server_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, sql_server_table_dataset_schema=None, table=None, **kwargs) -> None: + super(SqlServerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.sql_server_table_dataset_schema = sql_server_table_dataset_schema + self.table = table + self.type = 'SqlServerTable' + + +class SqlSink(CopySink): + """A copy activity SQL sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. 
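Since table_name is being retired in favor of the split schema and table properties, a sketch of the preferred form (names illustrative):

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, SqlServerTableDataset)

    orders_ds = SqlServerTableDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='SqlServerLinkedService1'),
        sql_server_table_dataset_schema='dbo',  # maps to typeProperties.schema
        table='Orders')                         # maps to typeProperties.table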
+ :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None: + super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name + self.sql_writer_table_type = sql_writer_table_type + self.pre_copy_script = pre_copy_script + self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.type = 'SqlSink' + + +class SqlSource(CopySource): + """A copy activity SQL source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Database source. 
This cannot be used at the same time as
+ SqlReaderQuery. Type: string (or Expression with resultType string).
+ :type sql_reader_stored_procedure_name: object
+ :param stored_procedure_parameters: Value and type setting for stored
+ procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}".
+ :type stored_procedure_parameters: dict[str,
+ ~azure.mgmt.datafactory.models.StoredProcedureParameter]
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
+ 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'},
+ 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
+ }
+
+ def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None:
+ super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.sql_reader_query = sql_reader_query
+ self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name
+ self.stored_procedure_parameters = stored_procedure_parameters
+ self.type = 'SqlSource'
+
+
+class SquareLinkedService(LinkedService):
+ """Square Service linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param host: Required. The URL of the Square instance. (e.g.
+ mystore.mysquare.com)
+ :type host: object
+ :param client_id: Required. The client ID associated with your Square
+ application.
+ :type client_id: object
+ :param client_secret: The client secret associated with your Square
+ application.
+ :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
+ :param redirect_uri: Required. The redirect URL assigned in the Square
+ application dashboard. (e.g. http://localhost:2500)
+ :type redirect_uri: object
+ :param use_encrypted_endpoints: Specifies whether the data source
+ endpoints are encrypted using HTTPS. The default value is true.
+ :type use_encrypted_endpoints: object
+ :param use_host_verification: Specifies whether to require the host name
+ in the server's certificate to match the host name of the server when
+ connecting over SSL. The default value is true.
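As the docstring notes, sql_reader_query and sql_reader_stored_procedure_name are mutually exclusive on SqlSource. A short sketch of the two forms (query and procedure names illustrative):

    from azure.mgmt.datafactory.models import SqlSource, StoredProcedureParameter

    # Either an inline query...
    query_source = SqlSource(sql_reader_query='SELECT * FROM dbo.Orders')

    # ...or a stored procedure, but not both on the same source.
    proc_source = SqlSource(
        sql_reader_stored_procedure_name='[dbo].[uspGetOrders]',
        stored_procedure_parameters={
            'Region': StoredProcedureParameter(value='EMEA', type='String')})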
+ :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'client_id': {'required': True}, + 'redirect_uri': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'redirect_uri': {'key': 'typeProperties.redirectUri', 'type': 'object'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, client_id, redirect_uri, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(SquareLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.client_id = client_id + self.client_secret = client_secret + self.redirect_uri = redirect_uri + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Square' + + +class SquareObjectDataset(Dataset): + """Square Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
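A hedged sketch of the Square linked service above, echoing the host and redirect URI examples from its docstring; the client ID and secret are placeholders.

    from azure.mgmt.datafactory.models import SecureString, SquareLinkedService

    square_ls = SquareLinkedService(
        host='mystore.mysquare.com',
        client_id='<client-id>',
        client_secret=SecureString(value='<client-secret>'),
        redirect_uri='http://localhost:2500',
        use_encrypted_endpoints=True)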
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(SquareObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'SquareObject' + + +class SquareSource(CopySource): + """A copy activity Square Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'query': {'key': 'query', 'type': 'object'},
+ }
+
+ def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
+ super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.query = query
+ self.type = 'SquareSource'
+
+
+class SSISAccessCredential(Model):
+ """SSIS access credential.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param domain: Required. Domain for Windows authentication.
+ :type domain: object
+ :param user_name: Required. UserName for Windows authentication.
+ :type user_name: object
+ :param password: Required. Password for Windows authentication.
+ :type password: ~azure.mgmt.datafactory.models.SecureString
+ """
+
+ _validation = {
+ 'domain': {'required': True},
+ 'user_name': {'required': True},
+ 'password': {'required': True},
+ }
+
+ _attribute_map = {
+ 'domain': {'key': 'domain', 'type': 'object'},
+ 'user_name': {'key': 'userName', 'type': 'object'},
+ 'password': {'key': 'password', 'type': 'SecureString'},
+ }
+
+ def __init__(self, *, domain, user_name, password, **kwargs) -> None:
+ super(SSISAccessCredential, self).__init__(**kwargs)
+ self.domain = domain
+ self.user_name = user_name
+ self.password = password
+
+
+class SsisObjectMetadata(Model):
+ """SSIS object metadata.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: SsisEnvironment, SsisPackage, SsisProject, SsisFolder
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param id: Metadata id.
+ :type id: long
+ :param name: Metadata name.
+ :type name: str
+ :param description: Metadata description.
+ :type description: str
+ :param type: Required. Constant filled by server.
+ :type type: str
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'long'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'type': {'Environment': 'SsisEnvironment', 'Package': 'SsisPackage', 'Project': 'SsisProject', 'Folder': 'SsisFolder'}
+ }
+
+ def __init__(self, *, id: int=None, name: str=None, description: str=None, **kwargs) -> None:
+ super(SsisObjectMetadata, self).__init__(**kwargs)
+ self.id = id
+ self.name = name
+ self.description = description
+ self.type = None
+
+
+class SsisEnvironment(SsisObjectMetadata):
+ """Ssis environment.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param id: Metadata id.
+ :type id: long
+ :param name: Metadata name.
+ :type name: str
+ :param description: Metadata description.
+ :type description: str
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param folder_id: Folder id which contains environment.
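The _subtype_map above is what drives polymorphic deserialization: msrest reads the 'type' discriminator returned by the service and instantiates the matching subclass. A sketch of dispatching on the result (the input list is assumed to come from an SSIS metadata listing):

    from azure.mgmt.datafactory.models import (
        SsisEnvironment, SsisFolder, SsisPackage, SsisProject)

    def describe(metadata_items):
        # Each item arrives as the SsisObjectMetadata subclass chosen by 'type'.
        for item in metadata_items:
            if isinstance(item, SsisFolder):
                print('folder:', item.name)
            elif isinstance(item, SsisProject):
                print('project:', item.name, 'version:', item.version)
            elif isinstance(item, SsisPackage):
                print('package:', item.name, 'project id:', item.project_id)
            elif isinstance(item, SsisEnvironment):
                print('environment:', item.name,
                      'variables:', len(item.variables or []))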
+ :type folder_id: long
+ :param variables: Variables in the environment.
+ :type variables: list[~azure.mgmt.datafactory.models.SsisVariable]
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'long'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'folder_id': {'key': 'folderId', 'type': 'long'},
+ 'variables': {'key': 'variables', 'type': '[SsisVariable]'},
+ }
+
+ def __init__(self, *, id: int=None, name: str=None, description: str=None, folder_id: int=None, variables=None, **kwargs) -> None:
+ super(SsisEnvironment, self).__init__(id=id, name=name, description=description, **kwargs)
+ self.folder_id = folder_id
+ self.variables = variables
+ self.type = 'Environment'
+
+
+class SsisEnvironmentReference(Model):
+ """Ssis environment reference.
+
+ :param id: Environment reference id.
+ :type id: long
+ :param environment_folder_name: Environment folder name.
+ :type environment_folder_name: str
+ :param environment_name: Environment name.
+ :type environment_name: str
+ :param reference_type: Reference type.
+ :type reference_type: str
+ """
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'long'},
+ 'environment_folder_name': {'key': 'environmentFolderName', 'type': 'str'},
+ 'environment_name': {'key': 'environmentName', 'type': 'str'},
+ 'reference_type': {'key': 'referenceType', 'type': 'str'},
+ }
+
+ def __init__(self, *, id: int=None, environment_folder_name: str=None, environment_name: str=None, reference_type: str=None, **kwargs) -> None:
+ super(SsisEnvironmentReference, self).__init__(**kwargs)
+ self.id = id
+ self.environment_folder_name = environment_folder_name
+ self.environment_name = environment_name
+ self.reference_type = reference_type
+
+
+class SSISExecutionCredential(Model):
+ """SSIS package execution credential.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param domain: Required. Domain for Windows authentication.
+ :type domain: object
+ :param user_name: Required. UserName for Windows authentication.
+ :type user_name: object
+ :param password: Required. Password for Windows authentication.
+ :type password: ~azure.mgmt.datafactory.models.SecureString
+ """
+
+ _validation = {
+ 'domain': {'required': True},
+ 'user_name': {'required': True},
+ 'password': {'required': True},
+ }
+
+ _attribute_map = {
+ 'domain': {'key': 'domain', 'type': 'object'},
+ 'user_name': {'key': 'userName', 'type': 'object'},
+ 'password': {'key': 'password', 'type': 'SecureString'},
+ }
+
+ def __init__(self, *, domain, user_name, password, **kwargs) -> None:
+ super(SSISExecutionCredential, self).__init__(**kwargs)
+ self.domain = domain
+ self.user_name = user_name
+ self.password = password
+
+
+class SSISExecutionParameter(Model):
+ """SSIS execution parameter.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param value: Required. SSIS package execution parameter value. Type:
+ string (or Expression with resultType string).
+ :type value: object
+ """
+
+ _validation = {
+ 'value': {'required': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': 'object'},
+ }
+
+ def __init__(self, *, value, **kwargs) -> None:
+ super(SSISExecutionParameter, self).__init__(**kwargs)
+ self.value = value
+
+
+class SsisFolder(SsisObjectMetadata):
+ """Ssis folder.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param id: Metadata id.
+ :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, **kwargs) -> None: + super(SsisFolder, self).__init__(id=id, name=name, description=description, **kwargs) + self.type = 'Folder' + + +class SSISLogLocation(Model): + """SSIS package execution log location. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param log_path: Required. The SSIS package execution log path. Type: + string (or Expression with resultType string). + :type log_path: object + :ivar type: Required. The type of SSIS log location. Default value: "File" + . + :vartype type: str + :param access_credential: The package execution log access credential. + :type access_credential: + ~azure.mgmt.datafactory.models.SSISAccessCredential + :param log_refresh_interval: Specifies the interval to refresh log. The + default interval is 5 minutes. Type: string (or Expression with resultType + string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type log_refresh_interval: object + """ + + _validation = { + 'log_path': {'required': True}, + 'type': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'log_path': {'key': 'logPath', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, + 'log_refresh_interval': {'key': 'typeProperties.logRefreshInterval', 'type': 'object'}, + } + + type = "File" + + def __init__(self, *, log_path, access_credential=None, log_refresh_interval=None, **kwargs) -> None: + super(SSISLogLocation, self).__init__(**kwargs) + self.log_path = log_path + self.access_credential = access_credential + self.log_refresh_interval = log_refresh_interval + + +class SsisObjectMetadataListResponse(Model): + """A list of SSIS object metadata. + + :param value: List of SSIS object metadata. + :type value: list[~azure.mgmt.datafactory.models.SsisObjectMetadata] + :param next_link: The link to the next page of results, if any remaining + results exist. + :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[SsisObjectMetadata]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__(self, *, value=None, next_link: str=None, **kwargs) -> None: + super(SsisObjectMetadataListResponse, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class SsisObjectMetadataStatusResponse(Model): + """The status of the operation. + + :param status: The status of the operation. + :type status: str + :param name: The operation name. + :type name: str + :param properties: The operation properties. + :type properties: str + :param error: The operation error message. 
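A sketch of SSISLogLocation pointing at a file share, with an SSISAccessCredential for the share; note that type is a class-level constant ("File") rather than a constructor argument. The UNC path and account are illustrative.

    from azure.mgmt.datafactory.models import (
        SecureString, SSISAccessCredential, SSISLogLocation)

    log_location = SSISLogLocation(
        log_path='\\\\fileserver\\ssislogs',
        access_credential=SSISAccessCredential(
            domain='CORP',
            user_name='svc-ssis',
            password=SecureString(value='<password>')),
        log_refresh_interval='00:05:00')  # TimeSpan-style string per the pattern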
+ :type error: str + """ + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'str'}, + 'error': {'key': 'error', 'type': 'str'}, + } + + def __init__(self, *, status: str=None, name: str=None, properties: str=None, error: str=None, **kwargs) -> None: + super(SsisObjectMetadataStatusResponse, self).__init__(**kwargs) + self.status = status + self.name = name + self.properties = properties + self.error = error + + +class SsisPackage(SsisObjectMetadata): + """Ssis Package. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains package. + :type folder_id: long + :param project_version: Project version which contains package. + :type project_version: long + :param project_id: Project id which contains package. + :type project_id: long + :param parameters: Parameters in package + :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'project_version': {'key': 'projectVersion', 'type': 'long'}, + 'project_id': {'key': 'projectId', 'type': 'long'}, + 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, folder_id: int=None, project_version: int=None, project_id: int=None, parameters=None, **kwargs) -> None: + super(SsisPackage, self).__init__(id=id, name=name, description=description, **kwargs) + self.folder_id = folder_id + self.project_version = project_version + self.project_id = project_id + self.parameters = parameters + self.type = 'Package' + + +class SSISPackageLocation(Model): + """SSIS package location. + + All required parameters must be populated in order to send to Azure. + + :param package_path: Required. The SSIS package path. Type: string (or + Expression with resultType string). + :type package_path: object + :param type: The type of SSIS package location. Possible values include: + 'SSISDB', 'File' + :type type: str or ~azure.mgmt.datafactory.models.SsisPackageLocationType + :param package_password: Password of the package. + :type package_password: ~azure.mgmt.datafactory.models.SecureString + :param access_credential: The package access credential. + :type access_credential: + ~azure.mgmt.datafactory.models.SSISAccessCredential + :param configuration_path: The configuration file of the package + execution. Type: string (or Expression with resultType string). 
+ :type configuration_path: object + """ + + _validation = { + 'package_path': {'required': True}, + } + + _attribute_map = { + 'package_path': {'key': 'packagePath', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'package_password': {'key': 'typeProperties.packagePassword', 'type': 'SecureString'}, + 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, + 'configuration_path': {'key': 'typeProperties.configurationPath', 'type': 'object'}, + } + + def __init__(self, *, package_path, type=None, package_password=None, access_credential=None, configuration_path=None, **kwargs) -> None: + super(SSISPackageLocation, self).__init__(**kwargs) + self.package_path = package_path + self.type = type + self.package_password = package_password + self.access_credential = access_credential + self.configuration_path = configuration_path + + +class SsisParameter(Model): + """Ssis parameter. + + :param id: Parameter id. + :type id: long + :param name: Parameter name. + :type name: str + :param description: Parameter description. + :type description: str + :param data_type: Parameter type. + :type data_type: str + :param required: Whether parameter is required. + :type required: bool + :param sensitive: Whether parameter is sensitive. + :type sensitive: bool + :param design_default_value: Design default value of parameter. + :type design_default_value: str + :param default_value: Default value of parameter. + :type default_value: str + :param sensitive_default_value: Default sensitive value of parameter. + :type sensitive_default_value: str + :param value_type: Parameter value type. + :type value_type: str + :param value_set: Parameter value set. + :type value_set: bool + :param variable: Parameter reference variable. + :type variable: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'required': {'key': 'required', 'type': 'bool'}, + 'sensitive': {'key': 'sensitive', 'type': 'bool'}, + 'design_default_value': {'key': 'designDefaultValue', 'type': 'str'}, + 'default_value': {'key': 'defaultValue', 'type': 'str'}, + 'sensitive_default_value': {'key': 'sensitiveDefaultValue', 'type': 'str'}, + 'value_type': {'key': 'valueType', 'type': 'str'}, + 'value_set': {'key': 'valueSet', 'type': 'bool'}, + 'variable': {'key': 'variable', 'type': 'str'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, data_type: str=None, required: bool=None, sensitive: bool=None, design_default_value: str=None, default_value: str=None, sensitive_default_value: str=None, value_type: str=None, value_set: bool=None, variable: str=None, **kwargs) -> None: + super(SsisParameter, self).__init__(**kwargs) + self.id = id + self.name = name + self.description = description + self.data_type = data_type + self.required = required + self.sensitive = sensitive + self.design_default_value = design_default_value + self.default_value = default_value + self.sensitive_default_value = sensitive_default_value + self.value_type = value_type + self.value_set = value_set + self.variable = variable + + +class SsisProject(SsisObjectMetadata): + """Ssis project. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. 
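A sketch of the two package location flavors named by SsisPackageLocationType ('SSISDB' and 'File'); the paths and package password are illustrative.

    from azure.mgmt.datafactory.models import SecureString, SSISPackageLocation

    # A package deployed to the SSISDB catalog, addressed as folder/project/package.
    ssisdb_pkg = SSISPackageLocation(
        package_path='Folder1/Project1/Package1.dtsx',
        type='SSISDB')

    # A package on a file share, protected by a package password.
    file_pkg = SSISPackageLocation(
        package_path='\\\\fileserver\\packages\\Package1.dtsx',
        type='File',
        package_password=SecureString(value='<package-password>'))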
+ :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains project. + :type folder_id: long + :param version: Project version. + :type version: long + :param environment_refs: Environment reference in project + :type environment_refs: + list[~azure.mgmt.datafactory.models.SsisEnvironmentReference] + :param parameters: Parameters in project + :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'version': {'key': 'version', 'type': 'long'}, + 'environment_refs': {'key': 'environmentRefs', 'type': '[SsisEnvironmentReference]'}, + 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, folder_id: int=None, version: int=None, environment_refs=None, parameters=None, **kwargs) -> None: + super(SsisProject, self).__init__(id=id, name=name, description=description, **kwargs) + self.folder_id = folder_id + self.version = version + self.environment_refs = environment_refs + self.parameters = parameters + self.type = 'Project' + + +class SSISPropertyOverride(Model): + """SSIS property override. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. SSIS package property override value. Type: string + (or Expression with resultType string). + :type value: object + :param is_sensitive: Whether SSIS package property override value is + sensitive data. Value will be encrypted in SSISDB if it is true + :type is_sensitive: bool + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': 'object'}, + 'is_sensitive': {'key': 'isSensitive', 'type': 'bool'}, + } + + def __init__(self, *, value, is_sensitive: bool=None, **kwargs) -> None: + super(SSISPropertyOverride, self).__init__(**kwargs) + self.value = value + self.is_sensitive = is_sensitive + + +class SsisVariable(Model): + """Ssis variable. + + :param id: Variable id. + :type id: long + :param name: Variable name. + :type name: str + :param description: Variable description. + :type description: str + :param data_type: Variable type. + :type data_type: str + :param sensitive: Whether variable is sensitive. + :type sensitive: bool + :param value: Variable value. + :type value: str + :param sensitive_value: Variable sensitive value. 
+ :type sensitive_value: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'sensitive': {'key': 'sensitive', 'type': 'bool'}, + 'value': {'key': 'value', 'type': 'str'}, + 'sensitive_value': {'key': 'sensitiveValue', 'type': 'str'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, data_type: str=None, sensitive: bool=None, value: str=None, sensitive_value: str=None, **kwargs) -> None: + super(SsisVariable, self).__init__(**kwargs) + self.id = id + self.name = name + self.description = description + self.data_type = data_type + self.sensitive = sensitive + self.value = value + self.sensitive_value = sensitive_value + + +class StagingSettings(Model): + """Staging settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param linked_service_name: Required. Staging linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param path: The path to storage for storing the interim data. Type: + string (or Expression with resultType string). + :type path: object + :param enable_compression: Specifies whether to use compression when + copying data via an interim staging. Default value is false. Type: boolean + (or Expression with resultType boolean). + :type enable_compression: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'path': {'key': 'path', 'type': 'object'}, + 'enable_compression': {'key': 'enableCompression', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, path=None, enable_compression=None, **kwargs) -> None: + super(StagingSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.linked_service_name = linked_service_name + self.path = path + self.enable_compression = enable_compression + + +class StoredProcedureParameter(Model): + """SQL stored procedure parameter. + + :param value: Stored procedure parameter value. Type: string (or + Expression with resultType string). + :type value: object + :param type: Stored procedure parameter type. Possible values include: + 'String', 'Int', 'Int64', 'Decimal', 'Guid', 'Boolean', 'Date' + :type type: str or + ~azure.mgmt.datafactory.models.StoredProcedureParameterType + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, value=None, type=None, **kwargs) -> None: + super(StoredProcedureParameter, self).__init__(**kwargs) + self.value = value + self.type = type + + +class SybaseLinkedService(LinkedService): + """Linked service for Sybase data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
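A hedged sketch of staged copy: StagingSettings names the interim store and path, and is attached to a CopyActivity alongside an enable_staging flag (assumed here from the service's enableStaging property). All reference names are illustrative.

    from azure.mgmt.datafactory.models import (
        BlobSink, BlobSource, CopyActivity, DatasetReference,
        LinkedServiceReference, StagingSettings)

    staged_copy = CopyActivity(
        name='StagedCopy',
        inputs=[DatasetReference(reference_name='InputDataset')],
        outputs=[DatasetReference(reference_name='OutputDataset')],
        source=BlobSource(),
        sink=BlobSink(),
        enable_staging=True,  # assumed CopyActivity counterpart to staging_settings
        staging_settings=StagingSettings(
            linked_service_name=LinkedServiceReference(
                reference_name='StagingBlobLinkedService'),
            path='staging-container/interim',
            enable_compression=True))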
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. Server name for connection. Type: string (or + Expression with resultType string). + :type server: object + :param database: Required. Database name for connection. Type: string (or + Expression with resultType string). + :type database: object + :param schema: Schema name for connection. Type: string (or Expression + with resultType string). + :type schema: object + :param authentication_type: AuthenticationType to be used for connection. + Possible values include: 'Basic', 'Windows' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.SybaseAuthenticationType + :param username: Username for authentication. Type: string (or Expression + with resultType string). + :type username: object + :param password: Password for authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, server, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, schema=None, authentication_type=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(SybaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.server = server + self.database = database + self.schema = schema + self.authentication_type = authentication_type + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'Sybase' + + +class SybaseSource(CopySource): + """A copy activity source for Sybase databases. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'SybaseSource' + + +class SybaseTableDataset(Dataset): + """The Sybase table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Sybase table name. Type: string (or Expression with + resultType string). 
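A short sketch pairing the Sybase linked service with its copy source; the server, database, and credentials are illustrative, and 'Basic' comes from the SybaseAuthenticationType values listed above.

    from azure.mgmt.datafactory.models import (
        SecureString, SybaseLinkedService, SybaseSource)

    sybase_ls = SybaseLinkedService(
        server='sybase01',
        database='sales',
        authentication_type='Basic',
        username='report_reader',
        password=SecureString(value='<password>'))

    source = SybaseSource(query='SELECT * FROM dbo.orders')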
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(SybaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'SybaseTable' + + +class TeradataLinkedService(LinkedService): + """Linked service for Teradata data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Teradata ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param server: Server name for connection. Type: string (or Expression + with resultType string). + :type server: object + :param authentication_type: AuthenticationType to be used for connection. + Possible values include: 'Basic', 'Windows' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.TeradataAuthenticationType + :param username: Username for authentication. Type: string (or Expression + with resultType string). + :type username: object + :param password: Password for authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+ 'server': {'key': 'typeProperties.server', 'type': 'object'},
+ 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+ 'username': {'key': 'typeProperties.username', 'type': 'object'},
+ 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, server=None, authentication_type=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None:
+ super(TeradataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+ self.connection_string = connection_string
+ self.server = server
+ self.authentication_type = authentication_type
+ self.username = username
+ self.password = password
+ self.encrypted_credential = encrypted_credential
+ self.type = 'Teradata'
+
+
+class TeradataPartitionSettings(Model):
+ """The settings that will be used for Teradata source partitioning.
+
+ :param partition_column_name: The name of the column that will be used for
+ range or hash partitioning. Type: string (or Expression with resultType
+ string).
+ :type partition_column_name: object
+ :param partition_upper_bound: The maximum value of the column specified in
+ partitionColumnName that will be used for range partitioning. Type: string
+ (or Expression with resultType string).
+ :type partition_upper_bound: object
+ :param partition_lower_bound: The minimum value of the column specified in
+ partitionColumnName that will be used for range partitioning. Type: string
+ (or Expression with resultType string).
+ :type partition_lower_bound: object
+ """
+
+ _attribute_map = {
+ 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'},
+ 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'},
+ 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'},
+ }
+
+ def __init__(self, *, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None:
+ super(TeradataPartitionSettings, self).__init__(**kwargs)
+ self.partition_column_name = partition_column_name
+ self.partition_upper_bound = partition_upper_bound
+ self.partition_lower_bound = partition_lower_bound
+
+
+class TeradataSource(CopySource):
+ """A copy activity Teradata source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param query: Teradata query. Type: string (or Expression with resultType
+     string).
+    :type query: object
+    :param partition_option: The partition mechanism that will be used to
+     read Teradata in parallel. Possible values include: 'None', 'Hash',
+     'DynamicRange'
+    :type partition_option: str or
+     ~azure.mgmt.datafactory.models.TeradataPartitionOption
+    :param partition_settings: The settings that will be leveraged for
+     Teradata source partitioning.
+    :type partition_settings:
+     ~azure.mgmt.datafactory.models.TeradataPartitionSettings
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+        'partition_option': {'key': 'partitionOption', 'type': 'str'},
+        'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'},
+    }
+
+    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, partition_option=None, partition_settings=None, **kwargs) -> None:
+        super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.query = query
+        self.partition_option = partition_option
+        self.partition_settings = partition_settings
+        self.type = 'TeradataSource'
+
+
+class TeradataTableDataset(Dataset):
+    """The Teradata database dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
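+
+# A sketch of a TeradataSource (defined above) that reads in parallel by
+# dynamic range over an integer column; query, bounds, and names are
+# illustrative:
+from azure.mgmt.datafactory.models import (
+    TeradataPartitionSettings, TeradataSource)
+
+source = TeradataSource(
+    query='SELECT * FROM sales.orders',
+    partition_option='DynamicRange',
+    partition_settings=TeradataPartitionSettings(
+        partition_column_name='order_id',
+        partition_lower_bound='1',
+        partition_upper_bound='1000000',
+    ),
+)
+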
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param database: The database name of Teradata. Type: string (or
+     Expression with resultType string).
+    :type database: object
+    :param table: The table name of Teradata. Type: string (or Expression
+     with resultType string).
+    :type table: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'database': {'key': 'typeProperties.database', 'type': 'object'},
+        'table': {'key': 'typeProperties.table', 'type': 'object'},
+    }
+
+    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, database=None, table=None, **kwargs) -> None:
+        super(TeradataTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+        self.database = database
+        self.table = table
+        self.type = 'TeradataTable'
+
+
+class TextFormat(DatasetStorageFormat):
+    """The data stored in text format.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param serializer: Serializer. Type: string (or Expression with resultType
+     string).
+    :type serializer: object
+    :param deserializer: Deserializer. Type: string (or Expression with
+     resultType string).
+    :type deserializer: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param column_delimiter: The column delimiter. Type: string (or Expression
+     with resultType string).
+    :type column_delimiter: object
+    :param row_delimiter: The row delimiter. Type: string (or Expression with
+     resultType string).
+    :type row_delimiter: object
+    :param escape_char: The escape character. Type: string (or Expression
+     with resultType string).
+    :type escape_char: object
+    :param quote_char: The quote character. Type: string (or Expression with
+     resultType string).
+    :type quote_char: object
+    :param null_value: The null value string. Type: string (or Expression
+     with resultType string).
+    :type null_value: object
+    :param encoding_name: The code page name of the preferred encoding. If
+     not specified, the default value is "utf-8", unless BOM denotes another
+     Unicode encoding. Refer to the "Name" column of the table in the
+     following link to set supported values:
+     https://msdn.microsoft.com/library/system.text.encoding.aspx. Type:
+     string (or Expression with resultType string).
+    :type encoding_name: object
+    :param treat_empty_as_null: Treat empty column values in the text file as
+     null. The default value is true. Type: boolean (or Expression with
+     resultType boolean).
+    :type treat_empty_as_null: object
+    :param skip_line_count: The number of lines/rows to be skipped when
+     parsing text files. The default value is 0. Type: integer (or Expression
+     with resultType integer).
+    :type skip_line_count: object
+    :param first_row_as_header: When used as input, treat the first row of
+     data as headers. When used as output, write the headers into the output
+     as the first row of data. The default value is false. Type: boolean (or
+     Expression with resultType boolean).
+    :type first_row_as_header: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'serializer': {'key': 'serializer', 'type': 'object'},
+        'deserializer': {'key': 'deserializer', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'column_delimiter': {'key': 'columnDelimiter', 'type': 'object'},
+        'row_delimiter': {'key': 'rowDelimiter', 'type': 'object'},
+        'escape_char': {'key': 'escapeChar', 'type': 'object'},
+        'quote_char': {'key': 'quoteChar', 'type': 'object'},
+        'null_value': {'key': 'nullValue', 'type': 'object'},
+        'encoding_name': {'key': 'encodingName', 'type': 'object'},
+        'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'},
+        'skip_line_count': {'key': 'skipLineCount', 'type': 'object'},
+        'first_row_as_header': {'key': 'firstRowAsHeader', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, column_delimiter=None, row_delimiter=None, escape_char=None, quote_char=None, null_value=None, encoding_name=None, treat_empty_as_null=None, skip_line_count=None, first_row_as_header=None, **kwargs) -> None:
+        super(TextFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs)
+        self.column_delimiter = column_delimiter
+        self.row_delimiter = row_delimiter
+        self.escape_char = escape_char
+        self.quote_char = quote_char
+        self.null_value = null_value
+        self.encoding_name = encoding_name
+        self.treat_empty_as_null = treat_empty_as_null
+        self.skip_line_count = skip_line_count
+        self.first_row_as_header = first_row_as_header
+        self.type = 'TextFormat'
+
+
+class TriggerDependencyReference(DependencyReference):
+    """Trigger referenced dependency.
+
+    You probably want to use the sub-classes and not this class directly.
+    Known sub-classes are: TumblingWindowTriggerDependencyReference
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param reference_trigger: Required. Referenced trigger.
+    :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'reference_trigger': {'required': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'},
+    }
+
+    _subtype_map = {
+        'type': {'TumblingWindowTriggerDependencyReference': 'TumblingWindowTriggerDependencyReference'}
+    }
+
+    def __init__(self, *, reference_trigger, **kwargs) -> None:
+        super(TriggerDependencyReference, self).__init__(**kwargs)
+        self.reference_trigger = reference_trigger
+        self.type = 'TriggerDependencyReference'
+
+
+class TriggerPipelineReference(Model):
+    """Pipeline that needs to be triggered with the given parameters.
+
+    :param pipeline_reference: Pipeline reference.
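+
+# A short sketch of the TextFormat defined above: a pipe-delimited file with a
+# header row, UTF-8 encoding, and empty columns read back as nulls:
+from azure.mgmt.datafactory.models import TextFormat
+
+text_format = TextFormat(
+    column_delimiter='|',
+    row_delimiter='\n',
+    encoding_name='UTF-8',
+    first_row_as_header=True,
+    treat_empty_as_null=True,
+)
+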
+ :type pipeline_reference: ~azure.mgmt.datafactory.models.PipelineReference + :param parameters: Pipeline parameters. + :type parameters: dict[str, object] + """ + + _attribute_map = { + 'pipeline_reference': {'key': 'pipelineReference', 'type': 'PipelineReference'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + def __init__(self, *, pipeline_reference=None, parameters=None, **kwargs) -> None: + super(TriggerPipelineReference, self).__init__(**kwargs) + self.pipeline_reference = pipeline_reference + self.parameters = parameters + + +class TriggerReference(Model): + """Trigger reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Trigger reference type. Default value: + "TriggerReference" . + :vartype type: str + :param reference_name: Required. Reference trigger name. + :type reference_name: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + type = "TriggerReference" + + def __init__(self, *, reference_name: str, **kwargs) -> None: + super(TriggerReference, self).__init__(**kwargs) + self.reference_name = reference_name + + +class TriggerResource(SubResource): + """Trigger resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Properties of the trigger. + :type properties: ~azure.mgmt.datafactory.models.Trigger + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'Trigger'}, + } + + def __init__(self, *, properties, **kwargs) -> None: + super(TriggerResource, self).__init__(**kwargs) + self.properties = properties + + +class TriggerRun(Model): + """Trigger runs. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar trigger_run_id: Trigger run id. + :vartype trigger_run_id: str + :ivar trigger_name: Trigger name. + :vartype trigger_name: str + :ivar trigger_type: Trigger type. + :vartype trigger_type: str + :ivar trigger_run_timestamp: Trigger run start time. + :vartype trigger_run_timestamp: datetime + :ivar status: Trigger run status. Possible values include: 'Succeeded', + 'Failed', 'Inprogress' + :vartype status: str or ~azure.mgmt.datafactory.models.TriggerRunStatus + :ivar message: Trigger error message. + :vartype message: str + :ivar properties: List of property name and value related to trigger run. + Name, value pair depends on type of trigger. 
+ :vartype properties: dict[str, str] + :ivar triggered_pipelines: List of pipeline name and run Id triggered by + the trigger run. + :vartype triggered_pipelines: dict[str, str] + """ + + _validation = { + 'trigger_run_id': {'readonly': True}, + 'trigger_name': {'readonly': True}, + 'trigger_type': {'readonly': True}, + 'trigger_run_timestamp': {'readonly': True}, + 'status': {'readonly': True}, + 'message': {'readonly': True}, + 'properties': {'readonly': True}, + 'triggered_pipelines': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'trigger_run_id': {'key': 'triggerRunId', 'type': 'str'}, + 'trigger_name': {'key': 'triggerName', 'type': 'str'}, + 'trigger_type': {'key': 'triggerType', 'type': 'str'}, + 'trigger_run_timestamp': {'key': 'triggerRunTimestamp', 'type': 'iso-8601'}, + 'status': {'key': 'status', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'triggered_pipelines': {'key': 'triggeredPipelines', 'type': '{str}'}, + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(TriggerRun, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.trigger_run_id = None + self.trigger_name = None + self.trigger_type = None + self.trigger_run_timestamp = None + self.status = None + self.message = None + self.properties = None + self.triggered_pipelines = None + + +class TriggerRunsQueryResponse(Model): + """A list of trigger runs. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of trigger runs. + :type value: list[~azure.mgmt.datafactory.models.TriggerRun] + :param continuation_token: The continuation token for getting the next + page of results, if any remaining results exist, null otherwise. + :type continuation_token: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[TriggerRun]'}, + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + } + + def __init__(self, *, value, continuation_token: str=None, **kwargs) -> None: + super(TriggerRunsQueryResponse, self).__init__(**kwargs) + self.value = value + self.continuation_token = continuation_token + + +class TumblingWindowTrigger(Trigger): + """Trigger that schedules pipeline runs for all fixed time interval windows + from a start time without gaps and also supports backfill scenarios (when + start time is in the past). + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param pipeline: Required. Pipeline for which runs are created when an + event is fired for trigger window that is ready. 
+ :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference + :param frequency: Required. The frequency of the time windows. Possible + values include: 'Minute', 'Hour' + :type frequency: str or + ~azure.mgmt.datafactory.models.TumblingWindowFrequency + :param interval: Required. The interval of the time windows. The minimum + interval allowed is 15 Minutes. + :type interval: int + :param start_time: Required. The start time for the time period for the + trigger during which events are fired for windows that are ready. Only UTC + time is currently supported. + :type start_time: datetime + :param end_time: The end time for the time period for the trigger during + which events are fired for windows that are ready. Only UTC time is + currently supported. + :type end_time: datetime + :param delay: Specifies how long the trigger waits past due time before + triggering new run. It doesn't alter window start and end time. The + default is 0. Type: string (or Expression with resultType string), + pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type delay: object + :param max_concurrency: Required. The max number of parallel time windows + (ready for execution) for which a new run is triggered. + :type max_concurrency: int + :param retry_policy: Retry policy that will be applied for failed pipeline + runs. + :type retry_policy: ~azure.mgmt.datafactory.models.RetryPolicy + :param depends_on: Triggers that this trigger depends on. Only tumbling + window triggers are supported. + :type depends_on: list[~azure.mgmt.datafactory.models.DependencyReference] + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'pipeline': {'required': True}, + 'frequency': {'required': True}, + 'interval': {'required': True}, + 'start_time': {'required': True}, + 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, + 'frequency': {'key': 'typeProperties.frequency', 'type': 'str'}, + 'interval': {'key': 'typeProperties.interval', 'type': 'int'}, + 'start_time': {'key': 'typeProperties.startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'typeProperties.endTime', 'type': 'iso-8601'}, + 'delay': {'key': 'typeProperties.delay', 'type': 'object'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + 'retry_policy': {'key': 'typeProperties.retryPolicy', 'type': 'RetryPolicy'}, + 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[DependencyReference]'}, + } + + def __init__(self, *, pipeline, frequency, interval: int, start_time, max_concurrency: int, additional_properties=None, description: str=None, annotations=None, end_time=None, delay=None, retry_policy=None, depends_on=None, **kwargs) -> None: + super(TumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) + self.pipeline = pipeline + self.frequency = frequency + self.interval = interval + self.start_time = start_time + self.end_time = end_time + self.delay = delay + self.max_concurrency = max_concurrency + self.retry_policy = retry_policy + self.depends_on = depends_on + self.type = 
'TumblingWindowTrigger'
+
+
+class TumblingWindowTriggerDependencyReference(TriggerDependencyReference):
+    """Referenced tumbling window trigger dependency.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param reference_trigger: Required. Referenced trigger.
+    :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference
+    :param offset: Timespan applied to the start time of a tumbling window
+     when evaluating dependency.
+    :type offset: str
+    :param size: The size of the window when evaluating the dependency. If
+     undefined the frequency of the tumbling window will be used.
+    :type size: str
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'reference_trigger': {'required': True},
+        'offset': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'},
+        'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'},
+        'offset': {'key': 'offset', 'type': 'str'},
+        'size': {'key': 'size', 'type': 'str'},
+    }
+
+    def __init__(self, *, reference_trigger, offset: str=None, size: str=None, **kwargs) -> None:
+        super(TumblingWindowTriggerDependencyReference, self).__init__(reference_trigger=reference_trigger, **kwargs)
+        self.offset = offset
+        self.size = size
+        self.type = 'TumblingWindowTriggerDependencyReference'
+
+
+class UntilActivity(ControlActivity):
+    """This activity executes inner activities until the specified boolean
+    expression evaluates to true or the timeout is reached, whichever is
+    earlier.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param name: Required. Activity name.
+    :type name: str
+    :param description: Activity description.
+    :type description: str
+    :param depends_on: Activity depends on condition.
+    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+    :param user_properties: Activity user properties.
+    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param expression: Required. An expression that would evaluate to Boolean.
+     The loop will continue until this expression evaluates to true.
+    :type expression: ~azure.mgmt.datafactory.models.Expression
+    :param timeout: Specifies the timeout for the activity to run. If there is
+     no value specified, it takes the value of TimeSpan.FromDays(7) which is 1
+     week as default. Type: string (or Expression with resultType string),
+     pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type timeout: object
+    :param activities: Required. List of activities to execute.
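+
+# A sketch of a 15-minute tumbling window trigger that depends on another
+# tumbling window trigger, using the classes defined above. PipelineReference
+# is assumed to be the models-package reference type taking reference_name,
+# and the windowStart expression is the usual trigger system variable:
+from datetime import datetime
+from azure.mgmt.datafactory.models import (
+    PipelineReference, TriggerPipelineReference, TriggerReference,
+    TumblingWindowTrigger, TumblingWindowTriggerDependencyReference)
+
+trigger = TumblingWindowTrigger(
+    pipeline=TriggerPipelineReference(
+        pipeline_reference=PipelineReference(reference_name='copyPipeline'),
+        parameters={'windowStart': '@trigger().outputs.windowStartTime'},
+    ),
+    frequency='Minute',
+    interval=15,
+    start_time=datetime(2019, 6, 1),
+    max_concurrency=10,
+    depends_on=[TumblingWindowTriggerDependencyReference(
+        reference_trigger=TriggerReference(reference_name='upstreamTrigger'),
+        offset='02:00:00')],
+)
+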
+    :type activities: list[~azure.mgmt.datafactory.models.Activity]
+    """
+
+    _validation = {
+        'name': {'required': True},
+        'type': {'required': True},
+        'expression': {'required': True},
+        'activities': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'expression': {'key': 'typeProperties.expression', 'type': 'Expression'},
+        'timeout': {'key': 'typeProperties.timeout', 'type': 'object'},
+        'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'},
+    }
+
+    def __init__(self, *, name: str, expression, activities, additional_properties=None, description: str=None, depends_on=None, user_properties=None, timeout=None, **kwargs) -> None:
+        super(UntilActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs)
+        self.expression = expression
+        self.timeout = timeout
+        self.activities = activities
+        self.type = 'Until'
+
+
+class UpdateIntegrationRuntimeNodeRequest(Model):
+    """Update integration runtime node request.
+
+    :param concurrent_jobs_limit: The number of concurrent jobs permitted to
+     run on the integration runtime node. Values between 1 and
+     maxConcurrentJobs (inclusive) are allowed.
+    :type concurrent_jobs_limit: int
+    """
+
+    _validation = {
+        'concurrent_jobs_limit': {'minimum': 1},
+    }
+
+    _attribute_map = {
+        'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'},
+    }
+
+    def __init__(self, *, concurrent_jobs_limit: int=None, **kwargs) -> None:
+        super(UpdateIntegrationRuntimeNodeRequest, self).__init__(**kwargs)
+        self.concurrent_jobs_limit = concurrent_jobs_limit
+
+
+class UpdateIntegrationRuntimeRequest(Model):
+    """Update integration runtime request.
+
+    :param auto_update: Enables or disables the auto-update feature of the
+     self-hosted integration runtime. See
+     https://go.microsoft.com/fwlink/?linkid=854189. Possible values include:
+     'On', 'Off'
+    :type auto_update: str or
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate
+    :param update_delay_offset: The time offset (in hours) in the day, e.g.,
+     PT03H is 3 hours. The integration runtime auto-update will happen at
+     that time.
+    :type update_delay_offset: str
+    """
+
+    _attribute_map = {
+        'auto_update': {'key': 'autoUpdate', 'type': 'str'},
+        'update_delay_offset': {'key': 'updateDelayOffset', 'type': 'str'},
+    }
+
+    def __init__(self, *, auto_update=None, update_delay_offset: str=None, **kwargs) -> None:
+        super(UpdateIntegrationRuntimeRequest, self).__init__(**kwargs)
+        self.auto_update = auto_update
+        self.update_delay_offset = update_delay_offset
+
+
+class UserAccessPolicy(Model):
+    """Get Data Plane read-only token request definition.
+
+    :param permissions: The string with permissions for Data Plane access.
+     Currently only 'r' is supported, which grants read-only access.
+    :type permissions: str
+    :param access_resource_path: The resource path to get access relative to
+     factory. Currently only empty string is supported, which corresponds to
+     the factory resource.
+    :type access_resource_path: str
+    :param profile_name: The name of the profile. Currently only the default
+     is supported. The default value is DefaultProfile.
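+
+# A short sketch of the two update-request models above: cap a self-hosted IR
+# node at 4 concurrent jobs, and schedule IR auto-update three hours into the
+# day (PT03H, per the update_delay_offset doc):
+from azure.mgmt.datafactory.models import (
+    UpdateIntegrationRuntimeNodeRequest, UpdateIntegrationRuntimeRequest)
+
+node_request = UpdateIntegrationRuntimeNodeRequest(concurrent_jobs_limit=4)
+ir_request = UpdateIntegrationRuntimeRequest(
+    auto_update='On', update_delay_offset='PT03H')
+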
+ :type profile_name: str + :param start_time: Start time for the token. If not specified the current + time will be used. + :type start_time: str + :param expire_time: Expiration time for the token. Maximum duration for + the token is eight hours and by default the token will expire in eight + hours. + :type expire_time: str + """ + + _attribute_map = { + 'permissions': {'key': 'permissions', 'type': 'str'}, + 'access_resource_path': {'key': 'accessResourcePath', 'type': 'str'}, + 'profile_name': {'key': 'profileName', 'type': 'str'}, + 'start_time': {'key': 'startTime', 'type': 'str'}, + 'expire_time': {'key': 'expireTime', 'type': 'str'}, + } + + def __init__(self, *, permissions: str=None, access_resource_path: str=None, profile_name: str=None, start_time: str=None, expire_time: str=None, **kwargs) -> None: + super(UserAccessPolicy, self).__init__(**kwargs) + self.permissions = permissions + self.access_resource_path = access_resource_path + self.profile_name = profile_name + self.start_time = start_time + self.expire_time = expire_time + + +class UserProperty(Model): + """User property. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. User property name. + :type name: str + :param value: Required. User property value. Type: string (or Expression + with resultType string). + :type value: object + """ + + _validation = { + 'name': {'required': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'object'}, + } + + def __init__(self, *, name: str, value, **kwargs) -> None: + super(UserProperty, self).__init__(**kwargs) + self.name = name + self.value = value + + +class ValidationActivity(ControlActivity): + """This activity verifies that an external resource exists. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param timeout: Specifies the timeout for the activity to run. If there is + no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 + week as default. Type: string (or Expression with resultType string), + pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type timeout: object + :param sleep: A delay in seconds between validation attempts. If no value + is specified, 10 seconds will be used as the default. Type: integer (or + Expression with resultType integer). + :type sleep: object + :param minimum_size: Can be used if dataset points to a file. The file + must be greater than or equal in size to the value specified. Type: + integer (or Expression with resultType integer). + :type minimum_size: object + :param child_items: Can be used if dataset points to a folder. If set to + true, the folder must have at least one file. If set to false, the folder + must be empty. Type: boolean (or Expression with resultType boolean). + :type child_items: object + :param dataset: Required. 
Validation activity dataset reference. + :type dataset: ~azure.mgmt.datafactory.models.DatasetReference + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'dataset': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'timeout': {'key': 'typeProperties.timeout', 'type': 'object'}, + 'sleep': {'key': 'typeProperties.sleep', 'type': 'object'}, + 'minimum_size': {'key': 'typeProperties.minimumSize', 'type': 'object'}, + 'child_items': {'key': 'typeProperties.childItems', 'type': 'object'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + } + + def __init__(self, *, name: str, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, timeout=None, sleep=None, minimum_size=None, child_items=None, **kwargs) -> None: + super(ValidationActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.timeout = timeout + self.sleep = sleep + self.minimum_size = minimum_size + self.child_items = child_items + self.dataset = dataset + self.type = 'Validation' + + +class VariableSpecification(Model): + """Definition of a single variable for a Pipeline. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Variable type. Possible values include: 'String', + 'Bool', 'Array' + :type type: str or ~azure.mgmt.datafactory.models.VariableType + :param default_value: Default value of variable. + :type default_value: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'default_value': {'key': 'defaultValue', 'type': 'object'}, + } + + def __init__(self, *, type, default_value=None, **kwargs) -> None: + super(VariableSpecification, self).__init__(**kwargs) + self.type = type + self.default_value = default_value + + +class VerticaLinkedService(LinkedService): + """Vertica linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. 
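+
+# A sketch of the ValidationActivity defined above: poll every 30 seconds, for
+# up to two hours, until the referenced file exists and is non-empty.
+# DatasetReference is assumed to be the models-package reference type taking
+# reference_name:
+from azure.mgmt.datafactory.models import DatasetReference, ValidationActivity
+
+validate = ValidationActivity(
+    name='WaitForLandingFile',
+    dataset=DatasetReference(reference_name='LandingFileDataset'),
+    timeout='0.02:00:00',
+    sleep=30,
+    minimum_size=1,
+)
+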
Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: + super(VerticaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.pwd = pwd + self.encrypted_credential = encrypted_credential + self.type = 'Vertica' + + +class VerticaSource(CopySource): + """A copy activity Vertica source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'VerticaSource' + + +class VerticaTableDataset(Dataset): + """Vertica dataset. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The table name of the Vertica. Type: string (or Expression + with resultType string). + :type table: object + :param vertica_table_dataset_schema: The schema name of the Vertica. Type: + string (or Expression with resultType string). + :type vertica_table_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'vertica_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, vertica_table_dataset_schema=None, **kwargs) -> None: + super(VerticaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.table = table + self.vertica_table_dataset_schema = vertica_table_dataset_schema + self.type = 'VerticaTable' + + +class WaitActivity(ControlActivity): + """This activity suspends pipeline execution for the specified interval. + + All required parameters must be populated in order to send to Azure. 
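+
+# A sketch of the VerticaTableDataset above, preferring the table/schema pair
+# over the to-be-retired tableName property. LinkedServiceReference is assumed
+# to be the models-package reference type taking reference_name:
+from azure.mgmt.datafactory.models import (
+    LinkedServiceReference, VerticaTableDataset)
+
+vertica_ds = VerticaTableDataset(
+    linked_service_name=LinkedServiceReference(reference_name='VerticaLS'),
+    vertica_table_dataset_schema='public',
+    table='events',
+)
+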
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param wait_time_in_seconds: Required. Duration in seconds. + :type wait_time_in_seconds: int + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'wait_time_in_seconds': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'wait_time_in_seconds': {'key': 'typeProperties.waitTimeInSeconds', 'type': 'int'}, + } + + def __init__(self, *, name: str, wait_time_in_seconds: int, additional_properties=None, description: str=None, depends_on=None, user_properties=None, **kwargs) -> None: + super(WaitActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.wait_time_in_seconds = wait_time_in_seconds + self.type = 'Wait' + + +class WebActivity(ExecutionActivity): + """Web activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param method: Required. Rest API method for target endpoint. Possible + values include: 'GET', 'POST', 'PUT', 'DELETE' + :type method: str or ~azure.mgmt.datafactory.models.WebActivityMethod + :param url: Required. Web activity target endpoint and path. Type: string + (or Expression with resultType string). + :type url: object + :param headers: Represents the headers that will be sent to the request. + For example, to set the language and type on a request: "headers" : { + "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: + string (or Expression with resultType string). + :type headers: object + :param body: Represents the payload that will be sent to the endpoint. + Required for POST/PUT method, not allowed for GET method Type: string (or + Expression with resultType string). 
+ :type body: object + :param authentication: Authentication method used for calling the + endpoint. + :type authentication: + ~azure.mgmt.datafactory.models.WebActivityAuthentication + :param datasets: List of datasets passed to web endpoint. + :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference] + :param linked_services: List of linked services passed to web endpoint. + :type linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'method': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'method': {'key': 'typeProperties.method', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, + 'body': {'key': 'typeProperties.body', 'type': 'object'}, + 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'}, + 'datasets': {'key': 'typeProperties.datasets', 'type': '[DatasetReference]'}, + 'linked_services': {'key': 'typeProperties.linkedServices', 'type': '[LinkedServiceReference]'}, + } + + def __init__(self, *, name: str, method, url, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, headers=None, body=None, authentication=None, datasets=None, linked_services=None, **kwargs) -> None: + super(WebActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.method = method + self.url = url + self.headers = headers + self.body = body + self.authentication = authentication + self.datasets = datasets + self.linked_services = linked_services + self.type = 'WebActivity' + + +class WebActivityAuthentication(Model): + """Web activity authentication properties. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Web activity authentication + (Basic/ClientCertificate/MSI) + :type type: str + :param pfx: Base64-encoded contents of a PFX file. + :type pfx: ~azure.mgmt.datafactory.models.SecureString + :param username: Web activity authentication user name for basic + authentication. + :type username: str + :param password: Password for the PFX file or basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecureString + :param resource: Resource for which Azure Auth token will be requested + when using MSI Authentication. 
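+
+# A sketch of the WebActivity defined above: a POST with JSON headers and an
+# MSI token scoped to Azure Resource Manager (URL and body are illustrative):
+from azure.mgmt.datafactory.models import (
+    WebActivity, WebActivityAuthentication)
+
+notify = WebActivity(
+    name='CallStatusEndpoint',
+    method='POST',
+    url='https://contoso.example/api/status',
+    headers={'Content-Type': 'application/json'},
+    body='{"state": "done"}',
+    authentication=WebActivityAuthentication(
+        type='MSI', resource='https://management.azure.com/'),
+)
+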
+ :type resource: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'pfx': {'key': 'pfx', 'type': 'SecureString'}, + 'username': {'key': 'username', 'type': 'str'}, + 'password': {'key': 'password', 'type': 'SecureString'}, + 'resource': {'key': 'resource', 'type': 'str'}, + } + + def __init__(self, *, type: str, pfx=None, username: str=None, password=None, resource: str=None, **kwargs) -> None: + super(WebActivityAuthentication, self).__init__(**kwargs) + self.type = type + self.pfx = pfx + self.username = username + self.password = password + self.resource = resource + + +class WebLinkedServiceTypeProperties(Model): + """Base definition of WebLinkedServiceTypeProperties, this typeProperties is + polymorphic based on authenticationType, so not flattened in SDK models. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: WebClientCertificateAuthentication, + WebBasicAuthentication, WebAnonymousAuthentication + + All required parameters must be populated in order to send to Azure. + + :param url: Required. The URL of the web service endpoint, e.g. + http://www.microsoft.com . Type: string (or Expression with resultType + string). + :type url: object + :param authentication_type: Required. Constant filled by server. + :type authentication_type: str + """ + + _validation = { + 'url': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'url': {'key': 'url', 'type': 'object'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + } + + _subtype_map = { + 'authentication_type': {'ClientCertificate': 'WebClientCertificateAuthentication', 'Basic': 'WebBasicAuthentication', 'Anonymous': 'WebAnonymousAuthentication'} + } + + def __init__(self, *, url, **kwargs) -> None: + super(WebLinkedServiceTypeProperties, self).__init__(**kwargs) + self.url = url + self.authentication_type = None + + +class WebAnonymousAuthentication(WebLinkedServiceTypeProperties): + """A WebLinkedService that uses anonymous authentication to communicate with + an HTTP endpoint. + + All required parameters must be populated in order to send to Azure. + + :param url: Required. The URL of the web service endpoint, e.g. + http://www.microsoft.com . Type: string (or Expression with resultType + string). + :type url: object + :param authentication_type: Required. Constant filled by server. + :type authentication_type: str + """ + + _validation = { + 'url': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'url': {'key': 'url', 'type': 'object'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + } + + def __init__(self, *, url, **kwargs) -> None: + super(WebAnonymousAuthentication, self).__init__(url=url, **kwargs) + self.authentication_type = 'Anonymous' + + +class WebBasicAuthentication(WebLinkedServiceTypeProperties): + """A WebLinkedService that uses basic authentication to communicate with an + HTTP endpoint. + + All required parameters must be populated in order to send to Azure. + + :param url: Required. The URL of the web service endpoint, e.g. + http://www.microsoft.com . Type: string (or Expression with resultType + string). + :type url: object + :param authentication_type: Required. Constant filled by server. + :type authentication_type: str + :param username: Required. User name for Basic authentication. Type: + string (or Expression with resultType string). 
+ :type username: object + :param password: Required. The password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + """ + + _validation = { + 'url': {'required': True}, + 'authentication_type': {'required': True}, + 'username': {'required': True}, + 'password': {'required': True}, + } + + _attribute_map = { + 'url': {'key': 'url', 'type': 'object'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'username': {'key': 'username', 'type': 'object'}, + 'password': {'key': 'password', 'type': 'SecretBase'}, + } + + def __init__(self, *, url, username, password, **kwargs) -> None: + super(WebBasicAuthentication, self).__init__(url=url, **kwargs) + self.username = username + self.password = password + self.authentication_type = 'Basic' + + +class WebClientCertificateAuthentication(WebLinkedServiceTypeProperties): + """A WebLinkedService that uses client certificate based authentication to + communicate with an HTTP endpoint. This scheme follows mutual + authentication; the server must also provide valid credentials to the + client. + + All required parameters must be populated in order to send to Azure. + + :param url: Required. The URL of the web service endpoint, e.g. + http://www.microsoft.com . Type: string (or Expression with resultType + string). + :type url: object + :param authentication_type: Required. Constant filled by server. + :type authentication_type: str + :param pfx: Required. Base64-encoded contents of a PFX file. + :type pfx: ~azure.mgmt.datafactory.models.SecretBase + :param password: Required. Password for the PFX file. + :type password: ~azure.mgmt.datafactory.models.SecretBase + """ + + _validation = { + 'url': {'required': True}, + 'authentication_type': {'required': True}, + 'pfx': {'required': True}, + 'password': {'required': True}, + } + + _attribute_map = { + 'url': {'key': 'url', 'type': 'object'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'pfx': {'key': 'pfx', 'type': 'SecretBase'}, + 'password': {'key': 'password', 'type': 'SecretBase'}, + } + + def __init__(self, *, url, pfx, password, **kwargs) -> None: + super(WebClientCertificateAuthentication, self).__init__(url=url, **kwargs) + self.pfx = pfx + self.password = password + self.authentication_type = 'ClientCertificate' + + +class WebHookActivity(ControlActivity): + """WebHook activity. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :ivar method: Required. Rest API method for target endpoint. Default + value: "POST" . + :vartype method: str + :param url: Required. WebHook activity target endpoint and path. Type: + string (or Expression with resultType string). + :type url: object + :param timeout: The timeout within which the webhook should be called + back. 
If there is no value specified, it defaults to 10 minutes. Type: + string. Pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type timeout: str + :param headers: Represents the headers that will be sent to the request. + For example, to set the language and type on a request: "headers" : { + "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: + string (or Expression with resultType string). + :type headers: object + :param body: Represents the payload that will be sent to the endpoint. + Required for POST/PUT method, not allowed for GET method Type: string (or + Expression with resultType string). + :type body: object + :param authentication: Authentication method used for calling the + endpoint. + :type authentication: + ~azure.mgmt.datafactory.models.WebActivityAuthentication + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'method': {'required': True, 'constant': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'method': {'key': 'typeProperties.method', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'timeout': {'key': 'typeProperties.timeout', 'type': 'str'}, + 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, + 'body': {'key': 'typeProperties.body', 'type': 'object'}, + 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'}, + } + + method = "POST" + + def __init__(self, *, name: str, url, additional_properties=None, description: str=None, depends_on=None, user_properties=None, timeout: str=None, headers=None, body=None, authentication=None, **kwargs) -> None: + super(WebHookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.url = url + self.timeout = timeout + self.headers = headers + self.body = body + self.authentication = authentication + self.type = 'WebHook' + + +class WebLinkedService(LinkedService): + """Web linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param type_properties: Required. Web linked service properties. 
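+
+# A sketch of the WebHookActivity defined above (its method is fixed to POST
+# by the model): call an endpoint and wait up to five minutes for the
+# callback; the URL and body are illustrative:
+from azure.mgmt.datafactory.models import WebHookActivity
+
+webhook = WebHookActivity(
+    name='AwaitApproval',
+    url='https://contoso.example/api/approve',
+    timeout='00:05:00',
+    body='{"runId": "@{pipeline().RunId}"}',
+)
+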
+class WebLinkedService(LinkedService):
+ """Web linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param type_properties: Required. Web linked service properties.
+ :type type_properties:
+ ~azure.mgmt.datafactory.models.WebLinkedServiceTypeProperties
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'type_properties': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'type_properties': {'key': 'typeProperties', 'type': 'WebLinkedServiceTypeProperties'},
+ }
+
+ def __init__(self, *, type_properties, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None:
+ super(WebLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+ self.type_properties = type_properties
+ self.type = 'Web'
+
+
+class WebSource(CopySource):
+ """A copy activity source for a web page table.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ }
+
+ def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None:
+ super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.type = 'WebSource'
+
+
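Editor's note: wiring the pieces together, a WebLinkedService simply carries one of the type-properties objects sketched earlier (illustrative, not part of the patch):

from azure.mgmt.datafactory.models import WebLinkedService

# basic_props is the WebBasicAuthentication object from the earlier sketch.
web_ls = WebLinkedService(type_properties=basic_props)

The authentication scheme is chosen purely by which WebLinkedServiceTypeProperties subclass is passed.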
+class WebTableDataset(Dataset):
+ """The dataset points to an HTML table in the web page.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset. Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param index: Required. The zero-based index of the table in the web page.
+ Type: integer (or Expression with resultType integer), minimum: 0.
+ :type index: object
+ :param path: The relative URL to the web page from the linked service URL.
+ Type: string (or Expression with resultType string).
+ :type path: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ 'index': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'index': {'key': 'typeProperties.index', 'type': 'object'},
+ 'path': {'key': 'typeProperties.path', 'type': 'object'},
+ }
+
+ def __init__(self, *, linked_service_name, index, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, path=None, **kwargs) -> None:
+ super(WebTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+ self.index = index
+ self.path = path
+ self.type = 'WebTable'
+
+
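Editor's note: a minimal sketch of the WebTableDataset above (not part of the patch; the linked-service name and path are invented):

from azure.mgmt.datafactory.models import LinkedServiceReference, WebTableDataset

table_ds = WebTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='MyWebLinkedService'),
    index=0,                # first HTML table on the page
    path='prices/today',    # relative to the linked service URL
)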
+class XeroLinkedService(LinkedService):
+ """Xero Service linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param host: Required. The endpoint of the Xero server (e.g.
+ api.xero.com).
+ :type host: object
+ :param consumer_key: The consumer key associated with the Xero
+ application.
+ :type consumer_key: ~azure.mgmt.datafactory.models.SecretBase
+ :param private_key: The private key from the .pem file that was generated
+ for your Xero private application. You must include all the text from the
+ .pem file, including the Unix line endings (\n).
+ :type private_key: ~azure.mgmt.datafactory.models.SecretBase
+ :param use_encrypted_endpoints: Specifies whether the data source
+ endpoints are encrypted using HTTPS. The default value is true.
+ :type use_encrypted_endpoints: object
+ :param use_host_verification: Specifies whether to require the host name
+ in the server's certificate to match the host name of the server when
+ connecting over SSL. The default value is true.
+ :type use_host_verification: object
+ :param use_peer_verification: Specifies whether to verify the identity of
+ the server when connecting over SSL. The default value is true.
+ :type use_peer_verification: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'host': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'host': {'key': 'typeProperties.host', 'type': 'object'},
+ 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'SecretBase'},
+ 'private_key': {'key': 'typeProperties.privateKey', 'type': 'SecretBase'},
+ 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
+ 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
+ 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, consumer_key=None, private_key=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None:
+ super(XeroLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+ self.host = host
+ self.consumer_key = consumer_key
+ self.private_key = private_key
+ self.use_encrypted_endpoints = use_encrypted_endpoints
+ self.use_host_verification = use_host_verification
+ self.use_peer_verification = use_peer_verification
+ self.encrypted_credential = encrypted_credential
+ self.type = 'Xero'
+
+
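Editor's note: a hedged sketch of the XeroLinkedService above (not part of the patch; all credential values are invented):

from azure.mgmt.datafactory.models import SecureString, XeroLinkedService

xero_ls = XeroLinkedService(
    host='api.xero.com',
    consumer_key=SecureString(value='<consumer key>'),
    # The full .pem text, Unix line endings included, goes in as one string.
    private_key=SecureString(value='<contents of the .pem file>'),
)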
+class XeroObjectDataset(Dataset):
+ """Xero Service dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset. Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param table_name: The table name. Type: string (or Expression with
+ resultType string).
+ :type table_name: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+ }
+
+ def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None:
+ super(XeroObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+ self.table_name = table_name
+ self.type = 'XeroObject'
+
+
+class XeroSource(CopySource):
+ """A copy activity Xero Service source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param query: A query to retrieve data from the source. Type: string (or
+ Expression with resultType string).
+ :type query: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'query': {'key': 'query', 'type': 'object'},
+ }
+
+ def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
+ super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.query = query
+ self.type = 'XeroSource'
+
+
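Editor's note: a sketch of how a source such as XeroSource slots into a copy activity (not part of the patch; the dataset names are invented and BlobSink stands in for any supported sink):

from azure.mgmt.datafactory.models import (
    BlobSink,
    CopyActivity,
    DatasetReference,
    XeroSource,
)

copy = CopyActivity(
    name='CopyXeroContacts',
    source=XeroSource(query='SELECT * FROM Contacts'),  # illustrative query
    sink=BlobSink(),
    inputs=[DatasetReference(reference_name='XeroContactsDataset')],
    outputs=[DatasetReference(reference_name='ContactsBlobDataset')],
)

The same pattern applies to ZohoSource and the other CopySource subclasses below.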
+class ZohoLinkedService(LinkedService):
+ """Zoho server linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param endpoint: Required. The endpoint of the Zoho server (e.g.
+ crm.zoho.com/crm/private).
+ :type endpoint: object
+ :param access_token: The access token for Zoho authentication.
+ :type access_token: ~azure.mgmt.datafactory.models.SecretBase
+ :param use_encrypted_endpoints: Specifies whether the data source
+ endpoints are encrypted using HTTPS. The default value is true.
+ :type use_encrypted_endpoints: object
+ :param use_host_verification: Specifies whether to require the host name
+ in the server's certificate to match the host name of the server when
+ connecting over SSL. The default value is true.
+ :type use_host_verification: object
+ :param use_peer_verification: Specifies whether to verify the identity of
+ the server when connecting over SSL. The default value is true.
+ :type use_peer_verification: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'endpoint': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'},
+ 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'},
+ 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
+ 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
+ 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, *, endpoint, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None:
+ super(ZohoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+ self.endpoint = endpoint
+ self.access_token = access_token
+ self.use_encrypted_endpoints = use_encrypted_endpoints
+ self.use_host_verification = use_host_verification
+ self.use_peer_verification = use_peer_verification
+ self.encrypted_credential = encrypted_credential
+ self.type = 'Zoho'
+
+
+class ZohoObjectDataset(Dataset):
+ """Zoho server dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset. Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param table_name: The table name. Type: string (or Expression with
+ resultType string).
+ :type table_name: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+ }
+
+ def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None:
+ super(ZohoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+ self.table_name = table_name
+ self.type = 'ZohoObject'
+
+
+class ZohoSource(CopySource):
+ """A copy activity Zoho server source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param query: A query to retrieve data from the source. Type: string (or
+ Expression with resultType string).
+ :type query: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'query': {'key': 'query', 'type': 'object'},
+ }
+
+ def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
+ super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.query = query
+ self.type = 'ZohoSource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_paged_models.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_paged_models.py
new file mode 100644
index 000000000000..4092d2143a7c
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_paged_models.py
@@ -0,0 +1,118 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.paging import Paged
+
+
+class OperationPaged(Paged):
+ """
+ A paging container for iterating over a list of :class:`Operation <azure.mgmt.datafactory.models.Operation>` objects
+ """
+
+ _attribute_map = {
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ 'current_page': {'key': 'value', 'type': '[Operation]'}
+ }
+
+ def __init__(self, *args, **kwargs):
+
+ super(OperationPaged, self).__init__(*args, **kwargs)
+class FactoryPaged(Paged):
+ """
+ A paging container for iterating over a list of :class:`Factory <azure.mgmt.datafactory.models.Factory>` objects
+ """
+
+ _attribute_map = {
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ 'current_page': {'key': 'value', 'type': '[Factory]'}
+ }
+
+ def __init__(self, *args, **kwargs):
+
+ super(FactoryPaged, self).__init__(*args, **kwargs)
+class IntegrationRuntimeResourcePaged(Paged):
+ """
+ A paging container for iterating over a list of :class:`IntegrationRuntimeResource <azure.mgmt.datafactory.models.IntegrationRuntimeResource>` objects
+ """
+
+ _attribute_map = {
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ 'current_page': {'key': 'value', 'type': '[IntegrationRuntimeResource]'}
+ }
+
+ def __init__(self, *args, **kwargs):
+
+ super(IntegrationRuntimeResourcePaged, self).__init__(*args, **kwargs)
+class LinkedServiceResourcePaged(Paged):
+ """
+ A paging container for iterating over a list of :class:`LinkedServiceResource <azure.mgmt.datafactory.models.LinkedServiceResource>` objects
+ """
+
+ _attribute_map = {
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ 'current_page': {'key': 'value', 'type': '[LinkedServiceResource]'}
+ }
+
+ def __init__(self, *args, **kwargs):
+
+ super(LinkedServiceResourcePaged, self).__init__(*args, **kwargs)
+class DatasetResourcePaged(Paged):
+ """
+ A paging container for iterating over a list of :class:`DatasetResource <azure.mgmt.datafactory.models.DatasetResource>` objects
+ """
+
+ _attribute_map = {
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ 'current_page': {'key': 'value', 'type': '[DatasetResource]'}
+ }
+
+ def __init__(self, *args, **kwargs):
+
+ super(DatasetResourcePaged, self).__init__(*args, **kwargs)
+class PipelineResourcePaged(Paged):
+ """
+ A paging container for iterating over a list of :class:`PipelineResource <azure.mgmt.datafactory.models.PipelineResource>` objects
+ """
+
+ _attribute_map = {
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ 'current_page': {'key': 'value', 'type': '[PipelineResource]'}
+ }
+
+ def __init__(self, *args, **kwargs):
+
+ super(PipelineResourcePaged, self).__init__(*args, **kwargs)
+class TriggerResourcePaged(Paged):
+ """
+ A paging container for iterating over a list of :class:`TriggerResource <azure.mgmt.datafactory.models.TriggerResource>` objects
+ """
+
+ _attribute_map = {
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ 'current_page': {'key': 'value', 'type': '[TriggerResource]'}
+ }
+
+ def __init__(self, *args, **kwargs):
+
+ super(TriggerResourcePaged, self).__init__(*args, **kwargs)
+class RerunTriggerResourcePaged(Paged):
+ """
+ A paging container for iterating over a list of :class:`RerunTriggerResource <azure.mgmt.datafactory.models.RerunTriggerResource>` objects
+ """
+
+ _attribute_map = {
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ 'current_page': {'key': 'value', 'type': '[RerunTriggerResource]'}
+ }
+
+ def __init__(self, *args, **kwargs):
+
+ super(RerunTriggerResourcePaged, self).__init__(*args, **kwargs)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response.py
deleted file mode 100644
index 033d0fd9591f..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response.py
+++ /dev/null
@@ -1,36 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class AccessPolicyResponse(Model):
- """Get Data Plane read only token response definition.
-
- :param policy: The user access policy.
- :type policy: ~azure.mgmt.datafactory.models.UserAccessPolicy
- :param access_token: Data Plane read only access token.
- :type access_token: str
- :param data_plane_url: Data Plane service base URL.
- :type data_plane_url: str - """ - - _attribute_map = { - 'policy': {'key': 'policy', 'type': 'UserAccessPolicy'}, - 'access_token': {'key': 'accessToken', 'type': 'str'}, - 'data_plane_url': {'key': 'dataPlaneUrl', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(AccessPolicyResponse, self).__init__(**kwargs) - self.policy = kwargs.get('policy', None) - self.access_token = kwargs.get('access_token', None) - self.data_plane_url = kwargs.get('data_plane_url', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response_py3.py deleted file mode 100644 index 2932f547ff26..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response_py3.py +++ /dev/null @@ -1,36 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class AccessPolicyResponse(Model): - """Get Data Plane read only token response definition. - - :param policy: The user access policy. - :type policy: ~azure.mgmt.datafactory.models.UserAccessPolicy - :param access_token: Data Plane read only access token. - :type access_token: str - :param data_plane_url: Data Plane service base URL. - :type data_plane_url: str - """ - - _attribute_map = { - 'policy': {'key': 'policy', 'type': 'UserAccessPolicy'}, - 'access_token': {'key': 'accessToken', 'type': 'str'}, - 'data_plane_url': {'key': 'dataPlaneUrl', 'type': 'str'}, - } - - def __init__(self, *, policy=None, access_token: str=None, data_plane_url: str=None, **kwargs) -> None: - super(AccessPolicyResponse, self).__init__(**kwargs) - self.policy = policy - self.access_token = access_token - self.data_plane_url = data_plane_url diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity.py deleted file mode 100644 index 72d920f1d04c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity.py +++ /dev/null @@ -1,63 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class Activity(Model): - """A pipeline activity. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ExecutionActivity, ControlActivity - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'Execution': 'ExecutionActivity', 'Container': 'ControlActivity'} - } - - def __init__(self, **kwargs): - super(Activity, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.name = kwargs.get('name', None) - self.description = kwargs.get('description', None) - self.depends_on = kwargs.get('depends_on', None) - self.user_properties = kwargs.get('user_properties', None) - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency.py deleted file mode 100644 index a15b34acc24f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class ActivityDependency(Model): - """Activity dependency information. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param activity: Required. Activity name. - :type activity: str - :param dependency_conditions: Required. Match-Condition for the - dependency. 
- :type dependency_conditions: list[str or - ~azure.mgmt.datafactory.models.DependencyCondition] - """ - - _validation = { - 'activity': {'required': True}, - 'dependency_conditions': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'activity': {'key': 'activity', 'type': 'str'}, - 'dependency_conditions': {'key': 'dependencyConditions', 'type': '[str]'}, - } - - def __init__(self, **kwargs): - super(ActivityDependency, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.activity = kwargs.get('activity', None) - self.dependency_conditions = kwargs.get('dependency_conditions', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency_py3.py deleted file mode 100644 index 2883a81a0adc..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency_py3.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class ActivityDependency(Model): - """Activity dependency information. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param activity: Required. Activity name. - :type activity: str - :param dependency_conditions: Required. Match-Condition for the - dependency. - :type dependency_conditions: list[str or - ~azure.mgmt.datafactory.models.DependencyCondition] - """ - - _validation = { - 'activity': {'required': True}, - 'dependency_conditions': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'activity': {'key': 'activity', 'type': 'str'}, - 'dependency_conditions': {'key': 'dependencyConditions', 'type': '[str]'}, - } - - def __init__(self, *, activity: str, dependency_conditions, additional_properties=None, **kwargs) -> None: - super(ActivityDependency, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.activity = activity - self.dependency_conditions = dependency_conditions diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy.py deleted file mode 100644 index 4475cdbd9bea..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy.py +++ /dev/null @@ -1,59 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. 
-# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class ActivityPolicy(Model): - """Execution policy for an activity. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param timeout: Specifies the timeout for the activity to run. The default - timeout is 7 days. Type: string (or Expression with resultType string), - pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type timeout: object - :param retry: Maximum ordinary retry attempts. Default is 0. Type: integer - (or Expression with resultType integer), minimum: 0. - :type retry: object - :param retry_interval_in_seconds: Interval between each retry attempt (in - seconds). The default is 30 sec. - :type retry_interval_in_seconds: int - :param secure_input: When set to true, Input from activity is considered - as secure and will not be logged to monitoring. - :type secure_input: bool - :param secure_output: When set to true, Output from activity is considered - as secure and will not be logged to monitoring. - :type secure_output: bool - """ - - _validation = { - 'retry_interval_in_seconds': {'maximum': 86400, 'minimum': 30}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'timeout': {'key': 'timeout', 'type': 'object'}, - 'retry': {'key': 'retry', 'type': 'object'}, - 'retry_interval_in_seconds': {'key': 'retryIntervalInSeconds', 'type': 'int'}, - 'secure_input': {'key': 'secureInput', 'type': 'bool'}, - 'secure_output': {'key': 'secureOutput', 'type': 'bool'}, - } - - def __init__(self, **kwargs): - super(ActivityPolicy, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.timeout = kwargs.get('timeout', None) - self.retry = kwargs.get('retry', None) - self.retry_interval_in_seconds = kwargs.get('retry_interval_in_seconds', None) - self.secure_input = kwargs.get('secure_input', None) - self.secure_output = kwargs.get('secure_output', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy_py3.py deleted file mode 100644 index 52d469679974..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy_py3.py +++ /dev/null @@ -1,59 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class ActivityPolicy(Model): - """Execution policy for an activity. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param timeout: Specifies the timeout for the activity to run. The default - timeout is 7 days. 
Type: string (or Expression with resultType string), - pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type timeout: object - :param retry: Maximum ordinary retry attempts. Default is 0. Type: integer - (or Expression with resultType integer), minimum: 0. - :type retry: object - :param retry_interval_in_seconds: Interval between each retry attempt (in - seconds). The default is 30 sec. - :type retry_interval_in_seconds: int - :param secure_input: When set to true, Input from activity is considered - as secure and will not be logged to monitoring. - :type secure_input: bool - :param secure_output: When set to true, Output from activity is considered - as secure and will not be logged to monitoring. - :type secure_output: bool - """ - - _validation = { - 'retry_interval_in_seconds': {'maximum': 86400, 'minimum': 30}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'timeout': {'key': 'timeout', 'type': 'object'}, - 'retry': {'key': 'retry', 'type': 'object'}, - 'retry_interval_in_seconds': {'key': 'retryIntervalInSeconds', 'type': 'int'}, - 'secure_input': {'key': 'secureInput', 'type': 'bool'}, - 'secure_output': {'key': 'secureOutput', 'type': 'bool'}, - } - - def __init__(self, *, additional_properties=None, timeout=None, retry=None, retry_interval_in_seconds: int=None, secure_input: bool=None, secure_output: bool=None, **kwargs) -> None: - super(ActivityPolicy, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.timeout = timeout - self.retry = retry - self.retry_interval_in_seconds = retry_interval_in_seconds - self.secure_input = secure_input - self.secure_output = secure_output diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_py3.py deleted file mode 100644 index b5997c9352e1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_py3.py +++ /dev/null @@ -1,63 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class Activity(Model): - """A pipeline activity. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ExecutionActivity, ControlActivity - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'Execution': 'ExecutionActivity', 'Container': 'ControlActivity'} - } - - def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, **kwargs) -> None: - super(Activity, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.name = name - self.description = description - self.depends_on = depends_on - self.user_properties = user_properties - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run.py deleted file mode 100644 index 901ffe23cd4e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run.py +++ /dev/null @@ -1,102 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class ActivityRun(Model): - """Information about an activity run in a pipeline. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar pipeline_name: The name of the pipeline. - :vartype pipeline_name: str - :ivar pipeline_run_id: The id of the pipeline run. - :vartype pipeline_run_id: str - :ivar activity_name: The name of the activity. - :vartype activity_name: str - :ivar activity_type: The type of the activity. - :vartype activity_type: str - :ivar activity_run_id: The id of the activity run. - :vartype activity_run_id: str - :ivar linked_service_name: The name of the compute linked service. - :vartype linked_service_name: str - :ivar status: The status of the activity run. - :vartype status: str - :ivar activity_run_start: The start time of the activity run in 'ISO 8601' - format. - :vartype activity_run_start: datetime - :ivar activity_run_end: The end time of the activity run in 'ISO 8601' - format. - :vartype activity_run_end: datetime - :ivar duration_in_ms: The duration of the activity run. - :vartype duration_in_ms: int - :ivar input: The input for the activity. - :vartype input: object - :ivar output: The output for the activity. - :vartype output: object - :ivar error: The error if any from the activity run. 
- :vartype error: object - """ - - _validation = { - 'pipeline_name': {'readonly': True}, - 'pipeline_run_id': {'readonly': True}, - 'activity_name': {'readonly': True}, - 'activity_type': {'readonly': True}, - 'activity_run_id': {'readonly': True}, - 'linked_service_name': {'readonly': True}, - 'status': {'readonly': True}, - 'activity_run_start': {'readonly': True}, - 'activity_run_end': {'readonly': True}, - 'duration_in_ms': {'readonly': True}, - 'input': {'readonly': True}, - 'output': {'readonly': True}, - 'error': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, - 'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'}, - 'activity_name': {'key': 'activityName', 'type': 'str'}, - 'activity_type': {'key': 'activityType', 'type': 'str'}, - 'activity_run_id': {'key': 'activityRunId', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'activity_run_start': {'key': 'activityRunStart', 'type': 'iso-8601'}, - 'activity_run_end': {'key': 'activityRunEnd', 'type': 'iso-8601'}, - 'duration_in_ms': {'key': 'durationInMs', 'type': 'int'}, - 'input': {'key': 'input', 'type': 'object'}, - 'output': {'key': 'output', 'type': 'object'}, - 'error': {'key': 'error', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ActivityRun, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.pipeline_name = None - self.pipeline_run_id = None - self.activity_name = None - self.activity_type = None - self.activity_run_id = None - self.linked_service_name = None - self.status = None - self.activity_run_start = None - self.activity_run_end = None - self.duration_in_ms = None - self.input = None - self.output = None - self.error = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run_py3.py deleted file mode 100644 index 488e822de957..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run_py3.py +++ /dev/null @@ -1,102 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class ActivityRun(Model): - """Information about an activity run in a pipeline. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar pipeline_name: The name of the pipeline. - :vartype pipeline_name: str - :ivar pipeline_run_id: The id of the pipeline run. - :vartype pipeline_run_id: str - :ivar activity_name: The name of the activity. - :vartype activity_name: str - :ivar activity_type: The type of the activity. - :vartype activity_type: str - :ivar activity_run_id: The id of the activity run. 
- :vartype activity_run_id: str - :ivar linked_service_name: The name of the compute linked service. - :vartype linked_service_name: str - :ivar status: The status of the activity run. - :vartype status: str - :ivar activity_run_start: The start time of the activity run in 'ISO 8601' - format. - :vartype activity_run_start: datetime - :ivar activity_run_end: The end time of the activity run in 'ISO 8601' - format. - :vartype activity_run_end: datetime - :ivar duration_in_ms: The duration of the activity run. - :vartype duration_in_ms: int - :ivar input: The input for the activity. - :vartype input: object - :ivar output: The output for the activity. - :vartype output: object - :ivar error: The error if any from the activity run. - :vartype error: object - """ - - _validation = { - 'pipeline_name': {'readonly': True}, - 'pipeline_run_id': {'readonly': True}, - 'activity_name': {'readonly': True}, - 'activity_type': {'readonly': True}, - 'activity_run_id': {'readonly': True}, - 'linked_service_name': {'readonly': True}, - 'status': {'readonly': True}, - 'activity_run_start': {'readonly': True}, - 'activity_run_end': {'readonly': True}, - 'duration_in_ms': {'readonly': True}, - 'input': {'readonly': True}, - 'output': {'readonly': True}, - 'error': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, - 'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'}, - 'activity_name': {'key': 'activityName', 'type': 'str'}, - 'activity_type': {'key': 'activityType', 'type': 'str'}, - 'activity_run_id': {'key': 'activityRunId', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'activity_run_start': {'key': 'activityRunStart', 'type': 'iso-8601'}, - 'activity_run_end': {'key': 'activityRunEnd', 'type': 'iso-8601'}, - 'duration_in_ms': {'key': 'durationInMs', 'type': 'int'}, - 'input': {'key': 'input', 'type': 'object'}, - 'output': {'key': 'output', 'type': 'object'}, - 'error': {'key': 'error', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(ActivityRun, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.pipeline_name = None - self.pipeline_run_id = None - self.activity_name = None - self.activity_type = None - self.activity_run_id = None - self.linked_service_name = None - self.status = None - self.activity_run_start = None - self.activity_run_end = None - self.duration_in_ms = None - self.input = None - self.output = None - self.error = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response.py deleted file mode 100644 index 2fcd25a5ced2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response.py +++ /dev/null @@ -1,39 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class ActivityRunsQueryResponse(Model): - """A list activity runs. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of activity runs. - :type value: list[~azure.mgmt.datafactory.models.ActivityRun] - :param continuation_token: The continuation token for getting the next - page of results, if any remaining results exist, null otherwise. - :type continuation_token: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[ActivityRun]'}, - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(ActivityRunsQueryResponse, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.continuation_token = kwargs.get('continuation_token', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response_py3.py deleted file mode 100644 index ee3eae141635..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response_py3.py +++ /dev/null @@ -1,39 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class ActivityRunsQueryResponse(Model): - """A list activity runs. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of activity runs. - :type value: list[~azure.mgmt.datafactory.models.ActivityRun] - :param continuation_token: The continuation token for getting the next - page of results, if any remaining results exist, null otherwise. - :type continuation_token: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[ActivityRun]'}, - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, - } - - def __init__(self, *, value, continuation_token: str=None, **kwargs) -> None: - super(ActivityRunsQueryResponse, self).__init__(**kwargs) - self.value = value - self.continuation_token = continuation_token diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service.py deleted file mode 100644 index b1e5ed533bba..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service.py +++ /dev/null @@ -1,106 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. 
-# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class AmazonMWSLinkedService(LinkedService): - """Amazon Marketplace Web Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param endpoint: Required. The endpoint of the Amazon MWS server, (i.e. - mws.amazonservices.com) - :type endpoint: object - :param marketplace_id: Required. The Amazon Marketplace ID you want to - retrieve data from. To retrieve data from multiple Marketplace IDs, - separate them with a comma (,). (i.e. A2EUQ1WTGCTBG2) - :type marketplace_id: object - :param seller_id: Required. The Amazon seller ID. - :type seller_id: object - :param mws_auth_token: The Amazon MWS authentication token. - :type mws_auth_token: ~azure.mgmt.datafactory.models.SecretBase - :param access_key_id: Required. The access key id used to access data. - :type access_key_id: object - :param secret_key: The secret key used to access data. - :type secret_key: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'marketplace_id': {'required': True}, - 'seller_id': {'required': True}, - 'access_key_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'marketplace_id': {'key': 'typeProperties.marketplaceID', 'type': 'object'}, - 'seller_id': {'key': 'typeProperties.sellerID', 'type': 'object'}, - 'mws_auth_token': {'key': 'typeProperties.mwsAuthToken', 'type': 'SecretBase'}, - 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, - 'secret_key': {'key': 'typeProperties.secretKey', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AmazonMWSLinkedService, self).__init__(**kwargs) - self.endpoint = kwargs.get('endpoint', None) - self.marketplace_id = kwargs.get('marketplace_id', None) - self.seller_id = kwargs.get('seller_id', None) - self.mws_auth_token = kwargs.get('mws_auth_token', None) - self.access_key_id = kwargs.get('access_key_id', None) - self.secret_key = kwargs.get('secret_key', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AmazonMWS' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service_py3.py deleted file mode 100644 index a8db63933154..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service_py3.py +++ /dev/null @@ -1,106 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AmazonMWSLinkedService(LinkedService): - """Amazon Marketplace Web Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. 
- :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param endpoint: Required. The endpoint of the Amazon MWS server, (i.e. - mws.amazonservices.com) - :type endpoint: object - :param marketplace_id: Required. The Amazon Marketplace ID you want to - retrieve data from. To retrieve data from multiple Marketplace IDs, - separate them with a comma (,). (i.e. A2EUQ1WTGCTBG2) - :type marketplace_id: object - :param seller_id: Required. The Amazon seller ID. - :type seller_id: object - :param mws_auth_token: The Amazon MWS authentication token. - :type mws_auth_token: ~azure.mgmt.datafactory.models.SecretBase - :param access_key_id: Required. The access key id used to access data. - :type access_key_id: object - :param secret_key: The secret key used to access data. - :type secret_key: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'marketplace_id': {'required': True}, - 'seller_id': {'required': True}, - 'access_key_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'marketplace_id': {'key': 'typeProperties.marketplaceID', 'type': 'object'}, - 'seller_id': {'key': 'typeProperties.sellerID', 'type': 'object'}, - 'mws_auth_token': {'key': 'typeProperties.mwsAuthToken', 'type': 'SecretBase'}, - 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, - 'secret_key': {'key': 'typeProperties.secretKey', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, endpoint, marketplace_id, seller_id, access_key_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, mws_auth_token=None, secret_key=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(AmazonMWSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.endpoint = endpoint - self.marketplace_id = marketplace_id - self.seller_id = seller_id - self.mws_auth_token = mws_auth_token - self.access_key_id = access_key_id - self.secret_key = secret_key - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'AmazonMWS' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset.py deleted file mode 100644 index 9885f5c77d8c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class AmazonMWSObjectDataset(Dataset): - """Amazon Marketplace Web Service dataset. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AmazonMWSObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'AmazonMWSObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset_py3.py deleted file mode 100644 index 015ed9401c15..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class AmazonMWSObjectDataset(Dataset): - """Amazon Marketplace Web Service dataset. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(AmazonMWSObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'AmazonMWSObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source.py deleted file mode 100644 index f9d034e610d4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class AmazonMWSSource(CopySource): - """A copy activity Amazon Marketplace Web Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AmazonMWSSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'AmazonMWSSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source_py3.py deleted file mode 100644 index 9ef7f5b30244..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class AmazonMWSSource(CopySource): - """A copy activity Amazon Marketplace Web Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. 
Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(AmazonMWSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'AmazonMWSSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service.py deleted file mode 100644 index 4272b28c13f5..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service.py +++ /dev/null @@ -1,86 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class AmazonRedshiftLinkedService(LinkedService): - """Linked service for Amazon Redshift. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param server: Required. The name of the Amazon Redshift server. Type: - string (or Expression with resultType string). - :type server: object - :param username: The username of the Amazon Redshift source. Type: string - (or Expression with resultType string). - :type username: object - :param password: The password of the Amazon Redshift source. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param database: Required. The database name of the Amazon Redshift - source. 
Type: string (or Expression with resultType string). - :type database: object - :param port: The TCP port number that the Amazon Redshift server uses to - listen for client connections. The default value is 5439. Type: integer - (or Expression with resultType integer). - :type port: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'database': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AmazonRedshiftLinkedService, self).__init__(**kwargs) - self.server = kwargs.get('server', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.database = kwargs.get('database', None) - self.port = kwargs.get('port', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AmazonRedshift' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service_py3.py deleted file mode 100644 index 3b84583c6c86..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service_py3.py +++ /dev/null @@ -1,86 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AmazonRedshiftLinkedService(LinkedService): - """Linked service for Amazon Redshift. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param server: Required. The name of the Amazon Redshift server. Type: - string (or Expression with resultType string). - :type server: object - :param username: The username of the Amazon Redshift source. Type: string - (or Expression with resultType string). - :type username: object - :param password: The password of the Amazon Redshift source. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param database: Required. The database name of the Amazon Redshift - source. Type: string (or Expression with resultType string). - :type database: object - :param port: The TCP port number that the Amazon Redshift server uses to - listen for client connections. The default value is 5439. Type: integer - (or Expression with resultType integer). - :type port: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'database': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, server, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, username=None, password=None, port=None, encrypted_credential=None, **kwargs) -> None: - super(AmazonRedshiftLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.server = server - self.username = username - self.password = password - self.database = database - self.port = port - self.encrypted_credential = encrypted_credential - self.type = 'AmazonRedshift' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source.py deleted file mode 100644 index d4fdfa4aa2ba..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source.py +++ /dev/null @@ -1,65 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class AmazonRedshiftSource(CopySource): - """A copy activity source for Amazon Redshift Source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - :param redshift_unload_settings: The Amazon S3 settings needed for the - interim Amazon S3 when copying from Amazon Redshift with unload. With - this, data from Amazon Redshift source will be unloaded into S3 first and - then copied into the targeted sink from the interim S3. - :type redshift_unload_settings: - ~azure.mgmt.datafactory.models.RedshiftUnloadSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, - } - - def __init__(self, **kwargs): - super(AmazonRedshiftSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.redshift_unload_settings = kwargs.get('redshift_unload_settings', None) - self.type = 'AmazonRedshiftSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source_py3.py deleted file mode 100644 index 9b34b2ef5b97..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source_py3.py +++ /dev/null @@ -1,65 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class AmazonRedshiftSource(CopySource): - """A copy activity source for Amazon Redshift Source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - :param redshift_unload_settings: The Amazon S3 settings needed for the - interim Amazon S3 when copying from Amazon Redshift with unload. With - this, data from Amazon Redshift source will be unloaded into S3 first and - then copied into the targeted sink from the interim S3. - :type redshift_unload_settings: - ~azure.mgmt.datafactory.models.RedshiftUnloadSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, redshift_unload_settings=None, **kwargs) -> None: - super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.redshift_unload_settings = redshift_unload_settings - self.type = 'AmazonRedshiftSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset.py deleted file mode 100644 index e91a5ba26131..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset.py +++ /dev/null @@ -1,107 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class AmazonS3Dataset(Dataset): - """A single Amazon Simple Storage Service (S3) object or a set of S3 objects. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param bucket_name: Required. The name of the Amazon S3 bucket. Type: - string (or Expression with resultType string). - :type bucket_name: object - :param key: The key of the Amazon S3 object. Type: string (or Expression - with resultType string). - :type key: object - :param prefix: The prefix filter for the S3 object name. Type: string (or - Expression with resultType string). - :type prefix: object - :param version: The version for the S3 object. Type: string (or Expression - with resultType string). - :type version: object - :param modified_datetime_start: The start of S3 object's modified - datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of S3 object's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_end: object - :param format: The format of files. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used for the Amazon S3 - object. 
- :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'bucket_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'bucket_name': {'key': 'typeProperties.bucketName', 'type': 'object'}, - 'key': {'key': 'typeProperties.key', 'type': 'object'}, - 'prefix': {'key': 'typeProperties.prefix', 'type': 'object'}, - 'version': {'key': 'typeProperties.version', 'type': 'object'}, - 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, **kwargs): - super(AmazonS3Dataset, self).__init__(**kwargs) - self.bucket_name = kwargs.get('bucket_name', None) - self.key = kwargs.get('key', None) - self.prefix = kwargs.get('prefix', None) - self.version = kwargs.get('version', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) - self.format = kwargs.get('format', None) - self.compression = kwargs.get('compression', None) - self.type = 'AmazonS3Object' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset_py3.py deleted file mode 100644 index d84ae48b2a46..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset_py3.py +++ /dev/null @@ -1,107 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class AmazonS3Dataset(Dataset): - """A single Amazon Simple Storage Service (S3) object or a set of S3 objects. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. 
Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param bucket_name: Required. The name of the Amazon S3 bucket. Type: - string (or Expression with resultType string). - :type bucket_name: object - :param key: The key of the Amazon S3 object. Type: string (or Expression - with resultType string). - :type key: object - :param prefix: The prefix filter for the S3 object name. Type: string (or - Expression with resultType string). - :type prefix: object - :param version: The version for the S3 object. Type: string (or Expression - with resultType string). - :type version: object - :param modified_datetime_start: The start of S3 object's modified - datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of S3 object's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_end: object - :param format: The format of files. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used for the Amazon S3 - object. 
- :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'bucket_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'bucket_name': {'key': 'typeProperties.bucketName', 'type': 'object'}, - 'key': {'key': 'typeProperties.key', 'type': 'object'}, - 'prefix': {'key': 'typeProperties.prefix', 'type': 'object'}, - 'version': {'key': 'typeProperties.version', 'type': 'object'}, - 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, *, linked_service_name, bucket_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, key=None, prefix=None, version=None, modified_datetime_start=None, modified_datetime_end=None, format=None, compression=None, **kwargs) -> None: - super(AmazonS3Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.bucket_name = bucket_name - self.key = key - self.prefix = prefix - self.version = version - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end - self.format = format - self.compression = compression - self.type = 'AmazonS3Object' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service.py deleted file mode 100644 index 250518c1a7ec..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service.py +++ /dev/null @@ -1,77 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class AmazonS3LinkedService(LinkedService): - """Linked service for Amazon S3. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param access_key_id: The access key identifier of the Amazon S3 Identity - and Access Management (IAM) user. Type: string (or Expression with - resultType string). - :type access_key_id: object - :param secret_access_key: The secret access key of the Amazon S3 Identity - and Access Management (IAM) user. - :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase - :param service_url: This value specifies the endpoint to access with the - S3 Connector. This is an optional property; change it only if you want to - try a different service endpoint or want to switch between https and http. - Type: string (or Expression with resultType string). - :type service_url: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, - 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, - 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AmazonS3LinkedService, self).__init__(**kwargs) - self.access_key_id = kwargs.get('access_key_id', None) - self.secret_access_key = kwargs.get('secret_access_key', None) - self.service_url = kwargs.get('service_url', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AmazonS3' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service_py3.py deleted file mode 100644 index 8d136bb71fc0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service_py3.py +++ /dev/null @@ -1,77 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. 
-# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AmazonS3LinkedService(LinkedService): - """Linked service for Amazon S3. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param access_key_id: The access key identifier of the Amazon S3 Identity - and Access Management (IAM) user. Type: string (or Expression with - resultType string). - :type access_key_id: object - :param secret_access_key: The secret access key of the Amazon S3 Identity - and Access Management (IAM) user. - :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase - :param service_url: This value specifies the endpoint to access with the - S3 Connector. This is an optional property; change it only if you want to - try a different service endpoint or want to switch between https and http. - Type: string (or Expression with resultType string). - :type service_url: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, - 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, - 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_key_id=None, secret_access_key=None, service_url=None, encrypted_credential=None, **kwargs) -> None: - super(AmazonS3LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.access_key_id = access_key_id - self.secret_access_key = secret_access_key - self.service_url = service_url - self.encrypted_credential = encrypted_credential - self.type = 'AmazonS3' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location.py deleted file mode 100644 index 74c77a16f0f2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location.py +++ /dev/null @@ -1,55 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_location import DatasetLocation - - -class AmazonS3Location(DatasetLocation): - """The location of amazon S3 dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). - :type file_name: object - :param bucket_name: Specify the bucketName of amazon S3. Type: string (or - Expression with resultType string) - :type bucket_name: object - :param version: Specify the version of amazon S3. Type: string (or - Expression with resultType string). 
- :type version: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - 'bucket_name': {'key': 'bucketName', 'type': 'object'}, - 'version': {'key': 'version', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AmazonS3Location, self).__init__(**kwargs) - self.bucket_name = kwargs.get('bucket_name', None) - self.version = kwargs.get('version', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location_py3.py deleted file mode 100644 index 36afce341ada..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location_py3.py +++ /dev/null @@ -1,55 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_location_py3 import DatasetLocation - - -class AmazonS3Location(DatasetLocation): - """The location of amazon S3 dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). - :type file_name: object - :param bucket_name: Specify the bucketName of amazon S3. Type: string (or - Expression with resultType string) - :type bucket_name: object - :param version: Specify the version of amazon S3. Type: string (or - Expression with resultType string). 
- :type version: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - 'bucket_name': {'key': 'bucketName', 'type': 'object'}, - 'version': {'key': 'version', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, bucket_name=None, version=None, **kwargs) -> None: - super(AmazonS3Location, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) - self.bucket_name = bucket_name - self.version = version diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings.py deleted file mode 100644 index e83910136070..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings.py +++ /dev/null @@ -1,78 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .store_read_settings import StoreReadSettings - - -class AmazonS3ReadSettings(StoreReadSettings): - """Azure data lake store read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - :param wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or - Expression with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or - Expression with resultType string). - :type wildcard_file_name: object - :param prefix: The prefix filter for the S3 object name. Type: string (or - Expression with resultType string). - :type prefix: object - :param enable_partition_discovery: Indicates whether to enable partition - discovery. - :type enable_partition_discovery: bool - :param modified_datetime_start: The start of file's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: - string (or Expression with resultType string). 
- :type modified_datetime_end: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'prefix': {'key': 'prefix', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AmazonS3ReadSettings, self).__init__(**kwargs) - self.recursive = kwargs.get('recursive', None) - self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) - self.wildcard_file_name = kwargs.get('wildcard_file_name', None) - self.prefix = kwargs.get('prefix', None) - self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings_py3.py deleted file mode 100644 index 79645a869ac8..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings_py3.py +++ /dev/null @@ -1,78 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .store_read_settings_py3 import StoreReadSettings - - -class AmazonS3ReadSettings(StoreReadSettings): - """Azure data lake store read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - :param wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or - Expression with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or - Expression with resultType string). - :type wildcard_file_name: object - :param prefix: The prefix filter for the S3 object name. Type: string (or - Expression with resultType string). 
- :type prefix: object - :param enable_partition_discovery: Indicates whether to enable partition - discovery. - :type enable_partition_discovery: bool - :param modified_datetime_start: The start of file's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: - string (or Expression with resultType string). - :type modified_datetime_end: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'prefix': {'key': 'prefix', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: - super(AmazonS3ReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.recursive = recursive - self.wildcard_folder_path = wildcard_folder_path - self.wildcard_file_name = wildcard_file_name - self.prefix = prefix - self.enable_partition_discovery = enable_partition_discovery - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity.py deleted file mode 100644 index 36a25e959061..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity.py +++ /dev/null @@ -1,60 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .control_activity import ControlActivity - - -class AppendVariableActivity(ControlActivity): - """Append value for a Variable of type Array. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. 
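The AmazonS3ReadSettings model deleted above drives wildcard- and modified-time-based file selection; a minimal sketch, assuming the `df_models` alias from the first sketch and placeholder paths:

    # 'type' is the required read-setting discriminator on this model.
    s3_read_settings = df_models.AmazonS3ReadSettings(
        type='AmazonS3ReadSettings',
        recursive=True,                           # default is true
        wildcard_folder_path='raw/2019/*',        # placeholder folder pattern
        wildcard_file_name='*.avro',              # placeholder file pattern
        modified_datetime_start='2019-01-01T00:00:00Z',
        modified_datetime_end='2019-06-01T00:00:00Z',
    )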
- :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param variable_name: Name of the variable whose value needs to be - appended to. - :type variable_name: str - :param value: Value to be appended. Could be a static value or Expression - :type value: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, - 'value': {'key': 'typeProperties.value', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AppendVariableActivity, self).__init__(**kwargs) - self.variable_name = kwargs.get('variable_name', None) - self.value = kwargs.get('value', None) - self.type = 'AppendVariable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity_py3.py deleted file mode 100644 index 4526a6e4a45e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity_py3.py +++ /dev/null @@ -1,60 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .control_activity_py3 import ControlActivity - - -class AppendVariableActivity(ControlActivity): - """Append value for a Variable of type Array. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param variable_name: Name of the variable whose value needs to be - appended to. - :type variable_name: str - :param value: Value to be appended. 
Could be a static value or Expression - :type value: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, - 'value': {'key': 'typeProperties.value', 'type': 'object'}, - } - - def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, variable_name: str=None, value=None, **kwargs) -> None: - super(AppendVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.variable_name = variable_name - self.value = value - self.type = 'AppendVariable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset.py deleted file mode 100644 index d206ac99ab85..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset.py +++ /dev/null @@ -1,83 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class AvroDataset(Dataset): - """Avro dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param location: Required. The location of the avro storage. 
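A minimal sketch of the AppendVariableActivity removed above, again with the `df_models` alias and placeholder names:

    # variable_name must refer to a pipeline variable of type Array.
    append_run_id = df_models.AppendVariableActivity(
        name='AppendRunId',
        variable_name='processedRunIds',
        value='@pipeline().RunId',  # a static value or an expression
    )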
- :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param avro_compression_codec: Possible values include: 'none', 'deflate', - 'snappy', 'xz', 'bzip2' - :type avro_compression_codec: str or - ~azure.mgmt.datafactory.models.AvroCompressionCodec - :param avro_compression_level: - :type avro_compression_level: int - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'location': {'required': True}, - 'avro_compression_level': {'maximum': 9, 'minimum': 1}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'}, - 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, - } - - def __init__(self, **kwargs): - super(AvroDataset, self).__init__(**kwargs) - self.location = kwargs.get('location', None) - self.avro_compression_codec = kwargs.get('avro_compression_codec', None) - self.avro_compression_level = kwargs.get('avro_compression_level', None) - self.type = 'Avro' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset_py3.py deleted file mode 100644 index f0f44dbbd786..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset_py3.py +++ /dev/null @@ -1,83 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class AvroDataset(Dataset): - """Avro dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param location: Required. The location of the avro storage. - :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param avro_compression_codec: Possible values include: 'none', 'deflate', - 'snappy', 'xz', 'bzip2' - :type avro_compression_codec: str or - ~azure.mgmt.datafactory.models.AvroCompressionCodec - :param avro_compression_level: - :type avro_compression_level: int - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'location': {'required': True}, - 'avro_compression_level': {'maximum': 9, 'minimum': 1}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'}, - 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, - } - - def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, avro_compression_codec=None, avro_compression_level: int=None, **kwargs) -> None: - super(AvroDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.location = location - self.avro_compression_codec = avro_compression_codec - self.avro_compression_level = avro_compression_level - self.type = 'Avro' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format.py deleted file mode 100644 index f0346a76080c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_storage_format import DatasetStorageFormat - - -class AvroFormat(DatasetStorageFormat): - """The data stored in Avro format. 
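The AvroDataset removed above composes a required DatasetLocation (here the AmazonS3Location also deleted in this patch) with optional compression settings; a hedged sketch with placeholder names:

    avro_ds = df_models.AvroDataset(
        linked_service_name=df_models.LinkedServiceReference(
            reference_name='S3LinkedService'),    # placeholder reference
        location=df_models.AmazonS3Location(
            type='AmazonS3Location',
            bucket_name='my-bucket',
            folder_path='curated/avro'),
        avro_compression_codec='deflate',
        avro_compression_level=5,                 # valid range is 1-9 per the _validation map
    )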
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param serializer: Serializer. Type: string (or Expression with resultType - string). - :type serializer: object - :param deserializer: Deserializer. Type: string (or Expression with - resultType string). - :type deserializer: object - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(AvroFormat, self).__init__(**kwargs) - self.type = 'AvroFormat' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format_py3.py deleted file mode 100644 index 35d459c4b2a6..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format_py3.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_storage_format_py3 import DatasetStorageFormat - - -class AvroFormat(DatasetStorageFormat): - """The data stored in Avro format. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param serializer: Serializer. Type: string (or Expression with resultType - string). - :type serializer: object - :param deserializer: Deserializer. Type: string (or Expression with - resultType string). - :type deserializer: object - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, **kwargs) -> None: - super(AvroFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) - self.type = 'AvroFormat' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink.py deleted file mode 100644 index 34d4ceb1e0f6..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink import CopySink - - -class AvroSink(CopySink): - """A copy activity Avro sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: Avro store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - :param format_settings: Avro format settings. 
- :type format_settings: ~azure.mgmt.datafactory.models.AvroWriteSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'}, - } - - def __init__(self, **kwargs): - super(AvroSink, self).__init__(**kwargs) - self.store_settings = kwargs.get('store_settings', None) - self.format_settings = kwargs.get('format_settings', None) - self.type = 'AvroSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink_py3.py deleted file mode 100644 index 16363092dff2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink_py3.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class AvroSink(CopySink): - """A copy activity Avro sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: Avro store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - :param format_settings: Avro format settings. 
- :type format_settings: ~azure.mgmt.datafactory.models.AvroWriteSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: - super(AvroSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.store_settings = store_settings - self.format_settings = format_settings - self.type = 'AvroSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source.py deleted file mode 100644 index 3ea2e7a2a76f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source.py +++ /dev/null @@ -1,56 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class AvroSource(CopySource): - """A copy activity Avro source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: Avro store settings. 
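A minimal sketch pairing the AvroSink removed above with the AvroWriteSettings model deleted further down in this patch (record names are placeholders):

    avro_sink = df_models.AvroSink(
        write_batch_size=10000,
        format_settings=df_models.AvroWriteSettings(
            type='AvroWriteSettings',
            record_name='RootRecord',             # top-level record name required by the Avro spec
            record_namespace='com.example.export',
        ),
    )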
- :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - } - - def __init__(self, **kwargs): - super(AvroSource, self).__init__(**kwargs) - self.store_settings = kwargs.get('store_settings', None) - self.type = 'AvroSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source_py3.py deleted file mode 100644 index 74b5e6db0fe2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source_py3.py +++ /dev/null @@ -1,56 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class AvroSource(CopySource): - """A copy activity Avro source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: Avro store settings. 
- :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: - super(AvroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.store_settings = store_settings - self.type = 'AvroSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings.py deleted file mode 100644 index ec068ee29885..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .format_write_settings import FormatWriteSettings - - -class AvroWriteSettings(FormatWriteSettings): - """Avro write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str - :param record_name: Top level record name in write result, which is - required in AVRO spec. - :type record_name: str - :param record_namespace: Record namespace in the write result. 
- :type record_namespace: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'record_name': {'key': 'recordName', 'type': 'str'}, - 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(AvroWriteSettings, self).__init__(**kwargs) - self.record_name = kwargs.get('record_name', None) - self.record_namespace = kwargs.get('record_namespace', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings_py3.py deleted file mode 100644 index d14ebc4d1d29..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings_py3.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .format_write_settings_py3 import FormatWriteSettings - - -class AvroWriteSettings(FormatWriteSettings): - """Avro write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str - :param record_name: Top level record name in write result, which is - required in AVRO spec. - :type record_name: str - :param record_namespace: Record namespace in the write result. - :type record_namespace: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'record_name': {'key': 'recordName', 'type': 'str'}, - 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, - } - - def __init__(self, *, type: str, additional_properties=None, record_name: str=None, record_namespace: str=None, **kwargs) -> None: - super(AvroWriteSettings, self).__init__(additional_properties=additional_properties, type=type, **kwargs) - self.record_name = record_name - self.record_namespace = record_namespace diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service.py deleted file mode 100644 index 986023308e23..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
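Taken together, the Avro source, sink, and write settings above were typically wired into a copy activity along these lines — CopyActivity and DatasetReference are assumed from the same models package (they are not shown in this hunk), and all names are placeholders:

    copy_to_avro = df_models.CopyActivity(
        name='CopyToAvro',
        source=df_models.AvroSource(),            # store_settings is optional
        sink=avro_sink,                           # from the previous sketch
        inputs=[df_models.DatasetReference(reference_name='SourceDataset')],
        outputs=[df_models.DatasetReference(reference_name='AvroOutputDataset')],
    )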
-# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class AzureBatchLinkedService(LinkedService): - """Azure Batch linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param account_name: Required. The Azure Batch account name. Type: string - (or Expression with resultType string). - :type account_name: object - :param access_key: The Azure Batch account access key. - :type access_key: ~azure.mgmt.datafactory.models.SecretBase - :param batch_uri: Required. The Azure Batch URI. Type: string (or - Expression with resultType string). - :type batch_uri: object - :param pool_name: Required. The Azure Batch pool name. Type: string (or - Expression with resultType string). - :type pool_name: object - :param linked_service_name: Required. The Azure Storage linked service - reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'account_name': {'required': True}, - 'batch_uri': {'required': True}, - 'pool_name': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, - 'access_key': {'key': 'typeProperties.accessKey', 'type': 'SecretBase'}, - 'batch_uri': {'key': 'typeProperties.batchUri', 'type': 'object'}, - 'pool_name': {'key': 'typeProperties.poolName', 'type': 'object'}, - 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureBatchLinkedService, self).__init__(**kwargs) - self.account_name = kwargs.get('account_name', None) - self.access_key = kwargs.get('access_key', None) - self.batch_uri = kwargs.get('batch_uri', None) - self.pool_name = kwargs.get('pool_name', None) - self.linked_service_name = kwargs.get('linked_service_name', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AzureBatch' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service_py3.py deleted file mode 100644 index e7d33dfb342a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service_py3.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzureBatchLinkedService(LinkedService): - """Azure Batch linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param account_name: Required. The Azure Batch account name. Type: string - (or Expression with resultType string). 
- :type account_name: object - :param access_key: The Azure Batch account access key. - :type access_key: ~azure.mgmt.datafactory.models.SecretBase - :param batch_uri: Required. The Azure Batch URI. Type: string (or - Expression with resultType string). - :type batch_uri: object - :param pool_name: Required. The Azure Batch pool name. Type: string (or - Expression with resultType string). - :type pool_name: object - :param linked_service_name: Required. The Azure Storage linked service - reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'account_name': {'required': True}, - 'batch_uri': {'required': True}, - 'pool_name': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, - 'access_key': {'key': 'typeProperties.accessKey', 'type': 'SecretBase'}, - 'batch_uri': {'key': 'typeProperties.batchUri', 'type': 'object'}, - 'pool_name': {'key': 'typeProperties.poolName', 'type': 'object'}, - 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, account_name, batch_uri, pool_name, linked_service_name, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_key=None, encrypted_credential=None, **kwargs) -> None: - super(AzureBatchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.account_name = account_name - self.access_key = access_key - self.batch_uri = batch_uri - self.pool_name = pool_name - self.linked_service_name = linked_service_name - self.encrypted_credential = encrypted_credential - self.type = 'AzureBatch' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset.py deleted file mode 100644 index 01814cf8f9a9..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset.py +++ /dev/null @@ -1,100 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
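A minimal sketch of the AzureBatchLinkedService removed above; every parameter marked required in its _validation map is supplied, with placeholder values:

    batch_linked_service = df_models.AzureBatchLinkedService(
        account_name='mybatchaccount',
        batch_uri='https://mybatchaccount.westus2.batch.azure.com',
        pool_name='adf-pool',
        linked_service_name=df_models.LinkedServiceReference(
            reference_name='StorageLinkedService'),  # the Azure Storage linked service
        access_key=df_models.SecureString(value='<batch-account-key>'),
    )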
-# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class AzureBlobDataset(Dataset): - """The Azure Blob storage. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param folder_path: The path of the Azure Blob storage. Type: string (or - Expression with resultType string). - :type folder_path: object - :param table_root_location: The root of blob path. Type: string (or - Expression with resultType string). - :type table_root_location: object - :param file_name: The name of the Azure Blob. Type: string (or Expression - with resultType string). - :type file_name: object - :param modified_datetime_start: The start of Azure Blob's modified - datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of Azure Blob's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_end: object - :param format: The format of the Azure Blob storage. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used for the blob storage. 
- :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'table_root_location': {'key': 'typeProperties.tableRootLocation', 'type': 'object'}, - 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, - 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, **kwargs): - super(AzureBlobDataset, self).__init__(**kwargs) - self.folder_path = kwargs.get('folder_path', None) - self.table_root_location = kwargs.get('table_root_location', None) - self.file_name = kwargs.get('file_name', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) - self.format = kwargs.get('format', None) - self.compression = kwargs.get('compression', None) - self.type = 'AzureBlob' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset_py3.py deleted file mode 100644 index 706c39deb289..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset_py3.py +++ /dev/null @@ -1,100 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class AzureBlobDataset(Dataset): - """The Azure Blob storage. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. 
Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param folder_path: The path of the Azure Blob storage. Type: string (or - Expression with resultType string). - :type folder_path: object - :param table_root_location: The root of blob path. Type: string (or - Expression with resultType string). - :type table_root_location: object - :param file_name: The name of the Azure Blob. Type: string (or Expression - with resultType string). - :type file_name: object - :param modified_datetime_start: The start of Azure Blob's modified - datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of Azure Blob's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_end: object - :param format: The format of the Azure Blob storage. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used for the blob storage. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'table_root_location': {'key': 'typeProperties.tableRootLocation', 'type': 'object'}, - 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, - 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, table_root_location=None, file_name=None, modified_datetime_start=None, modified_datetime_end=None, format=None, compression=None, **kwargs) -> None: - super(AzureBlobDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.folder_path = folder_path - self.table_root_location = 
table_root_location - self.file_name = file_name - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end - self.format = format - self.compression = compression - self.type = 'AzureBlob' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset.py deleted file mode 100644 index 0ef62ff7122f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset.py +++ /dev/null @@ -1,85 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class AzureBlobFSDataset(Dataset): - """The Azure Data Lake Storage Gen2 storage. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param folder_path: The path of the Azure Data Lake Storage Gen2 storage. - Type: string (or Expression with resultType string). - :type folder_path: object - :param file_name: The name of the Azure Data Lake Storage Gen2. Type: - string (or Expression with resultType string). - :type file_name: object - :param format: The format of the Azure Data Lake Storage Gen2 storage. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used for the blob storage. 
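A quick usage sketch for the AzureBlobDataset model whose Python 3 constructor appears above; the linked-service reference, paths, and delimiter are illustrative assumptions.

```python
from azure.mgmt.datafactory.models import (
    AzureBlobDataset, LinkedServiceReference, TextFormat)

# Only linked_service_name is required; path, file and format are optional.
blob_ds = AzureBlobDataset(
    linked_service_name=LinkedServiceReference(reference_name='BlobStorageLS'),
    folder_path='mycontainer/input',   # placeholder container/folder
    file_name='data.csv',
    format=TextFormat(column_delimiter=','),
)
```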
- :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, **kwargs): - super(AzureBlobFSDataset, self).__init__(**kwargs) - self.folder_path = kwargs.get('folder_path', None) - self.file_name = kwargs.get('file_name', None) - self.format = kwargs.get('format', None) - self.compression = kwargs.get('compression', None) - self.type = 'AzureBlobFSFile' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset_py3.py deleted file mode 100644 index 82136a683fd3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset_py3.py +++ /dev/null @@ -1,85 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class AzureBlobFSDataset(Dataset): - """The Azure Data Lake Storage Gen2 storage. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. 
If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param folder_path: The path of the Azure Data Lake Storage Gen2 storage. - Type: string (or Expression with resultType string). - :type folder_path: object - :param file_name: The name of the Azure Data Lake Storage Gen2. Type: - string (or Expression with resultType string). - :type file_name: object - :param format: The format of the Azure Data Lake Storage Gen2 storage. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used for the blob storage. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, format=None, compression=None, **kwargs) -> None: - super(AzureBlobFSDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.folder_path = folder_path - self.file_name = file_name - self.format = format - self.compression = compression - self.type = 'AzureBlobFSFile' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service.py deleted file mode 100644 index 262ce976227b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service.py +++ /dev/null @@ -1,86 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class AzureBlobFSLinkedService(LinkedService): - """Azure Data Lake Storage Gen2 linked service. - - All required parameters must be populated in order to send to Azure. 
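The ADLS Gen2 dataset added by this PR mirrors the blob dataset. A minimal sketch with placeholder names, based on the constructor above:

```python
from azure.mgmt.datafactory.models import (
    AzureBlobFSDataset, LinkedServiceReference)

adls_ds = AzureBlobFSDataset(
    linked_service_name=LinkedServiceReference(reference_name='AdlsGen2LS'),
    folder_path='myfilesystem/raw/events',  # placeholder file system/folder
    file_name='events.json',
)
```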
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param url: Required. Endpoint for the Azure Data Lake Storage Gen2 - service. Type: string (or Expression with resultType string). - :type url: object - :param account_key: Account key for the Azure Data Lake Storage Gen2 - service. Type: string (or Expression with resultType string). - :type account_key: object - :param service_principal_id: The ID of the application used to - authenticate against the Azure Data Lake Storage Gen2 account. Type: - string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The Key of the application used to - authenticate against the Azure Data Lake Storage Gen2 account. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal - belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureBlobFSLinkedService, self).__init__(**kwargs) - self.url = kwargs.get('url', None) - self.account_key = kwargs.get('account_key', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AzureBlobFS' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service_py3.py deleted file mode 100644 index f0d555078bf7..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service_py3.py +++ /dev/null @@ -1,86 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzureBlobFSLinkedService(LinkedService): - """Azure Data Lake Storage Gen2 linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param url: Required. Endpoint for the Azure Data Lake Storage Gen2 - service. Type: string (or Expression with resultType string). - :type url: object - :param account_key: Account key for the Azure Data Lake Storage Gen2 - service. 
Type: string (or Expression with resultType string). - :type account_key: object - :param service_principal_id: The ID of the application used to - authenticate against the Azure Data Lake Storage Gen2 account. Type: - string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The Key of the application used to - authenticate against the Azure Data Lake Storage Gen2 account. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal - belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, account_key=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: - super(AzureBlobFSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.url = url - self.account_key = account_key - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.encrypted_credential = encrypted_credential - self.type = 'AzureBlobFS' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location.py deleted file mode 100644 index c21525bbac4c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location.py +++ /dev/null @@ -1,50 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
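A sketch of one of the two authentication options the linked service above supports: a service principal (id, key, and tenant); for key auth you would pass account_key instead. The endpoint and credential values are placeholders.

```python
from azure.mgmt.datafactory.models import (
    AzureBlobFSLinkedService, SecureString)

# Service-principal variant; only url is required by the model.
adls_ls = AzureBlobFSLinkedService(
    url='https://myaccount.dfs.core.windows.net',   # placeholder account
    service_principal_id='<application-client-id>',
    service_principal_key=SecureString(value='<client-secret>'),
    tenant='<tenant-id>',
)
```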
-# -------------------------------------------------------------------------- - -from .dataset_location import DatasetLocation - - -class AzureBlobFSLocation(DatasetLocation): - """The location of azure blobFS dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). - :type file_name: object - :param file_system: Specify the fileSystem of azure blobFS. Type: string - (or Expression with resultType string). - :type file_system: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - 'file_system': {'key': 'fileSystem', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureBlobFSLocation, self).__init__(**kwargs) - self.file_system = kwargs.get('file_system', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location_py3.py deleted file mode 100644 index afbae52fdeb0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location_py3.py +++ /dev/null @@ -1,50 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_location_py3 import DatasetLocation - - -class AzureBlobFSLocation(DatasetLocation): - """The location of azure blobFS dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). - :type file_name: object - :param file_system: Specify the fileSystem of azure blobFS. Type: string - (or Expression with resultType string). 
- :type file_system: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - 'file_system': {'key': 'fileSystem', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, file_system=None, **kwargs) -> None: - super(AzureBlobFSLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) - self.file_system = file_system diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings.py deleted file mode 100644 index 6d80ce72ea57..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .store_read_settings import StoreReadSettings - - -class AzureBlobFSReadSettings(StoreReadSettings): - """Azure blobFS read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - :param wildcard_folder_path: Azure blobFS wildcardFolderPath. Type: string - (or Expression with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or - Expression with resultType string). - :type wildcard_file_name: object - :param enable_partition_discovery: Indicates whether to enable partition - discovery. - :type enable_partition_discovery: bool - :param modified_datetime_start: The start of file's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: - string (or Expression with resultType string). 
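Unlike most models in this patch, the DatasetLocation subtypes take `type` as an explicit required string rather than a server-filled constant. A sketch, assuming 'AzureBlobFSLocation' is the service-side type name and using placeholder paths:

```python
from azure.mgmt.datafactory.models import AzureBlobFSLocation

location = AzureBlobFSLocation(
    type='AzureBlobFSLocation',   # assumed service type name
    file_system='myfilesystem',   # placeholder
    folder_path='raw/events',
    file_name='events.json',
)
```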
- :type modified_datetime_end: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureBlobFSReadSettings, self).__init__(**kwargs) - self.recursive = kwargs.get('recursive', None) - self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) - self.wildcard_file_name = kwargs.get('wildcard_file_name', None) - self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings_py3.py deleted file mode 100644 index af4746e84f8e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings_py3.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .store_read_settings_py3 import StoreReadSettings - - -class AzureBlobFSReadSettings(StoreReadSettings): - """Azure blobFS read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - :param wildcard_folder_path: Azure blobFS wildcardFolderPath. Type: string - (or Expression with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or - Expression with resultType string). - :type wildcard_file_name: object - :param enable_partition_discovery: Indicates whether to enable partition - discovery. - :type enable_partition_discovery: bool - :param modified_datetime_start: The start of file's modified datetime. 
- Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: - string (or Expression with resultType string). - :type modified_datetime_end: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: - super(AzureBlobFSReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.recursive = recursive - self.wildcard_folder_path = wildcard_folder_path - self.wildcard_file_name = wildcard_file_name - self.enable_partition_discovery = enable_partition_discovery - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py deleted file mode 100644 index a47b173c6581..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py +++ /dev/null @@ -1,65 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink import CopySink - - -class AzureBlobFSSink(CopySink): - """A copy activity Azure Data Lake Storage Gen2 sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. 
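A sketch of the read settings above, e.g. for wildcard-based file selection; the `type` string and the paths are assumptions.

```python
from azure.mgmt.datafactory.models import AzureBlobFSReadSettings

read_settings = AzureBlobFSReadSettings(
    type='AzureBlobFSReadSettings',     # assumed service type name
    recursive=True,
    wildcard_folder_path='raw/2019/*',  # placeholder pattern
    wildcard_file_name='*.json',
)
```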
Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureBlobFSSink, self).__init__(**kwargs) - self.copy_behavior = kwargs.get('copy_behavior', None) - self.type = 'AzureBlobFSSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py deleted file mode 100644 index e2b28bf30a8c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py +++ /dev/null @@ -1,65 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class AzureBlobFSSink(CopySink): - """A copy activity Azure Data Lake Storage Gen2 sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. 
- :type type: str - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: - super(AzureBlobFSSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.copy_behavior = copy_behavior - self.type = 'AzureBlobFSSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source.py deleted file mode 100644 index 0252ffd5ba8f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source.py +++ /dev/null @@ -1,68 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class AzureBlobFSSource(CopySource): - """A copy activity Azure BlobFS source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param treat_empty_as_null: Treat empty as null. Type: boolean (or - Expression with resultType boolean). - :type treat_empty_as_null: object - :param skip_header_line_count: Number of header lines to skip from each - blob. Type: integer (or Expression with resultType integer). 
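The sink's only subtype-specific knob is copy_behavior. A sketch, assuming the 'PreserveHierarchy' value from the SDK's copy-behavior enum:

```python
from azure.mgmt.datafactory.models import AzureBlobFSSink

sink = AzureBlobFSSink(
    copy_behavior='PreserveHierarchy',  # keep the source folder structure
    max_concurrent_connections=4,
)
```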
- :type skip_header_line_count: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, - 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureBlobFSSource, self).__init__(**kwargs) - self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None) - self.skip_header_line_count = kwargs.get('skip_header_line_count', None) - self.recursive = kwargs.get('recursive', None) - self.type = 'AzureBlobFSSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source_py3.py deleted file mode 100644 index 5b512c1f334f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source_py3.py +++ /dev/null @@ -1,68 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class AzureBlobFSSource(CopySource): - """A copy activity Azure BlobFS source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param treat_empty_as_null: Treat empty as null. Type: boolean (or - Expression with resultType boolean). - :type treat_empty_as_null: object - :param skip_header_line_count: Number of header lines to skip from each - blob. Type: integer (or Expression with resultType integer). - :type skip_header_line_count: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). 
- :type recursive: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, - 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, treat_empty_as_null=None, skip_header_line_count=None, recursive=None, **kwargs) -> None: - super(AzureBlobFSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.treat_empty_as_null = treat_empty_as_null - self.skip_header_line_count = skip_header_line_count - self.recursive = recursive - self.type = 'AzureBlobFSSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings.py deleted file mode 100644 index 5de93c10a1f8..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .store_write_settings import StoreWriteSettings - - -class AzureBlobFSWriteSettings(StoreWriteSettings): - """Azure blobFS write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param copy_behavior: The type of copy behavior for copy sink. 
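And the matching source; every parameter is optional, so a bare AzureBlobFSSource() is also valid. A sketch:

```python
from azure.mgmt.datafactory.models import AzureBlobFSSource

source = AzureBlobFSSource(
    recursive=True,            # walk the folder tree
    treat_empty_as_null=True,
    skip_header_line_count=1,  # skip one header line per file
)
```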
- :type copy_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureBlobFSWriteSettings, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings_py3.py deleted file mode 100644 index be0008fd8733..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings_py3.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .store_write_settings_py3 import StoreWriteSettings - - -class AzureBlobFSWriteSettings(StoreWriteSettings): - """Azure blobFS write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: - super(AzureBlobFSWriteSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service.py deleted file mode 100644 index 5246e02ab9b4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service.py +++ /dev/null @@ -1,104 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. 
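Write settings follow the same pattern as read settings, with an explicit required `type` string. A sketch with an assumed type name and placeholder values:

```python
from azure.mgmt.datafactory.models import AzureBlobFSWriteSettings

write_settings = AzureBlobFSWriteSettings(
    type='AzureBlobFSWriteSettings',  # assumed service type name
    copy_behavior='FlattenHierarchy',
    max_concurrent_connections=8,
)
```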
-# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class AzureBlobStorageLinkedService(LinkedService): - """The azure blob storage linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: The connection string. It is mutually exclusive - with sasUri, serviceEndpoint property. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param account_key: The Azure key vault secret reference of accountKey in - connection string. - :type account_key: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure Blob Storage resource. It is mutually - exclusive with connectionString, serviceEndpoint property. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type sas_uri: object - :param sas_token: The Azure key vault secret reference of sasToken in sas - uri. - :type sas_token: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param service_endpoint: Blob service endpoint of the Azure Blob Storage - resource. It is mutually exclusive with connectionString, sasUri property. - :type service_endpoint: str - :param service_principal_id: The ID of the service principal used to - authenticate against Azure SQL Data Warehouse. Type: string (or Expression - with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to - authenticate against Azure SQL Data Warehouse. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal - belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, - 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, - 'service_endpoint': {'key': 'typeProperties.serviceEndpoint', 'type': 'str'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(AzureBlobStorageLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.account_key = kwargs.get('account_key', None) - self.sas_uri = kwargs.get('sas_uri', None) - self.sas_token = kwargs.get('sas_token', None) - self.service_endpoint = kwargs.get('service_endpoint', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AzureBlobStorage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service_py3.py deleted file mode 100644 index ba0a511532b4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service_py3.py +++ /dev/null @@ -1,104 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzureBlobStorageLinkedService(LinkedService): - """The azure blob storage linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
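For context (not part of the patch): a sketch of the SAS-based variant of the AzureBlobStorageLinkedService removed above, assuming the AzureKeyVaultSecretReference and LinkedServiceReference models from the same package; the account URL, linked-service name, and secret name are placeholders. Per the docstring, sasUri is mutually exclusive with connectionString and serviceEndpoint.

from azure.mgmt.datafactory.models import (
    AzureBlobStorageLinkedService,
    AzureKeyVaultSecretReference,
    LinkedServiceReference,
)

# The SAS token itself is resolved from Key Vault at runtime; only the
# base URI is stored in the linked service definition.
blob_sas_ls = AzureBlobStorageLinkedService(
    sas_uri='https://myaccount.blob.core.windows.net/',  # placeholder
    sas_token=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='MyKeyVaultLS'),  # placeholder
        secret_name='blob-sas-token',  # placeholder
    ),
)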
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: The connection string. It is mutually exclusive - with sasUri, serviceEndpoint property. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param account_key: The Azure key vault secret reference of accountKey in - connection string. - :type account_key: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure Blob Storage resource. It is mutually - exclusive with connectionString, serviceEndpoint property. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type sas_uri: object - :param sas_token: The Azure key vault secret reference of sasToken in sas - uri. - :type sas_token: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param service_endpoint: Blob service endpoint of the Azure Blob Storage - resource. It is mutually exclusive with connectionString, sasUri property. - :type service_endpoint: str - :param service_principal_id: The ID of the service principal used to - authenticate against Azure SQL Data Warehouse. Type: string (or Expression - with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to - authenticate against Azure SQL Data Warehouse. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal - belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, - 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, - 'service_endpoint': {'key': 'typeProperties.serviceEndpoint', 'type': 'str'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, account_key=None, sas_uri=None, sas_token=None, service_endpoint: str=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential: str=None, **kwargs) -> None: - super(AzureBlobStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.account_key = account_key - self.sas_uri = sas_uri - self.sas_token = sas_token - self.service_endpoint = service_endpoint - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.encrypted_credential = encrypted_credential - self.type = 'AzureBlobStorage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location.py deleted file mode 100644 index 1efbbeaec352..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location.py +++ /dev/null @@ -1,50 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_location import DatasetLocation - - -class AzureBlobStorageLocation(DatasetLocation): - """The location of azure blob dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. 
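For context (not part of the patch): the service-endpoint variant of the same linked service, assuming the package's SecureString secret type; all IDs and secrets are placeholders. Note that the "Azure SQL Data Warehouse" wording in the service-principal docstrings above appears to be carried over from the SQL DW model; the principal here authenticates against the Blob Storage account.

from azure.mgmt.datafactory.models import (
    AzureBlobStorageLinkedService,
    SecureString,
)

# serviceEndpoint is mutually exclusive with connectionString and sasUri.
blob_sp_ls = AzureBlobStorageLinkedService(
    service_endpoint='https://myaccount.blob.core.windows.net',   # placeholder
    service_principal_id='<application-id>',                      # placeholder
    service_principal_key=SecureString(value='<client-secret>'),  # placeholder
    tenant='<tenant-id>',                                         # placeholder
)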
- :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). - :type file_name: object - :param container: Specify the container of azure blob. Type: string (or - Expression with resultType string). - :type container: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - 'container': {'key': 'container', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureBlobStorageLocation, self).__init__(**kwargs) - self.container = kwargs.get('container', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location_py3.py deleted file mode 100644 index 63b122573039..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location_py3.py +++ /dev/null @@ -1,50 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_location_py3 import DatasetLocation - - -class AzureBlobStorageLocation(DatasetLocation): - """The location of azure blob dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). - :type file_name: object - :param container: Specify the container of azure blob. Type: string (or - Expression with resultType string). 
- :type container: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - 'container': {'key': 'container', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, container=None, **kwargs) -> None: - super(AzureBlobStorageLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) - self.container = container diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings.py deleted file mode 100644 index 42b11cc6de16..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .store_read_settings import StoreReadSettings - - -class AzureBlobStorageReadSettings(StoreReadSettings): - """Azure blob read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - :param wildcard_folder_path: Azure blob wildcardFolderPath. Type: string - (or Expression with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: Azure blob wildcardFileName. Type: string (or - Expression with resultType string). - :type wildcard_file_name: object - :param enable_partition_discovery: Indicates whether to enable partition - discovery. - :type enable_partition_discovery: bool - :param modified_datetime_start: The start of file's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: - string (or Expression with resultType string). 
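For context (not part of the patch): a sketch of the AzureBlobStorageLocation model whose two variants complete above; the container, folder path, and file name are placeholders, and 'type' is again caller-supplied in this version.

from azure.mgmt.datafactory.models import AzureBlobStorageLocation

location = AzureBlobStorageLocation(
    type='AzureBlobStorageLocation',
    container='mycontainer',    # placeholder
    folder_path='raw/2019/06',  # placeholder
    file_name='events.csv',     # placeholder
)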
- :type modified_datetime_end: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureBlobStorageReadSettings, self).__init__(**kwargs) - self.recursive = kwargs.get('recursive', None) - self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) - self.wildcard_file_name = kwargs.get('wildcard_file_name', None) - self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings_py3.py deleted file mode 100644 index 495ea16afd98..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings_py3.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .store_read_settings_py3 import StoreReadSettings - - -class AzureBlobStorageReadSettings(StoreReadSettings): - """Azure blob read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - :param wildcard_folder_path: Azure blob wildcardFolderPath. Type: string - (or Expression with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: Azure blob wildcardFileName. Type: string (or - Expression with resultType string). - :type wildcard_file_name: object - :param enable_partition_discovery: Indicates whether to enable partition - discovery. - :type enable_partition_discovery: bool - :param modified_datetime_start: The start of file's modified datetime. 
- Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: - string (or Expression with resultType string). - :type modified_datetime_end: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: - super(AzureBlobStorageReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.recursive = recursive - self.wildcard_folder_path = wildcard_folder_path - self.wildcard_file_name = wildcard_file_name - self.enable_partition_discovery = enable_partition_discovery - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings.py deleted file mode 100644 index 08becf208a3d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .store_write_settings import StoreWriteSettings - - -class AzureBlobStorageWriteSettings(StoreWriteSettings): - """Azure blob write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param copy_behavior: The type of copy behavior for copy sink. 
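For context (not part of the patch): a sketch of the AzureBlobStorageReadSettings model completed above, combining the wildcard filters with a modified-datetime window; the patterns and timestamps are placeholders.

from azure.mgmt.datafactory.models import AzureBlobStorageReadSettings

read_settings = AzureBlobStorageReadSettings(
    type='AzureBlobStorageReadSettings',
    recursive=True,
    wildcard_folder_path='raw/2019/*',               # placeholder pattern
    wildcard_file_name='*.csv',                      # placeholder pattern
    modified_datetime_start='2019-05-01T00:00:00Z',  # placeholder
    modified_datetime_end='2019-06-01T00:00:00Z',    # placeholder
)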
-    :type copy_behavior: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'type': {'key': 'type', 'type': 'str'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(AzureBlobStorageWriteSettings, self).__init__(**kwargs)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings_py3.py
deleted file mode 100644
index 40f2e0103693..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings_py3.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .store_write_settings_py3 import StoreWriteSettings
-
-
-class AzureBlobStorageWriteSettings(StoreWriteSettings):
-    """Azure blob write settings.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param type: Required. The write setting type.
-    :type type: str
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the source data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param copy_behavior: The type of copy behavior for copy sink.
-    :type copy_behavior: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'type': {'key': 'type', 'type': 'str'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
-    }
-
-    def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None:
-        super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity.py
deleted file mode 100644
index 308d445d1726..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity import ExecutionActivity - - -class AzureDataExplorerCommandActivity(ExecutionActivity): - """Azure Data Explorer command activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param command: Required. A control command, according to the Azure Data - Explorer command syntax. Type: string (or Expression with resultType - string). - :type command: object - :param command_timeout: Control command timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..) - :type command_timeout: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'command': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'command': {'key': 'typeProperties.command', 'type': 'object'}, - 'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureDataExplorerCommandActivity, self).__init__(**kwargs) - self.command = kwargs.get('command', None) - self.command_timeout = kwargs.get('command_timeout', None) - self.type = 'AzureDataExplorerCommand' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity_py3.py deleted file mode 100644 index 2f04dfddf08f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity_py3.py +++ /dev/null @@ -1,71 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. 
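For context (not part of the patch): a sketch of the AzureDataExplorerCommandActivity defined above, assuming a LinkedServiceReference model from the same package; the activity name, control command, and linked-service name are placeholders, and the timeout follows the documented d.hh:mm:ss pattern.

from azure.mgmt.datafactory.models import (
    AzureDataExplorerCommandActivity,
    LinkedServiceReference,
)

command_activity = AzureDataExplorerCommandActivity(
    name='ShowTables',       # placeholder activity name
    command='.show tables',  # placeholder control command
    command_timeout='00:20:00',
    linked_service_name=LinkedServiceReference(reference_name='AdxLS'),  # placeholder
)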
-# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class AzureDataExplorerCommandActivity(ExecutionActivity): - """Azure Data Explorer command activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param command: Required. A control command, according to the Azure Data - Explorer command syntax. Type: string (or Expression with resultType - string). - :type command: object - :param command_timeout: Control command timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..) - :type command_timeout: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'command': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'command': {'key': 'typeProperties.command', 'type': 'object'}, - 'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'}, - } - - def __init__(self, *, name: str, command, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, command_timeout=None, **kwargs) -> None: - super(AzureDataExplorerCommandActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.command = command - self.command_timeout = command_timeout - self.type = 'AzureDataExplorerCommand' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service.py deleted file mode 100644 index 5e5a9f7560c6..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service.py +++ /dev/null @@ -1,86 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) 
Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class AzureDataExplorerLinkedService(LinkedService): - """Azure Data Explorer (Kusto) linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param endpoint: Required. The endpoint of Azure Data Explorer (the - engine's endpoint). URL will be in the format - https://..kusto.windows.net. Type: string (or - Expression with resultType string) - :type endpoint: object - :param service_principal_id: Required. The ID of the service principal - used to authenticate against Azure Data Explorer. Type: string (or - Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: Required. The key of the service principal - used to authenticate against Kusto. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param database: Required. Database name for connection. Type: string (or - Expression with resultType string). - :type database: object - :param tenant: Required. The name or ID of the tenant to which the service - principal belongs. Type: string (or Expression with resultType string). 
- :type tenant: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, - 'database': {'required': True}, - 'tenant': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureDataExplorerLinkedService, self).__init__(**kwargs) - self.endpoint = kwargs.get('endpoint', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.database = kwargs.get('database', None) - self.tenant = kwargs.get('tenant', None) - self.type = 'AzureDataExplorer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service_py3.py deleted file mode 100644 index 3cd8ab9c3c19..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service_py3.py +++ /dev/null @@ -1,86 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzureDataExplorerLinkedService(LinkedService): - """Azure Data Explorer (Kusto) linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param endpoint: Required. The endpoint of Azure Data Explorer (the - engine's endpoint). URL will be in the format - https://..kusto.windows.net. 
Type: string (or - Expression with resultType string) - :type endpoint: object - :param service_principal_id: Required. The ID of the service principal - used to authenticate against Azure Data Explorer. Type: string (or - Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: Required. The key of the service principal - used to authenticate against Kusto. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param database: Required. Database name for connection. Type: string (or - Expression with resultType string). - :type database: object - :param tenant: Required. The name or ID of the tenant to which the service - principal belongs. Type: string (or Expression with resultType string). - :type tenant: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, - 'database': {'required': True}, - 'tenant': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - } - - def __init__(self, *, endpoint, service_principal_id, service_principal_key, database, tenant, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: - super(AzureDataExplorerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.endpoint = endpoint - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.database = database - self.tenant = tenant - self.type = 'AzureDataExplorer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink.py deleted file mode 100644 index 5c204ab769e4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink.py +++ /dev/null @@ -1,76 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink import CopySink - - -class AzureDataExplorerSink(CopySink): - """A copy activity Azure Data Explorer sink. 
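For context (not part of the patch): a sketch of the AzureDataExplorerLinkedService completed above, assuming the package's SecureString type; the cluster URL, IDs, and database name are placeholders. (The endpoint format in the docstring lost its placeholders in transit; the intended shape is https://<clusterName>.<regionName>.kusto.windows.net.)

from azure.mgmt.datafactory.models import (
    AzureDataExplorerLinkedService,
    SecureString,
)

adx_ls = AzureDataExplorerLinkedService(
    endpoint='https://mycluster.westus2.kusto.windows.net',       # placeholder
    service_principal_id='<application-id>',                      # placeholder
    service_principal_key=SecureString(value='<client-secret>'),  # placeholder
    database='MyDatabase',  # placeholder
    tenant='<tenant-id>',   # placeholder
)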
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param ingestion_mapping_name: A name of a pre-created csv mapping that - was defined on the target Kusto table. Type: string. - :type ingestion_mapping_name: object - :param ingestion_mapping_as_json: An explicit column mapping description - provided in a json format. Type: string. - :type ingestion_mapping_as_json: object - :param flush_immediately: If set to true, any aggregation will be skipped. - Default is false. Type: boolean. - :type flush_immediately: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'}, - 'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'}, - 'flush_immediately': {'key': 'flushImmediately', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureDataExplorerSink, self).__init__(**kwargs) - self.ingestion_mapping_name = kwargs.get('ingestion_mapping_name', None) - self.ingestion_mapping_as_json = kwargs.get('ingestion_mapping_as_json', None) - self.flush_immediately = kwargs.get('flush_immediately', None) - self.type = 'AzureDataExplorerSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink_py3.py deleted file mode 100644 index e5cb67bc79b8..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink_py3.py +++ /dev/null @@ -1,76 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. 
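For context (not part of the patch): a sketch of the AzureDataExplorerSink defined above; the ingestion mapping name is a placeholder for a CSV mapping pre-created on the target Kusto table.

from azure.mgmt.datafactory.models import AzureDataExplorerSink

# flush_immediately=True skips ingestion-side aggregation (batching);
# the documented default is false.
adx_sink = AzureDataExplorerSink(
    ingestion_mapping_name='events_csv_mapping',  # placeholder mapping name
    flush_immediately=True,
)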
-# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class AzureDataExplorerSink(CopySink): - """A copy activity Azure Data Explorer sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param ingestion_mapping_name: A name of a pre-created csv mapping that - was defined on the target Kusto table. Type: string. - :type ingestion_mapping_name: object - :param ingestion_mapping_as_json: An explicit column mapping description - provided in a json format. Type: string. - :type ingestion_mapping_as_json: object - :param flush_immediately: If set to true, any aggregation will be skipped. - Default is false. Type: boolean. 
- :type flush_immediately: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'}, - 'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'}, - 'flush_immediately': {'key': 'flushImmediately', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ingestion_mapping_name=None, ingestion_mapping_as_json=None, flush_immediately=None, **kwargs) -> None: - super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.ingestion_mapping_name = ingestion_mapping_name - self.ingestion_mapping_as_json = ingestion_mapping_as_json - self.flush_immediately = flush_immediately - self.type = 'AzureDataExplorerSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source.py deleted file mode 100644 index 2caaa517efd5..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source.py +++ /dev/null @@ -1,70 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class AzureDataExplorerSource(CopySource): - """A copy activity Azure Data Explorer (Kusto) source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Required. Database query. 
Should be a Kusto Query Language - (KQL) query. Type: string (or Expression with resultType string). - :type query: object - :param no_truncation: The name of the Boolean option that controls whether - truncation is applied to result-sets that go beyond a certain row-count - limit. - :type no_truncation: object - :param query_timeout: Query timeout. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. - :type query_timeout: object - """ - - _validation = { - 'type': {'required': True}, - 'query': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'no_truncation': {'key': 'noTruncation', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureDataExplorerSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.no_truncation = kwargs.get('no_truncation', None) - self.query_timeout = kwargs.get('query_timeout', None) - self.type = 'AzureDataExplorerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source_py3.py deleted file mode 100644 index 55a6bc78ee04..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source_py3.py +++ /dev/null @@ -1,70 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class AzureDataExplorerSource(CopySource): - """A copy activity Azure Data Explorer (Kusto) source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Required. Database query. Should be a Kusto Query Language - (KQL) query. Type: string (or Expression with resultType string). 
- :type query: object - :param no_truncation: The name of the Boolean option that controls whether - truncation is applied to result-sets that go beyond a certain row-count - limit. - :type no_truncation: object - :param query_timeout: Query timeout. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. - :type query_timeout: object - """ - - _validation = { - 'type': {'required': True}, - 'query': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'no_truncation': {'key': 'noTruncation', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - } - - def __init__(self, *, query, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, no_truncation=None, query_timeout=None, **kwargs) -> None: - super(AzureDataExplorerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.no_truncation = no_truncation - self.query_timeout = query_timeout - self.type = 'AzureDataExplorerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset.py deleted file mode 100644 index 594d22171f48..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class AzureDataExplorerTableDataset(Dataset): - """The Azure Data Explorer (Kusto) dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
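For context (not part of the patch): a sketch of the AzureDataExplorerSource completed above; the KQL query is a placeholder, and no_truncation lifts the default row-count limit on result sets.

from azure.mgmt.datafactory.models import AzureDataExplorerSource

adx_source = AzureDataExplorerSource(
    query='MyTable | where Timestamp > ago(1d)',  # placeholder KQL
    no_truncation=True,
    query_timeout='00:10:00',
)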
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table: The table name of the Azure Data Explorer database. Type: - string (or Expression with resultType string). - :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureDataExplorerTableDataset, self).__init__(**kwargs) - self.table = kwargs.get('table', None) - self.type = 'AzureDataExplorerTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset_py3.py deleted file mode 100644 index d36b0f39c2fe..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class AzureDataExplorerTableDataset(Dataset): - """The Azure Data Explorer (Kusto) dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table: The table name of the Azure Data Explorer database. Type: - string (or Expression with resultType string). - :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table=None, **kwargs) -> None: - super(AzureDataExplorerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table = table - self.type = 'AzureDataExplorerTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service.py deleted file mode 100644 index 0381e1b1de65..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service.py +++ /dev/null @@ -1,99 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class AzureDataLakeAnalyticsLinkedService(LinkedService): - """Azure Data Lake Analytics linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
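A matching sketch for the AzureDataExplorerTableDataset removed here. Per its _validation map, only the linked service reference is required; the reference and table names below are placeholders, and the LinkedServiceReference keyword is assumed from this package's conventions:

from azure.mgmt.datafactory.models import (
    AzureDataExplorerTableDataset,
    LinkedServiceReference,
)

dataset = AzureDataExplorerTableDataset(
    linked_service_name=LinkedServiceReference(
        reference_name="MyKustoLinkedService"),  # placeholder reference
    table="StormEvents",                         # placeholder Kusto table
)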
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param account_name: Required. The Azure Data Lake Analytics account name. - Type: string (or Expression with resultType string). - :type account_name: object - :param service_principal_id: The ID of the application used to - authenticate against the Azure Data Lake Analytics account. Type: string - (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The Key of the application used to - authenticate against the Azure Data Lake Analytics account. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: Required. The name or ID of the tenant to which the service - principal belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param subscription_id: Data Lake Analytics account subscription ID (if - different from Data Factory account). Type: string (or Expression with - resultType string). - :type subscription_id: object - :param resource_group_name: Data Lake Analytics account resource group - name (if different from Data Factory account). Type: string (or Expression - with resultType string). - :type resource_group_name: object - :param data_lake_analytics_uri: Azure Data Lake Analytics URI. Type: string - (or Expression with resultType string). - :type data_lake_analytics_uri: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'account_name': {'required': True}, - 'tenant': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, - 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, - 'data_lake_analytics_uri': {'key': 'typeProperties.dataLakeAnalyticsUri', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureDataLakeAnalyticsLinkedService, self).__init__(**kwargs) - self.account_name = kwargs.get('account_name', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.subscription_id = kwargs.get('subscription_id', None) - self.resource_group_name = kwargs.get('resource_group_name', None) - self.data_lake_analytics_uri = kwargs.get('data_lake_analytics_uri', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AzureDataLakeAnalytics' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service_py3.py deleted file mode 100644 index 93250e2cef76..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service_py3.py +++ /dev/null @@ -1,99 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzureDataLakeAnalyticsLinkedService(LinkedService): - """Azure Data Lake Analytics linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param account_name: Required. The Azure Data Lake Analytics account name. - Type: string (or Expression with resultType string). - :type account_name: object - :param service_principal_id: The ID of the application used to - authenticate against the Azure Data Lake Analytics account. Type: string - (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The Key of the application used to - authenticate against the Azure Data Lake Analytics account. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: Required. The name or ID of the tenant to which the service - principal belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param subscription_id: Data Lake Analytics account subscription ID (if - different from Data Factory account). Type: string (or Expression with - resultType string). - :type subscription_id: object - :param resource_group_name: Data Lake Analytics account resource group - name (if different from Data Factory account). Type: string (or Expression - with resultType string). - :type resource_group_name: object - :param data_lake_analytics_uri: Azure Data Lake Analytics URI. Type: string - (or Expression with resultType string). - :type data_lake_analytics_uri: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'account_name': {'required': True}, - 'tenant': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, - 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, - 'data_lake_analytics_uri': {'key': 'typeProperties.dataLakeAnalyticsUri', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, account_name, tenant, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, subscription_id=None, resource_group_name=None, data_lake_analytics_uri=None, encrypted_credential=None, **kwargs) -> None: - super(AzureDataLakeAnalyticsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.account_name = account_name - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.subscription_id = subscription_id - self.resource_group_name = resource_group_name - self.data_lake_analytics_uri = data_lake_analytics_uri - self.encrypted_credential = encrypted_credential - self.type = 'AzureDataLakeAnalytics' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset.py deleted file mode 100644 index de15057f78ed..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset.py +++ /dev/null @@ -1,86 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class AzureDataLakeStoreDataset(Dataset): - """Azure Data Lake Store dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. 
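To make the Data Lake Analytics shape above concrete, a hedged construction sketch: account_name and tenant are required by _validation, service-principal credentials are optional, every value below is a placeholder, and SecureString is assumed here as the non-Key-Vault SecretBase implementation in this package:

from azure.mgmt.datafactory.models import (
    AzureDataLakeAnalyticsLinkedService,
    SecureString,
)

adla_ls = AzureDataLakeAnalyticsLinkedService(
    account_name="myadlaaccount",                    # placeholder account
    tenant="00000000-0000-0000-0000-000000000000",   # placeholder tenant ID
    service_principal_id="00000000-0000-0000-0000-000000000000",  # placeholder
    service_principal_key=SecureString(value="<app-key>"),        # placeholder secret
)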
- :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param folder_path: Path to the folder in the Azure Data Lake Store. Type: - string (or Expression with resultType string). - :type folder_path: object - :param file_name: The name of the file in the Azure Data Lake Store. Type: - string (or Expression with resultType string). - :type file_name: object - :param format: The format of the Data Lake Store. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used for the item(s) in - the Azure Data Lake Store. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, **kwargs): - super(AzureDataLakeStoreDataset, self).__init__(**kwargs) - self.folder_path = kwargs.get('folder_path', None) - self.file_name = kwargs.get('file_name', None) - self.format = kwargs.get('format', None) - self.compression = kwargs.get('compression', None) - self.type = 'AzureDataLakeStoreFile' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset_py3.py deleted file mode 100644 index d2df0ffebe7e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset_py3.py +++ /dev/null @@ -1,86 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. 
-# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class AzureDataLakeStoreDataset(Dataset): - """Azure Data Lake Store dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param folder_path: Path to the folder in the Azure Data Lake Store. Type: - string (or Expression with resultType string). - :type folder_path: object - :param file_name: The name of the file in the Azure Data Lake Store. Type: - string (or Expression with resultType string). - :type file_name: object - :param format: The format of the Data Lake Store. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used for the item(s) in - the Azure Data Lake Store. 
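As a quick illustration of the AzureDataLakeStoreDataset fields listed above: only linked_service_name is required, and the reference and path values below are hypothetical:

from azure.mgmt.datafactory.models import (
    AzureDataLakeStoreDataset,
    LinkedServiceReference,
)

adls_dataset = AzureDataLakeStoreDataset(
    linked_service_name=LinkedServiceReference(
        reference_name="MyAdlsLinkedService"),  # placeholder reference
    folder_path="raw/events/2019",              # hypothetical folder
    file_name="events.csv",                     # hypothetical file
)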
- :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, format=None, compression=None, **kwargs) -> None: - super(AzureDataLakeStoreDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.folder_path = folder_path - self.file_name = file_name - self.format = format - self.compression = compression - self.type = 'AzureDataLakeStoreFile' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service.py deleted file mode 100644 index f08e086cb500..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service.py +++ /dev/null @@ -1,98 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class AzureDataLakeStoreLinkedService(LinkedService): - """Azure Data Lake Store linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. 
Constant filled by server. - :type type: str - :param data_lake_store_uri: Required. Data Lake Store service URI. Type: - string (or Expression with resultType string). - :type data_lake_store_uri: object - :param service_principal_id: The ID of the application used to - authenticate against the Azure Data Lake Store account. Type: string (or - Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The Key of the application used to - authenticate against the Azure Data Lake Store account. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal - belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param account_name: Data Lake Store account name. Type: string (or - Expression with resultType string). - :type account_name: object - :param subscription_id: Data Lake Store account subscription ID (if - different from Data Factory account). Type: string (or Expression with - resultType string). - :type subscription_id: object - :param resource_group_name: Data Lake Store account resource group name - (if different from Data Factory account). Type: string (or Expression with - resultType string). - :type resource_group_name: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'data_lake_store_uri': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'data_lake_store_uri': {'key': 'typeProperties.dataLakeStoreUri', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, - 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, - 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureDataLakeStoreLinkedService, self).__init__(**kwargs) - self.data_lake_store_uri = kwargs.get('data_lake_store_uri', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.account_name = kwargs.get('account_name', None) - self.subscription_id = kwargs.get('subscription_id', None) - self.resource_group_name = kwargs.get('resource_group_name', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AzureDataLakeStore' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service_py3.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service_py3.py deleted file mode 100644 index 7b8ab293c0cf..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service_py3.py +++ /dev/null @@ -1,98 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzureDataLakeStoreLinkedService(LinkedService): - """Azure Data Lake Store linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param data_lake_store_uri: Required. Data Lake Store service URI. Type: - string (or Expression with resultType string). - :type data_lake_store_uri: object - :param service_principal_id: The ID of the application used to - authenticate against the Azure Data Lake Store account. Type: string (or - Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The Key of the application used to - authenticate against the Azure Data Lake Store account. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal - belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param account_name: Data Lake Store account name. Type: string (or - Expression with resultType string). - :type account_name: object - :param subscription_id: Data Lake Store account subscription ID (if - different from Data Factory account). Type: string (or Expression with - resultType string). - :type subscription_id: object - :param resource_group_name: Data Lake Store account resource group name - (if different from Data Factory account). Type: string (or Expression with - resultType string). - :type resource_group_name: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'data_lake_store_uri': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'data_lake_store_uri': {'key': 'typeProperties.dataLakeStoreUri', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, - 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, - 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, data_lake_store_uri, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, tenant=None, account_name=None, subscription_id=None, resource_group_name=None, encrypted_credential=None, **kwargs) -> None: - super(AzureDataLakeStoreLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.data_lake_store_uri = data_lake_store_uri - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.account_name = account_name - self.subscription_id = subscription_id - self.resource_group_name = resource_group_name - self.encrypted_credential = encrypted_credential - self.type = 'AzureDataLakeStore' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location.py deleted file mode 100644 index a4bf521a2005..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_location import DatasetLocation - - -class AzureDataLakeStoreLocation(DatasetLocation): - """The location of azure data lake store dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. 
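A minimal sketch of the AzureDataLakeStoreLinkedService just removed. data_lake_store_uri is the only required type property per _validation; the adl:// URI format and the IDs below are illustrative placeholders:

from azure.mgmt.datafactory.models import AzureDataLakeStoreLinkedService

adls_ls = AzureDataLakeStoreLinkedService(
    data_lake_store_uri="adl://myadlsaccount.azuredatalakestore.net",  # placeholder URI
    tenant="00000000-0000-0000-0000-000000000000",                     # placeholder
    service_principal_id="00000000-0000-0000-0000-000000000000",      # placeholder
)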
- :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). - :type file_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureDataLakeStoreLocation, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location_py3.py deleted file mode 100644 index e7955731fc31..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location_py3.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_location_py3 import DatasetLocation - - -class AzureDataLakeStoreLocation(DatasetLocation): - """The location of azure data lake store dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). - :type file_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: - super(AzureDataLakeStoreLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings.py deleted file mode 100644 index 213d69966baf..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .store_read_settings import StoreReadSettings - - -class AzureDataLakeStoreReadSettings(StoreReadSettings): - """Azure data lake store read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - :param wildcard_folder_path: ADLS wildcardFolderPath. Type: string (or - Expression with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: ADLS wildcardFileName. Type: string (or - Expression with resultType string). - :type wildcard_file_name: object - :param enable_partition_discovery: Indicates whether to enable partition - discovery. - :type enable_partition_discovery: bool - :param modified_datetime_start: The start of file's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: - string (or Expression with resultType string). 
- :type modified_datetime_end: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureDataLakeStoreReadSettings, self).__init__(**kwargs) - self.recursive = kwargs.get('recursive', None) - self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) - self.wildcard_file_name = kwargs.get('wildcard_file_name', None) - self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings_py3.py deleted file mode 100644 index b4bccc5e78a3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings_py3.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .store_read_settings_py3 import StoreReadSettings - - -class AzureDataLakeStoreReadSettings(StoreReadSettings): - """Azure data lake store read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - :param wildcard_folder_path: ADLS wildcardFolderPath. Type: string (or - Expression with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: ADLS wildcardFileName. Type: string (or - Expression with resultType string). - :type wildcard_file_name: object - :param enable_partition_discovery: Indicates whether to enable partition - discovery. - :type enable_partition_discovery: bool - :param modified_datetime_start: The start of file's modified datetime. 
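A sketch of the read settings above. Note that in this API version 'type' is caller-supplied rather than a server constant, so the discriminator string below is the conventional value, assumed rather than confirmed by this diff; the filter values are hypothetical:

from azure.mgmt.datafactory.models import AzureDataLakeStoreReadSettings

read_settings = AzureDataLakeStoreReadSettings(
    type="AzureDataLakeStoreReadSettings",           # assumed discriminator value
    recursive=True,                                  # read the folder tree recursively
    wildcard_file_name="*.csv",                      # hypothetical filter
    modified_datetime_start="2019-06-01T00:00:00Z",  # hypothetical window start
)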
- Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: - string (or Expression with resultType string). - :type modified_datetime_end: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: - super(AzureDataLakeStoreReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.recursive = recursive - self.wildcard_folder_path = wildcard_folder_path - self.wildcard_file_name = wildcard_file_name - self.enable_partition_discovery = enable_partition_discovery - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py deleted file mode 100644 index e882698c2ca6..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink import CopySink - - -class AzureDataLakeStoreSink(CopySink): - """A copy activity Azure Data Lake Store sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. 
Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - :param enable_adls_single_file_parallel: Single File Parallel. - :type enable_adls_single_file_parallel: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureDataLakeStoreSink, self).__init__(**kwargs) - self.copy_behavior = kwargs.get('copy_behavior', None) - self.enable_adls_single_file_parallel = kwargs.get('enable_adls_single_file_parallel', None) - self.type = 'AzureDataLakeStoreSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py deleted file mode 100644 index 0f96cea725e2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class AzureDataLakeStoreSink(CopySink): - """A copy activity Azure Data Lake Store sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - :param enable_adls_single_file_parallel: Single File Parallel. - :type enable_adls_single_file_parallel: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, enable_adls_single_file_parallel=None, **kwargs) -> None: - super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.copy_behavior = copy_behavior - self.enable_adls_single_file_parallel = enable_adls_single_file_parallel - self.type = 'AzureDataLakeStoreSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source.py deleted file mode 100644 index 9d2046049a30..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source.py +++ /dev/null @@ -1,58 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class AzureDataLakeStoreSource(CopySource): - """A copy activity Azure Data Lake source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
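For the sink removed above, a construction sketch: nothing is required beyond the server-filled type, and 'PreserveHierarchy' is one of the standard Data Factory copy-behavior values, assumed here since the diff does not show the enum:

from azure.mgmt.datafactory.models import AzureDataLakeStoreSink

sink = AzureDataLakeStoreSink(
    copy_behavior="PreserveHierarchy",      # assumed copy-behavior value
    enable_adls_single_file_parallel=True,  # ADLS-specific parallel upload flag
)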
- :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureDataLakeStoreSource, self).__init__(**kwargs) - self.recursive = kwargs.get('recursive', None) - self.type = 'AzureDataLakeStoreSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source_py3.py deleted file mode 100644 index e1d883972220..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source_py3.py +++ /dev/null @@ -1,58 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class AzureDataLakeStoreSource(CopySource): - """A copy activity Azure Data Lake source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). 
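And the corresponding source sketch, mirroring the py3 signature shown: all arguments are optional, and the retry values below are hypothetical (the wait must match the timespan pattern quoted above):

from azure.mgmt.datafactory.models import AzureDataLakeStoreSource

source = AzureDataLakeStoreSource(
    recursive=True,                # read the folder tree recursively (default per docstring)
    source_retry_count=3,          # hypothetical retry policy
    source_retry_wait="00:00:30",  # hypothetical 30-second wait
)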
- :type recursive: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, **kwargs) -> None: - super(AzureDataLakeStoreSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.recursive = recursive - self.type = 'AzureDataLakeStoreSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings.py deleted file mode 100644 index a372606d86ec..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .store_write_settings import StoreWriteSettings - - -class AzureDataLakeStoreWriteSettings(StoreWriteSettings): - """Azure data lake store write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param copy_behavior: The type of copy behavior for copy sink. 
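Unlike the sink above, the write-settings model takes its type discriminator as a required constructor argument rather than a server-filled constant. A minimal sketch (illustrative values; assumes the model is imported from azure.mgmt.datafactory.models):

from azure.mgmt.datafactory.models import AzureDataLakeStoreWriteSettings

write_settings = AzureDataLakeStoreWriteSettings(
    type='AzureDataLakeStoreWriteSettings',  # required write setting type
    max_concurrent_connections=8,
    copy_behavior='FlattenHierarchy')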
- :type copy_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureDataLakeStoreWriteSettings, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings_py3.py deleted file mode 100644 index a48cade879c6..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings_py3.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .store_write_settings_py3 import StoreWriteSettings - - -class AzureDataLakeStoreWriteSettings(StoreWriteSettings): - """Azure data lake store write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: - super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py deleted file mode 100644 index 6cc4c12674cb..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py +++ /dev/null @@ -1,126 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
-# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class AzureDatabricksLinkedService(LinkedService): - """Azure Databricks linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param domain: Required. .azuredatabricks.net, domain name of your - Databricks deployment. Type: string (or Expression with resultType - string). - :type domain: object - :param access_token: Required. Access token for databricks REST API. Refer - to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: - string (or Expression with resultType string). - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param existing_cluster_id: The id of an existing cluster that will be - used for all runs of this job. Type: string (or Expression with resultType - string). - :type existing_cluster_id: object - :param new_cluster_version: The Spark version of new cluster. Type: string - (or Expression with resultType string). - :type new_cluster_version: object - :param new_cluster_num_of_worker: Number of worker nodes that new cluster - should have. A string formatted Int32, like '1' means numOfWorker is 1 or - '1:10' means auto-scale from 1 as min and 10 as max. Type: string (or - Expression with resultType string). - :type new_cluster_num_of_worker: object - :param new_cluster_node_type: The node types of new cluster. Type: string - (or Expression with resultType string). - :type new_cluster_node_type: object - :param new_cluster_spark_conf: A set of optional, user-specified Spark - configuration key-value pairs. - :type new_cluster_spark_conf: dict[str, object] - :param new_cluster_spark_env_vars: A set of optional, user-specified Spark - environment variables key-value pairs. - :type new_cluster_spark_env_vars: dict[str, object] - :param new_cluster_custom_tags: Additional tags for cluster resources. - :type new_cluster_custom_tags: dict[str, object] - :param new_cluster_driver_node_type: The driver node type for the new - cluster. Type: string (or Expression with resultType string). - :type new_cluster_driver_node_type: object - :param new_cluster_init_scripts: User-defined initialization scripts for - the new cluster. Type: array of strings (or Expression with resultType - array of strings). - :type new_cluster_init_scripts: object - :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new - cluster. Type: boolean (or Expression with resultType boolean). - :type new_cluster_enable_elastic_disk: object - :param encrypted_credential: The encrypted credential used for - authentication. 
Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'domain': {'required': True}, - 'access_token': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, - 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, - 'new_cluster_num_of_worker': {'key': 'typeProperties.newClusterNumOfWorker', 'type': 'object'}, - 'new_cluster_node_type': {'key': 'typeProperties.newClusterNodeType', 'type': 'object'}, - 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, - 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, - 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, - 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, - 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, - 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureDatabricksLinkedService, self).__init__(**kwargs) - self.domain = kwargs.get('domain', None) - self.access_token = kwargs.get('access_token', None) - self.existing_cluster_id = kwargs.get('existing_cluster_id', None) - self.new_cluster_version = kwargs.get('new_cluster_version', None) - self.new_cluster_num_of_worker = kwargs.get('new_cluster_num_of_worker', None) - self.new_cluster_node_type = kwargs.get('new_cluster_node_type', None) - self.new_cluster_spark_conf = kwargs.get('new_cluster_spark_conf', None) - self.new_cluster_spark_env_vars = kwargs.get('new_cluster_spark_env_vars', None) - self.new_cluster_custom_tags = kwargs.get('new_cluster_custom_tags', None) - self.new_cluster_driver_node_type = kwargs.get('new_cluster_driver_node_type', None) - self.new_cluster_init_scripts = kwargs.get('new_cluster_init_scripts', None) - self.new_cluster_enable_elastic_disk = kwargs.get('new_cluster_enable_elastic_disk', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AzureDatabricks' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service_py3.py deleted file mode 100644 index 6299dac1e3f2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service_py3.py +++ /dev/null @@ -1,126 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. 
All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzureDatabricksLinkedService(LinkedService): - """Azure Databricks linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param domain: Required. .azuredatabricks.net, domain name of your - Databricks deployment. Type: string (or Expression with resultType - string). - :type domain: object - :param access_token: Required. Access token for databricks REST API. Refer - to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: - string (or Expression with resultType string). - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param existing_cluster_id: The id of an existing cluster that will be - used for all runs of this job. Type: string (or Expression with resultType - string). - :type existing_cluster_id: object - :param new_cluster_version: The Spark version of new cluster. Type: string - (or Expression with resultType string). - :type new_cluster_version: object - :param new_cluster_num_of_worker: Number of worker nodes that new cluster - should have. A string formatted Int32, like '1' means numOfWorker is 1 or - '1:10' means auto-scale from 1 as min and 10 as max. Type: string (or - Expression with resultType string). - :type new_cluster_num_of_worker: object - :param new_cluster_node_type: The node types of new cluster. Type: string - (or Expression with resultType string). - :type new_cluster_node_type: object - :param new_cluster_spark_conf: A set of optional, user-specified Spark - configuration key-value pairs. - :type new_cluster_spark_conf: dict[str, object] - :param new_cluster_spark_env_vars: A set of optional, user-specified Spark - environment variables key-value pairs. - :type new_cluster_spark_env_vars: dict[str, object] - :param new_cluster_custom_tags: Additional tags for cluster resources. - :type new_cluster_custom_tags: dict[str, object] - :param new_cluster_driver_node_type: The driver node type for the new - cluster. Type: string (or Expression with resultType string). - :type new_cluster_driver_node_type: object - :param new_cluster_init_scripts: User-defined initialization scripts for - the new cluster. Type: array of strings (or Expression with resultType - array of strings). - :type new_cluster_init_scripts: object - :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new - cluster. Type: boolean (or Expression with resultType boolean). 
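A linked service that provisions a new job cluster per run might be built as follows (a minimal sketch with placeholder values; SecureString is the SDK's inline secret type, and any SecretBase works for the token):

from azure.mgmt.datafactory.models import (
    AzureDatabricksLinkedService, SecureString)

databricks_ls = AzureDatabricksLinkedService(
    domain='https://eastus.azuredatabricks.net',
    access_token=SecureString(value='<databricks-pat>'),
    new_cluster_version='5.3.x-scala2.11',
    new_cluster_num_of_worker='1:10',  # autoscale from 1 to 10 workers
    new_cluster_node_type='Standard_D3_v2')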
- :type new_cluster_enable_elastic_disk: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'domain': {'required': True}, - 'access_token': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, - 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, - 'new_cluster_num_of_worker': {'key': 'typeProperties.newClusterNumOfWorker', 'type': 'object'}, - 'new_cluster_node_type': {'key': 'typeProperties.newClusterNodeType', 'type': 'object'}, - 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, - 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, - 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, - 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, - 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, - 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, domain, access_token, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, existing_cluster_id=None, new_cluster_version=None, new_cluster_num_of_worker=None, new_cluster_node_type=None, new_cluster_spark_conf=None, new_cluster_spark_env_vars=None, new_cluster_custom_tags=None, new_cluster_driver_node_type=None, new_cluster_init_scripts=None, new_cluster_enable_elastic_disk=None, encrypted_credential=None, **kwargs) -> None: - super(AzureDatabricksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.domain = domain - self.access_token = access_token - self.existing_cluster_id = existing_cluster_id - self.new_cluster_version = new_cluster_version - self.new_cluster_num_of_worker = new_cluster_num_of_worker - self.new_cluster_node_type = new_cluster_node_type - self.new_cluster_spark_conf = new_cluster_spark_conf - self.new_cluster_spark_env_vars = new_cluster_spark_env_vars - self.new_cluster_custom_tags = new_cluster_custom_tags - self.new_cluster_driver_node_type = new_cluster_driver_node_type - self.new_cluster_init_scripts = new_cluster_init_scripts - self.new_cluster_enable_elastic_disk = new_cluster_enable_elastic_disk - self.encrypted_credential = encrypted_credential - self.type = 'AzureDatabricks' diff --git 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity.py deleted file mode 100644 index 68b02e5f771f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity.py +++ /dev/null @@ -1,85 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity import ExecutionActivity - - -class AzureFunctionActivity(ExecutionActivity): - """Azure Function activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param method: Required. Rest API method for target endpoint. Possible - values include: 'GET', 'POST', 'PUT', 'DELETE', 'OPTIONS', 'HEAD', 'TRACE' - :type method: str or - ~azure.mgmt.datafactory.models.AzureFunctionActivityMethod - :param function_name: Required. Name of the Function that the Azure - Function Activity will call. Type: string (or Expression with resultType - string) - :type function_name: object - :param headers: Represents the headers that will be sent to the request. - For example, to set the language and type on a request: "headers" : { - "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: - string (or Expression with resultType string). - :type headers: object - :param body: Represents the payload that will be sent to the endpoint. - Required for POST/PUT method, not allowed for GET method Type: string (or - Expression with resultType string). 
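Put together, a POST invocation of a function might look like this (a minimal sketch with hypothetical names):

from azure.mgmt.datafactory.models import AzureFunctionActivity

activity = AzureFunctionActivity(
    name='InvokeStatusFunction',
    method='POST',
    function_name='HttpStatusHandler',
    headers={'Content-Type': 'application/json'},
    # body is required for POST/PUT and not allowed for GET
    body='{"status": "started"}')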
- :type body: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'method': {'required': True}, - 'function_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'method': {'key': 'typeProperties.method', 'type': 'str'}, - 'function_name': {'key': 'typeProperties.functionName', 'type': 'object'}, - 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, - 'body': {'key': 'typeProperties.body', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureFunctionActivity, self).__init__(**kwargs) - self.method = kwargs.get('method', None) - self.function_name = kwargs.get('function_name', None) - self.headers = kwargs.get('headers', None) - self.body = kwargs.get('body', None) - self.type = 'AzureFunctionActivity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity_py3.py deleted file mode 100644 index 95bb1ca260e7..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity_py3.py +++ /dev/null @@ -1,85 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class AzureFunctionActivity(ExecutionActivity): - """Azure Function activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param method: Required. Rest API method for target endpoint. Possible - values include: 'GET', 'POST', 'PUT', 'DELETE', 'OPTIONS', 'HEAD', 'TRACE' - :type method: str or - ~azure.mgmt.datafactory.models.AzureFunctionActivityMethod - :param function_name: Required. 
Name of the Function that the Azure - Function Activity will call. Type: string (or Expression with resultType - string) - :type function_name: object - :param headers: Represents the headers that will be sent to the request. - For example, to set the language and type on a request: "headers" : { - "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: - string (or Expression with resultType string). - :type headers: object - :param body: Represents the payload that will be sent to the endpoint. - Required for POST/PUT method, not allowed for GET method Type: string (or - Expression with resultType string). - :type body: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'method': {'required': True}, - 'function_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'method': {'key': 'typeProperties.method', 'type': 'str'}, - 'function_name': {'key': 'typeProperties.functionName', 'type': 'object'}, - 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, - 'body': {'key': 'typeProperties.body', 'type': 'object'}, - } - - def __init__(self, *, name: str, method, function_name, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, headers=None, body=None, **kwargs) -> None: - super(AzureFunctionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.method = method - self.function_name = function_name - self.headers = headers - self.body = body - self.type = 'AzureFunctionActivity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service.py deleted file mode 100644 index 2ed5b870a778..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class AzureFunctionLinkedService(LinkedService): - """Azure Function linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. 
- :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param function_app_url: Required. The endpoint of the Azure Function App. - URL will be in the format https://.azurewebsites.net. - :type function_app_url: object - :param function_key: Function or Host key for Azure Function App. - :type function_key: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'function_app_url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'function_app_url': {'key': 'typeProperties.functionAppUrl', 'type': 'object'}, - 'function_key': {'key': 'typeProperties.functionKey', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureFunctionLinkedService, self).__init__(**kwargs) - self.function_app_url = kwargs.get('function_app_url', None) - self.function_key = kwargs.get('function_key', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AzureFunction' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service_py3.py deleted file mode 100644 index a1bfdbe8b6c1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service_py3.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzureFunctionLinkedService(LinkedService): - """Azure Function linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. 
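A corresponding linked service needs only the function app endpoint plus, typically, a function or host key (a minimal sketch with placeholder values):

from azure.mgmt.datafactory.models import (
    AzureFunctionLinkedService, SecureString)

function_ls = AzureFunctionLinkedService(
    function_app_url='https://myfunctionapp.azurewebsites.net',
    function_key=SecureString(value='<host-key>'))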
- :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param function_app_url: Required. The endpoint of the Azure Function App. - URL will be in the format https://.azurewebsites.net. - :type function_app_url: object - :param function_key: Function or Host key for Azure Function App. - :type function_key: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'function_app_url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'function_app_url': {'key': 'typeProperties.functionAppUrl', 'type': 'object'}, - 'function_key': {'key': 'typeProperties.functionKey', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, function_app_url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, function_key=None, encrypted_credential=None, **kwargs) -> None: - super(AzureFunctionLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.function_app_url = function_app_url - self.function_key = function_key - self.encrypted_credential = encrypted_credential - self.type = 'AzureFunction' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service.py deleted file mode 100644 index 768f0d83ae93..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service.py +++ /dev/null @@ -1,60 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class AzureKeyVaultLinkedService(LinkedService): - """Azure Key Vault linked service. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param base_url: Required. The base URL of the Azure Key Vault. e.g. - https://myakv.vault.azure.net Type: string (or Expression with resultType - string). - :type base_url: object - """ - - _validation = { - 'type': {'required': True}, - 'base_url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'base_url': {'key': 'typeProperties.baseUrl', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureKeyVaultLinkedService, self).__init__(**kwargs) - self.base_url = kwargs.get('base_url', None) - self.type = 'AzureKeyVault' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service_py3.py deleted file mode 100644 index 50f4a58a5a1b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service_py3.py +++ /dev/null @@ -1,60 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzureKeyVaultLinkedService(LinkedService): - """Azure Key Vault linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param base_url: Required. The base URL of the Azure Key Vault. e.g. 
- https://myakv.vault.azure.net Type: string (or Expression with resultType - string). - :type base_url: object - """ - - _validation = { - 'type': {'required': True}, - 'base_url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'base_url': {'key': 'typeProperties.baseUrl', 'type': 'object'}, - } - - def __init__(self, *, base_url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: - super(AzureKeyVaultLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.base_url = base_url - self.type = 'AzureKeyVault' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference.py deleted file mode 100644 index 28d3e7d31cee..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference.py +++ /dev/null @@ -1,51 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .secret_base import SecretBase - - -class AzureKeyVaultSecretReference(SecretBase): - """Azure Key Vault secret reference. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. - :type type: str - :param store: Required. The Azure Key Vault linked service reference. - :type store: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param secret_name: Required. The name of the secret in Azure Key Vault. - Type: string (or Expression with resultType string). - :type secret_name: object - :param secret_version: The version of the secret in Azure Key Vault. The - default value is the latest version of the secret. Type: string (or - Expression with resultType string). 
- :type secret_version: object - """ - - _validation = { - 'type': {'required': True}, - 'store': {'required': True}, - 'secret_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'store': {'key': 'store', 'type': 'LinkedServiceReference'}, - 'secret_name': {'key': 'secretName', 'type': 'object'}, - 'secret_version': {'key': 'secretVersion', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureKeyVaultSecretReference, self).__init__(**kwargs) - self.store = kwargs.get('store', None) - self.secret_name = kwargs.get('secret_name', None) - self.secret_version = kwargs.get('secret_version', None) - self.type = 'AzureKeyVaultSecret' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference_py3.py deleted file mode 100644 index c5fe4c7afbd4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference_py3.py +++ /dev/null @@ -1,51 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .secret_base_py3 import SecretBase - - -class AzureKeyVaultSecretReference(SecretBase): - """Azure Key Vault secret reference. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. - :type type: str - :param store: Required. The Azure Key Vault linked service reference. - :type store: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param secret_name: Required. The name of the secret in Azure Key Vault. - Type: string (or Expression with resultType string). - :type secret_name: object - :param secret_version: The version of the secret in Azure Key Vault. The - default value is the latest version of the secret. Type: string (or - Expression with resultType string). 
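The two Key Vault models are designed to be used together: the linked service points at the vault, and the secret reference resolves a named secret through it. A minimal sketch (placeholder names; 'AzureKeyVaultLS' is assumed to be the name under which the linked service is registered):

from azure.mgmt.datafactory.models import (
    AzureKeyVaultLinkedService, AzureKeyVaultSecretReference,
    LinkedServiceReference)

akv_ls = AzureKeyVaultLinkedService(
    base_url='https://myakv.vault.azure.net')

# Omitting secret_version resolves to the latest version at run time.
password = AzureKeyVaultSecretReference(
    store=LinkedServiceReference(reference_name='AzureKeyVaultLS'),
    secret_name='db-password')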
- :type secret_version: object - """ - - _validation = { - 'type': {'required': True}, - 'store': {'required': True}, - 'secret_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'store': {'key': 'store', 'type': 'LinkedServiceReference'}, - 'secret_name': {'key': 'secretName', 'type': 'object'}, - 'secret_version': {'key': 'secretVersion', 'type': 'object'}, - } - - def __init__(self, *, store, secret_name, secret_version=None, **kwargs) -> None: - super(AzureKeyVaultSecretReference, self).__init__(**kwargs) - self.store = store - self.secret_name = secret_name - self.secret_version = secret_version - self.type = 'AzureKeyVaultSecret' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service.py deleted file mode 100644 index d2dc7db88851..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class AzureMariaDBLinkedService(LinkedService): - """Azure Database for MariaDB linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection - string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureMariaDBLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.pwd = kwargs.get('pwd', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AzureMariaDB' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service_py3.py deleted file mode 100644 index c80015ed6b45..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service_py3.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzureMariaDBLinkedService(LinkedService): - """Azure Database for MariaDB linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection - string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
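Such a linked service is commonly combined with the Key Vault secret reference shown earlier, so the password never appears in the connection string (a minimal sketch with placeholder values):

from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference, AzureMariaDBLinkedService,
    LinkedServiceReference)

mariadb_ls = AzureMariaDBLinkedService(
    connection_string='Server=myserver.mariadb.database.azure.com;'
                      'Port=3306;Database=mydb;Uid=myadmin@myserver;',
    pwd=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='AzureKeyVaultLS'),
        secret_name='mariadb-password'))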
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: - super(AzureMariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.pwd = pwd - self.encrypted_credential = encrypted_credential - self.type = 'AzureMariaDB' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source.py deleted file mode 100644 index 229e6f4311e3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class AzureMariaDBSource(CopySource): - """A copy activity Azure MariaDB source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureMariaDBSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'AzureMariaDBSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source_py3.py deleted file mode 100644 index 11358f899e51..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class AzureMariaDBSource(CopySource): - """A copy activity Azure MariaDB source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
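In a copy activity the source usually carries just the query (a minimal sketch; the table and columns are hypothetical):

from azure.mgmt.datafactory.models import AzureMariaDBSource

mariadb_source = AzureMariaDBSource(
    query='SELECT id, name FROM sales LIMIT 100')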
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(AzureMariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'AzureMariaDBSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset.py deleted file mode 100644 index a06c722279f2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class AzureMariaDBTableDataset(Dataset): - """Azure Database for MariaDB dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). 
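A minimal sketch of the deleted AzureMariaDBSource, using only the keyword arguments visible in the py3 constructor above (the query text and retry settings are placeholders):

from azure.mgmt.datafactory.models import AzureMariaDBSource

source = AzureMariaDBSource(
    query='SELECT * FROM my_table',  # serialized under the 'query' key
    source_retry_count=2,
    max_concurrent_connections=4,
)
assert source.type == 'AzureMariaDBSource'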
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureMariaDBTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'AzureMariaDBTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset_py3.py deleted file mode 100644 index 9c6fd648af20..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class AzureMariaDBTableDataset(Dataset): - """Azure Database for MariaDB dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). 
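A minimal sketch of the deleted AzureMariaDBTableDataset; linked_service_name is the one required property, and LinkedServiceReference(reference_name=...) is assumed to be the usual way to build that reference in this SDK (names are placeholders):

from azure.mgmt.datafactory.models import (
    AzureMariaDBTableDataset,
    LinkedServiceReference,
)

dataset = AzureMariaDBTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='MariaDBLinkedService'),
    table_name='my_table',  # lands at typeProperties.tableName in the payload
)
assert dataset.type == 'AzureMariaDBTable'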
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(AzureMariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'AzureMariaDBTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity.py deleted file mode 100644 index f6c7c75a1299..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity import ExecutionActivity - - -class AzureMLBatchExecutionActivity(ExecutionActivity): - """Azure ML Batch Execution activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param global_parameters: Key,Value pairs to be passed to the Azure ML - Batch Execution Service endpoint. Keys must match the names of web service - parameters defined in the published Azure ML web service. 
Values will be - passed in the GlobalParameters property of the Azure ML batch execution - request. - :type global_parameters: dict[str, object] - :param web_service_outputs: Key,Value pairs, mapping the names of Azure ML - endpoint's Web Service Outputs to AzureMLWebServiceFile objects specifying - the output Blob locations. This information will be passed in the - WebServiceOutputs property of the Azure ML batch execution request. - :type web_service_outputs: dict[str, - ~azure.mgmt.datafactory.models.AzureMLWebServiceFile] - :param web_service_inputs: Key,Value pairs, mapping the names of Azure ML - endpoint's Web Service Inputs to AzureMLWebServiceFile objects specifying - the input Blob locations.. This information will be passed in the - WebServiceInputs property of the Azure ML batch execution request. - :type web_service_inputs: dict[str, - ~azure.mgmt.datafactory.models.AzureMLWebServiceFile] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'global_parameters': {'key': 'typeProperties.globalParameters', 'type': '{object}'}, - 'web_service_outputs': {'key': 'typeProperties.webServiceOutputs', 'type': '{AzureMLWebServiceFile}'}, - 'web_service_inputs': {'key': 'typeProperties.webServiceInputs', 'type': '{AzureMLWebServiceFile}'}, - } - - def __init__(self, **kwargs): - super(AzureMLBatchExecutionActivity, self).__init__(**kwargs) - self.global_parameters = kwargs.get('global_parameters', None) - self.web_service_outputs = kwargs.get('web_service_outputs', None) - self.web_service_inputs = kwargs.get('web_service_inputs', None) - self.type = 'AzureMLBatchExecution' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity_py3.py deleted file mode 100644 index e273c0b38128..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class AzureMLBatchExecutionActivity(ExecutionActivity): - """Azure ML Batch Execution activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. 
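A minimal sketch of the deleted AzureMLBatchExecutionActivity; per the docstring above, global_parameters keys must match the parameters of the published Azure ML web service (all names and values below are placeholders, and LinkedServiceReference(reference_name=...) is an assumed helper signature):

from azure.mgmt.datafactory.models import (
    AzureMLBatchExecutionActivity,
    LinkedServiceReference,
)

activity = AzureMLBatchExecutionActivity(
    name='ScoreBatch',
    linked_service_name=LinkedServiceReference(reference_name='AzureMLLinkedService'),
    # Passed through in the GlobalParameters property of the batch request.
    global_parameters={'Threshold': 0.8},
)
assert activity.type == 'AzureMLBatchExecution'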
- :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param global_parameters: Key,Value pairs to be passed to the Azure ML - Batch Execution Service endpoint. Keys must match the names of web service - parameters defined in the published Azure ML web service. Values will be - passed in the GlobalParameters property of the Azure ML batch execution - request. - :type global_parameters: dict[str, object] - :param web_service_outputs: Key,Value pairs, mapping the names of Azure ML - endpoint's Web Service Outputs to AzureMLWebServiceFile objects specifying - the output Blob locations. This information will be passed in the - WebServiceOutputs property of the Azure ML batch execution request. - :type web_service_outputs: dict[str, - ~azure.mgmt.datafactory.models.AzureMLWebServiceFile] - :param web_service_inputs: Key,Value pairs, mapping the names of Azure ML - endpoint's Web Service Inputs to AzureMLWebServiceFile objects specifying - the input Blob locations.. This information will be passed in the - WebServiceInputs property of the Azure ML batch execution request. - :type web_service_inputs: dict[str, - ~azure.mgmt.datafactory.models.AzureMLWebServiceFile] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'global_parameters': {'key': 'typeProperties.globalParameters', 'type': '{object}'}, - 'web_service_outputs': {'key': 'typeProperties.webServiceOutputs', 'type': '{AzureMLWebServiceFile}'}, - 'web_service_inputs': {'key': 'typeProperties.webServiceInputs', 'type': '{AzureMLWebServiceFile}'}, - } - - def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, global_parameters=None, web_service_outputs=None, web_service_inputs=None, **kwargs) -> None: - super(AzureMLBatchExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.global_parameters = global_parameters - self.web_service_outputs = web_service_outputs - self.web_service_inputs = web_service_inputs - self.type = 'AzureMLBatchExecution' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service.py deleted file mode 100644 index 08dfec98a6bf..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service.py +++ /dev/null @@ -1,94 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class AzureMLLinkedService(LinkedService): - """Azure ML Web Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param ml_endpoint: Required. The Batch Execution REST URL for an Azure ML - Web Service endpoint. Type: string (or Expression with resultType string). - :type ml_endpoint: object - :param api_key: Required. The API key for accessing the Azure ML model - endpoint. - :type api_key: ~azure.mgmt.datafactory.models.SecretBase - :param update_resource_endpoint: The Update Resource REST URL for an Azure - ML Web Service endpoint. Type: string (or Expression with resultType - string). - :type update_resource_endpoint: object - :param service_principal_id: The ID of the service principal used to - authenticate against the ARM-based updateResourceEndpoint of an Azure ML - web service. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to - authenticate against the ARM-based updateResourceEndpoint of an Azure ML - web service. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal - belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'ml_endpoint': {'required': True}, - 'api_key': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'ml_endpoint': {'key': 'typeProperties.mlEndpoint', 'type': 'object'}, - 'api_key': {'key': 'typeProperties.apiKey', 'type': 'SecretBase'}, - 'update_resource_endpoint': {'key': 'typeProperties.updateResourceEndpoint', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureMLLinkedService, self).__init__(**kwargs) - self.ml_endpoint = kwargs.get('ml_endpoint', None) - self.api_key = kwargs.get('api_key', None) - self.update_resource_endpoint = kwargs.get('update_resource_endpoint', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AzureML' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service_py3.py deleted file mode 100644 index c77a692adc03..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service_py3.py +++ /dev/null @@ -1,94 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzureMLLinkedService(LinkedService): - """Azure ML Web Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param ml_endpoint: Required. 
The Batch Execution REST URL for an Azure ML - Web Service endpoint. Type: string (or Expression with resultType string). - :type ml_endpoint: object - :param api_key: Required. The API key for accessing the Azure ML model - endpoint. - :type api_key: ~azure.mgmt.datafactory.models.SecretBase - :param update_resource_endpoint: The Update Resource REST URL for an Azure - ML Web Service endpoint. Type: string (or Expression with resultType - string). - :type update_resource_endpoint: object - :param service_principal_id: The ID of the service principal used to - authenticate against the ARM-based updateResourceEndpoint of an Azure ML - web service. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to - authenticate against the ARM-based updateResourceEndpoint of an Azure ML - web service. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal - belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'ml_endpoint': {'required': True}, - 'api_key': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'ml_endpoint': {'key': 'typeProperties.mlEndpoint', 'type': 'object'}, - 'api_key': {'key': 'typeProperties.apiKey', 'type': 'SecretBase'}, - 'update_resource_endpoint': {'key': 'typeProperties.updateResourceEndpoint', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, ml_endpoint, api_key, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, update_resource_endpoint=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: - super(AzureMLLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.ml_endpoint = ml_endpoint - self.api_key = api_key - self.update_resource_endpoint = update_resource_endpoint - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.encrypted_credential = encrypted_credential - self.type = 'AzureML' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity.py 
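A minimal sketch of the deleted AzureMLLinkedService; ml_endpoint and api_key are its two required properties, and SecureString is assumed to be the in-line SecretBase implementation in this SDK (endpoint and key are placeholders):

from azure.mgmt.datafactory.models import AzureMLLinkedService, SecureString

ml_ls = AzureMLLinkedService(
    ml_endpoint='https://<region>.services.azureml.net/workspaces/<ws>/services/<svc>/jobs',
    api_key=SecureString(value='<api-key>'),
)
assert ml_ls.type == 'AzureML'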
deleted file mode 100644 index c47a2d81648e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity.py +++ /dev/null @@ -1,81 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity import ExecutionActivity - - -class AzureMLUpdateResourceActivity(ExecutionActivity): - """Azure ML Update Resource management activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param trained_model_name: Required. Name of the Trained Model module in - the Web Service experiment to be updated. Type: string (or Expression with - resultType string). - :type trained_model_name: object - :param trained_model_linked_service_name: Required. Name of Azure Storage - linked service holding the .ilearner file that will be uploaded by the - update operation. - :type trained_model_linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param trained_model_file_path: Required. The relative file path in - trainedModelLinkedService to represent the .ilearner file that will be - uploaded by the update operation. Type: string (or Expression with - resultType string). 
- :type trained_model_file_path: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'trained_model_name': {'required': True}, - 'trained_model_linked_service_name': {'required': True}, - 'trained_model_file_path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'trained_model_name': {'key': 'typeProperties.trainedModelName', 'type': 'object'}, - 'trained_model_linked_service_name': {'key': 'typeProperties.trainedModelLinkedServiceName', 'type': 'LinkedServiceReference'}, - 'trained_model_file_path': {'key': 'typeProperties.trainedModelFilePath', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureMLUpdateResourceActivity, self).__init__(**kwargs) - self.trained_model_name = kwargs.get('trained_model_name', None) - self.trained_model_linked_service_name = kwargs.get('trained_model_linked_service_name', None) - self.trained_model_file_path = kwargs.get('trained_model_file_path', None) - self.type = 'AzureMLUpdateResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity_py3.py deleted file mode 100644 index 50a5932f0bf0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity_py3.py +++ /dev/null @@ -1,81 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class AzureMLUpdateResourceActivity(ExecutionActivity): - """Azure ML Update Resource management activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param trained_model_name: Required. 
Name of the Trained Model module in - the Web Service experiment to be updated. Type: string (or Expression with - resultType string). - :type trained_model_name: object - :param trained_model_linked_service_name: Required. Name of Azure Storage - linked service holding the .ilearner file that will be uploaded by the - update operation. - :type trained_model_linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param trained_model_file_path: Required. The relative file path in - trainedModelLinkedService to represent the .ilearner file that will be - uploaded by the update operation. Type: string (or Expression with - resultType string). - :type trained_model_file_path: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'trained_model_name': {'required': True}, - 'trained_model_linked_service_name': {'required': True}, - 'trained_model_file_path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'trained_model_name': {'key': 'typeProperties.trainedModelName', 'type': 'object'}, - 'trained_model_linked_service_name': {'key': 'typeProperties.trainedModelLinkedServiceName', 'type': 'LinkedServiceReference'}, - 'trained_model_file_path': {'key': 'typeProperties.trainedModelFilePath', 'type': 'object'}, - } - - def __init__(self, *, name: str, trained_model_name, trained_model_linked_service_name, trained_model_file_path, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, **kwargs) -> None: - super(AzureMLUpdateResourceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.trained_model_name = trained_model_name - self.trained_model_linked_service_name = trained_model_linked_service_name - self.trained_model_file_path = trained_model_file_path - self.type = 'AzureMLUpdateResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file.py deleted file mode 100644 index 682b24fed830..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file.py +++ /dev/null @@ -1,43 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class AzureMLWebServiceFile(Model): - """Azure ML WebService Input/Output file. 
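A minimal sketch of the deleted AzureMLUpdateResourceActivity, filling in its three required typeProperties (all values are placeholders; LinkedServiceReference(reference_name=...) is an assumed helper signature):

from azure.mgmt.datafactory.models import (
    AzureMLUpdateResourceActivity,
    LinkedServiceReference,
)

update = AzureMLUpdateResourceActivity(
    name='UpdateModel',
    trained_model_name='Trained Model',
    trained_model_linked_service_name=LinkedServiceReference(
        reference_name='BlobStorageLinkedService'),
    trained_model_file_path='models/model.ilearner',
)
assert update.type == 'AzureMLUpdateResource'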
- - All required parameters must be populated in order to send to Azure. - - :param file_path: Required. The relative file path, including container - name, in the Azure Blob Storage specified by the LinkedService. Type: - string (or Expression with resultType string). - :type file_path: object - :param linked_service_name: Required. Reference to an Azure Storage - LinkedService, where the Azure ML WebService Input/Output file is located. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - """ - - _validation = { - 'file_path': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'file_path': {'key': 'filePath', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - } - - def __init__(self, **kwargs): - super(AzureMLWebServiceFile, self).__init__(**kwargs) - self.file_path = kwargs.get('file_path', None) - self.linked_service_name = kwargs.get('linked_service_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file_py3.py deleted file mode 100644 index abe75d9d9bf2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file_py3.py +++ /dev/null @@ -1,43 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class AzureMLWebServiceFile(Model): - """Azure ML WebService Input/Output file. - - All required parameters must be populated in order to send to Azure. - - :param file_path: Required. The relative file path, including container - name, in the Azure Blob Storage specified by the LinkedService. Type: - string (or Expression with resultType string). - :type file_path: object - :param linked_service_name: Required. Reference to an Azure Storage - LinkedService, where the Azure ML WebService Input/Output file is located.
- :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - """ - - _validation = { - 'file_path': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'file_path': {'key': 'filePath', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - } - - def __init__(self, *, file_path, linked_service_name, **kwargs) -> None: - super(AzureMLWebServiceFile, self).__init__(**kwargs) - self.file_path = file_path - self.linked_service_name = linked_service_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service.py deleted file mode 100644 index aedbdbb73eb5..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service.py +++ /dev/null @@ -1,71 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class AzureMySqlLinkedService(LinkedService): - """Azure MySQL database linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in - connection string. - :type password: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
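A minimal sketch of the deleted AzureMLWebServiceFile helper; per its docstring, instances map a web service input or output name to a Blob location via the batch execution activity's web_service_inputs/web_service_outputs dicts (paths and names are placeholders):

from azure.mgmt.datafactory.models import (
    AzureMLWebServiceFile,
    LinkedServiceReference,
)

input_file = AzureMLWebServiceFile(
    file_path='mycontainer/input.csv',  # relative path including container name
    linked_service_name=LinkedServiceReference(reference_name='BlobStorageLinkedService'),
)
# e.g. web_service_inputs={'input1': input_file} on AzureMLBatchExecutionActivity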
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureMySqlLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AzureMySql' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service_py3.py deleted file mode 100644 index 57692275f564..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service_py3.py +++ /dev/null @@ -1,71 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzureMySqlLinkedService(LinkedService): - """Azure MySQL database linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in - connection string. - :type password: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(AzureMySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'AzureMySql' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source.py deleted file mode 100644 index 823336432567..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class AzureMySqlSource(CopySource): - """A copy activity Azure MySQL source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). 
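A minimal sketch of the deleted AzureMySqlLinkedService; connection_string is required, and AzureKeyVaultSecretReference(store=..., secret_name=...) is assumed to be the usual way to point the password at Key Vault (all names are placeholders):

from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference,
    AzureMySqlLinkedService,
    LinkedServiceReference,
)

mysql_ls = AzureMySqlLinkedService(
    connection_string='Server=myserver.mysql.database.azure.com;Database=mydb',
    password=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='KeyVaultLinkedService'),
        secret_name='mysql-password',
    ),
)
assert mysql_ls.type == 'AzureMySql'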
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureMySqlSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'AzureMySqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source_py3.py deleted file mode 100644 index 7030738d2615..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class AzureMySqlSource(CopySource): - """A copy activity Azure MySQL source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'AzureMySqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset.py deleted file mode 100644 index 8f5d43478089..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class AzureMySqlTableDataset(Dataset): - """The Azure MySQL database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The Azure MySQL database table name. Type: string (or - Expression with resultType string). 
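A minimal sketch of the deleted AzureMySqlSource, using only the keyword arguments visible in the py3 constructor above (placeholder query):

from azure.mgmt.datafactory.models import AzureMySqlSource

mysql_source = AzureMySqlSource(query='SELECT * FROM my_table')
assert mysql_source.type == 'AzureMySqlSource'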
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureMySqlTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'AzureMySqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset_py3.py deleted file mode 100644 index 7bd7eb6f17f8..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class AzureMySqlTableDataset(Dataset): - """The Azure MySQL database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The Azure MySQL database table name. Type: string (or - Expression with resultType string). 
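A minimal sketch of the deleted AzureMySqlTableDataset; as with the other datasets here, linked_service_name is required and LinkedServiceReference(reference_name=...) is an assumed helper signature (names are placeholders):

from azure.mgmt.datafactory.models import (
    AzureMySqlTableDataset,
    LinkedServiceReference,
)

mysql_dataset = AzureMySqlTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='AzureMySqlLinkedService'),
    table_name='my_table',
)
assert mysql_dataset.type == 'AzureMySqlTable'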
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(AzureMySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'AzureMySqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service.py deleted file mode 100644 index 92359d6d6a10..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service.py +++ /dev/null @@ -1,70 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class AzurePostgreSqlLinkedService(LinkedService): - """Azure PostgreSQL linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in - connection string. - :type password: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. 
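# [Editor's note] Illustrative sketch only, not part of the patch: how the
# AzureMySqlSource and AzureMySqlTableDataset models removed above were
# typically constructed. Keyword arguments mirror the __init__ signatures
# in the deleted files; all reference names and values are hypothetical.
from azure.mgmt.datafactory.models import (
    AzureMySqlSource,
    AzureMySqlTableDataset,
    LinkedServiceReference,
)

dataset = AzureMySqlTableDataset(
    linked_service_name=LinkedServiceReference(reference_name="AzureMySqlLS"),
    table_name="orders",  # a plain string; an Expression object is also accepted
)
source = AzureMySqlSource(query="SELECT id, total FROM orders")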
Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzurePostgreSqlLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AzurePostgreSql' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service_py3.py deleted file mode 100644 index 47f8f17980f8..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service_py3.py +++ /dev/null @@ -1,70 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzurePostgreSqlLinkedService(LinkedService): - """Azure PostgreSQL linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in - connection string. - :type password: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. 
Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(AzurePostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'AzurePostgreSql' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink.py deleted file mode 100644 index 6214e1ba1f22..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink.py +++ /dev/null @@ -1,66 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink import CopySink - - -class AzurePostgreSqlSink(CopySink): - """A copy activity Azure PostgreSQL sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. 
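# [Editor's note] Illustrative sketch only, not part of the patch: the removed
# AzurePostgreSqlLinkedService took an ODBC-style connection string, with the
# password optionally factored out into an Azure Key Vault secret reference.
# The connection string and all reference names below are hypothetical.
from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference,
    AzurePostgreSqlLinkedService,
    LinkedServiceReference,
)

linked_service = AzurePostgreSqlLinkedService(
    connection_string="host=contoso.postgres.database.azure.com;port=5432;database=sales;uid=dfadmin",
    password=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name="AzureKeyVaultLS"),
        secret_name="pg-password",
    ),
)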
Constant filled by server. - :type type: str - :param pre_copy_script: A query to execute before starting the copy. Type: - string (or Expression with resultType string). - :type pre_copy_script: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzurePostgreSqlSink, self).__init__(**kwargs) - self.pre_copy_script = kwargs.get('pre_copy_script', None) - self.type = 'AzurePostgreSqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink_py3.py deleted file mode 100644 index b7cd0ec51a29..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink_py3.py +++ /dev/null @@ -1,66 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class AzurePostgreSqlSink(CopySink): - """A copy activity Azure PostgreSQL sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param pre_copy_script: A query to execute before starting the copy. Type: - string (or Expression with resultType string). 
- :type pre_copy_script: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: - super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.pre_copy_script = pre_copy_script - self.type = 'AzurePostgreSqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source.py deleted file mode 100644 index e0cd62fd8028..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class AzurePostgreSqlSource(CopySource): - """A copy activity Azure PostgreSQL source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzurePostgreSqlSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'AzurePostgreSqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source_py3.py deleted file mode 100644 index 0362b0dca390..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class AzurePostgreSqlSource(CopySource): - """A copy activity Azure PostgreSQL source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'AzurePostgreSqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset.py deleted file mode 100644 index 933264b57a9b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset.py +++ /dev/null @@ -1,84 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class AzurePostgreSqlTableDataset(Dataset): - """Azure PostgreSQL dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name of the Azure PostgreSQL database which - includes both schema and table. Type: string (or Expression with - resultType string). - :type table_name: object - :param table: The table name of the Azure PostgreSQL database. 
Type: - string (or Expression with resultType string). - :type table: object - :param azure_postgre_sql_table_dataset_schema: The schema name of the - Azure PostgreSQL database. Type: string (or Expression with resultType - string). - :type azure_postgre_sql_table_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'azure_postgre_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzurePostgreSqlTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.azure_postgre_sql_table_dataset_schema = kwargs.get('azure_postgre_sql_table_dataset_schema', None) - self.type = 'AzurePostgreSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset_py3.py deleted file mode 100644 index 485dc3efb102..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset_py3.py +++ /dev/null @@ -1,84 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class AzurePostgreSqlTableDataset(Dataset): - """Azure PostgreSQL dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name of the Azure PostgreSQL database which - includes both schema and table. Type: string (or Expression with - resultType string). - :type table_name: object - :param table: The table name of the Azure PostgreSQL database. Type: - string (or Expression with resultType string). - :type table: object - :param azure_postgre_sql_table_dataset_schema: The schema name of the - Azure PostgreSQL database. Type: string (or Expression with resultType - string). - :type azure_postgre_sql_table_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'azure_postgre_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, azure_postgre_sql_table_dataset_schema=None, **kwargs) -> None: - super(AzurePostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.table = table - self.azure_postgre_sql_table_dataset_schema = azure_postgre_sql_table_dataset_schema - self.type = 'AzurePostgreSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink.py deleted file mode 100644 index 9f3a63db4978..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink.py +++ /dev/null @@ -1,61 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
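# [Editor's note] Illustrative sketch only, not part of the patch: the removed
# AzurePostgreSqlTableDataset, AzurePostgreSqlSource and AzurePostgreSqlSink
# wired into a copy activity. Keyword arguments follow the deleted __init__
# signatures; the dataset/linked-service names and SQL are hypothetical.
from azure.mgmt.datafactory.models import (
    AzurePostgreSqlSink,
    AzurePostgreSqlSource,
    AzurePostgreSqlTableDataset,
    CopyActivity,
    DatasetReference,
    LinkedServiceReference,
)

dataset = AzurePostgreSqlTableDataset(
    linked_service_name=LinkedServiceReference(reference_name="AzurePostgreSqlLS"),
    table="orders",
    azure_postgre_sql_table_dataset_schema="public",  # serialized as typeProperties.schema
)
copy = CopyActivity(
    name="CopyPostgreToPostgre",
    inputs=[DatasetReference(reference_name="SourceOrders")],
    outputs=[DatasetReference(reference_name="StagingOrders")],
    source=AzurePostgreSqlSource(query="SELECT * FROM public.orders"),
    sink=AzurePostgreSqlSink(pre_copy_script="TRUNCATE TABLE public.orders_staging"),
)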
-# -------------------------------------------------------------------------- - -from .copy_sink import CopySink - - -class AzureQueueSink(CopySink): - """A copy activity Azure Queue sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(AzureQueueSink, self).__init__(**kwargs) - self.type = 'AzureQueueSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink_py3.py deleted file mode 100644 index db2fb60ddb1e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink_py3.py +++ /dev/null @@ -1,61 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class AzureQueueSink(CopySink): - """A copy activity Azure Queue sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. 
Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: - super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type = 'AzureQueueSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset.py deleted file mode 100644 index 1239bbad78fc..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class AzureSearchIndexDataset(Dataset): - """The Azure Search Index. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. 
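# [Editor's note] Illustrative sketch only, not part of the patch: the removed
# AzureQueueSink declares no properties of its own beyond the CopySink base
# class, so constructing it only involves the shared batching/retry knobs.
# The values below are hypothetical.
from azure.mgmt.datafactory.models import AzureQueueSink

sink = AzureQueueSink(write_batch_size=100, max_concurrent_connections=4)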
Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param index_name: Required. The name of the Azure Search Index. Type: - string (or Expression with resultType string). - :type index_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'index_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'index_name': {'key': 'typeProperties.indexName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureSearchIndexDataset, self).__init__(**kwargs) - self.index_name = kwargs.get('index_name', None) - self.type = 'AzureSearchIndex' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset_py3.py deleted file mode 100644 index da5e92dd2edd..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset_py3.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class AzureSearchIndexDataset(Dataset): - """The Azure Search Index. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param index_name: Required. The name of the Azure Search Index. Type: - string (or Expression with resultType string). - :type index_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'index_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'index_name': {'key': 'typeProperties.indexName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, index_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: - super(AzureSearchIndexDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.index_name = index_name - self.type = 'AzureSearchIndex' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py deleted file mode 100644 index 9aae64af8da0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py +++ /dev/null @@ -1,67 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink import CopySink - - -class AzureSearchIndexSink(CopySink): - """A copy activity Azure Search Index sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. 
Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param write_behavior: Specify the write behavior when upserting documents - into Azure Search Index. Possible values include: 'Merge', 'Upload' - :type write_behavior: str or - ~azure.mgmt.datafactory.models.AzureSearchIndexWriteBehaviorType - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(AzureSearchIndexSink, self).__init__(**kwargs) - self.write_behavior = kwargs.get('write_behavior', None) - self.type = 'AzureSearchIndexSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py deleted file mode 100644 index 3cd887a2512c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py +++ /dev/null @@ -1,67 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class AzureSearchIndexSink(CopySink): - """A copy activity Azure Search Index sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). 
- :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param write_behavior: Specify the write behavior when upserting documents - into Azure Search Index. Possible values include: 'Merge', 'Upload' - :type write_behavior: str or - ~azure.mgmt.datafactory.models.AzureSearchIndexWriteBehaviorType - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: - super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.write_behavior = write_behavior - self.type = 'AzureSearchIndexSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service.py deleted file mode 100644 index 782799cd5b28..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class AzureSearchLinkedService(LinkedService): - """Linked service for Windows Azure Search Service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
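# [Editor's note] Illustrative sketch only, not part of the patch: the removed
# Azure Search models paired an index dataset (index_name is required) with a
# sink whose write_behavior is one of 'Merge' or 'Upload', per the docstrings
# above. The reference and index names below are hypothetical.
from azure.mgmt.datafactory.models import (
    AzureSearchIndexDataset,
    AzureSearchIndexSink,
    LinkedServiceReference,
)

dataset = AzureSearchIndexDataset(
    linked_service_name=LinkedServiceReference(reference_name="AzureSearchLS"),
    index_name="products-index",
)
sink = AzureSearchIndexSink(write_behavior="Merge")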
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param url: Required. URL for Azure Search service. Type: string (or - Expression with resultType string). - :type url: object - :param key: Admin Key for Azure Search service - :type key: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'key': {'key': 'typeProperties.key', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureSearchLinkedService, self).__init__(**kwargs) - self.url = kwargs.get('url', None) - self.key = kwargs.get('key', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AzureSearch' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service_py3.py deleted file mode 100644 index 8589c3aead91..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service_py3.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzureSearchLinkedService(LinkedService): - """Linked service for Windows Azure Search Service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. 
- :type type: str - :param url: Required. URL for Azure Search service. Type: string (or - Expression with resultType string). - :type url: object - :param key: Admin Key for Azure Search service - :type key: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'key': {'key': 'typeProperties.key', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, key=None, encrypted_credential=None, **kwargs) -> None: - super(AzureSearchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.url = url - self.key = key - self.encrypted_credential = encrypted_credential - self.type = 'AzureSearch' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service.py deleted file mode 100644 index 0da66637a04f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service.py +++ /dev/null @@ -1,87 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class AzureSqlDatabaseLinkedService(LinkedService): - """Microsoft Azure SQL Database linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. 
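# [Editor's note] Illustrative sketch only, not part of the patch: the removed
# AzureSearchLinkedService requires the service URL; the admin key is any
# SecretBase, shown here as an inline SecureString (an Azure Key Vault
# reference works as well). The endpoint and key are hypothetical.
from azure.mgmt.datafactory.models import AzureSearchLinkedService, SecureString

linked_service = AzureSearchLinkedService(
    url="https://contoso.search.windows.net",
    key=SecureString(value="<admin-key>"),
)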
- :type type: str - :param connection_string: Required. The connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in - connection string. - :type password: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param service_principal_id: The ID of the service principal used to - authenticate against Azure SQL Database. Type: string (or Expression with - resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to - authenticate against Azure SQL Database. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal - belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureSqlDatabaseLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.password = kwargs.get('password', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AzureSqlDatabase' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service_py3.py deleted file mode 100644 index dbcf6c88b134..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service_py3.py +++ /dev/null @@ -1,87 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzureSqlDatabaseLinkedService(LinkedService): - """Microsoft Azure SQL Database linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in - connection string. - :type password: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param service_principal_id: The ID of the service principal used to - authenticate against Azure SQL Database. Type: string (or Expression with - resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to - authenticate against Azure SQL Database. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal - belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: - super(AzureSqlDatabaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.password = password - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.encrypted_credential = encrypted_credential - self.type = 'AzureSqlDatabase' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service.py deleted file mode 100644 index cc7c9d58d19f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class AzureSqlDWLinkedService(LinkedService): - """Azure SQL Data Warehouse linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. 
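A minimal usage sketch for the AzureSqlDatabaseLinkedService constructor above, combining a connection string with the optional service principal authentication fields; all identifiers are placeholders:

    from azure.mgmt.datafactory.models import (
        AzureSqlDatabaseLinkedService,
        SecureString,
    )

    # connection_string is required; the service principal fields are
    # optional and only needed for Azure AD authentication.
    sql_db_ls = AzureSqlDatabaseLinkedService(
        connection_string=SecureString(
            value='Server=tcp:myserver.database.windows.net,1433;Database=mydb;'),
        service_principal_id='<application-id>',
        service_principal_key=SecureString(value='<application-secret>'),
        tenant='<tenant-id>',
    )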
- :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in - connection string. - :type password: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param service_principal_id: The ID of the service principal used to - authenticate against Azure SQL Data Warehouse. Type: string (or Expression - with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to - authenticate against Azure SQL Data Warehouse. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal - belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureSqlDWLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.password = kwargs.get('password', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AzureSqlDW' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service_py3.py deleted file mode 100644 index 5c75f3904b37..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service_py3.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information.
-# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzureSqlDWLinkedService(LinkedService): - """Azure SQL Data Warehouse linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in - connection string. - :type password: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param service_principal_id: The ID of the service principal used to - authenticate against Azure SQL Data Warehouse. Type: string (or Expression - with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to - authenticate against Azure SQL Data Warehouse. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal - belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: - super(AzureSqlDWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.password = password - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.encrypted_credential = encrypted_credential - self.type = 'AzureSqlDW' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py deleted file mode 100644 index ed9fe8904d73..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class AzureSqlDWTableDataset(Dataset): - """The Azure SQL Data Warehouse dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
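The AzureSqlDWLinkedService above has the same shape; a sketch that instead resolves the password through a Key Vault secret reference (the linked service and secret names are illustrative):

    from azure.mgmt.datafactory.models import (
        AzureKeyVaultSecretReference,
        AzureSqlDWLinkedService,
        LinkedServiceReference,
        SecureString,
    )

    # password maps to typeProperties.password and must be an
    # AzureKeyVaultSecretReference per the attribute map above.
    dw_ls = AzureSqlDWLinkedService(
        connection_string=SecureString(
            value='Server=tcp:mydw.database.windows.net,1433;Database=mydw;'),
        password=AzureKeyVaultSecretReference(
            store=LinkedServiceReference(reference_name='AzureKeyVaultLS'),
            secret_name='dw-password'),
    )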
- :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param azure_sql_dw_table_dataset_schema: The schema name of the Azure SQL - Data Warehouse. Type: string (or Expression with resultType string). - :type azure_sql_dw_table_dataset_schema: object - :param table: The table name of the Azure SQL Data Warehouse. Type: string - (or Expression with resultType string). - :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'azure_sql_dw_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureSqlDWTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.azure_sql_dw_table_dataset_schema = kwargs.get('azure_sql_dw_table_dataset_schema', None) - self.table = kwargs.get('table', None) - self.type = 'AzureSqlDWTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py deleted file mode 100644 index a38e4ab479c9..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class AzureSqlDWTableDataset(Dataset): - """The Azure SQL Data Warehouse dataset. - - All required parameters must be populated in order to send to Azure. 
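A companion sketch for the AzureSqlDWTableDataset defined above, using the schema + table pair that replaces the retiring table_name property; the reference and object names are illustrative:

    from azure.mgmt.datafactory.models import (
        AzureSqlDWTableDataset,
        LinkedServiceReference,
    )

    # azure_sql_dw_table_dataset_schema serializes to typeProperties.schema,
    # table to typeProperties.table.
    dw_table = AzureSqlDWTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='AzureSqlDWLS'),
        azure_sql_dw_table_dataset_schema='dbo',
        table='FactSales',
    )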
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param azure_sql_dw_table_dataset_schema: The schema name of the Azure SQL - Data Warehouse. Type: string (or Expression with resultType string). - :type azure_sql_dw_table_dataset_schema: object - :param table: The table name of the Azure SQL Data Warehouse. Type: string - (or Expression with resultType string). - :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'azure_sql_dw_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, azure_sql_dw_table_dataset_schema=None, table=None, **kwargs) -> None: - super(AzureSqlDWTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.azure_sql_dw_table_dataset_schema = azure_sql_dw_table_dataset_schema - self.table = table - self.type = 'AzureSqlDWTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service.py deleted file mode 100644 index 
2aab3a145ff2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service.py +++ /dev/null @@ -1,87 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class AzureSqlMILinkedService(LinkedService): - """Azure SQL Managed Instance linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in - connection string. - :type password: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param service_principal_id: The ID of the service principal used to - authenticate against Azure SQL Managed Instance. Type: string (or - Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to - authenticate against Azure SQL Managed Instance. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal - belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureSqlMILinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.password = kwargs.get('password', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AzureSqlMI' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service_py3.py deleted file mode 100644 index ec1a2e5e8549..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service_py3.py +++ /dev/null @@ -1,87 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzureSqlMILinkedService(LinkedService): - """Azure SQL Managed Instance linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. 
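AzureSqlMILinkedService mirrors the SQL Database and SQL DW linked services; a minimal sketch with a placeholder managed instance connection string:

    from azure.mgmt.datafactory.models import AzureSqlMILinkedService, SecureString

    # Only connection_string is required; the service principal fields,
    # tenant, and encrypted_credential are all optional.
    sql_mi_ls = AzureSqlMILinkedService(
        connection_string=SecureString(
            value='Server=mymi.public.contoso.database.windows.net,3342;Database=mydb;'),
    )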
- :type connection_string: object - :param password: The Azure key vault secret reference of password in - connection string. - :type password: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param service_principal_id: The ID of the service principal used to - authenticate against Azure SQL Managed Instance. Type: string (or - Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to - authenticate against Azure SQL Managed Instance. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal - belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: - super(AzureSqlMILinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.password = password - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.encrypted_credential = encrypted_credential - self.type = 'AzureSqlMI' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset.py deleted file mode 100644 index 1128a9e8cb06..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. 
-# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class AzureSqlMITableDataset(Dataset): - """The Azure SQL Managed Instance dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param azure_sql_mi_table_dataset_schema: The schema name of the Azure SQL - Managed Instance. Type: string (or Expression with resultType string). - :type azure_sql_mi_table_dataset_schema: object - :param table: The table name of the Azure SQL Managed Instance dataset. - Type: string (or Expression with resultType string). 
- :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'azure_sql_mi_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureSqlMITableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.azure_sql_mi_table_dataset_schema = kwargs.get('azure_sql_mi_table_dataset_schema', None) - self.table = kwargs.get('table', None) - self.type = 'AzureSqlMITable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset_py3.py deleted file mode 100644 index ac72614e3ed4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class AzureSqlMITableDataset(Dataset): - """The Azure SQL Managed Instance dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. 
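And the matching dataset sketch for AzureSqlMITableDataset, again preferring schema + table over the retiring table_name (names illustrative):

    from azure.mgmt.datafactory.models import (
        AzureSqlMITableDataset,
        LinkedServiceReference,
    )

    mi_table = AzureSqlMITableDataset(
        linked_service_name=LinkedServiceReference(reference_name='AzureSqlMILS'),
        azure_sql_mi_table_dataset_schema='dbo',  # typeProperties.schema
        table='Orders',                           # typeProperties.table
    )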
- :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param azure_sql_mi_table_dataset_schema: The schema name of the Azure SQL - Managed Instance. Type: string (or Expression with resultType string). - :type azure_sql_mi_table_dataset_schema: object - :param table: The table name of the Azure SQL Managed Instance dataset. - Type: string (or Expression with resultType string). - :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'azure_sql_mi_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, azure_sql_mi_table_dataset_schema=None, table=None, **kwargs) -> None: - super(AzureSqlMITableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.azure_sql_mi_table_dataset_schema = azure_sql_mi_table_dataset_schema - self.table = table - self.type = 'AzureSqlMITable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink.py deleted file mode 100644 index 441bf0c4279f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink.py +++ /dev/null @@ -1,87 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink import CopySink - - -class AzureSqlSink(CopySink): - """A copy activity Azure SQL sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. 
Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_writer_stored_procedure_name: SQL writer stored procedure name. - Type: string (or Expression with resultType string). - :type sql_writer_stored_procedure_name: object - :param sql_writer_table_type: SQL writer table type. Type: string (or - Expression with resultType string). - :type sql_writer_table_type: object - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression - with resultType string). - :type pre_copy_script: object - :param stored_procedure_parameters: SQL stored procedure parameters. - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param stored_procedure_table_type_parameter_name: The stored procedure - parameter name of the table type. Type: string (or Expression with - resultType string). - :type stored_procedure_table_type_parameter_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, - 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureSqlSink, self).__init__(**kwargs) - self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) - self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) - self.pre_copy_script = kwargs.get('pre_copy_script', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) - self.type = 'AzureSqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink_py3.py deleted file mode 100644 index 6aa431ae57d6..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink_py3.py +++ /dev/null @@ 
-1,87 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class AzureSqlSink(CopySink): - """A copy activity Azure SQL sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_writer_stored_procedure_name: SQL writer stored procedure name. - Type: string (or Expression with resultType string). - :type sql_writer_stored_procedure_name: object - :param sql_writer_table_type: SQL writer table type. Type: string (or - Expression with resultType string). - :type sql_writer_table_type: object - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression - with resultType string). - :type pre_copy_script: object - :param stored_procedure_parameters: SQL stored procedure parameters. - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param stored_procedure_table_type_parameter_name: The stored procedure - parameter name of the table type. Type: string (or Expression with - resultType string). 
- :type stored_procedure_table_type_parameter_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, - 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None: - super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name - self.sql_writer_table_type = sql_writer_table_type - self.pre_copy_script = pre_copy_script - self.stored_procedure_parameters = stored_procedure_parameters - self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name - self.type = 'AzureSqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source.py deleted file mode 100644 index b6c62f9a3164..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class AzureSqlSource(CopySource): - """A copy activity Azure SQL source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. 
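Every AzureSqlSink property above is optional; a sketch of a sink that stages rows through a table type and writer stored procedure (object names are illustrative):

    from azure.mgmt.datafactory.models import AzureSqlSink

    sql_sink = AzureSqlSink(
        write_batch_size=10000,
        sql_writer_stored_procedure_name='spOverwriteTable',
        sql_writer_table_type='MyTableType',
        stored_procedure_table_type_parameter_name='MyTable',
    )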
Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_reader_query: SQL reader query. Type: string (or Expression - with resultType string). - :type sql_reader_query: object - :param sql_reader_stored_procedure_name: Name of the stored procedure for - a SQL Database source. This cannot be used at the same time as - SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: object - :param stored_procedure_parameters: Value and type setting for stored - procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param produce_additional_types: Which additional types to produce. - :type produce_additional_types: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, - 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureSqlSource, self).__init__(**kwargs) - self.sql_reader_query = kwargs.get('sql_reader_query', None) - self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - self.produce_additional_types = kwargs.get('produce_additional_types', None) - self.type = 'AzureSqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source_py3.py deleted file mode 100644 index cb5c33d28bb2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source_py3.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class AzureSqlSource(CopySource): - """A copy activity Azure SQL source. - - All required parameters must be populated in order to send to Azure. 
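A source-side sketch for AzureSqlSource, mirroring the docstring's stored procedure parameter example; the procedure name is illustrative, and sql_reader_query cannot be combined with the stored procedure:

    from azure.mgmt.datafactory.models import (
        AzureSqlSource,
        StoredProcedureParameter,
    )

    # Parameter value and type follow the docstring example
    # "{Parameter1: {value: "1", type: "int"}}".
    sql_source = AzureSqlSource(
        sql_reader_stored_procedure_name='CopySampleSP',
        stored_procedure_parameters={
            'Parameter1': StoredProcedureParameter(value='1', type='int')},
    )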
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_reader_query: SQL reader query. Type: string (or Expression - with resultType string). - :type sql_reader_query: object - :param sql_reader_stored_procedure_name: Name of the stored procedure for - a SQL Database source. This cannot be used at the same time as - SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: object - :param stored_procedure_parameters: Value and type setting for stored - procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param produce_additional_types: Which additional types to produce. - :type produce_additional_types: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, - 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: - super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.sql_reader_query = sql_reader_query - self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name - self.stored_procedure_parameters = stored_procedure_parameters - self.produce_additional_types = produce_additional_types - self.type = 'AzureSqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py deleted file mode 100644 index ce8b08944f3a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# 
-------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class AzureSqlTableDataset(Dataset): - """The Azure SQL Server database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param azure_sql_table_dataset_schema: The schema name of the Azure SQL - database. Type: string (or Expression with resultType string). - :type azure_sql_table_dataset_schema: object - :param table: The table name of the Azure SQL database. Type: string (or - Expression with resultType string). 
- :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'azure_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureSqlTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.azure_sql_table_dataset_schema = kwargs.get('azure_sql_table_dataset_schema', None) - self.table = kwargs.get('table', None) - self.type = 'AzureSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py deleted file mode 100644 index 3ed19ee47e7e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class AzureSqlTableDataset(Dataset): - """The Azure SQL Server database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. 
- :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param azure_sql_table_dataset_schema: The schema name of the Azure SQL - database. Type: string (or Expression with resultType string). - :type azure_sql_table_dataset_schema: object - :param table: The table name of the Azure SQL database. Type: string (or - Expression with resultType string). - :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'azure_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, azure_sql_table_dataset_schema=None, table=None, **kwargs) -> None: - super(AzureSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.azure_sql_table_dataset_schema = azure_sql_table_dataset_schema - self.table = table - self.type = 'AzureSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service.py deleted file mode 100644 index 202dd7229b90..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service.py +++ /dev/null @@ -1,83 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class AzureStorageLinkedService(LinkedService): - """The storage account linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. 
- :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: The connection string. It is mutually exclusive - with sasUri property. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param account_key: The Azure key vault secret reference of accountKey in - connection string. - :type account_key: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure Storage resource. It is mutually - exclusive with connectionString property. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type sas_uri: object - :param sas_token: The Azure key vault secret reference of sasToken in sas - uri. - :type sas_token: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, - 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(AzureStorageLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.account_key = kwargs.get('account_key', None) - self.sas_uri = kwargs.get('sas_uri', None) - self.sas_token = kwargs.get('sas_token', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AzureStorage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service_py3.py deleted file mode 100644 index 4fac19b70849..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service_py3.py +++ /dev/null @@ -1,83 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzureStorageLinkedService(LinkedService): - """The storage account linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: The connection string. It is mutually exclusive - with sasUri property. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param account_key: The Azure key vault secret reference of accountKey in - connection string. - :type account_key: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure Storage resource. It is mutually - exclusive with connectionString property. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type sas_uri: object - :param sas_token: The Azure key vault secret reference of sasToken in sas - uri. - :type sas_token: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, - 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, account_key=None, sas_uri=None, sas_token=None, encrypted_credential: str=None, **kwargs) -> None: - super(AzureStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.account_key = account_key - self.sas_uri = sas_uri - self.sas_token = sas_token - self.encrypted_credential = encrypted_credential - self.type = 'AzureStorage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset.py deleted file mode 100644 index eb8dacbfbb98..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class AzureTableDataset(Dataset): - """The Azure Table storage dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: Required. The table name of the Azure Table storage. - Type: string (or Expression with resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'table_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'AzureTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset_py3.py deleted file mode 100644 index d70a15fdd6f1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset_py3.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class AzureTableDataset(Dataset): - """The Azure Table storage dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: Required. The table name of the Azure Table storage. - Type: string (or Expression with resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'table_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: - super(AzureTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'AzureTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink.py deleted file mode 100644 index 3459c9ad3ba1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink.py +++ /dev/null @@ -1,81 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink import CopySink - - -class AzureTableSink(CopySink): - """A copy activity Azure Table sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. 
Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param azure_table_default_partition_key_value: Azure Table default - partition key value. Type: string (or Expression with resultType string). - :type azure_table_default_partition_key_value: object - :param azure_table_partition_key_name: Azure Table partition key name. - Type: string (or Expression with resultType string). - :type azure_table_partition_key_name: object - :param azure_table_row_key_name: Azure Table row key name. Type: string - (or Expression with resultType string). - :type azure_table_row_key_name: object - :param azure_table_insert_type: Azure Table insert type. Type: string (or - Expression with resultType string). - :type azure_table_insert_type: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'}, - 'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'}, - 'azure_table_row_key_name': {'key': 'azureTableRowKeyName', 'type': 'object'}, - 'azure_table_insert_type': {'key': 'azureTableInsertType', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureTableSink, self).__init__(**kwargs) - self.azure_table_default_partition_key_value = kwargs.get('azure_table_default_partition_key_value', None) - self.azure_table_partition_key_name = kwargs.get('azure_table_partition_key_name', None) - self.azure_table_row_key_name = kwargs.get('azure_table_row_key_name', None) - self.azure_table_insert_type = kwargs.get('azure_table_insert_type', None) - self.type = 'AzureTableSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink_py3.py deleted file mode 100644 index a15247544879..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink_py3.py +++ /dev/null @@ -1,81 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class AzureTableSink(CopySink): - """A copy activity Azure Table sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param azure_table_default_partition_key_value: Azure Table default - partition key value. Type: string (or Expression with resultType string). - :type azure_table_default_partition_key_value: object - :param azure_table_partition_key_name: Azure Table partition key name. - Type: string (or Expression with resultType string). - :type azure_table_partition_key_name: object - :param azure_table_row_key_name: Azure Table row key name. Type: string - (or Expression with resultType string). - :type azure_table_row_key_name: object - :param azure_table_insert_type: Azure Table insert type. Type: string (or - Expression with resultType string). 
- :type azure_table_insert_type: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'}, - 'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'}, - 'azure_table_row_key_name': {'key': 'azureTableRowKeyName', 'type': 'object'}, - 'azure_table_insert_type': {'key': 'azureTableInsertType', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, azure_table_default_partition_key_value=None, azure_table_partition_key_name=None, azure_table_row_key_name=None, azure_table_insert_type=None, **kwargs) -> None: - super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.azure_table_default_partition_key_value = azure_table_default_partition_key_value - self.azure_table_partition_key_name = azure_table_partition_key_name - self.azure_table_row_key_name = azure_table_row_key_name - self.azure_table_insert_type = azure_table_insert_type - self.type = 'AzureTableSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source.py deleted file mode 100644 index fa7ead73eaa9..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source.py +++ /dev/null @@ -1,63 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class AzureTableSource(CopySource): - """A copy activity Azure Table source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param azure_table_source_query: Azure Table source query. Type: string - (or Expression with resultType string). - :type azure_table_source_query: object - :param azure_table_source_ignore_table_not_found: Azure Table source - ignore table not found. Type: boolean (or Expression with resultType - boolean). - :type azure_table_source_ignore_table_not_found: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'}, - 'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureTableSource, self).__init__(**kwargs) - self.azure_table_source_query = kwargs.get('azure_table_source_query', None) - self.azure_table_source_ignore_table_not_found = kwargs.get('azure_table_source_ignore_table_not_found', None) - self.type = 'AzureTableSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source_py3.py deleted file mode 100644 index efbac5613219..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source_py3.py +++ /dev/null @@ -1,63 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class AzureTableSource(CopySource): - """A copy activity Azure Table source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param azure_table_source_query: Azure Table source query. 
Type: string - (or Expression with resultType string). - :type azure_table_source_query: object - :param azure_table_source_ignore_table_not_found: Azure Table source - ignore table not found. Type: boolean (or Expression with resultType - boolean). - :type azure_table_source_ignore_table_not_found: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'}, - 'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, azure_table_source_query=None, azure_table_source_ignore_table_not_found=None, **kwargs) -> None: - super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.azure_table_source_query = azure_table_source_query - self.azure_table_source_ignore_table_not_found = azure_table_source_ignore_table_not_found - self.type = 'AzureTableSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service.py deleted file mode 100644 index c2a8c2498ea6..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service.py +++ /dev/null @@ -1,83 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class AzureTableStorageLinkedService(LinkedService): - """The azure table storage linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: The connection string. It is mutually exclusive - with sasUri property. 
Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param account_key: The Azure key vault secret reference of accountKey in - connection string. - :type account_key: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure Storage resource. It is mutually - exclusive with connectionString property. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type sas_uri: object - :param sas_token: The Azure key vault secret reference of sasToken in sas - uri. - :type sas_token: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, - 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(AzureTableStorageLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.account_key = kwargs.get('account_key', None) - self.sas_uri = kwargs.get('sas_uri', None) - self.sas_token = kwargs.get('sas_token', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AzureTableStorage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service_py3.py deleted file mode 100644 index 8d4e62c4f3e6..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service_py3.py +++ /dev/null @@ -1,83 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzureTableStorageLinkedService(LinkedService): - """The azure table storage linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. 
- :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: The connection string. It is mutually exclusive - with sasUri property. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param account_key: The Azure key vault secret reference of accountKey in - connection string. - :type account_key: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure Storage resource. It is mutually - exclusive with connectionString property. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type sas_uri: object - :param sas_token: The Azure key vault secret reference of sasToken in sas - uri. - :type sas_token: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, - 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, account_key=None, sas_uri=None, sas_token=None, encrypted_credential: str=None, **kwargs) -> None: - super(AzureTableStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.account_key = account_key - self.sas_uri = sas_uri - self.sas_token = sas_token - self.encrypted_credential = encrypted_credential - self.type = 'AzureTableStorage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset.py deleted file mode 100644 index 5f0f8ef96696..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset.py +++ /dev/null @@ -1,77 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. 
All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class BinaryDataset(Dataset): - """Binary dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param location: Required. The location of the Binary storage. - :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param compression: The data compression method used for the binary - dataset. 
- :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'location': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, **kwargs): - super(BinaryDataset, self).__init__(**kwargs) - self.location = kwargs.get('location', None) - self.compression = kwargs.get('compression', None) - self.type = 'Binary' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset_py3.py deleted file mode 100644 index 7d26b216fd7a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset_py3.py +++ /dev/null @@ -1,77 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class BinaryDataset(Dataset): - """Binary dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param location: Required. The location of the Binary storage. 
- :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param compression: The data compression method used for the binary - dataset. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'location': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, compression=None, **kwargs) -> None: - super(BinaryDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.location = location - self.compression = compression - self.type = 'Binary' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink.py deleted file mode 100644 index 2e3be83edca7..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink.py +++ /dev/null @@ -1,65 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink import CopySink - - -class BinarySink(CopySink): - """A copy activity Binary sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
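
For orientation while reading these removals: the BinaryDataset model deleted above stays importable from the package namespace in SDK releases of this vintage. A minimal construction sketch follows; the linked-service name, location type string, and paths are placeholders, and the DatasetLocation signature is assumed from the same generation of the SDK, not taken from this patch.

    from azure.mgmt.datafactory.models import (
        BinaryDataset, DatasetGZipCompression, DatasetLocation,
        LinkedServiceReference)

    # Placeholder names throughout; DatasetLocation's 'type' is a free-form
    # string in this generation of the SDK (assumed value shown).
    dataset = BinaryDataset(
        linked_service_name=LinkedServiceReference(reference_name='MyStorageLS'),
        location=DatasetLocation(type='AzureBlobStorageLocation',
                                 folder_path='input/binary',
                                 file_name='payload.bin'),
        compression=DatasetGZipCompression())  # optional, per the docstring
    assert dataset.type == 'Binary'
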
- :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: Binary store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - } - - def __init__(self, **kwargs): - super(BinarySink, self).__init__(**kwargs) - self.store_settings = kwargs.get('store_settings', None) - self.type = 'BinarySink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink_py3.py deleted file mode 100644 index accac13e3033..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink_py3.py +++ /dev/null @@ -1,65 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class BinarySink(CopySink): - """A copy activity Binary sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: Binary store settings. 
- :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: - super(BinarySink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.store_settings = store_settings - self.type = 'BinarySink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source.py deleted file mode 100644 index 48e78e7d24bf..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source.py +++ /dev/null @@ -1,56 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class BinarySource(CopySource): - """A copy activity Binary source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: Binary store settings. 
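
Every BinarySink parameter documented above is optional, and the 'type' discriminator is set by the constructor itself; a minimal sketch, with placeholder retry values that follow the documented timespan pattern:

    from azure.mgmt.datafactory.models import BinarySink

    sink = BinarySink(
        sink_retry_count=3,           # integer or Expression, per the docstring
        sink_retry_wait='00:00:30',   # matches the documented timespan pattern
        max_concurrent_connections=4)
    assert sink.type == 'BinarySink'
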
- :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - } - - def __init__(self, **kwargs): - super(BinarySource, self).__init__(**kwargs) - self.store_settings = kwargs.get('store_settings', None) - self.type = 'BinarySource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source_py3.py deleted file mode 100644 index aa9a9f1412ab..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source_py3.py +++ /dev/null @@ -1,56 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class BinarySource(CopySource): - """A copy activity Binary source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: Binary store settings. 
- :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: - super(BinarySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.store_settings = store_settings - self.type = 'BinarySource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger.py deleted file mode 100644 index 673d34167fed..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger.py +++ /dev/null @@ -1,85 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .multiple_pipeline_trigger import MultiplePipelineTrigger - - -class BlobEventsTrigger(MultiplePipelineTrigger): - """Trigger that runs every time a Blob event occurs. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when - Start/Stop APIs are called on the Trigger. Possible values include: - 'Started', 'Stopped', 'Disabled' - :vartype runtime_state: str or - ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the - trigger. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param pipelines: Pipelines that need to be started. - :type pipelines: - list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - :param blob_path_begins_with: The blob path must begin with the pattern - provided for trigger to fire. For example, '/records/blobs/december/' will - only fire the trigger for blobs in the december folder under the records - container. At least one of these must be provided: blobPathBeginsWith, - blobPathEndsWith. - :type blob_path_begins_with: str - :param blob_path_ends_with: The blob path must end with the pattern - provided for trigger to fire. 
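
The BinarySource mirror of the sink above is equally permissive; a minimal sketch with placeholder retry settings, again using only parameters listed in the docstring:

    from azure.mgmt.datafactory.models import BinarySource

    source = BinarySource(
        source_retry_count=3,
        source_retry_wait='00:00:30',
        max_concurrent_connections=4)  # store_settings may also be supplied
    assert source.type == 'BinarySource'
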
For example, 'december/boxes.csv' will only - fire the trigger for blobs named boxes in a december folder. At least one - of these must be provided: blobPathBeginsWith, blobPathEndsWith. - :type blob_path_ends_with: str - :param events: Required. The type of events that cause this trigger to - fire. - :type events: list[str or ~azure.mgmt.datafactory.models.BlobEventTypes] - :param scope: Required. The ARM resource ID of the Storage Account. - :type scope: str - """ - - _validation = { - 'runtime_state': {'readonly': True}, - 'type': {'required': True}, - 'events': {'required': True}, - 'scope': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, - 'blob_path_begins_with': {'key': 'typeProperties.blobPathBeginsWith', 'type': 'str'}, - 'blob_path_ends_with': {'key': 'typeProperties.blobPathEndsWith', 'type': 'str'}, - 'events': {'key': 'typeProperties.events', 'type': '[str]'}, - 'scope': {'key': 'typeProperties.scope', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(BlobEventsTrigger, self).__init__(**kwargs) - self.blob_path_begins_with = kwargs.get('blob_path_begins_with', None) - self.blob_path_ends_with = kwargs.get('blob_path_ends_with', None) - self.events = kwargs.get('events', None) - self.scope = kwargs.get('scope', None) - self.type = 'BlobEventsTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger_py3.py deleted file mode 100644 index fb65a420a2cd..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger_py3.py +++ /dev/null @@ -1,85 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .multiple_pipeline_trigger_py3 import MultiplePipelineTrigger - - -class BlobEventsTrigger(MultiplePipelineTrigger): - """Trigger that runs every time a Blob event occurs. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when - Start/Stop APIs are called on the Trigger. Possible values include: - 'Started', 'Stopped', 'Disabled' - :vartype runtime_state: str or - ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the - trigger. - :type annotations: list[object] - :param type: Required. Constant filled by server. 
- :type type: str - :param pipelines: Pipelines that need to be started. - :type pipelines: - list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - :param blob_path_begins_with: The blob path must begin with the pattern - provided for trigger to fire. For example, '/records/blobs/december/' will - only fire the trigger for blobs in the december folder under the records - container. At least one of these must be provided: blobPathBeginsWith, - blobPathEndsWith. - :type blob_path_begins_with: str - :param blob_path_ends_with: The blob path must end with the pattern - provided for trigger to fire. For example, 'december/boxes.csv' will only - fire the trigger for blobs named boxes in a december folder. At least one - of these must be provided: blobPathBeginsWith, blobPathEndsWith. - :type blob_path_ends_with: str - :param events: Required. The type of events that cause this trigger to - fire. - :type events: list[str or ~azure.mgmt.datafactory.models.BlobEventTypes] - :param scope: Required. The ARM resource ID of the Storage Account. - :type scope: str - """ - - _validation = { - 'runtime_state': {'readonly': True}, - 'type': {'required': True}, - 'events': {'required': True}, - 'scope': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, - 'blob_path_begins_with': {'key': 'typeProperties.blobPathBeginsWith', 'type': 'str'}, - 'blob_path_ends_with': {'key': 'typeProperties.blobPathEndsWith', 'type': 'str'}, - 'events': {'key': 'typeProperties.events', 'type': '[str]'}, - 'scope': {'key': 'typeProperties.scope', 'type': 'str'}, - } - - def __init__(self, *, events, scope: str, additional_properties=None, description: str=None, annotations=None, pipelines=None, blob_path_begins_with: str=None, blob_path_ends_with: str=None, **kwargs) -> None: - super(BlobEventsTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) - self.blob_path_begins_with = blob_path_begins_with - self.blob_path_ends_with = blob_path_ends_with - self.events = events - self.scope = scope - self.type = 'BlobEventsTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py deleted file mode 100644 index 284e0fcecde5..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py +++ /dev/null @@ -1,80 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink import CopySink - - -class BlobSink(CopySink): - """A copy activity Azure Blob sink. - - All required parameters must be populated in order to send to Azure. 
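
To make the BlobEventsTrigger contract just completed concrete: events and scope are required, plus at least one of the two path filters. A sketch with a placeholder storage-account scope and an event name assumed from the BlobEventTypes enum (the begins-with value is the docstring's own example):

    from azure.mgmt.datafactory.models import BlobEventsTrigger

    trigger = BlobEventsTrigger(
        events=['Microsoft.Storage.BlobCreated'],   # assumed enum value
        scope='/subscriptions/<sub-id>/resourceGroups/<rg>/providers/'
              'Microsoft.Storage/storageAccounts/<account>',
        blob_path_begins_with='/records/blobs/december/')
    assert trigger.type == 'BlobEventsTrigger'
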
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param blob_writer_overwrite_files: Blob writer overwrite files. Type: - boolean (or Expression with resultType boolean). - :type blob_writer_overwrite_files: object - :param blob_writer_date_time_format: Blob writer date time format. Type: - string (or Expression with resultType string). - :type blob_writer_date_time_format: object - :param blob_writer_add_header: Blob writer add header. Type: boolean (or - Expression with resultType boolean). - :type blob_writer_add_header: object - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, - 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, - 'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(BlobSink, self).__init__(**kwargs) - self.blob_writer_overwrite_files = kwargs.get('blob_writer_overwrite_files', None) - self.blob_writer_date_time_format = kwargs.get('blob_writer_date_time_format', None) - self.blob_writer_add_header = kwargs.get('blob_writer_add_header', None) - self.copy_behavior = kwargs.get('copy_behavior', None) - self.type = 'BlobSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py deleted file mode 100644 index 370acc72e017..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py +++ /dev/null @@ -1,80 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class BlobSink(CopySink): - """A copy activity Azure Blob sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param blob_writer_overwrite_files: Blob writer overwrite files. Type: - boolean (or Expression with resultType boolean). - :type blob_writer_overwrite_files: object - :param blob_writer_date_time_format: Blob writer date time format. Type: - string (or Expression with resultType string). - :type blob_writer_date_time_format: object - :param blob_writer_add_header: Blob writer add header. Type: boolean (or - Expression with resultType boolean). - :type blob_writer_add_header: object - :param copy_behavior: The type of copy behavior for copy sink. 
- :type copy_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, - 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, - 'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, blob_writer_overwrite_files=None, blob_writer_date_time_format=None, blob_writer_add_header=None, copy_behavior=None, **kwargs) -> None: - super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.blob_writer_overwrite_files = blob_writer_overwrite_files - self.blob_writer_date_time_format = blob_writer_date_time_format - self.blob_writer_add_header = blob_writer_add_header - self.copy_behavior = copy_behavior - self.type = 'BlobSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source.py deleted file mode 100644 index ab4313a890cb..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source.py +++ /dev/null @@ -1,68 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class BlobSource(CopySource): - """A copy activity Azure Blob source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. 
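
A corresponding sketch for the BlobSink completed above: all of the writer knobs are optional, and the copy-behavior string is an assumed service-side value, not taken from this patch.

    from azure.mgmt.datafactory.models import BlobSink

    sink = BlobSink(
        blob_writer_overwrite_files=True,
        blob_writer_add_header=True,
        copy_behavior='PreserveHierarchy')  # assumed value
    assert sink.type == 'BlobSink'
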
- :type type: str - :param treat_empty_as_null: Treat empty as null. Type: boolean (or - Expression with resultType boolean). - :type treat_empty_as_null: object - :param skip_header_line_count: Number of header lines to skip from each - blob. Type: integer (or Expression with resultType integer). - :type skip_header_line_count: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, - 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(BlobSource, self).__init__(**kwargs) - self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None) - self.skip_header_line_count = kwargs.get('skip_header_line_count', None) - self.recursive = kwargs.get('recursive', None) - self.type = 'BlobSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source_py3.py deleted file mode 100644 index 78d90cc61e13..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source_py3.py +++ /dev/null @@ -1,68 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class BlobSource(CopySource): - """A copy activity Azure Blob source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param treat_empty_as_null: Treat empty as null. Type: boolean (or - Expression with resultType boolean). - :type treat_empty_as_null: object - :param skip_header_line_count: Number of header lines to skip from each - blob. Type: integer (or Expression with resultType integer). 
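
And for the BlobSource being documented here, a minimal sketch using only parameters listed in its docstring:

    from azure.mgmt.datafactory.models import BlobSource

    source = BlobSource(
        treat_empty_as_null=True,
        skip_header_line_count=1,   # skip one header line per blob
        recursive=True)             # the documented default is already true
    assert source.type == 'BlobSource'
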
- :type skip_header_line_count: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, - 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, treat_empty_as_null=None, skip_header_line_count=None, recursive=None, **kwargs) -> None: - super(BlobSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.treat_empty_as_null = treat_empty_as_null - self.skip_header_line_count = skip_header_line_count - self.recursive = recursive - self.type = 'BlobSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger.py deleted file mode 100644 index 4fd5b5c17100..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger.py +++ /dev/null @@ -1,78 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .multiple_pipeline_trigger import MultiplePipelineTrigger - - -class BlobTrigger(MultiplePipelineTrigger): - """Trigger that runs every time the selected Blob container changes. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when - Start/Stop APIs are called on the Trigger. Possible values include: - 'Started', 'Stopped', 'Disabled' - :vartype runtime_state: str or - ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the - trigger. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param pipelines: Pipelines that need to be started. - :type pipelines: - list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - :param folder_path: Required. 
The path of the container/folder that will - trigger the pipeline. - :type folder_path: str - :param max_concurrency: Required. The max number of parallel files to - handle when it is triggered. - :type max_concurrency: int - :param linked_service: Required. The Azure Storage linked service - reference. - :type linked_service: - ~azure.mgmt.datafactory.models.LinkedServiceReference - """ - - _validation = { - 'runtime_state': {'readonly': True}, - 'type': {'required': True}, - 'folder_path': {'required': True}, - 'max_concurrency': {'required': True}, - 'linked_service': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'str'}, - 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, - 'linked_service': {'key': 'typeProperties.linkedService', 'type': 'LinkedServiceReference'}, - } - - def __init__(self, **kwargs): - super(BlobTrigger, self).__init__(**kwargs) - self.folder_path = kwargs.get('folder_path', None) - self.max_concurrency = kwargs.get('max_concurrency', None) - self.linked_service = kwargs.get('linked_service', None) - self.type = 'BlobTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger_py3.py deleted file mode 100644 index cccffd881bfb..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger_py3.py +++ /dev/null @@ -1,78 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .multiple_pipeline_trigger_py3 import MultiplePipelineTrigger - - -class BlobTrigger(MultiplePipelineTrigger): - """Trigger that runs every time the selected Blob container changes. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when - Start/Stop APIs are called on the Trigger. Possible values include: - 'Started', 'Stopped', 'Disabled' - :vartype runtime_state: str or - ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the - trigger. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param pipelines: Pipelines that need to be started. 
- :type pipelines: - list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - :param folder_path: Required. The path of the container/folder that will - trigger the pipeline. - :type folder_path: str - :param max_concurrency: Required. The max number of parallel files to - handle when it is triggered. - :type max_concurrency: int - :param linked_service: Required. The Azure Storage linked service - reference. - :type linked_service: - ~azure.mgmt.datafactory.models.LinkedServiceReference - """ - - _validation = { - 'runtime_state': {'readonly': True}, - 'type': {'required': True}, - 'folder_path': {'required': True}, - 'max_concurrency': {'required': True}, - 'linked_service': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'str'}, - 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, - 'linked_service': {'key': 'typeProperties.linkedService', 'type': 'LinkedServiceReference'}, - } - - def __init__(self, *, folder_path: str, max_concurrency: int, linked_service, additional_properties=None, description: str=None, annotations=None, pipelines=None, **kwargs) -> None: - super(BlobTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) - self.folder_path = folder_path - self.max_concurrency = max_concurrency - self.linked_service = linked_service - self.type = 'BlobTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service.py deleted file mode 100644 index ebba2be42028..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service.py +++ /dev/null @@ -1,84 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class CassandraLinkedService(LinkedService): - """Linked service for Cassandra data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. 
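
For the BlobTrigger completed above, all three type properties are required; a minimal sketch with placeholder container and linked-service names:

    from azure.mgmt.datafactory.models import BlobTrigger, LinkedServiceReference

    trigger = BlobTrigger(
        folder_path='mycontainer/inbox',   # placeholder container/folder
        max_concurrency=10,
        linked_service=LinkedServiceReference(reference_name='AzureStorageLS'))
    assert trigger.type == 'BlobTrigger'
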
- :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. Host name for connection. Type: string (or - Expression with resultType string). - :type host: object - :param authentication_type: AuthenticationType to be used for connection. - Type: string (or Expression with resultType string). - :type authentication_type: object - :param port: The port for the connection. Type: integer (or Expression - with resultType integer). - :type port: object - :param username: Username for authentication. Type: string (or Expression - with resultType string). - :type username: object - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(CassandraLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.port = kwargs.get('port', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Cassandra' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service_py3.py deleted file mode 100644 index f22f303cc61d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service_py3.py +++ /dev/null @@ -1,84 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class CassandraLinkedService(LinkedService): - """Linked service for Cassandra data source. - - All required parameters must be populated in order to send to Azure. 
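
Only host is required on the CassandraLinkedService above; a sketch that also supplies the optional basic-auth settings. The host, username, and authentication-type value are placeholders/assumptions, and SecureString is the plain-text SecretBase variant shipped in this generation of the SDK:

    from azure.mgmt.datafactory.models import CassandraLinkedService, SecureString

    linked_service = CassandraLinkedService(
        host='cassandra.internal.contoso.com',    # placeholder host
        port=9042,
        authentication_type='Basic',              # assumed value
        username='etl_reader',
        password=SecureString(value='<password>'))
    assert linked_service.type == 'Cassandra'
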
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. Host name for connection. Type: string (or - Expression with resultType string). - :type host: object - :param authentication_type: AuthenticationType to be used for connection. - Type: string (or Expression with resultType string). - :type authentication_type: object - :param port: The port for the connection. Type: integer (or Expression - with resultType integer). - :type port: object - :param username: Username for authentication. Type: string (or Expression - with resultType string). - :type username: object - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, port=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(CassandraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.authentication_type = authentication_type - self.port = port - self.username = username - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'Cassandra' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py deleted file mode 100644 index e7ba96c18682..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py +++ 
/dev/null @@ -1,70 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class CassandraSource(CopySource): - """A copy activity source for a Cassandra database. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Should be a SQL-92 query expression or - Cassandra Query Language (CQL) command. Type: string (or Expression with - resultType string). - :type query: object - :param consistency_level: The consistency level specifies how many - Cassandra servers must respond to a read request before returning data to - the client application. Cassandra checks the specified number of Cassandra - servers for data to satisfy the read request. Must be one of - cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is - case-insensitive. 
Possible values include: 'ALL', 'EACH_QUORUM', 'QUORUM', - 'LOCAL_QUORUM', 'ONE', 'TWO', 'THREE', 'LOCAL_ONE', 'SERIAL', - 'LOCAL_SERIAL' - :type consistency_level: str or - ~azure.mgmt.datafactory.models.CassandraSourceReadConsistencyLevels - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(CassandraSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.consistency_level = kwargs.get('consistency_level', None) - self.type = 'CassandraSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py deleted file mode 100644 index bd95d158b868..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py +++ /dev/null @@ -1,70 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class CassandraSource(CopySource): - """A copy activity source for a Cassandra database. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Should be a SQL-92 query expression or - Cassandra Query Language (CQL) command. Type: string (or Expression with - resultType string). - :type query: object - :param consistency_level: The consistency level specifies how many - Cassandra servers must respond to a read request before returning data to - the client application. Cassandra checks the specified number of Cassandra - servers for data to satisfy the read request. Must be one of - cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is - case-insensitive. 
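
A sketch for the CassandraSource completed above, pairing a CQL query (placeholder keyspace and table) with a consistency level taken from the documented list:

    from azure.mgmt.datafactory.models import CassandraSource

    source = CassandraSource(
        query='SELECT * FROM telemetry.events',   # CQL, placeholder names
        consistency_level='LOCAL_QUORUM')         # one of the documented values
    assert source.type == 'CassandraSource'
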
Possible values include: 'ALL', 'EACH_QUORUM', 'QUORUM', - 'LOCAL_QUORUM', 'ONE', 'TWO', 'THREE', 'LOCAL_ONE', 'SERIAL', - 'LOCAL_SERIAL' - :type consistency_level: str or - ~azure.mgmt.datafactory.models.CassandraSourceReadConsistencyLevels - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, consistency_level=None, **kwargs) -> None: - super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.consistency_level = consistency_level - self.type = 'CassandraSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset.py deleted file mode 100644 index b89c324fd4d4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset.py +++ /dev/null @@ -1,77 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class CassandraTableDataset(Dataset): - """The Cassandra database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. 
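# ---- Editor's note: illustrative usage sketch, not part of this patch ----
# Constructing the CassandraSource model removed above (py3 variant); the
# query text and consistency level are made-up example values.
from azure.mgmt.datafactory.models import CassandraSource

source = CassandraSource(
    query="SELECT * FROM sensors.readings",  # SQL-92 or CQL text
    consistency_level="LOCAL_QUORUM",        # case-insensitive; defaults to 'ONE'
)
# ---------------------------------------------------------------------------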
Constant filled by server. - :type type: str - :param table_name: The table name of the Cassandra database. Type: string - (or Expression with resultType string). - :type table_name: object - :param keyspace: The keyspace of the Cassandra database. Type: string (or - Expression with resultType string). - :type keyspace: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'keyspace': {'key': 'typeProperties.keyspace', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(CassandraTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.keyspace = kwargs.get('keyspace', None) - self.type = 'CassandraTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset_py3.py deleted file mode 100644 index 256358ce50cb..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset_py3.py +++ /dev/null @@ -1,77 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class CassandraTableDataset(Dataset): - """The Cassandra database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. 
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name of the Cassandra database. Type: string - (or Expression with resultType string). - :type table_name: object - :param keyspace: The keyspace of the Cassandra database. Type: string (or - Expression with resultType string). - :type keyspace: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'keyspace': {'key': 'typeProperties.keyspace', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, keyspace=None, **kwargs) -> None: - super(CassandraTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.keyspace = keyspace - self.type = 'CassandraTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset.py deleted file mode 100644 index c7cd4c315852..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class CommonDataServiceForAppsEntityDataset(Dataset): - """The Common Data Service for Apps entity dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. 
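# ---- Editor's note: illustrative usage sketch, not part of this patch ----
# Pointing the CassandraTableDataset removed above at a keyspace and table.
# The linked-service name "CassandraLS" is hypothetical, and
# LinkedServiceReference is assumed to take a reference_name keyword as it
# does elsewhere in this package.
from azure.mgmt.datafactory.models import (
    CassandraTableDataset, LinkedServiceReference)

dataset = CassandraTableDataset(
    linked_service_name=LinkedServiceReference(reference_name="CassandraLS"),
    keyspace="sensors",     # serialized as typeProperties.keyspace
    table_name="readings",  # serialized as typeProperties.tableName
)
# ---------------------------------------------------------------------------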
- :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param entity_name: The logical name of the entity. Type: string (or - Expression with resultType string). - :type entity_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(CommonDataServiceForAppsEntityDataset, self).__init__(**kwargs) - self.entity_name = kwargs.get('entity_name', None) - self.type = 'CommonDataServiceForAppsEntity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset_py3.py deleted file mode 100644 index 050bdb836963..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class CommonDataServiceForAppsEntityDataset(Dataset): - """The Common Data Service for Apps entity dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. 
- :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param entity_name: The logical name of the entity. Type: string (or - Expression with resultType string). - :type entity_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, entity_name=None, **kwargs) -> None: - super(CommonDataServiceForAppsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.entity_name = entity_name - self.type = 'CommonDataServiceForAppsEntity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service.py deleted file mode 100644 index bbc8b7a0de65..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service.py +++ /dev/null @@ -1,115 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class CommonDataServiceForAppsLinkedService(LinkedService): - """Common Data Service for Apps linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. 
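# ---- Editor's note: illustrative usage sketch, not part of this patch ----
# entity_name below maps to typeProperties.entityName; "account" and the
# linked-service name are made-up example values.
from azure.mgmt.datafactory.models import (
    CommonDataServiceForAppsEntityDataset, LinkedServiceReference)

dataset = CommonDataServiceForAppsEntityDataset(
    linked_service_name=LinkedServiceReference(reference_name="CdsForAppsLS"),
    entity_name="account",  # logical name of the entity
)
# ---------------------------------------------------------------------------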
- :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param deployment_type: Required. The deployment type of the Common Data - Service for Apps instance. 'Online' for Common Data Service for Apps - Online and 'OnPremisesWithIfd' for Common Data Service for Apps - on-premises with Ifd. Type: string (or Expression with resultType string). - Possible values include: 'Online', 'OnPremisesWithIfd' - :type deployment_type: str or - ~azure.mgmt.datafactory.models.DynamicsDeploymentType - :param host_name: The host name of the on-premises Common Data Service for - Apps server. The property is required for on-prem and not allowed for - online. Type: string (or Expression with resultType string). - :type host_name: object - :param port: The port of on-premises Common Data Service for Apps server. - The property is required for on-prem and not allowed for online. Default - is 443. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: object - :param service_uri: The URL to the Microsoft Common Data Service for Apps - server. The property is required for on-line and not allowed for on-prem. - Type: string (or Expression with resultType string). - :type service_uri: object - :param organization_name: The organization name of the Common Data Service - for Apps instance. The property is required for on-prem and required for - online when there are more than one Common Data Service for Apps instances - associated with the user. Type: string (or Expression with resultType - string). - :type organization_name: object - :param authentication_type: Required. The authentication type to connect - to Common Data Service for Apps server. 'Office365' for online scenario, - 'Ifd' for on-premises with Ifd scenario. Type: string (or Expression with - resultType string). Possible values include: 'Office365', 'Ifd' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.DynamicsAuthenticationType - :param username: Required. User name to access the Common Data Service for - Apps instance. Type: string (or Expression with resultType string). - :type username: object - :param password: Password to access the Common Data Service for Apps - instance. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'deployment_type': {'required': True}, - 'authentication_type': {'required': True}, - 'username': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, - 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, - 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(CommonDataServiceForAppsLinkedService, self).__init__(**kwargs) - self.deployment_type = kwargs.get('deployment_type', None) - self.host_name = kwargs.get('host_name', None) - self.port = kwargs.get('port', None) - self.service_uri = kwargs.get('service_uri', None) - self.organization_name = kwargs.get('organization_name', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'CommonDataServiceForApps' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service_py3.py deleted file mode 100644 index 1c4897c09868..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service_py3.py +++ /dev/null @@ -1,115 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class CommonDataServiceForAppsLinkedService(LinkedService): - """Common Data Service for Apps linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. 
- :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param deployment_type: Required. The deployment type of the Common Data - Service for Apps instance. 'Online' for Common Data Service for Apps - Online and 'OnPremisesWithIfd' for Common Data Service for Apps - on-premises with Ifd. Type: string (or Expression with resultType string). - Possible values include: 'Online', 'OnPremisesWithIfd' - :type deployment_type: str or - ~azure.mgmt.datafactory.models.DynamicsDeploymentType - :param host_name: The host name of the on-premises Common Data Service for - Apps server. The property is required for on-prem and not allowed for - online. Type: string (or Expression with resultType string). - :type host_name: object - :param port: The port of on-premises Common Data Service for Apps server. - The property is required for on-prem and not allowed for online. Default - is 443. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: object - :param service_uri: The URL to the Microsoft Common Data Service for Apps - server. The property is required for on-line and not allowed for on-prem. - Type: string (or Expression with resultType string). - :type service_uri: object - :param organization_name: The organization name of the Common Data Service - for Apps instance. The property is required for on-prem and required for - online when there are more than one Common Data Service for Apps instances - associated with the user. Type: string (or Expression with resultType - string). - :type organization_name: object - :param authentication_type: Required. The authentication type to connect - to Common Data Service for Apps server. 'Office365' for online scenario, - 'Ifd' for on-premises with Ifd scenario. Type: string (or Expression with - resultType string). Possible values include: 'Office365', 'Ifd' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.DynamicsAuthenticationType - :param username: Required. User name to access the Common Data Service for - Apps instance. Type: string (or Expression with resultType string). - :type username: object - :param password: Password to access the Common Data Service for Apps - instance. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'deployment_type': {'required': True}, - 'authentication_type': {'required': True}, - 'username': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, - 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, - 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, deployment_type, authentication_type, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(CommonDataServiceForAppsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.deployment_type = deployment_type - self.host_name = host_name - self.port = port - self.service_uri = service_uri - self.organization_name = organization_name - self.authentication_type = authentication_type - self.username = username - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'CommonDataServiceForApps' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink.py deleted file mode 100644 index 0df48841cccc..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink.py +++ /dev/null @@ -1,77 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink import CopySink - - -class CommonDataServiceForAppsSink(CopySink): - """A copy activity Common Data Service for Apps sink. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. 
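# ---- Editor's note: illustrative usage sketch, not part of this patch ----
# An online-deployment CommonDataServiceForAppsLinkedService as documented
# above: host_name/port are omitted (not allowed for 'Online') and
# 'Office365' authentication is used. The credentials are placeholders, and
# SecureString is assumed available as the concrete SecretBase type.
from azure.mgmt.datafactory.models import (
    CommonDataServiceForAppsLinkedService, SecureString)

linked_service = CommonDataServiceForAppsLinkedService(
    deployment_type="Online",
    authentication_type="Office365",
    username="user@contoso.com",
    password=SecureString(value="<placeholder>"),
)
# ---------------------------------------------------------------------------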
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :ivar write_behavior: Required. The write behavior for the operation. - Default value: "Upsert" . - :vartype write_behavior: str - :param ignore_null_values: The flag indicating whether to ignore null - values from input dataset (except key fields) during write operation. - Default is false. Type: boolean (or Expression with resultType boolean). - :type ignore_null_values: object - """ - - _validation = { - 'type': {'required': True}, - 'write_behavior': {'required': True, 'constant': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, - } - - write_behavior = "Upsert" - - def __init__(self, **kwargs): - super(CommonDataServiceForAppsSink, self).__init__(**kwargs) - self.ignore_null_values = kwargs.get('ignore_null_values', None) - self.type = 'CommonDataServiceForAppsSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink_py3.py deleted file mode 100644 index 80f85e6d5809..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink_py3.py +++ /dev/null @@ -1,77 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class CommonDataServiceForAppsSink(CopySink): - """A copy activity Common Data Service for Apps sink. 
- - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :ivar write_behavior: Required. The write behavior for the operation. - Default value: "Upsert" . - :vartype write_behavior: str - :param ignore_null_values: The flag indicating whether to ignore null - values from input dataset (except key fields) during write operation. - Default is false. Type: boolean (or Expression with resultType boolean). - :type ignore_null_values: object - """ - - _validation = { - 'type': {'required': True}, - 'write_behavior': {'required': True, 'constant': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, - } - - write_behavior = "Upsert" - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None: - super(CommonDataServiceForAppsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.ignore_null_values = ignore_null_values - self.type = 'CommonDataServiceForAppsSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source.py deleted file mode 100644 index 13d2a6b921bb..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source.py +++ /dev/null @@ -1,58 +0,0 @@ -# coding=utf-8 -# 
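# ---- Editor's note: illustrative usage sketch, not part of this patch ----
# For the CommonDataServiceForAppsSink above, write_behavior is a class-level
# constant ("Upsert") and is not passed to __init__; only the optional knobs
# are set here, with made-up values.
from azure.mgmt.datafactory.models import CommonDataServiceForAppsSink

sink = CommonDataServiceForAppsSink(
    ignore_null_values=True,  # skip nulls (except key fields) when writing
    write_batch_size=100,
)
# ---------------------------------------------------------------------------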
-------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class CommonDataServiceForAppsSource(CopySource): - """A copy activity Common Data Service for Apps source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: FetchXML is a proprietary query language that is used in - Microsoft Common Data Service for Apps (online & on-premises). Type: - string (or Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(CommonDataServiceForAppsSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'CommonDataServiceForAppsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source_py3.py deleted file mode 100644 index 713db90f9623..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source_py3.py +++ /dev/null @@ -1,58 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class CommonDataServiceForAppsSource(CopySource): - """A copy activity Common Data Service for Apps source. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: FetchXML is a proprietary query language that is used in - Microsoft Common Data Service for Apps (online & on-premises). Type: - string (or Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(CommonDataServiceForAppsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'CommonDataServiceForAppsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service.py deleted file mode 100644 index 04179d0d1f53..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service.py +++ /dev/null @@ -1,92 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class ConcurLinkedService(LinkedService): - """Concur Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
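# ---- Editor's note: illustrative usage sketch, not part of this patch ----
# The query for CommonDataServiceForAppsSource is FetchXML, per the docstring
# above; this snippet is a made-up minimal fetch over the "account" entity.
from azure.mgmt.datafactory.models import CommonDataServiceForAppsSource

source = CommonDataServiceForAppsSource(
    query='<fetch top="10"><entity name="account"/></fetch>',
)
# ---------------------------------------------------------------------------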
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param client_id: Required. Application client_id supplied by Concur App - Management. - :type client_id: object - :param username: Required. The user name that you use to access Concur - Service. - :type username: object - :param password: The password corresponding to the user name that you - provided in the username field. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'client_id': {'required': True}, - 'username': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ConcurLinkedService, self).__init__(**kwargs) - self.client_id = kwargs.get('client_id', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Concur' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service_py3.py deleted file mode 100644 index 4411db6d2856..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service_py3.py +++ /dev/null 
@@ -1,92 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class ConcurLinkedService(LinkedService): - """Concur Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param client_id: Required. Application client_id supplied by Concur App - Management. - :type client_id: object - :param username: Required. The user name that you use to access Concur - Service. - :type username: object - :param password: The password corresponding to the user name that you - provided in the username field. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'client_id': {'required': True}, - 'username': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, client_id, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(ConcurLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.client_id = client_id - self.username = username - self.password = password - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'Concur' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset.py deleted file mode 100644 index e2595f9d8aff..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class ConcurObjectDataset(Dataset): - """Concur Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. 
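# ---- Editor's note: illustrative usage sketch, not part of this patch ----
# client_id and username are required on the ConcurLinkedService above; the
# values here are placeholders, and SecureString is assumed available as the
# concrete SecretBase type.
from azure.mgmt.datafactory.models import ConcurLinkedService, SecureString

linked_service = ConcurLinkedService(
    client_id="<concur-app-client-id>",
    username="jsmith",
    password=SecureString(value="<placeholder>"),
)
# ---------------------------------------------------------------------------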
Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ConcurObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'ConcurObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset_py3.py deleted file mode 100644 index 9543a6395a32..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class ConcurObjectDataset(Dataset): - """Concur Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(ConcurObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'ConcurObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source.py deleted file mode 100644 index 11ae557c0cda..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class ConcurSource(CopySource): - """A copy activity Concur Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
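
The retry-wait pattern above is a .NET-style TimeSpan ('[days.]hh:mm:ss'). A quick self-check in plain Python (the double backslashes in the docstring are docstring escaping; a raw string needs only one):

import re

# Pattern transcribed from the source_retry_wait docstring above.
TIMESPAN = re.compile(r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))')

for value in ('00:00:30', '7.12:05:30', 'not-a-timespan'):
    print(value, bool(TIMESPAN.fullmatch(value)))
# '00:00:30' and '7.12:05:30' (7 days, 12:05:30) match; the last value does not.
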
- :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ConcurSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'ConcurSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source_py3.py deleted file mode 100644 index ac8ae8fb5a91..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class ConcurSource(CopySource): - """A copy activity Concur Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
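
Tying the parameters above together, a minimal sketch of constructing this source (the query text is illustrative):

from azure.mgmt.datafactory.models import ConcurSource

source = ConcurSource(
    query='SELECT * FROM Opportunities',  # illustrative query string
    source_retry_count=3,
    source_retry_wait='00:00:30',  # must match the TimeSpan pattern noted earlier
)
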
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'ConcurSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity.py deleted file mode 100644 index 2242bc36beb2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity.py +++ /dev/null @@ -1,60 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .activity import Activity - - -class ControlActivity(Activity): - """Base class for all control activities like IfCondition, ForEach , Until. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: WebHookActivity, AppendVariableActivity, - SetVariableActivity, FilterActivity, ValidationActivity, UntilActivity, - WaitActivity, ForEachActivity, IfConditionActivity, ExecutePipelineActivity - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. 
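
Since ControlActivity is only a polymorphic base, client code instantiates one of the subclasses listed above; a minimal sketch with WaitActivity, whose signature is assumed from this generated models package:

from azure.mgmt.datafactory.models import WaitActivity

# 'Wait' is among the discriminator values in the _subtype_map below.
pause = WaitActivity(name='PauseTenSeconds', wait_time_in_seconds=10)
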
- :type type: str - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'WebHook': 'WebHookActivity', 'AppendVariable': 'AppendVariableActivity', 'SetVariable': 'SetVariableActivity', 'Filter': 'FilterActivity', 'Validation': 'ValidationActivity', 'Until': 'UntilActivity', 'Wait': 'WaitActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'ExecutePipeline': 'ExecutePipelineActivity'} - } - - def __init__(self, **kwargs): - super(ControlActivity, self).__init__(**kwargs) - self.type = 'Container' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity_py3.py deleted file mode 100644 index 0aabd99d741f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity_py3.py +++ /dev/null @@ -1,60 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .activity_py3 import Activity - - -class ControlActivity(Activity): - """Base class for all control activities like IfCondition, ForEach , Until. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: WebHookActivity, AppendVariableActivity, - SetVariableActivity, FilterActivity, ValidationActivity, UntilActivity, - WaitActivity, ForEachActivity, IfConditionActivity, ExecutePipelineActivity - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. 
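
A slightly larger sketch composing control activities; the pipeline parameter name is hypothetical, and the ForEachActivity and Expression signatures are assumed from this generated models package:

from azure.mgmt.datafactory.models import Expression, ForEachActivity, WaitActivity

loop = ForEachActivity(
    name='LoopOverFiles',
    items=Expression(value='@pipeline().parameters.files'),  # hypothetical pipeline parameter
    activities=[WaitActivity(name='PausePerItem', wait_time_in_seconds=1)],
)
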
- :type type: str - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'WebHook': 'WebHookActivity', 'AppendVariable': 'AppendVariableActivity', 'SetVariable': 'SetVariableActivity', 'Filter': 'FilterActivity', 'Validation': 'ValidationActivity', 'Until': 'UntilActivity', 'Wait': 'WaitActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'ExecutePipeline': 'ExecutePipelineActivity'} - } - - def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, **kwargs) -> None: - super(ControlActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type = 'Container' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py deleted file mode 100644 index 2e7c00d551ba..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py +++ /dev/null @@ -1,124 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity import ExecutionActivity - - -class CopyActivity(ExecutionActivity): - """Copy activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param source: Required. Copy activity source. - :type source: ~azure.mgmt.datafactory.models.CopySource - :param sink: Required. Copy activity sink. - :type sink: ~azure.mgmt.datafactory.models.CopySink - :param translator: Copy activity translator. If not specified, tabular - translator is used. - :type translator: object - :param enable_staging: Specifies whether to copy data via an interim - staging. Default value is false. 
Type: boolean (or Expression with - resultType boolean). - :type enable_staging: object - :param staging_settings: Specifies interim staging settings when - EnableStaging is true. - :type staging_settings: ~azure.mgmt.datafactory.models.StagingSettings - :param parallel_copies: Maximum number of concurrent sessions opened on - the source or sink to avoid overloading the data store. Type: integer (or - Expression with resultType integer), minimum: 0. - :type parallel_copies: object - :param data_integration_units: Maximum number of data integration units - that can be used to perform this data movement. Type: integer (or - Expression with resultType integer), minimum: 0. - :type data_integration_units: object - :param enable_skip_incompatible_row: Whether to skip incompatible row. - Default value is false. Type: boolean (or Expression with resultType - boolean). - :type enable_skip_incompatible_row: object - :param redirect_incompatible_row_settings: Redirect incompatible row - settings when EnableSkipIncompatibleRow is true. - :type redirect_incompatible_row_settings: - ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings - :param preserve_rules: Preserve Rules. - :type preserve_rules: list[object] - :param preserve: Preserve rules. - :type preserve: list[object] - :param inputs: List of inputs for the activity. - :type inputs: list[~azure.mgmt.datafactory.models.DatasetReference] - :param outputs: List of outputs for the activity. - :type outputs: list[~azure.mgmt.datafactory.models.DatasetReference] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'source': {'required': True}, - 'sink': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, - 'sink': {'key': 'typeProperties.sink', 'type': 'CopySink'}, - 'translator': {'key': 'typeProperties.translator', 'type': 'object'}, - 'enable_staging': {'key': 'typeProperties.enableStaging', 'type': 'object'}, - 'staging_settings': {'key': 'typeProperties.stagingSettings', 'type': 'StagingSettings'}, - 'parallel_copies': {'key': 'typeProperties.parallelCopies', 'type': 'object'}, - 'data_integration_units': {'key': 'typeProperties.dataIntegrationUnits', 'type': 'object'}, - 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, - 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, - 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, - 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, - 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, - 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, - } - - def __init__(self, **kwargs): - super(CopyActivity, self).__init__(**kwargs) - self.source = kwargs.get('source', None) - self.sink = kwargs.get('sink', None) - self.translator = kwargs.get('translator', None) - self.enable_staging = kwargs.get('enable_staging', None) - 
self.staging_settings = kwargs.get('staging_settings', None) - self.parallel_copies = kwargs.get('parallel_copies', None) - self.data_integration_units = kwargs.get('data_integration_units', None) - self.enable_skip_incompatible_row = kwargs.get('enable_skip_incompatible_row', None) - self.redirect_incompatible_row_settings = kwargs.get('redirect_incompatible_row_settings', None) - self.preserve_rules = kwargs.get('preserve_rules', None) - self.preserve = kwargs.get('preserve', None) - self.inputs = kwargs.get('inputs', None) - self.outputs = kwargs.get('outputs', None) - self.type = 'Copy' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py deleted file mode 100644 index f8a1fee5625d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py +++ /dev/null @@ -1,124 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class CopyActivity(ExecutionActivity): - """Copy activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param source: Required. Copy activity source. - :type source: ~azure.mgmt.datafactory.models.CopySource - :param sink: Required. Copy activity sink. - :type sink: ~azure.mgmt.datafactory.models.CopySink - :param translator: Copy activity translator. If not specified, tabular - translator is used. - :type translator: object - :param enable_staging: Specifies whether to copy data via an interim - staging. Default value is false. Type: boolean (or Expression with - resultType boolean). - :type enable_staging: object - :param staging_settings: Specifies interim staging settings when - EnableStaging is true. - :type staging_settings: ~azure.mgmt.datafactory.models.StagingSettings - :param parallel_copies: Maximum number of concurrent sessions opened on - the source or sink to avoid overloading the data store. Type: integer (or - Expression with resultType integer), minimum: 0. - :type parallel_copies: object - :param data_integration_units: Maximum number of data integration units - that can be used to perform this data movement. 
Type: integer (or - Expression with resultType integer), minimum: 0. - :type data_integration_units: object - :param enable_skip_incompatible_row: Whether to skip incompatible row. - Default value is false. Type: boolean (or Expression with resultType - boolean). - :type enable_skip_incompatible_row: object - :param redirect_incompatible_row_settings: Redirect incompatible row - settings when EnableSkipIncompatibleRow is true. - :type redirect_incompatible_row_settings: - ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings - :param preserve_rules: Preserve Rules. - :type preserve_rules: list[object] - :param preserve: Preserve rules. - :type preserve: list[object] - :param inputs: List of inputs for the activity. - :type inputs: list[~azure.mgmt.datafactory.models.DatasetReference] - :param outputs: List of outputs for the activity. - :type outputs: list[~azure.mgmt.datafactory.models.DatasetReference] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'source': {'required': True}, - 'sink': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, - 'sink': {'key': 'typeProperties.sink', 'type': 'CopySink'}, - 'translator': {'key': 'typeProperties.translator', 'type': 'object'}, - 'enable_staging': {'key': 'typeProperties.enableStaging', 'type': 'object'}, - 'staging_settings': {'key': 'typeProperties.stagingSettings', 'type': 'StagingSettings'}, - 'parallel_copies': {'key': 'typeProperties.parallelCopies', 'type': 'object'}, - 'data_integration_units': {'key': 'typeProperties.dataIntegrationUnits', 'type': 'object'}, - 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, - 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, - 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, - 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, - 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, - 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, - } - - def __init__(self, *, name: str, source, sink, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, translator=None, enable_staging=None, staging_settings=None, parallel_copies=None, data_integration_units=None, enable_skip_incompatible_row=None, redirect_incompatible_row_settings=None, preserve_rules=None, preserve=None, inputs=None, outputs=None, **kwargs) -> None: - super(CopyActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.source = source - self.sink = sink - self.translator = translator - self.enable_staging = enable_staging - self.staging_settings = staging_settings - self.parallel_copies = parallel_copies - 
self.data_integration_units = data_integration_units - self.enable_skip_incompatible_row = enable_skip_incompatible_row - self.redirect_incompatible_row_settings = redirect_incompatible_row_settings - self.preserve_rules = preserve_rules - self.preserve = preserve - self.inputs = inputs - self.outputs = outputs - self.type = 'Copy' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py deleted file mode 100644 index c0e000914325..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py +++ /dev/null @@ -1,81 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class CopySink(Model): - """A copy activity sink. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: CosmosDbMongoDbApiSink, SalesforceServiceCloudSink, - SalesforceSink, AzureDataExplorerSink, CommonDataServiceForAppsSink, - DynamicsCrmSink, DynamicsSink, MicrosoftAccessSink, InformixSink, OdbcSink, - AzureSearchIndexSink, AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, - SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, - DocumentDbCollectionSink, FileSystemSink, BlobSink, BinarySink, - ParquetSink, AvroSink, AzureTableSink, AzureQueueSink, - SapCloudForCustomerSink, AzurePostgreSqlSink, DelimitedTextSink - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. 
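
Putting the CopyActivity constructor above together with a concrete polymorphic source and sink; the dataset reference names are illustrative:

from azure.mgmt.datafactory.models import (
    BlobSink, BlobSource, CopyActivity, DatasetReference,
)

copy = CopyActivity(
    name='CopyBlobToBlob',
    source=BlobSource(),                                # CopySource subclass
    sink=BlobSink(copy_behavior='PreserveHierarchy'),   # CopySink subclass
    inputs=[DatasetReference(reference_name='InputBlobDataset')],
    outputs=[DatasetReference(reference_name='OutputBlobDataset')],
)
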
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'DelimitedTextSink': 'DelimitedTextSink'} - } - - def __init__(self, **kwargs): - super(CopySink, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.write_batch_size = kwargs.get('write_batch_size', None) - self.write_batch_timeout = kwargs.get('write_batch_timeout', None) - self.sink_retry_count = kwargs.get('sink_retry_count', None) - self.sink_retry_wait = kwargs.get('sink_retry_wait', None) - self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py deleted file mode 100644 index ae60b61e61c3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py +++ /dev/null @@ -1,81 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class CopySink(Model): - """A copy activity sink. - - You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: CosmosDbMongoDbApiSink, SalesforceServiceCloudSink, - SalesforceSink, AzureDataExplorerSink, CommonDataServiceForAppsSink, - DynamicsCrmSink, DynamicsSink, MicrosoftAccessSink, InformixSink, OdbcSink, - AzureSearchIndexSink, AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, - SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, - DocumentDbCollectionSink, FileSystemSink, BlobSink, BinarySink, - ParquetSink, AvroSink, AzureTableSink, AzureQueueSink, - SapCloudForCustomerSink, AzurePostgreSqlSink, DelimitedTextSink - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'DelimitedTextSink': 'DelimitedTextSink'} - } - - def __init__(self, *, additional_properties=None, 
write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: - super(CopySink, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.write_batch_size = write_batch_size - self.write_batch_timeout = write_batch_timeout - self.sink_retry_count = sink_retry_count - self.sink_retry_wait = sink_retry_wait - self.max_concurrent_connections = max_concurrent_connections - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py deleted file mode 100644 index 72ad6e8e136d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py +++ /dev/null @@ -1,83 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class CopySource(Model): - """A copy activity source. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonRedshiftSource, GoogleAdWordsSource, - OracleServiceCloudSource, DynamicsAXSource, ResponsysSource, - SalesforceMarketingCloudSource, VerticaSource, NetezzaSource, ZohoSource, - XeroSource, SquareSource, SparkSource, ShopifySource, ServiceNowSource, - QuickBooksSource, PrestoSource, PhoenixSource, PaypalSource, MarketoSource, - AzureMariaDBSource, MariaDBSource, MagentoSource, JiraSource, ImpalaSource, - HubspotSource, HiveSource, HBaseSource, GreenplumSource, - GoogleBigQuerySource, EloquaSource, DrillSource, CouchbaseSource, - ConcurSource, AzurePostgreSqlSource, AmazonMWSSource, HttpSource, - AzureBlobFSSource, AzureDataLakeStoreSource, Office365Source, - CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, CassandraSource, - WebSource, TeradataSource, OracleSource, AzureDataExplorerSource, - AzureMySqlSource, HdfsSource, FileSystemSource, SqlDWSource, SqlMISource, - AzureSqlSource, SqlServerSource, SqlSource, RestSource, SapTableSource, - SapOpenHubSource, SapHanaSource, SapEccSource, SapCloudForCustomerSource, - SalesforceServiceCloudSource, SalesforceSource, ODataSource, SapBwSource, - SybaseSource, PostgreSqlSource, MySqlSource, OdbcSource, Db2Source, - MicrosoftAccessSource, InformixSource, RelationalSource, - CommonDataServiceForAppsSource, DynamicsCrmSource, DynamicsSource, - DocumentDbCollectionSource, BlobSource, AzureTableSource, BinarySource, - DelimitedTextSource, ParquetSource, AvroSource - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
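
Because CopySource carries a _subtype_map (shown below), msrest can resolve a serialized payload to the right subclass via its 'type' discriminator; a sketch, assuming msrest's Model.from_dict behavior for polymorphic bases:

from azure.mgmt.datafactory.models import CopySource

payload = {'type': 'BlobSource', 'recursive': True}

# from_dict consults the discriminator and should return a BlobSource instance.
source = CopySource.from_dict(payload)
print(type(source).__name__)
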
- :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'ODataSource': 'ODataSource', 'SapBwSource': 'SapBwSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 
'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'BinarySource': 'BinarySource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'AvroSource': 'AvroSource'} - } - - def __init__(self, **kwargs): - super(CopySource, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.source_retry_count = kwargs.get('source_retry_count', None) - self.source_retry_wait = kwargs.get('source_retry_wait', None) - self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py deleted file mode 100644 index 960c56774e4c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py +++ /dev/null @@ -1,83 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class CopySource(Model): - """A copy activity source. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonRedshiftSource, GoogleAdWordsSource, - OracleServiceCloudSource, DynamicsAXSource, ResponsysSource, - SalesforceMarketingCloudSource, VerticaSource, NetezzaSource, ZohoSource, - XeroSource, SquareSource, SparkSource, ShopifySource, ServiceNowSource, - QuickBooksSource, PrestoSource, PhoenixSource, PaypalSource, MarketoSource, - AzureMariaDBSource, MariaDBSource, MagentoSource, JiraSource, ImpalaSource, - HubspotSource, HiveSource, HBaseSource, GreenplumSource, - GoogleBigQuerySource, EloquaSource, DrillSource, CouchbaseSource, - ConcurSource, AzurePostgreSqlSource, AmazonMWSSource, HttpSource, - AzureBlobFSSource, AzureDataLakeStoreSource, Office365Source, - CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, CassandraSource, - WebSource, TeradataSource, OracleSource, AzureDataExplorerSource, - AzureMySqlSource, HdfsSource, FileSystemSource, SqlDWSource, SqlMISource, - AzureSqlSource, SqlServerSource, SqlSource, RestSource, SapTableSource, - SapOpenHubSource, SapHanaSource, SapEccSource, SapCloudForCustomerSource, - SalesforceServiceCloudSource, SalesforceSource, ODataSource, SapBwSource, - SybaseSource, PostgreSqlSource, MySqlSource, OdbcSource, Db2Source, - MicrosoftAccessSource, InformixSource, RelationalSource, - CommonDataServiceForAppsSource, DynamicsCrmSource, DynamicsSource, - DocumentDbCollectionSource, BlobSource, AzureTableSource, BinarySource, - DelimitedTextSource, ParquetSource, AvroSource - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. 
Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'ODataSource': 'ODataSource', 'SapBwSource': 'SapBwSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 
'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'BinarySource': 'BinarySource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'AvroSource': 'AvroSource'} - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: - super(CopySource, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.source_retry_count = source_retry_count - self.source_retry_wait = source_retry_wait - self.max_concurrent_connections = max_concurrent_connections - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service.py deleted file mode 100644 index 6a8a462f6f46..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service.py +++ /dev/null @@ -1,71 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class CosmosDbLinkedService(LinkedService): - """Microsoft Azure Cosmos Database (CosmosDB) linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param account_key: The Azure key vault secret reference of accountKey in - connection string. - :type account_key: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
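
A minimal sketch of this linked service with the required connection string supplied as a SecureString; the endpoint, key, and database are placeholders:

from azure.mgmt.datafactory.models import CosmosDbLinkedService, SecureString

cosmos_ls = CosmosDbLinkedService(
    connection_string=SecureString(
        value='AccountEndpoint=https://example.documents.azure.com:443/;'
              'AccountKey=<account-key>;Database=mydb',
    ),
)
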
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(CosmosDbLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.account_key = kwargs.get('account_key', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'CosmosDb' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service_py3.py deleted file mode 100644 index 57dab80e06b9..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service_py3.py +++ /dev/null @@ -1,71 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class CosmosDbLinkedService(LinkedService): - """Microsoft Azure Cosmos Database (CosmosDB) linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param account_key: The Azure key vault secret reference of accountKey in - connection string. - :type account_key: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
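
Alternatively, the account key can stay out of the connection string and come from Key Vault through the account_key parameter above; the names are illustrative and the AzureKeyVaultSecretReference signature is assumed from this generated models package:

from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference, CosmosDbLinkedService, LinkedServiceReference,
)

cosmos_ls = CosmosDbLinkedService(
    connection_string='AccountEndpoint=https://example.documents.azure.com:443/;Database=mydb',
    account_key=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='MyKeyVaultLinkedService'),
        secret_name='cosmos-account-key',
    ),
)
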
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset.py
deleted file mode 100644
index d86648eb5eee..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .dataset import Dataset
-
-
-class CosmosDbMongoDbApiCollectionDataset(Dataset):
-    """The CosmosDB (MongoDB API) database dataset.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param description: Dataset description.
-    :type description: str
-    :param structure: Columns that define the structure of the dataset. Type:
-     array (or Expression with resultType array), itemType: DatasetDataElement.
-    :type structure: object
-    :param schema: Columns that define the physical type schema of the
-     dataset. Type: array (or Expression with resultType array), itemType:
-     DatasetSchemaDataElement.
-    :type schema: object
-    :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param parameters: Parameters for dataset.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     Dataset.
-    :type annotations: list[object]
-    :param folder: The folder that this Dataset is in. If not specified,
-     Dataset will appear at the root level.
-    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param collection: Required. The collection name of the CosmosDB (MongoDB
-     API) database. Type: string (or Expression with resultType string).
-    :type collection: object
-    """
-
-    _validation = {
-        'linked_service_name': {'required': True},
-        'type': {'required': True},
-        'collection': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'description': {'key': 'description', 'type': 'str'},
-        'structure': {'key': 'structure', 'type': 'object'},
-        'schema': {'key': 'schema', 'type': 'object'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
-        'type': {'key': 'type', 'type': 'str'},
-        'collection': {'key': 'typeProperties.collection', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(CosmosDbMongoDbApiCollectionDataset, self).__init__(**kwargs)
-        self.collection = kwargs.get('collection', None)
-        self.type = 'CosmosDbMongoDbApiCollection'
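A minimal construction sketch for the dataset model above; per its _validation block, linked_service_name and collection are the two required properties. The linked-service name is a placeholder:

from azure.mgmt.datafactory.models import (
    CosmosDbMongoDbApiCollectionDataset, DatasetResource, LinkedServiceReference)

dataset = CosmosDbMongoDbApiCollectionDataset(
    linked_service_name=LinkedServiceReference(reference_name='CosmosMongoLinkedService'),
    collection='orders')  # collection name; a string or an Expression
ds_resource = DatasetResource(properties=dataset)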
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset_py3.py
deleted file mode 100644
index de2650fa14b4..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset_py3.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .dataset_py3 import Dataset
-
-
-class CosmosDbMongoDbApiCollectionDataset(Dataset):
-    """The CosmosDB (MongoDB API) database dataset.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param description: Dataset description.
-    :type description: str
-    :param structure: Columns that define the structure of the dataset. Type:
-     array (or Expression with resultType array), itemType: DatasetDataElement.
-    :type structure: object
-    :param schema: Columns that define the physical type schema of the
-     dataset. Type: array (or Expression with resultType array), itemType:
-     DatasetSchemaDataElement.
-    :type schema: object
-    :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param parameters: Parameters for dataset.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     Dataset.
-    :type annotations: list[object]
-    :param folder: The folder that this Dataset is in. If not specified,
-     Dataset will appear at the root level.
-    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param collection: Required. The collection name of the CosmosDB (MongoDB
-     API) database. Type: string (or Expression with resultType string).
-    :type collection: object
-    """
-
-    _validation = {
-        'linked_service_name': {'required': True},
-        'type': {'required': True},
-        'collection': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'description': {'key': 'description', 'type': 'str'},
-        'structure': {'key': 'structure', 'type': 'object'},
-        'schema': {'key': 'schema', 'type': 'object'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
-        'type': {'key': 'type', 'type': 'str'},
-        'collection': {'key': 'typeProperties.collection', 'type': 'object'},
-    }
-
-    def __init__(self, *, linked_service_name, collection, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None:
-        super(CosmosDbMongoDbApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
-        self.collection = collection
-        self.type = 'CosmosDbMongoDbApiCollection'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service.py
deleted file mode 100644
index f76e7c5f5a41..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service.py
+++ /dev/null
@@ -1,67 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .linked_service import LinkedService
-
-
-class CosmosDbMongoDbApiLinkedService(LinkedService):
-    """Linked service for CosmosDB (MongoDB API) data source.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param connection_string: Required. The CosmosDB (MongoDB API) connection
-     string. Type: string, SecureString or AzureKeyVaultSecretReference.
-    :type connection_string: object
-    :param database: Required. The name of the CosmosDB (MongoDB API) database
-     that you want to access. Type: string (or Expression with resultType
-     string).
-    :type database: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-        'connection_string': {'required': True},
-        'database': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
-        'description': {'key': 'description', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
-        'database': {'key': 'typeProperties.database', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(CosmosDbMongoDbApiLinkedService, self).__init__(**kwargs)
-        self.connection_string = kwargs.get('connection_string', None)
-        self.database = kwargs.get('database', None)
-        self.type = 'CosmosDbMongoDbApi'
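An illustrative sketch of the linked service above; both connection_string and database are required, and the credential values shown are placeholders:

from azure.mgmt.datafactory.models import (
    CosmosDbMongoDbApiLinkedService, SecureString)

mongo_ls = CosmosDbMongoDbApiLinkedService(
    connection_string=SecureString(
        value='mongodb://<account>:<key>@<account>.documents.azure.com:10255/?ssl=true'),
    database='mydb')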
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service_py3.py
deleted file mode 100644
index 2a72bfce4ff2..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service_py3.py
+++ /dev/null
@@ -1,67 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .linked_service_py3 import LinkedService
-
-
-class CosmosDbMongoDbApiLinkedService(LinkedService):
-    """Linked service for CosmosDB (MongoDB API) data source.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param connection_string: Required. The CosmosDB (MongoDB API) connection
-     string. Type: string, SecureString or AzureKeyVaultSecretReference.
-    :type connection_string: object
-    :param database: Required. The name of the CosmosDB (MongoDB API) database
-     that you want to access. Type: string (or Expression with resultType
-     string).
-    :type database: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-        'connection_string': {'required': True},
-        'database': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
-        'description': {'key': 'description', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
-        'database': {'key': 'typeProperties.database', 'type': 'object'},
-    }
-
-    def __init__(self, *, connection_string, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None:
-        super(CosmosDbMongoDbApiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
-        self.connection_string = connection_string
-        self.database = database
-        self.type = 'CosmosDbMongoDbApi'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink.py
deleted file mode 100644
index 0d40b52add80..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .copy_sink import CopySink
-
-
-class CosmosDbMongoDbApiSink(CopySink):
-    """A copy activity sink for a CosmosDB (MongoDB API) database.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param write_batch_size: Write batch size. Type: integer (or Expression
-     with resultType integer), minimum: 0.
-    :type write_batch_size: object
-    :param write_batch_timeout: Write batch timeout. Type: string (or
-     Expression with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type write_batch_timeout: object
-    :param sink_retry_count: Sink retry count. Type: integer (or Expression
-     with resultType integer).
-    :type sink_retry_count: object
-    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
-     resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type sink_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the sink data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param write_behavior: Specifies whether the document with the same key
-     should be overwritten (upsert) rather than throw an exception (insert).
-     The default value is "insert". Type: string (or Expression with
-     resultType string).
-    :type write_behavior: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
-        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
-        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
-        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(CosmosDbMongoDbApiSink, self).__init__(**kwargs)
-        self.write_behavior = kwargs.get('write_behavior', None)
-        self.type = 'CosmosDbMongoDbApiSink'
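A sketch of configuring the sink above for upsert semantics; per the docstring, the default write behavior is "insert":

from azure.mgmt.datafactory.models import CosmosDbMongoDbApiSink

sink = CosmosDbMongoDbApiSink(
    write_behavior='upsert',  # overwrite documents that share a key
    write_batch_size=1000)    # integer or Expression; minimum 0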
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink_py3.py
deleted file mode 100644
index 5db1ee5c9d36..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink_py3.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .copy_sink_py3 import CopySink
-
-
-class CosmosDbMongoDbApiSink(CopySink):
-    """A copy activity sink for a CosmosDB (MongoDB API) database.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param write_batch_size: Write batch size. Type: integer (or Expression
-     with resultType integer), minimum: 0.
-    :type write_batch_size: object
-    :param write_batch_timeout: Write batch timeout. Type: string (or
-     Expression with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type write_batch_timeout: object
-    :param sink_retry_count: Sink retry count. Type: integer (or Expression
-     with resultType integer).
-    :type sink_retry_count: object
-    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
-     resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type sink_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the sink data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param write_behavior: Specifies whether the document with the same key
-     should be overwritten (upsert) rather than throw an exception (insert).
-     The default value is "insert". Type: string (or Expression with
-     resultType string).
-    :type write_behavior: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
-        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
-        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
-        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
-    }
-
-    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None:
-        super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
-        self.write_behavior = write_behavior
-        self.type = 'CosmosDbMongoDbApiSink'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py
deleted file mode 100644
index 44dc7443427b..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .copy_source import CopySource
-
-
-class CosmosDbMongoDbApiSource(CopySource):
-    """A copy activity source for a CosmosDB (MongoDB API) database.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param source_retry_count: Source retry count. Type: integer (or
-     Expression with resultType integer).
-    :type source_retry_count: object
-    :param source_retry_wait: Source retry wait. Type: string (or Expression
-     with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type source_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the source data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param filter: Specifies the selection filter using query operators. To
-     return all documents in a collection, omit this parameter or pass an
-     empty document ({}). Type: string (or Expression with resultType string).
-    :type filter: object
-    :param cursor_methods: Cursor methods for MongoDB query.
-    :type cursor_methods:
-     ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties
-    :param batch_size: Specifies the number of documents to return in each
-     batch of the response from the MongoDB instance. In most cases, modifying
-     the batch size will not affect the user or the application. This
-     property's main purpose is to avoid hitting the limitation of response
-     size. Type: integer (or Expression with resultType integer).
-    :type batch_size: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
-        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'filter': {'key': 'filter', 'type': 'object'},
-        'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'},
-        'batch_size': {'key': 'batchSize', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(CosmosDbMongoDbApiSource, self).__init__(**kwargs)
-        self.filter = kwargs.get('filter', None)
-        self.cursor_methods = kwargs.get('cursor_methods', None)
-        self.batch_size = kwargs.get('batch_size', None)
-        self.type = 'CosmosDbMongoDbApiSource'
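A sketch of the source above with a filter and cursor methods; the filter document and sort order shown are illustrative values:

from azure.mgmt.datafactory.models import (
    CosmosDbMongoDbApiSource, MongoDbCursorMethodsProperties)

source = CosmosDbMongoDbApiSource(
    filter='{"status": "active"}',  # omit or pass {} to return all documents
    cursor_methods=MongoDbCursorMethodsProperties(sort='{"_id": -1}', limit=100),
    batch_size=500)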
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source_py3.py
deleted file mode 100644
index 7d180f105abf..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source_py3.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .copy_source_py3 import CopySource
-
-
-class CosmosDbMongoDbApiSource(CopySource):
-    """A copy activity source for a CosmosDB (MongoDB API) database.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param source_retry_count: Source retry count. Type: integer (or
-     Expression with resultType integer).
-    :type source_retry_count: object
-    :param source_retry_wait: Source retry wait. Type: string (or Expression
-     with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type source_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the source data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param filter: Specifies the selection filter using query operators. To
-     return all documents in a collection, omit this parameter or pass an
-     empty document ({}). Type: string (or Expression with resultType string).
-    :type filter: object
-    :param cursor_methods: Cursor methods for MongoDB query.
-    :type cursor_methods:
-     ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties
-    :param batch_size: Specifies the number of documents to return in each
-     batch of the response from the MongoDB instance. In most cases, modifying
-     the batch size will not affect the user or the application. This
-     property's main purpose is to avoid hitting the limitation of response
-     size. Type: integer (or Expression with resultType integer).
-    :type batch_size: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
-        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'filter': {'key': 'filter', 'type': 'object'},
-        'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'},
-        'batch_size': {'key': 'batchSize', 'type': 'object'},
-    }
-
-    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None, **kwargs) -> None:
-        super(CosmosDbMongoDbApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
-        self.filter = filter
-        self.cursor_methods = cursor_methods
-        self.batch_size = batch_size
-        self.type = 'CosmosDbMongoDbApiSource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service.py
deleted file mode 100644
index 76e45648f941..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service.py
+++ /dev/null
@@ -1,70 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .linked_service import LinkedService
-
-
-class CouchbaseLinkedService(LinkedService):
-    """Couchbase server linked service.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param connection_string: An ODBC connection string. Type: string,
-     SecureString or AzureKeyVaultSecretReference.
-    :type connection_string: object
-    :param cred_string: The Azure key vault secret reference of credString in
-     connection string.
-    :type cred_string:
-     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
-    :type encrypted_credential: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
-        'description': {'key': 'description', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
-        'cred_string': {'key': 'typeProperties.credString', 'type': 'AzureKeyVaultSecretReference'},
-        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(CouchbaseLinkedService, self).__init__(**kwargs)
-        self.connection_string = kwargs.get('connection_string', None)
-        self.cred_string = kwargs.get('cred_string', None)
-        self.encrypted_credential = kwargs.get('encrypted_credential', None)
-        self.type = 'Couchbase'
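A construction sketch for the Couchbase linked service above, pulling the credential string from Key Vault; the host, linked-service name, and secret name are placeholders:

from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference, CouchbaseLinkedService, LinkedServiceReference)

couchbase_ls = CouchbaseLinkedService(
    connection_string='Server=<host>;Port=8093',  # ODBC-style string
    cred_string=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='KeyVaultLinkedService'),
        secret_name='couchbaseCredString'))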
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service_py3.py
deleted file mode 100644
index afe336f666de..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service_py3.py
+++ /dev/null
@@ -1,70 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .linked_service_py3 import LinkedService
-
-
-class CouchbaseLinkedService(LinkedService):
-    """Couchbase server linked service.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param connection_string: An ODBC connection string. Type: string,
-     SecureString or AzureKeyVaultSecretReference.
-    :type connection_string: object
-    :param cred_string: The Azure key vault secret reference of credString in
-     connection string.
-    :type cred_string:
-     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
-    :type encrypted_credential: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
-        'description': {'key': 'description', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
-        'cred_string': {'key': 'typeProperties.credString', 'type': 'AzureKeyVaultSecretReference'},
-        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
-    }
-
-    def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, cred_string=None, encrypted_credential=None, **kwargs) -> None:
-        super(CouchbaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
-        self.connection_string = connection_string
-        self.cred_string = cred_string
-        self.encrypted_credential = encrypted_credential
-        self.type = 'Couchbase'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source.py
deleted file mode 100644
index b355605417d1..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .copy_source import CopySource
-
-
-class CouchbaseSource(CopySource):
-    """A copy activity Couchbase server source.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param source_retry_count: Source retry count. Type: integer (or
-     Expression with resultType integer).
-    :type source_retry_count: object
-    :param source_retry_wait: Source retry wait. Type: string (or Expression
-     with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type source_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the source data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param query: A query to retrieve data from source. Type: string (or
-     Expression with resultType string).
-    :type query: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
-        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'query': {'key': 'query', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(CouchbaseSource, self).__init__(**kwargs)
-        self.query = kwargs.get('query', None)
-        self.type = 'CouchbaseSource'
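To show where the source model above slots in, a sketch of a copy activity wiring a Couchbase query to a blob sink; the dataset reference names are placeholders:

from azure.mgmt.datafactory.models import (
    BlobSink, CopyActivity, CouchbaseSource, DatasetReference)

copy_activity = CopyActivity(
    name='CopyFromCouchbase',
    inputs=[DatasetReference(reference_name='CouchbaseDataset')],
    outputs=[DatasetReference(reference_name='BlobDataset')],
    source=CouchbaseSource(query='SELECT * FROM `travel-sample` LIMIT 10'),
    sink=BlobSink())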
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source_py3.py
deleted file mode 100644
index 1358fc20feba..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source_py3.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .copy_source_py3 import CopySource
-
-
-class CouchbaseSource(CopySource):
-    """A copy activity Couchbase server source.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param source_retry_count: Source retry count. Type: integer (or
-     Expression with resultType integer).
-    :type source_retry_count: object
-    :param source_retry_wait: Source retry wait. Type: string (or Expression
-     with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type source_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the source data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param query: A query to retrieve data from source. Type: string (or
-     Expression with resultType string).
-    :type query: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
-        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'query': {'key': 'query', 'type': 'object'},
-    }
-
-    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
-        super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
-        self.query = query
-        self.type = 'CouchbaseSource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset.py
deleted file mode 100644
index 821274b9aae4..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset.py
+++ /dev/null
@@ -1,72 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .dataset import Dataset
-
-
-class CouchbaseTableDataset(Dataset):
-    """Couchbase server dataset.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param description: Dataset description.
-    :type description: str
-    :param structure: Columns that define the structure of the dataset. Type:
-     array (or Expression with resultType array), itemType: DatasetDataElement.
-    :type structure: object
-    :param schema: Columns that define the physical type schema of the
-     dataset. Type: array (or Expression with resultType array), itemType:
-     DatasetSchemaDataElement.
-    :type schema: object
-    :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param parameters: Parameters for dataset.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     Dataset.
-    :type annotations: list[object]
-    :param folder: The folder that this Dataset is in. If not specified,
-     Dataset will appear at the root level.
-    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param table_name: The table name. Type: string (or Expression with
-     resultType string).
-    :type table_name: object
-    """
-
-    _validation = {
-        'linked_service_name': {'required': True},
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'description': {'key': 'description', 'type': 'str'},
-        'structure': {'key': 'structure', 'type': 'object'},
-        'schema': {'key': 'schema', 'type': 'object'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
-        'type': {'key': 'type', 'type': 'str'},
-        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(CouchbaseTableDataset, self).__init__(**kwargs)
-        self.table_name = kwargs.get('table_name', None)
-        self.type = 'CouchbaseTable'
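A minimal sketch for the dataset above; only linked_service_name is required, and the names shown are placeholders:

from azure.mgmt.datafactory.models import (
    CouchbaseTableDataset, LinkedServiceReference)

couchbase_ds = CouchbaseTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='CouchbaseLinkedService'),
    table_name='travel-sample')  # optional; a string or an Expression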
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset_py3.py
deleted file mode 100644
index cf5299fd55a5..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset_py3.py
+++ /dev/null
@@ -1,72 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .dataset_py3 import Dataset
-
-
-class CouchbaseTableDataset(Dataset):
-    """Couchbase server dataset.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param description: Dataset description.
-    :type description: str
-    :param structure: Columns that define the structure of the dataset. Type:
-     array (or Expression with resultType array), itemType: DatasetDataElement.
-    :type structure: object
-    :param schema: Columns that define the physical type schema of the
-     dataset. Type: array (or Expression with resultType array), itemType:
-     DatasetSchemaDataElement.
-    :type schema: object
-    :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param parameters: Parameters for dataset.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     Dataset.
-    :type annotations: list[object]
-    :param folder: The folder that this Dataset is in. If not specified,
-     Dataset will appear at the root level.
-    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param table_name: The table name. Type: string (or Expression with
-     resultType string).
-    :type table_name: object
-    """
-
-    _validation = {
-        'linked_service_name': {'required': True},
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'description': {'key': 'description', 'type': 'str'},
-        'structure': {'key': 'structure', 'type': 'object'},
-        'schema': {'key': 'schema', 'type': 'object'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
-        'type': {'key': 'type', 'type': 'str'},
-        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
-    }
-
-    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None:
-        super(CouchbaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
-        self.table_name = table_name
-        self.type = 'CouchbaseTable'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request.py
deleted file mode 100644
index 0e7002dcf68a..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class CreateLinkedIntegrationRuntimeRequest(Model):
-    """The linked integration runtime information.
-
-    :param name: The name of the linked integration runtime.
-    :type name: str
-    :param subscription_id: The ID of the subscription that the linked
-     integration runtime belongs to.
-    :type subscription_id: str
-    :param data_factory_name: The name of the data factory that the linked
-     integration runtime belongs to.
-    :type data_factory_name: str
-    :param data_factory_location: The location of the data factory that the
-     linked integration runtime belongs to.
-    :type data_factory_location: str
-    """
-
-    _attribute_map = {
-        'name': {'key': 'name', 'type': 'str'},
-        'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
-        'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'},
-        'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'},
-    }
-
-    def __init__(self, **kwargs):
-        super(CreateLinkedIntegrationRuntimeRequest, self).__init__(**kwargs)
-        self.name = kwargs.get('name', None)
-        self.subscription_id = kwargs.get('subscription_id', None)
-        self.data_factory_name = kwargs.get('data_factory_name', None)
-        self.data_factory_location = kwargs.get('data_factory_location', None)
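A sketch of filling in the request model above; every field is optional, and the subscription, factory, and location values are placeholders:

from azure.mgmt.datafactory.models import CreateLinkedIntegrationRuntimeRequest

request = CreateLinkedIntegrationRuntimeRequest(
    name='linked-ir',
    subscription_id='<subscription-id>',
    data_factory_name='<factory-name>',
    data_factory_location='westus')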
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request_py3.py
deleted file mode 100644
index aad7d6fa5ac0..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request_py3.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class CreateLinkedIntegrationRuntimeRequest(Model):
-    """The linked integration runtime information.
-
-    :param name: The name of the linked integration runtime.
-    :type name: str
-    :param subscription_id: The ID of the subscription that the linked
-     integration runtime belongs to.
-    :type subscription_id: str
-    :param data_factory_name: The name of the data factory that the linked
-     integration runtime belongs to.
-    :type data_factory_name: str
-    :param data_factory_location: The location of the data factory that the
-     linked integration runtime belongs to.
-    :type data_factory_location: str
-    """
-
-    _attribute_map = {
-        'name': {'key': 'name', 'type': 'str'},
-        'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
-        'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'},
-        'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'},
-    }
-
-    def __init__(self, *, name: str=None, subscription_id: str=None, data_factory_name: str=None, data_factory_location: str=None, **kwargs) -> None:
-        super(CreateLinkedIntegrationRuntimeRequest, self).__init__(**kwargs)
-        self.name = name
-        self.subscription_id = subscription_id
-        self.data_factory_name = data_factory_name
-        self.data_factory_location = data_factory_location
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response.py
deleted file mode 100644
index 18ec9f963e65..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class CreateRunResponse(Model):
-    """Response body with a run identifier.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param run_id: Required. Identifier of a run.
-    :type run_id: str
-    """
-
-    _validation = {
-        'run_id': {'required': True},
-    }
-
-    _attribute_map = {
-        'run_id': {'key': 'runId', 'type': 'str'},
-    }
-
-    def __init__(self, **kwargs):
-        super(CreateRunResponse, self).__init__(**kwargs)
-        self.run_id = kwargs.get('run_id', None)
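CreateRunResponse is what a pipeline-run request returns. A sketch of where it surfaces, assuming the usual DataFactoryManagementClient operation group for pipeline runs (client construction omitted; the resource names are placeholders):

# run_response is a CreateRunResponse instance.
run_response = client.pipelines.create_run(
    '<resource-group>', '<factory-name>', '<pipeline-name>')
print(run_response.run_id)  # run_id is the only, and required, field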
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response_py3.py
deleted file mode 100644
index bb280441ae90..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response_py3.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class CreateRunResponse(Model):
-    """Response body with a run identifier.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param run_id: Required. Identifier of a run.
-    :type run_id: str
-    """
-
-    _validation = {
-        'run_id': {'required': True},
-    }
-
-    _attribute_map = {
-        'run_id': {'key': 'runId', 'type': 'str'},
-    }
-
-    def __init__(self, *, run_id: str, **kwargs) -> None:
-        super(CreateRunResponse, self).__init__(**kwargs)
-        self.run_id = run_id
- :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param command: Required. Command for custom activity Type: string (or - Expression with resultType string). - :type command: object - :param resource_linked_service: Resource linked service reference. - :type resource_linked_service: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param folder_path: Folder path for resource files Type: string (or - Expression with resultType string). - :type folder_path: object - :param reference_objects: Reference objects - :type reference_objects: - ~azure.mgmt.datafactory.models.CustomActivityReferenceObject - :param extended_properties: User defined property bag. There is no - restriction on the keys or values that can be used. The user specified - custom activity has the full responsibility to consume and interpret the - content defined. - :type extended_properties: dict[str, object] - :param retention_time_in_days: The retention time for the files submitted - for custom activity. Type: double (or Expression with resultType double). - :type retention_time_in_days: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'command': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'command': {'key': 'typeProperties.command', 'type': 'object'}, - 'resource_linked_service': {'key': 'typeProperties.resourceLinkedService', 'type': 'LinkedServiceReference'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'}, - 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'}, - 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(CustomActivity, self).__init__(**kwargs) - self.command = kwargs.get('command', None) - self.resource_linked_service = kwargs.get('resource_linked_service', None) - self.folder_path = kwargs.get('folder_path', None) - self.reference_objects = kwargs.get('reference_objects', None) - self.extended_properties = kwargs.get('extended_properties', None) - self.retention_time_in_days = kwargs.get('retention_time_in_days', None) - self.type = 'Custom' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_py3.py deleted file mode 100644 index 
bf8326f053f2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_py3.py +++ /dev/null @@ -1,91 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class CustomActivity(ExecutionActivity): - """Custom activity type. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param command: Required. Command for custom activity Type: string (or - Expression with resultType string). - :type command: object - :param resource_linked_service: Resource linked service reference. - :type resource_linked_service: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param folder_path: Folder path for resource files Type: string (or - Expression with resultType string). - :type folder_path: object - :param reference_objects: Reference objects - :type reference_objects: - ~azure.mgmt.datafactory.models.CustomActivityReferenceObject - :param extended_properties: User defined property bag. There is no - restriction on the keys or values that can be used. The user specified - custom activity has the full responsibility to consume and interpret the - content defined. - :type extended_properties: dict[str, object] - :param retention_time_in_days: The retention time for the files submitted - for custom activity. Type: double (or Expression with resultType double). 
- :type retention_time_in_days: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'command': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'command': {'key': 'typeProperties.command', 'type': 'object'}, - 'resource_linked_service': {'key': 'typeProperties.resourceLinkedService', 'type': 'LinkedServiceReference'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'}, - 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'}, - 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'}, - } - - def __init__(self, *, name: str, command, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, resource_linked_service=None, folder_path=None, reference_objects=None, extended_properties=None, retention_time_in_days=None, **kwargs) -> None: - super(CustomActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.command = command - self.resource_linked_service = resource_linked_service - self.folder_path = folder_path - self.reference_objects = reference_objects - self.extended_properties = extended_properties - self.retention_time_in_days = retention_time_in_days - self.type = 'Custom' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object.py deleted file mode 100644 index 5f95a54612dd..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object.py +++ /dev/null @@ -1,33 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class CustomActivityReferenceObject(Model): - """Reference objects for custom activity. - - :param linked_services: Linked service references. - :type linked_services: - list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param datasets: Dataset references. 
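# Illustrative sketch only (not part of the patch): a minimal CustomActivity
# with its two required fields (name, command). The linked service and
# folder names are hypothetical placeholders.
from azure.mgmt.datafactory.models import CustomActivity, LinkedServiceReference

activity = CustomActivity(
    name='RunCustomExe',
    command='cmd /c echo hello',  # Type: string (or Expression)
    linked_service_name=LinkedServiceReference(reference_name='AzureBatchLS'),
    folder_path='customactivity/binaries',
)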
- :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference] - """ - - _attribute_map = { - 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceReference]'}, - 'datasets': {'key': 'datasets', 'type': '[DatasetReference]'}, - } - - def __init__(self, **kwargs): - super(CustomActivityReferenceObject, self).__init__(**kwargs) - self.linked_services = kwargs.get('linked_services', None) - self.datasets = kwargs.get('datasets', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object_py3.py deleted file mode 100644 index f860f0141bd0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object_py3.py +++ /dev/null @@ -1,33 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class CustomActivityReferenceObject(Model): - """Reference objects for custom activity. - - :param linked_services: Linked service references. - :type linked_services: - list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param datasets: Dataset references. - :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference] - """ - - _attribute_map = { - 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceReference]'}, - 'datasets': {'key': 'datasets', 'type': '[DatasetReference]'}, - } - - def __init__(self, *, linked_services=None, datasets=None, **kwargs) -> None: - super(CustomActivityReferenceObject, self).__init__(**kwargs) - self.linked_services = linked_services - self.datasets = datasets diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service.py deleted file mode 100644 index db14a05e7ad1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service.py +++ /dev/null @@ -1,58 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class CustomDataSourceLinkedService(LinkedService): - """Custom linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. 
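# Illustrative sketch only (not part of the patch): handing linked service
# and dataset references to a custom activity via
# CustomActivityReferenceObject. Reference names are hypothetical
# placeholders.
from azure.mgmt.datafactory.models import (
    CustomActivityReferenceObject, DatasetReference, LinkedServiceReference)

reference_objects = CustomActivityReferenceObject(
    linked_services=[LinkedServiceReference(reference_name='exampleLS')],
    datasets=[DatasetReference(reference_name='exampleDataset')],
)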
- :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param type_properties: Required. Custom linked service properties. - :type type_properties: object - """ - - _validation = { - 'type': {'required': True}, - 'type_properties': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'type_properties': {'key': 'typeProperties', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(CustomDataSourceLinkedService, self).__init__(**kwargs) - self.type_properties = kwargs.get('type_properties', None) - self.type = 'CustomDataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service_py3.py deleted file mode 100644 index f7633ee28cbd..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service_py3.py +++ /dev/null @@ -1,58 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class CustomDataSourceLinkedService(LinkedService): - """Custom linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param type_properties: Required. Custom linked service properties. 
- :type type_properties: object - """ - - _validation = { - 'type': {'required': True}, - 'type_properties': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'type_properties': {'key': 'typeProperties', 'type': 'object'}, - } - - def __init__(self, *, type_properties, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: - super(CustomDataSourceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type_properties = type_properties - self.type = 'CustomDataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset.py deleted file mode 100644 index a242309c3fd1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset.py +++ /dev/null @@ -1,71 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class CustomDataset(Dataset): - """The custom dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param type_properties: Custom dataset properties. 
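# Illustrative sketch only (not part of the patch): for
# CustomDataSourceLinkedService, type_properties is an opaque object that
# the service passes through unvalidated; the payload below is a
# hypothetical placeholder.
from azure.mgmt.datafactory.models import CustomDataSourceLinkedService

linked_service = CustomDataSourceLinkedService(
    type_properties={'connectionString': '<provider-specific payload>'},
    description='Custom connector settings, passed through as-is.',
)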
- :type type_properties: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'type_properties': {'key': 'typeProperties', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(CustomDataset, self).__init__(**kwargs) - self.type_properties = kwargs.get('type_properties', None) - self.type = 'CustomDataset' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset_py3.py deleted file mode 100644 index c00dae2b2c56..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset_py3.py +++ /dev/null @@ -1,71 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class CustomDataset(Dataset): - """The custom dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param type_properties: Custom dataset properties. 
- :type type_properties: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'type_properties': {'key': 'typeProperties', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, type_properties=None, **kwargs) -> None: - super(CustomDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type_properties = type_properties - self.type = 'CustomDataset' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity.py deleted file mode 100644 index 364dfd79d71a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity.py +++ /dev/null @@ -1,98 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity import ExecutionActivity - - -class DataLakeAnalyticsUSQLActivity(ExecutionActivity): - """Data Lake Analytics U-SQL activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param script_path: Required. Case-sensitive path to folder that contains - the U-SQL script. Type: string (or Expression with resultType string). - :type script_path: object - :param script_linked_service: Required. Script linked service reference. 
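# Illustrative sketch only (not part of the patch): CustomDataset needs
# only the required linked_service_name plus free-form type_properties.
# Names and paths are hypothetical placeholders.
from azure.mgmt.datafactory.models import CustomDataset, LinkedServiceReference

dataset = CustomDataset(
    linked_service_name=LinkedServiceReference(reference_name='exampleLS'),
    type_properties={'path': 'container/folder'},
)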
- :type script_linked_service: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param degree_of_parallelism: The maximum number of nodes simultaneously - used to run the job. Default value is 1. Type: integer (or Expression with - resultType integer), minimum: 1. - :type degree_of_parallelism: object - :param priority: Determines which jobs out of all that are queued should - be selected to run first. The lower the number, the higher the priority. - Default value is 1000. Type: integer (or Expression with resultType - integer), minimum: 1. - :type priority: object - :param parameters: Parameters for U-SQL job request. - :type parameters: dict[str, object] - :param runtime_version: Runtime version of the U-SQL engine to use. Type: - string (or Expression with resultType string). - :type runtime_version: object - :param compilation_mode: Compilation mode of U-SQL. Must be one of these - values : Semantic, Full and SingleBox. Type: string (or Expression with - resultType string). - :type compilation_mode: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'script_path': {'required': True}, - 'script_linked_service': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, - 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, - 'degree_of_parallelism': {'key': 'typeProperties.degreeOfParallelism', 'type': 'object'}, - 'priority': {'key': 'typeProperties.priority', 'type': 'object'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, - 'runtime_version': {'key': 'typeProperties.runtimeVersion', 'type': 'object'}, - 'compilation_mode': {'key': 'typeProperties.compilationMode', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DataLakeAnalyticsUSQLActivity, self).__init__(**kwargs) - self.script_path = kwargs.get('script_path', None) - self.script_linked_service = kwargs.get('script_linked_service', None) - self.degree_of_parallelism = kwargs.get('degree_of_parallelism', None) - self.priority = kwargs.get('priority', None) - self.parameters = kwargs.get('parameters', None) - self.runtime_version = kwargs.get('runtime_version', None) - self.compilation_mode = kwargs.get('compilation_mode', None) - self.type = 'DataLakeAnalyticsU-SQL' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity_py3.py deleted file mode 100644 index 22623aa3622c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity_py3.py +++ /dev/null @@ -1,98 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class DataLakeAnalyticsUSQLActivity(ExecutionActivity): - """Data Lake Analytics U-SQL activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param script_path: Required. Case-sensitive path to folder that contains - the U-SQL script. Type: string (or Expression with resultType string). - :type script_path: object - :param script_linked_service: Required. Script linked service reference. - :type script_linked_service: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param degree_of_parallelism: The maximum number of nodes simultaneously - used to run the job. Default value is 1. Type: integer (or Expression with - resultType integer), minimum: 1. - :type degree_of_parallelism: object - :param priority: Determines which jobs out of all that are queued should - be selected to run first. The lower the number, the higher the priority. - Default value is 1000. Type: integer (or Expression with resultType - integer), minimum: 1. - :type priority: object - :param parameters: Parameters for U-SQL job request. - :type parameters: dict[str, object] - :param runtime_version: Runtime version of the U-SQL engine to use. Type: - string (or Expression with resultType string). - :type runtime_version: object - :param compilation_mode: Compilation mode of U-SQL. Must be one of these - values : Semantic, Full and SingleBox. Type: string (or Expression with - resultType string). 
- :type compilation_mode: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'script_path': {'required': True}, - 'script_linked_service': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, - 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, - 'degree_of_parallelism': {'key': 'typeProperties.degreeOfParallelism', 'type': 'object'}, - 'priority': {'key': 'typeProperties.priority', 'type': 'object'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, - 'runtime_version': {'key': 'typeProperties.runtimeVersion', 'type': 'object'}, - 'compilation_mode': {'key': 'typeProperties.compilationMode', 'type': 'object'}, - } - - def __init__(self, *, name: str, script_path, script_linked_service, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, degree_of_parallelism=None, priority=None, parameters=None, runtime_version=None, compilation_mode=None, **kwargs) -> None: - super(DataLakeAnalyticsUSQLActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.script_path = script_path - self.script_linked_service = script_linked_service - self.degree_of_parallelism = degree_of_parallelism - self.priority = priority - self.parameters = parameters - self.runtime_version = runtime_version - self.compilation_mode = compilation_mode - self.type = 'DataLakeAnalyticsU-SQL' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity.py deleted file mode 100644 index a49bd973e2b9..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity.py +++ /dev/null @@ -1,76 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity import ExecutionActivity - - -class DatabricksNotebookActivity(ExecutionActivity): - """DatabricksNotebook activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. 
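# Illustrative sketch only (not part of the patch): the three required
# fields are name, script_path, and script_linked_service;
# degree_of_parallelism defaults to 1 and priority to 1000 (lower number =
# higher priority). Paths and names are hypothetical placeholders.
from azure.mgmt.datafactory.models import (
    DataLakeAnalyticsUSQLActivity, LinkedServiceReference)

usql_activity = DataLakeAnalyticsUSQLActivity(
    name='SummarizeLogs',
    script_path='scripts/summarize.usql',  # case-sensitive folder path
    script_linked_service=LinkedServiceReference(reference_name='StorageLS'),
    degree_of_parallelism=3,
    priority=100,
    parameters={'in': 'input.tsv', 'out': 'output.tsv'},
)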
- :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param notebook_path: Required. The absolute path of the notebook to be - run in the Databricks Workspace. This path must begin with a slash. Type: - string (or Expression with resultType string). - :type notebook_path: object - :param base_parameters: Base parameters to be used for each run of this - job.If the notebook takes a parameter that is not specified, the default - value from the notebook will be used. - :type base_parameters: dict[str, object] - :param libraries: A list of libraries to be installed on the cluster that - will execute the job. - :type libraries: list[dict[str, object]] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'notebook_path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'notebook_path': {'key': 'typeProperties.notebookPath', 'type': 'object'}, - 'base_parameters': {'key': 'typeProperties.baseParameters', 'type': '{object}'}, - 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, - } - - def __init__(self, **kwargs): - super(DatabricksNotebookActivity, self).__init__(**kwargs) - self.notebook_path = kwargs.get('notebook_path', None) - self.base_parameters = kwargs.get('base_parameters', None) - self.libraries = kwargs.get('libraries', None) - self.type = 'DatabricksNotebook' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity_py3.py deleted file mode 100644 index 7d2d464b7a1a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity_py3.py +++ /dev/null @@ -1,76 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class DatabricksNotebookActivity(ExecutionActivity): - """DatabricksNotebook activity. 
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param notebook_path: Required. The absolute path of the notebook to be - run in the Databricks Workspace. This path must begin with a slash. Type: - string (or Expression with resultType string). - :type notebook_path: object - :param base_parameters: Base parameters to be used for each run of this - job.If the notebook takes a parameter that is not specified, the default - value from the notebook will be used. - :type base_parameters: dict[str, object] - :param libraries: A list of libraries to be installed on the cluster that - will execute the job. - :type libraries: list[dict[str, object]] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'notebook_path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'notebook_path': {'key': 'typeProperties.notebookPath', 'type': 'object'}, - 'base_parameters': {'key': 'typeProperties.baseParameters', 'type': '{object}'}, - 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, - } - - def __init__(self, *, name: str, notebook_path, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, base_parameters=None, libraries=None, **kwargs) -> None: - super(DatabricksNotebookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.notebook_path = notebook_path - self.base_parameters = base_parameters - self.libraries = libraries - self.type = 'DatabricksNotebook' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity.py deleted file mode 100644 index 51e7245d12fe..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity.py +++ /dev/null @@ -1,75 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# 
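# Illustrative sketch only (not part of the patch): notebook_path must be
# an absolute workspace path beginning with a slash; notebook parameters
# omitted from base_parameters fall back to the defaults defined in the
# notebook itself. Names and paths are hypothetical placeholders.
from azure.mgmt.datafactory.models import (
    DatabricksNotebookActivity, LinkedServiceReference)

notebook_activity = DatabricksNotebookActivity(
    name='RunNotebook',
    notebook_path='/Users/someone@example.com/etl-notebook',
    linked_service_name=LinkedServiceReference(reference_name='DatabricksLS'),
    base_parameters={'inputPath': '/mnt/raw/2019/06/07'},
)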
Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity import ExecutionActivity - - -class DatabricksSparkJarActivity(ExecutionActivity): - """DatabricksSparkJar activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param main_class_name: Required. The full name of the class containing - the main method to be executed. This class must be contained in a JAR - provided as a library. Type: string (or Expression with resultType - string). - :type main_class_name: object - :param parameters: Parameters that will be passed to the main method. - :type parameters: list[object] - :param libraries: A list of libraries to be installed on the cluster that - will execute the job. 
- :type libraries: list[dict[str, object]] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'main_class_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'main_class_name': {'key': 'typeProperties.mainClassName', 'type': 'object'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, - 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, - } - - def __init__(self, **kwargs): - super(DatabricksSparkJarActivity, self).__init__(**kwargs) - self.main_class_name = kwargs.get('main_class_name', None) - self.parameters = kwargs.get('parameters', None) - self.libraries = kwargs.get('libraries', None) - self.type = 'DatabricksSparkJar' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity_py3.py deleted file mode 100644 index 6c33f3b51d1e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity_py3.py +++ /dev/null @@ -1,75 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class DatabricksSparkJarActivity(ExecutionActivity): - """DatabricksSparkJar activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param main_class_name: Required. The full name of the class containing - the main method to be executed. This class must be contained in a JAR - provided as a library. Type: string (or Expression with resultType - string). - :type main_class_name: object - :param parameters: Parameters that will be passed to the main method. 
- :type parameters: list[object] - :param libraries: A list of libraries to be installed on the cluster that - will execute the job. - :type libraries: list[dict[str, object]] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'main_class_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'main_class_name': {'key': 'typeProperties.mainClassName', 'type': 'object'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, - 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, - } - - def __init__(self, *, name: str, main_class_name, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, parameters=None, libraries=None, **kwargs) -> None: - super(DatabricksSparkJarActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.main_class_name = main_class_name - self.parameters = parameters - self.libraries = libraries - self.type = 'DatabricksSparkJar' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity.py deleted file mode 100644 index 56178d3882c5..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity.py +++ /dev/null @@ -1,75 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity import ExecutionActivity - - -class DatabricksSparkPythonActivity(ExecutionActivity): - """DatabricksSparkPython activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. 
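# Illustrative sketch only (not part of the patch): main_class_name must
# live in a JAR supplied through libraries (a list of library specs, each
# a dict). The class name and DBFS path are hypothetical placeholders.
from azure.mgmt.datafactory.models import (
    DatabricksSparkJarActivity, LinkedServiceReference)

jar_activity = DatabricksSparkJarActivity(
    name='RunSparkJar',
    main_class_name='com.example.SparkJob',
    parameters=['--date', '2019-06-07'],
    libraries=[{'jar': 'dbfs:/FileStore/jars/spark-job.jar'}],
    linked_service_name=LinkedServiceReference(reference_name='DatabricksLS'),
)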
- :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param python_file: Required. The URI of the Python file to be executed. - DBFS paths are supported. Type: string (or Expression with resultType - string). - :type python_file: object - :param parameters: Command line parameters that will be passed to the - Python file. - :type parameters: list[object] - :param libraries: A list of libraries to be installed on the cluster that - will execute the job. - :type libraries: list[dict[str, object]] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'python_file': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'python_file': {'key': 'typeProperties.pythonFile', 'type': 'object'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, - 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, - } - - def __init__(self, **kwargs): - super(DatabricksSparkPythonActivity, self).__init__(**kwargs) - self.python_file = kwargs.get('python_file', None) - self.parameters = kwargs.get('parameters', None) - self.libraries = kwargs.get('libraries', None) - self.type = 'DatabricksSparkPython' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity_py3.py deleted file mode 100644 index 5b16d0d5e9ef..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity_py3.py +++ /dev/null @@ -1,75 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class DatabricksSparkPythonActivity(ExecutionActivity): - """DatabricksSparkPython activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. 
- :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param python_file: Required. The URI of the Python file to be executed. - DBFS paths are supported. Type: string (or Expression with resultType - string). - :type python_file: object - :param parameters: Command line parameters that will be passed to the - Python file. - :type parameters: list[object] - :param libraries: A list of libraries to be installed on the cluster that - will execute the job. - :type libraries: list[dict[str, object]] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'python_file': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'python_file': {'key': 'typeProperties.pythonFile', 'type': 'object'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, - 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, - } - - def __init__(self, *, name: str, python_file, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, parameters=None, libraries=None, **kwargs) -> None: - super(DatabricksSparkPythonActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.python_file = python_file - self.parameters = parameters - self.libraries = libraries - self.type = 'DatabricksSparkPython' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py deleted file mode 100644 index 036e1912647e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class Dataset(Model): - """The Azure Data Factory nested object which identifies data within different - data stores, such as tables, files, folders, and documents. - - You probably want to use the sub-classes and not this class directly. 
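# Illustrative sketch only (not part of the patch): python_file takes a
# DBFS URI and parameters are plain command-line arguments. Paths and
# names are hypothetical placeholders.
from azure.mgmt.datafactory.models import (
    DatabricksSparkPythonActivity, LinkedServiceReference)

python_activity = DatabricksSparkPythonActivity(
    name='RunPySparkScript',
    python_file='dbfs:/FileStore/scripts/job.py',
    parameters=['--env', 'test'],
    linked_service_name=LinkedServiceReference(reference_name='DatabricksLS'),
)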
Known - sub-classes are: GoogleAdWordsObjectDataset, AzureDataExplorerTableDataset, - OracleServiceCloudObjectDataset, DynamicsAXResourceDataset, - ResponsysObjectDataset, SalesforceMarketingCloudObjectDataset, - VerticaTableDataset, NetezzaTableDataset, ZohoObjectDataset, - XeroObjectDataset, SquareObjectDataset, SparkObjectDataset, - ShopifyObjectDataset, ServiceNowObjectDataset, QuickBooksObjectDataset, - PrestoObjectDataset, PhoenixObjectDataset, PaypalObjectDataset, - MarketoObjectDataset, AzureMariaDBTableDataset, MariaDBTableDataset, - MagentoObjectDataset, JiraObjectDataset, ImpalaObjectDataset, - HubspotObjectDataset, HiveObjectDataset, HBaseObjectDataset, - GreenplumTableDataset, GoogleBigQueryObjectDataset, EloquaObjectDataset, - DrillTableDataset, CouchbaseTableDataset, ConcurObjectDataset, - AzurePostgreSqlTableDataset, AmazonMWSObjectDataset, HttpDataset, - AzureSearchIndexDataset, WebTableDataset, SapTableResourceDataset, - RestResourceDataset, SqlServerTableDataset, SapOpenHubTableDataset, - SapHanaTableDataset, SapEccResourceDataset, - SapCloudForCustomerResourceDataset, SapBwCubeDataset, SybaseTableDataset, - SalesforceServiceCloudObjectDataset, SalesforceObjectDataset, - MicrosoftAccessTableDataset, PostgreSqlTableDataset, MySqlTableDataset, - OdbcTableDataset, InformixTableDataset, RelationalTableDataset, - AzureMySqlTableDataset, TeradataTableDataset, OracleTableDataset, - ODataResourceDataset, CosmosDbMongoDbApiCollectionDataset, - MongoDbV2CollectionDataset, MongoDbCollectionDataset, FileShareDataset, - Office365Dataset, AzureBlobFSDataset, AzureDataLakeStoreDataset, - CommonDataServiceForAppsEntityDataset, DynamicsCrmEntityDataset, - DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, - CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, - AzureSqlTableDataset, AzureTableDataset, AzureBlobDataset, BinaryDataset, - DelimitedTextDataset, ParquetDataset, AvroDataset, AmazonS3Dataset - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. 
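The _subtype_map below is what lets msrest turn a generic Dataset payload into the matching subclass at deserialization time. A minimal sketch, assuming the models are registered with an msrest Deserializer the same way the generated client does it:

    from msrest import Deserializer
    from azure.mgmt.datafactory import models

    client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
    deserialize = Deserializer(client_models)

    raw = {
        'type': 'AzureBlob',  # discriminator value from the map below
        'linkedServiceName': {'referenceName': 'BlobLS',
                              'type': 'LinkedServiceReference'},
    }
    ds = deserialize('Dataset', raw)
    assert isinstance(ds, models.AzureBlobDataset)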
- :type type: str - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 
'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} - } - - def __init__(self, **kwargs): - super(Dataset, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.description = kwargs.get('description', None) - self.structure = kwargs.get('structure', None) - self.schema = kwargs.get('schema', None) - self.linked_service_name = kwargs.get('linked_service_name', None) - self.parameters = kwargs.get('parameters', None) - self.annotations = kwargs.get('annotations', None) - self.folder = kwargs.get('folder', None) - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression.py deleted file mode 100644 index 71b041c5eb5b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression.py +++ /dev/null @@ -1,38 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_compression import DatasetCompression - - -class DatasetBZip2Compression(DatasetCompression): - """The BZip2 compression method used on a dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(DatasetBZip2Compression, self).__init__(**kwargs) - self.type = 'BZip2' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression_py3.py deleted file mode 100644 index f97af4588e0a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression_py3.py +++ /dev/null @@ -1,38 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_compression_py3 import DatasetCompression - - -class DatasetBZip2Compression(DatasetCompression): - """The BZip2 compression method used on a dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(DatasetBZip2Compression, self).__init__(additional_properties=additional_properties, **kwargs) - self.type = 'BZip2' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression.py deleted file mode 100644 index c0c4e3d52624..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression.py +++ /dev/null @@ -1,47 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class DatasetCompression(Model): - """The compression method used on a dataset. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DatasetZipDeflateCompression, DatasetDeflateCompression, - DatasetGZipCompression, DatasetBZip2Compression - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. 
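Because `type` is a constant filled by each subclass, callers never pass it themselves; a minimal sketch:

    from azure.mgmt.datafactory.models import DatasetBZip2Compression

    comp = DatasetBZip2Compression()
    assert comp.type == 'BZip2'  # pinned by the subclass, per the subtype map below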
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'ZipDeflate': 'DatasetZipDeflateCompression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'BZip2': 'DatasetBZip2Compression'} - } - - def __init__(self, **kwargs): - super(DatasetCompression, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression_py3.py deleted file mode 100644 index 3b10abc69abf..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression_py3.py +++ /dev/null @@ -1,47 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class DatasetCompression(Model): - """The compression method used on a dataset. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DatasetZipDeflateCompression, DatasetDeflateCompression, - DatasetGZipCompression, DatasetBZip2Compression - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'ZipDeflate': 'DatasetZipDeflateCompression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'BZip2': 'DatasetBZip2Compression'} - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(DatasetCompression, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py deleted file mode 100644 index 9c97e2bfa5e3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py +++ /dev/null @@ -1,42 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from .dataset_compression import DatasetCompression - - -class DatasetDeflateCompression(DatasetCompression): - """The Deflate compression method used on a dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. - :type type: str - :param level: The Deflate compression level. - :type level: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DatasetDeflateCompression, self).__init__(**kwargs) - self.level = kwargs.get('level', None) - self.type = 'Deflate' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py deleted file mode 100644 index 11d00081bc1c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py +++ /dev/null @@ -1,42 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_compression_py3 import DatasetCompression - - -class DatasetDeflateCompression(DatasetCompression): - """The Deflate compression method used on a dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. - :type type: str - :param level: The Deflate compression level. - :type level: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None: - super(DatasetDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs) - self.level = level - self.type = 'Deflate' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder.py deleted file mode 100644 index 882c84a1e84c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder.py +++ /dev/null @@ -1,29 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
-# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class DatasetFolder(Model): - """The folder that this Dataset is in. If not specified, Dataset will appear - at the root level. - - :param name: The name of the folder that this Dataset is in. - :type name: str - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(DatasetFolder, self).__init__(**kwargs) - self.name = kwargs.get('name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder_py3.py deleted file mode 100644 index ea7fc313f967..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder_py3.py +++ /dev/null @@ -1,29 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class DatasetFolder(Model): - """The folder that this Dataset is in. If not specified, Dataset will appear - at the root level. - - :param name: The name of the folder that this Dataset is in. - :type name: str - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - } - - def __init__(self, *, name: str=None, **kwargs) -> None: - super(DatasetFolder, self).__init__(**kwargs) - self.name = name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py deleted file mode 100644 index 4925127c7f0f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py +++ /dev/null @@ -1,42 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_compression import DatasetCompression - - -class DatasetGZipCompression(DatasetCompression): - """The GZip compression method used on a dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. - :type type: str - :param level: The GZip compression level. 
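The compression `level` is passed through untyped; the service accepts 'Optimal' or 'Fastest', literals taken from the service documentation rather than from this patch. A sketch:

    from azure.mgmt.datafactory.models import DatasetGZipCompression

    compression = DatasetGZipCompression(level='Optimal')  # 'Optimal' or 'Fastest' (assumed)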
- :type level: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DatasetGZipCompression, self).__init__(**kwargs) - self.level = kwargs.get('level', None) - self.type = 'GZip' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py deleted file mode 100644 index 97346e06366d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py +++ /dev/null @@ -1,42 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_compression_py3 import DatasetCompression - - -class DatasetGZipCompression(DatasetCompression): - """The GZip compression method used on a dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. - :type type: str - :param level: The GZip compression level. - :type level: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None: - super(DatasetGZipCompression, self).__init__(additional_properties=additional_properties, **kwargs) - self.level = level - self.type = 'GZip' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location.py deleted file mode 100644 index 2c318a91cccb..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location.py +++ /dev/null @@ -1,49 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class DatasetLocation(Model): - """Dataset location. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. 
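In this version DatasetLocation is a single class whose `type` is a plain required string rather than a polymorphic discriminator. A sketch; 'AzureBlobFSLocation' is an assumed type value chosen to match the ADLS Gen2 theme of this PR, and the paths are placeholders:

    from azure.mgmt.datafactory.models import DatasetLocation

    location = DatasetLocation(
        type='AzureBlobFSLocation',   # assumed location-type string
        folder_path='filesystem/raw',
        file_name='events.json',
    )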
- :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). - :type file_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DatasetLocation, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = kwargs.get('type', None) - self.folder_path = kwargs.get('folder_path', None) - self.file_name = kwargs.get('file_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location_py3.py deleted file mode 100644 index d4e32d753197..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location_py3.py +++ /dev/null @@ -1,49 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class DatasetLocation(Model): - """Dataset location. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). - :type file_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: - super(DatasetLocation, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type = type - self.folder_path = folder_path - self.file_name = file_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py deleted file mode 100644 index 4a7ab9e3db12..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. 
All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class Dataset(Model): - """The Azure Data Factory nested object which identifies data within different - data stores, such as tables, files, folders, and documents. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: GoogleAdWordsObjectDataset, AzureDataExplorerTableDataset, - OracleServiceCloudObjectDataset, DynamicsAXResourceDataset, - ResponsysObjectDataset, SalesforceMarketingCloudObjectDataset, - VerticaTableDataset, NetezzaTableDataset, ZohoObjectDataset, - XeroObjectDataset, SquareObjectDataset, SparkObjectDataset, - ShopifyObjectDataset, ServiceNowObjectDataset, QuickBooksObjectDataset, - PrestoObjectDataset, PhoenixObjectDataset, PaypalObjectDataset, - MarketoObjectDataset, AzureMariaDBTableDataset, MariaDBTableDataset, - MagentoObjectDataset, JiraObjectDataset, ImpalaObjectDataset, - HubspotObjectDataset, HiveObjectDataset, HBaseObjectDataset, - GreenplumTableDataset, GoogleBigQueryObjectDataset, EloquaObjectDataset, - DrillTableDataset, CouchbaseTableDataset, ConcurObjectDataset, - AzurePostgreSqlTableDataset, AmazonMWSObjectDataset, HttpDataset, - AzureSearchIndexDataset, WebTableDataset, SapTableResourceDataset, - RestResourceDataset, SqlServerTableDataset, SapOpenHubTableDataset, - SapHanaTableDataset, SapEccResourceDataset, - SapCloudForCustomerResourceDataset, SapBwCubeDataset, SybaseTableDataset, - SalesforceServiceCloudObjectDataset, SalesforceObjectDataset, - MicrosoftAccessTableDataset, PostgreSqlTableDataset, MySqlTableDataset, - OdbcTableDataset, InformixTableDataset, RelationalTableDataset, - AzureMySqlTableDataset, TeradataTableDataset, OracleTableDataset, - ODataResourceDataset, CosmosDbMongoDbApiCollectionDataset, - MongoDbV2CollectionDataset, MongoDbCollectionDataset, FileShareDataset, - Office365Dataset, AzureBlobFSDataset, AzureDataLakeStoreDataset, - CommonDataServiceForAppsEntityDataset, DynamicsCrmEntityDataset, - DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, - CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, - AzureSqlTableDataset, AzureTableDataset, AzureBlobDataset, BinaryDataset, - DelimitedTextDataset, ParquetDataset, AvroDataset, AmazonS3Dataset - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 
'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: - super(Dataset, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.description = description - self.structure = structure - self.schema = schema - self.linked_service_name = linked_service_name - self.parameters = parameters - self.annotations = annotations - self.folder = folder - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference.py deleted file mode 100644 index ca3d385f31ce..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference.py +++ /dev/null @@ -1,48 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class DatasetReference(Model): - """Dataset reference type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Dataset reference type. Default value: - "DatasetReference" . - :vartype type: str - :param reference_name: Required. Reference dataset name. - :type reference_name: str - :param parameters: Arguments for dataset. 
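A dataset reference is how activities and pipelines point at a dataset by name, with `parameters` feeding the dataset's parameter specification. A minimal sketch with placeholder names:

    from azure.mgmt.datafactory.models import DatasetReference

    ref = DatasetReference(
        reference_name='DailySalesDataset',    # placeholder dataset name
        parameters={'runDate': '2019-06-07'},
    )
    assert ref.type == 'DatasetReference'  # class-level constant, not a constructor argument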
- :type parameters: dict[str, object] - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, - } - - type = "DatasetReference" - - def __init__(self, **kwargs): - super(DatasetReference, self).__init__(**kwargs) - self.reference_name = kwargs.get('reference_name', None) - self.parameters = kwargs.get('parameters', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference_py3.py deleted file mode 100644 index 80162fd77da1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference_py3.py +++ /dev/null @@ -1,48 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class DatasetReference(Model): - """Dataset reference type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Dataset reference type. Default value: - "DatasetReference" . - :vartype type: str - :param reference_name: Required. Reference dataset name. - :type reference_name: str - :param parameters: Arguments for dataset. - :type parameters: dict[str, object] - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, - } - - type = "DatasetReference" - - def __init__(self, *, reference_name: str, parameters=None, **kwargs) -> None: - super(DatasetReference, self).__init__(**kwargs) - self.reference_name = reference_name - self.parameters = parameters diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource.py deleted file mode 100644 index a68fb563e425..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource.py +++ /dev/null @@ -1,53 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .sub_resource import SubResource - - -class DatasetResource(SubResource): - """Dataset resource type. 
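The resource wrapper pairs server-populated metadata (id, name, type, etag) with the Dataset payload in `properties`. A sketch that wraps one of the concrete dataset models from this patch, using placeholder names:

    from azure.mgmt.datafactory.models import (
        AzureBlobDataset, DatasetResource, LinkedServiceReference)

    blob_ds = AzureBlobDataset(
        linked_service_name=LinkedServiceReference(reference_name='BlobStorageLS'),
        folder_path='container/raw',   # placeholder path
    )
    resource = DatasetResource(properties=blob_ds)  # id/name/type/etag are filled by the server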
- - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Dataset properties. - :type properties: ~azure.mgmt.datafactory.models.Dataset - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'Dataset'}, - } - - def __init__(self, **kwargs): - super(DatasetResource, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource_paged.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource_paged.py deleted file mode 100644 index 9cedba8bbce9..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource_paged.py +++ /dev/null @@ -1,27 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.paging import Paged - - -class DatasetResourcePaged(Paged): - """ - A paging container for iterating over a list of :class:`DatasetResource <azure.mgmt.datafactory.models.DatasetResource>` object - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'current_page': {'key': 'value', 'type': '[DatasetResource]'} - } - - def __init__(self, *args, **kwargs): - - super(DatasetResourcePaged, self).__init__(*args, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource_py3.py deleted file mode 100644 index 6eb099dcb884..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource_py3.py +++ /dev/null @@ -1,53 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .sub_resource_py3 import SubResource - - -class DatasetResource(SubResource): - """Dataset resource type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure.
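DatasetResourcePaged is the return type of the dataset list operation, and iterating it follows nextLink transparently. A sketch, assuming service-principal authentication and placeholder identifiers throughout:

    from azure.common.credentials import ServicePrincipalCredentials
    from azure.mgmt.datafactory import DataFactoryManagementClient

    credentials = ServicePrincipalCredentials(
        client_id='<client-id>', secret='<secret>', tenant='<tenant-id>')  # placeholders
    client = DataFactoryManagementClient(credentials, '<subscription-id>')

    for ds in client.datasets.list_by_factory('my-rg', 'my-factory'):  # placeholder names
        print(ds.name, ds.properties.type)  # each page item is a DatasetResource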
- - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Dataset properties. - :type properties: ~azure.mgmt.datafactory.models.Dataset - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'Dataset'}, - } - - def __init__(self, *, properties, **kwargs) -> None: - super(DatasetResource, self).__init__(**kwargs) - self.properties = properties diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format.py deleted file mode 100644 index b3160565230d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class DatasetStorageFormat(Model): - """The format definition of a storage. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ParquetFormat, OrcFormat, AvroFormat, JsonFormat, - TextFormat - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param serializer: Serializer. Type: string (or Expression with resultType - string). - :type serializer: object - :param deserializer: Deserializer. Type: string (or Expression with - resultType string). - :type deserializer: object - :param type: Required. Constant filled by server. 
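For the storage formats the discriminator values equal the class names ('TextFormat' maps to TextFormat, and so on). A sketch attaching a format to a blob dataset; `column_delimiter` is an assumed TextFormat property that this hunk does not show:

    from azure.mgmt.datafactory.models import (
        AzureBlobDataset, LinkedServiceReference, TextFormat)

    ds = AzureBlobDataset(
        linked_service_name=LinkedServiceReference(reference_name='BlobStorageLS'),
        folder_path='container/raw',              # placeholder path
        format=TextFormat(column_delimiter=','),  # serializes with type 'TextFormat'
    )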
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'ParquetFormat': 'ParquetFormat', 'OrcFormat': 'OrcFormat', 'AvroFormat': 'AvroFormat', 'JsonFormat': 'JsonFormat', 'TextFormat': 'TextFormat'} - } - - def __init__(self, **kwargs): - super(DatasetStorageFormat, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.serializer = kwargs.get('serializer', None) - self.deserializer = kwargs.get('deserializer', None) - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format_py3.py deleted file mode 100644 index faf746642d9e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class DatasetStorageFormat(Model): - """The format definition of a storage. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ParquetFormat, OrcFormat, AvroFormat, JsonFormat, - TextFormat - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param serializer: Serializer. Type: string (or Expression with resultType - string). - :type serializer: object - :param deserializer: Deserializer. Type: string (or Expression with - resultType string). - :type deserializer: object - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'ParquetFormat': 'ParquetFormat', 'OrcFormat': 'OrcFormat', 'AvroFormat': 'AvroFormat', 'JsonFormat': 'JsonFormat', 'TextFormat': 'TextFormat'} - } - - def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, **kwargs) -> None: - super(DatasetStorageFormat, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.serializer = serializer - self.deserializer = deserializer - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py deleted file mode 100644 index ed80bf3cbcf2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py +++ /dev/null @@ -1,42 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_compression import DatasetCompression - - -class DatasetZipDeflateCompression(DatasetCompression): - """The ZipDeflate compression method used on a dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. - :type type: str - :param level: The ZipDeflate compression level. - :type level: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DatasetZipDeflateCompression, self).__init__(**kwargs) - self.level = kwargs.get('level', None) - self.type = 'ZipDeflate' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py deleted file mode 100644 index 20abd6fe1088..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py +++ /dev/null @@ -1,42 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from .dataset_compression_py3 import DatasetCompression - - -class DatasetZipDeflateCompression(DatasetCompression): - """The ZipDeflate compression method used on a dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. - :type type: str - :param level: The ZipDeflate compression level. - :type level: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None: - super(DatasetZipDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs) - self.level = level - self.type = 'ZipDeflate' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service.py deleted file mode 100644 index d163d2b93c18..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service.py +++ /dev/null @@ -1,86 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class Db2LinkedService(LinkedService): - """Linked service for DB2 data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param server: Required. Server name for connection. Type: string (or - Expression with resultType string). - :type server: object - :param database: Required. Database name for connection. Type: string (or - Expression with resultType string). - :type database: object - :param authentication_type: AuthenticationType to be used for connection. - Possible values include: 'Basic' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.Db2AuthenticationType - :param username: Username for authentication. Type: string (or Expression - with resultType string). - :type username: object - :param password: Password for authentication. 
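Putting the DB2 linked service together, with SecureString standing in for the SecretBase password; all connection values are placeholders:

    from azure.mgmt.datafactory.models import Db2LinkedService, SecureString

    db2_ls = Db2LinkedService(
        server='db2.example.internal',   # placeholder host
        database='SALES',                # placeholder database
        authentication_type='Basic',
        username='etl_user',
        password=SecureString(value='<secret>'),  # any SecretBase works; inline for the sketch
    )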
- :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'database': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(Db2LinkedService, self).__init__(**kwargs) - self.server = kwargs.get('server', None) - self.database = kwargs.get('database', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Db2' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service_py3.py deleted file mode 100644 index 44d784fa9bde..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service_py3.py +++ /dev/null @@ -1,86 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class Db2LinkedService(LinkedService): - """Linked service for DB2 data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param server: Required. 
Server name for connection. Type: string (or - Expression with resultType string). - :type server: object - :param database: Required. Database name for connection. Type: string (or - Expression with resultType string). - :type database: object - :param authentication_type: AuthenticationType to be used for connection. - Possible values include: 'Basic' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.Db2AuthenticationType - :param username: Username for authentication. Type: string (or Expression - with resultType string). - :type username: object - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'database': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, server, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(Db2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.server = server - self.database = database - self.authentication_type = authentication_type - self.username = username - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'Db2' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source.py deleted file mode 100644 index a6e8c31ffa1f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class Db2Source(CopySource): - """A copy activity source for Db2 databases. 
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param source_retry_count: Source retry count. Type: integer (or
-     Expression with resultType integer).
-    :type source_retry_count: object
-    :param source_retry_wait: Source retry wait. Type: string (or Expression
-     with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type source_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the source data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param query: Database query. Type: string (or Expression with resultType
-     string).
-    :type query: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
-        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'query': {'key': 'query', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(Db2Source, self).__init__(**kwargs)
-        self.query = kwargs.get('query', None)
-        self.type = 'Db2Source'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source_py3.py
deleted file mode 100644
index 20b169699ae0..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source_py3.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .copy_source_py3 import CopySource
-
-
-class Db2Source(CopySource):
-    """A copy activity source for Db2 databases.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param source_retry_count: Source retry count. Type: integer (or
-     Expression with resultType integer).
-    :type source_retry_count: object
-    :param source_retry_wait: Source retry wait. Type: string (or Expression
-     with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type source_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the source data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param query: Database query. Type: string (or Expression with resultType
-     string).
-    :type query: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
-        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'query': {'key': 'query', 'type': 'object'},
-    }
-
-    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
-        super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
-        self.query = query
-        self.type = 'Db2Source'
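The two Db2Source files above differ only in constructor style (Python 2 **kwargs versus Python 3 keyword-only arguments). A minimal sketch of wiring the Db2 models together; the server, database, and credential values are hypothetical:

    from azure.mgmt.datafactory.models import (
        Db2LinkedService, Db2Source, SecureString)

    # SecureString is a SecretBase subtype, which keeps the password out of
    # plain-text JSON when the linked service is serialized.
    db2_linked_service = Db2LinkedService(
        server='db2prod.example.com',        # hypothetical host
        database='SAMPLE',                   # hypothetical database
        authentication_type='Basic',
        username='db2inst1',
        password=SecureString(value='<password>'),
    )

    # The source is referenced from a copy activity's 'source' property.
    db2_source = Db2Source(query='SELECT * FROM ORDERS')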
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity.py
deleted file mode 100644
index 34ba33a414d5..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity.py
+++ /dev/null
@@ -1,87 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .execution_activity import ExecutionActivity
-
-
-class DeleteActivity(ExecutionActivity):
-    """Delete activity.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param name: Required. Activity name.
-    :type name: str
-    :param description: Activity description.
-    :type description: str
-    :param depends_on: Activity depends on condition.
-    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
-    :param user_properties: Activity user properties.
-    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param linked_service_name: Linked service reference.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param policy: Activity policy.
-    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
-    :param recursive: If true, files or sub-folders under the current folder
-     path will be deleted recursively. Default is false. Type: boolean (or
-     Expression with resultType boolean).
-    :type recursive: object
-    :param max_concurrent_connections: The maximum number of concurrent
-     connections to the data source.
-    :type max_concurrent_connections: int
-    :param enable_logging: Whether to record detailed logs of delete-activity
-     execution. Default value is false. Type: boolean (or Expression with
-     resultType boolean).
-    :type enable_logging: object
-    :param log_storage_settings: Log storage settings the customer needs to
-     provide when enableLogging is true.
-    :type log_storage_settings:
-     ~azure.mgmt.datafactory.models.LogStorageSettings
-    :param dataset: Required. Delete activity dataset reference.
-    :type dataset: ~azure.mgmt.datafactory.models.DatasetReference
-    """
-
-    _validation = {
-        'name': {'required': True},
-        'type': {'required': True},
-        'max_concurrent_connections': {'minimum': 1},
-        'dataset': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'name': {'key': 'name', 'type': 'str'},
-        'description': {'key': 'description', 'type': 'str'},
-        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
-        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
-        'recursive': {'key': 'typeProperties.recursive', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'typeProperties.maxConcurrentConnections', 'type': 'int'},
-        'enable_logging': {'key': 'typeProperties.enableLogging', 'type': 'object'},
-        'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'},
-        'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'},
-    }
-
-    def __init__(self, **kwargs):
-        super(DeleteActivity, self).__init__(**kwargs)
-        self.recursive = kwargs.get('recursive', None)
-        self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None)
-        self.enable_logging = kwargs.get('enable_logging', None)
-        self.log_storage_settings = kwargs.get('log_storage_settings', None)
-        self.dataset = kwargs.get('dataset', None)
-        self.type = 'Delete'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity_py3.py
deleted file mode 100644
index 5107d9a3381a..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity_py3.py
+++ /dev/null
@@ -1,87 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .execution_activity_py3 import ExecutionActivity
-
-
-class DeleteActivity(ExecutionActivity):
-    """Delete activity.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param name: Required. Activity name.
-    :type name: str
-    :param description: Activity description.
-    :type description: str
-    :param depends_on: Activity depends on condition.
-    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
-    :param user_properties: Activity user properties.
-    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param linked_service_name: Linked service reference.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param policy: Activity policy.
-    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
-    :param recursive: If true, files or sub-folders under the current folder
-     path will be deleted recursively. Default is false. Type: boolean (or
-     Expression with resultType boolean).
-    :type recursive: object
-    :param max_concurrent_connections: The maximum number of concurrent
-     connections to the data source.
-    :type max_concurrent_connections: int
-    :param enable_logging: Whether to record detailed logs of delete-activity
-     execution. Default value is false. Type: boolean (or Expression with
-     resultType boolean).
-    :type enable_logging: object
-    :param log_storage_settings: Log storage settings the customer needs to
-     provide when enableLogging is true.
-    :type log_storage_settings:
-     ~azure.mgmt.datafactory.models.LogStorageSettings
-    :param dataset: Required. Delete activity dataset reference.
-    :type dataset: ~azure.mgmt.datafactory.models.DatasetReference
-    """
-
-    _validation = {
-        'name': {'required': True},
-        'type': {'required': True},
-        'max_concurrent_connections': {'minimum': 1},
-        'dataset': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'name': {'key': 'name', 'type': 'str'},
-        'description': {'key': 'description', 'type': 'str'},
-        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
-        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
-        'recursive': {'key': 'typeProperties.recursive', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'typeProperties.maxConcurrentConnections', 'type': 'int'},
-        'enable_logging': {'key': 'typeProperties.enableLogging', 'type': 'object'},
-        'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'},
-        'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'},
-    }
-
-    def __init__(self, *, name: str, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, recursive=None, max_concurrent_connections: int=None, enable_logging=None, log_storage_settings=None, **kwargs) -> None:
-        super(DeleteActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
-        self.recursive = recursive
-        self.max_concurrent_connections = max_concurrent_connections
-        self.enable_logging = enable_logging
-        self.log_storage_settings = log_storage_settings
-        self.dataset = dataset
-        self.type = 'Delete'
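A minimal DeleteActivity sketch; the dataset reference name is hypothetical. Note that max_concurrent_connections is validated with a minimum of 1:

    from azure.mgmt.datafactory.models import DatasetReference, DeleteActivity

    delete_staging = DeleteActivity(
        name='CleanStagingFolder',
        dataset=DatasetReference(reference_name='StagingFolderDataset'),
        recursive=True,                  # also delete sub-folders
        max_concurrent_connections=1,    # must be >= 1 per _validation
    )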
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset.py
deleted file mode 100644
index bfee26fcd12c..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset.py
+++ /dev/null
@@ -1,122 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .dataset import Dataset
-
-
-class DelimitedTextDataset(Dataset):
-    """Delimited text dataset.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param description: Dataset description.
-    :type description: str
-    :param structure: Columns that define the structure of the dataset. Type:
-     array (or Expression with resultType array), itemType: DatasetDataElement.
-    :type structure: object
-    :param schema: Columns that define the physical type schema of the
-     dataset. Type: array (or Expression with resultType array), itemType:
-     DatasetSchemaDataElement.
-    :type schema: object
-    :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param parameters: Parameters for dataset.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     Dataset.
-    :type annotations: list[object]
-    :param folder: The folder that this Dataset is in. If not specified,
-     Dataset will appear at the root level.
-    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param location: Required. The location of the delimited text storage.
-    :type location: ~azure.mgmt.datafactory.models.DatasetLocation
-    :param column_delimiter: The column delimiter. Type: string (or Expression
-     with resultType string).
-    :type column_delimiter: object
-    :param row_delimiter: The row delimiter. Type: string (or Expression with
-     resultType string).
-    :type row_delimiter: object
-    :param encoding_name: The code page name of the preferred encoding. If not
-     specified, the default value is UTF-8, unless the BOM denotes another
-     Unicode encoding. Refer to the name column of the table in the following
-     link to set supported values:
-     https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string
-     (or Expression with resultType string).
-    :type encoding_name: object
-    :param compression_codec: The compression codec used for the delimited
-     text files.
-    :type compression_codec: object
-    :param compression_level: The data compression method used for
-     DelimitedText.
-    :type compression_level: object
-    :param quote_char: The quote character. Type: string (or Expression with
-     resultType string).
-    :type quote_char: object
-    :param escape_char: The escape character. Type: string (or Expression with
-     resultType string).
-    :type escape_char: object
-    :param first_row_as_header: When used as input, treat the first row of
-     data as headers. When used as output, write the headers into the output
-     as the first row of data. The default value is false. Type: boolean (or
-     Expression with resultType boolean).
-    :type first_row_as_header: object
-    :param null_value: The null value string. Type: string (or Expression with
-     resultType string).
-    :type null_value: object
-    """
-
-    _validation = {
-        'linked_service_name': {'required': True},
-        'type': {'required': True},
-        'location': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'description': {'key': 'description', 'type': 'str'},
-        'structure': {'key': 'structure', 'type': 'object'},
-        'schema': {'key': 'schema', 'type': 'object'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
-        'type': {'key': 'type', 'type': 'str'},
-        'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'},
-        'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'},
-        'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'},
-        'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'},
-        'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'},
-        'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'},
-        'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'},
-        'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'},
-        'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'},
-        'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(DelimitedTextDataset, self).__init__(**kwargs)
-        self.location = kwargs.get('location', None)
-        self.column_delimiter = kwargs.get('column_delimiter', None)
-        self.row_delimiter = kwargs.get('row_delimiter', None)
-        self.encoding_name = kwargs.get('encoding_name', None)
-        self.compression_codec = kwargs.get('compression_codec', None)
-        self.compression_level = kwargs.get('compression_level', None)
-        self.quote_char = kwargs.get('quote_char', None)
-        self.escape_char = kwargs.get('escape_char', None)
-        self.first_row_as_header = kwargs.get('first_row_as_header', None)
-        self.null_value = kwargs.get('null_value', None)
-        self.type = 'DelimitedText'
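A minimal DelimitedTextDataset sketch, assuming DatasetLocation exposes type, folder_path, and file_name like the sibling settings models in this version (the location model is not yet polymorphic, so its type is a plain user-supplied string); the linked service name and paths are hypothetical:

    from azure.mgmt.datafactory.models import (
        DatasetLocation, DelimitedTextDataset, LinkedServiceReference)

    # 'AzureBlobFSLocation' targets ADLS Gen2 storage, in line with the
    # ADLS Gen2 support this change introduces.
    orders_csv = DelimitedTextDataset(
        linked_service_name=LinkedServiceReference(reference_name='AdlsGen2Ls'),
        location=DatasetLocation(
            type='AzureBlobFSLocation',
            folder_path='raw/sales',         # hypothetical path
            file_name='orders.csv'),
        column_delimiter=',',
        first_row_as_header=True,
    )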
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset_py3.py
deleted file mode 100644
index c2597e6a022b..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset_py3.py
+++ /dev/null
@@ -1,122 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .dataset_py3 import Dataset
-
-
-class DelimitedTextDataset(Dataset):
-    """Delimited text dataset.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param description: Dataset description.
-    :type description: str
-    :param structure: Columns that define the structure of the dataset. Type:
-     array (or Expression with resultType array), itemType: DatasetDataElement.
-    :type structure: object
-    :param schema: Columns that define the physical type schema of the
-     dataset. Type: array (or Expression with resultType array), itemType:
-     DatasetSchemaDataElement.
-    :type schema: object
-    :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param parameters: Parameters for dataset.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     Dataset.
-    :type annotations: list[object]
-    :param folder: The folder that this Dataset is in. If not specified,
-     Dataset will appear at the root level.
-    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param location: Required. The location of the delimited text storage.
-    :type location: ~azure.mgmt.datafactory.models.DatasetLocation
-    :param column_delimiter: The column delimiter. Type: string (or Expression
-     with resultType string).
-    :type column_delimiter: object
-    :param row_delimiter: The row delimiter. Type: string (or Expression with
-     resultType string).
-    :type row_delimiter: object
-    :param encoding_name: The code page name of the preferred encoding. If not
-     specified, the default value is UTF-8, unless the BOM denotes another
-     Unicode encoding. Refer to the name column of the table in the following
-     link to set supported values:
-     https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string
-     (or Expression with resultType string).
-    :type encoding_name: object
-    :param compression_codec: The compression codec used for the delimited
-     text files.
-    :type compression_codec: object
-    :param compression_level: The data compression method used for
-     DelimitedText.
-    :type compression_level: object
-    :param quote_char: The quote character. Type: string (or Expression with
-     resultType string).
-    :type quote_char: object
-    :param escape_char: The escape character. Type: string (or Expression with
-     resultType string).
-    :type escape_char: object
-    :param first_row_as_header: When used as input, treat the first row of
-     data as headers. When used as output, write the headers into the output
-     as the first row of data. The default value is false. Type: boolean (or
-     Expression with resultType boolean).
-    :type first_row_as_header: object
-    :param null_value: The null value string. Type: string (or Expression with
-     resultType string).
- :type null_value: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'location': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'}, - 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, - 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, - 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'}, - 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, - 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, - 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, - 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, column_delimiter=None, row_delimiter=None, encoding_name=None, compression_codec=None, compression_level=None, quote_char=None, escape_char=None, first_row_as_header=None, null_value=None, **kwargs) -> None: - super(DelimitedTextDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.location = location - self.column_delimiter = column_delimiter - self.row_delimiter = row_delimiter - self.encoding_name = encoding_name - self.compression_codec = compression_codec - self.compression_level = compression_level - self.quote_char = quote_char - self.escape_char = escape_char - self.first_row_as_header = first_row_as_header - self.null_value = null_value - self.type = 'DelimitedText' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings.py deleted file mode 100644 index 364b103c426a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings.py +++ /dev/null @@ -1,43 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from .format_read_settings import FormatReadSettings - - -class DelimitedTextReadSettings(FormatReadSettings): - """Delimited text read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param skip_line_count: Indicates the number of non-empty rows to skip - when reading data from input files. Type: integer (or Expression with - resultType integer). - :type skip_line_count: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DelimitedTextReadSettings, self).__init__(**kwargs) - self.skip_line_count = kwargs.get('skip_line_count', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings_py3.py deleted file mode 100644 index 62aa0327cfb9..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings_py3.py +++ /dev/null @@ -1,43 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .format_read_settings_py3 import FormatReadSettings - - -class DelimitedTextReadSettings(FormatReadSettings): - """Delimited text read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param skip_line_count: Indicates the number of non-empty rows to skip - when reading data from input files. Type: integer (or Expression with - resultType integer). 
- :type skip_line_count: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, skip_line_count=None, **kwargs) -> None: - super(DelimitedTextReadSettings, self).__init__(additional_properties=additional_properties, type=type, **kwargs) - self.skip_line_count = skip_line_count diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py deleted file mode 100644 index 15e0e590b4ee..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py +++ /dev/null @@ -1,70 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink import CopySink - - -class DelimitedTextSink(CopySink): - """A copy activity DelimitedText sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: DelimitedText store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - :param format_settings: DelimitedText format settings. 
- :type format_settings: - ~azure.mgmt.datafactory.models.DelimitedTextWriteSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'}, - } - - def __init__(self, **kwargs): - super(DelimitedTextSink, self).__init__(**kwargs) - self.store_settings = kwargs.get('store_settings', None) - self.format_settings = kwargs.get('format_settings', None) - self.type = 'DelimitedTextSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py deleted file mode 100644 index 6481f8021527..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py +++ /dev/null @@ -1,70 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class DelimitedTextSink(CopySink): - """A copy activity DelimitedText sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: DelimitedText store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - :param format_settings: DelimitedText format settings. 
- :type format_settings: - ~azure.mgmt.datafactory.models.DelimitedTextWriteSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: - super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.store_settings = store_settings - self.format_settings = format_settings - self.type = 'DelimitedTextSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py deleted file mode 100644 index 10a842ca374a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py +++ /dev/null @@ -1,61 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class DelimitedTextSource(CopySource): - """A copy activity DelimitedText source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: DelimitedText store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - :param format_settings: DelimitedText format settings. 
- :type format_settings: - ~azure.mgmt.datafactory.models.DelimitedTextReadSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'}, - } - - def __init__(self, **kwargs): - super(DelimitedTextSource, self).__init__(**kwargs) - self.store_settings = kwargs.get('store_settings', None) - self.format_settings = kwargs.get('format_settings', None) - self.type = 'DelimitedTextSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py deleted file mode 100644 index e551e32c847e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py +++ /dev/null @@ -1,61 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class DelimitedTextSource(CopySource): - """A copy activity DelimitedText source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: DelimitedText store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - :param format_settings: DelimitedText format settings. 
- :type format_settings: - ~azure.mgmt.datafactory.models.DelimitedTextReadSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: - super(DelimitedTextSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.store_settings = store_settings - self.format_settings = format_settings - self.type = 'DelimitedTextSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings.py deleted file mode 100644 index 5e0d8db319e5..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings.py +++ /dev/null @@ -1,49 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .format_write_settings import FormatWriteSettings - - -class DelimitedTextWriteSettings(FormatWriteSettings): - """Delimited text write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str - :param quote_all_text: Indicates whether string values should always be - enclosed with quotes. Type: boolean (or Expression with resultType - boolean). - :type quote_all_text: object - :param file_extension: Required. The file extension used to create the - files. Type: string (or Expression with resultType string). 
- :type file_extension: object - """ - - _validation = { - 'type': {'required': True}, - 'file_extension': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, - 'file_extension': {'key': 'fileExtension', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DelimitedTextWriteSettings, self).__init__(**kwargs) - self.quote_all_text = kwargs.get('quote_all_text', None) - self.file_extension = kwargs.get('file_extension', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings_py3.py deleted file mode 100644 index 2be019ab1e6a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings_py3.py +++ /dev/null @@ -1,49 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .format_write_settings_py3 import FormatWriteSettings - - -class DelimitedTextWriteSettings(FormatWriteSettings): - """Delimited text write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str - :param quote_all_text: Indicates whether string values should always be - enclosed with quotes. Type: boolean (or Expression with resultType - boolean). - :type quote_all_text: object - :param file_extension: Required. The file extension used to create the - files. Type: string (or Expression with resultType string). - :type file_extension: object - """ - - _validation = { - 'type': {'required': True}, - 'file_extension': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, - 'file_extension': {'key': 'fileExtension', 'type': 'object'}, - } - - def __init__(self, *, type: str, file_extension, additional_properties=None, quote_all_text=None, **kwargs) -> None: - super(DelimitedTextWriteSettings, self).__init__(additional_properties=additional_properties, type=type, **kwargs) - self.quote_all_text = quote_all_text - self.file_extension = file_extension diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference.py deleted file mode 100644 index 89e750df8f0d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference.py +++ /dev/null @@ -1,42 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. 
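Taken together, the DelimitedText source, sink, and settings models above compose into a copy activity. A minimal sketch; the dataset reference names are hypothetical, and note that in this version the settings classes take an explicit, user-supplied type string rather than a server-filled constant:

    from azure.mgmt.datafactory.models import (
        CopyActivity, DatasetReference, DelimitedTextReadSettings,
        DelimitedTextSink, DelimitedTextSource, DelimitedTextWriteSettings)

    copy_csv = CopyActivity(
        name='CopyRawToCurated',
        inputs=[DatasetReference(reference_name='RawCsv')],
        outputs=[DatasetReference(reference_name='CuratedCsv')],
        source=DelimitedTextSource(
            format_settings=DelimitedTextReadSettings(
                type='DelimitedTextReadSettings',
                skip_line_count=1)),           # skip a leading banner row
        sink=DelimitedTextSink(
            format_settings=DelimitedTextWriteSettings(
                type='DelimitedTextWriteSettings',
                file_extension='.csv')),       # required by _validation
    )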
-# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class DependencyReference(Model): - """Referenced dependency. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SelfDependencyTumblingWindowTriggerReference, - TriggerDependencyReference - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SelfDependencyTumblingWindowTriggerReference': 'SelfDependencyTumblingWindowTriggerReference', 'TriggerDependencyReference': 'TriggerDependencyReference'} - } - - def __init__(self, **kwargs): - super(DependencyReference, self).__init__(**kwargs) - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference_py3.py deleted file mode 100644 index 1b0647b74991..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference_py3.py +++ /dev/null @@ -1,42 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class DependencyReference(Model): - """Referenced dependency. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SelfDependencyTumblingWindowTriggerReference, - TriggerDependencyReference - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SelfDependencyTumblingWindowTriggerReference': 'SelfDependencyTumblingWindowTriggerReference', 'TriggerDependencyReference': 'TriggerDependencyReference'} - } - - def __init__(self, **kwargs) -> None: - super(DependencyReference, self).__init__(**kwargs) - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings.py deleted file mode 100644 index a8065ec3cc06..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings.py +++ /dev/null @@ -1,49 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class DistcpSettings(Model): - """Distcp settings. - - All required parameters must be populated in order to send to Azure. - - :param resource_manager_endpoint: Required. Specifies the Yarn - ResourceManager endpoint. Type: string (or Expression with resultType - string). - :type resource_manager_endpoint: object - :param temp_script_path: Required. Specifies an existing folder path which - will be used to store temp Distcp command script. The script file is - generated by ADF and will be removed after Copy job finished. Type: string - (or Expression with resultType string). - :type temp_script_path: object - :param distcp_options: Specifies the Distcp options. Type: string (or - Expression with resultType string). - :type distcp_options: object - """ - - _validation = { - 'resource_manager_endpoint': {'required': True}, - 'temp_script_path': {'required': True}, - } - - _attribute_map = { - 'resource_manager_endpoint': {'key': 'resourceManagerEndpoint', 'type': 'object'}, - 'temp_script_path': {'key': 'tempScriptPath', 'type': 'object'}, - 'distcp_options': {'key': 'distcpOptions', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DistcpSettings, self).__init__(**kwargs) - self.resource_manager_endpoint = kwargs.get('resource_manager_endpoint', None) - self.temp_script_path = kwargs.get('temp_script_path', None) - self.distcp_options = kwargs.get('distcp_options', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings_py3.py deleted file mode 100644 index 628e2d207f8e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings_py3.py +++ /dev/null @@ -1,49 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class DistcpSettings(Model): - """Distcp settings. - - All required parameters must be populated in order to send to Azure. - - :param resource_manager_endpoint: Required. Specifies the Yarn - ResourceManager endpoint. Type: string (or Expression with resultType - string). - :type resource_manager_endpoint: object - :param temp_script_path: Required. Specifies an existing folder path which - will be used to store temp Distcp command script. The script file is - generated by ADF and will be removed after Copy job finished. Type: string - (or Expression with resultType string). - :type temp_script_path: object - :param distcp_options: Specifies the Distcp options. Type: string (or - Expression with resultType string). 
- :type distcp_options: object - """ - - _validation = { - 'resource_manager_endpoint': {'required': True}, - 'temp_script_path': {'required': True}, - } - - _attribute_map = { - 'resource_manager_endpoint': {'key': 'resourceManagerEndpoint', 'type': 'object'}, - 'temp_script_path': {'key': 'tempScriptPath', 'type': 'object'}, - 'distcp_options': {'key': 'distcpOptions', 'type': 'object'}, - } - - def __init__(self, *, resource_manager_endpoint, temp_script_path, distcp_options=None, **kwargs) -> None: - super(DistcpSettings, self).__init__(**kwargs) - self.resource_manager_endpoint = resource_manager_endpoint - self.temp_script_path = temp_script_path - self.distcp_options = distcp_options diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset.py deleted file mode 100644 index fb2b8d46fa9c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class DocumentDbCollectionDataset(Dataset): - """Microsoft Azure Document Database Collection dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param collection_name: Required. Document Database collection name. Type: - string (or Expression with resultType string). 
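[Editorial aside: for the DistcpSettings model deleted above, only resource_manager_endpoint and temp_script_path are required per the _validation map. A minimal sketch with hypothetical endpoint, path, and flags:

from azure.mgmt.datafactory.models import DistcpSettings

distcp_settings = DistcpSettings(
    resource_manager_endpoint='http://headnode.contoso.com:8088',  # YARN ResourceManager (hypothetical)
    temp_script_path='/tmp/adf-distcp',        # existing folder for the ADF-generated temp script (hypothetical)
    distcp_options='-m 20 -strategy dynamic',  # optional extra Distcp flags (hypothetical)
)
]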
- :type collection_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'collection_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DocumentDbCollectionDataset, self).__init__(**kwargs) - self.collection_name = kwargs.get('collection_name', None) - self.type = 'DocumentDbCollection' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset_py3.py deleted file mode 100644 index 5eb4dbbf0997..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset_py3.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class DocumentDbCollectionDataset(Dataset): - """Microsoft Azure Document Database Collection dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param collection_name: Required. Document Database collection name. Type: - string (or Expression with resultType string). 
- :type collection_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'collection_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, collection_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: - super(DocumentDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.collection_name = collection_name - self.type = 'DocumentDbCollection' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py deleted file mode 100644 index c2908dc1dd05..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py +++ /dev/null @@ -1,71 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink import CopySink - - -class DocumentDbCollectionSink(CopySink): - """A copy activity Document Database Collection sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). 
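[Editorial aside: the DocumentDbCollectionDataset removed above requires linked_service_name and collection_name. A minimal sketch; LinkedServiceReference is assumed from the same generated model set, and the reference name is hypothetical:

from azure.mgmt.datafactory.models import (
    DocumentDbCollectionDataset,
    LinkedServiceReference,
)

dataset = DocumentDbCollectionDataset(
    linked_service_name=LinkedServiceReference(reference_name='CosmosDbLs'),  # hypothetical linked service
    collection_name='orders',  # Document Database collection (required)
)
]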
- :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param nesting_separator: Nested properties separator. Default is . (dot). - Type: string (or Expression with resultType string). - :type nesting_separator: object - :param write_behavior: Describes how to write data to Azure Cosmos DB. - Allowed values: insert and upsert. - :type write_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DocumentDbCollectionSink, self).__init__(**kwargs) - self.nesting_separator = kwargs.get('nesting_separator', None) - self.write_behavior = kwargs.get('write_behavior', None) - self.type = 'DocumentDbCollectionSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py deleted file mode 100644 index f1410cd211a4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py +++ /dev/null @@ -1,71 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class DocumentDbCollectionSink(CopySink): - """A copy activity Document Database Collection sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). 
- :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param nesting_separator: Nested properties separator. Default is . (dot). - Type: string (or Expression with resultType string). - :type nesting_separator: object - :param write_behavior: Describes how to write data to Azure Cosmos DB. - Allowed values: insert and upsert. - :type write_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, nesting_separator=None, write_behavior=None, **kwargs) -> None: - super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.nesting_separator = nesting_separator - self.write_behavior = write_behavior - self.type = 'DocumentDbCollectionSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source.py deleted file mode 100644 index 9fdd23f2795f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source.py +++ /dev/null @@ -1,62 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class DocumentDbCollectionSource(CopySource): - """A copy activity Document Database Collection source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. 
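[Editorial aside: DocumentDbCollectionSink, deleted above, has no required type-properties; its docstring allows write_behavior values insert and upsert. A minimal sketch, which in a pipeline would be passed as the sink of a copy activity:

from azure.mgmt.datafactory.models import DocumentDbCollectionSink

sink = DocumentDbCollectionSink(
    write_behavior='upsert',  # 'insert' or 'upsert' per the docstring
    nesting_separator='.',    # default separator for nested properties
    write_batch_size=1000,    # optional; integer or Expression
)
]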
Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Documents query. Type: string (or Expression with resultType - string). - :type query: object - :param nesting_separator: Nested properties separator. Type: string (or - Expression with resultType string). - :type nesting_separator: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DocumentDbCollectionSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.nesting_separator = kwargs.get('nesting_separator', None) - self.type = 'DocumentDbCollectionSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source_py3.py deleted file mode 100644 index 9e0bf6382b04..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source_py3.py +++ /dev/null @@ -1,62 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class DocumentDbCollectionSource(CopySource): - """A copy activity Document Database Collection source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Documents query. Type: string (or Expression with resultType - string). - :type query: object - :param nesting_separator: Nested properties separator. Type: string (or - Expression with resultType string). 
- :type nesting_separator: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, nesting_separator=None, **kwargs) -> None: - super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.nesting_separator = nesting_separator - self.type = 'DocumentDbCollectionSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service.py deleted file mode 100644 index c5428ace02a2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class DrillLinkedService(LinkedService): - """Drill server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection - string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
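[Editorial aside: the matching DocumentDbCollectionSource deleted above is all-optional as well; a minimal sketch with a hypothetical documents query:

from azure.mgmt.datafactory.models import DocumentDbCollectionSource

source = DocumentDbCollectionSource(
    query='SELECT * FROM c WHERE c.region = "EU"',  # hypothetical Documents query
    nesting_separator='.',                          # separator for nested properties
)
]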
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DrillLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.pwd = kwargs.get('pwd', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Drill' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service_py3.py deleted file mode 100644 index 5fb0cb25ecdb..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service_py3.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class DrillLinkedService(LinkedService): - """Drill server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection - string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: - super(DrillLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.pwd = pwd - self.encrypted_credential = encrypted_credential - self.type = 'Drill' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source.py deleted file mode 100644 index 9a3391f27786..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class DrillSource(CopySource): - """A copy activity Drill server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
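[Editorial aside: for the DrillLinkedService deleted above, a minimal sketch. The ODBC connection string and Key Vault names are hypothetical, and AzureKeyVaultSecretReference/LinkedServiceReference are assumed from the same generated model set:

from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference,
    DrillLinkedService,
    LinkedServiceReference,
)

drill_ls = DrillLinkedService(
    # Hypothetical ODBC connection string; the docstring also allows a
    # SecureString or AzureKeyVaultSecretReference here.
    connection_string='ConnectionType=Direct;Host=drill.contoso.com;Port=31010',
    pwd=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='AkvLs'),  # hypothetical Key Vault linked service
        secret_name='drill-password',
    ),
)
]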
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DrillSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'DrillSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source_py3.py deleted file mode 100644 index 313183abab83..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class DrillSource(CopySource): - """A copy activity Drill server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'DrillSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset.py deleted file mode 100644 index 3dfd5715deb9..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class DrillTableDataset(Dataset): - """Drill server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of the Drill. Type: string (or Expression - with resultType string). - :type table: object - :param drill_table_dataset_schema: The schema name of the Drill. 
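[Editorial aside: DrillSource, deleted above, adds a single optional query on top of the common CopySource properties. A minimal sketch with a hypothetical query:

from azure.mgmt.datafactory.models import DrillSource

drill_source = DrillSource(
    query='SELECT * FROM dfs.`/data/orders`',  # hypothetical Drill query
)
]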
Type: - string (or Expression with resultType string). - :type drill_table_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'drill_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DrillTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.drill_table_dataset_schema = kwargs.get('drill_table_dataset_schema', None) - self.type = 'DrillTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset_py3.py deleted file mode 100644 index db46bdc4e0bd..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class DrillTableDataset(Dataset): - """Drill server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. 
- :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of the Drill. Type: string (or Expression - with resultType string). - :type table: object - :param drill_table_dataset_schema: The schema name of the Drill. Type: - string (or Expression with resultType string). - :type drill_table_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'drill_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, drill_table_dataset_schema=None, **kwargs) -> None: - super(DrillTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.table = table - self.drill_table_dataset_schema = drill_table_dataset_schema - self.type = 'DrillTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service.py deleted file mode 100644 index 5ff0b150718b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service.py +++ /dev/null @@ -1,93 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class DynamicsAXLinkedService(LinkedService): - """Dynamics AX linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
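[Editorial aside: for the DrillTableDataset deleted above, the docstring steers callers toward table plus drill_table_dataset_schema (serialized as typeProperties.schema) instead of the retiring table_name. A minimal sketch with a hypothetical linked service reference:

from azure.mgmt.datafactory.models import DrillTableDataset, LinkedServiceReference

drill_dataset = DrillTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='DrillLs'),  # hypothetical
    drill_table_dataset_schema='dfs',  # schema name; maps to typeProperties.schema
    table='orders',                    # preferred over the retiring table_name
)
]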
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param url: Required. The Dynamics AX (or Dynamics 365 Finance and - Operations) instance OData endpoint. - :type url: object - :param service_principal_id: Required. Specify the application's client - ID. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: Required. Specify the application's key. - Mark this field as a SecureString to store it securely in Data Factory, or - reference a secret stored in Azure Key Vault. Type: string (or Expression - with resultType string). - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: Required. Specify the tenant information (domain name or - tenant ID) under which your application resides. Retrieve it by hovering - the mouse in the top-right corner of the Azure portal. Type: string (or - Expression with resultType string). - :type tenant: object - :param aad_resource_id: Required. Specify the resource you are requesting - authorization. Type: string (or Expression with resultType string). - :type aad_resource_id: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, - 'tenant': {'required': True}, - 'aad_resource_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DynamicsAXLinkedService, self).__init__(**kwargs) - self.url = kwargs.get('url', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.aad_resource_id = kwargs.get('aad_resource_id', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'DynamicsAX' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service_py3.py deleted file mode 100644 index 79d3a34ba313..000000000000 --- 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service_py3.py +++ /dev/null @@ -1,93 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class DynamicsAXLinkedService(LinkedService): - """Dynamics AX linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param url: Required. The Dynamics AX (or Dynamics 365 Finance and - Operations) instance OData endpoint. - :type url: object - :param service_principal_id: Required. Specify the application's client - ID. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: Required. Specify the application's key. - Mark this field as a SecureString to store it securely in Data Factory, or - reference a secret stored in Azure Key Vault. Type: string (or Expression - with resultType string). - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: Required. Specify the tenant information (domain name or - tenant ID) under which your application resides. Retrieve it by hovering - the mouse in the top-right corner of the Azure portal. Type: string (or - Expression with resultType string). - :type tenant: object - :param aad_resource_id: Required. Specify the resource you are requesting - authorization. Type: string (or Expression with resultType string). - :type aad_resource_id: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, - 'tenant': {'required': True}, - 'aad_resource_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, url, service_principal_id, service_principal_key, tenant, aad_resource_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, encrypted_credential=None, **kwargs) -> None: - super(DynamicsAXLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.url = url - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.aad_resource_id = aad_resource_id - self.encrypted_credential = encrypted_credential - self.type = 'DynamicsAX' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py deleted file mode 100644 index 392b8ac7b971..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class DynamicsAXResourceDataset(Dataset): - """The path of the Dynamics AX OData entity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. 
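[Editorial aside: the DynamicsAXLinkedService deleted above requires all five of url, service_principal_id, service_principal_key, tenant, and aad_resource_id. A minimal sketch with hypothetical values; SecureString is assumed from the same generated model set:

from azure.mgmt.datafactory.models import DynamicsAXLinkedService, SecureString

dynamics_ls = DynamicsAXLinkedService(
    url='https://contoso.operations.dynamics.com/data',           # hypothetical OData endpoint
    service_principal_id='00000000-0000-0000-0000-000000000000',  # hypothetical client ID
    service_principal_key=SecureString(value='<application-key>'),
    tenant='contoso.onmicrosoft.com',                             # hypothetical tenant
    aad_resource_id='https://contoso.operations.dynamics.com',    # resource being authorized
)
]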
- :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param path: Required. The path of the Dynamics AX OData entity. Type: - string (or Expression with resultType string). - :type path: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DynamicsAXResourceDataset, self).__init__(**kwargs) - self.path = kwargs.get('path', None) - self.type = 'DynamicsAXResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py deleted file mode 100644 index 6cade3e4aa59..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class DynamicsAXResourceDataset(Dataset): - """The path of the Dynamics AX OData entity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param path: Required. The path of the Dynamics AX OData entity. Type: - string (or Expression with resultType string). - :type path: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, path, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: - super(DynamicsAXResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.path = path - self.type = 'DynamicsAXResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source.py deleted file mode 100644 index 619bad0f75c9..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class DynamicsAXSource(CopySource): - """A copy activity Dynamics AX source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
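The DynamicsAXResourceDataset pair deleted above (the Python 2 and Python 3 codegen variants of the same model) requires only a linked service reference plus the OData entity path. A minimal sketch with placeholder names:

```python
from azure.mgmt.datafactory.models import (
    DynamicsAXResourceDataset,
    LinkedServiceReference,
)

dataset = DynamicsAXResourceDataset(
    linked_service_name=LinkedServiceReference(reference_name='DynamicsAXLS'),
    path='CustomersV3',  # placeholder OData entity path
)
```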
- :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DynamicsAXSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'DynamicsAXSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source_py3.py deleted file mode 100644 index 7679e68bae7b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class DynamicsAXSource(CopySource): - """A copy activity Dynamics AX source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(DynamicsAXSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'DynamicsAXSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset.py deleted file mode 100644 index ff4079761cf0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class DynamicsCrmEntityDataset(Dataset): - """The Dynamics CRM entity dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param entity_name: The logical name of the entity. Type: string (or - Expression with resultType string). 
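Both DynamicsAXSource variants add a single optional `query` on top of the retry and concurrency knobs inherited from CopySource, so typical construction is one keyword argument (the OData filter below is a placeholder):

```python
from azure.mgmt.datafactory.models import DynamicsAXSource

source = DynamicsAXSource(query="$filter=dataAreaId eq 'usmf'")
```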
- :type entity_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DynamicsCrmEntityDataset, self).__init__(**kwargs) - self.entity_name = kwargs.get('entity_name', None) - self.type = 'DynamicsCrmEntity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset_py3.py deleted file mode 100644 index 4a1ef86b2dc6..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class DynamicsCrmEntityDataset(Dataset): - """The Dynamics CRM entity dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param entity_name: The logical name of the entity. Type: string (or - Expression with resultType string). 
- :type entity_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, entity_name=None, **kwargs) -> None: - super(DynamicsCrmEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.entity_name = entity_name - self.type = 'DynamicsCrmEntity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service.py deleted file mode 100644 index aad71042bb04..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service.py +++ /dev/null @@ -1,112 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class DynamicsCrmLinkedService(LinkedService): - """Dynamics CRM linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param deployment_type: Required. The deployment type of the Dynamics CRM - instance. 'Online' for Dynamics CRM Online and 'OnPremisesWithIfd' for - Dynamics CRM on-premises with Ifd. Type: string (or Expression with - resultType string). 
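DynamicsCrmEntityDataset mirrors the other entity datasets in this package: only the linked service reference is required, and `entity_name` carries the CRM logical name. A sketch with placeholder names:

```python
from azure.mgmt.datafactory.models import (
    DynamicsCrmEntityDataset,
    LinkedServiceReference,
)

dataset = DynamicsCrmEntityDataset(
    linked_service_name=LinkedServiceReference(reference_name='DynamicsCrmLS'),
    entity_name='account',  # placeholder logical entity name
)
```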
Possible values include: 'Online', 'OnPremisesWithIfd' - :type deployment_type: str or - ~azure.mgmt.datafactory.models.DynamicsDeploymentType - :param host_name: The host name of the on-premises Dynamics CRM server. - The property is required for on-prem and not allowed for online. Type: - string (or Expression with resultType string). - :type host_name: object - :param port: The port of on-premises Dynamics CRM server. The property is - required for on-prem and not allowed for online. Default is 443. Type: - integer (or Expression with resultType integer), minimum: 0. - :type port: object - :param service_uri: The URL to the Microsoft Dynamics CRM server. The - property is required for on-line and not allowed for on-prem. Type: string - (or Expression with resultType string). - :type service_uri: object - :param organization_name: The organization name of the Dynamics CRM - instance. The property is required for on-prem and required for online - when there are more than one Dynamics CRM instances associated with the - user. Type: string (or Expression with resultType string). - :type organization_name: object - :param authentication_type: Required. The authentication type to connect - to Dynamics CRM server. 'Office365' for online scenario, 'Ifd' for - on-premises with Ifd scenario. Type: string (or Expression with resultType - string). Possible values include: 'Office365', 'Ifd' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.DynamicsAuthenticationType - :param username: Required. User name to access the Dynamics CRM instance. - Type: string (or Expression with resultType string). - :type username: object - :param password: Password to access the Dynamics CRM instance. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'deployment_type': {'required': True}, - 'authentication_type': {'required': True}, - 'username': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, - 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, - 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DynamicsCrmLinkedService, self).__init__(**kwargs) - self.deployment_type = kwargs.get('deployment_type', None) - self.host_name = kwargs.get('host_name', None) - self.port = kwargs.get('port', None) - self.service_uri = kwargs.get('service_uri', None) - self.organization_name = kwargs.get('organization_name', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'DynamicsCrm' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service_py3.py deleted file mode 100644 index 2286301fabef..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service_py3.py +++ /dev/null @@ -1,112 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class DynamicsCrmLinkedService(LinkedService): - """Dynamics CRM linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param deployment_type: Required. The deployment type of the Dynamics CRM - instance. 'Online' for Dynamics CRM Online and 'OnPremisesWithIfd' for - Dynamics CRM on-premises with Ifd. Type: string (or Expression with - resultType string). Possible values include: 'Online', 'OnPremisesWithIfd' - :type deployment_type: str or - ~azure.mgmt.datafactory.models.DynamicsDeploymentType - :param host_name: The host name of the on-premises Dynamics CRM server. - The property is required for on-prem and not allowed for online. Type: - string (or Expression with resultType string). - :type host_name: object - :param port: The port of on-premises Dynamics CRM server. The property is - required for on-prem and not allowed for online. Default is 443. Type: - integer (or Expression with resultType integer), minimum: 0. - :type port: object - :param service_uri: The URL to the Microsoft Dynamics CRM server. The - property is required for on-line and not allowed for on-prem. Type: string - (or Expression with resultType string). - :type service_uri: object - :param organization_name: The organization name of the Dynamics CRM - instance. The property is required for on-prem and required for online - when there are more than one Dynamics CRM instances associated with the - user. Type: string (or Expression with resultType string). - :type organization_name: object - :param authentication_type: Required. The authentication type to connect - to Dynamics CRM server. 'Office365' for online scenario, 'Ifd' for - on-premises with Ifd scenario. Type: string (or Expression with resultType - string). Possible values include: 'Office365', 'Ifd' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.DynamicsAuthenticationType - :param username: Required. User name to access the Dynamics CRM instance. - Type: string (or Expression with resultType string). - :type username: object - :param password: Password to access the Dynamics CRM instance. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'deployment_type': {'required': True}, - 'authentication_type': {'required': True}, - 'username': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, - 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, - 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, deployment_type, authentication_type, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(DynamicsCrmLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.deployment_type = deployment_type - self.host_name = host_name - self.port = port - self.service_uri = service_uri - self.organization_name = organization_name - self.authentication_type = authentication_type - self.username = username - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'DynamicsCrm' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink.py deleted file mode 100644 index 2d0f462e0f59..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink.py +++ /dev/null @@ -1,77 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink import CopySink - - -class DynamicsCrmSink(CopySink): - """A copy activity Dynamics CRM sink. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. 
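Per the docstring above, the online and on-premises property sets are mutually exclusive: `service_uri` is online-only, while `host_name`/`port` are on-prem-only. A minimal online-scenario sketch with placeholder values:

```python
from azure.mgmt.datafactory.models import DynamicsCrmLinkedService, SecureString

ls = DynamicsCrmLinkedService(
    deployment_type='Online',         # DynamicsDeploymentType value
    authentication_type='Office365',  # DynamicsAuthenticationType value
    username='admin@contoso.onmicrosoft.com',
    password=SecureString(value='<password>'),
    service_uri='https://contoso.crm.dynamics.com',
)
```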
Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :ivar write_behavior: Required. The write behavior for the operation. - Default value: "Upsert" . - :vartype write_behavior: str - :param ignore_null_values: The flag indicating whether to ignore null - values from input dataset (except key fields) during write operation. - Default is false. Type: boolean (or Expression with resultType boolean). - :type ignore_null_values: object - """ - - _validation = { - 'type': {'required': True}, - 'write_behavior': {'required': True, 'constant': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, - } - - write_behavior = "Upsert" - - def __init__(self, **kwargs): - super(DynamicsCrmSink, self).__init__(**kwargs) - self.ignore_null_values = kwargs.get('ignore_null_values', None) - self.type = 'DynamicsCrmSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink_py3.py deleted file mode 100644 index d9f4fcf092c8..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink_py3.py +++ /dev/null @@ -1,77 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class DynamicsCrmSink(CopySink): - """A copy activity Dynamics CRM sink. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :ivar write_behavior: Required. The write behavior for the operation. - Default value: "Upsert" . - :vartype write_behavior: str - :param ignore_null_values: The flag indicating whether to ignore null - values from input dataset (except key fields) during write operation. - Default is false. Type: boolean (or Expression with resultType boolean). - :type ignore_null_values: object - """ - - _validation = { - 'type': {'required': True}, - 'write_behavior': {'required': True, 'constant': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, - } - - write_behavior = "Upsert" - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None: - super(DynamicsCrmSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.ignore_null_values = ignore_null_values - self.type = 'DynamicsCrmSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source.py deleted file mode 100644 index 641fad43f437..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source.py +++ /dev/null @@ -1,58 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
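Note that `write_behavior` is a constant class attribute ("Upsert") rather than a constructor parameter, so a DynamicsCrmSink is configured only through the optional batching/retry settings and `ignore_null_values`; a minimal sketch:

```python
from azure.mgmt.datafactory.models import DynamicsCrmSink

sink = DynamicsCrmSink(ignore_null_values=True)
assert sink.write_behavior == 'Upsert'  # server-enforced constant
```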
-# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class DynamicsCrmSource(CopySource): - """A copy activity Dynamics CRM source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: FetchXML is a proprietary query language that is used in - Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression - with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DynamicsCrmSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'DynamicsCrmSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source_py3.py deleted file mode 100644 index 29c3e78609a5..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source_py3.py +++ /dev/null @@ -1,58 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class DynamicsCrmSource(CopySource): - """A copy activity Dynamics CRM source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. 
Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: FetchXML is a proprietary query language that is used in - Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression - with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(DynamicsCrmSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'DynamicsCrmSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset.py deleted file mode 100644 index 435c6d153066..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class DynamicsEntityDataset(Dataset): - """The Dynamics entity dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
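As the docstring notes, the CRM source's `query` is FetchXML rather than SQL or OData. A sketch with a placeholder FetchXML document:

```python
from azure.mgmt.datafactory.models import DynamicsCrmSource

fetch_xml = (
    '<fetch top="100">'
    '<entity name="account"><attribute name="name" /></entity>'
    '</fetch>'
)
source = DynamicsCrmSource(query=fetch_xml)
```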
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param entity_name: The logical name of the entity. Type: string (or - Expression with resultType string). - :type entity_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DynamicsEntityDataset, self).__init__(**kwargs) - self.entity_name = kwargs.get('entity_name', None) - self.type = 'DynamicsEntity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset_py3.py deleted file mode 100644 index 7ee671890354..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class DynamicsEntityDataset(Dataset): - """The Dynamics entity dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. 
- :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param entity_name: The logical name of the entity. Type: string (or - Expression with resultType string). - :type entity_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, entity_name=None, **kwargs) -> None: - super(DynamicsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.entity_name = entity_name - self.type = 'DynamicsEntity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service.py deleted file mode 100644 index c925033d1240..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service.py +++ /dev/null @@ -1,109 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class DynamicsLinkedService(LinkedService): - """Dynamics linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param deployment_type: Required. 
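DynamicsEntityDataset is shape-identical to DynamicsCrmEntityDataset; only the `type` discriminator ('DynamicsEntity') differs, so construction follows the same pattern:

```python
from azure.mgmt.datafactory.models import (
    DynamicsEntityDataset,
    LinkedServiceReference,
)

dataset = DynamicsEntityDataset(
    linked_service_name=LinkedServiceReference(reference_name='DynamicsLS'),
    entity_name='contact',  # placeholder logical entity name
)
```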
The deployment type of the Dynamics - instance. 'Online' for Dynamics Online and 'OnPremisesWithIfd' for - Dynamics on-premises with Ifd. Type: string (or Expression with resultType - string). - :type deployment_type: object - :param host_name: The host name of the on-premises Dynamics server. The - property is required for on-prem and not allowed for online. Type: string - (or Expression with resultType string). - :type host_name: object - :param port: The port of on-premises Dynamics server. The property is - required for on-prem and not allowed for online. Default is 443. Type: - integer (or Expression with resultType integer), minimum: 0. - :type port: object - :param service_uri: The URL to the Microsoft Dynamics server. The property - is required for on-line and not allowed for on-prem. Type: string (or - Expression with resultType string). - :type service_uri: object - :param organization_name: The organization name of the Dynamics instance. - The property is required for on-prem and required for online when there - are more than one Dynamics instances associated with the user. Type: - string (or Expression with resultType string). - :type organization_name: object - :param authentication_type: Required. The authentication type to connect - to Dynamics server. 'Office365' for online scenario, 'Ifd' for on-premises - with Ifd scenario. Type: string (or Expression with resultType string). - :type authentication_type: object - :param username: Required. User name to access the Dynamics instance. - Type: string (or Expression with resultType string). - :type username: object - :param password: Password to access the Dynamics instance. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'deployment_type': {'required': True}, - 'authentication_type': {'required': True}, - 'username': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, - 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, - 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DynamicsLinkedService, self).__init__(**kwargs) - self.deployment_type = kwargs.get('deployment_type', None) - self.host_name = kwargs.get('host_name', None) - self.port = kwargs.get('port', None) - self.service_uri = kwargs.get('service_uri', None) - self.organization_name = kwargs.get('organization_name', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Dynamics' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service_py3.py deleted file mode 100644 index 07c028ff2477..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service_py3.py +++ /dev/null @@ -1,109 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class DynamicsLinkedService(LinkedService): - """Dynamics linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param deployment_type: Required. The deployment type of the Dynamics - instance. 'Online' for Dynamics Online and 'OnPremisesWithIfd' for - Dynamics on-premises with Ifd. Type: string (or Expression with resultType - string). - :type deployment_type: object - :param host_name: The host name of the on-premises Dynamics server. The - property is required for on-prem and not allowed for online. Type: string - (or Expression with resultType string). - :type host_name: object - :param port: The port of on-premises Dynamics server. The property is - required for on-prem and not allowed for online. Default is 443. Type: - integer (or Expression with resultType integer), minimum: 0. - :type port: object - :param service_uri: The URL to the Microsoft Dynamics server. The property - is required for on-line and not allowed for on-prem. Type: string (or - Expression with resultType string). - :type service_uri: object - :param organization_name: The organization name of the Dynamics instance. - The property is required for on-prem and required for online when there - are more than one Dynamics instances associated with the user. Type: - string (or Expression with resultType string). - :type organization_name: object - :param authentication_type: Required. The authentication type to connect - to Dynamics server. 'Office365' for online scenario, 'Ifd' for on-premises - with Ifd scenario. Type: string (or Expression with resultType string). - :type authentication_type: object - :param username: Required. User name to access the Dynamics instance. - Type: string (or Expression with resultType string). - :type username: object - :param password: Password to access the Dynamics instance. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'deployment_type': {'required': True}, - 'authentication_type': {'required': True}, - 'username': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, - 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, - 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, deployment_type, authentication_type, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(DynamicsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.deployment_type = deployment_type - self.host_name = host_name - self.port = port - self.service_uri = service_uri - self.organization_name = organization_name - self.authentication_type = authentication_type - self.username = username - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'Dynamics' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py deleted file mode 100644 index 45bac7b52064..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py +++ /dev/null @@ -1,77 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink import CopySink - - -class DynamicsSink(CopySink): - """A copy activity Dynamics sink. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. 
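# Illustrative sketch, not part of the patch: constructing the
# DynamicsLinkedService shown above via its py3 keyword-only constructor.
# SecureString (a SecretBase subtype) is assumed to be importable from the
# same models package; all values are placeholders.
from azure.mgmt.datafactory.models import DynamicsLinkedService, SecureString

dynamics_ls = DynamicsLinkedService(
    deployment_type='Online',              # 'OnPremisesWithIfd' for on-prem
    authentication_type='Office365',       # 'Ifd' for the on-prem scenario
    username='user@contoso.example',
    password=SecureString(value='<password>'),
    # service_uri applies to online only; host_name/port are the on-prem pair
    service_uri='https://contoso.crm.dynamics.com',
)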
- :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :ivar write_behavior: Required. The write behavior for the operation. - Default value: "Upsert" . - :vartype write_behavior: str - :param ignore_null_values: The flag indicating whether ignore null values - from input dataset (except key fields) during write operation. Default is - false. Type: boolean (or Expression with resultType boolean). - :type ignore_null_values: object - """ - - _validation = { - 'type': {'required': True}, - 'write_behavior': {'required': True, 'constant': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, - } - - write_behavior = "Upsert" - - def __init__(self, **kwargs): - super(DynamicsSink, self).__init__(**kwargs) - self.ignore_null_values = kwargs.get('ignore_null_values', None) - self.type = 'DynamicsSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py deleted file mode 100644 index 5f736f9cf658..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py +++ /dev/null @@ -1,77 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class DynamicsSink(CopySink): - """A copy activity Dynamics sink. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. 
Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :ivar write_behavior: Required. The write behavior for the operation. - Default value: "Upsert" . - :vartype write_behavior: str - :param ignore_null_values: The flag indicating whether ignore null values - from input dataset (except key fields) during write operation. Default is - false. Type: boolean (or Expression with resultType boolean). - :type ignore_null_values: object - """ - - _validation = { - 'type': {'required': True}, - 'write_behavior': {'required': True, 'constant': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, - } - - write_behavior = "Upsert" - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None: - super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.ignore_null_values = ignore_null_values - self.type = 'DynamicsSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source.py deleted file mode 100644 index d38f96fee911..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source.py +++ /dev/null @@ -1,58 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
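# Illustrative sketch, not part of the patch: the DynamicsSink defined above
# as it might be configured inside a copy activity. write_behavior is a
# server-side constant ("Upsert"), so only the optional knobs are passed;
# values are placeholders.
from azure.mgmt.datafactory.models import DynamicsSink

sink = DynamicsSink(
    write_batch_size=10,
    write_batch_timeout='00:05:00',  # matches the ((\d+)\.)?(\d\d):mm:ss pattern
    ignore_null_values=True,         # skip nulls (except key fields) on write
)
assert sink.write_behavior == 'Upsert'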
-# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class DynamicsSource(CopySource): - """A copy activity Dynamics source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: FetchXML is a proprietary query language that is used in - Microsoft Dynamics (online & on-premises). Type: string (or Expression - with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DynamicsSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'DynamicsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source_py3.py deleted file mode 100644 index 12d83625bc6a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source_py3.py +++ /dev/null @@ -1,58 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class DynamicsSource(CopySource): - """A copy activity Dynamics source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. 
Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: FetchXML is a proprietary query language that is used in - Microsoft Dynamics (online & on-premises). Type: string (or Expression - with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'DynamicsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service.py deleted file mode 100644 index 6249c2e2334b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service.py +++ /dev/null @@ -1,91 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class EloquaLinkedService(LinkedService): - """Eloqua server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param endpoint: Required. The endpoint of the Eloqua server. (i.e. - eloqua.example.com) - :type endpoint: object - :param username: Required. The site name and user name of your Eloqua - account in the form: sitename/username. (i.e. Eloqua/Alice) - :type username: object - :param password: The password corresponding to the user name. 
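# Illustrative sketch, not part of the patch: a DynamicsSource carrying a
# FetchXML query, per the docstring above. The query text and retry values
# are placeholders.
from azure.mgmt.datafactory.models import DynamicsSource

source = DynamicsSource(
    query='<fetch top="10"><entity name="account"/></fetch>',
    source_retry_count=3,
    source_retry_wait='00:00:30',
)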
- :type password: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'username': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(EloquaLinkedService, self).__init__(**kwargs) - self.endpoint = kwargs.get('endpoint', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Eloqua' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service_py3.py deleted file mode 100644 index 623d798036a3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service_py3.py +++ /dev/null @@ -1,91 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class EloquaLinkedService(LinkedService): - """Eloqua server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param endpoint: Required. The endpoint of the Eloqua server. (i.e. - eloqua.example.com) - :type endpoint: object - :param username: Required. The site name and user name of your Eloqua - account in the form: sitename/username. (i.e. Eloqua/Alice) - :type username: object - :param password: The password corresponding to the user name. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'username': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, endpoint, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(EloquaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.endpoint = endpoint - self.username = username - self.password = password - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'Eloqua' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset.py deleted file mode 100644 index 56adc0ce47c4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class EloquaObjectDataset(Dataset): - """Eloqua server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. 
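# Illustrative sketch, not part of the patch: the EloquaLinkedService shown
# above, using the sitename/username convention its docstring calls out.
# SecureString is assumed from the same models package; values are
# placeholders.
from azure.mgmt.datafactory.models import EloquaLinkedService, SecureString

eloqua_ls = EloquaLinkedService(
    endpoint='eloqua.example.com',
    username='Eloqua/Alice',                    # sitename/username
    password=SecureString(value='<password>'),
    use_encrypted_endpoints=True,
)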
Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(EloquaObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'EloquaObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset_py3.py deleted file mode 100644 index 705f43cd225c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class EloquaObjectDataset(Dataset): - """Eloqua server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(EloquaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'EloquaObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source.py deleted file mode 100644 index f016140189f1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class EloquaSource(CopySource): - """A copy activity Eloqua server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
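# Illustrative sketch, not part of the patch: an EloquaObjectDataset bound to
# a linked service by name. LinkedServiceReference is assumed from the same
# models package; the reference and table names are placeholders.
from azure.mgmt.datafactory.models import (
    EloquaObjectDataset, LinkedServiceReference)

eloqua_ds = EloquaObjectDataset(
    linked_service_name=LinkedServiceReference(
        reference_name='EloquaLinkedService'),
    table_name='Contacts',
)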
- :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(EloquaSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'EloquaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source_py3.py deleted file mode 100644 index d200ff32fd9d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class EloquaSource(CopySource): - """A copy activity Eloqua server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'EloquaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference.py deleted file mode 100644 index 5db1448a5a55..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference.py +++ /dev/null @@ -1,34 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class EntityReference(Model): - """The entity reference. - - :param type: The type of this referenced entity. Possible values include: - 'IntegrationRuntimeReference', 'LinkedServiceReference' - :type type: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType - :param reference_name: The name of this referenced entity. - :type reference_name: str - """ - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(EntityReference, self).__init__(**kwargs) - self.type = kwargs.get('type', None) - self.reference_name = kwargs.get('reference_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference_py3.py deleted file mode 100644 index f87698b67a64..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference_py3.py +++ /dev/null @@ -1,34 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class EntityReference(Model): - """The entity reference. - - :param type: The type of this referenced entity. 
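# Illustrative sketch, not part of the patch: an EloquaSource with its single
# optional query property, per the docstring above; the query is a
# placeholder.
from azure.mgmt.datafactory.models import EloquaSource

eloqua_source = EloquaSource(query='SELECT * FROM Contacts')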
Possible values include: - 'IntegrationRuntimeReference', 'LinkedServiceReference' - :type type: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType - :param reference_name: The name of this referenced entity. - :type reference_name: str - """ - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - } - - def __init__(self, *, type=None, reference_name: str=None, **kwargs) -> None: - super(EntityReference, self).__init__(**kwargs) - self.type = type - self.reference_name = reference_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity.py deleted file mode 100644 index 0008b5eee153..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity.py +++ /dev/null @@ -1,65 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .control_activity import ControlActivity - - -class ExecutePipelineActivity(ControlActivity): - """Execute pipeline activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param pipeline: Required. Pipeline reference. - :type pipeline: ~azure.mgmt.datafactory.models.PipelineReference - :param parameters: Pipeline parameters. - :type parameters: dict[str, object] - :param wait_on_completion: Defines whether activity execution will wait - for the dependent pipeline execution to finish. Default is false. 
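# Illustrative sketch, not part of the patch: EntityReference is the small
# value object used when one resource points at another (e.g. a shared
# integration runtime); both fields are optional and the name is a
# placeholder.
from azure.mgmt.datafactory.models import EntityReference

ref = EntityReference(
    type='IntegrationRuntimeReference',
    reference_name='MySharedIR',
)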
- :type wait_on_completion: bool - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'pipeline': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pipeline': {'key': 'typeProperties.pipeline', 'type': 'PipelineReference'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, - 'wait_on_completion': {'key': 'typeProperties.waitOnCompletion', 'type': 'bool'}, - } - - def __init__(self, **kwargs): - super(ExecutePipelineActivity, self).__init__(**kwargs) - self.pipeline = kwargs.get('pipeline', None) - self.parameters = kwargs.get('parameters', None) - self.wait_on_completion = kwargs.get('wait_on_completion', None) - self.type = 'ExecutePipeline' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity_py3.py deleted file mode 100644 index addaafabe7b0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity_py3.py +++ /dev/null @@ -1,65 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .control_activity_py3 import ControlActivity - - -class ExecutePipelineActivity(ControlActivity): - """Execute pipeline activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param pipeline: Required. Pipeline reference. - :type pipeline: ~azure.mgmt.datafactory.models.PipelineReference - :param parameters: Pipeline parameters. - :type parameters: dict[str, object] - :param wait_on_completion: Defines whether activity execution will wait - for the dependent pipeline execution to finish. Default is false. 
- :type wait_on_completion: bool - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'pipeline': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pipeline': {'key': 'typeProperties.pipeline', 'type': 'PipelineReference'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, - 'wait_on_completion': {'key': 'typeProperties.waitOnCompletion', 'type': 'bool'}, - } - - def __init__(self, *, name: str, pipeline, additional_properties=None, description: str=None, depends_on=None, user_properties=None, parameters=None, wait_on_completion: bool=None, **kwargs) -> None: - super(ExecutePipelineActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.pipeline = pipeline - self.parameters = parameters - self.wait_on_completion = wait_on_completion - self.type = 'ExecutePipeline' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity.py deleted file mode 100644 index 9efa853dac86..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity.py +++ /dev/null @@ -1,124 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity import ExecutionActivity - - -class ExecuteSSISPackageActivity(ExecutionActivity): - """Execute SSIS package activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param package_location: Required. SSIS package location. - :type package_location: ~azure.mgmt.datafactory.models.SSISPackageLocation - :param runtime: Specifies the runtime to execute SSIS package. The value - should be "x86" or "x64". 
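# Illustrative sketch, not part of the patch: an ExecutePipelineActivity that
# invokes a child pipeline and blocks until it finishes. PipelineReference is
# assumed from the same models package; pipeline and parameter names are
# placeholders.
from azure.mgmt.datafactory.models import (
    ExecutePipelineActivity, PipelineReference)

exec_pipeline = ExecutePipelineActivity(
    name='RunChildPipeline',
    pipeline=PipelineReference(reference_name='ChildPipeline'),
    parameters={'windowStart': '2019-06-01T00:00:00Z'},
    wait_on_completion=True,  # wait for the dependent pipeline run to finish
)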
Type: string (or Expression with resultType - string). - :type runtime: object - :param logging_level: The logging level of SSIS package execution. Type: - string (or Expression with resultType string). - :type logging_level: object - :param environment_path: The environment path to execute the SSIS package. - Type: string (or Expression with resultType string). - :type environment_path: object - :param execution_credential: The package execution credential. - :type execution_credential: - ~azure.mgmt.datafactory.models.SSISExecutionCredential - :param connect_via: Required. The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param project_parameters: The project level parameters to execute the - SSIS package. - :type project_parameters: dict[str, - ~azure.mgmt.datafactory.models.SSISExecutionParameter] - :param package_parameters: The package level parameters to execute the - SSIS package. - :type package_parameters: dict[str, - ~azure.mgmt.datafactory.models.SSISExecutionParameter] - :param project_connection_managers: The project level connection managers - to execute the SSIS package. - :type project_connection_managers: dict[str, dict[str, - ~azure.mgmt.datafactory.models.SSISExecutionParameter]] - :param package_connection_managers: The package level connection managers - to execute the SSIS package. - :type package_connection_managers: dict[str, dict[str, - ~azure.mgmt.datafactory.models.SSISExecutionParameter]] - :param property_overrides: The property overrides to execute the SSIS - package. - :type property_overrides: dict[str, - ~azure.mgmt.datafactory.models.SSISPropertyOverride] - :param log_location: SSIS package execution log location. - :type log_location: ~azure.mgmt.datafactory.models.SSISLogLocation - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'package_location': {'required': True}, - 'connect_via': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'package_location': {'key': 'typeProperties.packageLocation', 'type': 'SSISPackageLocation'}, - 'runtime': {'key': 'typeProperties.runtime', 'type': 'object'}, - 'logging_level': {'key': 'typeProperties.loggingLevel', 'type': 'object'}, - 'environment_path': {'key': 'typeProperties.environmentPath', 'type': 'object'}, - 'execution_credential': {'key': 'typeProperties.executionCredential', 'type': 'SSISExecutionCredential'}, - 'connect_via': {'key': 'typeProperties.connectVia', 'type': 'IntegrationRuntimeReference'}, - 'project_parameters': {'key': 'typeProperties.projectParameters', 'type': '{SSISExecutionParameter}'}, - 'package_parameters': {'key': 'typeProperties.packageParameters', 'type': '{SSISExecutionParameter}'}, - 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, - 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, - 'property_overrides': {'key': 'typeProperties.propertyOverrides', 
'type': '{SSISPropertyOverride}'}, - 'log_location': {'key': 'typeProperties.logLocation', 'type': 'SSISLogLocation'}, - } - - def __init__(self, **kwargs): - super(ExecuteSSISPackageActivity, self).__init__(**kwargs) - self.package_location = kwargs.get('package_location', None) - self.runtime = kwargs.get('runtime', None) - self.logging_level = kwargs.get('logging_level', None) - self.environment_path = kwargs.get('environment_path', None) - self.execution_credential = kwargs.get('execution_credential', None) - self.connect_via = kwargs.get('connect_via', None) - self.project_parameters = kwargs.get('project_parameters', None) - self.package_parameters = kwargs.get('package_parameters', None) - self.project_connection_managers = kwargs.get('project_connection_managers', None) - self.package_connection_managers = kwargs.get('package_connection_managers', None) - self.property_overrides = kwargs.get('property_overrides', None) - self.log_location = kwargs.get('log_location', None) - self.type = 'ExecuteSSISPackage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity_py3.py deleted file mode 100644 index 64efa9cd63ac..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity_py3.py +++ /dev/null @@ -1,124 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class ExecuteSSISPackageActivity(ExecutionActivity): - """Execute SSIS package activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param package_location: Required. SSIS package location. - :type package_location: ~azure.mgmt.datafactory.models.SSISPackageLocation - :param runtime: Specifies the runtime to execute SSIS package. The value - should be "x86" or "x64". Type: string (or Expression with resultType - string). - :type runtime: object - :param logging_level: The logging level of SSIS package execution. Type: - string (or Expression with resultType string). 
- :type logging_level: object - :param environment_path: The environment path to execute the SSIS package. - Type: string (or Expression with resultType string). - :type environment_path: object - :param execution_credential: The package execution credential. - :type execution_credential: - ~azure.mgmt.datafactory.models.SSISExecutionCredential - :param connect_via: Required. The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param project_parameters: The project level parameters to execute the - SSIS package. - :type project_parameters: dict[str, - ~azure.mgmt.datafactory.models.SSISExecutionParameter] - :param package_parameters: The package level parameters to execute the - SSIS package. - :type package_parameters: dict[str, - ~azure.mgmt.datafactory.models.SSISExecutionParameter] - :param project_connection_managers: The project level connection managers - to execute the SSIS package. - :type project_connection_managers: dict[str, dict[str, - ~azure.mgmt.datafactory.models.SSISExecutionParameter]] - :param package_connection_managers: The package level connection managers - to execute the SSIS package. - :type package_connection_managers: dict[str, dict[str, - ~azure.mgmt.datafactory.models.SSISExecutionParameter]] - :param property_overrides: The property overrides to execute the SSIS - package. - :type property_overrides: dict[str, - ~azure.mgmt.datafactory.models.SSISPropertyOverride] - :param log_location: SSIS package execution log location. - :type log_location: ~azure.mgmt.datafactory.models.SSISLogLocation - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'package_location': {'required': True}, - 'connect_via': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'package_location': {'key': 'typeProperties.packageLocation', 'type': 'SSISPackageLocation'}, - 'runtime': {'key': 'typeProperties.runtime', 'type': 'object'}, - 'logging_level': {'key': 'typeProperties.loggingLevel', 'type': 'object'}, - 'environment_path': {'key': 'typeProperties.environmentPath', 'type': 'object'}, - 'execution_credential': {'key': 'typeProperties.executionCredential', 'type': 'SSISExecutionCredential'}, - 'connect_via': {'key': 'typeProperties.connectVia', 'type': 'IntegrationRuntimeReference'}, - 'project_parameters': {'key': 'typeProperties.projectParameters', 'type': '{SSISExecutionParameter}'}, - 'package_parameters': {'key': 'typeProperties.packageParameters', 'type': '{SSISExecutionParameter}'}, - 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, - 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, - 'property_overrides': {'key': 'typeProperties.propertyOverrides', 'type': '{SSISPropertyOverride}'}, - 'log_location': {'key': 'typeProperties.logLocation', 'type': 'SSISLogLocation'}, - } - - def __init__(self, *, name: str, package_location, connect_via, 
additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, runtime=None, logging_level=None, environment_path=None, execution_credential=None, project_parameters=None, package_parameters=None, project_connection_managers=None, package_connection_managers=None, property_overrides=None, log_location=None, **kwargs) -> None: - super(ExecuteSSISPackageActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.package_location = package_location - self.runtime = runtime - self.logging_level = logging_level - self.environment_path = environment_path - self.execution_credential = execution_credential - self.connect_via = connect_via - self.project_parameters = project_parameters - self.package_parameters = package_parameters - self.project_connection_managers = project_connection_managers - self.package_connection_managers = package_connection_managers - self.property_overrides = property_overrides - self.log_location = log_location - self.type = 'ExecuteSSISPackage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py deleted file mode 100644 index 8c16eff2c753..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py +++ /dev/null @@ -1,75 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .activity import Activity - - -class ExecutionActivity(Activity): - """Base class for all execution activities. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureFunctionActivity, DatabricksSparkPythonActivity, - DatabricksSparkJarActivity, DatabricksNotebookActivity, - DataLakeAnalyticsUSQLActivity, AzureMLUpdateResourceActivity, - AzureMLBatchExecutionActivity, GetMetadataActivity, WebActivity, - LookupActivity, AzureDataExplorerCommandActivity, DeleteActivity, - SqlServerStoredProcedureActivity, CustomActivity, - ExecuteSSISPackageActivity, HDInsightSparkActivity, - HDInsightStreamingActivity, HDInsightMapReduceActivity, - HDInsightPigActivity, HDInsightHiveActivity, CopyActivity - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. 
- :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - } - - _subtype_map = { - 'type': {'AzureFunctionActivity': 'AzureFunctionActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'GetMetadata': 'GetMetadataActivity', 'WebActivity': 'WebActivity', 'Lookup': 'LookupActivity', 'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'Delete': 'DeleteActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'Custom': 'CustomActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'Copy': 'CopyActivity'} - } - - def __init__(self, **kwargs): - super(ExecutionActivity, self).__init__(**kwargs) - self.linked_service_name = kwargs.get('linked_service_name', None) - self.policy = kwargs.get('policy', None) - self.type = 'Execution' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity_py3.py deleted file mode 100644 index 5deb58db81a7..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity_py3.py +++ /dev/null @@ -1,75 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .activity_py3 import Activity - - -class ExecutionActivity(Activity): - """Base class for all execution activities. - - You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AzureFunctionActivity, DatabricksSparkPythonActivity, - DatabricksSparkJarActivity, DatabricksNotebookActivity, - DataLakeAnalyticsUSQLActivity, AzureMLUpdateResourceActivity, - AzureMLBatchExecutionActivity, GetMetadataActivity, WebActivity, - LookupActivity, AzureDataExplorerCommandActivity, DeleteActivity, - SqlServerStoredProcedureActivity, CustomActivity, - ExecuteSSISPackageActivity, HDInsightSparkActivity, - HDInsightStreamingActivity, HDInsightMapReduceActivity, - HDInsightPigActivity, HDInsightHiveActivity, CopyActivity - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - } - - _subtype_map = { - 'type': {'AzureFunctionActivity': 'AzureFunctionActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'GetMetadata': 'GetMetadataActivity', 'WebActivity': 'WebActivity', 'Lookup': 'LookupActivity', 'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'Delete': 'DeleteActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'Custom': 'CustomActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'Copy': 'CopyActivity'} - } - - def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, **kwargs) -> None: - super(ExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.linked_service_name = linked_service_name - self.policy = policy - self.type = 
'Execution' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request.py deleted file mode 100644 index a6a2cc280b4d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request.py +++ /dev/null @@ -1,32 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class ExposureControlRequest(Model): - """The exposure control request. - - :param feature_name: The feature name. - :type feature_name: str - :param feature_type: The feature type. - :type feature_type: str - """ - - _attribute_map = { - 'feature_name': {'key': 'featureName', 'type': 'str'}, - 'feature_type': {'key': 'featureType', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(ExposureControlRequest, self).__init__(**kwargs) - self.feature_name = kwargs.get('feature_name', None) - self.feature_type = kwargs.get('feature_type', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request_py3.py deleted file mode 100644 index b3f4099fb972..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request_py3.py +++ /dev/null @@ -1,32 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class ExposureControlRequest(Model): - """The exposure control request. - - :param feature_name: The feature name. - :type feature_name: str - :param feature_type: The feature type. - :type feature_type: str - """ - - _attribute_map = { - 'feature_name': {'key': 'featureName', 'type': 'str'}, - 'feature_type': {'key': 'featureType', 'type': 'str'}, - } - - def __init__(self, *, feature_name: str=None, feature_type: str=None, **kwargs) -> None: - super(ExposureControlRequest, self).__init__(**kwargs) - self.feature_name = feature_name - self.feature_type = feature_type diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response.py deleted file mode 100644 index 868647e3c5b3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response.py +++ /dev/null @@ -1,40 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. 
All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class ExposureControlResponse(Model): - """The exposure control response. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar feature_name: The feature name. - :vartype feature_name: str - :ivar value: The feature value. - :vartype value: str - """ - - _validation = { - 'feature_name': {'readonly': True}, - 'value': {'readonly': True}, - } - - _attribute_map = { - 'feature_name': {'key': 'featureName', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(ExposureControlResponse, self).__init__(**kwargs) - self.feature_name = None - self.value = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response_py3.py deleted file mode 100644 index 1ac7138e7984..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response_py3.py +++ /dev/null @@ -1,40 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class ExposureControlResponse(Model): - """The exposure control response. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar feature_name: The feature name. - :vartype feature_name: str - :ivar value: The feature value. - :vartype value: str - """ - - _validation = { - 'feature_name': {'readonly': True}, - 'value': {'readonly': True}, - } - - _attribute_map = { - 'feature_name': {'key': 'featureName', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, - } - - def __init__(self, **kwargs) -> None: - super(ExposureControlResponse, self).__init__(**kwargs) - self.feature_name = None - self.value = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression.py deleted file mode 100644 index 4b16ceca2794..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression.py +++ /dev/null @@ -1,43 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
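The generated docstrings above describe the wire contract but give no usage, so here is a minimal construction sketch for the ExecuteSSISPackageActivity model whose deletion is recorded earlier in this patch. The client-side names and the SSISDB path are illustrative assumptions, as is SSISPackageLocation's package_path parameter, which this hunk does not show:

    from azure.mgmt.datafactory.models import (
        ExecuteSSISPackageActivity,
        IntegrationRuntimeReference,
        SSISPackageLocation,
    )

    # name, package_location and connect_via are the required fields per the
    # _validation map above; runtime must be "x86" or "x64" per the docstring.
    activity = ExecuteSSISPackageActivity(
        name="RunNightlyLoad",
        package_location=SSISPackageLocation(
            package_path="SSISDB\\Nightly\\LoadProject\\Load.dtsx"  # assumed parameter
        ),
        connect_via=IntegrationRuntimeReference(
            reference_name="AzureSsisIntegrationRuntime"
        ),
        runtime="x64",
        logging_level="Basic",
    )

The 'ExecuteSSISPackage' discriminator that the ExecutionActivity _subtype_map above pairs with this class is what lets msrest route a serialized activity back to ExecuteSSISPackageActivity on deserialization.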
-# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class Expression(Model): - """Azure Data Factory expression definition. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Expression type. Default value: "Expression" . - :vartype type: str - :param value: Required. Expression value. - :type value: str - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'value': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, - } - - type = "Expression" - - def __init__(self, **kwargs): - super(Expression, self).__init__(**kwargs) - self.value = kwargs.get('value', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression_py3.py deleted file mode 100644 index c6ad023a57ed..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression_py3.py +++ /dev/null @@ -1,43 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class Expression(Model): - """Azure Data Factory expression definition. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Expression type. Default value: "Expression" . - :vartype type: str - :param value: Required. Expression value. - :type value: str - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'value': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, - } - - type = "Expression" - - def __init__(self, *, value: str, **kwargs) -> None: - super(Expression, self).__init__(**kwargs) - self.value = value diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory.py deleted file mode 100644 index 614b3d7fc97a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory.py +++ /dev/null @@ -1,81 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .resource import Resource - - -class Factory(Resource): - """Factory resource type. 
- - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :param location: The resource location. - :type location: str - :param tags: The resource tags. - :type tags: dict[str, str] - :ivar e_tag: Etag identifies change in the resource. - :vartype e_tag: str - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param identity: Managed service identity of the factory. - :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity - :ivar provisioning_state: Factory provisioning state, example Succeeded. - :vartype provisioning_state: str - :ivar create_time: Time the factory was created in ISO8601 format. - :vartype create_time: datetime - :ivar version: Version of the factory. - :vartype version: str - :param repo_configuration: Git repo information of the factory. - :type repo_configuration: - ~azure.mgmt.datafactory.models.FactoryRepoConfiguration - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'e_tag': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - 'create_time': {'readonly': True}, - 'version': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'e_tag': {'key': 'eTag', 'type': 'str'}, - 'additional_properties': {'key': '', 'type': '{object}'}, - 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, - 'create_time': {'key': 'properties.createTime', 'type': 'iso-8601'}, - 'version': {'key': 'properties.version', 'type': 'str'}, - 'repo_configuration': {'key': 'properties.repoConfiguration', 'type': 'FactoryRepoConfiguration'}, - } - - def __init__(self, **kwargs): - super(Factory, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.identity = kwargs.get('identity', None) - self.provisioning_state = None - self.create_time = None - self.version = None - self.repo_configuration = kwargs.get('repo_configuration', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration.py deleted file mode 100644 index 02cec39d8313..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration.py +++ /dev/null @@ -1,58 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
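The Factory model just removed is the shape the factories operations accept and return. A short sketch of creating and listing factories, assuming a DataFactoryManagementClient and an msrestazure credential object (both from this package's public surface, not from this hunk):

    from azure.mgmt.datafactory import DataFactoryManagementClient
    from azure.mgmt.datafactory.models import Factory

    # 'credentials' is assumed to be e.g. a ServicePrincipalCredentials instance.
    client = DataFactoryManagementClient(credentials, "<subscription-id>")

    # location and tags are the writable Resource fields; provisioning_state,
    # create_time and version are read-only and filled in by the service.
    created = client.factories.create_or_update(
        "my-resource-group",
        "my-factory",
        Factory(location="eastus", tags={"env": "dev"}),
    )
    print(created.provisioning_state)  # e.g. "Succeeded"

    # list() pages through factories via the FactoryPaged container below.
    for factory in client.factories.list():
        print(factory.name)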
-# -------------------------------------------------------------------------- - -from .factory_repo_configuration import FactoryRepoConfiguration - - -class FactoryGitHubConfiguration(FactoryRepoConfiguration): - """Factory's GitHub repo information. - - All required parameters must be populated in order to send to Azure. - - :param account_name: Required. Account name. - :type account_name: str - :param repository_name: Required. Repository name. - :type repository_name: str - :param collaboration_branch: Required. Collaboration branch. - :type collaboration_branch: str - :param root_folder: Required. Root folder. - :type root_folder: str - :param last_commit_id: Last commit id. - :type last_commit_id: str - :param type: Required. Constant filled by server. - :type type: str - :param host_name: GitHub Enterprise host name. For example: - https://github.mydomain.com - :type host_name: str - """ - - _validation = { - 'account_name': {'required': True}, - 'repository_name': {'required': True}, - 'collaboration_branch': {'required': True}, - 'root_folder': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'repository_name': {'key': 'repositoryName', 'type': 'str'}, - 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, - 'root_folder': {'key': 'rootFolder', 'type': 'str'}, - 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host_name': {'key': 'hostName', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(FactoryGitHubConfiguration, self).__init__(**kwargs) - self.host_name = kwargs.get('host_name', None) - self.type = 'FactoryGitHubConfiguration' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration_py3.py deleted file mode 100644 index 23c5dbf21f0c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration_py3.py +++ /dev/null @@ -1,58 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .factory_repo_configuration_py3 import FactoryRepoConfiguration - - -class FactoryGitHubConfiguration(FactoryRepoConfiguration): - """Factory's GitHub repo information. - - All required parameters must be populated in order to send to Azure. - - :param account_name: Required. Account name. - :type account_name: str - :param repository_name: Required. Repository name. - :type repository_name: str - :param collaboration_branch: Required. Collaboration branch. - :type collaboration_branch: str - :param root_folder: Required. Root folder. - :type root_folder: str - :param last_commit_id: Last commit id. - :type last_commit_id: str - :param type: Required. Constant filled by server. - :type type: str - :param host_name: GitHub Enterprise host name. 
For example: - https://github.mydomain.com - :type host_name: str - """ - - _validation = { - 'account_name': {'required': True}, - 'repository_name': {'required': True}, - 'collaboration_branch': {'required': True}, - 'root_folder': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'repository_name': {'key': 'repositoryName', 'type': 'str'}, - 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, - 'root_folder': {'key': 'rootFolder', 'type': 'str'}, - 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host_name': {'key': 'hostName', 'type': 'str'}, - } - - def __init__(self, *, account_name: str, repository_name: str, collaboration_branch: str, root_folder: str, last_commit_id: str=None, host_name: str=None, **kwargs) -> None: - super(FactoryGitHubConfiguration, self).__init__(account_name=account_name, repository_name=repository_name, collaboration_branch=collaboration_branch, root_folder=root_folder, last_commit_id=last_commit_id, **kwargs) - self.host_name = host_name - self.type = 'FactoryGitHubConfiguration' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity.py deleted file mode 100644 index dad745424af3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity.py +++ /dev/null @@ -1,49 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class FactoryIdentity(Model): - """Identity properties of the factory resource. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. The identity type. Currently the only supported type - is 'SystemAssigned'. Default value: "SystemAssigned" . - :vartype type: str - :ivar principal_id: The principal id of the identity. - :vartype principal_id: str - :ivar tenant_id: The client tenant id of the identity. 
- :vartype tenant_id: str - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'principal_id': {'readonly': True}, - 'tenant_id': {'readonly': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'principal_id': {'key': 'principalId', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - } - - type = "SystemAssigned" - - def __init__(self, **kwargs): - super(FactoryIdentity, self).__init__(**kwargs) - self.principal_id = None - self.tenant_id = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity_py3.py deleted file mode 100644 index 567100d8c19e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity_py3.py +++ /dev/null @@ -1,49 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class FactoryIdentity(Model): - """Identity properties of the factory resource. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. The identity type. Currently the only supported type - is 'SystemAssigned'. Default value: "SystemAssigned" . - :vartype type: str - :ivar principal_id: The principal id of the identity. - :vartype principal_id: str - :ivar tenant_id: The client tenant id of the identity. - :vartype tenant_id: str - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'principal_id': {'readonly': True}, - 'tenant_id': {'readonly': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'principal_id': {'key': 'principalId', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - } - - type = "SystemAssigned" - - def __init__(self, **kwargs) -> None: - super(FactoryIdentity, self).__init__(**kwargs) - self.principal_id = None - self.tenant_id = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_paged.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_paged.py deleted file mode 100644 index 589b44defc56..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_paged.py +++ /dev/null @@ -1,27 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
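FactoryIdentity, deleted just above, is unusual in that its type is a class-level constant rather than a constructor argument. A brief sketch of how it is attached to a factory:

    from azure.mgmt.datafactory.models import Factory, FactoryIdentity

    # The identity type is fixed to "SystemAssigned", so the constructor takes
    # no arguments; principal_id and tenant_id are read-only and remain None
    # until the service populates them.
    factory = Factory(location="eastus", identity=FactoryIdentity())
    assert FactoryIdentity.type == "SystemAssigned"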
-# -------------------------------------------------------------------------- - -from msrest.paging import Paged - - -class FactoryPaged(Paged): - """ - A paging container for iterating over a list of :class:`Factory <azure.mgmt.datafactory.models.Factory>` object - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'current_page': {'key': 'value', 'type': '[Factory]'} - } - - def __init__(self, *args, **kwargs): - - super(FactoryPaged, self).__init__(*args, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_py3.py deleted file mode 100644 index 0682aa5f8852..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_py3.py +++ /dev/null @@ -1,81 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .resource_py3 import Resource - - -class Factory(Resource): - """Factory resource type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :param location: The resource location. - :type location: str - :param tags: The resource tags. - :type tags: dict[str, str] - :ivar e_tag: Etag identifies change in the resource. - :vartype e_tag: str - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param identity: Managed service identity of the factory. - :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity - :ivar provisioning_state: Factory provisioning state, example Succeeded. - :vartype provisioning_state: str - :ivar create_time: Time the factory was created in ISO8601 format. - :vartype create_time: datetime - :ivar version: Version of the factory. - :vartype version: str - :param repo_configuration: Git repo information of the factory.
- :type repo_configuration: - ~azure.mgmt.datafactory.models.FactoryRepoConfiguration - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'e_tag': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - 'create_time': {'readonly': True}, - 'version': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'e_tag': {'key': 'eTag', 'type': 'str'}, - 'additional_properties': {'key': '', 'type': '{object}'}, - 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, - 'create_time': {'key': 'properties.createTime', 'type': 'iso-8601'}, - 'version': {'key': 'properties.version', 'type': 'str'}, - 'repo_configuration': {'key': 'properties.repoConfiguration', 'type': 'FactoryRepoConfiguration'}, - } - - def __init__(self, *, location: str=None, tags=None, additional_properties=None, identity=None, repo_configuration=None, **kwargs) -> None: - super(Factory, self).__init__(location=location, tags=tags, **kwargs) - self.additional_properties = additional_properties - self.identity = identity - self.provisioning_state = None - self.create_time = None - self.version = None - self.repo_configuration = repo_configuration diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration.py deleted file mode 100644 index 7c20db016c71..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration.py +++ /dev/null @@ -1,65 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class FactoryRepoConfiguration(Model): - """Factory's git repo information. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: FactoryVSTSConfiguration, FactoryGitHubConfiguration - - All required parameters must be populated in order to send to Azure. - - :param account_name: Required. Account name. - :type account_name: str - :param repository_name: Required. Repository name. - :type repository_name: str - :param collaboration_branch: Required. Collaboration branch. - :type collaboration_branch: str - :param root_folder: Required. Root folder. - :type root_folder: str - :param last_commit_id: Last commit id. - :type last_commit_id: str - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'account_name': {'required': True}, - 'repository_name': {'required': True}, - 'collaboration_branch': {'required': True}, - 'root_folder': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'repository_name': {'key': 'repositoryName', 'type': 'str'}, - 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, - 'root_folder': {'key': 'rootFolder', 'type': 'str'}, - 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'FactoryVSTSConfiguration': 'FactoryVSTSConfiguration', 'FactoryGitHubConfiguration': 'FactoryGitHubConfiguration'} - } - - def __init__(self, **kwargs): - super(FactoryRepoConfiguration, self).__init__(**kwargs) - self.account_name = kwargs.get('account_name', None) - self.repository_name = kwargs.get('repository_name', None) - self.collaboration_branch = kwargs.get('collaboration_branch', None) - self.root_folder = kwargs.get('root_folder', None) - self.last_commit_id = kwargs.get('last_commit_id', None) - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration_py3.py deleted file mode 100644 index eefed7978850..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration_py3.py +++ /dev/null @@ -1,65 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class FactoryRepoConfiguration(Model): - """Factory's git repo information. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: FactoryVSTSConfiguration, FactoryGitHubConfiguration - - All required parameters must be populated in order to send to Azure. - - :param account_name: Required. Account name. - :type account_name: str - :param repository_name: Required. Repository name. - :type repository_name: str - :param collaboration_branch: Required. Collaboration branch. - :type collaboration_branch: str - :param root_folder: Required. Root folder. - :type root_folder: str - :param last_commit_id: Last commit id. - :type last_commit_id: str - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'account_name': {'required': True}, - 'repository_name': {'required': True}, - 'collaboration_branch': {'required': True}, - 'root_folder': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'repository_name': {'key': 'repositoryName', 'type': 'str'}, - 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, - 'root_folder': {'key': 'rootFolder', 'type': 'str'}, - 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'FactoryVSTSConfiguration': 'FactoryVSTSConfiguration', 'FactoryGitHubConfiguration': 'FactoryGitHubConfiguration'} - } - - def __init__(self, *, account_name: str, repository_name: str, collaboration_branch: str, root_folder: str, last_commit_id: str=None, **kwargs) -> None: - super(FactoryRepoConfiguration, self).__init__(**kwargs) - self.account_name = account_name - self.repository_name = repository_name - self.collaboration_branch = collaboration_branch - self.root_folder = root_folder - self.last_commit_id = last_commit_id - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update.py deleted file mode 100644 index 44eac9d287ce..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update.py +++ /dev/null @@ -1,33 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class FactoryRepoUpdate(Model): - """Factory's git repo information. - - :param factory_resource_id: The factory resource id. - :type factory_resource_id: str - :param repo_configuration: Git repo information of the factory. - :type repo_configuration: - ~azure.mgmt.datafactory.models.FactoryRepoConfiguration - """ - - _attribute_map = { - 'factory_resource_id': {'key': 'factoryResourceId', 'type': 'str'}, - 'repo_configuration': {'key': 'repoConfiguration', 'type': 'FactoryRepoConfiguration'}, - } - - def __init__(self, **kwargs): - super(FactoryRepoUpdate, self).__init__(**kwargs) - self.factory_resource_id = kwargs.get('factory_resource_id', None) - self.repo_configuration = kwargs.get('repo_configuration', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update_py3.py deleted file mode 100644 index 68aca7a48db8..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update_py3.py +++ /dev/null @@ -1,33 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
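The FactoryRepoUpdate shape above simply pairs a factory resource id with one of the FactoryRepoConfiguration subclasses. A sketch using the FactoryGitHubConfiguration fields shown earlier (the resource id and repo names are placeholders):

    from azure.mgmt.datafactory.models import (
        FactoryGitHubConfiguration,
        FactoryRepoUpdate,
    )

    update = FactoryRepoUpdate(
        factory_resource_id=(
            "/subscriptions/<sub-id>/resourceGroups/my-rg"
            "/providers/Microsoft.DataFactory/factories/my-factory"
        ),
        repo_configuration=FactoryGitHubConfiguration(
            account_name="my-org",
            repository_name="adf-pipelines",
            collaboration_branch="master",
            root_folder="/",
        ),
    )

In this generation of the SDK such an update is what client.factories.configure_factory_repo expects alongside the factory's location; that operation lives elsewhere in the package, so treat its exact signature as an assumption rather than something this hunk shows.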
-# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class FactoryRepoUpdate(Model): - """Factory's git repo information. - - :param factory_resource_id: The factory resource id. - :type factory_resource_id: str - :param repo_configuration: Git repo information of the factory. - :type repo_configuration: - ~azure.mgmt.datafactory.models.FactoryRepoConfiguration - """ - - _attribute_map = { - 'factory_resource_id': {'key': 'factoryResourceId', 'type': 'str'}, - 'repo_configuration': {'key': 'repoConfiguration', 'type': 'FactoryRepoConfiguration'}, - } - - def __init__(self, *, factory_resource_id: str=None, repo_configuration=None, **kwargs) -> None: - super(FactoryRepoUpdate, self).__init__(**kwargs) - self.factory_resource_id = factory_resource_id - self.repo_configuration = repo_configuration diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters.py deleted file mode 100644 index e9977fceff86..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters.py +++ /dev/null @@ -1,32 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class FactoryUpdateParameters(Model): - """Parameters for updating a factory resource. - - :param tags: The resource tags. - :type tags: dict[str, str] - :param identity: Managed service identity of the factory. - :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity - """ - - _attribute_map = { - 'tags': {'key': 'tags', 'type': '{str}'}, - 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, - } - - def __init__(self, **kwargs): - super(FactoryUpdateParameters, self).__init__(**kwargs) - self.tags = kwargs.get('tags', None) - self.identity = kwargs.get('identity', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters_py3.py deleted file mode 100644 index 5bd523fedf3d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters_py3.py +++ /dev/null @@ -1,32 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
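FactoryUpdateParameters, shown above, deliberately exposes only tags and identity. A sketch of a patch-style update, reusing the client from the earlier sketch and assuming the factories.update operation from this package accepts this shape (the operation itself is not part of this hunk):

    from azure.mgmt.datafactory.models import FactoryUpdateParameters

    # Only tags and identity can be changed through this model; all other
    # factory properties require a full create_or_update.
    updated = client.factories.update(
        "my-resource-group",
        "my-factory",
        FactoryUpdateParameters(tags={"env": "prod", "owner": "data-team"}),
    )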
-# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class FactoryUpdateParameters(Model): - """Parameters for updating a factory resource. - - :param tags: The resource tags. - :type tags: dict[str, str] - :param identity: Managed service identity of the factory. - :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity - """ - - _attribute_map = { - 'tags': {'key': 'tags', 'type': '{str}'}, - 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, - } - - def __init__(self, *, tags=None, identity=None, **kwargs) -> None: - super(FactoryUpdateParameters, self).__init__(**kwargs) - self.tags = tags - self.identity = identity diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration.py deleted file mode 100644 index 6d07c68d23e3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration.py +++ /dev/null @@ -1,62 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .factory_repo_configuration import FactoryRepoConfiguration - - -class FactoryVSTSConfiguration(FactoryRepoConfiguration): - """Factory's VSTS repo information. - - All required parameters must be populated in order to send to Azure. - - :param account_name: Required. Account name. - :type account_name: str - :param repository_name: Required. Repository name. - :type repository_name: str - :param collaboration_branch: Required. Collaboration branch. - :type collaboration_branch: str - :param root_folder: Required. Root folder. - :type root_folder: str - :param last_commit_id: Last commit id. - :type last_commit_id: str - :param type: Required. Constant filled by server. - :type type: str - :param project_name: Required. VSTS project name. - :type project_name: str - :param tenant_id: VSTS tenant id. 
- :type tenant_id: str - """ - - _validation = { - 'account_name': {'required': True}, - 'repository_name': {'required': True}, - 'collaboration_branch': {'required': True}, - 'root_folder': {'required': True}, - 'type': {'required': True}, - 'project_name': {'required': True}, - } - - _attribute_map = { - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'repository_name': {'key': 'repositoryName', 'type': 'str'}, - 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, - 'root_folder': {'key': 'rootFolder', 'type': 'str'}, - 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'project_name': {'key': 'projectName', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(FactoryVSTSConfiguration, self).__init__(**kwargs) - self.project_name = kwargs.get('project_name', None) - self.tenant_id = kwargs.get('tenant_id', None) - self.type = 'FactoryVSTSConfiguration' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration_py3.py deleted file mode 100644 index 4f13c0959d63..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration_py3.py +++ /dev/null @@ -1,62 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .factory_repo_configuration_py3 import FactoryRepoConfiguration - - -class FactoryVSTSConfiguration(FactoryRepoConfiguration): - """Factory's VSTS repo information. - - All required parameters must be populated in order to send to Azure. - - :param account_name: Required. Account name. - :type account_name: str - :param repository_name: Required. Repository name. - :type repository_name: str - :param collaboration_branch: Required. Collaboration branch. - :type collaboration_branch: str - :param root_folder: Required. Root folder. - :type root_folder: str - :param last_commit_id: Last commit id. - :type last_commit_id: str - :param type: Required. Constant filled by server. - :type type: str - :param project_name: Required. VSTS project name. - :type project_name: str - :param tenant_id: VSTS tenant id. 
- :type tenant_id: str - """ - - _validation = { - 'account_name': {'required': True}, - 'repository_name': {'required': True}, - 'collaboration_branch': {'required': True}, - 'root_folder': {'required': True}, - 'type': {'required': True}, - 'project_name': {'required': True}, - } - - _attribute_map = { - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'repository_name': {'key': 'repositoryName', 'type': 'str'}, - 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, - 'root_folder': {'key': 'rootFolder', 'type': 'str'}, - 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'project_name': {'key': 'projectName', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - } - - def __init__(self, *, account_name: str, repository_name: str, collaboration_branch: str, root_folder: str, project_name: str, last_commit_id: str=None, tenant_id: str=None, **kwargs) -> None: - super(FactoryVSTSConfiguration, self).__init__(account_name=account_name, repository_name=repository_name, collaboration_branch=collaboration_branch, root_folder=root_folder, last_commit_id=last_commit_id, **kwargs) - self.project_name = project_name - self.tenant_id = tenant_id - self.type = 'FactoryVSTSConfiguration' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service.py deleted file mode 100644 index ffced5c2e689..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service.py +++ /dev/null @@ -1,74 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class FileServerLinkedService(LinkedService): - """File system linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. Host name of the server. Type: string (or - Expression with resultType string). - :type host: object - :param user_id: User ID to logon the server. Type: string (or Expression - with resultType string). - :type user_id: object - :param password: Password to logon the server. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. 
Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'user_id': {'key': 'typeProperties.userId', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(FileServerLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.user_id = kwargs.get('user_id', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'FileServer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service_py3.py deleted file mode 100644 index ec6fe58bb3a3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service_py3.py +++ /dev/null @@ -1,74 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class FileServerLinkedService(LinkedService): - """File system linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. Host name of the server. Type: string (or - Expression with resultType string). - :type host: object - :param user_id: User ID to logon the server. Type: string (or Expression - with resultType string). - :type user_id: object - :param password: Password to logon the server. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. 
Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'user_id': {'key': 'typeProperties.userId', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_id=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(FileServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.user_id = user_id - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'FileServer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location.py deleted file mode 100644 index edce5fe68a65..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_location import DatasetLocation - - -class FileServerLocation(DatasetLocation): - """The location of file server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). 
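A minimal usage sketch for the FileServerLinkedService model deleted above, assuming the SecureString secret type from the same models package; the host and credential values are placeholders:

    from azure.mgmt.datafactory.models import FileServerLinkedService, SecureString

    # Hypothetical on-premises file server; host is the only required type property.
    file_server_ls = FileServerLinkedService(
        host='\\\\fileserver01\\share',
        user_id='CONTOSO\\svc_adf',
        password=SecureString(value='<placeholder-password>'),
    )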
- :type file_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(FileServerLocation, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location_py3.py deleted file mode 100644 index f7fb8354bcbc..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location_py3.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_location_py3 import DatasetLocation - - -class FileServerLocation(DatasetLocation): - """The location of file server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). - :type file_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: - super(FileServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings.py deleted file mode 100644 index da9d0809e03a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from .store_read_settings import StoreReadSettings - - -class FileServerReadSettings(StoreReadSettings): - """File server read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - :param wildcard_folder_path: FileServer wildcardFolderPath. Type: string - (or Expression with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: FileServer wildcardFileName. Type: string (or - Expression with resultType string). - :type wildcard_file_name: object - :param enable_partition_discovery: Indicates whether to enable partition - discovery. - :type enable_partition_discovery: bool - :param modified_datetime_start: The start of file's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: - string (or Expression with resultType string). - :type modified_datetime_end: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(FileServerReadSettings, self).__init__(**kwargs) - self.recursive = kwargs.get('recursive', None) - self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) - self.wildcard_file_name = kwargs.get('wildcard_file_name', None) - self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings_py3.py deleted file mode 100644 index 1fadb49b1795..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings_py3.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .store_read_settings_py3 import StoreReadSettings - - -class FileServerReadSettings(StoreReadSettings): - """File server read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - :param wildcard_folder_path: FileServer wildcardFolderPath. Type: string - (or Expression with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: FileServer wildcardFileName. Type: string (or - Expression with resultType string). - :type wildcard_file_name: object - :param enable_partition_discovery: Indicates whether to enable partition - discovery. - :type enable_partition_discovery: bool - :param modified_datetime_start: The start of file's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: - string (or Expression with resultType string). 
- :type modified_datetime_end: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: - super(FileServerReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.recursive = recursive - self.wildcard_folder_path = wildcard_folder_path - self.wildcard_file_name = wildcard_file_name - self.enable_partition_discovery = enable_partition_discovery - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings.py deleted file mode 100644 index f254e46452de..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .store_write_settings import StoreWriteSettings - - -class FileServerWriteSettings(StoreWriteSettings): - """File server write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param copy_behavior: The type of copy behavior for copy sink. 
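A minimal sketch of the FileServerReadSettings model removed above, as it would be attached to a file-based copy source; the folder, wildcard, and time-window values are hypothetical:

    from azure.mgmt.datafactory.models import FileServerReadSettings

    # Hypothetical wildcard read: *.csv under a dated folder, filtered by modified time.
    read_settings = FileServerReadSettings(
        type='FileServerReadSettings',
        recursive=True,
        wildcard_folder_path='landing/2019/*',
        wildcard_file_name='*.csv',
        modified_datetime_start='2019-06-01T00:00:00Z',
    )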
- :type copy_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(FileServerWriteSettings, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings_py3.py deleted file mode 100644 index 26a48aca46f6..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings_py3.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .store_write_settings_py3 import StoreWriteSettings - - -class FileServerWriteSettings(StoreWriteSettings): - """File server write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: - super(FileServerWriteSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset.py deleted file mode 100644 index 6874f4c08929..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset.py +++ /dev/null @@ -1,101 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. 
-# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class FileShareDataset(Dataset): - """An on-premises file system dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param folder_path: The path of the on-premises file system. Type: string - (or Expression with resultType string). - :type folder_path: object - :param file_name: The name of the on-premises file system. Type: string - (or Expression with resultType string). - :type file_name: object - :param modified_datetime_start: The start of file's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: - string (or Expression with resultType string). - :type modified_datetime_end: object - :param format: The format of the files. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param file_filter: Specify a filter to be used to select a subset of - files in the folderPath rather than all files. Type: string (or Expression - with resultType string). - :type file_filter: object - :param compression: The data compression method used for the file system. 
- :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, - 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'file_filter': {'key': 'typeProperties.fileFilter', 'type': 'object'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, **kwargs): - super(FileShareDataset, self).__init__(**kwargs) - self.folder_path = kwargs.get('folder_path', None) - self.file_name = kwargs.get('file_name', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) - self.format = kwargs.get('format', None) - self.file_filter = kwargs.get('file_filter', None) - self.compression = kwargs.get('compression', None) - self.type = 'FileShare' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset_py3.py deleted file mode 100644 index 19e88a264e12..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset_py3.py +++ /dev/null @@ -1,101 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class FileShareDataset(Dataset): - """An on-premises file system dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param folder_path: The path of the on-premises file system. Type: string - (or Expression with resultType string). - :type folder_path: object - :param file_name: The name of the on-premises file system. Type: string - (or Expression with resultType string). - :type file_name: object - :param modified_datetime_start: The start of file's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: - string (or Expression with resultType string). - :type modified_datetime_end: object - :param format: The format of the files. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param file_filter: Specify a filter to be used to select a subset of - files in the folderPath rather than all files. Type: string (or Expression - with resultType string). - :type file_filter: object - :param compression: The data compression method used for the file system. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, - 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'file_filter': {'key': 'typeProperties.fileFilter', 'type': 'object'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, modified_datetime_start=None, modified_datetime_end=None, format=None, file_filter=None, compression=None, **kwargs) -> None: - super(FileShareDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.folder_path = folder_path - self.file_name = file_name 
- self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end - self.format = format - self.file_filter = file_filter - self.compression = compression - self.type = 'FileShare' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py deleted file mode 100644 index 8b8f238c9534..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py +++ /dev/null @@ -1,65 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink import CopySink - - -class FileSystemSink(CopySink): - """A copy activity file system sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param copy_behavior: The type of copy behavior for copy sink. 
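A minimal sketch constructing the FileShareDataset model deleted above; the linked service reference name, folder path, and file filter are hypothetical:

    from azure.mgmt.datafactory.models import FileShareDataset, LinkedServiceReference

    # Hypothetical dataset over an on-premises file share.
    file_share_ds = FileShareDataset(
        linked_service_name=LinkedServiceReference(reference_name='FileServerLS'),
        folder_path='landing/archive',
        file_filter='*.csv',   # select a subset of files in folderPath
    )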
- :type copy_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(FileSystemSink, self).__init__(**kwargs) - self.copy_behavior = kwargs.get('copy_behavior', None) - self.type = 'FileSystemSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py deleted file mode 100644 index 24f8623cbb02..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py +++ /dev/null @@ -1,65 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class FileSystemSink(CopySink): - """A copy activity file system sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param copy_behavior: The type of copy behavior for copy sink. 
- :type copy_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: - super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.copy_behavior = copy_behavior - self.type = 'FileSystemSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source.py deleted file mode 100644 index 2986b1848153..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source.py +++ /dev/null @@ -1,58 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class FileSystemSource(CopySource): - """A copy activity file system source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). 
- :type recursive: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(FileSystemSource, self).__init__(**kwargs) - self.recursive = kwargs.get('recursive', None) - self.type = 'FileSystemSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source_py3.py deleted file mode 100644 index 0598490ca51c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source_py3.py +++ /dev/null @@ -1,58 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class FileSystemSource(CopySource): - """A copy activity file system source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). 
- :type recursive: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, **kwargs) -> None: - super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.recursive = recursive - self.type = 'FileSystemSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity.py deleted file mode 100644 index 1346bb234695..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity.py +++ /dev/null @@ -1,61 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .control_activity import ControlActivity - - -class FilterActivity(ControlActivity): - """Filter and return results from input array based on the conditions. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param items: Required. Input array on which filter should be applied. - :type items: ~azure.mgmt.datafactory.models.Expression - :param condition: Required. Condition to be used for filtering the input. 
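A minimal sketch pairing the FileSystemSource and FileSystemSink models removed above inside a copy activity; CopyActivity and DatasetReference come from the same models package, and the activity and dataset reference names are hypothetical:

    from azure.mgmt.datafactory.models import (
        CopyActivity, DatasetReference, FileSystemSink, FileSystemSource,
    )

    # Hypothetical copy between two file-system datasets defined elsewhere.
    copy_files = CopyActivity(
        name='CopyArchive',
        inputs=[DatasetReference(reference_name='SourceFiles')],
        outputs=[DatasetReference(reference_name='DestFiles')],
        source=FileSystemSource(recursive=True),
        sink=FileSystemSink(copy_behavior='PreserveHierarchy'),
    )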
- :type condition: ~azure.mgmt.datafactory.models.Expression - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'items': {'required': True}, - 'condition': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'items': {'key': 'typeProperties.items', 'type': 'Expression'}, - 'condition': {'key': 'typeProperties.condition', 'type': 'Expression'}, - } - - def __init__(self, **kwargs): - super(FilterActivity, self).__init__(**kwargs) - self.items = kwargs.get('items', None) - self.condition = kwargs.get('condition', None) - self.type = 'Filter' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity_py3.py deleted file mode 100644 index a07cf01d1dd5..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity_py3.py +++ /dev/null @@ -1,61 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .control_activity_py3 import ControlActivity - - -class FilterActivity(ControlActivity): - """Filter and return results from input array based on the conditions. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param items: Required. Input array on which filter should be applied. - :type items: ~azure.mgmt.datafactory.models.Expression - :param condition: Required. Condition to be used for filtering the input. 
- :type condition: ~azure.mgmt.datafactory.models.Expression - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'items': {'required': True}, - 'condition': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'items': {'key': 'typeProperties.items', 'type': 'Expression'}, - 'condition': {'key': 'typeProperties.condition', 'type': 'Expression'}, - } - - def __init__(self, *, name: str, items, condition, additional_properties=None, description: str=None, depends_on=None, user_properties=None, **kwargs) -> None: - super(FilterActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.items = items - self.condition = condition - self.type = 'Filter' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity.py deleted file mode 100644 index 5edfa2a8140e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .control_activity import ControlActivity - - -class ForEachActivity(ControlActivity): - """This activity is used for iterating over a collection and execute given - activities. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param is_sequential: Should the loop be executed in sequence or in - parallel (max 50) - :type is_sequential: bool - :param batch_count: Batch count to be used for controlling the number of - parallel execution (when isSequential is set to false). - :type batch_count: int - :param items: Required. Collection to iterate. - :type items: ~azure.mgmt.datafactory.models.Expression - :param activities: Required. List of activities to execute . 
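A minimal sketch for the FilterActivity model removed above, assuming the Expression model from the same package; the activity names and pipeline expressions are hypothetical:

    from azure.mgmt.datafactory.models import Expression, FilterActivity

    # Hypothetical filter: keep only .csv entries from a Get Metadata childItems array.
    filter_csv = FilterActivity(
        name='FilterCsvFiles',
        items=Expression(value="@activity('GetFileList').output.childItems"),
        condition=Expression(value="@endswith(item().name, '.csv')"),
    )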
- :type activities: list[~azure.mgmt.datafactory.models.Activity] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'batch_count': {'maximum': 50}, - 'items': {'required': True}, - 'activities': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'is_sequential': {'key': 'typeProperties.isSequential', 'type': 'bool'}, - 'batch_count': {'key': 'typeProperties.batchCount', 'type': 'int'}, - 'items': {'key': 'typeProperties.items', 'type': 'Expression'}, - 'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'}, - } - - def __init__(self, **kwargs): - super(ForEachActivity, self).__init__(**kwargs) - self.is_sequential = kwargs.get('is_sequential', None) - self.batch_count = kwargs.get('batch_count', None) - self.items = kwargs.get('items', None) - self.activities = kwargs.get('activities', None) - self.type = 'ForEach' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity_py3.py deleted file mode 100644 index 7c5c887bb1d9..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity_py3.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .control_activity_py3 import ControlActivity - - -class ForEachActivity(ControlActivity): - """This activity is used for iterating over a collection and execute given - activities. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param is_sequential: Should the loop be executed in sequence or in - parallel (max 50) - :type is_sequential: bool - :param batch_count: Batch count to be used for controlling the number of - parallel execution (when isSequential is set to false). - :type batch_count: int - :param items: Required. Collection to iterate. - :type items: ~azure.mgmt.datafactory.models.Expression - :param activities: Required. List of activities to execute . 
- :type activities: list[~azure.mgmt.datafactory.models.Activity] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'batch_count': {'maximum': 50}, - 'items': {'required': True}, - 'activities': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'is_sequential': {'key': 'typeProperties.isSequential', 'type': 'bool'}, - 'batch_count': {'key': 'typeProperties.batchCount', 'type': 'int'}, - 'items': {'key': 'typeProperties.items', 'type': 'Expression'}, - 'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'}, - } - - def __init__(self, *, name: str, items, activities, additional_properties=None, description: str=None, depends_on=None, user_properties=None, is_sequential: bool=None, batch_count: int=None, **kwargs) -> None: - super(ForEachActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.is_sequential = is_sequential - self.batch_count = batch_count - self.items = items - self.activities = activities - self.type = 'ForEach' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings.py deleted file mode 100644 index d5213138b96a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings.py +++ /dev/null @@ -1,39 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class FormatReadSettings(Model): - """Format read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. 
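The two modules above are the Python 2 (kwargs-based) and Python 3 (keyword-only) flavors of the same ForEachActivity model; both serialize to the same typeProperties payload. A minimal sketch of wiring one up against this models package (the activity names and pipeline expression are illustrative, and ExecutePipelineActivity/PipelineReference are just one possible inner activity):

from azure.mgmt.datafactory.models import (
    ExecutePipelineActivity, Expression, ForEachActivity, PipelineReference)

# Fan out over a pipeline-parameter array. batch_count is capped at 50 by
# the validation map above and only applies when is_sequential is False.
loop = ForEachActivity(
    name='CopyEachTable',  # hypothetical activity name
    items=Expression(value="@pipeline().parameters.tableNames"),
    is_sequential=False,
    batch_count=10,
    activities=[
        ExecutePipelineActivity(
            name='CopyOneTable',
            pipeline=PipelineReference(reference_name='CopyTablePipeline')),
    ],
)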
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings.py
deleted file mode 100644
index d5213138b96a..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class FormatReadSettings(Model):
-    """Format read settings.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param type: Required. The read setting type.
-    :type type: str
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'type': {'key': 'type', 'type': 'str'},
-    }
-
-    def __init__(self, **kwargs):
-        super(FormatReadSettings, self).__init__(**kwargs)
-        self.additional_properties = kwargs.get('additional_properties', None)
-        self.type = kwargs.get('type', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings_py3.py
deleted file mode 100644
index 326da0277b89..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings_py3.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class FormatReadSettings(Model):
-    """Format read settings.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param type: Required. The read setting type.
-    :type type: str
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'type': {'key': 'type', 'type': 'str'},
-    }
-
-    def __init__(self, *, type: str, additional_properties=None, **kwargs) -> None:
-        super(FormatReadSettings, self).__init__(**kwargs)
-        self.additional_properties = additional_properties
-        self.type = type
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings.py
deleted file mode 100644
index 2100c6055d0d..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class FormatWriteSettings(Model):
-    """Format write settings.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param type: Required. The write setting type.
-    :type type: str
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'type': {'key': 'type', 'type': 'str'},
-    }
-
-    def __init__(self, **kwargs):
-        super(FormatWriteSettings, self).__init__(**kwargs)
-        self.additional_properties = kwargs.get('additional_properties', None)
-        self.type = kwargs.get('type', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings_py3.py
deleted file mode 100644
index 4150eceffc1c..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings_py3.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class FormatWriteSettings(Model):
-    """Format write settings.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param type: Required. The write setting type.
-    :type type: str
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'type': {'key': 'type', 'type': 'str'},
-    }
-
-    def __init__(self, *, type: str, additional_properties=None, **kwargs) -> None:
-        super(FormatWriteSettings, self).__init__(**kwargs)
-        self.additional_properties = additional_properties
-        self.type = type
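FormatReadSettings and FormatWriteSettings are thin polymorphic bases: type is the discriminator the service switches on, and any keys the client does not model ride along in additional_properties. A sketch under that assumption (the discriminator value and extra key below are illustrative, not taken from this patch):

from azure.mgmt.datafactory.models import FormatReadSettings

settings = FormatReadSettings(
    type='DelimitedTextReadSettings',  # illustrative discriminator value
    additional_properties={'skipLineCount': 2},  # unmatched keys pass through
)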
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings.py
deleted file mode 100644
index e023f9ae91f7..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .store_read_settings import StoreReadSettings
-
-
-class FtpReadSettings(StoreReadSettings):
-    """Ftp read settings.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param type: Required. The read setting type.
-    :type type: str
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the source data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param recursive: If true, files under the folder path will be read
-     recursively. Default is true. Type: boolean (or Expression with resultType
-     boolean).
-    :type recursive: object
-    :param wildcard_folder_path: Ftp wildcardFolderPath. Type: string (or
-     Expression with resultType string).
-    :type wildcard_folder_path: object
-    :param wildcard_file_name: Ftp wildcardFileName. Type: string (or
-     Expression with resultType string).
-    :type wildcard_file_name: object
-    :param use_binary_transfer: Specify whether to use binary transfer mode
-     for FTP stores.
-    :type use_binary_transfer: bool
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'type': {'key': 'type', 'type': 'str'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'recursive': {'key': 'recursive', 'type': 'object'},
-        'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
-        'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
-        'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'},
-    }
-
-    def __init__(self, **kwargs):
-        super(FtpReadSettings, self).__init__(**kwargs)
-        self.recursive = kwargs.get('recursive', None)
-        self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None)
-        self.wildcard_file_name = kwargs.get('wildcard_file_name', None)
-        self.use_binary_transfer = kwargs.get('use_binary_transfer', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings_py3.py
deleted file mode 100644
index 748d306307ac..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings_py3.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .store_read_settings_py3 import StoreReadSettings
-
-
-class FtpReadSettings(StoreReadSettings):
-    """Ftp read settings.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param type: Required. The read setting type.
-    :type type: str
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the source data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param recursive: If true, files under the folder path will be read
-     recursively. Default is true. Type: boolean (or Expression with resultType
-     boolean).
-    :type recursive: object
-    :param wildcard_folder_path: Ftp wildcardFolderPath. Type: string (or
-     Expression with resultType string).
-    :type wildcard_folder_path: object
-    :param wildcard_file_name: Ftp wildcardFileName. Type: string (or
-     Expression with resultType string).
-    :type wildcard_file_name: object
-    :param use_binary_transfer: Specify whether to use binary transfer mode
-     for FTP stores.
-    :type use_binary_transfer: bool
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'type': {'key': 'type', 'type': 'str'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'recursive': {'key': 'recursive', 'type': 'object'},
-        'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
-        'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
-        'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'},
-    }
-
-    def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, use_binary_transfer: bool=None, **kwargs) -> None:
-        super(FtpReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs)
-        self.recursive = recursive
-        self.wildcard_folder_path = wildcard_folder_path
-        self.wildcard_file_name = wildcard_file_name
-        self.use_binary_transfer = use_binary_transfer
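FtpReadSettings layers FTP-specific knobs on top of the StoreReadSettings base. A minimal sketch (the paths are hypothetical; recursive defaults to true on the service side, so it is left unset):

from azure.mgmt.datafactory.models import FtpReadSettings

read_settings = FtpReadSettings(
    type='FtpReadSettings',
    wildcard_folder_path='exports/2019/*',  # hypothetical folder pattern
    wildcard_file_name='*.csv',
    use_binary_transfer=True,  # plain bool, not an Expression, per the map above
)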
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service.py
deleted file mode 100644
index e649ca56e37c..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service.py
+++ /dev/null
@@ -1,98 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .linked_service import LinkedService
-
-
-class FtpServerLinkedService(LinkedService):
-    """A FTP server Linked Service.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param host: Required. Host name of the FTP server. Type: string (or
-     Expression with resultType string).
-    :type host: object
-    :param port: The TCP port number that the FTP server uses to listen for
-     client connections. Default value is 21. Type: integer (or Expression with
-     resultType integer), minimum: 0.
-    :type port: object
-    :param authentication_type: The authentication type to be used to connect
-     to the FTP server. Possible values include: 'Basic', 'Anonymous'
-    :type authentication_type: str or
-     ~azure.mgmt.datafactory.models.FtpAuthenticationType
-    :param user_name: Username to logon the FTP server. Type: string (or
-     Expression with resultType string).
-    :type user_name: object
-    :param password: Password to logon the FTP server.
-    :type password: ~azure.mgmt.datafactory.models.SecretBase
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
-    :type encrypted_credential: object
-    :param enable_ssl: If true, connect to the FTP server over SSL/TLS
-     channel. Default value is true. Type: boolean (or Expression with
-     resultType boolean).
-    :type enable_ssl: object
-    :param enable_server_certificate_validation: If true, validate the FTP
-     server SSL certificate when connect over SSL/TLS channel. Default value is
-     true. Type: boolean (or Expression with resultType boolean).
-    :type enable_server_certificate_validation: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-        'host': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
-        'description': {'key': 'description', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'host': {'key': 'typeProperties.host', 'type': 'object'},
-        'port': {'key': 'typeProperties.port', 'type': 'object'},
-        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
-        'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
-        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
-        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
-        'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'},
-        'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(FtpServerLinkedService, self).__init__(**kwargs)
-        self.host = kwargs.get('host', None)
-        self.port = kwargs.get('port', None)
-        self.authentication_type = kwargs.get('authentication_type', None)
-        self.user_name = kwargs.get('user_name', None)
-        self.password = kwargs.get('password', None)
-        self.encrypted_credential = kwargs.get('encrypted_credential', None)
-        self.enable_ssl = kwargs.get('enable_ssl', None)
-        self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None)
-        self.type = 'FtpServer'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service_py3.py
deleted file mode 100644
index b38ad1c03f46..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service_py3.py
+++ /dev/null
@@ -1,98 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .linked_service_py3 import LinkedService
-
-
-class FtpServerLinkedService(LinkedService):
-    """A FTP server Linked Service.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param host: Required. Host name of the FTP server. Type: string (or
-     Expression with resultType string).
-    :type host: object
-    :param port: The TCP port number that the FTP server uses to listen for
-     client connections. Default value is 21. Type: integer (or Expression with
-     resultType integer), minimum: 0.
-    :type port: object
-    :param authentication_type: The authentication type to be used to connect
-     to the FTP server. Possible values include: 'Basic', 'Anonymous'
-    :type authentication_type: str or
-     ~azure.mgmt.datafactory.models.FtpAuthenticationType
-    :param user_name: Username to logon the FTP server. Type: string (or
-     Expression with resultType string).
-    :type user_name: object
-    :param password: Password to logon the FTP server.
-    :type password: ~azure.mgmt.datafactory.models.SecretBase
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
-    :type encrypted_credential: object
-    :param enable_ssl: If true, connect to the FTP server over SSL/TLS
-     channel. Default value is true. Type: boolean (or Expression with
-     resultType boolean).
-    :type enable_ssl: object
-    :param enable_server_certificate_validation: If true, validate the FTP
-     server SSL certificate when connect over SSL/TLS channel. Default value is
-     true. Type: boolean (or Expression with resultType boolean).
-    :type enable_server_certificate_validation: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-        'host': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
-        'description': {'key': 'description', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'host': {'key': 'typeProperties.host', 'type': 'object'},
-        'port': {'key': 'typeProperties.port', 'type': 'object'},
-        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
-        'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
-        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
-        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
-        'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'},
-        'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'},
-    }
-
-    def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, enable_ssl=None, enable_server_certificate_validation=None, **kwargs) -> None:
-        super(FtpServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
-        self.host = host
-        self.port = port
-        self.authentication_type = authentication_type
-        self.user_name = user_name
-        self.password = password
-        self.encrypted_credential = encrypted_credential
-        self.enable_ssl = enable_ssl
-        self.enable_server_certificate_validation = enable_server_certificate_validation
-        self.type = 'FtpServer'
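A sketch of the linked service above using Basic authentication; the host and credentials are placeholders, and SecureString is the in-line SecretBase implementation this package provides. enable_ssl and enable_server_certificate_validation both default to true, so they are omitted:

from azure.mgmt.datafactory.models import FtpServerLinkedService, SecureString

ftp = FtpServerLinkedService(
    host='ftp.example.com',  # placeholder host
    port=21,
    authentication_type='Basic',
    user_name='deploy',
    password=SecureString(value='<password>'),
)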
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location.py
deleted file mode 100644
index 5d5e933036df..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .dataset_location import DatasetLocation
-
-
-class FtpServerLocation(DatasetLocation):
-    """The location of ftp server dataset.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param type: Required. Type of dataset storage location.
-    :type type: str
-    :param folder_path: Specify the folder path of dataset. Type: string (or
-     Expression with resultType string)
-    :type folder_path: object
-    :param file_name: Specify the file name of dataset. Type: string (or
-     Expression with resultType string).
-    :type file_name: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'type': {'key': 'type', 'type': 'str'},
-        'folder_path': {'key': 'folderPath', 'type': 'object'},
-        'file_name': {'key': 'fileName', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(FtpServerLocation, self).__init__(**kwargs)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location_py3.py
deleted file mode 100644
index ac296bcfca31..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location_py3.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .dataset_location_py3 import DatasetLocation
-
-
-class FtpServerLocation(DatasetLocation):
-    """The location of ftp server dataset.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param type: Required. Type of dataset storage location.
-    :type type: str
-    :param folder_path: Specify the folder path of dataset. Type: string (or
-     Expression with resultType string)
-    :type folder_path: object
-    :param file_name: Specify the file name of dataset. Type: string (or
-     Expression with resultType string).
-    :type file_name: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'type': {'key': 'type', 'type': 'str'},
-        'folder_path': {'key': 'folderPath', 'type': 'object'},
-        'file_name': {'key': 'fileName', 'type': 'object'},
-    }
-
-    def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None:
-        super(FtpServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs)
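FtpServerLocation adds nothing beyond the DatasetLocation base; it exists so the location type round-trips through serialization. A short sketch with illustrative values:

from azure.mgmt.datafactory.models import FtpServerLocation

location = FtpServerLocation(
    type='FtpServerLocation',
    folder_path='outbound/daily',  # illustrative folder
    file_name='orders.csv',
)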
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity.py
deleted file mode 100644
index 7941189f2dcd..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity.py
+++ /dev/null
@@ -1,67 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .execution_activity import ExecutionActivity
-
-
-class GetMetadataActivity(ExecutionActivity):
-    """Activity to get metadata of dataset.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param name: Required. Activity name.
-    :type name: str
-    :param description: Activity description.
-    :type description: str
-    :param depends_on: Activity depends on condition.
-    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
-    :param user_properties: Activity user properties.
-    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param linked_service_name: Linked service reference.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param policy: Activity policy.
-    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
-    :param dataset: Required. GetMetadata activity dataset reference.
-    :type dataset: ~azure.mgmt.datafactory.models.DatasetReference
-    :param field_list: Fields of metadata to get from dataset.
-    :type field_list: list[object]
-    """
-
-    _validation = {
-        'name': {'required': True},
-        'type': {'required': True},
-        'dataset': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'name': {'key': 'name', 'type': 'str'},
-        'description': {'key': 'description', 'type': 'str'},
-        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
-        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
-        'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'},
-        'field_list': {'key': 'typeProperties.fieldList', 'type': '[object]'},
-    }
-
-    def __init__(self, **kwargs):
-        super(GetMetadataActivity, self).__init__(**kwargs)
-        self.dataset = kwargs.get('dataset', None)
-        self.field_list = kwargs.get('field_list', None)
-        self.type = 'GetMetadata'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity_py3.py
deleted file mode 100644
index b4d8eb17cab1..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity_py3.py
+++ /dev/null
@@ -1,67 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .execution_activity_py3 import ExecutionActivity
-
-
-class GetMetadataActivity(ExecutionActivity):
-    """Activity to get metadata of dataset.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param name: Required. Activity name.
-    :type name: str
-    :param description: Activity description.
-    :type description: str
-    :param depends_on: Activity depends on condition.
-    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
-    :param user_properties: Activity user properties.
-    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param linked_service_name: Linked service reference.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param policy: Activity policy.
-    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
-    :param dataset: Required. GetMetadata activity dataset reference.
-    :type dataset: ~azure.mgmt.datafactory.models.DatasetReference
-    :param field_list: Fields of metadata to get from dataset.
-    :type field_list: list[object]
-    """
-
-    _validation = {
-        'name': {'required': True},
-        'type': {'required': True},
-        'dataset': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'name': {'key': 'name', 'type': 'str'},
-        'description': {'key': 'description', 'type': 'str'},
-        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
-        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
-        'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'},
-        'field_list': {'key': 'typeProperties.fieldList', 'type': '[object]'},
-    }
-
-    def __init__(self, *, name: str, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, field_list=None, **kwargs) -> None:
-        super(GetMetadataActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
-        self.dataset = dataset
-        self.field_list = field_list
-        self.type = 'GetMetadata'
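A sketch of the GetMetadata activity above; the dataset name is hypothetical, while the field_list entries are standard GetMetadata field names:

from azure.mgmt.datafactory.models import DatasetReference, GetMetadataActivity

get_meta = GetMetadataActivity(
    name='ProbeSourceFolder',  # hypothetical activity name
    dataset=DatasetReference(reference_name='SourceFolderDataset'),
    field_list=['itemName', 'lastModified', 'childItems'],
)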
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request.py
deleted file mode 100644
index 1be4a2afece0..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class GetSsisObjectMetadataRequest(Model):
-    """The request payload of get SSIS object metadata.
-
-    :param metadata_path: Metadata path.
-    :type metadata_path: str
-    """
-
-    _attribute_map = {
-        'metadata_path': {'key': 'metadataPath', 'type': 'str'},
-    }
-
-    def __init__(self, **kwargs):
-        super(GetSsisObjectMetadataRequest, self).__init__(**kwargs)
-        self.metadata_path = kwargs.get('metadata_path', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request_py3.py
deleted file mode 100644
index 310cd9783d81..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request_py3.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class GetSsisObjectMetadataRequest(Model):
-    """The request payload of get SSIS object metadata.
-
-    :param metadata_path: Metadata path.
-    :type metadata_path: str
-    """
-
-    _attribute_map = {
-        'metadata_path': {'key': 'metadataPath', 'type': 'str'},
-    }
-
-    def __init__(self, *, metadata_path: str=None, **kwargs) -> None:
-        super(GetSsisObjectMetadataRequest, self).__init__(**kwargs)
-        self.metadata_path = metadata_path
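The request model above carries a single optional path that scopes an SSIS metadata listing; a sketch with a hypothetical SSISDB folder path:

from azure.mgmt.datafactory.models import GetSsisObjectMetadataRequest

request = GetSsisObjectMetadataRequest(metadata_path='SSISDB\\Folder1')  # hypothetical path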
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request.py
deleted file mode 100644
index cadecdf70f44..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class GitHubAccessTokenRequest(Model):
-    """Get GitHub access token request definition.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param git_hub_access_code: Required. GitHub access code.
-    :type git_hub_access_code: str
-    :param git_hub_client_id: GitHub application client ID.
-    :type git_hub_client_id: str
-    :param git_hub_access_token_base_url: Required. GitHub access token base
-     URL.
-    :type git_hub_access_token_base_url: str
-    """
-
-    _validation = {
-        'git_hub_access_code': {'required': True},
-        'git_hub_access_token_base_url': {'required': True},
-    }
-
-    _attribute_map = {
-        'git_hub_access_code': {'key': 'gitHubAccessCode', 'type': 'str'},
-        'git_hub_client_id': {'key': 'gitHubClientId', 'type': 'str'},
-        'git_hub_access_token_base_url': {'key': 'gitHubAccessTokenBaseUrl', 'type': 'str'},
-    }
-
-    def __init__(self, **kwargs):
-        super(GitHubAccessTokenRequest, self).__init__(**kwargs)
-        self.git_hub_access_code = kwargs.get('git_hub_access_code', None)
-        self.git_hub_client_id = kwargs.get('git_hub_client_id', None)
-        self.git_hub_access_token_base_url = kwargs.get('git_hub_access_token_base_url', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request_py3.py
deleted file mode 100644
index 7961e1bc33ed..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request_py3.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class GitHubAccessTokenRequest(Model):
-    """Get GitHub access token request definition.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param git_hub_access_code: Required. GitHub access code.
-    :type git_hub_access_code: str
-    :param git_hub_client_id: GitHub application client ID.
-    :type git_hub_client_id: str
-    :param git_hub_access_token_base_url: Required. GitHub access token base
-     URL.
-    :type git_hub_access_token_base_url: str
-    """
-
-    _validation = {
-        'git_hub_access_code': {'required': True},
-        'git_hub_access_token_base_url': {'required': True},
-    }
-
-    _attribute_map = {
-        'git_hub_access_code': {'key': 'gitHubAccessCode', 'type': 'str'},
-        'git_hub_client_id': {'key': 'gitHubClientId', 'type': 'str'},
-        'git_hub_access_token_base_url': {'key': 'gitHubAccessTokenBaseUrl', 'type': 'str'},
-    }
-
-    def __init__(self, *, git_hub_access_code: str, git_hub_access_token_base_url: str, git_hub_client_id: str=None, **kwargs) -> None:
-        super(GitHubAccessTokenRequest, self).__init__(**kwargs)
-        self.git_hub_access_code = git_hub_access_code
-        self.git_hub_client_id = git_hub_client_id
-        self.git_hub_access_token_base_url = git_hub_access_token_base_url
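A sketch of the token request; the access code comes back from GitHub's OAuth redirect, and both literals below are placeholders:

from azure.mgmt.datafactory.models import GitHubAccessTokenRequest

token_request = GitHubAccessTokenRequest(
    git_hub_access_code='<oauth-code>',                   # placeholder
    git_hub_access_token_base_url='https://github.com',   # placeholder base URL
)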
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response.py
deleted file mode 100644
index 4a4afce8f0f0..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class GitHubAccessTokenResponse(Model):
-    """Get GitHub access token response definition.
-
-    :param git_hub_access_token: GitHub access token.
-    :type git_hub_access_token: str
-    """
-
-    _attribute_map = {
-        'git_hub_access_token': {'key': 'gitHubAccessToken', 'type': 'str'},
-    }
-
-    def __init__(self, **kwargs):
-        super(GitHubAccessTokenResponse, self).__init__(**kwargs)
-        self.git_hub_access_token = kwargs.get('git_hub_access_token', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response_py3.py
deleted file mode 100644
index 4f28ade6e914..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response_py3.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class GitHubAccessTokenResponse(Model):
-    """Get GitHub access token response definition.
-
-    :param git_hub_access_token: GitHub access token.
-    :type git_hub_access_token: str
-    """
-
-    _attribute_map = {
-        'git_hub_access_token': {'key': 'gitHubAccessToken', 'type': 'str'},
-    }
-
-    def __init__(self, *, git_hub_access_token: str=None, **kwargs) -> None:
-        super(GitHubAccessTokenResponse, self).__init__(**kwargs)
-        self.git_hub_access_token = git_hub_access_token
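The response model pairs with the request above. Assuming the factories operation group exposes a get_git_hub_access_token operation, as releases contemporary with this patch did, a round trip might look like this (client construction elided; the resource names are placeholders):

# 'client' is an already-constructed DataFactoryManagementClient; the
# operation name and argument order are assumptions, not taken from this diff.
response = client.factories.get_git_hub_access_token(
    'my-resource-group', 'my-factory', token_request)
print(response.git_hub_access_token)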
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service.py
deleted file mode 100644
index c460dd95c380..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service.py
+++ /dev/null
@@ -1,119 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .linked_service import LinkedService
-
-
-class GoogleAdWordsLinkedService(LinkedService):
-    """Google AdWords service linked service.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param client_customer_id: Required. The Client customer ID of the AdWords
-     account that you want to fetch report data for.
-    :type client_customer_id: object
-    :param developer_token: Required. The developer token associated with the
-     manager account that you use to grant access to the AdWords API.
-    :type developer_token: ~azure.mgmt.datafactory.models.SecretBase
-    :param authentication_type: Required. The OAuth 2.0 authentication
-     mechanism used for authentication. ServiceAuthentication can only be used
-     on self-hosted IR. Possible values include: 'ServiceAuthentication',
-     'UserAuthentication'
-    :type authentication_type: str or
-     ~azure.mgmt.datafactory.models.GoogleAdWordsAuthenticationType
-    :param refresh_token: The refresh token obtained from Google for
-     authorizing access to AdWords for UserAuthentication.
-    :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase
-    :param client_id: The client id of the google application used to acquire
-     the refresh token.
-    :type client_id: ~azure.mgmt.datafactory.models.SecretBase
-    :param client_secret: The client secret of the google application used to
-     acquire the refresh token.
-    :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
-    :param email: The service account email ID that is used for
-     ServiceAuthentication and can only be used on self-hosted IR.
-    :type email: object
-    :param key_file_path: The full path to the .p12 key file that is used to
-     authenticate the service account email address and can only be used on
-     self-hosted IR.
-    :type key_file_path: object
-    :param trusted_cert_path: The full path of the .pem file containing
-     trusted CA certificates for verifying the server when connecting over SSL.
-     This property can only be set when using SSL on self-hosted IR. The
-     default value is the cacerts.pem file installed with the IR.
-    :type trusted_cert_path: object
-    :param use_system_trust_store: Specifies whether to use a CA certificate
-     from the system trust store or from a specified PEM file. The default
-     value is false.
-    :type use_system_trust_store: object
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
-    :type encrypted_credential: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-        'client_customer_id': {'required': True},
-        'developer_token': {'required': True},
-        'authentication_type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
-        'description': {'key': 'description', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'client_customer_id': {'key': 'typeProperties.clientCustomerID', 'type': 'object'},
-        'developer_token': {'key': 'typeProperties.developerToken', 'type': 'SecretBase'},
-        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
-        'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'},
-        'client_id': {'key': 'typeProperties.clientId', 'type': 'SecretBase'},
-        'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'},
-        'email': {'key': 'typeProperties.email', 'type': 'object'},
-        'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'},
-        'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'},
-        'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'},
-        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }
-
-    def __init__(self, **kwargs):
-        super(GoogleAdWordsLinkedService, self).__init__(**kwargs)
-        self.client_customer_id = kwargs.get('client_customer_id', None)
-        self.developer_token = kwargs.get('developer_token', None)
-        self.authentication_type = kwargs.get('authentication_type', None)
-        self.refresh_token = kwargs.get('refresh_token', None)
-        self.client_id = kwargs.get('client_id', None)
-        self.client_secret = kwargs.get('client_secret', None)
-        self.email = kwargs.get('email', None)
-        self.key_file_path = kwargs.get('key_file_path', None)
-        self.trusted_cert_path = kwargs.get('trusted_cert_path', None)
-        self.use_system_trust_store = kwargs.get('use_system_trust_store', None)
-        self.encrypted_credential = kwargs.get('encrypted_credential', None)
-        self.type = 'GoogleAdWords'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service_py3.py
deleted file mode 100644
index dfb3bc07e69f..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service_py3.py
+++ /dev/null
@@ -1,119 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .linked_service_py3 import LinkedService
-
-
-class GoogleAdWordsLinkedService(LinkedService):
-    """Google AdWords service linked service.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param client_customer_id: Required. The Client customer ID of the AdWords
-     account that you want to fetch report data for.
-    :type client_customer_id: object
-    :param developer_token: Required. The developer token associated with the
-     manager account that you use to grant access to the AdWords API.
-    :type developer_token: ~azure.mgmt.datafactory.models.SecretBase
-    :param authentication_type: Required. The OAuth 2.0 authentication
-     mechanism used for authentication. ServiceAuthentication can only be used
-     on self-hosted IR. Possible values include: 'ServiceAuthentication',
-     'UserAuthentication'
-    :type authentication_type: str or
-     ~azure.mgmt.datafactory.models.GoogleAdWordsAuthenticationType
-    :param refresh_token: The refresh token obtained from Google for
-     authorizing access to AdWords for UserAuthentication.
-    :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase
-    :param client_id: The client id of the google application used to acquire
-     the refresh token.
-    :type client_id: ~azure.mgmt.datafactory.models.SecretBase
-    :param client_secret: The client secret of the google application used to
-     acquire the refresh token.
-    :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
-    :param email: The service account email ID that is used for
-     ServiceAuthentication and can only be used on self-hosted IR.
-    :type email: object
-    :param key_file_path: The full path to the .p12 key file that is used to
-     authenticate the service account email address and can only be used on
-     self-hosted IR.
-    :type key_file_path: object
-    :param trusted_cert_path: The full path of the .pem file containing
-     trusted CA certificates for verifying the server when connecting over SSL.
-     This property can only be set when using SSL on self-hosted IR. The
-     default value is the cacerts.pem file installed with the IR.
-    :type trusted_cert_path: object
-    :param use_system_trust_store: Specifies whether to use a CA certificate
-     from the system trust store or from a specified PEM file. The default
-     value is false.
-    :type use_system_trust_store: object
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
-    :type encrypted_credential: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-        'client_customer_id': {'required': True},
-        'developer_token': {'required': True},
-        'authentication_type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
-        'description': {'key': 'description', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'client_customer_id': {'key': 'typeProperties.clientCustomerID', 'type': 'object'},
-        'developer_token': {'key': 'typeProperties.developerToken', 'type': 'SecretBase'},
-        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
-        'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'},
-        'client_id': {'key': 'typeProperties.clientId', 'type': 'SecretBase'},
-        'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'},
-        'email': {'key': 'typeProperties.email', 'type': 'object'},
-        'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'},
-        'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'},
-        'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'},
-        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
-    }
-
-    def __init__(self, *, client_customer_id, developer_token, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, refresh_token=None, client_id=None, client_secret=None, email=None, key_file_path=None, trusted_cert_path=None, use_system_trust_store=None, encrypted_credential=None, **kwargs) -> None:
-        super(GoogleAdWordsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
-        self.client_customer_id = client_customer_id
-        self.developer_token = developer_token
-        self.authentication_type = authentication_type
-        self.refresh_token = refresh_token
-        self.client_id = client_id
-        self.client_secret = client_secret
-        self.email = email
-        self.key_file_path = key_file_path
-        self.trusted_cert_path = trusted_cert_path
-        self.use_system_trust_store = use_system_trust_store
-        self.encrypted_credential = encrypted_credential
-        self.type = 'GoogleAdWords'
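A sketch of the AdWords linked service above in its UserAuthentication flow; every literal is a placeholder, and the three OAuth secrets are wrapped as SecureString, one of the SecretBase implementations in this package:

from azure.mgmt.datafactory.models import (
    GoogleAdWordsLinkedService, SecureString)

adwords = GoogleAdWordsLinkedService(
    client_customer_id='123-456-7890',  # placeholder customer ID
    developer_token=SecureString(value='<developer-token>'),
    authentication_type='UserAuthentication',
    refresh_token=SecureString(value='<refresh-token>'),
    client_id=SecureString(value='<client-id>'),
    client_secret=SecureString(value='<client-secret>'),
)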
-# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class GoogleAdWordsObjectDataset(Dataset): - """Google AdWords service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(GoogleAdWordsObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'GoogleAdWordsObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset_py3.py deleted file mode 100644 index e1272f978b8e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class GoogleAdWordsObjectDataset(Dataset): - """Google AdWords service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(GoogleAdWordsObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'GoogleAdWordsObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source.py deleted file mode 100644 index 8699057abe09..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class GoogleAdWordsSource(CopySource): - """A copy activity Google AdWords service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(GoogleAdWordsSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'GoogleAdWordsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source_py3.py deleted file mode 100644 index 995d5324670b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class GoogleAdWordsSource(CopySource): - """A copy activity Google AdWords service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. 
Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'GoogleAdWordsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py deleted file mode 100644 index 45a535b95d43..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py +++ /dev/null @@ -1,124 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class GoogleBigQueryLinkedService(LinkedService): - """Google BigQuery service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param project: Required. The default BigQuery project to query against. - :type project: object - :param additional_projects: A comma-separated list of public BigQuery - projects to access. 
- :type additional_projects: object - :param request_google_drive_scope: Whether to request access to Google - Drive. Allowing Google Drive access enables support for federated tables - that combine BigQuery data with data from Google Drive. The default value - is false. - :type request_google_drive_scope: object - :param authentication_type: Required. The OAuth 2.0 authentication - mechanism used for authentication. ServiceAuthentication can only be used - on self-hosted IR. Possible values include: 'ServiceAuthentication', - 'UserAuthentication' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.GoogleBigQueryAuthenticationType - :param refresh_token: The refresh token obtained from Google for - authorizing access to BigQuery for UserAuthentication. - :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase - :param client_id: The client id of the google application used to acquire - the refresh token. - :type client_id: ~azure.mgmt.datafactory.models.SecretBase - :param client_secret: The client secret of the google application used to - acquire the refresh token. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param email: The service account email ID that is used for - ServiceAuthentication and can only be used on self-hosted IR. - :type email: object - :param key_file_path: The full path to the .p12 key file that is used to - authenticate the service account email address and can only be used on - self-hosted IR. - :type key_file_path: object - :param trusted_cert_path: The full path of the .pem file containing - trusted CA certificates for verifying the server when connecting over SSL. - This property can only be set when using SSL on self-hosted IR. The - default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate - from the system trust store or from a specified PEM file. The default - value is false. - :type use_system_trust_store: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'project': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'project': {'key': 'typeProperties.project', 'type': 'object'}, - 'additional_projects': {'key': 'typeProperties.additionalProjects', 'type': 'object'}, - 'request_google_drive_scope': {'key': 'typeProperties.requestGoogleDriveScope', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'SecretBase'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'email': {'key': 'typeProperties.email', 'type': 'object'}, - 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(GoogleBigQueryLinkedService, self).__init__(**kwargs) - self.project = kwargs.get('project', None) - self.additional_projects = kwargs.get('additional_projects', None) - self.request_google_drive_scope = kwargs.get('request_google_drive_scope', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.refresh_token = kwargs.get('refresh_token', None) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.email = kwargs.get('email', None) - self.key_file_path = kwargs.get('key_file_path', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'GoogleBigQuery' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service_py3.py deleted file mode 100644 index 146674a85531..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service_py3.py +++ /dev/null @@ -1,124 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class GoogleBigQueryLinkedService(LinkedService): - """Google BigQuery service linked service. 
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param project: Required. The default BigQuery project to query against. - :type project: object - :param additional_projects: A comma-separated list of public BigQuery - projects to access. - :type additional_projects: object - :param request_google_drive_scope: Whether to request access to Google - Drive. Allowing Google Drive access enables support for federated tables - that combine BigQuery data with data from Google Drive. The default value - is false. - :type request_google_drive_scope: object - :param authentication_type: Required. The OAuth 2.0 authentication - mechanism used for authentication. ServiceAuthentication can only be used - on self-hosted IR. Possible values include: 'ServiceAuthentication', - 'UserAuthentication' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.GoogleBigQueryAuthenticationType - :param refresh_token: The refresh token obtained from Google for - authorizing access to BigQuery for UserAuthentication. - :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase - :param client_id: The client id of the google application used to acquire - the refresh token. - :type client_id: ~azure.mgmt.datafactory.models.SecretBase - :param client_secret: The client secret of the google application used to - acquire the refresh token. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param email: The service account email ID that is used for - ServiceAuthentication and can only be used on self-hosted IR. - :type email: object - :param key_file_path: The full path to the .p12 key file that is used to - authenticate the service account email address and can only be used on - self-hosted IR. - :type key_file_path: object - :param trusted_cert_path: The full path of the .pem file containing - trusted CA certificates for verifying the server when connecting over SSL. - This property can only be set when using SSL on self-hosted IR. The - default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate - from the system trust store or from a specified PEM file. The default - value is false. - :type use_system_trust_store: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'project': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'project': {'key': 'typeProperties.project', 'type': 'object'}, - 'additional_projects': {'key': 'typeProperties.additionalProjects', 'type': 'object'}, - 'request_google_drive_scope': {'key': 'typeProperties.requestGoogleDriveScope', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'SecretBase'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'email': {'key': 'typeProperties.email', 'type': 'object'}, - 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, project, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, additional_projects=None, request_google_drive_scope=None, refresh_token=None, client_id=None, client_secret=None, email=None, key_file_path=None, trusted_cert_path=None, use_system_trust_store=None, encrypted_credential=None, **kwargs) -> None: - super(GoogleBigQueryLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.project = project - self.additional_projects = additional_projects - self.request_google_drive_scope = request_google_drive_scope - self.authentication_type = authentication_type - self.refresh_token = refresh_token - self.client_id = client_id - self.client_secret = client_secret - self.email = email - self.key_file_path = key_file_path - self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.encrypted_credential = encrypted_credential - self.type = 'GoogleBigQuery' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset.py deleted file mode 100644 index 920489742bbf..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
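For the BigQuery variant only project and authentication_type are mandatory. A rough sketch of the ServiceAuthentication path, which per the docstring above is limited to a self-hosted integration runtime; the project name, service-account email and key path are placeholders:

    from azure.mgmt.datafactory.models import GoogleBigQueryLinkedService

    bq_ls = GoogleBigQueryLinkedService(
        project='example-project',                    # default project to query against
        additional_projects='public-proj-1,public-proj-2',
        request_google_drive_scope=True,              # enables federated Google Drive tables
        authentication_type='ServiceAuthentication',  # self-hosted IR only
        email='adf-reader@example-project.iam.gserviceaccount.com',
        key_file_path='C:\\credentials\\adf-reader.p12',
    )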
-# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class GoogleBigQueryObjectDataset(Dataset): - """Google BigQuery service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - database + table properties instead. - :type table_name: object - :param table: The table name of the Google BigQuery. Type: string (or - Expression with resultType string). - :type table: object - :param dataset: The database name of the Google BigQuery. Type: string (or - Expression with resultType string). 
- :type dataset: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(GoogleBigQueryObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.dataset = kwargs.get('dataset', None) - self.type = 'GoogleBigQueryObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset_py3.py deleted file mode 100644 index 205819f8eeef..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class GoogleBigQueryObjectDataset(Dataset): - """Google BigQuery service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. 
- :type type: str - :param table_name: This property will be retired. Please consider using - database + table properties instead. - :type table_name: object - :param table: The table name of the Google BigQuery. Type: string (or - Expression with resultType string). - :type table: object - :param dataset: The database name of the Google BigQuery. Type: string (or - Expression with resultType string). - :type dataset: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, dataset=None, **kwargs) -> None: - super(GoogleBigQueryObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.table = table - self.dataset = dataset - self.type = 'GoogleBigQueryObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source.py deleted file mode 100644 index 3a28d2563a8b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class GoogleBigQuerySource(CopySource): - """A copy activity Google BigQuery service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
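Note that the dataset model keeps the retired table_name alongside the newer table and dataset properties, where dataset holds the BigQuery database name. A brief sketch, with a hypothetical linked service reference named 'GoogleBigQueryLS':

    from azure.mgmt.datafactory.models import (
        GoogleBigQueryObjectDataset,
        LinkedServiceReference,
    )

    bq_table = GoogleBigQueryObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name='GoogleBigQueryLS'),
        dataset='sales',      # BigQuery dataset (database) name
        table='orders',       # preferred over the retired table_name
    )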
- :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(GoogleBigQuerySource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'GoogleBigQuerySource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source_py3.py deleted file mode 100644 index 49364b4d0e3f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class GoogleBigQuerySource(CopySource): - """A copy activity Google BigQuery service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'GoogleBigQuerySource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service.py deleted file mode 100644 index 57913f779ca1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class GreenplumLinkedService(LinkedService): - """Greenplum Database linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection - string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
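The source models are thin wrappers over CopySource plus a query. A minimal sketch using only the keyword arguments shown in the removed constructor; the SQL text is a placeholder, and source_retry_wait must match the documented timespan pattern:

    from azure.mgmt.datafactory.models import GoogleBigQuerySource

    bq_source = GoogleBigQuerySource(
        query='SELECT id, amount FROM sales.orders',  # placeholder query
        source_retry_count=3,
        source_retry_wait='00:00:30',                 # hh:mm:ss, per the pattern above
        max_concurrent_connections=1,
    )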
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(GreenplumLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.pwd = kwargs.get('pwd', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Greenplum' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service_py3.py deleted file mode 100644 index bd707a5e85c9..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service_py3.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class GreenplumLinkedService(LinkedService): - """Greenplum Database linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection - string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: - super(GreenplumLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.pwd = pwd - self.encrypted_credential = encrypted_credential - self.type = 'Greenplum' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source.py deleted file mode 100644 index 086f12419f4a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class GreenplumSource(CopySource): - """A copy activity Greenplum Database source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
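Greenplum declares no required type properties beyond the discriminator, so the connection string can be supplied inline while the password is resolved from Key Vault. A sketch assuming the AzureKeyVaultSecretReference and LinkedServiceReference models from the same package, with placeholder host, database and secret names throughout:

    from azure.mgmt.datafactory.models import (
        AzureKeyVaultSecretReference,
        GreenplumLinkedService,
        LinkedServiceReference,
    )

    greenplum_ls = GreenplumLinkedService(
        connection_string='HOST=gp.example.com;PORT=5432;DB=warehouse;UID=loader',
        pwd=AzureKeyVaultSecretReference(
            store=LinkedServiceReference(reference_name='MyKeyVaultLS'),  # hypothetical AKV linked service
            secret_name='greenplum-password',
        ),
    )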
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(GreenplumSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'GreenplumSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source_py3.py deleted file mode 100644 index 8b789deb43da..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class GreenplumSource(CopySource): - """A copy activity Greenplum Database source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'GreenplumSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset.py deleted file mode 100644 index eb0ea08ee544..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class GreenplumTableDataset(Dataset): - """Greenplum Database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of Greenplum. Type: string (or Expression - with resultType string). 
- :type table: object - :param greenplum_table_dataset_schema: The schema name of Greenplum. Type: - string (or Expression with resultType string). - :type greenplum_table_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'greenplum_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(GreenplumTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.greenplum_table_dataset_schema = kwargs.get('greenplum_table_dataset_schema', None) - self.type = 'GreenplumTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset_py3.py deleted file mode 100644 index 7f37fff9108d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class GreenplumTableDataset(Dataset): - """Greenplum Database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. 
If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of Greenplum. Type: string (or Expression - with resultType string). - :type table: object - :param greenplum_table_dataset_schema: The schema name of Greenplum. Type: - string (or Expression with resultType string). - :type greenplum_table_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'greenplum_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, greenplum_table_dataset_schema=None, **kwargs) -> None: - super(GreenplumTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.table = table - self.greenplum_table_dataset_schema = greenplum_table_dataset_schema - self.type = 'GreenplumTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service.py deleted file mode 100644 index b6affd5caa0d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service.py +++ /dev/null @@ -1,114 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class HBaseLinkedService(LinkedService): - """HBase server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. 
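The two GreenplumTableDataset copies deleted above (the Python 2 and py3 variants) expose the retiring table_name property alongside the newer table/schema split. A sketch of the py3 constructor as defined in the deleted file; the linked-service name and table values are placeholders:

from azure.mgmt.datafactory.models import GreenplumTableDataset, LinkedServiceReference

gp_dataset = GreenplumTableDataset(
    linked_service_name=LinkedServiceReference(reference_name="GreenplumLinkedService"),  # placeholder name
    table="sales",                            # serialized as typeProperties.table
    greenplum_table_dataset_schema="public",  # serialized as typeProperties.schema
)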
- :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The IP address or host name of the HBase server. - (i.e. 192.168.222.160) - :type host: object - :param port: The TCP port that the HBase instance uses to listen for - client connections. The default value is 9090. - :type port: object - :param http_path: The partial URL corresponding to the HBase server. (i.e. - /gateway/sandbox/hbase/version) - :type http_path: object - :param authentication_type: Required. The authentication mechanism to use - to connect to the HBase server. Possible values include: 'Anonymous', - 'Basic' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.HBaseAuthenticationType - :param username: The user name used to connect to the HBase instance. - :type username: object - :param password: The password corresponding to the user name. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are - encrypted using SSL. The default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing - trusted CA certificates for verifying the server when connecting over SSL. - This property can only be set when using SSL on self-hosted IR. The - default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param allow_host_name_cn_mismatch: Specifies whether to require a - CA-issued SSL certificate name to match the host name of the server when - connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow - self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(HBaseLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.port = kwargs.get('port', None) - self.http_path = kwargs.get('http_path', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'HBase' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service_py3.py deleted file mode 100644 index a8823e2e8937..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service_py3.py +++ /dev/null @@ -1,114 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class HBaseLinkedService(LinkedService): - """HBase server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. 
- :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The IP address or host name of the HBase server. - (i.e. 192.168.222.160) - :type host: object - :param port: The TCP port that the HBase instance uses to listen for - client connections. The default value is 9090. - :type port: object - :param http_path: The partial URL corresponding to the HBase server. (i.e. - /gateway/sandbox/hbase/version) - :type http_path: object - :param authentication_type: Required. The authentication mechanism to use - to connect to the HBase server. Possible values include: 'Anonymous', - 'Basic' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.HBaseAuthenticationType - :param username: The user name used to connect to the HBase instance. - :type username: object - :param password: The password corresponding to the user name. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are - encrypted using SSL. The default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing - trusted CA certificates for verifying the server when connecting over SSL. - This property can only be set when using SSL on self-hosted IR. The - default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param allow_host_name_cn_mismatch: Specifies whether to require a - CA-issued SSL certificate name to match the host name of the server when - connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow - self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, http_path=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: - super(HBaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.port = port - self.http_path = http_path - self.authentication_type = authentication_type - self.username = username - self.password = password - self.enable_ssl = enable_ssl - self.trusted_cert_path = trusted_cert_path - self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch - self.allow_self_signed_server_cert = allow_self_signed_server_cert - self.encrypted_credential = encrypted_credential - self.type = 'HBase' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset.py deleted file mode 100644 index 5de32bcb6871..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class HBaseObjectDataset(Dataset): - """HBase server dataset. - - All required parameters must be populated in order to send to Azure. 
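Both HBaseLinkedService variants removed above take host and authentication_type as required keyword arguments. A minimal sketch against the deleted py3 signature, assuming SecureString is exported from the same models namespace as the other SecretBase implementations; all values are placeholders:

from azure.mgmt.datafactory.models import HBaseLinkedService, SecureString

hbase_ls = HBaseLinkedService(
    host="192.168.222.160",       # required; IP address or host name
    authentication_type="Basic",  # required; 'Anonymous' or 'Basic'
    port=9090,                    # default port per the deleted docstring
    http_path="/gateway/sandbox/hbase/version",
    username="admin",
    password=SecureString(value="placeholder-password"),  # placeholder secret
    enable_ssl=True,
)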
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(HBaseObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'HBaseObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset_py3.py deleted file mode 100644 index 27fc0d1514ea..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class HBaseObjectDataset(Dataset): - """HBase server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. 
- :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(HBaseObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'HBaseObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source.py deleted file mode 100644 index eb6e3f1789bb..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class HBaseSource(CopySource): - """A copy activity HBase server source. - - All required parameters must be populated in order to send to Azure. 
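The HBaseObjectDataset pair removed above adds only table_name on top of the common Dataset surface; a construction sketch against the deleted py3 signature, with placeholder names:

from azure.mgmt.datafactory.models import HBaseObjectDataset, LinkedServiceReference

hbase_ds = HBaseObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name="HBaseLinkedService"),  # placeholder
    table_name="events",  # serialized as typeProperties.tableName
)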
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(HBaseSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'HBaseSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source_py3.py deleted file mode 100644 index b2680e95c212..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class HBaseSource(CopySource): - """A copy activity HBase server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'HBaseSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity.py deleted file mode 100644 index 4110b0f8b7de..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity.py +++ /dev/null @@ -1,96 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity import ExecutionActivity - - -class HDInsightHiveActivity(ExecutionActivity): - """HDInsight Hive activity type. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: - list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: 'None', - 'Always', 'Failure' - :type get_debug_info: str or - ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param script_path: Script path. Type: string (or Expression with - resultType string). - :type script_path: object - :param script_linked_service: Script linked service reference. 
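As with the other CopySource subclasses deleted in this patch, HBaseSource contributes only a query property; a sketch against the deleted py3 signature (the query text is a placeholder):

from azure.mgmt.datafactory.models import HBaseSource

hbase_source = HBaseSource(
    query="SELECT * FROM events",  # string, or Expression with resultType string
)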
- :type script_linked_service: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param defines: Allows user to specify defines for Hive job request. - :type defines: dict[str, object] - :param variables: User specified arguments under hivevar namespace. - :type variables: list[object] - :param query_timeout: Query timeout value (in minutes). Effective when - the HDInsight cluster is with ESP (Enterprise Security Package) - :type query_timeout: int - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, - 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, - 'variables': {'key': 'typeProperties.variables', 'type': '[object]'}, - 'query_timeout': {'key': 'typeProperties.queryTimeout', 'type': 'int'}, - } - - def __init__(self, **kwargs): - super(HDInsightHiveActivity, self).__init__(**kwargs) - self.storage_linked_services = kwargs.get('storage_linked_services', None) - self.arguments = kwargs.get('arguments', None) - self.get_debug_info = kwargs.get('get_debug_info', None) - self.script_path = kwargs.get('script_path', None) - self.script_linked_service = kwargs.get('script_linked_service', None) - self.defines = kwargs.get('defines', None) - self.variables = kwargs.get('variables', None) - self.query_timeout = kwargs.get('query_timeout', None) - self.type = 'HDInsightHive' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity_py3.py deleted file mode 100644 index f8a5441fe767..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity_py3.py +++ /dev/null @@ -1,96 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class HDInsightHiveActivity(ExecutionActivity): - """HDInsight Hive activity type. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: - list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: 'None', - 'Always', 'Failure' - :type get_debug_info: str or - ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param script_path: Script path. Type: string (or Expression with - resultType string). - :type script_path: object - :param script_linked_service: Script linked service reference. - :type script_linked_service: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param defines: Allows user to specify defines for Hive job request. - :type defines: dict[str, object] - :param variables: User specified arguments under hivevar namespace. - :type variables: list[object] - :param query_timeout: Query timeout value (in minutes). 
Effective when - the HDInsight cluster is with ESP (Enterprise Security Package) - :type query_timeout: int - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, - 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, - 'variables': {'key': 'typeProperties.variables', 'type': '[object]'}, - 'query_timeout': {'key': 'typeProperties.queryTimeout', 'type': 'int'}, - } - - def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, script_path=None, script_linked_service=None, defines=None, variables=None, query_timeout: int=None, **kwargs) -> None: - super(HDInsightHiveActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.storage_linked_services = storage_linked_services - self.arguments = arguments - self.get_debug_info = get_debug_info - self.script_path = script_path - self.script_linked_service = script_linked_service - self.defines = defines - self.variables = variables - self.query_timeout = query_timeout - self.type = 'HDInsightHive' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py deleted file mode 100644 index 810525342d82..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py +++ /dev/null @@ -1,96 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class HDInsightLinkedService(LinkedService): - """HDInsight linked service. - - All required parameters must be populated in order to send to Azure. 
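The deleted HDInsightHiveActivity definitions above carry the query_timeout property that, per the docstring, is effective only when the cluster has ESP (Enterprise Security Package). A sketch of the deleted py3 constructor; reference names, paths, and defines are placeholders:

from azure.mgmt.datafactory.models import HDInsightHiveActivity, LinkedServiceReference

hive_activity = HDInsightHiveActivity(
    name="RunHiveScript",  # required
    linked_service_name=LinkedServiceReference(reference_name="HDInsightLinkedService"),  # placeholder
    script_path="scripts/transform.hql",  # placeholder path
    script_linked_service=LinkedServiceReference(reference_name="StorageLinkedService"),  # placeholder
    defines={"inputPath": "raw/2019/06"},  # placeholder Hive defines
    query_timeout=120,  # minutes; honored only on ESP clusters
)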
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param cluster_uri: Required. HDInsight cluster URI. Type: string (or - Expression with resultType string). - :type cluster_uri: object - :param user_name: HDInsight cluster user name. Type: string (or Expression - with resultType string). - :type user_name: object - :param password: HDInsight cluster password. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param linked_service_name: The Azure Storage linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param hcatalog_linked_service_name: A reference to the Azure SQL linked - service that points to the HCatalog database. - :type hcatalog_linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - :param is_esp_enabled: Specify if the HDInsight is created with ESP - (Enterprise Security Package). Type: Boolean. - :type is_esp_enabled: object - :param file_system: Specify the FileSystem if the main storage for the - HDInsight is ADLS Gen2. Type: string (or Expression with resultType - string). 
- :type file_system: object - """ - - _validation = { - 'type': {'required': True}, - 'cluster_uri': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'cluster_uri': {'key': 'typeProperties.clusterUri', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, - 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'is_esp_enabled': {'key': 'typeProperties.isEspEnabled', 'type': 'object'}, - 'file_system': {'key': 'typeProperties.fileSystem', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(HDInsightLinkedService, self).__init__(**kwargs) - self.cluster_uri = kwargs.get('cluster_uri', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.linked_service_name = kwargs.get('linked_service_name', None) - self.hcatalog_linked_service_name = kwargs.get('hcatalog_linked_service_name', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.is_esp_enabled = kwargs.get('is_esp_enabled', None) - self.file_system = kwargs.get('file_system', None) - self.type = 'HDInsight' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py deleted file mode 100644 index 5c384f7d6288..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py +++ /dev/null @@ -1,96 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class HDInsightLinkedService(LinkedService): - """HDInsight linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. 
- :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param cluster_uri: Required. HDInsight cluster URI. Type: string (or - Expression with resultType string). - :type cluster_uri: object - :param user_name: HDInsight cluster user name. Type: string (or Expression - with resultType string). - :type user_name: object - :param password: HDInsight cluster password. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param linked_service_name: The Azure Storage linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param hcatalog_linked_service_name: A reference to the Azure SQL linked - service that points to the HCatalog database. - :type hcatalog_linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - :param is_esp_enabled: Specify if the HDInsight is created with ESP - (Enterprise Security Package). Type: Boolean. - :type is_esp_enabled: object - :param file_system: Specify the FileSystem if the main storage for the - HDInsight is ADLS Gen2. Type: string (or Expression with resultType - string). - :type file_system: object - """ - - _validation = { - 'type': {'required': True}, - 'cluster_uri': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'cluster_uri': {'key': 'typeProperties.clusterUri', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, - 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'is_esp_enabled': {'key': 'typeProperties.isEspEnabled', 'type': 'object'}, - 'file_system': {'key': 'typeProperties.fileSystem', 'type': 'object'}, - } - - def __init__(self, *, cluster_uri, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_name=None, password=None, linked_service_name=None, hcatalog_linked_service_name=None, encrypted_credential=None, is_esp_enabled=None, file_system=None, **kwargs) -> None: - super(HDInsightLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.cluster_uri = cluster_uri - self.user_name = user_name - self.password = password - self.linked_service_name = linked_service_name - self.hcatalog_linked_service_name = hcatalog_linked_service_name - self.encrypted_credential = encrypted_credential - self.is_esp_enabled = is_esp_enabled - self.file_system = file_system - self.type = 'HDInsight' diff --git 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity.py deleted file mode 100644 index 20655843e1db..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity.py +++ /dev/null @@ -1,99 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity import ExecutionActivity - - -class HDInsightMapReduceActivity(ExecutionActivity): - """HDInsight MapReduce activity type. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: - list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: 'None', - 'Always', 'Failure' - :type get_debug_info: str or - ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param class_name: Required. Class name. Type: string (or Expression with - resultType string). - :type class_name: object - :param jar_file_path: Required. Jar path. Type: string (or Expression with - resultType string). - :type jar_file_path: object - :param jar_linked_service: Jar linked service reference. - :type jar_linked_service: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param jar_libs: Jar libs. - :type jar_libs: list[object] - :param defines: Allows user to specify defines for the MapReduce job - request. 
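The HDInsightLinkedService removal above is the bring-your-own-cluster model whose file_system property covers ADLS Gen2 primary storage, in line with this PR's subject. A sketch against the deleted py3 signature; the URI, credentials, and file system name are placeholders, and SecureString is again assumed from the same models namespace:

from azure.mgmt.datafactory.models import HDInsightLinkedService, SecureString

hdi_ls = HDInsightLinkedService(
    cluster_uri="https://mycluster.azurehdinsight.net",  # required; placeholder URI
    user_name="admin",
    password=SecureString(value="placeholder-password"),  # placeholder secret
    is_esp_enabled=True,         # cluster created with Enterprise Security Package
    file_system="myfilesystem",  # set when the main storage is ADLS Gen2; placeholder
)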
- :type defines: dict[str, object] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'class_name': {'required': True}, - 'jar_file_path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'class_name': {'key': 'typeProperties.className', 'type': 'object'}, - 'jar_file_path': {'key': 'typeProperties.jarFilePath', 'type': 'object'}, - 'jar_linked_service': {'key': 'typeProperties.jarLinkedService', 'type': 'LinkedServiceReference'}, - 'jar_libs': {'key': 'typeProperties.jarLibs', 'type': '[object]'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, - } - - def __init__(self, **kwargs): - super(HDInsightMapReduceActivity, self).__init__(**kwargs) - self.storage_linked_services = kwargs.get('storage_linked_services', None) - self.arguments = kwargs.get('arguments', None) - self.get_debug_info = kwargs.get('get_debug_info', None) - self.class_name = kwargs.get('class_name', None) - self.jar_file_path = kwargs.get('jar_file_path', None) - self.jar_linked_service = kwargs.get('jar_linked_service', None) - self.jar_libs = kwargs.get('jar_libs', None) - self.defines = kwargs.get('defines', None) - self.type = 'HDInsightMapReduce' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity_py3.py deleted file mode 100644 index dffa9f119069..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity_py3.py +++ /dev/null @@ -1,99 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class HDInsightMapReduceActivity(ExecutionActivity): - """HDInsight MapReduce activity type. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. 
- :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: - list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: 'None', - 'Always', 'Failure' - :type get_debug_info: str or - ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param class_name: Required. Class name. Type: string (or Expression with - resultType string). - :type class_name: object - :param jar_file_path: Required. Jar path. Type: string (or Expression with - resultType string). - :type jar_file_path: object - :param jar_linked_service: Jar linked service reference. - :type jar_linked_service: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param jar_libs: Jar libs. - :type jar_libs: list[object] - :param defines: Allows user to specify defines for the MapReduce job - request. - :type defines: dict[str, object] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'class_name': {'required': True}, - 'jar_file_path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'class_name': {'key': 'typeProperties.className', 'type': 'object'}, - 'jar_file_path': {'key': 'typeProperties.jarFilePath', 'type': 'object'}, - 'jar_linked_service': {'key': 'typeProperties.jarLinkedService', 'type': 'LinkedServiceReference'}, - 'jar_libs': {'key': 'typeProperties.jarLibs', 'type': '[object]'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, - } - - def __init__(self, *, name: str, class_name, jar_file_path, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, jar_linked_service=None, jar_libs=None, defines=None, **kwargs) -> None: - super(HDInsightMapReduceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.storage_linked_services = storage_linked_services - self.arguments = arguments - self.get_debug_info = get_debug_info - self.class_name = class_name - self.jar_file_path = jar_file_path - self.jar_linked_service = 
jar_linked_service - self.jar_libs = jar_libs - self.defines = defines - self.type = 'HDInsightMapReduce' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py deleted file mode 100644 index d386aac9d9aa..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py +++ /dev/null @@ -1,237 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class HDInsightOnDemandLinkedService(LinkedService): - """HDInsight ondemand linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param cluster_size: Required. Number of worker/data nodes in the cluster. - Suggestion value: 4. Type: string (or Expression with resultType string). - :type cluster_size: object - :param time_to_live: Required. The allowed idle time for the on-demand - HDInsight cluster. Specifies how long the on-demand HDInsight cluster - stays alive after completion of an activity run if there are no other - active jobs in the cluster. The minimum value is 5 mins. Type: string (or - Expression with resultType string). - :type time_to_live: object - :param version: Required. Version of the HDInsight cluster.  Type: string - (or Expression with resultType string). - :type version: object - :param linked_service_name: Required. Azure Storage linked service to be - used by the on-demand cluster for storing and processing data. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param host_subscription_id: Required. The customer’s subscription to host - the cluster. Type: string (or Expression with resultType string). - :type host_subscription_id: object - :param service_principal_id: The service principal id for the - hostSubscriptionId. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key for the service principal id. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: Required. The Tenant id/name to which the service principal - belongs. Type: string (or Expression with resultType string). 
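For context, a minimal sketch of constructing the HDInsightMapReduceActivity model removed above. All names, paths, and linked-service references are hypothetical placeholders; per the _validation map, only name, class_name, and jar_file_path are required.

    from azure.mgmt.datafactory.models import (
        HDInsightMapReduceActivity,
        LinkedServiceReference,
    )

    # Run a jar-based MapReduce job on an existing HDInsight cluster.
    mr_activity = HDInsightMapReduceActivity(
        name="RunWordCount",                          # required
        class_name="org.example.WordCount",           # required
        jar_file_path="adfjobs/jars/wordcount.jar",   # required
        jar_linked_service=LinkedServiceReference(reference_name="MyStorage"),
        linked_service_name=LinkedServiceReference(reference_name="MyHDICluster"),
        arguments=["wasbs://in@acct.blob.core.windows.net/", "out/"],
        get_debug_info="Failure",                     # 'None', 'Always', or 'Failure'
        defines={"mapreduce.job.queuename": "default"},
    )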
- :type tenant: object - :param cluster_resource_group: Required. The resource group where the - cluster belongs. Type: string (or Expression with resultType string). - :type cluster_resource_group: object - :param cluster_name_prefix: The prefix of cluster name, postfix will be - distinct with timestamp. Type: string (or Expression with resultType - string). - :type cluster_name_prefix: object - :param cluster_user_name: The username to access the cluster. Type: string - (or Expression with resultType string). - :type cluster_user_name: object - :param cluster_password: The password to access the cluster. - :type cluster_password: ~azure.mgmt.datafactory.models.SecretBase - :param cluster_ssh_user_name: The username to SSH remotely connect to - cluster’s node (for Linux). Type: string (or Expression with resultType - string). - :type cluster_ssh_user_name: object - :param cluster_ssh_password: The password to SSH remotely connect - cluster’s node (for Linux). - :type cluster_ssh_password: ~azure.mgmt.datafactory.models.SecretBase - :param additional_linked_service_names: Specifies additional storage - accounts for the HDInsight linked service so that the Data Factory service - can register them on your behalf. - :type additional_linked_service_names: - list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param hcatalog_linked_service_name: The name of Azure SQL linked service - that point to the HCatalog database. The on-demand HDInsight cluster is - created by using the Azure SQL database as the metastore. - :type hcatalog_linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param cluster_type: The cluster type. Type: string (or Expression with - resultType string). - :type cluster_type: object - :param spark_version: The version of spark if the cluster type is 'spark'. - Type: string (or Expression with resultType string). - :type spark_version: object - :param core_configuration: Specifies the core configuration parameters (as - in core-site.xml) for the HDInsight cluster to be created. - :type core_configuration: object - :param h_base_configuration: Specifies the HBase configuration parameters - (hbase-site.xml) for the HDInsight cluster. - :type h_base_configuration: object - :param hdfs_configuration: Specifies the HDFS configuration parameters - (hdfs-site.xml) for the HDInsight cluster. - :type hdfs_configuration: object - :param hive_configuration: Specifies the hive configuration parameters - (hive-site.xml) for the HDInsight cluster. - :type hive_configuration: object - :param map_reduce_configuration: Specifies the MapReduce configuration - parameters (mapred-site.xml) for the HDInsight cluster. - :type map_reduce_configuration: object - :param oozie_configuration: Specifies the Oozie configuration parameters - (oozie-site.xml) for the HDInsight cluster. - :type oozie_configuration: object - :param storm_configuration: Specifies the Storm configuration parameters - (storm-site.xml) for the HDInsight cluster. - :type storm_configuration: object - :param yarn_configuration: Specifies the Yarn configuration parameters - (yarn-site.xml) for the HDInsight cluster. - :type yarn_configuration: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - :param head_node_size: Specifies the size of the head node for the - HDInsight cluster. 
- :type head_node_size: object - :param data_node_size: Specifies the size of the data node for the - HDInsight cluster. - :type data_node_size: object - :param zookeeper_node_size: Specifies the size of the Zoo Keeper node for - the HDInsight cluster. - :type zookeeper_node_size: object - :param script_actions: Custom script actions to run on HDI ondemand - cluster once it's up. Please refer to - https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. - :type script_actions: list[~azure.mgmt.datafactory.models.ScriptAction] - :param virtual_network_id: The ARM resource ID for the vNet to which the - cluster should be joined after creation. Type: string (or Expression with - resultType string). - :type virtual_network_id: object - :param subnet_name: The ARM resource ID for the subnet in the vNet. If - virtualNetworkId was specified, then this property is required. Type: - string (or Expression with resultType string). - :type subnet_name: object - """ - - _validation = { - 'type': {'required': True}, - 'cluster_size': {'required': True}, - 'time_to_live': {'required': True}, - 'version': {'required': True}, - 'linked_service_name': {'required': True}, - 'host_subscription_id': {'required': True}, - 'tenant': {'required': True}, - 'cluster_resource_group': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'cluster_size': {'key': 'typeProperties.clusterSize', 'type': 'object'}, - 'time_to_live': {'key': 'typeProperties.timeToLive', 'type': 'object'}, - 'version': {'key': 'typeProperties.version', 'type': 'object'}, - 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, - 'host_subscription_id': {'key': 'typeProperties.hostSubscriptionId', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'cluster_resource_group': {'key': 'typeProperties.clusterResourceGroup', 'type': 'object'}, - 'cluster_name_prefix': {'key': 'typeProperties.clusterNamePrefix', 'type': 'object'}, - 'cluster_user_name': {'key': 'typeProperties.clusterUserName', 'type': 'object'}, - 'cluster_password': {'key': 'typeProperties.clusterPassword', 'type': 'SecretBase'}, - 'cluster_ssh_user_name': {'key': 'typeProperties.clusterSshUserName', 'type': 'object'}, - 'cluster_ssh_password': {'key': 'typeProperties.clusterSshPassword', 'type': 'SecretBase'}, - 'additional_linked_service_names': {'key': 'typeProperties.additionalLinkedServiceNames', 'type': '[LinkedServiceReference]'}, - 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, - 'cluster_type': {'key': 'typeProperties.clusterType', 'type': 'object'}, - 'spark_version': {'key': 'typeProperties.sparkVersion', 'type': 'object'}, - 'core_configuration': {'key': 'typeProperties.coreConfiguration', 'type': 'object'}, - 
'h_base_configuration': {'key': 'typeProperties.hBaseConfiguration', 'type': 'object'}, - 'hdfs_configuration': {'key': 'typeProperties.hdfsConfiguration', 'type': 'object'}, - 'hive_configuration': {'key': 'typeProperties.hiveConfiguration', 'type': 'object'}, - 'map_reduce_configuration': {'key': 'typeProperties.mapReduceConfiguration', 'type': 'object'}, - 'oozie_configuration': {'key': 'typeProperties.oozieConfiguration', 'type': 'object'}, - 'storm_configuration': {'key': 'typeProperties.stormConfiguration', 'type': 'object'}, - 'yarn_configuration': {'key': 'typeProperties.yarnConfiguration', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'head_node_size': {'key': 'typeProperties.headNodeSize', 'type': 'object'}, - 'data_node_size': {'key': 'typeProperties.dataNodeSize', 'type': 'object'}, - 'zookeeper_node_size': {'key': 'typeProperties.zookeeperNodeSize', 'type': 'object'}, - 'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'}, - 'virtual_network_id': {'key': 'typeProperties.virtualNetworkId', 'type': 'object'}, - 'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(HDInsightOnDemandLinkedService, self).__init__(**kwargs) - self.cluster_size = kwargs.get('cluster_size', None) - self.time_to_live = kwargs.get('time_to_live', None) - self.version = kwargs.get('version', None) - self.linked_service_name = kwargs.get('linked_service_name', None) - self.host_subscription_id = kwargs.get('host_subscription_id', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.cluster_resource_group = kwargs.get('cluster_resource_group', None) - self.cluster_name_prefix = kwargs.get('cluster_name_prefix', None) - self.cluster_user_name = kwargs.get('cluster_user_name', None) - self.cluster_password = kwargs.get('cluster_password', None) - self.cluster_ssh_user_name = kwargs.get('cluster_ssh_user_name', None) - self.cluster_ssh_password = kwargs.get('cluster_ssh_password', None) - self.additional_linked_service_names = kwargs.get('additional_linked_service_names', None) - self.hcatalog_linked_service_name = kwargs.get('hcatalog_linked_service_name', None) - self.cluster_type = kwargs.get('cluster_type', None) - self.spark_version = kwargs.get('spark_version', None) - self.core_configuration = kwargs.get('core_configuration', None) - self.h_base_configuration = kwargs.get('h_base_configuration', None) - self.hdfs_configuration = kwargs.get('hdfs_configuration', None) - self.hive_configuration = kwargs.get('hive_configuration', None) - self.map_reduce_configuration = kwargs.get('map_reduce_configuration', None) - self.oozie_configuration = kwargs.get('oozie_configuration', None) - self.storm_configuration = kwargs.get('storm_configuration', None) - self.yarn_configuration = kwargs.get('yarn_configuration', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.head_node_size = kwargs.get('head_node_size', None) - self.data_node_size = kwargs.get('data_node_size', None) - self.zookeeper_node_size = kwargs.get('zookeeper_node_size', None) - self.script_actions = kwargs.get('script_actions', None) - self.virtual_network_id = kwargs.get('virtual_network_id', None) - self.subnet_name = kwargs.get('subnet_name', None) - self.type = 'HDInsightOnDemand' diff --git 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service_py3.py deleted file mode 100644 index 178585c9b51d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service_py3.py +++ /dev/null @@ -1,237 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class HDInsightOnDemandLinkedService(LinkedService): - """HDInsight ondemand linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param cluster_size: Required. Number of worker/data nodes in the cluster. - Suggestion value: 4. Type: string (or Expression with resultType string). - :type cluster_size: object - :param time_to_live: Required. The allowed idle time for the on-demand - HDInsight cluster. Specifies how long the on-demand HDInsight cluster - stays alive after completion of an activity run if there are no other - active jobs in the cluster. The minimum value is 5 mins. Type: string (or - Expression with resultType string). - :type time_to_live: object - :param version: Required. Version of the HDInsight cluster.  Type: string - (or Expression with resultType string). - :type version: object - :param linked_service_name: Required. Azure Storage linked service to be - used by the on-demand cluster for storing and processing data. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param host_subscription_id: Required. The customer’s subscription to host - the cluster. Type: string (or Expression with resultType string). - :type host_subscription_id: object - :param service_principal_id: The service principal id for the - hostSubscriptionId. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key for the service principal id. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: Required. The Tenant id/name to which the service principal - belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param cluster_resource_group: Required. The resource group where the - cluster belongs. 
Type: string (or Expression with resultType string). - :type cluster_resource_group: object - :param cluster_name_prefix: The prefix of cluster name, postfix will be - distinct with timestamp. Type: string (or Expression with resultType - string). - :type cluster_name_prefix: object - :param cluster_user_name: The username to access the cluster. Type: string - (or Expression with resultType string). - :type cluster_user_name: object - :param cluster_password: The password to access the cluster. - :type cluster_password: ~azure.mgmt.datafactory.models.SecretBase - :param cluster_ssh_user_name: The username to SSH remotely connect to - cluster’s node (for Linux). Type: string (or Expression with resultType - string). - :type cluster_ssh_user_name: object - :param cluster_ssh_password: The password to SSH remotely connect - cluster’s node (for Linux). - :type cluster_ssh_password: ~azure.mgmt.datafactory.models.SecretBase - :param additional_linked_service_names: Specifies additional storage - accounts for the HDInsight linked service so that the Data Factory service - can register them on your behalf. - :type additional_linked_service_names: - list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param hcatalog_linked_service_name: The name of Azure SQL linked service - that point to the HCatalog database. The on-demand HDInsight cluster is - created by using the Azure SQL database as the metastore. - :type hcatalog_linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param cluster_type: The cluster type. Type: string (or Expression with - resultType string). - :type cluster_type: object - :param spark_version: The version of spark if the cluster type is 'spark'. - Type: string (or Expression with resultType string). - :type spark_version: object - :param core_configuration: Specifies the core configuration parameters (as - in core-site.xml) for the HDInsight cluster to be created. - :type core_configuration: object - :param h_base_configuration: Specifies the HBase configuration parameters - (hbase-site.xml) for the HDInsight cluster. - :type h_base_configuration: object - :param hdfs_configuration: Specifies the HDFS configuration parameters - (hdfs-site.xml) for the HDInsight cluster. - :type hdfs_configuration: object - :param hive_configuration: Specifies the hive configuration parameters - (hive-site.xml) for the HDInsight cluster. - :type hive_configuration: object - :param map_reduce_configuration: Specifies the MapReduce configuration - parameters (mapred-site.xml) for the HDInsight cluster. - :type map_reduce_configuration: object - :param oozie_configuration: Specifies the Oozie configuration parameters - (oozie-site.xml) for the HDInsight cluster. - :type oozie_configuration: object - :param storm_configuration: Specifies the Storm configuration parameters - (storm-site.xml) for the HDInsight cluster. - :type storm_configuration: object - :param yarn_configuration: Specifies the Yarn configuration parameters - (yarn-site.xml) for the HDInsight cluster. - :type yarn_configuration: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - :param head_node_size: Specifies the size of the head node for the - HDInsight cluster. - :type head_node_size: object - :param data_node_size: Specifies the size of the data node for the - HDInsight cluster. 
- :type data_node_size: object - :param zookeeper_node_size: Specifies the size of the Zoo Keeper node for - the HDInsight cluster. - :type zookeeper_node_size: object - :param script_actions: Custom script actions to run on HDI ondemand - cluster once it's up. Please refer to - https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. - :type script_actions: list[~azure.mgmt.datafactory.models.ScriptAction] - :param virtual_network_id: The ARM resource ID for the vNet to which the - cluster should be joined after creation. Type: string (or Expression with - resultType string). - :type virtual_network_id: object - :param subnet_name: The ARM resource ID for the subnet in the vNet. If - virtualNetworkId was specified, then this property is required. Type: - string (or Expression with resultType string). - :type subnet_name: object - """ - - _validation = { - 'type': {'required': True}, - 'cluster_size': {'required': True}, - 'time_to_live': {'required': True}, - 'version': {'required': True}, - 'linked_service_name': {'required': True}, - 'host_subscription_id': {'required': True}, - 'tenant': {'required': True}, - 'cluster_resource_group': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'cluster_size': {'key': 'typeProperties.clusterSize', 'type': 'object'}, - 'time_to_live': {'key': 'typeProperties.timeToLive', 'type': 'object'}, - 'version': {'key': 'typeProperties.version', 'type': 'object'}, - 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, - 'host_subscription_id': {'key': 'typeProperties.hostSubscriptionId', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'cluster_resource_group': {'key': 'typeProperties.clusterResourceGroup', 'type': 'object'}, - 'cluster_name_prefix': {'key': 'typeProperties.clusterNamePrefix', 'type': 'object'}, - 'cluster_user_name': {'key': 'typeProperties.clusterUserName', 'type': 'object'}, - 'cluster_password': {'key': 'typeProperties.clusterPassword', 'type': 'SecretBase'}, - 'cluster_ssh_user_name': {'key': 'typeProperties.clusterSshUserName', 'type': 'object'}, - 'cluster_ssh_password': {'key': 'typeProperties.clusterSshPassword', 'type': 'SecretBase'}, - 'additional_linked_service_names': {'key': 'typeProperties.additionalLinkedServiceNames', 'type': '[LinkedServiceReference]'}, - 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, - 'cluster_type': {'key': 'typeProperties.clusterType', 'type': 'object'}, - 'spark_version': {'key': 'typeProperties.sparkVersion', 'type': 'object'}, - 'core_configuration': {'key': 'typeProperties.coreConfiguration', 'type': 'object'}, - 'h_base_configuration': {'key': 'typeProperties.hBaseConfiguration', 'type': 'object'}, - 'hdfs_configuration': {'key': 
'typeProperties.hdfsConfiguration', 'type': 'object'}, - 'hive_configuration': {'key': 'typeProperties.hiveConfiguration', 'type': 'object'}, - 'map_reduce_configuration': {'key': 'typeProperties.mapReduceConfiguration', 'type': 'object'}, - 'oozie_configuration': {'key': 'typeProperties.oozieConfiguration', 'type': 'object'}, - 'storm_configuration': {'key': 'typeProperties.stormConfiguration', 'type': 'object'}, - 'yarn_configuration': {'key': 'typeProperties.yarnConfiguration', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'head_node_size': {'key': 'typeProperties.headNodeSize', 'type': 'object'}, - 'data_node_size': {'key': 'typeProperties.dataNodeSize', 'type': 'object'}, - 'zookeeper_node_size': {'key': 'typeProperties.zookeeperNodeSize', 'type': 'object'}, - 'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'}, - 'virtual_network_id': {'key': 'typeProperties.virtualNetworkId', 'type': 'object'}, - 'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'}, - } - - def __init__(self, *, cluster_size, time_to_live, version, linked_service_name, host_subscription_id, tenant, cluster_resource_group, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, cluster_name_prefix=None, cluster_user_name=None, cluster_password=None, cluster_ssh_user_name=None, cluster_ssh_password=None, additional_linked_service_names=None, hcatalog_linked_service_name=None, cluster_type=None, spark_version=None, core_configuration=None, h_base_configuration=None, hdfs_configuration=None, hive_configuration=None, map_reduce_configuration=None, oozie_configuration=None, storm_configuration=None, yarn_configuration=None, encrypted_credential=None, head_node_size=None, data_node_size=None, zookeeper_node_size=None, script_actions=None, virtual_network_id=None, subnet_name=None, **kwargs) -> None: - super(HDInsightOnDemandLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.cluster_size = cluster_size - self.time_to_live = time_to_live - self.version = version - self.linked_service_name = linked_service_name - self.host_subscription_id = host_subscription_id - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.cluster_resource_group = cluster_resource_group - self.cluster_name_prefix = cluster_name_prefix - self.cluster_user_name = cluster_user_name - self.cluster_password = cluster_password - self.cluster_ssh_user_name = cluster_ssh_user_name - self.cluster_ssh_password = cluster_ssh_password - self.additional_linked_service_names = additional_linked_service_names - self.hcatalog_linked_service_name = hcatalog_linked_service_name - self.cluster_type = cluster_type - self.spark_version = spark_version - self.core_configuration = core_configuration - self.h_base_configuration = h_base_configuration - self.hdfs_configuration = hdfs_configuration - self.hive_configuration = hive_configuration - self.map_reduce_configuration = map_reduce_configuration - self.oozie_configuration = oozie_configuration - self.storm_configuration = storm_configuration - self.yarn_configuration = yarn_configuration - self.encrypted_credential = encrypted_credential - self.head_node_size = head_node_size - 
self.data_node_size = data_node_size - self.zookeeper_node_size = zookeeper_node_size - self.script_actions = script_actions - self.virtual_network_id = virtual_network_id - self.subnet_name = subnet_name - self.type = 'HDInsightOnDemand' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity.py deleted file mode 100644 index 61b939076db6..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity.py +++ /dev/null @@ -1,87 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity import ExecutionActivity - - -class HDInsightPigActivity(ExecutionActivity): - """HDInsight Pig activity type. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: - list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: 'None', - 'Always', 'Failure' - :type get_debug_info: str or - ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param script_path: Script path. Type: string (or Expression with - resultType string). - :type script_path: object - :param script_linked_service: Script linked service reference. - :type script_linked_service: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param defines: Allows user to specify defines for Pig job request. 
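For context, a minimal sketch of the HDInsightOnDemandLinkedService model removed above, including the virtualNetworkId/subnetName properties this change introduces. Subscription, tenant, and resource IDs are placeholders; per the docstring, subnet_name is required whenever virtual_network_id is set.

    from azure.mgmt.datafactory.models import (
        HDInsightOnDemandLinkedService,
        LinkedServiceReference,
        SecureString,
    )

    on_demand_hdi = HDInsightOnDemandLinkedService(
        cluster_size=4,                               # required; suggestion value: 4
        time_to_live="00:05:00",                      # required; minimum is 5 mins
        version="3.6",                                # required
        linked_service_name=LinkedServiceReference(reference_name="MyStorage"),
        host_subscription_id="<subscription-id>",     # required
        tenant="<tenant-id>",                         # required
        cluster_resource_group="my-rg",               # required
        service_principal_id="<app-id>",
        service_principal_key=SecureString(value="<app-key>"),
        # vNet support for HDI on demand (this PR): join the transient
        # cluster to an existing subnet after creation.
        virtual_network_id=(
            "/subscriptions/<sub>/resourceGroups/my-rg"
            "/providers/Microsoft.Network/virtualNetworks/my-vnet"
        ),
        subnet_name="my-subnet",                      # required if virtual_network_id is set
    )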
- :type defines: dict[str, object] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, - 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, - } - - def __init__(self, **kwargs): - super(HDInsightPigActivity, self).__init__(**kwargs) - self.storage_linked_services = kwargs.get('storage_linked_services', None) - self.arguments = kwargs.get('arguments', None) - self.get_debug_info = kwargs.get('get_debug_info', None) - self.script_path = kwargs.get('script_path', None) - self.script_linked_service = kwargs.get('script_linked_service', None) - self.defines = kwargs.get('defines', None) - self.type = 'HDInsightPig' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity_py3.py deleted file mode 100644 index fb149df91f39..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity_py3.py +++ /dev/null @@ -1,87 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class HDInsightPigActivity(ExecutionActivity): - """HDInsight Pig activity type. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. 
- :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: - list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: 'None', - 'Always', 'Failure' - :type get_debug_info: str or - ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param script_path: Script path. Type: string (or Expression with - resultType string). - :type script_path: object - :param script_linked_service: Script linked service reference. - :type script_linked_service: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param defines: Allows user to specify defines for Pig job request. - :type defines: dict[str, object] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, - 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, - } - - def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, script_path=None, script_linked_service=None, defines=None, **kwargs) -> None: - super(HDInsightPigActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.storage_linked_services = storage_linked_services - self.arguments = arguments - self.get_debug_info = get_debug_info - self.script_path = script_path - self.script_linked_service = script_linked_service - self.defines = defines - self.type = 'HDInsightPig' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity.py deleted file mode 100644 index 7822344f012f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity.py +++ /dev/null @@ -1,100 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
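Similarly, a hedged sketch of the HDInsightPigActivity model removed above. Only name is required beyond the base activity; the script path and linked-service references below are hypothetical.

    from azure.mgmt.datafactory.models import (
        HDInsightPigActivity,
        LinkedServiceReference,
    )

    pig_activity = HDInsightPigActivity(
        name="TransformLogs",                         # required
        linked_service_name=LinkedServiceReference(reference_name="MyHDICluster"),
        script_path="adfjobs/scripts/transform.pig",
        script_linked_service=LinkedServiceReference(reference_name="MyStorage"),
        defines={"inputPath": "wasbs://data@acct.blob.core.windows.net/raw"},
    )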
-# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity import ExecutionActivity - - -class HDInsightSparkActivity(ExecutionActivity): - """HDInsight Spark activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param root_path: Required. The root path in 'sparkJobLinkedService' for - all the job’s files. Type: string (or Expression with resultType string). - :type root_path: object - :param entry_file_path: Required. The relative path to the root folder of - the code/package to be executed. Type: string (or Expression with - resultType string). - :type entry_file_path: object - :param arguments: The user-specified arguments to HDInsightSparkActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: 'None', - 'Always', 'Failure' - :type get_debug_info: str or - ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param spark_job_linked_service: The storage linked service for uploading - the entry file and dependencies, and for receiving logs. - :type spark_job_linked_service: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param class_name: The application's Java/Spark main class. - :type class_name: str - :param proxy_user: The user to impersonate that will execute the job. - Type: string (or Expression with resultType string). - :type proxy_user: object - :param spark_config: Spark configuration property. 
- :type spark_config: dict[str, object] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'root_path': {'required': True}, - 'entry_file_path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'root_path': {'key': 'typeProperties.rootPath', 'type': 'object'}, - 'entry_file_path': {'key': 'typeProperties.entryFilePath', 'type': 'object'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'spark_job_linked_service': {'key': 'typeProperties.sparkJobLinkedService', 'type': 'LinkedServiceReference'}, - 'class_name': {'key': 'typeProperties.className', 'type': 'str'}, - 'proxy_user': {'key': 'typeProperties.proxyUser', 'type': 'object'}, - 'spark_config': {'key': 'typeProperties.sparkConfig', 'type': '{object}'}, - } - - def __init__(self, **kwargs): - super(HDInsightSparkActivity, self).__init__(**kwargs) - self.root_path = kwargs.get('root_path', None) - self.entry_file_path = kwargs.get('entry_file_path', None) - self.arguments = kwargs.get('arguments', None) - self.get_debug_info = kwargs.get('get_debug_info', None) - self.spark_job_linked_service = kwargs.get('spark_job_linked_service', None) - self.class_name = kwargs.get('class_name', None) - self.proxy_user = kwargs.get('proxy_user', None) - self.spark_config = kwargs.get('spark_config', None) - self.type = 'HDInsightSpark' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity_py3.py deleted file mode 100644 index 3f305901abb7..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity_py3.py +++ /dev/null @@ -1,100 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class HDInsightSparkActivity(ExecutionActivity): - """HDInsight Spark activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. 
- :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param root_path: Required. The root path in 'sparkJobLinkedService' for - all the job’s files. Type: string (or Expression with resultType string). - :type root_path: object - :param entry_file_path: Required. The relative path to the root folder of - the code/package to be executed. Type: string (or Expression with - resultType string). - :type entry_file_path: object - :param arguments: The user-specified arguments to HDInsightSparkActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: 'None', - 'Always', 'Failure' - :type get_debug_info: str or - ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param spark_job_linked_service: The storage linked service for uploading - the entry file and dependencies, and for receiving logs. - :type spark_job_linked_service: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param class_name: The application's Java/Spark main class. - :type class_name: str - :param proxy_user: The user to impersonate that will execute the job. - Type: string (or Expression with resultType string). - :type proxy_user: object - :param spark_config: Spark configuration property. - :type spark_config: dict[str, object] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'root_path': {'required': True}, - 'entry_file_path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'root_path': {'key': 'typeProperties.rootPath', 'type': 'object'}, - 'entry_file_path': {'key': 'typeProperties.entryFilePath', 'type': 'object'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'spark_job_linked_service': {'key': 'typeProperties.sparkJobLinkedService', 'type': 'LinkedServiceReference'}, - 'class_name': {'key': 'typeProperties.className', 'type': 'str'}, - 'proxy_user': {'key': 'typeProperties.proxyUser', 'type': 'object'}, - 'spark_config': {'key': 'typeProperties.sparkConfig', 'type': '{object}'}, - } - - def __init__(self, *, name: str, root_path, entry_file_path, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, arguments=None, get_debug_info=None, spark_job_linked_service=None, class_name: str=None, proxy_user=None, spark_config=None, **kwargs) -> None: - super(HDInsightSparkActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.root_path = root_path - 
self.entry_file_path = entry_file_path - self.arguments = arguments - self.get_debug_info = get_debug_info - self.spark_job_linked_service = spark_job_linked_service - self.class_name = class_name - self.proxy_user = proxy_user - self.spark_config = spark_config - self.type = 'HDInsightSpark' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity.py deleted file mode 100644 index 42146a5d6cc6..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity.py +++ /dev/null @@ -1,122 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity import ExecutionActivity - - -class HDInsightStreamingActivity(ExecutionActivity): - """HDInsight streaming activity type. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: - list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: 'None', - 'Always', 'Failure' - :type get_debug_info: str or - ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param mapper: Required. Mapper executable name. Type: string (or - Expression with resultType string). - :type mapper: object - :param reducer: Required. Reducer executable name. Type: string (or - Expression with resultType string). - :type reducer: object - :param input: Required. Input blob path. Type: string (or Expression with - resultType string). - :type input: object - :param output: Required. Output blob path. Type: string (or Expression - with resultType string). - :type output: object - :param file_paths: Required. Paths to streaming job files. Can be - directories. - :type file_paths: list[object] - :param file_linked_service: Linked service reference where the files are - located. 
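A comparable sketch for the HDInsightSparkActivity model removed above. name, root_path, and entry_file_path are required; the storage layout (a root folder in sparkJobLinkedService plus a relative entry file) follows the docstring, and all concrete values are placeholders.

    from azure.mgmt.datafactory.models import (
        HDInsightSparkActivity,
        LinkedServiceReference,
    )

    spark_activity = HDInsightSparkActivity(
        name="ScoreModel",                            # required
        linked_service_name=LinkedServiceReference(reference_name="MyHDICluster"),
        root_path="adfspark",                         # required; root in sparkJobLinkedService
        entry_file_path="pyFiles/score.py",           # required; relative to root_path
        spark_job_linked_service=LinkedServiceReference(reference_name="MyStorage"),
        arguments=["--date", "2019-06-07"],
        spark_config={"spark.executor.memory": "4g"},
    )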
- :type file_linked_service: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param combiner: Combiner executable name. Type: string (or Expression - with resultType string). - :type combiner: object - :param command_environment: Command line environment values. - :type command_environment: list[object] - :param defines: Allows user to specify defines for streaming job request. - :type defines: dict[str, object] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'mapper': {'required': True}, - 'reducer': {'required': True}, - 'input': {'required': True}, - 'output': {'required': True}, - 'file_paths': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'mapper': {'key': 'typeProperties.mapper', 'type': 'object'}, - 'reducer': {'key': 'typeProperties.reducer', 'type': 'object'}, - 'input': {'key': 'typeProperties.input', 'type': 'object'}, - 'output': {'key': 'typeProperties.output', 'type': 'object'}, - 'file_paths': {'key': 'typeProperties.filePaths', 'type': '[object]'}, - 'file_linked_service': {'key': 'typeProperties.fileLinkedService', 'type': 'LinkedServiceReference'}, - 'combiner': {'key': 'typeProperties.combiner', 'type': 'object'}, - 'command_environment': {'key': 'typeProperties.commandEnvironment', 'type': '[object]'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, - } - - def __init__(self, **kwargs): - super(HDInsightStreamingActivity, self).__init__(**kwargs) - self.storage_linked_services = kwargs.get('storage_linked_services', None) - self.arguments = kwargs.get('arguments', None) - self.get_debug_info = kwargs.get('get_debug_info', None) - self.mapper = kwargs.get('mapper', None) - self.reducer = kwargs.get('reducer', None) - self.input = kwargs.get('input', None) - self.output = kwargs.get('output', None) - self.file_paths = kwargs.get('file_paths', None) - self.file_linked_service = kwargs.get('file_linked_service', None) - self.combiner = kwargs.get('combiner', None) - self.command_environment = kwargs.get('command_environment', None) - self.defines = kwargs.get('defines', None) - self.type = 'HDInsightStreaming' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity_py3.py deleted file mode 100644 index 2f5a301ff880..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity_py3.py +++ /dev/null @@ -1,122 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class HDInsightStreamingActivity(ExecutionActivity): - """HDInsight streaming activity type. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: - list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: 'None', - 'Always', 'Failure' - :type get_debug_info: str or - ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param mapper: Required. Mapper executable name. Type: string (or - Expression with resultType string). - :type mapper: object - :param reducer: Required. Reducer executable name. Type: string (or - Expression with resultType string). - :type reducer: object - :param input: Required. Input blob path. Type: string (or Expression with - resultType string). - :type input: object - :param output: Required. Output blob path. Type: string (or Expression - with resultType string). - :type output: object - :param file_paths: Required. Paths to streaming job files. Can be - directories. - :type file_paths: list[object] - :param file_linked_service: Linked service reference where the files are - located. - :type file_linked_service: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param combiner: Combiner executable name. Type: string (or Expression - with resultType string). - :type combiner: object - :param command_environment: Command line environment values. - :type command_environment: list[object] - :param defines: Allows user to specify defines for streaming job request. 
- :type defines: dict[str, object] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'mapper': {'required': True}, - 'reducer': {'required': True}, - 'input': {'required': True}, - 'output': {'required': True}, - 'file_paths': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'mapper': {'key': 'typeProperties.mapper', 'type': 'object'}, - 'reducer': {'key': 'typeProperties.reducer', 'type': 'object'}, - 'input': {'key': 'typeProperties.input', 'type': 'object'}, - 'output': {'key': 'typeProperties.output', 'type': 'object'}, - 'file_paths': {'key': 'typeProperties.filePaths', 'type': '[object]'}, - 'file_linked_service': {'key': 'typeProperties.fileLinkedService', 'type': 'LinkedServiceReference'}, - 'combiner': {'key': 'typeProperties.combiner', 'type': 'object'}, - 'command_environment': {'key': 'typeProperties.commandEnvironment', 'type': '[object]'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, - } - - def __init__(self, *, name: str, mapper, reducer, input, output, file_paths, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, file_linked_service=None, combiner=None, command_environment=None, defines=None, **kwargs) -> None: - super(HDInsightStreamingActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.storage_linked_services = storage_linked_services - self.arguments = arguments - self.get_debug_info = get_debug_info - self.mapper = mapper - self.reducer = reducer - self.input = input - self.output = output - self.file_paths = file_paths - self.file_linked_service = file_linked_service - self.combiner = combiner - self.command_environment = command_environment - self.defines = defines - self.type = 'HDInsightStreaming' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service.py deleted file mode 100644 index b527f05a7e2f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service.py +++ /dev/null @@ -1,81 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. 
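As a quick reference while reviewing this removal, a minimal construction sketch for the deleted HDInsightStreamingActivity model; the cluster linked service name, executables, and WASB paths below are illustrative, not taken from this patch:

from azure.mgmt.datafactory.models import (
    HDInsightStreamingActivity, LinkedServiceReference)

# Required arguments per the signature above: name, mapper, reducer,
# input, output, file_paths.
activity = HDInsightStreamingActivity(
    name='StreamingWordCount',
    linked_service_name=LinkedServiceReference(reference_name='MyHDInsightCluster'),  # hypothetical
    mapper='cat.exe',
    reducer='wc.exe',
    input='wasb://adfsample@example.blob.core.windows.net/data/davinci.txt',
    output='wasb://adfsample@example.blob.core.windows.net/data/wc_output',
    file_paths=['adfsample/apps/cat.exe', 'adfsample/apps/wc.exe'],
    get_debug_info='Failure',                 # one of 'None', 'Always', 'Failure'
    defines={'sampleKey': 'sampleValue'},     # optional streaming-job defines
)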
-# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class HdfsLinkedService(LinkedService): - """Hadoop Distributed File System (HDFS) linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param url: Required. The URL of the HDFS service endpoint, e.g. - http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with - resultType string). - :type url: object - :param authentication_type: Type of authentication used to connect to the - HDFS. Possible values are: Anonymous and Windows. Type: string (or - Expression with resultType string). - :type authentication_type: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - :param user_name: User name for Windows authentication. Type: string (or - Expression with resultType string). - :type user_name: object - :param password: Password for Windows authentication. 
- :type password: ~azure.mgmt.datafactory.models.SecretBase - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - } - - def __init__(self, **kwargs): - super(HdfsLinkedService, self).__init__(**kwargs) - self.url = kwargs.get('url', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.type = 'Hdfs' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service_py3.py deleted file mode 100644 index e004701e1da0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service_py3.py +++ /dev/null @@ -1,81 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class HdfsLinkedService(LinkedService): - """Hadoop Distributed File System (HDFS) linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param url: Required. The URL of the HDFS service endpoint, e.g. - http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with - resultType string). - :type url: object - :param authentication_type: Type of authentication used to connect to the - HDFS. Possible values are: Anonymous and Windows. Type: string (or - Expression with resultType string). 
- :type authentication_type: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - :param user_name: User name for Windows authentication. Type: string (or - Expression with resultType string). - :type user_name: object - :param password: Password for Windows authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - } - - def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, encrypted_credential=None, user_name=None, password=None, **kwargs) -> None: - super(HdfsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.url = url - self.authentication_type = authentication_type - self.encrypted_credential = encrypted_credential - self.user_name = user_name - self.password = password - self.type = 'Hdfs' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location.py deleted file mode 100644 index a8f5d1ba332c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_location import DatasetLocation - - -class HdfsLocation(DatasetLocation): - """The location of HDFS. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. 
Type: string (or - Expression with resultType string). - :type file_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(HdfsLocation, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location_py3.py deleted file mode 100644 index 2e07575bef0f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location_py3.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_location_py3 import DatasetLocation - - -class HdfsLocation(DatasetLocation): - """The location of HDFS. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). - :type file_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: - super(HdfsLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings.py deleted file mode 100644 index ec4b98c50385..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings.py +++ /dev/null @@ -1,77 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
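For reviewers, a minimal sketch of the HdfsLinkedService and HdfsLocation models deleted above, assuming Windows authentication and a SecureString secret; the host, account, and paths are illustrative:

from azure.mgmt.datafactory.models import HdfsLinkedService, HdfsLocation, SecureString

hdfs = HdfsLinkedService(
    url='http://myhostname:50070/webhdfs/v1',   # endpoint form from the docstring
    authentication_type='Windows',              # 'Anonymous' or 'Windows'
    user_name='MYDOMAIN\\svc_adf',              # hypothetical Windows account
    password=SecureString(value='<secret>'),    # any SecretBase implementation
)

location = HdfsLocation(
    type='HdfsLocation',      # assumed discriminator value; caller-supplied in this version
    folder_path='data/incoming',
    file_name='events.csv',
)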
-# -------------------------------------------------------------------------- - -from .store_read_settings import StoreReadSettings - - -class HdfsReadSettings(StoreReadSettings): - """HDFS read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - :param wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or - Expression with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: HDFS wildcardFileName. Type: string (or - Expression with resultType string). - :type wildcard_file_name: object - :param enable_partition_discovery: Indicates whether to enable partition - discovery. - :type enable_partition_discovery: bool - :param modified_datetime_start: The start of file's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: - string (or Expression with resultType string). - :type modified_datetime_end: object - :param distcp_settings: Specifies Distcp-related settings. - :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, - } - - def __init__(self, **kwargs): - super(HdfsReadSettings, self).__init__(**kwargs) - self.recursive = kwargs.get('recursive', None) - self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) - self.wildcard_file_name = kwargs.get('wildcard_file_name', None) - self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) - self.distcp_settings = kwargs.get('distcp_settings', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings_py3.py deleted file mode 100644 index c37a045ec93c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings_py3.py +++ /dev/null @@ -1,77 +0,0 @@ -# coding=utf-8 -# 
-------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .store_read_settings_py3 import StoreReadSettings - - -class HdfsReadSettings(StoreReadSettings): - """HDFS read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - :param wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or - Expression with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: HDFS wildcardFileName. Type: string (or - Expression with resultType string). - :type wildcard_file_name: object - :param enable_partition_discovery: Indicates whether to enable partition - discovery. - :type enable_partition_discovery: bool - :param modified_datetime_start: The start of file's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: - string (or Expression with resultType string). - :type modified_datetime_end: object - :param distcp_settings: Specifies Distcp-related settings. 
- :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, - } - - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, distcp_settings=None, **kwargs) -> None: - super(HdfsReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.recursive = recursive - self.wildcard_folder_path = wildcard_folder_path - self.wildcard_file_name = wildcard_file_name - self.enable_partition_discovery = enable_partition_discovery - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end - self.distcp_settings = distcp_settings diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source.py deleted file mode 100644 index be50590f6c32..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source.py +++ /dev/null @@ -1,62 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class HdfsSource(CopySource): - """A copy activity HDFS source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. 
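A minimal sketch of the HdfsReadSettings model removed here, assuming the 'HdfsReadSettings' discriminator string and a DistcpSettings shape taking resource_manager_endpoint and temp_script_path (both values hypothetical):

from azure.mgmt.datafactory.models import HdfsReadSettings, DistcpSettings

read_settings = HdfsReadSettings(
    type='HdfsReadSettings',                     # assumed discriminator value
    recursive=True,
    wildcard_folder_path='logs/2019/*',
    wildcard_file_name='*.json',
    modified_datetime_start='2019-06-01T00:00:00Z',
    modified_datetime_end='2019-06-07T00:00:00Z',
    distcp_settings=DistcpSettings(
        resource_manager_endpoint='http://namenode:8088',  # hypothetical YARN endpoint
        temp_script_path='/tmp/distcp',                    # hypothetical script folder
    ),
)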
- :type type: str - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - :param distcp_settings: Specifies Distcp-related settings. - :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, - } - - def __init__(self, **kwargs): - super(HdfsSource, self).__init__(**kwargs) - self.recursive = kwargs.get('recursive', None) - self.distcp_settings = kwargs.get('distcp_settings', None) - self.type = 'HdfsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source_py3.py deleted file mode 100644 index 3c60cab46289..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source_py3.py +++ /dev/null @@ -1,62 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class HdfsSource(CopySource): - """A copy activity HDFS source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - :param distcp_settings: Specifies Distcp-related settings. 
- :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, distcp_settings=None, **kwargs) -> None: - super(HdfsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.recursive = recursive - self.distcp_settings = distcp_settings - self.type = 'HdfsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service.py deleted file mode 100644 index c54c1393d56e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service.py +++ /dev/null @@ -1,147 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class HiveLinkedService(LinkedService): - """Hive Server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. IP address or host name of the Hive server, - separated by ';' for multiple hosts (only when serviceDiscoveryMode is - enable). - :type host: object - :param port: The TCP port that the Hive server uses to listen for client - connections. - :type port: object - :param server_type: The type of Hive server. Possible values include: - 'HiveServer1', 'HiveServer2', 'HiveThriftServer' - :type server_type: str or ~azure.mgmt.datafactory.models.HiveServerType - :param thrift_transport_protocol: The transport protocol to use in the - Thrift layer. 
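The matching copy-activity source is a thin wrapper over the same options; a minimal sketch of the deleted HdfsSource model:

from azure.mgmt.datafactory.models import HdfsSource

# recursive and distcp_settings are the only type-specific properties;
# the rest come from CopySource.
source = HdfsSource(
    recursive=True,
    max_concurrent_connections=4,   # optional throttle on the source store
)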
Possible values include: 'Binary', 'SASL', 'HTTP ' - :type thrift_transport_protocol: str or - ~azure.mgmt.datafactory.models.HiveThriftTransportProtocol - :param authentication_type: Required. The authentication method used to - access the Hive server. Possible values include: 'Anonymous', 'Username', - 'UsernameAndPassword', 'WindowsAzureHDInsightService' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.HiveAuthenticationType - :param service_discovery_mode: true to indicate using the ZooKeeper - service, false not. - :type service_discovery_mode: object - :param zoo_keeper_name_space: The namespace on ZooKeeper under which Hive - Server 2 nodes are added. - :type zoo_keeper_name_space: object - :param use_native_query: Specifies whether the driver uses native HiveQL - queries,or converts them into an equivalent form in HiveQL. - :type use_native_query: object - :param username: The user name that you use to access Hive Server. - :type username: object - :param password: The password corresponding to the user name that you - provided in the Username field - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param http_path: The partial URL corresponding to the Hive server. - :type http_path: object - :param enable_ssl: Specifies whether the connections to the server are - encrypted using SSL. The default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing - trusted CA certificates for verifying the server when connecting over SSL. - This property can only be set when using SSL on self-hosted IR. The - default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate - from the system trust store or from a specified PEM file. The default - value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a - CA-issued SSL certificate name to match the host name of the server when - connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow - self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, - 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'service_discovery_mode': {'key': 'typeProperties.serviceDiscoveryMode', 'type': 'object'}, - 'zoo_keeper_name_space': {'key': 'typeProperties.zooKeeperNameSpace', 'type': 'object'}, - 'use_native_query': {'key': 'typeProperties.useNativeQuery', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(HiveLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.port = kwargs.get('port', None) - self.server_type = kwargs.get('server_type', None) - self.thrift_transport_protocol = kwargs.get('thrift_transport_protocol', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.service_discovery_mode = kwargs.get('service_discovery_mode', None) - self.zoo_keeper_name_space = kwargs.get('zoo_keeper_name_space', None) - self.use_native_query = kwargs.get('use_native_query', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.http_path = kwargs.get('http_path', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Hive' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service_py3.py deleted file mode 100644 index 611d30ecb781..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service_py3.py +++ /dev/null @@ -1,147 +0,0 @@ -# 
coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class HiveLinkedService(LinkedService): - """Hive Server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. IP address or host name of the Hive server, - separated by ';' for multiple hosts (only when serviceDiscoveryMode is - enable). - :type host: object - :param port: The TCP port that the Hive server uses to listen for client - connections. - :type port: object - :param server_type: The type of Hive server. Possible values include: - 'HiveServer1', 'HiveServer2', 'HiveThriftServer' - :type server_type: str or ~azure.mgmt.datafactory.models.HiveServerType - :param thrift_transport_protocol: The transport protocol to use in the - Thrift layer. Possible values include: 'Binary', 'SASL', 'HTTP ' - :type thrift_transport_protocol: str or - ~azure.mgmt.datafactory.models.HiveThriftTransportProtocol - :param authentication_type: Required. The authentication method used to - access the Hive server. Possible values include: 'Anonymous', 'Username', - 'UsernameAndPassword', 'WindowsAzureHDInsightService' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.HiveAuthenticationType - :param service_discovery_mode: true to indicate using the ZooKeeper - service, false not. - :type service_discovery_mode: object - :param zoo_keeper_name_space: The namespace on ZooKeeper under which Hive - Server 2 nodes are added. - :type zoo_keeper_name_space: object - :param use_native_query: Specifies whether the driver uses native HiveQL - queries,or converts them into an equivalent form in HiveQL. - :type use_native_query: object - :param username: The user name that you use to access Hive Server. - :type username: object - :param password: The password corresponding to the user name that you - provided in the Username field - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param http_path: The partial URL corresponding to the Hive server. - :type http_path: object - :param enable_ssl: Specifies whether the connections to the server are - encrypted using SSL. The default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing - trusted CA certificates for verifying the server when connecting over SSL. 
- This property can only be set when using SSL on self-hosted IR. The - default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate - from the system trust store or from a specified PEM file. The default - value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a - CA-issued SSL certificate name to match the host name of the server when - connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow - self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, - 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'service_discovery_mode': {'key': 'typeProperties.serviceDiscoveryMode', 'type': 'object'}, - 'zoo_keeper_name_space': {'key': 'typeProperties.zooKeeperNameSpace', 'type': 'object'}, - 'use_native_query': {'key': 'typeProperties.useNativeQuery', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, server_type=None, thrift_transport_protocol=None, service_discovery_mode=None, zoo_keeper_name_space=None, use_native_query=None, username=None, password=None, http_path=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: - super(HiveLinkedService, 
self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.port = port - self.server_type = server_type - self.thrift_transport_protocol = thrift_transport_protocol - self.authentication_type = authentication_type - self.service_discovery_mode = service_discovery_mode - self.zoo_keeper_name_space = zoo_keeper_name_space - self.use_native_query = use_native_query - self.username = username - self.password = password - self.http_path = http_path - self.enable_ssl = enable_ssl - self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch - self.allow_self_signed_server_cert = allow_self_signed_server_cert - self.encrypted_credential = encrypted_credential - self.type = 'Hive' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset.py deleted file mode 100644 index 07b6f2b54901..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class HiveObjectDataset(Dataset): - """Hive Server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of the Hive. Type: string (or Expression with - resultType string). - :type table: object - :param hive_object_dataset_schema: The schema name of the Hive. 
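A minimal construction sketch for the HiveLinkedService model deleted above, assuming HiveServer2 with username/password authentication; host and credentials are illustrative:

from azure.mgmt.datafactory.models import HiveLinkedService, SecureString

hive = HiveLinkedService(
    host='hiveserver.example.com',              # required
    authentication_type='UsernameAndPassword',  # required; see HiveAuthenticationType
    port=10000,                                 # common HiveServer2 Thrift port
    server_type='HiveServer2',
    username='analyst',
    password=SecureString(value='<secret>'),
    enable_ssl=True,
)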
Type: - string (or Expression with resultType string). - :type hive_object_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'hive_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(HiveObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.hive_object_dataset_schema = kwargs.get('hive_object_dataset_schema', None) - self.type = 'HiveObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset_py3.py deleted file mode 100644 index 69384bdfa99a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class HiveObjectDataset(Dataset): - """Hive Server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. 
- :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of the Hive. Type: string (or Expression with - resultType string). - :type table: object - :param hive_object_dataset_schema: The schema name of the Hive. Type: - string (or Expression with resultType string). - :type hive_object_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'hive_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, hive_object_dataset_schema=None, **kwargs) -> None: - super(HiveObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.table = table - self.hive_object_dataset_schema = hive_object_dataset_schema - self.type = 'HiveObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source.py deleted file mode 100644 index 3af88c3280e3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class HiveSource(CopySource): - """A copy activity Hive Server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
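And a sketch of the companion HiveObjectDataset, assuming a linked service named 'MyHiveServer' is already registered; note that hive_object_dataset_schema serializes to typeProperties.schema:

from azure.mgmt.datafactory.models import HiveObjectDataset, LinkedServiceReference

dataset = HiveObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='MyHiveServer'),  # hypothetical
    hive_object_dataset_schema='default',   # serialized as typeProperties.schema
    table='trip_data',                      # preferred over the retiring table_name
)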
- :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(HiveSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'HiveSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source_py3.py deleted file mode 100644 index 6c09191b8c1b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class HiveSource(CopySource): - """A copy activity Hive Server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'HiveSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset.py deleted file mode 100644 index f2184dea151f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset.py +++ /dev/null @@ -1,99 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class HttpDataset(Dataset): - """A file in an HTTP web server. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param relative_url: The relative URL based on the URL in the - HttpLinkedService refers to an HTTP file Type: string (or Expression with - resultType string). - :type relative_url: object - :param request_method: The HTTP method for the HTTP request. Type: string - (or Expression with resultType string). 
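Aside: a minimal sketch of the HiveSource model above; the query text and retry values are purely illustrative.

    from azure.mgmt.datafactory.models import HiveSource

    hive_source = HiveSource(
        query='SELECT id, name FROM customer_events',  # string or Expression
        source_retry_count=2,
        source_retry_wait='00:00:30',  # matches the documented timespan pattern
    )
    assert hive_source.type == 'HiveSource'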
- :type request_method: object - :param request_body: The body for the HTTP request. Type: string (or - Expression with resultType string). - :type request_body: object - :param additional_headers: The headers for the HTTP Request. e.g. - request-header-name-1:request-header-value-1 - ... - request-header-name-n:request-header-value-n Type: string (or Expression - with resultType string). - :type additional_headers: object - :param format: The format of files. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used on files. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, - 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, - 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, - 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, **kwargs): - super(HttpDataset, self).__init__(**kwargs) - self.relative_url = kwargs.get('relative_url', None) - self.request_method = kwargs.get('request_method', None) - self.request_body = kwargs.get('request_body', None) - self.additional_headers = kwargs.get('additional_headers', None) - self.format = kwargs.get('format', None) - self.compression = kwargs.get('compression', None) - self.type = 'HttpFile' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset_py3.py deleted file mode 100644 index 09f97a03a95d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset_py3.py +++ /dev/null @@ -1,99 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class HttpDataset(Dataset): - """A file in an HTTP web server. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. 
- :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param relative_url: The relative URL based on the URL in the - HttpLinkedService refers to an HTTP file Type: string (or Expression with - resultType string). - :type relative_url: object - :param request_method: The HTTP method for the HTTP request. Type: string - (or Expression with resultType string). - :type request_method: object - :param request_body: The body for the HTTP request. Type: string (or - Expression with resultType string). - :type request_body: object - :param additional_headers: The headers for the HTTP Request. e.g. - request-header-name-1:request-header-value-1 - ... - request-header-name-n:request-header-value-n Type: string (or Expression - with resultType string). - :type additional_headers: object - :param format: The format of files. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used on files. 
- :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, - 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, - 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, - 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, relative_url=None, request_method=None, request_body=None, additional_headers=None, format=None, compression=None, **kwargs) -> None: - super(HttpDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.relative_url = relative_url - self.request_method = request_method - self.request_body = request_body - self.additional_headers = additional_headers - self.format = format - self.compression = compression - self.type = 'HttpFile' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service.py deleted file mode 100644 index 6232bc45fee4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service.py +++ /dev/null @@ -1,105 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class HttpLinkedService(LinkedService): - """Linked service for an HTTP source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
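Aside: a minimal sketch of the HttpDataset model above. Note that the discriminator is 'HttpFile', not 'HttpDataset'; the linked-service name and URL fragment are placeholders.

    from azure.mgmt.datafactory.models import HttpDataset, LinkedServiceReference

    http_file = HttpDataset(
        linked_service_name=LinkedServiceReference(reference_name='ExampleHttpLinkedService'),
        relative_url='data/export.csv',    # resolved against the linked service base URL
        request_method='GET',
        additional_headers='x-trace-id:1234',
    )
    assert http_file.type == 'HttpFile'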
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param url: Required. The base URL of the HTTP endpoint, e.g. - http://www.microsoft.com. Type: string (or Expression with resultType - string). - :type url: object - :param authentication_type: The authentication type to be used to connect - to the HTTP server. Possible values include: 'Basic', 'Anonymous', - 'Digest', 'Windows', 'ClientCertificate' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.HttpAuthenticationType - :param user_name: User name for Basic, Digest, or Windows authentication. - Type: string (or Expression with resultType string). - :type user_name: object - :param password: Password for Basic, Digest, Windows, or ClientCertificate - with EmbeddedCertData authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param embedded_cert_data: Base64 encoded certificate data for - ClientCertificate authentication. For on-premises copy with - ClientCertificate authentication, either CertThumbprint or - EmbeddedCertData/Password should be specified. Type: string (or Expression - with resultType string). - :type embedded_cert_data: object - :param cert_thumbprint: Thumbprint of certificate for ClientCertificate - authentication. Only valid for on-premises copy. For on-premises copy with - ClientCertificate authentication, either CertThumbprint or - EmbeddedCertData/Password should be specified. Type: string (or Expression - with resultType string). - :type cert_thumbprint: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - :param enable_server_certificate_validation: If true, validate the HTTPS - server SSL certificate. Default value is true. Type: boolean (or - Expression with resultType boolean). 
- :type enable_server_certificate_validation: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'embedded_cert_data': {'key': 'typeProperties.embeddedCertData', 'type': 'object'}, - 'cert_thumbprint': {'key': 'typeProperties.certThumbprint', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(HttpLinkedService, self).__init__(**kwargs) - self.url = kwargs.get('url', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.embedded_cert_data = kwargs.get('embedded_cert_data', None) - self.cert_thumbprint = kwargs.get('cert_thumbprint', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None) - self.type = 'HttpServer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service_py3.py deleted file mode 100644 index 7f70adb08425..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service_py3.py +++ /dev/null @@ -1,105 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class HttpLinkedService(LinkedService): - """Linked service for an HTTP source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. 
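Aside: a minimal sketch of the HttpLinkedService model above with Basic authentication. SecureString is assumed as the concrete SecretBase implementation, and all credential values are placeholders.

    from azure.mgmt.datafactory.models import HttpLinkedService, SecureString

    http_ls = HttpLinkedService(
        url='https://example.com/api',                 # required
        authentication_type='Basic',
        user_name='svc-reader',
        password=SecureString(value='<placeholder>'),  # any SecretBase subtype works here
        enable_server_certificate_validation=True,
    )
    assert http_ls.type == 'HttpServer'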
- :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param url: Required. The base URL of the HTTP endpoint, e.g. - http://www.microsoft.com. Type: string (or Expression with resultType - string). - :type url: object - :param authentication_type: The authentication type to be used to connect - to the HTTP server. Possible values include: 'Basic', 'Anonymous', - 'Digest', 'Windows', 'ClientCertificate' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.HttpAuthenticationType - :param user_name: User name for Basic, Digest, or Windows authentication. - Type: string (or Expression with resultType string). - :type user_name: object - :param password: Password for Basic, Digest, Windows, or ClientCertificate - with EmbeddedCertData authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param embedded_cert_data: Base64 encoded certificate data for - ClientCertificate authentication. For on-premises copy with - ClientCertificate authentication, either CertThumbprint or - EmbeddedCertData/Password should be specified. Type: string (or Expression - with resultType string). - :type embedded_cert_data: object - :param cert_thumbprint: Thumbprint of certificate for ClientCertificate - authentication. Only valid for on-premises copy. For on-premises copy with - ClientCertificate authentication, either CertThumbprint or - EmbeddedCertData/Password should be specified. Type: string (or Expression - with resultType string). - :type cert_thumbprint: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - :param enable_server_certificate_validation: If true, validate the HTTPS - server SSL certificate. Default value is true. Type: boolean (or - Expression with resultType boolean). 
- :type enable_server_certificate_validation: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'embedded_cert_data': {'key': 'typeProperties.embeddedCertData', 'type': 'object'}, - 'cert_thumbprint': {'key': 'typeProperties.certThumbprint', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, - } - - def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, user_name=None, password=None, embedded_cert_data=None, cert_thumbprint=None, encrypted_credential=None, enable_server_certificate_validation=None, **kwargs) -> None: - super(HttpLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.url = url - self.authentication_type = authentication_type - self.user_name = user_name - self.password = password - self.embedded_cert_data = embedded_cert_data - self.cert_thumbprint = cert_thumbprint - self.encrypted_credential = encrypted_credential - self.enable_server_certificate_validation = enable_server_certificate_validation - self.type = 'HttpServer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings.py deleted file mode 100644 index a7c175da3489..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings.py +++ /dev/null @@ -1,63 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .store_read_settings import StoreReadSettings - - -class HttpReadSettings(StoreReadSettings): - """Http read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer).
- :type max_concurrent_connections: object - :param request_method: The HTTP method used to call the RESTful API. The - default is GET. Type: string (or Expression with resultType string). - :type request_method: object - :param request_body: The HTTP request body to the RESTful API if - requestMethod is POST. Type: string (or Expression with resultType - string). - :type request_body: object - :param additional_headers: The additional HTTP headers in the request to - the RESTful API. Type: string (or Expression with resultType string). - :type additional_headers: object - :param request_timeout: Specifies the timeout for a HTTP client to get - HTTP response from HTTP server. - :type request_timeout: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'request_method': {'key': 'requestMethod', 'type': 'object'}, - 'request_body': {'key': 'requestBody', 'type': 'object'}, - 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, - 'request_timeout': {'key': 'requestTimeout', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(HttpReadSettings, self).__init__(**kwargs) - self.request_method = kwargs.get('request_method', None) - self.request_body = kwargs.get('request_body', None) - self.additional_headers = kwargs.get('additional_headers', None) - self.request_timeout = kwargs.get('request_timeout', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings_py3.py deleted file mode 100644 index 7cea9207c996..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings_py3.py +++ /dev/null @@ -1,63 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .store_read_settings_py3 import StoreReadSettings - - -class HttpReadSettings(StoreReadSettings): - """Http read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param request_method: The HTTP method used to call the RESTful API. The - default is GET. Type: string (or Expression with resultType string). - :type request_method: object - :param request_body: The HTTP request body to the RESTful API if - requestMethod is POST. Type: string (or Expression with resultType - string). - :type request_body: object - :param additional_headers: The additional HTTP headers in the request to - the RESTful API.
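Aside: a minimal sketch of the HttpReadSettings model above. Unlike most models in this patch, type is caller-supplied rather than a server-filled constant; the 'HttpReadSettings' string is an assumption for illustration.

    from azure.mgmt.datafactory.models import HttpReadSettings

    read_settings = HttpReadSettings(
        type='HttpReadSettings',       # required; value assumed for illustration
        request_method='GET',
        request_timeout='00:02:00',
        max_concurrent_connections=4,
    )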
Type: string (or Expression with resultType string). - :type additional_headers: object - :param request_timeout: Specifies the timeout for a HTTP client to get - HTTP response from HTTP server. - :type request_timeout: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'request_method': {'key': 'requestMethod', 'type': 'object'}, - 'request_body': {'key': 'requestBody', 'type': 'object'}, - 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, - 'request_timeout': {'key': 'requestTimeout', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, request_method=None, request_body=None, additional_headers=None, request_timeout=None, **kwargs) -> None: - super(HttpReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.request_method = request_method - self.request_body = request_body - self.additional_headers = additional_headers - self.request_timeout = request_timeout diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location.py deleted file mode 100644 index 94106fae9d15..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location.py +++ /dev/null @@ -1,50 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_location import DatasetLocation - - -class HttpServerLocation(DatasetLocation): - """The location of http server. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). - :type file_name: object - :param relative_url: Specify the relativeUrl of http server. 
Type: string - (or Expression with resultType string) - :type relative_url: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - 'relative_url': {'key': 'relativeUrl', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(HttpServerLocation, self).__init__(**kwargs) - self.relative_url = kwargs.get('relative_url', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location_py3.py deleted file mode 100644 index c52c53dcf357..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location_py3.py +++ /dev/null @@ -1,50 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_location_py3 import DatasetLocation - - -class HttpServerLocation(DatasetLocation): - """The location of http server. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). - :type file_name: object - :param relative_url: Specify the relativeUrl of http server. Type: string - (or Expression with resultType string) - :type relative_url: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - 'relative_url': {'key': 'relativeUrl', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, relative_url=None, **kwargs) -> None: - super(HttpServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) - self.relative_url = relative_url diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source.py deleted file mode 100644 index ae131aa16c8c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source.py +++ /dev/null @@ -1,60 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. 
-# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class HttpSource(CopySource): - """A copy activity source for an HTTP file. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param http_request_timeout: Specifies the timeout for a HTTP client to - get HTTP response from HTTP server. The default value is equivalent to - System.Net.HttpWebRequest.Timeout. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(HttpSource, self).__init__(**kwargs) - self.http_request_timeout = kwargs.get('http_request_timeout', None) - self.type = 'HttpSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source_py3.py deleted file mode 100644 index df339fc3aef7..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source_py3.py +++ /dev/null @@ -1,60 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class HttpSource(CopySource): - """A copy activity source for an HTTP file. - - All required parameters must be populated in order to send to Azure. 
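Aside: a minimal sketch pairing the HttpServerLocation model above with an HttpSource; the type string and URL fragment are illustrative assumptions.

    from azure.mgmt.datafactory.models import HttpServerLocation, HttpSource

    location = HttpServerLocation(
        type='HttpServerLocation',             # required; value assumed
        relative_url='downloads/report.json',
    )
    http_source = HttpSource(http_request_timeout='00:01:40')
    assert http_source.type == 'HttpSource'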
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param http_request_timeout: Specifies the timeout for a HTTP client to - get HTTP response from HTTP server. The default value is equivalent to - System.Net.HttpWebRequest.Timeout. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, http_request_timeout=None, **kwargs) -> None: - super(HttpSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.http_request_timeout = http_request_timeout - self.type = 'HttpSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service.py deleted file mode 100644 index 3d0d6cb3a6f4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service.py +++ /dev/null @@ -1,96 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class HubspotLinkedService(LinkedService): - """Hubspot Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. 
- :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param client_id: Required. The client ID associated with your Hubspot - application. - :type client_id: object - :param client_secret: The client secret associated with your Hubspot - application. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param access_token: The access token obtained when initially - authenticating your OAuth integration. - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param refresh_token: The refresh token obtained when initially - authenticating your OAuth integration. - :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(HubspotLinkedService, self).__init__(**kwargs) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.access_token = kwargs.get('access_token', None) - self.refresh_token = kwargs.get('refresh_token', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = 
kwargs.get('encrypted_credential', None) - self.type = 'Hubspot' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service_py3.py deleted file mode 100644 index 272d613e9cd1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service_py3.py +++ /dev/null @@ -1,96 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class HubspotLinkedService(LinkedService): - """Hubspot Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param client_id: Required. The client ID associated with your Hubspot - application. - :type client_id: object - :param client_secret: The client secret associated with your Hubspot - application. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param access_token: The access token obtained when initially - authenticating your OAuth integration. - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param refresh_token: The refresh token obtained when initially - authenticating your OAuth integration. - :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, access_token=None, refresh_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(HubspotLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.client_id = client_id - self.client_secret = client_secret - self.access_token = access_token - self.refresh_token = refresh_token - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'Hubspot' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset.py deleted file mode 100644 index ce8994b4db4a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class HubspotObjectDataset(Dataset): - """Hubspot Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. 
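Aside: a minimal sketch of the HubspotLinkedService model above. SecureString stands in for SecretBase, and all credential values are placeholders.

    from azure.mgmt.datafactory.models import HubspotLinkedService, SecureString

    hubspot_ls = HubspotLinkedService(
        client_id='<app-client-id>',                       # required
        client_secret=SecureString(value='<app-secret>'),
        access_token=SecureString(value='<oauth-token>'),
        use_encrypted_endpoints=True,
    )
    assert hubspot_ls.type == 'Hubspot'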
- :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(HubspotObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'HubspotObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset_py3.py deleted file mode 100644 index bd2309101f72..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class HubspotObjectDataset(Dataset): - """Hubspot Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. 
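Aside: a minimal sketch of the HubspotObjectDataset model above; the linked-service name and table are placeholders.

    from azure.mgmt.datafactory.models import HubspotObjectDataset, LinkedServiceReference

    hubspot_dataset = HubspotObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name='ExampleHubspotLinkedService'),
        table_name='deals',   # serialized as typeProperties.tableName
    )
    assert hubspot_dataset.type == 'HubspotObject'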
- :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(HubspotObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'HubspotObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source.py deleted file mode 100644 index b4b4c618c33e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class HubspotSource(CopySource): - """A copy activity Hubspot Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. 
Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(HubspotSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'HubspotSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source_py3.py deleted file mode 100644 index a29811342ce0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class HubspotSource(CopySource): - """A copy activity Hubspot Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
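# Usage sketch for the HubspotSource model above; only query is Hubspot-specific,
# the retry settings are inherited from CopySource. Values are illustrative.
from azure.mgmt.datafactory.models import HubspotSource

hubspot_source = HubspotSource(
    query="SELECT * FROM Companies",  # plain string, or an ADF Expression value
    source_retry_count=3,
    source_retry_wait='00:00:30')     # must match the (d.)?hh:mm:ss pattern above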
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'HubspotSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity.py deleted file mode 100644 index a8cb1da690e1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .control_activity import ControlActivity - - -class IfConditionActivity(ControlActivity): - """This activity evaluates a boolean expression and executes either the - activities under the ifTrueActivities property or the ifFalseActivities - property depending on the result of the expression. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param expression: Required. An expression that would evaluate to Boolean. - This is used to determine the block of activities (ifTrueActivities or - ifFalseActivities) that will be executed. - :type expression: ~azure.mgmt.datafactory.models.Expression - :param if_true_activities: List of activities to execute if expression is - evaluated to true. This is an optional property and if not provided, the - activity will exit without any action. - :type if_true_activities: list[~azure.mgmt.datafactory.models.Activity] - :param if_false_activities: List of activities to execute if expression is - evaluated to false. This is an optional property and if not provided, the - activity will exit without any action. 
- :type if_false_activities: list[~azure.mgmt.datafactory.models.Activity] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'expression': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'expression': {'key': 'typeProperties.expression', 'type': 'Expression'}, - 'if_true_activities': {'key': 'typeProperties.ifTrueActivities', 'type': '[Activity]'}, - 'if_false_activities': {'key': 'typeProperties.ifFalseActivities', 'type': '[Activity]'}, - } - - def __init__(self, **kwargs): - super(IfConditionActivity, self).__init__(**kwargs) - self.expression = kwargs.get('expression', None) - self.if_true_activities = kwargs.get('if_true_activities', None) - self.if_false_activities = kwargs.get('if_false_activities', None) - self.type = 'IfCondition' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity_py3.py deleted file mode 100644 index 7921a2602807..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .control_activity_py3 import ControlActivity - - -class IfConditionActivity(ControlActivity): - """This activity evaluates a boolean expression and executes either the - activities under the ifTrueActivities property or the ifFalseActivities - property depending on the result of the expression. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param expression: Required. An expression that would evaluate to Boolean. - This is used to determine the block of activities (ifTrueActivities or - ifFalseActivities) that will be executed. - :type expression: ~azure.mgmt.datafactory.models.Expression - :param if_true_activities: List of activities to execute if expression is - evaluated to true. This is an optional property and if not provided, the - activity will exit without any action. 
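# Wiring sketch for the IfConditionActivity above. WaitActivity is used here
# only as a convenient stand-in branch body; the expression uses the usual ADF
# expression language, and all names are hypothetical.
from azure.mgmt.datafactory.models import Expression, IfConditionActivity, WaitActivity

check = IfConditionActivity(
    name='CheckRunFull',
    expression=Expression(value='@bool(pipeline().parameters.runFull)'),
    if_true_activities=[WaitActivity(name='LongPause', wait_time_in_seconds=60)],
    if_false_activities=[WaitActivity(name='ShortPause', wait_time_in_seconds=5)])
# Either branch may be omitted; per the docstring the activity then exits
# without any action when the expression selects the missing branch.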
- :type if_true_activities: list[~azure.mgmt.datafactory.models.Activity] - :param if_false_activities: List of activities to execute if expression is - evaluated to false. This is an optional property and if not provided, the - activity will exit without any action. - :type if_false_activities: list[~azure.mgmt.datafactory.models.Activity] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'expression': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'expression': {'key': 'typeProperties.expression', 'type': 'Expression'}, - 'if_true_activities': {'key': 'typeProperties.ifTrueActivities', 'type': '[Activity]'}, - 'if_false_activities': {'key': 'typeProperties.ifFalseActivities', 'type': '[Activity]'}, - } - - def __init__(self, *, name: str, expression, additional_properties=None, description: str=None, depends_on=None, user_properties=None, if_true_activities=None, if_false_activities=None, **kwargs) -> None: - super(IfConditionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.expression = expression - self.if_true_activities = if_true_activities - self.if_false_activities = if_false_activities - self.type = 'IfCondition' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service.py deleted file mode 100644 index a704852652db..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service.py +++ /dev/null @@ -1,117 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class ImpalaLinkedService(LinkedService): - """Impala server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The IP address or host name of the Impala server. - (i.e. 
192.168.222.160) - :type host: object - :param port: The TCP port that the Impala server uses to listen for client - connections. The default value is 21050. - :type port: object - :param authentication_type: Required. The authentication type to use. - Possible values include: 'Anonymous', 'SASLUsername', - 'UsernameAndPassword' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.ImpalaAuthenticationType - :param username: The user name used to access the Impala server. The - default value is anonymous when using SASLUsername. - :type username: object - :param password: The password corresponding to the user name when using - UsernameAndPassword. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are - encrypted using SSL. The default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing - trusted CA certificates for verifying the server when connecting over SSL. - This property can only be set when using SSL on self-hosted IR. The - default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate - from the system trust store or from a specified PEM file. The default - value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a - CA-issued SSL certificate name to match the host name of the server when - connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow - self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ImpalaLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.port = kwargs.get('port', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Impala' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service_py3.py deleted file mode 100644 index 55b2e0c861d7..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service_py3.py +++ /dev/null @@ -1,117 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class ImpalaLinkedService(LinkedService): - """Impala server linked service. - - All required parameters must be populated in order to send to Azure. 
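# Construction sketch for the kwargs-only (Python 2) ImpalaLinkedService above;
# host and authentication_type are the only required type properties.
from azure.mgmt.datafactory.models import ImpalaLinkedService

impala_ls = ImpalaLinkedService(
    host='192.168.222.160',           # illustrative address from the docstring
    authentication_type='Anonymous',  # or 'SASLUsername' / 'UsernameAndPassword'
    port=21050)                       # default Impala client port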
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param connect_via: The integration runtime reference.
- :type connect_via:
- ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
- :param description: Linked service description.
- :type description: str
- :param parameters: Parameters for linked service.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- linked service.
- :type annotations: list[object]
- :param type: Required. Constant filled by server.
- :type type: str
- :param host: Required. The IP address or host name of the Impala server.
- (e.g. 192.168.222.160)
- :type host: object
- :param port: The TCP port that the Impala server uses to listen for client
- connections. The default value is 21050.
- :type port: object
- :param authentication_type: Required. The authentication type to use.
- Possible values include: 'Anonymous', 'SASLUsername',
- 'UsernameAndPassword'
- :type authentication_type: str or
- ~azure.mgmt.datafactory.models.ImpalaAuthenticationType
- :param username: The user name used to access the Impala server. The
- default value is anonymous when using SASLUsername.
- :type username: object
- :param password: The password corresponding to the user name when using
- UsernameAndPassword.
- :type password: ~azure.mgmt.datafactory.models.SecretBase
- :param enable_ssl: Specifies whether the connections to the server are
- encrypted using SSL. The default value is false.
- :type enable_ssl: object
- :param trusted_cert_path: The full path of the .pem file containing
- trusted CA certificates for verifying the server when connecting over SSL.
- This property can only be set when using SSL on self-hosted IR. The
- default value is the cacerts.pem file installed with the IR.
- :type trusted_cert_path: object
- :param use_system_trust_store: Specifies whether to use a CA certificate
- from the system trust store or from a specified PEM file. The default
- value is false.
- :type use_system_trust_store: object
- :param allow_host_name_cn_mismatch: Specifies whether to require a
- CA-issued SSL certificate name to match the host name of the server when
- connecting over SSL. The default value is false.
- :type allow_host_name_cn_mismatch: object
- :param allow_self_signed_server_cert: Specifies whether to allow
- self-signed certificates from the server. The default value is false.
- :type allow_self_signed_server_cert: object
- :param encrypted_credential: The encrypted credential used for
- authentication. Credentials are encrypted using the integration runtime
- credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: - super(ImpalaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.port = port - self.authentication_type = authentication_type - self.username = username - self.password = password - self.enable_ssl = enable_ssl - self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch - self.allow_self_signed_server_cert = allow_self_signed_server_cert - self.encrypted_credential = encrypted_credential - self.type = 'Impala' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset.py deleted file mode 100644 index 8faee4f09240..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class ImpalaObjectDataset(Dataset): - """Impala server dataset. 
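# Follow-up on the sketch above: msrest expands the dotted _attribute_map keys,
# so every 'typeProperties.*' entry nests under one typeProperties object in
# the REST payload. Expected shape (illustrative, not a captured response):
payload = impala_ls.serialize()
# payload == {
#     'type': 'Impala',
#     'typeProperties': {
#         'host': '192.168.222.160',
#         'authenticationType': 'Anonymous',
#         'port': 21050,
#     },
# }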
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of the Impala. Type: string (or Expression - with resultType string). - :type table: object - :param impala_object_dataset_schema: The schema name of the Impala. Type: - string (or Expression with resultType string). - :type impala_object_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'impala_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ImpalaObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.impala_object_dataset_schema = kwargs.get('impala_object_dataset_schema', None) - self.type = 'ImpalaObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset_py3.py deleted file mode 100644 index 5652b5c9e4b0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class ImpalaObjectDataset(Dataset): - """Impala server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of the Impala. Type: string (or Expression - with resultType string). - :type table: object - :param impala_object_dataset_schema: The schema name of the Impala. Type: - string (or Expression with resultType string). 
- :type impala_object_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'impala_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, impala_object_dataset_schema=None, **kwargs) -> None: - super(ImpalaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.table = table - self.impala_object_dataset_schema = impala_object_dataset_schema - self.type = 'ImpalaObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source.py deleted file mode 100644 index 9e27dbdb6266..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class ImpalaSource(CopySource): - """A copy activity Impala server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
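# Note the renamed argument in the dataset above: Dataset already owns 'schema'
# (the physical type schema), so the Impala schema name is carried as
# impala_object_dataset_schema and serialized to typeProperties.schema.
# Sketch with a hypothetical linked service reference:
from azure.mgmt.datafactory.models import ImpalaObjectDataset, LinkedServiceReference

impala_ds = ImpalaObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='ImpalaLS'),
    table='sales',
    impala_object_dataset_schema='analytics')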
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ImpalaSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'ImpalaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source_py3.py deleted file mode 100644 index f7dc4016d020..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class ImpalaSource(CopySource): - """A copy activity Impala server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
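# The retry-wait pattern quoted throughout these docstrings is an ADF timespan,
# (d.)?hh:mm:ss. A stand-alone check of a few candidate values (the doubled
# backslashes in the docstrings are only docstring escaping):
import re

TIMESPAN = re.compile(r'^((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))$')
for candidate in ('00:00:30', '1.02:30:00', '90 seconds'):
    print(candidate, bool(TIMESPAN.match(candidate)))  # True, True, False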
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'ImpalaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service.py deleted file mode 100644 index 2a58e7a0f7d3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service.py +++ /dev/null @@ -1,86 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class InformixLinkedService(LinkedService): - """Informix linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The non-access credential portion of - the connection string as well as an optional encrypted credential. Type: - string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param authentication_type: Type of authentication used to connect to the - Informix as ODBC data store. Possible values are: Anonymous and Basic. - Type: string (or Expression with resultType string). - :type authentication_type: object - :param credential: The access credential portion of the connection string - specified in driver-specific property-value format. - :type credential: ~azure.mgmt.datafactory.models.SecretBase - :param user_name: User name for Basic authentication. Type: string (or - Expression with resultType string). 
- :type user_name: object - :param password: Password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(InformixLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.credential = kwargs.get('credential', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Informix' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service_py3.py deleted file mode 100644 index 03aadada664d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service_py3.py +++ /dev/null @@ -1,86 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class InformixLinkedService(LinkedService): - """Informix linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. 
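# Sketch for the Informix linked service above: connection_string is the only
# required type property; Basic authentication adds user_name and password.
# SecureString is assumed here as the concrete SecretBase; names are hypothetical.
from azure.mgmt.datafactory.models import InformixLinkedService, SecureString

informix_ls = InformixLinkedService(
    connection_string='Driver={IBM INFORMIX ODBC DRIVER};Host=ifxhost',  # illustrative
    authentication_type='Basic',
    user_name='svc_etl',
    password=SecureString(value='<secret>'))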
- :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The non-access credential portion of - the connection string as well as an optional encrypted credential. Type: - string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param authentication_type: Type of authentication used to connect to the - Informix as ODBC data store. Possible values are: Anonymous and Basic. - Type: string (or Expression with resultType string). - :type authentication_type: object - :param credential: The access credential portion of the connection string - specified in driver-specific property-value format. - :type credential: ~azure.mgmt.datafactory.models.SecretBase - :param user_name: User name for Basic authentication. Type: string (or - Expression with resultType string). - :type user_name: object - :param password: Password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, credential=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(InformixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.authentication_type = authentication_type - self.credential = credential - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'Informix' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink.py deleted file mode 100644 index c511f4ecc174..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink.py +++ /dev/null @@ -1,66 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink import CopySink - - -class InformixSink(CopySink): - """A copy activity Informix sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param pre_copy_script: A query to execute before starting the copy. Type: - string (or Expression with resultType string). - :type pre_copy_script: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(InformixSink, self).__init__(**kwargs) - self.pre_copy_script = kwargs.get('pre_copy_script', None) - self.type = 'InformixSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink_py3.py deleted file mode 100644 index b0681ec0d423..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink_py3.py +++ /dev/null @@ -1,66 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class InformixSink(CopySink): - """A copy activity Informix sink. 
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param pre_copy_script: A query to execute before starting the copy. Type: - string (or Expression with resultType string). - :type pre_copy_script: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: - super(InformixSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.pre_copy_script = pre_copy_script - self.type = 'InformixSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source.py deleted file mode 100644 index 6cab908c7014..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class InformixSource(CopySource): - """A copy activity source for Informix. 
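# Usage sketch for the InformixSink above; pre_copy_script is its only addition
# over CopySink, and runs once against the sink before the copy starts.
from azure.mgmt.datafactory.models import InformixSink

informix_sink = InformixSink(
    pre_copy_script='DELETE FROM staging_orders',  # illustrative cleanup statement
    write_batch_size=1000,
    write_batch_timeout='00:05:00')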
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(InformixSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'InformixSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source_py3.py deleted file mode 100644 index ed8fb0221239..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class InformixSource(CopySource): - """A copy activity source for Informix. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'InformixSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset.py deleted file mode 100644 index 8b7364bff652..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class InformixTableDataset(Dataset): - """The Informix table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The Informix table name. Type: string (or Expression - with resultType string). 
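# A minimal sketch of the InformixSource model above on the source side of a
# copy activity; the query text is an illustrative assumption.
from azure.mgmt.datafactory.models import InformixSource

source = InformixSource(
    query='SELECT id, amount FROM orders',  # Type: string (or Expression with resultType string)
    source_retry_count=3,
    max_concurrent_connections=2,
)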
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(InformixTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'InformixTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset_py3.py deleted file mode 100644 index 05c458e797b1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class InformixTableDataset(Dataset): - """The Informix table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The Informix table name. Type: string (or Expression - with resultType string). 
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(InformixTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'InformixTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime.py deleted file mode 100644 index 5dd45d16f76e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime.py +++ /dev/null @@ -1,51 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntime(Model): - """Azure Data Factory nested object which serves as a compute resource for - activities. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SelfHostedIntegrationRuntime, ManagedIntegrationRuntime - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Integration runtime description. - :type description: str - :param type: Required. Constant filled by server. 
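# A minimal sketch of the InformixTableDataset model above; the linked service
# name is an illustrative assumption.
from azure.mgmt.datafactory.models import (
    InformixTableDataset, LinkedServiceReference)

dataset = InformixTableDataset(
    linked_service_name=LinkedServiceReference(
        reference_name='InformixLinkedService'),
    table_name='orders',  # Type: string (or Expression with resultType string)
)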
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SelfHosted': 'SelfHostedIntegrationRuntime', 'Managed': 'ManagedIntegrationRuntime'} - } - - def __init__(self, **kwargs): - super(IntegrationRuntime, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.description = kwargs.get('description', None) - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys.py deleted file mode 100644 index 12ed6925585e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys.py +++ /dev/null @@ -1,32 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeAuthKeys(Model): - """The integration runtime authentication keys. - - :param auth_key1: The primary integration runtime authentication key. - :type auth_key1: str - :param auth_key2: The secondary integration runtime authentication key. - :type auth_key2: str - """ - - _attribute_map = { - 'auth_key1': {'key': 'authKey1', 'type': 'str'}, - 'auth_key2': {'key': 'authKey2', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(IntegrationRuntimeAuthKeys, self).__init__(**kwargs) - self.auth_key1 = kwargs.get('auth_key1', None) - self.auth_key2 = kwargs.get('auth_key2', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys_py3.py deleted file mode 100644 index b807d4cd5b55..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys_py3.py +++ /dev/null @@ -1,32 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeAuthKeys(Model): - """The integration runtime authentication keys. - - :param auth_key1: The primary integration runtime authentication key. - :type auth_key1: str - :param auth_key2: The secondary integration runtime authentication key. 
- :type auth_key2: str - """ - - _attribute_map = { - 'auth_key1': {'key': 'authKey1', 'type': 'str'}, - 'auth_key2': {'key': 'authKey2', 'type': 'str'}, - } - - def __init__(self, *, auth_key1: str=None, auth_key2: str=None, **kwargs) -> None: - super(IntegrationRuntimeAuthKeys, self).__init__(**kwargs) - self.auth_key1 = auth_key1 - self.auth_key2 = auth_key2 diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties.py deleted file mode 100644 index e387ef4077f2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties.py +++ /dev/null @@ -1,60 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeComputeProperties(Model): - """The compute resource properties for managed integration runtime. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param location: The location for managed integration runtime. The - supported regions could be found on - https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement-activities - :type location: str - :param node_size: The node size requirement to managed integration - runtime. - :type node_size: str - :param number_of_nodes: The required number of nodes for managed - integration runtime. - :type number_of_nodes: int - :param max_parallel_executions_per_node: Maximum parallel executions count - per node for managed integration runtime. - :type max_parallel_executions_per_node: int - :param v_net_properties: VNet properties for managed integration runtime. 
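# IntegrationRuntimeAuthKeys above is a response model; a sketch of fetching it,
# assuming service-principal credentials and placeholder resource names.
from azure.common.credentials import ServicePrincipalCredentials
from azure.mgmt.datafactory import DataFactoryManagementClient

credentials = ServicePrincipalCredentials(
    client_id='<client-id>', secret='<secret>', tenant='<tenant-id>')
client = DataFactoryManagementClient(credentials, '<subscription-id>')
keys = client.integration_runtimes.list_auth_keys('my-rg', 'my-factory', 'my-ir')
print(keys.auth_key1, keys.auth_key2)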
- :type v_net_properties: - ~azure.mgmt.datafactory.models.IntegrationRuntimeVNetProperties - """ - - _validation = { - 'number_of_nodes': {'minimum': 1}, - 'max_parallel_executions_per_node': {'minimum': 1}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'location': {'key': 'location', 'type': 'str'}, - 'node_size': {'key': 'nodeSize', 'type': 'str'}, - 'number_of_nodes': {'key': 'numberOfNodes', 'type': 'int'}, - 'max_parallel_executions_per_node': {'key': 'maxParallelExecutionsPerNode', 'type': 'int'}, - 'v_net_properties': {'key': 'vNetProperties', 'type': 'IntegrationRuntimeVNetProperties'}, - } - - def __init__(self, **kwargs): - super(IntegrationRuntimeComputeProperties, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.location = kwargs.get('location', None) - self.node_size = kwargs.get('node_size', None) - self.number_of_nodes = kwargs.get('number_of_nodes', None) - self.max_parallel_executions_per_node = kwargs.get('max_parallel_executions_per_node', None) - self.v_net_properties = kwargs.get('v_net_properties', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties_py3.py deleted file mode 100644 index f47f339dd067..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties_py3.py +++ /dev/null @@ -1,60 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeComputeProperties(Model): - """The compute resource properties for managed integration runtime. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param location: The location for managed integration runtime. The - supported regions could be found on - https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement-activities - :type location: str - :param node_size: The node size requirement to managed integration - runtime. - :type node_size: str - :param number_of_nodes: The required number of nodes for managed - integration runtime. - :type number_of_nodes: int - :param max_parallel_executions_per_node: Maximum parallel executions count - per node for managed integration runtime. - :type max_parallel_executions_per_node: int - :param v_net_properties: VNet properties for managed integration runtime. 
- :type v_net_properties: - ~azure.mgmt.datafactory.models.IntegrationRuntimeVNetProperties - """ - - _validation = { - 'number_of_nodes': {'minimum': 1}, - 'max_parallel_executions_per_node': {'minimum': 1}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'location': {'key': 'location', 'type': 'str'}, - 'node_size': {'key': 'nodeSize', 'type': 'str'}, - 'number_of_nodes': {'key': 'numberOfNodes', 'type': 'int'}, - 'max_parallel_executions_per_node': {'key': 'maxParallelExecutionsPerNode', 'type': 'int'}, - 'v_net_properties': {'key': 'vNetProperties', 'type': 'IntegrationRuntimeVNetProperties'}, - } - - def __init__(self, *, additional_properties=None, location: str=None, node_size: str=None, number_of_nodes: int=None, max_parallel_executions_per_node: int=None, v_net_properties=None, **kwargs) -> None: - super(IntegrationRuntimeComputeProperties, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.location = location - self.node_size = node_size - self.number_of_nodes = number_of_nodes - self.max_parallel_executions_per_node = max_parallel_executions_per_node - self.v_net_properties = v_net_properties diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info.py deleted file mode 100644 index c185f916e8e5..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info.py +++ /dev/null @@ -1,70 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeConnectionInfo(Model): - """Connection information for encrypting the on-premises data source - credentials. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar service_token: The token generated in service. Callers use this - token to authenticate to integration runtime. - :vartype service_token: str - :ivar identity_cert_thumbprint: The integration runtime SSL certificate - thumbprint. Click-Once application uses it to do server validation. - :vartype identity_cert_thumbprint: str - :ivar host_service_uri: The on-premises integration runtime host URL. - :vartype host_service_uri: str - :ivar version: The integration runtime version. - :vartype version: str - :ivar public_key: The public key for encrypting a credential when - transferring the credential to the integration runtime. - :vartype public_key: str - :ivar is_identity_cert_exprired: Whether the identity certificate is - expired. 
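# A sketch of the IntegrationRuntimeComputeProperties model above, including the
# optional VNet settings; the VNet resource id and subnet name are assumptions.
from azure.mgmt.datafactory.models import (
    IntegrationRuntimeComputeProperties, IntegrationRuntimeVNetProperties)

compute = IntegrationRuntimeComputeProperties(
    location='West US',
    node_size='Standard_D2_v3',
    number_of_nodes=2,                   # validated above as minimum: 1
    max_parallel_executions_per_node=4,  # validated above as minimum: 1
    v_net_properties=IntegrationRuntimeVNetProperties(
        v_net_id='<vnet-resource-id>', subnet='default'),
)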
- :vartype is_identity_cert_exprired: bool - """ - - _validation = { - 'service_token': {'readonly': True}, - 'identity_cert_thumbprint': {'readonly': True}, - 'host_service_uri': {'readonly': True}, - 'version': {'readonly': True}, - 'public_key': {'readonly': True}, - 'is_identity_cert_exprired': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'service_token': {'key': 'serviceToken', 'type': 'str'}, - 'identity_cert_thumbprint': {'key': 'identityCertThumbprint', 'type': 'str'}, - 'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'}, - 'version': {'key': 'version', 'type': 'str'}, - 'public_key': {'key': 'publicKey', 'type': 'str'}, - 'is_identity_cert_exprired': {'key': 'isIdentityCertExprired', 'type': 'bool'}, - } - - def __init__(self, **kwargs): - super(IntegrationRuntimeConnectionInfo, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.service_token = None - self.identity_cert_thumbprint = None - self.host_service_uri = None - self.version = None - self.public_key = None - self.is_identity_cert_exprired = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info_py3.py deleted file mode 100644 index 8cc5aceb16d7..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info_py3.py +++ /dev/null @@ -1,70 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeConnectionInfo(Model): - """Connection information for encrypting the on-premises data source - credentials. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar service_token: The token generated in service. Callers use this - token to authenticate to integration runtime. - :vartype service_token: str - :ivar identity_cert_thumbprint: The integration runtime SSL certificate - thumbprint. Click-Once application uses it to do server validation. - :vartype identity_cert_thumbprint: str - :ivar host_service_uri: The on-premises integration runtime host URL. - :vartype host_service_uri: str - :ivar version: The integration runtime version. - :vartype version: str - :ivar public_key: The public key for encrypting a credential when - transferring the credential to the integration runtime. - :vartype public_key: str - :ivar is_identity_cert_exprired: Whether the identity certificate is - expired. 
- :vartype is_identity_cert_exprired: bool - """ - - _validation = { - 'service_token': {'readonly': True}, - 'identity_cert_thumbprint': {'readonly': True}, - 'host_service_uri': {'readonly': True}, - 'version': {'readonly': True}, - 'public_key': {'readonly': True}, - 'is_identity_cert_exprired': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'service_token': {'key': 'serviceToken', 'type': 'str'}, - 'identity_cert_thumbprint': {'key': 'identityCertThumbprint', 'type': 'str'}, - 'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'}, - 'version': {'key': 'version', 'type': 'str'}, - 'public_key': {'key': 'publicKey', 'type': 'str'}, - 'is_identity_cert_exprired': {'key': 'isIdentityCertExprired', 'type': 'bool'}, - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(IntegrationRuntimeConnectionInfo, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.service_token = None - self.identity_cert_thumbprint = None - self.host_service_uri = None - self.version = None - self.public_key = None - self.is_identity_cert_exprired = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties.py deleted file mode 100644 index 44cd5fe5979b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties.py +++ /dev/null @@ -1,33 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeCustomSetupScriptProperties(Model): - """Custom setup script properties for a managed dedicated integration runtime. - - :param blob_container_uri: The URI of the Azure blob container that - contains the custom setup script. - :type blob_container_uri: str - :param sas_token: The SAS token of the Azure blob container. 
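# IntegrationRuntimeConnectionInfo above is read-only and only ever returned by
# the service; a sketch, assuming `client` is the DataFactoryManagementClient
# constructed in the earlier sketch. Note the misspelled attribute name is the
# real wire name, so it is kept as-is.
info = client.integration_runtimes.get_connection_info('my-rg', 'my-factory', 'my-ir')
print(info.host_service_uri, info.version, info.is_identity_cert_exprired)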
- :type sas_token: ~azure.mgmt.datafactory.models.SecureString - """ - - _attribute_map = { - 'blob_container_uri': {'key': 'blobContainerUri', 'type': 'str'}, - 'sas_token': {'key': 'sasToken', 'type': 'SecureString'}, - } - - def __init__(self, **kwargs): - super(IntegrationRuntimeCustomSetupScriptProperties, self).__init__(**kwargs) - self.blob_container_uri = kwargs.get('blob_container_uri', None) - self.sas_token = kwargs.get('sas_token', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties_py3.py deleted file mode 100644 index 7f3c08c0b339..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties_py3.py +++ /dev/null @@ -1,33 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeCustomSetupScriptProperties(Model): - """Custom setup script properties for a managed dedicated integration runtime. - - :param blob_container_uri: The URI of the Azure blob container that - contains the custom setup script. - :type blob_container_uri: str - :param sas_token: The SAS token of the Azure blob container. - :type sas_token: ~azure.mgmt.datafactory.models.SecureString - """ - - _attribute_map = { - 'blob_container_uri': {'key': 'blobContainerUri', 'type': 'str'}, - 'sas_token': {'key': 'sasToken', 'type': 'SecureString'}, - } - - def __init__(self, *, blob_container_uri: str=None, sas_token=None, **kwargs) -> None: - super(IntegrationRuntimeCustomSetupScriptProperties, self).__init__(**kwargs) - self.blob_container_uri = blob_container_uri - self.sas_token = sas_token diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties.py deleted file mode 100644 index ebc0e9b38d6f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties.py +++ /dev/null @@ -1,37 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeDataProxyProperties(Model): - """Data proxy properties for a managed dedicated integration runtime. - - :param connect_via: The self-hosted integration runtime reference. 
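# A sketch of the IntegrationRuntimeCustomSetupScriptProperties model above; the
# container URI is an assumption, and the SAS token is wrapped in a SecureString.
from azure.mgmt.datafactory.models import (
    IntegrationRuntimeCustomSetupScriptProperties, SecureString)

setup_script = IntegrationRuntimeCustomSetupScriptProperties(
    blob_container_uri='https://mystorage.blob.core.windows.net/ssis-setup',
    sas_token=SecureString(value='<sas-token>'),
)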
- :type connect_via: ~azure.mgmt.datafactory.models.EntityReference - :param staging_linked_service: The staging linked service reference. - :type staging_linked_service: - ~azure.mgmt.datafactory.models.EntityReference - :param path: The path to contain the staged data in the Blob storage. - :type path: str - """ - - _attribute_map = { - 'connect_via': {'key': 'connectVia', 'type': 'EntityReference'}, - 'staging_linked_service': {'key': 'stagingLinkedService', 'type': 'EntityReference'}, - 'path': {'key': 'path', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(IntegrationRuntimeDataProxyProperties, self).__init__(**kwargs) - self.connect_via = kwargs.get('connect_via', None) - self.staging_linked_service = kwargs.get('staging_linked_service', None) - self.path = kwargs.get('path', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties_py3.py deleted file mode 100644 index 532b774cad3d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties_py3.py +++ /dev/null @@ -1,37 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeDataProxyProperties(Model): - """Data proxy properties for a managed dedicated integration runtime. - - :param connect_via: The self-hosted integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.EntityReference - :param staging_linked_service: The staging linked service reference. - :type staging_linked_service: - ~azure.mgmt.datafactory.models.EntityReference - :param path: The path to contain the staged data in the Blob storage. - :type path: str - """ - - _attribute_map = { - 'connect_via': {'key': 'connectVia', 'type': 'EntityReference'}, - 'staging_linked_service': {'key': 'stagingLinkedService', 'type': 'EntityReference'}, - 'path': {'key': 'path', 'type': 'str'}, - } - - def __init__(self, *, connect_via=None, staging_linked_service=None, path: str=None, **kwargs) -> None: - super(IntegrationRuntimeDataProxyProperties, self).__init__(**kwargs) - self.connect_via = connect_via - self.staging_linked_service = staging_linked_service - self.path = path diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data.py deleted file mode 100644 index f7b695729403..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data.py +++ /dev/null @@ -1,33 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
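# A sketch of the IntegrationRuntimeDataProxyProperties model above, staging a
# managed runtime's traffic through a self-hosted one; both reference names and
# the staging path are assumptions.
from azure.mgmt.datafactory.models import (
    EntityReference, IntegrationRuntimeDataProxyProperties)

data_proxy = IntegrationRuntimeDataProxyProperties(
    connect_via=EntityReference(
        type='IntegrationRuntimeReference', reference_name='my-self-hosted-ir'),
    staging_linked_service=EntityReference(
        type='LinkedServiceReference', reference_name='StagingBlobStorage'),
    path='staging-container/path',
)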
-# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeMonitoringData(Model): - """Get monitoring data response. - - :param name: Integration runtime name. - :type name: str - :param nodes: Integration runtime node monitoring data. - :type nodes: - list[~azure.mgmt.datafactory.models.IntegrationRuntimeNodeMonitoringData] - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'nodes': {'key': 'nodes', 'type': '[IntegrationRuntimeNodeMonitoringData]'}, - } - - def __init__(self, **kwargs): - super(IntegrationRuntimeMonitoringData, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.nodes = kwargs.get('nodes', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data_py3.py deleted file mode 100644 index 16f3b656c9cc..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data_py3.py +++ /dev/null @@ -1,33 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeMonitoringData(Model): - """Get monitoring data response. - - :param name: Integration runtime name. - :type name: str - :param nodes: Integration runtime node monitoring data. - :type nodes: - list[~azure.mgmt.datafactory.models.IntegrationRuntimeNodeMonitoringData] - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'nodes': {'key': 'nodes', 'type': '[IntegrationRuntimeNodeMonitoringData]'}, - } - - def __init__(self, *, name: str=None, nodes=None, **kwargs) -> None: - super(IntegrationRuntimeMonitoringData, self).__init__(**kwargs) - self.name = name - self.nodes = nodes diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address.py deleted file mode 100644 index 2edabd3e2472..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address.py +++ /dev/null @@ -1,35 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
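# IntegrationRuntimeMonitoringData above is returned by the service; a sketch,
# assuming `client` is the DataFactoryManagementClient from the earlier sketch.
data = client.integration_runtimes.get_monitoring_data('my-rg', 'my-factory', 'my-ir')
for node in data.nodes or []:
    print(node.node_name, node.concurrent_jobs_running)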
-# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeNodeIpAddress(Model): - """The IP address of self-hosted integration runtime node. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar ip_address: The IP address of self-hosted integration runtime node. - :vartype ip_address: str - """ - - _validation = { - 'ip_address': {'readonly': True}, - } - - _attribute_map = { - 'ip_address': {'key': 'ipAddress', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(IntegrationRuntimeNodeIpAddress, self).__init__(**kwargs) - self.ip_address = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address_py3.py deleted file mode 100644 index 476be9815984..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address_py3.py +++ /dev/null @@ -1,35 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeNodeIpAddress(Model): - """The IP address of self-hosted integration runtime node. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar ip_address: The IP address of self-hosted integration runtime node. - :vartype ip_address: str - """ - - _validation = { - 'ip_address': {'readonly': True}, - } - - _attribute_map = { - 'ip_address': {'key': 'ipAddress', 'type': 'str'}, - } - - def __init__(self, **kwargs) -> None: - super(IntegrationRuntimeNodeIpAddress, self).__init__(**kwargs) - self.ip_address = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data.py deleted file mode 100644 index 9d27bedf70aa..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data.py +++ /dev/null @@ -1,79 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeNodeMonitoringData(Model): - """Monitoring data for integration runtime node. - - Variables are only populated by the server, and will be ignored when - sending a request. 
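# IntegrationRuntimeNodeIpAddress above is likewise read-only; a sketch that
# assumes the `integration_runtime_nodes` operations group exposes get_ip_address
# in this SDK generation, with `client` and the node name as placeholders.
ip = client.integration_runtime_nodes.get_ip_address(
    'my-rg', 'my-factory', 'my-ir', 'Node_1')
print(ip.ip_address)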
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar node_name: Name of the integration runtime node. - :vartype node_name: str - :ivar available_memory_in_mb: Available memory (MB) on the integration - runtime node. - :vartype available_memory_in_mb: int - :ivar cpu_utilization: CPU percentage on the integration runtime node. - :vartype cpu_utilization: int - :ivar concurrent_jobs_limit: Maximum concurrent jobs on the integration - runtime node. - :vartype concurrent_jobs_limit: int - :ivar concurrent_jobs_running: The number of jobs currently running on the - integration runtime node. - :vartype concurrent_jobs_running: int - :ivar max_concurrent_jobs: The maximum concurrent jobs in this integration - runtime. - :vartype max_concurrent_jobs: int - :ivar sent_bytes: Sent bytes on the integration runtime node. - :vartype sent_bytes: float - :ivar received_bytes: Received bytes on the integration runtime node. - :vartype received_bytes: float - """ - - _validation = { - 'node_name': {'readonly': True}, - 'available_memory_in_mb': {'readonly': True}, - 'cpu_utilization': {'readonly': True}, - 'concurrent_jobs_limit': {'readonly': True}, - 'concurrent_jobs_running': {'readonly': True}, - 'max_concurrent_jobs': {'readonly': True}, - 'sent_bytes': {'readonly': True}, - 'received_bytes': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'node_name': {'key': 'nodeName', 'type': 'str'}, - 'available_memory_in_mb': {'key': 'availableMemoryInMB', 'type': 'int'}, - 'cpu_utilization': {'key': 'cpuUtilization', 'type': 'int'}, - 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, - 'concurrent_jobs_running': {'key': 'concurrentJobsRunning', 'type': 'int'}, - 'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'}, - 'sent_bytes': {'key': 'sentBytes', 'type': 'float'}, - 'received_bytes': {'key': 'receivedBytes', 'type': 'float'}, - } - - def __init__(self, **kwargs): - super(IntegrationRuntimeNodeMonitoringData, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.node_name = None - self.available_memory_in_mb = None - self.cpu_utilization = None - self.concurrent_jobs_limit = None - self.concurrent_jobs_running = None - self.max_concurrent_jobs = None - self.sent_bytes = None - self.received_bytes = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data_py3.py deleted file mode 100644 index 35c7e664b2ff..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data_py3.py +++ /dev/null @@ -1,79 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeNodeMonitoringData(Model): - """Monitoring data for integration runtime node. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar node_name: Name of the integration runtime node. - :vartype node_name: str - :ivar available_memory_in_mb: Available memory (MB) on the integration - runtime node. - :vartype available_memory_in_mb: int - :ivar cpu_utilization: CPU percentage on the integration runtime node. - :vartype cpu_utilization: int - :ivar concurrent_jobs_limit: Maximum concurrent jobs on the integration - runtime node. - :vartype concurrent_jobs_limit: int - :ivar concurrent_jobs_running: The number of jobs currently running on the - integration runtime node. - :vartype concurrent_jobs_running: int - :ivar max_concurrent_jobs: The maximum concurrent jobs in this integration - runtime. - :vartype max_concurrent_jobs: int - :ivar sent_bytes: Sent bytes on the integration runtime node. - :vartype sent_bytes: float - :ivar received_bytes: Received bytes on the integration runtime node. - :vartype received_bytes: float - """ - - _validation = { - 'node_name': {'readonly': True}, - 'available_memory_in_mb': {'readonly': True}, - 'cpu_utilization': {'readonly': True}, - 'concurrent_jobs_limit': {'readonly': True}, - 'concurrent_jobs_running': {'readonly': True}, - 'max_concurrent_jobs': {'readonly': True}, - 'sent_bytes': {'readonly': True}, - 'received_bytes': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'node_name': {'key': 'nodeName', 'type': 'str'}, - 'available_memory_in_mb': {'key': 'availableMemoryInMB', 'type': 'int'}, - 'cpu_utilization': {'key': 'cpuUtilization', 'type': 'int'}, - 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, - 'concurrent_jobs_running': {'key': 'concurrentJobsRunning', 'type': 'int'}, - 'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'}, - 'sent_bytes': {'key': 'sentBytes', 'type': 'float'}, - 'received_bytes': {'key': 'receivedBytes', 'type': 'float'}, - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(IntegrationRuntimeNodeMonitoringData, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.node_name = None - self.available_memory_in_mb = None - self.cpu_utilization = None - self.concurrent_jobs_limit = None - self.concurrent_jobs_running = None - self.max_concurrent_jobs = None - self.sent_bytes = None - self.received_bytes = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_py3.py deleted file mode 100644 index b4056a07591b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_py3.py +++ /dev/null @@ -1,51 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. 
-# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntime(Model): - """Azure Data Factory nested object which serves as a compute resource for - activities. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SelfHostedIntegrationRuntime, ManagedIntegrationRuntime - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Integration runtime description. - :type description: str - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SelfHosted': 'SelfHostedIntegrationRuntime', 'Managed': 'ManagedIntegrationRuntime'} - } - - def __init__(self, *, additional_properties=None, description: str=None, **kwargs) -> None: - super(IntegrationRuntime, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.description = description - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference.py deleted file mode 100644 index 7461d29de284..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference.py +++ /dev/null @@ -1,48 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeReference(Model): - """Integration runtime reference type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Type of integration runtime. Default value: - "IntegrationRuntimeReference" . - :vartype type: str - :param reference_name: Required. Reference integration runtime name. - :type reference_name: str - :param parameters: Arguments for integration runtime. 
- :type parameters: dict[str, object] - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, - } - - type = "IntegrationRuntimeReference" - - def __init__(self, **kwargs): - super(IntegrationRuntimeReference, self).__init__(**kwargs) - self.reference_name = kwargs.get('reference_name', None) - self.parameters = kwargs.get('parameters', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference_py3.py deleted file mode 100644 index 56fd3608ba61..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference_py3.py +++ /dev/null @@ -1,48 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeReference(Model): - """Integration runtime reference type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Type of integration runtime. Default value: - "IntegrationRuntimeReference" . - :vartype type: str - :param reference_name: Required. Reference integration runtime name. - :type reference_name: str - :param parameters: Arguments for integration runtime. - :type parameters: dict[str, object] - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, - } - - type = "IntegrationRuntimeReference" - - def __init__(self, *, reference_name: str, parameters=None, **kwargs) -> None: - super(IntegrationRuntimeReference, self).__init__(**kwargs) - self.reference_name = reference_name - self.parameters = parameters diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters.py deleted file mode 100644 index 3cd91195af1b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters.py +++ /dev/null @@ -1,30 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
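# A minimal sketch of the IntegrationRuntimeReference model above, as used to pin
# a linked service or activity to a specific runtime; the runtime name is an
# assumption. The `type` field is a constant and needs no argument.
from azure.mgmt.datafactory.models import IntegrationRuntimeReference

connect_via = IntegrationRuntimeReference(reference_name='my-self-hosted-ir')
# A linked service definition would then take connect_via=connect_via.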
-# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeRegenerateKeyParameters(Model): - """Parameters to regenerate the authentication key. - - :param key_name: The name of the authentication key to regenerate. - Possible values include: 'authKey1', 'authKey2' - :type key_name: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeyName - """ - - _attribute_map = { - 'key_name': {'key': 'keyName', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(IntegrationRuntimeRegenerateKeyParameters, self).__init__(**kwargs) - self.key_name = kwargs.get('key_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters_py3.py deleted file mode 100644 index f3846cf8ec55..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters_py3.py +++ /dev/null @@ -1,30 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeRegenerateKeyParameters(Model): - """Parameters to regenerate the authentication key. - - :param key_name: The name of the authentication key to regenerate. - Possible values include: 'authKey1', 'authKey2' - :type key_name: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeyName - """ - - _attribute_map = { - 'key_name': {'key': 'keyName', 'type': 'str'}, - } - - def __init__(self, *, key_name=None, **kwargs) -> None: - super(IntegrationRuntimeRegenerateKeyParameters, self).__init__(**kwargs) - self.key_name = key_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource.py deleted file mode 100644 index b18f376d3698..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource.py +++ /dev/null @@ -1,53 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .sub_resource import SubResource - - -class IntegrationRuntimeResource(SubResource): - """Integration runtime resource type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. 
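# A sketch pairing IntegrationRuntimeRegenerateKeyParameters above with the
# regenerate operation; whether the one-property body is flattened into a bare
# key_name argument varies by SDK generation, so this call shape is an assumption.
new_keys = client.integration_runtimes.regenerate_auth_key(
    'my-rg', 'my-factory', 'my-ir', key_name='authKey2')
print(new_keys.auth_key2)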
- :vartype name: str
- :ivar type: The resource type.
- :vartype type: str
- :ivar etag: Etag identifies change in the resource.
- :vartype etag: str
- :param properties: Required. Integration runtime properties.
- :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntime
- """
-
- _validation = {
- 'id': {'readonly': True},
- 'name': {'readonly': True},
- 'type': {'readonly': True},
- 'etag': {'readonly': True},
- 'properties': {'required': True},
- }
-
- _attribute_map = {
- 'id': {'key': 'id', 'type': 'str'},
- 'name': {'key': 'name', 'type': 'str'},
- 'type': {'key': 'type', 'type': 'str'},
- 'etag': {'key': 'etag', 'type': 'str'},
- 'properties': {'key': 'properties', 'type': 'IntegrationRuntime'},
- }
-
- def __init__(self, **kwargs):
- super(IntegrationRuntimeResource, self).__init__(**kwargs)
- self.properties = kwargs.get('properties', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource_paged.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource_paged.py
deleted file mode 100644
index cef89866884e..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource_paged.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.paging import Paged
-
-
-class IntegrationRuntimeResourcePaged(Paged):
- """
- A paging container for iterating over a list of :class:`IntegrationRuntimeResource <azure.mgmt.datafactory.models.IntegrationRuntimeResource>` object
- """
-
- _attribute_map = {
- 'next_link': {'key': 'nextLink', 'type': 'str'},
- 'current_page': {'key': 'value', 'type': '[IntegrationRuntimeResource]'}
- }
-
- def __init__(self, *args, **kwargs):
-
- super(IntegrationRuntimeResourcePaged, self).__init__(*args, **kwargs)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource_py3.py
deleted file mode 100644
index 9239f54166f9..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource_py3.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .sub_resource_py3 import SubResource
-
-
-class IntegrationRuntimeResource(SubResource):
- """Integration runtime resource type.
-
- Variables are only populated by the server, and will be ignored when
- sending a request.
-
- All required parameters must be populated in order to send to Azure.
-
- :ivar id: The resource identifier.
- :vartype id: str
- :ivar name: The resource name.
- :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Integration runtime properties. - :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntime - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'IntegrationRuntime'}, - } - - def __init__(self, *, properties, **kwargs) -> None: - super(IntegrationRuntimeResource, self).__init__(**kwargs) - self.properties = properties diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info.py deleted file mode 100644 index 3399f8f38300..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info.py +++ /dev/null @@ -1,55 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeSsisCatalogInfo(Model): - """Catalog information for managed dedicated integration runtime. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param catalog_server_endpoint: The catalog database server URL. - :type catalog_server_endpoint: str - :param catalog_admin_user_name: The administrator user name of catalog - database. - :type catalog_admin_user_name: str - :param catalog_admin_password: The password of the administrator user - account of the catalog database. - :type catalog_admin_password: ~azure.mgmt.datafactory.models.SecureString - :param catalog_pricing_tier: The pricing tier for the catalog database. - The valid values could be found in - https://azure.microsoft.com/en-us/pricing/details/sql-database/. 
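The IntegrationRuntimeResource model and its Paged container removed above are what list operations yield. A hedged sketch, assuming an authenticated DataFactoryManagementClient named adf_client and the list_by_factory operation of this SDK generation ('rg' and 'factory' are placeholder names):

# Iterate the paged collection; each item is an IntegrationRuntimeResource
# whose read-only id/name/type/etag fields are populated by the server.
for ir in adf_client.integration_runtimes.list_by_factory('rg', 'factory'):
    print(ir.name, ir.etag)
    print(type(ir.properties))  # a concrete IntegrationRuntime subtype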
Possible - values include: 'Basic', 'Standard', 'Premium', 'PremiumRS' - :type catalog_pricing_tier: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogPricingTier - """ - - _validation = { - 'catalog_admin_user_name': {'max_length': 128, 'min_length': 1}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'catalog_server_endpoint': {'key': 'catalogServerEndpoint', 'type': 'str'}, - 'catalog_admin_user_name': {'key': 'catalogAdminUserName', 'type': 'str'}, - 'catalog_admin_password': {'key': 'catalogAdminPassword', 'type': 'SecureString'}, - 'catalog_pricing_tier': {'key': 'catalogPricingTier', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(IntegrationRuntimeSsisCatalogInfo, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.catalog_server_endpoint = kwargs.get('catalog_server_endpoint', None) - self.catalog_admin_user_name = kwargs.get('catalog_admin_user_name', None) - self.catalog_admin_password = kwargs.get('catalog_admin_password', None) - self.catalog_pricing_tier = kwargs.get('catalog_pricing_tier', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info_py3.py deleted file mode 100644 index 27996bb4aeb5..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info_py3.py +++ /dev/null @@ -1,55 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeSsisCatalogInfo(Model): - """Catalog information for managed dedicated integration runtime. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param catalog_server_endpoint: The catalog database server URL. - :type catalog_server_endpoint: str - :param catalog_admin_user_name: The administrator user name of catalog - database. - :type catalog_admin_user_name: str - :param catalog_admin_password: The password of the administrator user - account of the catalog database. - :type catalog_admin_password: ~azure.mgmt.datafactory.models.SecureString - :param catalog_pricing_tier: The pricing tier for the catalog database. - The valid values could be found in - https://azure.microsoft.com/en-us/pricing/details/sql-database/. 
Possible - values include: 'Basic', 'Standard', 'Premium', 'PremiumRS' - :type catalog_pricing_tier: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogPricingTier - """ - - _validation = { - 'catalog_admin_user_name': {'max_length': 128, 'min_length': 1}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'catalog_server_endpoint': {'key': 'catalogServerEndpoint', 'type': 'str'}, - 'catalog_admin_user_name': {'key': 'catalogAdminUserName', 'type': 'str'}, - 'catalog_admin_password': {'key': 'catalogAdminPassword', 'type': 'SecureString'}, - 'catalog_pricing_tier': {'key': 'catalogPricingTier', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, catalog_server_endpoint: str=None, catalog_admin_user_name: str=None, catalog_admin_password=None, catalog_pricing_tier=None, **kwargs) -> None: - super(IntegrationRuntimeSsisCatalogInfo, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.catalog_server_endpoint = catalog_server_endpoint - self.catalog_admin_user_name = catalog_admin_user_name - self.catalog_admin_password = catalog_admin_password - self.catalog_pricing_tier = catalog_pricing_tier diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py deleted file mode 100644 index 293f071aa0b3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py +++ /dev/null @@ -1,59 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeSsisProperties(Model): - """SSIS properties for managed integration runtime. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param catalog_info: Catalog information for managed dedicated integration - runtime. - :type catalog_info: - ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogInfo - :param license_type: License type for bringing your own license scenario. - Possible values include: 'BasePrice', 'LicenseIncluded' - :type license_type: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeLicenseType - :param custom_setup_script_properties: Custom setup script properties for - a managed dedicated integration runtime. - :type custom_setup_script_properties: - ~azure.mgmt.datafactory.models.IntegrationRuntimeCustomSetupScriptProperties - :param data_proxy_properties: Data proxy properties for a managed - dedicated integration runtime. - :type data_proxy_properties: - ~azure.mgmt.datafactory.models.IntegrationRuntimeDataProxyProperties - :param edition: The edition for the SSIS Integration Runtime. 
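To make the SSIS catalog model removed above concrete, a minimal construction sketch; the endpoint, user name, and password are placeholders, and SecureString is the sibling model the docstring names for the admin password:

from azure.mgmt.datafactory.models import (
    IntegrationRuntimeSsisCatalogInfo,
    SecureString,
)

catalog = IntegrationRuntimeSsisCatalogInfo(
    catalog_server_endpoint='myserver.database.windows.net',
    catalog_admin_user_name='sqladmin',  # validated to 1-128 characters
    catalog_admin_password=SecureString(value='<placeholder>'),
    catalog_pricing_tier='Basic',  # 'Basic', 'Standard', 'Premium' or 'PremiumRS'
)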
Possible - values include: 'Standard', 'Enterprise' - :type edition: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeEdition - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'catalog_info': {'key': 'catalogInfo', 'type': 'IntegrationRuntimeSsisCatalogInfo'}, - 'license_type': {'key': 'licenseType', 'type': 'str'}, - 'custom_setup_script_properties': {'key': 'customSetupScriptProperties', 'type': 'IntegrationRuntimeCustomSetupScriptProperties'}, - 'data_proxy_properties': {'key': 'dataProxyProperties', 'type': 'IntegrationRuntimeDataProxyProperties'}, - 'edition': {'key': 'edition', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.catalog_info = kwargs.get('catalog_info', None) - self.license_type = kwargs.get('license_type', None) - self.custom_setup_script_properties = kwargs.get('custom_setup_script_properties', None) - self.data_proxy_properties = kwargs.get('data_proxy_properties', None) - self.edition = kwargs.get('edition', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py deleted file mode 100644 index f75775e29a7f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py +++ /dev/null @@ -1,59 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeSsisProperties(Model): - """SSIS properties for managed integration runtime. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param catalog_info: Catalog information for managed dedicated integration - runtime. - :type catalog_info: - ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogInfo - :param license_type: License type for bringing your own license scenario. - Possible values include: 'BasePrice', 'LicenseIncluded' - :type license_type: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeLicenseType - :param custom_setup_script_properties: Custom setup script properties for - a managed dedicated integration runtime. - :type custom_setup_script_properties: - ~azure.mgmt.datafactory.models.IntegrationRuntimeCustomSetupScriptProperties - :param data_proxy_properties: Data proxy properties for a managed - dedicated integration runtime. - :type data_proxy_properties: - ~azure.mgmt.datafactory.models.IntegrationRuntimeDataProxyProperties - :param edition: The edition for the SSIS Integration Runtime. 
Possible - values include: 'Standard', 'Enterprise' - :type edition: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeEdition - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'catalog_info': {'key': 'catalogInfo', 'type': 'IntegrationRuntimeSsisCatalogInfo'}, - 'license_type': {'key': 'licenseType', 'type': 'str'}, - 'custom_setup_script_properties': {'key': 'customSetupScriptProperties', 'type': 'IntegrationRuntimeCustomSetupScriptProperties'}, - 'data_proxy_properties': {'key': 'dataProxyProperties', 'type': 'IntegrationRuntimeDataProxyProperties'}, - 'edition': {'key': 'edition', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, catalog_info=None, license_type=None, custom_setup_script_properties=None, data_proxy_properties=None, edition=None, **kwargs) -> None: - super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.catalog_info = catalog_info - self.license_type = license_type - self.custom_setup_script_properties = custom_setup_script_properties - self.data_proxy_properties = data_proxy_properties - self.edition = edition diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status.py deleted file mode 100644 index 64da6347f9ed..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status.py +++ /dev/null @@ -1,64 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeStatus(Model): - """Integration runtime status. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SelfHostedIntegrationRuntimeStatus, - ManagedIntegrationRuntimeStatus - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar data_factory_name: The data factory name which the integration - runtime belong to. - :vartype data_factory_name: str - :ivar state: The state of integration runtime. Possible values include: - 'Initial', 'Stopped', 'Started', 'Starting', 'Stopping', - 'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied' - :vartype state: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeState - :param type: Required. Constant filled by server. 
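Building on that sketch, the IntegrationRuntimeSsisProperties wrapper removed above composes the catalog info with license and edition choices; the string values below come straight from its docstring enums:

from azure.mgmt.datafactory.models import IntegrationRuntimeSsisProperties

ssis_props = IntegrationRuntimeSsisProperties(
    catalog_info=catalog,            # from the previous sketch
    license_type='LicenseIncluded',  # or 'BasePrice' for bring-your-own-license
    edition='Standard',              # or 'Enterprise'
)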
- :type type: str - """ - - _validation = { - 'data_factory_name': {'readonly': True}, - 'state': {'readonly': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, - 'state': {'key': 'state', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SelfHosted': 'SelfHostedIntegrationRuntimeStatus', 'Managed': 'ManagedIntegrationRuntimeStatus'} - } - - def __init__(self, **kwargs): - super(IntegrationRuntimeStatus, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.data_factory_name = None - self.state = None - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response.py deleted file mode 100644 index 9382b4b08fde..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response.py +++ /dev/null @@ -1,40 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeStatusListResponse(Model): - """A list of integration runtime status. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of integration runtime status. - :type value: - list[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse] - :param next_link: The link to the next page of results, if any remaining - results exist. - :type next_link: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[IntegrationRuntimeStatusResponse]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(IntegrationRuntimeStatusListResponse, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.next_link = kwargs.get('next_link', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response_py3.py deleted file mode 100644 index bed71f74ffc6..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response_py3.py +++ /dev/null @@ -1,40 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeStatusListResponse(Model): - """A list of integration runtime status. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of integration runtime status. - :type value: - list[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse] - :param next_link: The link to the next page of results, if any remaining - results exist. - :type next_link: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[IntegrationRuntimeStatusResponse]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__(self, *, value, next_link: str=None, **kwargs) -> None: - super(IntegrationRuntimeStatusListResponse, self).__init__(**kwargs) - self.value = value - self.next_link = next_link diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_py3.py deleted file mode 100644 index 8541e04dc679..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_py3.py +++ /dev/null @@ -1,64 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeStatus(Model): - """Integration runtime status. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SelfHostedIntegrationRuntimeStatus, - ManagedIntegrationRuntimeStatus - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar data_factory_name: The data factory name which the integration - runtime belong to. - :vartype data_factory_name: str - :ivar state: The state of integration runtime. Possible values include: - 'Initial', 'Stopped', 'Started', 'Starting', 'Stopping', - 'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied' - :vartype state: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeState - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'data_factory_name': {'readonly': True}, - 'state': {'readonly': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, - 'state': {'key': 'state', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SelfHosted': 'SelfHostedIntegrationRuntimeStatus', 'Managed': 'ManagedIntegrationRuntimeStatus'} - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(IntegrationRuntimeStatus, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.data_factory_name = None - self.state = None - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response.py deleted file mode 100644 index 901b4d8b7442..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response.py +++ /dev/null @@ -1,42 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeStatusResponse(Model): - """Integration runtime status response. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar name: The integration runtime name. - :vartype name: str - :param properties: Required. Integration runtime properties. - :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatus - """ - - _validation = { - 'name': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'IntegrationRuntimeStatus'}, - } - - def __init__(self, **kwargs): - super(IntegrationRuntimeStatusResponse, self).__init__(**kwargs) - self.name = None - self.properties = kwargs.get('properties', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response_py3.py deleted file mode 100644 index 64d84a1e4f19..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response_py3.py +++ /dev/null @@ -1,42 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
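The status models removed above are returned by queries rather than built by callers: the server fills the polymorphic type discriminator and the read-only state. A hedged sketch, assuming the get_status operation of this SDK generation and the adf_client from earlier:

# properties is deserialized into a concrete IntegrationRuntimeStatus
# subtype (SelfHosted or Managed) based on the server-filled 'type'.
status = adf_client.integration_runtimes.get_status('rg', 'factory', 'myIR')
print(status.name, status.properties.state)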
-# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeStatusResponse(Model): - """Integration runtime status response. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar name: The integration runtime name. - :vartype name: str - :param properties: Required. Integration runtime properties. - :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatus - """ - - _validation = { - 'name': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'IntegrationRuntimeStatus'}, - } - - def __init__(self, *, properties, **kwargs) -> None: - super(IntegrationRuntimeStatusResponse, self).__init__(**kwargs) - self.name = None - self.properties = properties diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties.py deleted file mode 100644 index 752b5b99eb60..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties.py +++ /dev/null @@ -1,38 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeVNetProperties(Model): - """VNet properties for managed integration runtime. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param v_net_id: The ID of the VNet that this integration runtime will - join. - :type v_net_id: str - :param subnet: The name of the subnet this integration runtime will join. - :type subnet: str - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'v_net_id': {'key': 'vNetId', 'type': 'str'}, - 'subnet': {'key': 'subnet', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(IntegrationRuntimeVNetProperties, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.v_net_id = kwargs.get('v_net_id', None) - self.subnet = kwargs.get('subnet', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties_py3.py deleted file mode 100644 index 32e8beb31ea1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties_py3.py +++ /dev/null @@ -1,38 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
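For the VNet properties model removed above, construction is two plain strings; both values below are placeholders:

from azure.mgmt.datafactory.models import IntegrationRuntimeVNetProperties

vnet = IntegrationRuntimeVNetProperties(
    v_net_id='<vnet-resource-id>',  # the VNet the managed IR will join
    subnet='default',               # subnet name within that VNet
)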
See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeVNetProperties(Model): - """VNet properties for managed integration runtime. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param v_net_id: The ID of the VNet that this integration runtime will - join. - :type v_net_id: str - :param subnet: The name of the subnet this integration runtime will join. - :type subnet: str - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'v_net_id': {'key': 'vNetId', 'type': 'str'}, - 'subnet': {'key': 'subnet', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, v_net_id: str=None, subnet: str=None, **kwargs) -> None: - super(IntegrationRuntimeVNetProperties, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.v_net_id = v_net_id - self.subnet = subnet diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service.py deleted file mode 100644 index 517cdd63caa5..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service.py +++ /dev/null @@ -1,98 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class JiraLinkedService(LinkedService): - """Jira Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The IP address or host name of the Jira service. - (e.g. jira.example.com) - :type host: object - :param port: The TCP port that the Jira server uses to listen for client - connections. The default value is 443 if connecting through HTTPS, or 8080 - if connecting through HTTP. - :type port: object - :param username: Required. The user name that you use to access Jira - Service. 
- :type username: object - :param password: The password corresponding to the user name that you - provided in the username field. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'username': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(JiraLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.port = kwargs.get('port', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Jira' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service_py3.py deleted file mode 100644 index 82dc8d578da3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service_py3.py +++ /dev/null @@ -1,98 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. 
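For the Jira linked service removed above, only host and username are required; everything else falls back to the documented defaults. A minimal sketch with placeholder values, again using SecureString for the secret:

from azure.mgmt.datafactory.models import JiraLinkedService, SecureString

jira_ls = JiraLinkedService(
    host='jira.example.com',  # required; IP address or host name
    username='build-user',    # required
    password=SecureString(value='<placeholder>'),
    port=443,                 # defaults: 443 over HTTPS, 8080 over HTTP
)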
-# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class JiraLinkedService(LinkedService): - """Jira Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The IP address or host name of the Jira service. - (e.g. jira.example.com) - :type host: object - :param port: The TCP port that the Jira server uses to listen for client - connections. The default value is 443 if connecting through HTTPS, or 8080 - if connecting through HTTP. - :type port: object - :param username: Required. The user name that you use to access Jira - Service. - :type username: object - :param password: The password corresponding to the user name that you - provided in the username field. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'username': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, password=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(JiraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.port = port - self.username = username - self.password = password - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'Jira' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset.py deleted file mode 100644 index 1c2b12c18e15..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class JiraObjectDataset(Dataset): - """Jira Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. 
Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(JiraObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'JiraObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset_py3.py deleted file mode 100644 index 3c061b238cde..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class JiraObjectDataset(Dataset): - """Jira Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
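The Jira dataset removed above needs only a linked service reference; table_name is optional. A sketch with placeholder names:

from azure.mgmt.datafactory.models import (
    JiraObjectDataset,
    LinkedServiceReference,
)

jira_ds = JiraObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='JiraLS'),
    table_name='Issues',  # Type: string (or Expression with resultType string)
)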
- :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(JiraObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'JiraObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source.py deleted file mode 100644 index 709da0ce1205..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class JiraSource(CopySource): - """A copy activity Jira Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(JiraSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'JiraSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source_py3.py deleted file mode 100644 index c958c8351bb3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class JiraSource(CopySource): - """A copy activity Jira Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
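And the copy-activity source removed above pairs with that dataset; every property except the server-filled type is optional, so a query alone suffices. The query text is a placeholder:

from azure.mgmt.datafactory.models import JiraSource

src = JiraSource(query='select * from Issues')  # pushed down to the connector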
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'JiraSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py deleted file mode 100644 index 80f4ff0aaf8b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_storage_format import DatasetStorageFormat - - -class JsonFormat(DatasetStorageFormat): - """The data stored in JSON format. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param serializer: Serializer. Type: string (or Expression with resultType - string). - :type serializer: object - :param deserializer: Deserializer. Type: string (or Expression with - resultType string). - :type deserializer: object - :param type: Required. Constant filled by server. - :type type: str - :param file_pattern: File pattern of JSON. To be more specific, the way of - separating a collection of JSON objects. The default value is - 'setOfObjects'. It is case-sensitive. - :type file_pattern: object - :param nesting_separator: The character used to separate nesting levels. - Default value is '.' (dot). Type: string (or Expression with resultType - string). - :type nesting_separator: object - :param encoding_name: The code page name of the preferred encoding. If not - provided, the default value is 'utf-8', unless the byte order mark (BOM) - denotes another Unicode encoding. The full list of supported values can be - found in the 'Name' column of the table of encodings in the following - reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string - (or Expression with resultType string). - :type encoding_name: object - :param json_node_reference: The JSONPath of the JSON array element to be - flattened. Example: "$.ArrayPath". Type: string (or Expression with - resultType string). 
- :type json_node_reference: object - :param json_path_definition: The JSONPath definition for each column - mapping with a customized column name to extract data from JSON file. For - fields under root object, start with "$"; for fields inside the array - chosen by jsonNodeReference property, start from the array element. - Example: {"Column1": "$.Column1Path", "Column2": "Column2PathInArray"}. - Type: object (or Expression with resultType object). - :type json_path_definition: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'file_pattern': {'key': 'filePattern', 'type': 'object'}, - 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, - 'encoding_name': {'key': 'encodingName', 'type': 'object'}, - 'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'}, - 'json_path_definition': {'key': 'jsonPathDefinition', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(JsonFormat, self).__init__(**kwargs) - self.file_pattern = kwargs.get('file_pattern', None) - self.nesting_separator = kwargs.get('nesting_separator', None) - self.encoding_name = kwargs.get('encoding_name', None) - self.json_node_reference = kwargs.get('json_node_reference', None) - self.json_path_definition = kwargs.get('json_path_definition', None) - self.type = 'JsonFormat' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py deleted file mode 100644 index 2fdb44cc3b7f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_storage_format_py3 import DatasetStorageFormat - - -class JsonFormat(DatasetStorageFormat): - """The data stored in JSON format. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param serializer: Serializer. Type: string (or Expression with resultType - string). - :type serializer: object - :param deserializer: Deserializer. Type: string (or Expression with - resultType string). - :type deserializer: object - :param type: Required. Constant filled by server. - :type type: str - :param file_pattern: File pattern of JSON. To be more specific, the way of - separating a collection of JSON objects. The default value is - 'setOfObjects'. It is case-sensitive. - :type file_pattern: object - :param nesting_separator: The character used to separate nesting levels. - Default value is '.' (dot). Type: string (or Expression with resultType - string). 
- :type nesting_separator: object - :param encoding_name: The code page name of the preferred encoding. If not - provided, the default value is 'utf-8', unless the byte order mark (BOM) - denotes another Unicode encoding. The full list of supported values can be - found in the 'Name' column of the table of encodings in the following - reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string - (or Expression with resultType string). - :type encoding_name: object - :param json_node_reference: The JSONPath of the JSON array element to be - flattened. Example: "$.ArrayPath". Type: string (or Expression with - resultType string). - :type json_node_reference: object - :param json_path_definition: The JSONPath definition for each column - mapping with a customized column name to extract data from JSON file. For - fields under root object, start with "$"; for fields inside the array - chosen by jsonNodeReference property, start from the array element. - Example: {"Column1": "$.Column1Path", "Column2": "Column2PathInArray"}. - Type: object (or Expression with resultType object). - :type json_path_definition: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'file_pattern': {'key': 'filePattern', 'type': 'object'}, - 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, - 'encoding_name': {'key': 'encodingName', 'type': 'object'}, - 'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'}, - 'json_path_definition': {'key': 'jsonPathDefinition', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, file_pattern=None, nesting_separator=None, encoding_name=None, json_node_reference=None, json_path_definition=None, **kwargs) -> None: - super(JsonFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) - self.file_pattern = file_pattern - self.nesting_separator = nesting_separator - self.encoding_name = encoding_name - self.json_node_reference = json_node_reference - self.json_path_definition = json_path_definition - self.type = 'JsonFormat' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime.py deleted file mode 100644 index f4a4e7eb8bf0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime.py +++ /dev/null @@ -1,58 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class LinkedIntegrationRuntime(Model): - """The linked integration runtime information. - - Variables are only populated by the server, and will be ignored when - sending a request. 
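[Editor's note] For reference, the jsonNodeReference / jsonPathDefinition pair documented in the JsonFormat docstrings above works as follows; this sketch reuses the docstring's own example values:

    from azure.mgmt.datafactory.models import JsonFormat

    # Flatten the array at $.ArrayPath; map columns either from the root
    # object ("$"-prefixed paths) or relative to each array element.
    fmt = JsonFormat(
        file_pattern="setOfObjects",           # case-sensitive default
        json_node_reference="$.ArrayPath",
        json_path_definition={
            "Column1": "$.Column1Path",        # from the root object
            "Column2": "Column2PathInArray",   # relative to the array element
        },
    )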
-
- :ivar name: The name of the linked integration runtime.
- :vartype name: str
- :ivar subscription_id: The subscription ID to which the linked
- integration runtime belongs.
- :vartype subscription_id: str
- :ivar data_factory_name: The name of the data factory to which the linked
- integration runtime belongs.
- :vartype data_factory_name: str
- :ivar data_factory_location: The location of the data factory to which
- the linked integration runtime belongs.
- :vartype data_factory_location: str
- :ivar create_time: The creation time of the linked integration runtime.
- :vartype create_time: datetime
- """
-
- _validation = {
- 'name': {'readonly': True},
- 'subscription_id': {'readonly': True},
- 'data_factory_name': {'readonly': True},
- 'data_factory_location': {'readonly': True},
- 'create_time': {'readonly': True},
- }
-
- _attribute_map = {
- 'name': {'key': 'name', 'type': 'str'},
- 'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
- 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'},
- 'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'},
- 'create_time': {'key': 'createTime', 'type': 'iso-8601'},
- }
-
- def __init__(self, **kwargs):
- super(LinkedIntegrationRuntime, self).__init__(**kwargs)
- self.name = None
- self.subscription_id = None
- self.data_factory_name = None
- self.data_factory_location = None
- self.create_time = None
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization.py
deleted file mode 100644
index b7be47e8f096..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .linked_integration_runtime_type import LinkedIntegrationRuntimeType
-
-
-class LinkedIntegrationRuntimeKeyAuthorization(LinkedIntegrationRuntimeType):
- """The key authorization type integration runtime.
-
- All required parameters must be populated in order to send to Azure.
-
- :param authorization_type: Required. Constant filled by server.
- :type authorization_type: str
- :param key: Required. The key used for authorization.
- :type key: ~azure.mgmt.datafactory.models.SecureString - """ - - _validation = { - 'authorization_type': {'required': True}, - 'key': {'required': True}, - } - - _attribute_map = { - 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, - 'key': {'key': 'key', 'type': 'SecureString'}, - } - - def __init__(self, **kwargs): - super(LinkedIntegrationRuntimeKeyAuthorization, self).__init__(**kwargs) - self.key = kwargs.get('key', None) - self.authorization_type = 'Key' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization_py3.py deleted file mode 100644 index 4a2ebd8d1003..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization_py3.py +++ /dev/null @@ -1,39 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_integration_runtime_type_py3 import LinkedIntegrationRuntimeType - - -class LinkedIntegrationRuntimeKeyAuthorization(LinkedIntegrationRuntimeType): - """The key authorization type integration runtime. - - All required parameters must be populated in order to send to Azure. - - :param authorization_type: Required. Constant filled by server. - :type authorization_type: str - :param key: Required. The key used for authorization. - :type key: ~azure.mgmt.datafactory.models.SecureString - """ - - _validation = { - 'authorization_type': {'required': True}, - 'key': {'required': True}, - } - - _attribute_map = { - 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, - 'key': {'key': 'key', 'type': 'SecureString'}, - } - - def __init__(self, *, key, **kwargs) -> None: - super(LinkedIntegrationRuntimeKeyAuthorization, self).__init__(**kwargs) - self.key = key - self.authorization_type = 'Key' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_py3.py deleted file mode 100644 index 6c831ab5f511..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_py3.py +++ /dev/null @@ -1,58 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class LinkedIntegrationRuntime(Model): - """The linked integration runtime information. - - Variables are only populated by the server, and will be ignored when - sending a request. 
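[Editor's note] A minimal sketch of the key-authorization model being removed above; the key value is a placeholder. The 'authorization_type' discriminator is set to 'Key' by the model itself:

    from azure.mgmt.datafactory.models import (
        LinkedIntegrationRuntimeKeyAuthorization,
        SecureString,
    )

    # Authorize a linked factory against a shared integration runtime
    # using one of the runtime's authorization keys.
    auth = LinkedIntegrationRuntimeKeyAuthorization(
        key=SecureString(value="<ir-auth-key>"),  # placeholder key
    )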
-
- :ivar name: The name of the linked integration runtime.
- :vartype name: str
- :ivar subscription_id: The subscription ID to which the linked
- integration runtime belongs.
- :vartype subscription_id: str
- :ivar data_factory_name: The name of the data factory to which the linked
- integration runtime belongs.
- :vartype data_factory_name: str
- :ivar data_factory_location: The location of the data factory to which
- the linked integration runtime belongs.
- :vartype data_factory_location: str
- :ivar create_time: The creation time of the linked integration runtime.
- :vartype create_time: datetime
- """
-
- _validation = {
- 'name': {'readonly': True},
- 'subscription_id': {'readonly': True},
- 'data_factory_name': {'readonly': True},
- 'data_factory_location': {'readonly': True},
- 'create_time': {'readonly': True},
- }
-
- _attribute_map = {
- 'name': {'key': 'name', 'type': 'str'},
- 'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
- 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'},
- 'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'},
- 'create_time': {'key': 'createTime', 'type': 'iso-8601'},
- }
-
- def __init__(self, **kwargs) -> None:
- super(LinkedIntegrationRuntime, self).__init__(**kwargs)
- self.name = None
- self.subscription_id = None
- self.data_factory_name = None
- self.data_factory_location = None
- self.create_time = None
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization.py
deleted file mode 100644
index 3fbc8dd9cac2..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .linked_integration_runtime_type import LinkedIntegrationRuntimeType
-
-
-class LinkedIntegrationRuntimeRbacAuthorization(LinkedIntegrationRuntimeType):
- """The role based access control (RBAC) authorization type integration
- runtime.
-
- All required parameters must be populated in order to send to Azure.
-
- :param authorization_type: Required. Constant filled by server.
- :type authorization_type: str
- :param resource_id: Required. The resource identifier of the integration
- runtime to be shared.
- :type resource_id: str - """ - - _validation = { - 'authorization_type': {'required': True}, - 'resource_id': {'required': True}, - } - - _attribute_map = { - 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(LinkedIntegrationRuntimeRbacAuthorization, self).__init__(**kwargs) - self.resource_id = kwargs.get('resource_id', None) - self.authorization_type = 'RBAC' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization_py3.py deleted file mode 100644 index 055b64809e18..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization_py3.py +++ /dev/null @@ -1,41 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_integration_runtime_type_py3 import LinkedIntegrationRuntimeType - - -class LinkedIntegrationRuntimeRbacAuthorization(LinkedIntegrationRuntimeType): - """The role based access control (RBAC) authorization type integration - runtime. - - All required parameters must be populated in order to send to Azure. - - :param authorization_type: Required. Constant filled by server. - :type authorization_type: str - :param resource_id: Required. The resource identifier of the integration - runtime to be shared. - :type resource_id: str - """ - - _validation = { - 'authorization_type': {'required': True}, - 'resource_id': {'required': True}, - } - - _attribute_map = { - 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - } - - def __init__(self, *, resource_id: str, **kwargs) -> None: - super(LinkedIntegrationRuntimeRbacAuthorization, self).__init__(**kwargs) - self.resource_id = resource_id - self.authorization_type = 'RBAC' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request.py deleted file mode 100644 index 807757332b3e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request.py +++ /dev/null @@ -1,35 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class LinkedIntegrationRuntimeRequest(Model): - """Data factory name for linked integration runtime request. 
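[Editor's note] The RBAC counterpart takes the ARM resource ID of the shared runtime instead of a key; a sketch with an illustrative (not authoritative) ID path:

    from azure.mgmt.datafactory.models import (
        LinkedIntegrationRuntimeRbacAuthorization,
    )

    # The discriminator authorization_type is fixed to 'RBAC' by the model.
    auth = LinkedIntegrationRuntimeRbacAuthorization(
        resource_id=(
            "/subscriptions/<sub-id>/resourceGroups/<rg>"
            "/providers/Microsoft.DataFactory/factories/<factory>"
            "/integrationRuntimes/<shared-ir>"
        ),
    )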
- - All required parameters must be populated in order to send to Azure. - - :param linked_factory_name: Required. The data factory name for linked - integration runtime. - :type linked_factory_name: str - """ - - _validation = { - 'linked_factory_name': {'required': True}, - } - - _attribute_map = { - 'linked_factory_name': {'key': 'factoryName', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(LinkedIntegrationRuntimeRequest, self).__init__(**kwargs) - self.linked_factory_name = kwargs.get('linked_factory_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request_py3.py deleted file mode 100644 index 45362ab63ba3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request_py3.py +++ /dev/null @@ -1,35 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class LinkedIntegrationRuntimeRequest(Model): - """Data factory name for linked integration runtime request. - - All required parameters must be populated in order to send to Azure. - - :param linked_factory_name: Required. The data factory name for linked - integration runtime. - :type linked_factory_name: str - """ - - _validation = { - 'linked_factory_name': {'required': True}, - } - - _attribute_map = { - 'linked_factory_name': {'key': 'factoryName', 'type': 'str'}, - } - - def __init__(self, *, linked_factory_name: str, **kwargs) -> None: - super(LinkedIntegrationRuntimeRequest, self).__init__(**kwargs) - self.linked_factory_name = linked_factory_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type.py deleted file mode 100644 index 446395bb9cbf..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type.py +++ /dev/null @@ -1,42 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class LinkedIntegrationRuntimeType(Model): - """The base definition of a linked integration runtime. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: LinkedIntegrationRuntimeRbacAuthorization, - LinkedIntegrationRuntimeKeyAuthorization - - All required parameters must be populated in order to send to Azure. - - :param authorization_type: Required. 
Constant filled by server. - :type authorization_type: str - """ - - _validation = { - 'authorization_type': {'required': True}, - } - - _attribute_map = { - 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, - } - - _subtype_map = { - 'authorization_type': {'RBAC': 'LinkedIntegrationRuntimeRbacAuthorization', 'Key': 'LinkedIntegrationRuntimeKeyAuthorization'} - } - - def __init__(self, **kwargs): - super(LinkedIntegrationRuntimeType, self).__init__(**kwargs) - self.authorization_type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type_py3.py deleted file mode 100644 index 79468dc450d2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type_py3.py +++ /dev/null @@ -1,42 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class LinkedIntegrationRuntimeType(Model): - """The base definition of a linked integration runtime. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: LinkedIntegrationRuntimeRbacAuthorization, - LinkedIntegrationRuntimeKeyAuthorization - - All required parameters must be populated in order to send to Azure. - - :param authorization_type: Required. Constant filled by server. - :type authorization_type: str - """ - - _validation = { - 'authorization_type': {'required': True}, - } - - _attribute_map = { - 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, - } - - _subtype_map = { - 'authorization_type': {'RBAC': 'LinkedIntegrationRuntimeRbacAuthorization', 'Key': 'LinkedIntegrationRuntimeKeyAuthorization'} - } - - def __init__(self, **kwargs) -> None: - super(LinkedIntegrationRuntimeType, self).__init__(**kwargs) - self.authorization_type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py deleted file mode 100644 index 2778a33fbb5a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py +++ /dev/null @@ -1,102 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class LinkedService(Model): - """The Azure Data Factory nested object which contains the information and - credential which can be used to connect with related store or compute - resource. 
- - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureFunctionLinkedService, - AzureDataExplorerLinkedService, SapTableLinkedService, - GoogleAdWordsLinkedService, OracleServiceCloudLinkedService, - DynamicsAXLinkedService, ResponsysLinkedService, - AzureDatabricksLinkedService, AzureDataLakeAnalyticsLinkedService, - HDInsightOnDemandLinkedService, SalesforceMarketingCloudLinkedService, - NetezzaLinkedService, VerticaLinkedService, ZohoLinkedService, - XeroLinkedService, SquareLinkedService, SparkLinkedService, - ShopifyLinkedService, ServiceNowLinkedService, QuickBooksLinkedService, - PrestoLinkedService, PhoenixLinkedService, PaypalLinkedService, - MarketoLinkedService, AzureMariaDBLinkedService, MariaDBLinkedService, - MagentoLinkedService, JiraLinkedService, ImpalaLinkedService, - HubspotLinkedService, HiveLinkedService, HBaseLinkedService, - GreenplumLinkedService, GoogleBigQueryLinkedService, EloquaLinkedService, - DrillLinkedService, CouchbaseLinkedService, ConcurLinkedService, - AzurePostgreSqlLinkedService, AmazonMWSLinkedService, SapHanaLinkedService, - SapBWLinkedService, SftpServerLinkedService, FtpServerLinkedService, - HttpLinkedService, AzureSearchLinkedService, CustomDataSourceLinkedService, - AmazonRedshiftLinkedService, AmazonS3LinkedService, - RestServiceLinkedService, SapOpenHubLinkedService, SapEccLinkedService, - SapCloudForCustomerLinkedService, SalesforceServiceCloudLinkedService, - SalesforceLinkedService, Office365LinkedService, AzureBlobFSLinkedService, - AzureDataLakeStoreLinkedService, CosmosDbMongoDbApiLinkedService, - MongoDbV2LinkedService, MongoDbLinkedService, CassandraLinkedService, - WebLinkedService, ODataLinkedService, HdfsLinkedService, - MicrosoftAccessLinkedService, InformixLinkedService, OdbcLinkedService, - AzureMLLinkedService, TeradataLinkedService, Db2LinkedService, - SybaseLinkedService, PostgreSqlLinkedService, MySqlLinkedService, - AzureMySqlLinkedService, OracleLinkedService, FileServerLinkedService, - HDInsightLinkedService, CommonDataServiceForAppsLinkedService, - DynamicsCrmLinkedService, DynamicsLinkedService, CosmosDbLinkedService, - AzureKeyVaultLinkedService, AzureBatchLinkedService, - AzureSqlMILinkedService, AzureSqlDatabaseLinkedService, - SqlServerLinkedService, AzureSqlDWLinkedService, - AzureTableStorageLinkedService, AzureBlobStorageLinkedService, - AzureStorageLinkedService - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 
'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} - } - - def __init__(self, **kwargs): - super(LinkedService, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.connect_via = kwargs.get('connect_via', None) - self.description = kwargs.get('description', None) - self.parameters = kwargs.get('parameters', None) - self.annotations = kwargs.get('annotations', None) - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py deleted file mode 100644 index 2b3e475c3075..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py +++ /dev/null @@ -1,102 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class LinkedService(Model): - """The Azure Data Factory nested object which contains the information and - credential which can be used to connect with related store or compute - resource. - - You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AzureFunctionLinkedService, - AzureDataExplorerLinkedService, SapTableLinkedService, - GoogleAdWordsLinkedService, OracleServiceCloudLinkedService, - DynamicsAXLinkedService, ResponsysLinkedService, - AzureDatabricksLinkedService, AzureDataLakeAnalyticsLinkedService, - HDInsightOnDemandLinkedService, SalesforceMarketingCloudLinkedService, - NetezzaLinkedService, VerticaLinkedService, ZohoLinkedService, - XeroLinkedService, SquareLinkedService, SparkLinkedService, - ShopifyLinkedService, ServiceNowLinkedService, QuickBooksLinkedService, - PrestoLinkedService, PhoenixLinkedService, PaypalLinkedService, - MarketoLinkedService, AzureMariaDBLinkedService, MariaDBLinkedService, - MagentoLinkedService, JiraLinkedService, ImpalaLinkedService, - HubspotLinkedService, HiveLinkedService, HBaseLinkedService, - GreenplumLinkedService, GoogleBigQueryLinkedService, EloquaLinkedService, - DrillLinkedService, CouchbaseLinkedService, ConcurLinkedService, - AzurePostgreSqlLinkedService, AmazonMWSLinkedService, SapHanaLinkedService, - SapBWLinkedService, SftpServerLinkedService, FtpServerLinkedService, - HttpLinkedService, AzureSearchLinkedService, CustomDataSourceLinkedService, - AmazonRedshiftLinkedService, AmazonS3LinkedService, - RestServiceLinkedService, SapOpenHubLinkedService, SapEccLinkedService, - SapCloudForCustomerLinkedService, SalesforceServiceCloudLinkedService, - SalesforceLinkedService, Office365LinkedService, AzureBlobFSLinkedService, - AzureDataLakeStoreLinkedService, CosmosDbMongoDbApiLinkedService, - MongoDbV2LinkedService, MongoDbLinkedService, CassandraLinkedService, - WebLinkedService, ODataLinkedService, HdfsLinkedService, - MicrosoftAccessLinkedService, InformixLinkedService, OdbcLinkedService, - AzureMLLinkedService, TeradataLinkedService, Db2LinkedService, - SybaseLinkedService, PostgreSqlLinkedService, MySqlLinkedService, - AzureMySqlLinkedService, OracleLinkedService, FileServerLinkedService, - HDInsightLinkedService, CommonDataServiceForAppsLinkedService, - DynamicsCrmLinkedService, DynamicsLinkedService, CosmosDbLinkedService, - AzureKeyVaultLinkedService, AzureBatchLinkedService, - AzureSqlMILinkedService, AzureSqlDatabaseLinkedService, - SqlServerLinkedService, AzureSqlDWLinkedService, - AzureTableStorageLinkedService, AzureBlobStorageLinkedService, - AzureStorageLinkedService - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 
'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: - super(LinkedService, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.connect_via = connect_via - self.description = description - self.parameters = parameters - self.annotations = annotations - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference.py deleted file mode 100644 index 28ffeda7d01a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference.py +++ /dev/null @@ -1,48 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class LinkedServiceReference(Model): - """Linked service reference type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Linked service reference type. Default value: - "LinkedServiceReference" . - :vartype type: str - :param reference_name: Required. Reference LinkedService name. - :type reference_name: str - :param parameters: Arguments for LinkedService. 
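[Editor's note] The large `_subtype_map` above is what drives polymorphic deserialization: the service returns a `type` discriminator and msrest selects the matching concrete model. On the authoring side you construct a concrete sub-class and point at the deployed service by name with the LinkedServiceReference defined here; a sketch with illustrative names and connection string:

    from azure.mgmt.datafactory.models import (
        AzureStorageLinkedService,
        LinkedServiceReference,
        SecureString,
    )

    # Concrete sub-class; its 'type' discriminator ('AzureStorage') is fixed
    # by the class, matching the _subtype_map entry above.
    ls = AzureStorageLinkedService(
        connection_string=SecureString(value="DefaultEndpointsProtocol=https;..."),
    )

    # Datasets and activities refer to the deployed linked service by name;
    # 'parameters' supplies arguments if the linked service is parameterized.
    ref = LinkedServiceReference(
        reference_name="MyStorageLS",
        parameters={"container": "raw"},
    )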
- :type parameters: dict[str, object] - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, - } - - type = "LinkedServiceReference" - - def __init__(self, **kwargs): - super(LinkedServiceReference, self).__init__(**kwargs) - self.reference_name = kwargs.get('reference_name', None) - self.parameters = kwargs.get('parameters', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference_py3.py deleted file mode 100644 index b6238130bdb6..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference_py3.py +++ /dev/null @@ -1,48 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class LinkedServiceReference(Model): - """Linked service reference type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Linked service reference type. Default value: - "LinkedServiceReference" . - :vartype type: str - :param reference_name: Required. Reference LinkedService name. - :type reference_name: str - :param parameters: Arguments for LinkedService. - :type parameters: dict[str, object] - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, - } - - type = "LinkedServiceReference" - - def __init__(self, *, reference_name: str, parameters=None, **kwargs) -> None: - super(LinkedServiceReference, self).__init__(**kwargs) - self.reference_name = reference_name - self.parameters = parameters diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource.py deleted file mode 100644 index 75828718f589..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource.py +++ /dev/null @@ -1,53 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from .sub_resource import SubResource - - -class LinkedServiceResource(SubResource): - """Linked service resource type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Properties of linked service. - :type properties: ~azure.mgmt.datafactory.models.LinkedService - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'LinkedService'}, - } - - def __init__(self, **kwargs): - super(LinkedServiceResource, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource_paged.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource_paged.py deleted file mode 100644 index af0a57170e56..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource_paged.py +++ /dev/null @@ -1,27 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.paging import Paged - - -class LinkedServiceResourcePaged(Paged): - """ - A paging container for iterating over a list of :class:`LinkedServiceResource ` object - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'current_page': {'key': 'value', 'type': '[LinkedServiceResource]'} - } - - def __init__(self, *args, **kwargs): - - super(LinkedServiceResourcePaged, self).__init__(*args, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource_py3.py deleted file mode 100644 index 1fa964b51f57..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource_py3.py +++ /dev/null @@ -1,53 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
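[Editor's note] LinkedServiceResource is the ARM envelope around the LinkedService payload, and LinkedServiceResourcePaged (above) is the iterable returned by list operations. A sketch of both in use, reusing the `ls` object from the earlier sketch; the operations-client calls are assumed from this package's generated client and the resource names are placeholders:

    # client: an authenticated DataFactoryManagementClient.
    # create_or_update takes the LinkedService properties and wraps them in
    # the LinkedServiceResource envelope server-side.
    client.linked_services.create_or_update(
        "my-rg", "my-factory", "MyStorageLS", ls)

    # list_by_factory returns the LinkedServiceResourcePaged container;
    # iterating it handles nextLink paging transparently.
    for resource in client.linked_services.list_by_factory("my-rg", "my-factory"):
        print(resource.name, resource.properties.type)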
-# -------------------------------------------------------------------------- - -from .sub_resource_py3 import SubResource - - -class LinkedServiceResource(SubResource): - """Linked service resource type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Properties of linked service. - :type properties: ~azure.mgmt.datafactory.models.LinkedService - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'LinkedService'}, - } - - def __init__(self, *, properties, **kwargs) -> None: - super(LinkedServiceResource, self).__init__(**kwargs) - self.properties = properties diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings.py deleted file mode 100644 index 81b4e7ca619e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class LogStorageSettings(Model): - """Log storage settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param linked_service_name: Required. Log storage linked service - reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param path: The path to storage for storing detailed logs of activity - execution. Type: string (or Expression with resultType string). 
- :type path: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'path': {'key': 'path', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(LogStorageSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.linked_service_name = kwargs.get('linked_service_name', None) - self.path = kwargs.get('path', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings_py3.py deleted file mode 100644 index 4850b7adacdf..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings_py3.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class LogStorageSettings(Model): - """Log storage settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param linked_service_name: Required. Log storage linked service - reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param path: The path to storage for storing detailed logs of activity - execution. Type: string (or Expression with resultType string). - :type path: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'path': {'key': 'path', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, path=None, **kwargs) -> None: - super(LogStorageSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.linked_service_name = linked_service_name - self.path = path diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity.py deleted file mode 100644 index 62584b2f704a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity.py +++ /dev/null @@ -1,74 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
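[Editor's note] A minimal sketch of the LogStorageSettings model above: logs are written to the store behind the referenced linked service, with 'path' narrowing them to a folder. Which activities accept these settings depends on the API version, so the attach point is not shown; all names are illustrative:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        LogStorageSettings,
    )

    log_settings = LogStorageSettings(
        linked_service_name=LinkedServiceReference(reference_name="LogStore"),
        path="logs/custom-activity",
    )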
-# -------------------------------------------------------------------------- - -from .execution_activity import ExecutionActivity - - -class LookupActivity(ExecutionActivity): - """Lookup activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param source: Required. Dataset-specific source properties, same as copy - activity source. - :type source: ~azure.mgmt.datafactory.models.CopySource - :param dataset: Required. Lookup activity dataset reference. - :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - :param first_row_only: Whether to return first row or all rows. Default - value is true. Type: boolean (or Expression with resultType boolean). - :type first_row_only: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'source': {'required': True}, - 'dataset': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, - 'first_row_only': {'key': 'typeProperties.firstRowOnly', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(LookupActivity, self).__init__(**kwargs) - self.source = kwargs.get('source', None) - self.dataset = kwargs.get('dataset', None) - self.first_row_only = kwargs.get('first_row_only', None) - self.type = 'Lookup' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity_py3.py deleted file mode 100644 index 41061675ebbe..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity_py3.py +++ /dev/null @@ -1,74 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
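# ===========================================================================
# [Editorial example - not part of the patch] A sketch of the LookupActivity
# deleted above, using the required name/source/dataset parameters from its
# generated constructor. BlobSource and the reference names are illustrative
# stand-ins; any CopySource subtype and dataset reference would do.
# ===========================================================================
from azure.mgmt.datafactory.models import BlobSource, DatasetReference, LookupActivity

lookup = LookupActivity(
    name="LookupConfigRow",                                    # required
    source=BlobSource(),               # required; same shape as a copy activity source
    dataset=DatasetReference(reference_name="ConfigDataset"),  # required
    first_row_only=True,               # default is true; False returns all rows
)
# ===========================================================================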
-# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class LookupActivity(ExecutionActivity): - """Lookup activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param source: Required. Dataset-specific source properties, same as copy - activity source. - :type source: ~azure.mgmt.datafactory.models.CopySource - :param dataset: Required. Lookup activity dataset reference. - :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - :param first_row_only: Whether to return first row or all rows. Default - value is true. Type: boolean (or Expression with resultType boolean). - :type first_row_only: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'source': {'required': True}, - 'dataset': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, - 'first_row_only': {'key': 'typeProperties.firstRowOnly', 'type': 'object'}, - } - - def __init__(self, *, name: str, source, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, first_row_only=None, **kwargs) -> None: - super(LookupActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.source = source - self.dataset = dataset - self.first_row_only = first_row_only - self.type = 'Lookup' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service.py deleted file mode 100644 index 9d65437b5daa..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service.py +++ /dev/null @@ -1,85 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) 
Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class MagentoLinkedService(LinkedService): - """Magento server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The URL of the Magento instance. (i.e. - 192.168.222.110/magento3) - :type host: object - :param access_token: The access token from Magento. - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MagentoLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.access_token = kwargs.get('access_token', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Magento' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service_py3.py deleted file mode 100644 index 74de1573118b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service_py3.py +++ /dev/null @@ -1,85 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class MagentoLinkedService(LinkedService): - """Magento server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The URL of the Magento instance. (i.e. - 192.168.222.110/magento3) - :type host: object - :param access_token: The access token from Magento. 
- :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(MagentoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.access_token = access_token - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'Magento' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset.py deleted file mode 100644 index ad540093ca55..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
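# ===========================================================================
# [Editorial example - not part of the patch] A sketch of the
# MagentoLinkedService removed above. Only `host` is required; the access
# token is any SecretBase subtype (SecureString here, assumed from the same
# models package). The host and token values are placeholders.
# ===========================================================================
from azure.mgmt.datafactory.models import MagentoLinkedService, SecureString

magento_ls = MagentoLinkedService(
    host="192.168.222.110/magento3",                    # required
    access_token=SecureString(value="<access-token>"),  # SecretBase subtype
    use_encrypted_endpoints=True,                       # defaults to true server-side
)
# ===========================================================================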
-# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class MagentoObjectDataset(Dataset): - """Magento server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MagentoObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'MagentoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset_py3.py deleted file mode 100644 index 481732bb688a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class MagentoObjectDataset(Dataset): - """Magento server dataset. 
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(MagentoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'MagentoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source.py deleted file mode 100644 index df49fe63a544..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
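# ===========================================================================
# [Editorial example - not part of the patch] A sketch of the
# MagentoObjectDataset removed above: `linked_service_name` is the only
# required parameter besides the server-filled type constant. The reference
# and table names are hypothetical.
# ===========================================================================
from azure.mgmt.datafactory.models import LinkedServiceReference, MagentoObjectDataset

magento_ds = MagentoObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name="MagentoLS"),  # required
    table_name="sales_order",  # Type: string (or Expression with resultType string)
)
# ===========================================================================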
-# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class MagentoSource(CopySource): - """A copy activity Magento server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MagentoSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'MagentoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source_py3.py deleted file mode 100644 index 15efcc12a054..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class MagentoSource(CopySource): - """A copy activity Magento server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). 
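# ===========================================================================
# [Editorial example - not part of the patch] A sketch of the MagentoSource
# removed above. Every parameter is optional; `source_retry_wait` follows the
# timespan pattern documented in the deleted docstring. The query text is a
# placeholder.
# ===========================================================================
from azure.mgmt.datafactory.models import MagentoSource

magento_src = MagentoSource(
    query="SELECT * FROM sales_order",  # Type: string (or Expression with resultType string)
    source_retry_count=3,
    source_retry_wait="00:00:30",       # d.hh:mm:ss-style timespan
)
# ===========================================================================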
- :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'MagentoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime.py deleted file mode 100644 index 9cbc9e94e7c3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime.py +++ /dev/null @@ -1,65 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .integration_runtime import IntegrationRuntime - - -class ManagedIntegrationRuntime(IntegrationRuntime): - """Managed integration runtime, including managed elastic and managed - dedicated integration runtimes. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Integration runtime description. - :type description: str - :param type: Required. Constant filled by server. - :type type: str - :ivar state: Integration runtime state, only valid for managed dedicated - integration runtime. Possible values include: 'Initial', 'Stopped', - 'Started', 'Starting', 'Stopping', 'NeedRegistration', 'Online', - 'Limited', 'Offline', 'AccessDenied' - :vartype state: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeState - :param compute_properties: The compute resource for managed integration - runtime. - :type compute_properties: - ~azure.mgmt.datafactory.models.IntegrationRuntimeComputeProperties - :param ssis_properties: SSIS properties for managed integration runtime. 
- :type ssis_properties: - ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisProperties - """ - - _validation = { - 'type': {'required': True}, - 'state': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'state': {'key': 'state', 'type': 'str'}, - 'compute_properties': {'key': 'typeProperties.computeProperties', 'type': 'IntegrationRuntimeComputeProperties'}, - 'ssis_properties': {'key': 'typeProperties.ssisProperties', 'type': 'IntegrationRuntimeSsisProperties'}, - } - - def __init__(self, **kwargs): - super(ManagedIntegrationRuntime, self).__init__(**kwargs) - self.state = None - self.compute_properties = kwargs.get('compute_properties', None) - self.ssis_properties = kwargs.get('ssis_properties', None) - self.type = 'Managed' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error.py deleted file mode 100644 index c70323697fdf..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error.py +++ /dev/null @@ -1,55 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class ManagedIntegrationRuntimeError(Model): - """Error definition for managed integration runtime. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar time: The time when the error occurred. - :vartype time: datetime - :ivar code: Error code. - :vartype code: str - :ivar parameters: Managed integration runtime error parameters. - :vartype parameters: list[str] - :ivar message: Error message. 
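# ===========================================================================
# [Editorial example - not part of the patch] A sketch of the
# ManagedIntegrationRuntime removed above. The compute property names
# (location, node_size, number_of_nodes) come from the SDK's
# IntegrationRuntimeComputeProperties model, which sits outside this hunk,
# so treat them as assumptions; `state` is read-only and server-populated.
# ===========================================================================
from azure.mgmt.datafactory.models import (
    IntegrationRuntimeComputeProperties,
    ManagedIntegrationRuntime,
)

managed_ir = ManagedIntegrationRuntime(
    description="Azure-SSIS integration runtime",
    compute_properties=IntegrationRuntimeComputeProperties(
        location="WestUS",
        node_size="Standard_D2_v3",
        number_of_nodes=2,
    ),
)
# ===========================================================================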
- :vartype message: str - """ - - _validation = { - 'time': {'readonly': True}, - 'code': {'readonly': True}, - 'parameters': {'readonly': True}, - 'message': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'time': {'key': 'time', 'type': 'iso-8601'}, - 'code': {'key': 'code', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '[str]'}, - 'message': {'key': 'message', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(ManagedIntegrationRuntimeError, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.time = None - self.code = None - self.parameters = None - self.message = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error_py3.py deleted file mode 100644 index 1668c5196537..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error_py3.py +++ /dev/null @@ -1,55 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class ManagedIntegrationRuntimeError(Model): - """Error definition for managed integration runtime. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar time: The time when the error occurred. - :vartype time: datetime - :ivar code: Error code. - :vartype code: str - :ivar parameters: Managed integration runtime error parameters. - :vartype parameters: list[str] - :ivar message: Error message. 
- :vartype message: str - """ - - _validation = { - 'time': {'readonly': True}, - 'code': {'readonly': True}, - 'parameters': {'readonly': True}, - 'message': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'time': {'key': 'time', 'type': 'iso-8601'}, - 'code': {'key': 'code', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '[str]'}, - 'message': {'key': 'message', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(ManagedIntegrationRuntimeError, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.time = None - self.code = None - self.parameters = None - self.message = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node.py deleted file mode 100644 index e9c0169cf6c5..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node.py +++ /dev/null @@ -1,52 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class ManagedIntegrationRuntimeNode(Model): - """Properties of integration runtime node. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar node_id: The managed integration runtime node id. - :vartype node_id: str - :ivar status: The managed integration runtime node status. Possible values - include: 'Starting', 'Available', 'Recycling', 'Unavailable' - :vartype status: str or - ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNodeStatus - :param errors: The errors that occurred on this integration runtime node. 
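# ===========================================================================
# [Editorial example - not part of the patch] The
# ManagedIntegrationRuntimeError model removed above is entirely
# server-populated (all fields read-only), so client code only reads it; a
# small formatting helper is the typical use.
# ===========================================================================
from azure.mgmt.datafactory.models import ManagedIntegrationRuntimeError

def describe_ir_error(error: ManagedIntegrationRuntimeError) -> str:
    # time, code, parameters and message are read-only on this model.
    return "[{0}] {1}: {2} (parameters: {3})".format(
        error.time, error.code, error.message, error.parameters
    )
# ===========================================================================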
- :type errors: - list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError] - """ - - _validation = { - 'node_id': {'readonly': True}, - 'status': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'node_id': {'key': 'nodeId', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'errors': {'key': 'errors', 'type': '[ManagedIntegrationRuntimeError]'}, - } - - def __init__(self, **kwargs): - super(ManagedIntegrationRuntimeNode, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.node_id = None - self.status = None - self.errors = kwargs.get('errors', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node_py3.py deleted file mode 100644 index 0e8104d0de05..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node_py3.py +++ /dev/null @@ -1,52 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class ManagedIntegrationRuntimeNode(Model): - """Properties of integration runtime node. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar node_id: The managed integration runtime node id. - :vartype node_id: str - :ivar status: The managed integration runtime node status. Possible values - include: 'Starting', 'Available', 'Recycling', 'Unavailable' - :vartype status: str or - ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNodeStatus - :param errors: The errors that occurred on this integration runtime node. 
- :type errors: - list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError] - """ - - _validation = { - 'node_id': {'readonly': True}, - 'status': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'node_id': {'key': 'nodeId', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'errors': {'key': 'errors', 'type': '[ManagedIntegrationRuntimeError]'}, - } - - def __init__(self, *, additional_properties=None, errors=None, **kwargs) -> None: - super(ManagedIntegrationRuntimeNode, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.node_id = None - self.status = None - self.errors = errors diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result.py deleted file mode 100644 index 2329f7a2ba36..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result.py +++ /dev/null @@ -1,65 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class ManagedIntegrationRuntimeOperationResult(Model): - """Properties of managed integration runtime operation result. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar type: The operation type. Could be start or stop. - :vartype type: str - :ivar start_time: The start time of the operation. - :vartype start_time: datetime - :ivar result: The operation result. - :vartype result: str - :ivar error_code: The error code. - :vartype error_code: str - :ivar parameters: Managed integration runtime error parameters. - :vartype parameters: list[str] - :ivar activity_id: The activity id for the operation request. 
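# ===========================================================================
# [Editorial example - not part of the patch] The
# ManagedIntegrationRuntimeNode removed above exposes a read-only `status`
# drawn from 'Starting', 'Available', 'Recycling', 'Unavailable'; a simple
# health check is the usual client-side consumption.
# ===========================================================================
from azure.mgmt.datafactory.models import ManagedIntegrationRuntimeNode

def node_is_usable(node: ManagedIntegrationRuntimeNode) -> bool:
    # node_id and status are read-only; errors holds any
    # ManagedIntegrationRuntimeError instances reported for the node.
    return node.status == "Available" and not node.errors
# ===========================================================================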
- :vartype activity_id: str - """ - - _validation = { - 'type': {'readonly': True}, - 'start_time': {'readonly': True}, - 'result': {'readonly': True}, - 'error_code': {'readonly': True}, - 'parameters': {'readonly': True}, - 'activity_id': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, - 'result': {'key': 'result', 'type': 'str'}, - 'error_code': {'key': 'errorCode', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '[str]'}, - 'activity_id': {'key': 'activityId', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(ManagedIntegrationRuntimeOperationResult, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = None - self.start_time = None - self.result = None - self.error_code = None - self.parameters = None - self.activity_id = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result_py3.py deleted file mode 100644 index 58a80c0e600e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result_py3.py +++ /dev/null @@ -1,65 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class ManagedIntegrationRuntimeOperationResult(Model): - """Properties of managed integration runtime operation result. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar type: The operation type. Could be start or stop. - :vartype type: str - :ivar start_time: The start time of the operation. - :vartype start_time: datetime - :ivar result: The operation result. - :vartype result: str - :ivar error_code: The error code. - :vartype error_code: str - :ivar parameters: Managed integration runtime error parameters. - :vartype parameters: list[str] - :ivar activity_id: The activity id for the operation request. 
- :vartype activity_id: str - """ - - _validation = { - 'type': {'readonly': True}, - 'start_time': {'readonly': True}, - 'result': {'readonly': True}, - 'error_code': {'readonly': True}, - 'parameters': {'readonly': True}, - 'activity_id': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, - 'result': {'key': 'result', 'type': 'str'}, - 'error_code': {'key': 'errorCode', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '[str]'}, - 'activity_id': {'key': 'activityId', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(ManagedIntegrationRuntimeOperationResult, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type = None - self.start_time = None - self.result = None - self.error_code = None - self.parameters = None - self.activity_id = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_py3.py deleted file mode 100644 index 0e71d8b09f4e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_py3.py +++ /dev/null @@ -1,65 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .integration_runtime_py3 import IntegrationRuntime - - -class ManagedIntegrationRuntime(IntegrationRuntime): - """Managed integration runtime, including managed elastic and managed - dedicated integration runtimes. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Integration runtime description. - :type description: str - :param type: Required. Constant filled by server. - :type type: str - :ivar state: Integration runtime state, only valid for managed dedicated - integration runtime. Possible values include: 'Initial', 'Stopped', - 'Started', 'Starting', 'Stopping', 'NeedRegistration', 'Online', - 'Limited', 'Offline', 'AccessDenied' - :vartype state: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeState - :param compute_properties: The compute resource for managed integration - runtime. - :type compute_properties: - ~azure.mgmt.datafactory.models.IntegrationRuntimeComputeProperties - :param ssis_properties: SSIS properties for managed integration runtime. 
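# ===========================================================================
# [Editorial example - not part of the patch] Every field on the
# ManagedIntegrationRuntimeOperationResult removed above is read-only, so a
# summarizer like the one below is about all client code does with it.
# ===========================================================================
from azure.mgmt.datafactory.models import ManagedIntegrationRuntimeOperationResult

def summarize_ir_operation(result: ManagedIntegrationRuntimeOperationResult) -> str:
    # type is 'start' or 'stop'; result, error_code and activity_id are
    # populated by the service alongside start_time.
    return "{0} at {1}: {2} (error code: {3}, activity: {4})".format(
        result.type, result.start_time, result.result,
        result.error_code, result.activity_id,
    )
# ===========================================================================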
- :type ssis_properties: - ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisProperties - """ - - _validation = { - 'type': {'required': True}, - 'state': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'state': {'key': 'state', 'type': 'str'}, - 'compute_properties': {'key': 'typeProperties.computeProperties', 'type': 'IntegrationRuntimeComputeProperties'}, - 'ssis_properties': {'key': 'typeProperties.ssisProperties', 'type': 'IntegrationRuntimeSsisProperties'}, - } - - def __init__(self, *, additional_properties=None, description: str=None, compute_properties=None, ssis_properties=None, **kwargs) -> None: - super(ManagedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description, **kwargs) - self.state = None - self.compute_properties = compute_properties - self.ssis_properties = ssis_properties - self.type = 'Managed' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status.py deleted file mode 100644 index 17d21775f09f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status.py +++ /dev/null @@ -1,78 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .integration_runtime_status import IntegrationRuntimeStatus - - -class ManagedIntegrationRuntimeStatus(IntegrationRuntimeStatus): - """Managed integration runtime status. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar data_factory_name: The data factory name which the integration - runtime belong to. - :vartype data_factory_name: str - :ivar state: The state of integration runtime. Possible values include: - 'Initial', 'Stopped', 'Started', 'Starting', 'Stopping', - 'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied' - :vartype state: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeState - :param type: Required. Constant filled by server. - :type type: str - :ivar create_time: The time at which the integration runtime was created, - in ISO8601 format. - :vartype create_time: datetime - :ivar nodes: The list of nodes for managed integration runtime. - :vartype nodes: - list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNode] - :ivar other_errors: The errors that occurred on this integration runtime. - :vartype other_errors: - list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError] - :ivar last_operation: The last operation result that occurred on this - integration runtime. 
- :vartype last_operation: - ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeOperationResult - """ - - _validation = { - 'data_factory_name': {'readonly': True}, - 'state': {'readonly': True}, - 'type': {'required': True}, - 'create_time': {'readonly': True}, - 'nodes': {'readonly': True}, - 'other_errors': {'readonly': True}, - 'last_operation': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, - 'state': {'key': 'state', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'create_time': {'key': 'typeProperties.createTime', 'type': 'iso-8601'}, - 'nodes': {'key': 'typeProperties.nodes', 'type': '[ManagedIntegrationRuntimeNode]'}, - 'other_errors': {'key': 'typeProperties.otherErrors', 'type': '[ManagedIntegrationRuntimeError]'}, - 'last_operation': {'key': 'typeProperties.lastOperation', 'type': 'ManagedIntegrationRuntimeOperationResult'}, - } - - def __init__(self, **kwargs): - super(ManagedIntegrationRuntimeStatus, self).__init__(**kwargs) - self.create_time = None - self.nodes = None - self.other_errors = None - self.last_operation = None - self.type = 'Managed' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status_py3.py deleted file mode 100644 index 03d9451045bd..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status_py3.py +++ /dev/null @@ -1,78 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .integration_runtime_status_py3 import IntegrationRuntimeStatus - - -class ManagedIntegrationRuntimeStatus(IntegrationRuntimeStatus): - """Managed integration runtime status. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar data_factory_name: The data factory name which the integration - runtime belong to. - :vartype data_factory_name: str - :ivar state: The state of integration runtime. Possible values include: - 'Initial', 'Stopped', 'Started', 'Starting', 'Stopping', - 'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied' - :vartype state: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeState - :param type: Required. Constant filled by server. - :type type: str - :ivar create_time: The time at which the integration runtime was created, - in ISO8601 format. - :vartype create_time: datetime - :ivar nodes: The list of nodes for managed integration runtime. - :vartype nodes: - list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNode] - :ivar other_errors: The errors that occurred on this integration runtime. 
- :vartype other_errors: - list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError] - :ivar last_operation: The last operation result that occurred on this - integration runtime. - :vartype last_operation: - ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeOperationResult - """ - - _validation = { - 'data_factory_name': {'readonly': True}, - 'state': {'readonly': True}, - 'type': {'required': True}, - 'create_time': {'readonly': True}, - 'nodes': {'readonly': True}, - 'other_errors': {'readonly': True}, - 'last_operation': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, - 'state': {'key': 'state', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'create_time': {'key': 'typeProperties.createTime', 'type': 'iso-8601'}, - 'nodes': {'key': 'typeProperties.nodes', 'type': '[ManagedIntegrationRuntimeNode]'}, - 'other_errors': {'key': 'typeProperties.otherErrors', 'type': '[ManagedIntegrationRuntimeError]'}, - 'last_operation': {'key': 'typeProperties.lastOperation', 'type': 'ManagedIntegrationRuntimeOperationResult'}, - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(ManagedIntegrationRuntimeStatus, self).__init__(additional_properties=additional_properties, **kwargs) - self.create_time = None - self.nodes = None - self.other_errors = None - self.last_operation = None - self.type = 'Managed' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service.py deleted file mode 100644 index 3bbe048d4877..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class MariaDBLinkedService(LinkedService): - """MariaDB server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection - string. 
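# ===========================================================================
# [Editorial example - not part of the patch] Walking a
# ManagedIntegrationRuntimeStatus as removed above. All fields shown are
# read-only; such an object would typically come from an integration-runtimes
# get-status call on the management client (assumed here, not part of this
# patch).
# ===========================================================================
from azure.mgmt.datafactory.models import ManagedIntegrationRuntimeStatus

def report_managed_ir(status: ManagedIntegrationRuntimeStatus) -> None:
    print("state:", status.state, "created:", status.create_time)
    for node in status.nodes or []:        # read-only node list
        print("  node", node.node_id, "->", node.status)
    for err in status.other_errors or []:  # runtime-level errors
        print("  error", err.code, "-", err.message)
    if status.last_operation:              # result of the last start/stop
        print("  last op:", status.last_operation.type, status.last_operation.result)
# ===========================================================================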
- :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MariaDBLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.pwd = kwargs.get('pwd', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'MariaDB' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service_py3.py deleted file mode 100644 index 475284d56038..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service_py3.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class MariaDBLinkedService(LinkedService): - """MariaDB server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection - string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. 
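For the MariaDB linked service above, a minimal construction sketch. The keyword names (`connection_string`, `pwd`) come from the constructor in this diff; the Key Vault linked service name and secret name are illustrative, and the `AzureKeyVaultSecretReference` signature is an assumption about the same models package.

    from azure.mgmt.datafactory.models import (
        AzureKeyVaultSecretReference,
        LinkedServiceReference,
        MariaDBLinkedService,
    )

    # Keep the password out of the connection string and pull it from Key Vault.
    # 'AzureKeyVault1' and 'mariadb-pwd' are illustrative names.
    pwd = AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='AzureKeyVault1'),
        secret_name='mariadb-pwd',
    )

    mariadb_ls = MariaDBLinkedService(
        connection_string='Server=mariadb.example.com;Port=3306;Database=sales;UID=etl_reader;',
        pwd=pwd,
    )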
Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: - super(MariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.pwd = pwd - self.encrypted_credential = encrypted_credential - self.type = 'MariaDB' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source.py deleted file mode 100644 index a744c1c5ff8f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class MariaDBSource(CopySource): - """A copy activity MariaDB server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MariaDBSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'MariaDBSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source_py3.py deleted file mode 100644 index 472877b8f0bb..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class MariaDBSource(CopySource): - """A copy activity MariaDB server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
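A sketch of the copy source documented above; the query, retry, and concurrency values are illustrative, and the wait value follows the timespan pattern given in the docstring.

    from azure.mgmt.datafactory.models import MariaDBSource

    mariadb_source = MariaDBSource(
        query='SELECT id, amount FROM orders',  # string or Expression
        source_retry_count=3,
        source_retry_wait='00:00:30',           # matches the documented timespan pattern
        max_concurrent_connections=4,
    )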
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(MariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'MariaDBSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset.py deleted file mode 100644 index 66dc9c8ea9b7..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class MariaDBTableDataset(Dataset): - """MariaDB server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). 
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MariaDBTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'MariaDBTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset_py3.py deleted file mode 100644 index ac3c8cf2ea72..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class MariaDBTableDataset(Dataset): - """MariaDB server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). 
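And the matching dataset sketch; 'MariaDbLinkedService' is an illustrative reference name for a linked service of the kind constructed earlier.

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        MariaDBTableDataset,
    )

    orders_ds = MariaDBTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='MariaDbLinkedService'),
        table_name='orders',  # string or Expression
    )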
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(MariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'MariaDBTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service.py deleted file mode 100644 index 2a9e76446122..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service.py +++ /dev/null @@ -1,90 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class MarketoLinkedService(LinkedService): - """Marketo server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param endpoint: Required. The endpoint of the Marketo server. (i.e. - 123-ABC-321.mktorest.com) - :type endpoint: object - :param client_id: Required. The client Id of your Marketo service. - :type client_id: object - :param client_secret: The client secret of your Marketo service. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. 
The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MarketoLinkedService, self).__init__(**kwargs) - self.endpoint = kwargs.get('endpoint', None) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Marketo' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service_py3.py deleted file mode 100644 index dc326f24acd5..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service_py3.py +++ /dev/null @@ -1,90 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class MarketoLinkedService(LinkedService): - """Marketo server linked service. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param endpoint: Required. The endpoint of the Marketo server. (i.e. - 123-ABC-321.mktorest.com) - :type endpoint: object - :param client_id: Required. The client Id of your Marketo service. - :type client_id: object - :param client_secret: The client secret of your Marketo service. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
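A construction sketch for the Marketo linked service documented above, using the endpoint form from the docstring. `SecureString` is assumed to be the inline SecretBase implementation from the same models package, and the credential values are placeholders.

    from azure.mgmt.datafactory.models import MarketoLinkedService, SecureString

    marketo_ls = MarketoLinkedService(
        endpoint='123-ABC-321.mktorest.com',              # endpoint form from the docstring
        client_id='<marketo-client-id>',
        client_secret=SecureString(value='<marketo-client-secret>'),
        use_encrypted_endpoints=True,                     # documented default is true
    )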
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, endpoint, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(MarketoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.endpoint = endpoint - self.client_id = client_id - self.client_secret = client_secret - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'Marketo' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset.py deleted file mode 100644 index 63daa10047b9..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class MarketoObjectDataset(Dataset): - """Marketo server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. 
Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MarketoObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'MarketoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset_py3.py deleted file mode 100644 index 7179d5af53dd..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class MarketoObjectDataset(Dataset): - """Marketo server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(MarketoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'MarketoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source.py deleted file mode 100644 index 6d2061ef0dee..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class MarketoSource(CopySource): - """A copy activity Marketo server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MarketoSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'MarketoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source_py3.py deleted file mode 100644 index 573dc0439754..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class MarketoSource(CopySource): - """A copy activity Marketo server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
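Pairing the Marketo dataset and copy source documented above; the linked service reference name, object name, and query are illustrative.

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        MarketoObjectDataset,
        MarketoSource,
    )

    activities_ds = MarketoObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name='MarketoLinkedService'),
        table_name='Activities',
    )

    marketo_source = MarketoSource(query='SELECT * FROM Activities')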
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'MarketoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service.py deleted file mode 100644 index b53164f6266b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service.py +++ /dev/null @@ -1,86 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class MicrosoftAccessLinkedService(LinkedService): - """Microsoft Access linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The non-access credential portion of - the connection string as well as an optional encrypted credential. Type: - string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param authentication_type: Type of authentication used to connect to the - Microsoft Access as ODBC data store. Possible values are: Anonymous and - Basic. Type: string (or Expression with resultType string). - :type authentication_type: object - :param credential: The access credential portion of the connection string - specified in driver-specific property-value format. - :type credential: ~azure.mgmt.datafactory.models.SecretBase - :param user_name: User name for Basic authentication. Type: string (or - Expression with resultType string). 
- :type user_name: object - :param password: Password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MicrosoftAccessLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.credential = kwargs.get('credential', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'MicrosoftAccess' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service_py3.py deleted file mode 100644 index c9f79c24adf3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service_py3.py +++ /dev/null @@ -1,86 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class MicrosoftAccessLinkedService(LinkedService): - """Microsoft Access linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
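A sketch for the Microsoft Access linked service above. Access is reached as an ODBC data store, so `connect_via` would normally point at a self-hosted integration runtime; that runtime name, the Dbq path, and the credentials are all illustrative.

    from azure.mgmt.datafactory.models import (
        IntegrationRuntimeReference,
        MicrosoftAccessLinkedService,
        SecureString,
    )

    access_ls = MicrosoftAccessLinkedService(
        connection_string=(
            'Driver={Microsoft Access Driver (*.mdb, *.accdb)};'
            'Dbq=C:\\data\\northwind.accdb;'
        ),
        authentication_type='Basic',        # 'Anonymous' or 'Basic' per the docstring
        user_name='odbc_user',
        password=SecureString(value='<password>'),
        connect_via=IntegrationRuntimeReference(reference_name='SelfHostedIr'),
    )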
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The non-access credential portion of - the connection string as well as an optional encrypted credential. Type: - string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param authentication_type: Type of authentication used to connect to the - Microsoft Access as ODBC data store. Possible values are: Anonymous and - Basic. Type: string (or Expression with resultType string). - :type authentication_type: object - :param credential: The access credential portion of the connection string - specified in driver-specific property-value format. - :type credential: ~azure.mgmt.datafactory.models.SecretBase - :param user_name: User name for Basic authentication. Type: string (or - Expression with resultType string). - :type user_name: object - :param password: Password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, credential=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(MicrosoftAccessLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.authentication_type = authentication_type - self.credential = credential - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'MicrosoftAccess' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink.py deleted file mode 100644 index 53406fa25022..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink.py +++ 
/dev/null @@ -1,66 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink import CopySink - - -class MicrosoftAccessSink(CopySink): - """A copy activity Microsoft Access sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param pre_copy_script: A query to execute before starting the copy. Type: - string (or Expression with resultType string). - :type pre_copy_script: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MicrosoftAccessSink, self).__init__(**kwargs) - self.pre_copy_script = kwargs.get('pre_copy_script', None) - self.type = 'MicrosoftAccessSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink_py3.py deleted file mode 100644 index 700db840c03d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink_py3.py +++ /dev/null @@ -1,66 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. 
-# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class MicrosoftAccessSink(CopySink): - """A copy activity Microsoft Access sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param pre_copy_script: A query to execute before starting the copy. Type: - string (or Expression with resultType string). - :type pre_copy_script: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: - super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.pre_copy_script = pre_copy_script - self.type = 'MicrosoftAccessSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source.py deleted file mode 100644 index 73cd3a64184c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
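For the sink documented above, a minimal sketch; the pre-copy script and batch settings are illustrative, with the timeout following the documented timespan pattern.

    from azure.mgmt.datafactory.models import MicrosoftAccessSink

    access_sink = MicrosoftAccessSink(
        pre_copy_script='DELETE FROM Staging',  # runs once before the copy begins
        write_batch_size=1000,
        write_batch_timeout='00:05:00',         # matches the documented timespan pattern
    )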
-# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class MicrosoftAccessSource(CopySource): - """A copy activity source for Microsoft Access. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MicrosoftAccessSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'MicrosoftAccessSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source_py3.py deleted file mode 100644 index 1cccd82c8b19..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class MicrosoftAccessSource(CopySource): - """A copy activity source for Microsoft Access. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
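And a sketch of the corresponding source wired into a copy activity. The `CopyActivity` and `DatasetReference` signatures are assumptions about the same models package, and both dataset reference names are placeholders for datasets defined elsewhere in the factory.

    from azure.mgmt.datafactory.models import (
        CopyActivity,
        DatasetReference,
        MicrosoftAccessSink,
        MicrosoftAccessSource,
    )

    copy_customers = CopyActivity(
        name='CopyCustomers',
        inputs=[DatasetReference(reference_name='SourceAccessTable')],
        outputs=[DatasetReference(reference_name='TargetAccessTable')],
        source=MicrosoftAccessSource(query='SELECT * FROM Customers'),
        sink=MicrosoftAccessSink(pre_copy_script='DELETE FROM Customers'),
    )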
- :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(MicrosoftAccessSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'MicrosoftAccessSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset.py deleted file mode 100644 index f312dae024f5..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class MicrosoftAccessTableDataset(Dataset): - """The Microsoft Access table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. 
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The Microsoft Access table name. Type: string (or - Expression with resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MicrosoftAccessTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'MicrosoftAccessTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset_py3.py deleted file mode 100644 index 3fad904ef58b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class MicrosoftAccessTableDataset(Dataset): - """The Microsoft Access table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. 
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset.py
deleted file mode 100644
index 796c5e14eaca..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .dataset import Dataset
-
-
-class MongoDbCollectionDataset(Dataset):
-    """The MongoDB database dataset.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param description: Dataset description.
-    :type description: str
-    :param structure: Columns that define the structure of the dataset. Type:
-     array (or Expression with resultType array), itemType: DatasetDataElement.
-    :type structure: object
-    :param schema: Columns that define the physical type schema of the
-     dataset. Type: array (or Expression with resultType array), itemType:
-     DatasetSchemaDataElement.
-    :type schema: object
-    :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param parameters: Parameters for dataset.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     Dataset.
-    :type annotations: list[object]
-    :param folder: The folder that this Dataset is in. If not specified,
-     Dataset will appear at the root level.
-    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param collection_name: Required. The table name of the MongoDB database.
-     Type: string (or Expression with resultType string).
-    :type collection_name: object
-    """
-
-    _validation = {
-        'linked_service_name': {'required': True},
-        'type': {'required': True},
-        'collection_name': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'description': {'key': 'description', 'type': 'str'},
-        'structure': {'key': 'structure', 'type': 'object'},
-        'schema': {'key': 'schema', 'type': 'object'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
-        'type': {'key': 'type', 'type': 'str'},
-        'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(MongoDbCollectionDataset, self).__init__(**kwargs)
-        self.collection_name = kwargs.get('collection_name', None)
-        self.type = 'MongoDbCollection'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset_py3.py
deleted file mode 100644
index 68fe2affb0e4..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset_py3.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .dataset_py3 import Dataset
-
-
-class MongoDbCollectionDataset(Dataset):
-    """The MongoDB database dataset.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param description: Dataset description.
-    :type description: str
-    :param structure: Columns that define the structure of the dataset. Type:
-     array (or Expression with resultType array), itemType: DatasetDataElement.
-    :type structure: object
-    :param schema: Columns that define the physical type schema of the
-     dataset. Type: array (or Expression with resultType array), itemType:
-     DatasetSchemaDataElement.
-    :type schema: object
-    :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param parameters: Parameters for dataset.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     Dataset.
-    :type annotations: list[object]
-    :param folder: The folder that this Dataset is in. If not specified,
-     Dataset will appear at the root level.
-    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param collection_name: Required. The table name of the MongoDB database.
-     Type: string (or Expression with resultType string).
-    :type collection_name: object
-    """
-
-    _validation = {
-        'linked_service_name': {'required': True},
-        'type': {'required': True},
-        'collection_name': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'description': {'key': 'description', 'type': 'str'},
-        'structure': {'key': 'structure', 'type': 'object'},
-        'schema': {'key': 'schema', 'type': 'object'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
-        'type': {'key': 'type', 'type': 'str'},
-        'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'},
-    }
-
-    def __init__(self, *, linked_service_name, collection_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None:
-        super(MongoDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
-        self.collection_name = collection_name
-        self.type = 'MongoDbCollection'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties.py
deleted file mode 100644
index a2d2127d1397..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class MongoDbCursorMethodsProperties(Model):
-    """Cursor methods for MongoDB query.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param project: Specifies the fields to return in the documents that match
-     the query filter. To return all fields in the matching documents, omit
-     this parameter. Type: string (or Expression with resultType string).
-    :type project: object
-    :param sort: Specifies the order in which the query returns matching
-     documents. Type: string (or Expression with resultType string).
-    :type sort: object
-    :param skip: Specifies how many documents to skip and where MongoDB
-     begins returning results. This approach may be useful in implementing
-     paginated results. Type: integer (or Expression with resultType integer).
-    :type skip: object
-    :param limit: Specifies the maximum number of documents the server
-     returns. limit() is analogous to the LIMIT statement in a SQL database.
-     Type: integer (or Expression with resultType integer).
-    :type limit: object
-    """
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'project': {'key': 'project', 'type': 'object'},
-        'sort': {'key': 'sort', 'type': 'object'},
-        'skip': {'key': 'skip', 'type': 'object'},
-        'limit': {'key': 'limit', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(MongoDbCursorMethodsProperties, self).__init__(**kwargs)
-        self.additional_properties = kwargs.get('additional_properties', None)
-        self.project = kwargs.get('project', None)
-        self.sort = kwargs.get('sort', None)
-        self.skip = kwargs.get('skip', None)
-        self.limit = kwargs.get('limit', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties_py3.py
deleted file mode 100644
index e1e3f50d1539..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties_py3.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class MongoDbCursorMethodsProperties(Model):
-    """Cursor methods for MongoDB query.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param project: Specifies the fields to return in the documents that match
-     the query filter. To return all fields in the matching documents, omit
-     this parameter. Type: string (or Expression with resultType string).
-    :type project: object
-    :param sort: Specifies the order in which the query returns matching
-     documents. Type: string (or Expression with resultType string).
-    :type sort: object
-    :param skip: Specifies how many documents to skip and where MongoDB
-     begins returning results. This approach may be useful in implementing
-     paginated results. Type: integer (or Expression with resultType integer).
-    :type skip: object
-    :param limit: Specifies the maximum number of documents the server
-     returns. limit() is analogous to the LIMIT statement in a SQL database.
-     Type: integer (or Expression with resultType integer).
-    :type limit: object
-    """
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'project': {'key': 'project', 'type': 'object'},
-        'sort': {'key': 'sort', 'type': 'object'},
-        'skip': {'key': 'skip', 'type': 'object'},
-        'limit': {'key': 'limit', 'type': 'object'},
-    }
-
-    def __init__(self, *, additional_properties=None, project=None, sort=None, skip=None, limit=None, **kwargs) -> None:
-        super(MongoDbCursorMethodsProperties, self).__init__(**kwargs)
-        self.additional_properties = additional_properties
-        self.project = project
-        self.sort = sort
-        self.skip = skip
-        self.limit = limit
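A sketch of the cursor-methods helper above, with illustrative values; the
string fields carry raw MongoDB JSON fragments, mirroring cursor.skip(),
cursor.limit() and friends:

    from azure.mgmt.datafactory.models import MongoDbCursorMethodsProperties

    cursor_methods = MongoDbCursorMethodsProperties(
        project='{"name": 1, "price": 1, "_id": 0}',  # fields to return
        sort='{"price": -1}',                         # sort order
        skip=100,                                     # documents to skip
        limit=50,                                     # max documents returned
    )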
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service.py
deleted file mode 100644
index 76d162b0ff70..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service.py
+++ /dev/null
@@ -1,109 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .linked_service import LinkedService
-
-
-class MongoDbLinkedService(LinkedService):
-    """Linked service for MongoDB data source.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param server: Required. The IP address or server name of the MongoDB
-     server. Type: string (or Expression with resultType string).
-    :type server: object
-    :param authentication_type: The authentication type to be used to connect
-     to the MongoDB database. Possible values include: 'Basic', 'Anonymous'
-    :type authentication_type: str or
-     ~azure.mgmt.datafactory.models.MongoDbAuthenticationType
-    :param database_name: Required. The name of the MongoDB database that you
-     want to access. Type: string (or Expression with resultType string).
-    :type database_name: object
-    :param username: Username for authentication. Type: string (or Expression
-     with resultType string).
-    :type username: object
-    :param password: Password for authentication.
-    :type password: ~azure.mgmt.datafactory.models.SecretBase
-    :param auth_source: Database to verify the username and password. Type:
-     string (or Expression with resultType string).
-    :type auth_source: object
-    :param port: The TCP port number that the MongoDB server uses to listen
-     for client connections. The default value is 27017. Type: integer (or
-     Expression with resultType integer), minimum: 0.
-    :type port: object
-    :param enable_ssl: Specifies whether the connections to the server are
-     encrypted using SSL. The default value is false. Type: boolean (or
-     Expression with resultType boolean).
-    :type enable_ssl: object
-    :param allow_self_signed_server_cert: Specifies whether to allow
-     self-signed certificates from the server. The default value is false.
-     Type: boolean (or Expression with resultType boolean).
-    :type allow_self_signed_server_cert: object
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
-    :type encrypted_credential: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-        'server': {'required': True},
-        'database_name': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
-        'description': {'key': 'description', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'server': {'key': 'typeProperties.server', 'type': 'object'},
-        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
-        'database_name': {'key': 'typeProperties.databaseName', 'type': 'object'},
-        'username': {'key': 'typeProperties.username', 'type': 'object'},
-        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
-        'auth_source': {'key': 'typeProperties.authSource', 'type': 'object'},
-        'port': {'key': 'typeProperties.port', 'type': 'object'},
-        'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'},
-        'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'},
-        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(MongoDbLinkedService, self).__init__(**kwargs)
-        self.server = kwargs.get('server', None)
-        self.authentication_type = kwargs.get('authentication_type', None)
-        self.database_name = kwargs.get('database_name', None)
-        self.username = kwargs.get('username', None)
-        self.password = kwargs.get('password', None)
-        self.auth_source = kwargs.get('auth_source', None)
-        self.port = kwargs.get('port', None)
-        self.enable_ssl = kwargs.get('enable_ssl', None)
-        self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None)
-        self.encrypted_credential = kwargs.get('encrypted_credential', None)
-        self.type = 'MongoDb'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service_py3.py
deleted file mode 100644
index 95308b6ea8f0..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service_py3.py
+++ /dev/null
@@ -1,109 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .linked_service_py3 import LinkedService
-
-
-class MongoDbLinkedService(LinkedService):
-    """Linked service for MongoDB data source.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param server: Required. The IP address or server name of the MongoDB
-     server. Type: string (or Expression with resultType string).
-    :type server: object
-    :param authentication_type: The authentication type to be used to connect
-     to the MongoDB database. Possible values include: 'Basic', 'Anonymous'
-    :type authentication_type: str or
-     ~azure.mgmt.datafactory.models.MongoDbAuthenticationType
-    :param database_name: Required. The name of the MongoDB database that you
-     want to access. Type: string (or Expression with resultType string).
-    :type database_name: object
-    :param username: Username for authentication. Type: string (or Expression
-     with resultType string).
-    :type username: object
-    :param password: Password for authentication.
-    :type password: ~azure.mgmt.datafactory.models.SecretBase
-    :param auth_source: Database to verify the username and password. Type:
-     string (or Expression with resultType string).
-    :type auth_source: object
-    :param port: The TCP port number that the MongoDB server uses to listen
-     for client connections. The default value is 27017. Type: integer (or
-     Expression with resultType integer), minimum: 0.
-    :type port: object
-    :param enable_ssl: Specifies whether the connections to the server are
-     encrypted using SSL. The default value is false. Type: boolean (or
-     Expression with resultType boolean).
-    :type enable_ssl: object
-    :param allow_self_signed_server_cert: Specifies whether to allow
-     self-signed certificates from the server. The default value is false.
-     Type: boolean (or Expression with resultType boolean).
-    :type allow_self_signed_server_cert: object
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
-    :type encrypted_credential: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-        'server': {'required': True},
-        'database_name': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
-        'description': {'key': 'description', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'server': {'key': 'typeProperties.server', 'type': 'object'},
-        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
-        'database_name': {'key': 'typeProperties.databaseName', 'type': 'object'},
-        'username': {'key': 'typeProperties.username', 'type': 'object'},
-        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
-        'auth_source': {'key': 'typeProperties.authSource', 'type': 'object'},
-        'port': {'key': 'typeProperties.port', 'type': 'object'},
-        'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'},
-        'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'},
-        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
-    }
-
-    def __init__(self, *, server, database_name, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, username=None, password=None, auth_source=None, port=None, enable_ssl=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None:
-        super(MongoDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
-        self.server = server
-        self.authentication_type = authentication_type
-        self.database_name = database_name
-        self.username = username
-        self.password = password
-        self.auth_source = auth_source
-        self.port = port
-        self.enable_ssl = enable_ssl
-        self.allow_self_signed_server_cert = allow_self_signed_server_cert
-        self.encrypted_credential = encrypted_credential
-        self.type = 'MongoDb'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source.py
deleted file mode 100644
index 3da4b931f5e5..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .copy_source import CopySource
-
-
-class MongoDbSource(CopySource):
-    """A copy activity source for a MongoDB database.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param source_retry_count: Source retry count. Type: integer (or
-     Expression with resultType integer).
-    :type source_retry_count: object
-    :param source_retry_wait: Source retry wait. Type: string (or Expression
-     with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type source_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the source data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param query: Database query. Should be a SQL-92 query expression. Type:
-     string (or Expression with resultType string).
-    :type query: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
-        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'query': {'key': 'query', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(MongoDbSource, self).__init__(**kwargs)
-        self.query = kwargs.get('query', None)
-        self.type = 'MongoDbSource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source_py3.py
deleted file mode 100644
index ab3e5b6e0cc9..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source_py3.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .copy_source_py3 import CopySource
-
-
-class MongoDbSource(CopySource):
-    """A copy activity source for a MongoDB database.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param source_retry_count: Source retry count. Type: integer (or
-     Expression with resultType integer).
-    :type source_retry_count: object
-    :param source_retry_wait: Source retry wait. Type: string (or Expression
-     with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type source_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the source data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param query: Database query. Should be a SQL-92 query expression. Type:
-     string (or Expression with resultType string).
-    :type query: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
-        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'query': {'key': 'query', 'type': 'object'},
-    }
-
-    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
-        super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
-        self.query = query
-        self.type = 'MongoDbSource'
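A sketch combining the two MongoDB v1 models above, with hypothetical
connection details; SecureString is the sibling secret model from the same
models namespace:

    from azure.mgmt.datafactory.models import (
        MongoDbLinkedService, MongoDbSource, SecureString)

    linked_service = MongoDbLinkedService(
        server='mongo.example.com',         # hypothetical host
        database_name='inventory',          # hypothetical database
        authentication_type='Basic',
        username='adf_reader',
        password=SecureString(value='<password>'),
        port=27017,
        enable_ssl=True,
    )

    # The v1 source takes a SQL-92 style query string, unlike the v2 source
    # later in this patch, which uses a MongoDB filter document instead.
    source = MongoDbSource(query='SELECT * FROM Orders')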
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset.py
deleted file mode 100644
index 17089373d4c5..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .dataset import Dataset
-
-
-class MongoDbV2CollectionDataset(Dataset):
-    """The MongoDB database dataset.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param description: Dataset description.
-    :type description: str
-    :param structure: Columns that define the structure of the dataset. Type:
-     array (or Expression with resultType array), itemType: DatasetDataElement.
-    :type structure: object
-    :param schema: Columns that define the physical type schema of the
-     dataset. Type: array (or Expression with resultType array), itemType:
-     DatasetSchemaDataElement.
-    :type schema: object
-    :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param parameters: Parameters for dataset.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     Dataset.
-    :type annotations: list[object]
-    :param folder: The folder that this Dataset is in. If not specified,
-     Dataset will appear at the root level.
-    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param collection: Required. The collection name of the MongoDB database.
-     Type: string (or Expression with resultType string).
-    :type collection: object
-    """
-
-    _validation = {
-        'linked_service_name': {'required': True},
-        'type': {'required': True},
-        'collection': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'description': {'key': 'description', 'type': 'str'},
-        'structure': {'key': 'structure', 'type': 'object'},
-        'schema': {'key': 'schema', 'type': 'object'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
-        'type': {'key': 'type', 'type': 'str'},
-        'collection': {'key': 'typeProperties.collection', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(MongoDbV2CollectionDataset, self).__init__(**kwargs)
-        self.collection = kwargs.get('collection', None)
-        self.type = 'MongoDbV2Collection'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset_py3.py
deleted file mode 100644
index ad1e5c538645..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset_py3.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .dataset_py3 import Dataset
-
-
-class MongoDbV2CollectionDataset(Dataset):
-    """The MongoDB database dataset.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param description: Dataset description.
-    :type description: str
-    :param structure: Columns that define the structure of the dataset. Type:
-     array (or Expression with resultType array), itemType: DatasetDataElement.
-    :type structure: object
-    :param schema: Columns that define the physical type schema of the
-     dataset. Type: array (or Expression with resultType array), itemType:
-     DatasetSchemaDataElement.
-    :type schema: object
-    :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param parameters: Parameters for dataset.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     Dataset.
-    :type annotations: list[object]
-    :param folder: The folder that this Dataset is in. If not specified,
-     Dataset will appear at the root level.
-    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param collection: Required. The collection name of the MongoDB database.
-     Type: string (or Expression with resultType string).
-    :type collection: object
-    """
-
-    _validation = {
-        'linked_service_name': {'required': True},
-        'type': {'required': True},
-        'collection': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'description': {'key': 'description', 'type': 'str'},
-        'structure': {'key': 'structure', 'type': 'object'},
-        'schema': {'key': 'schema', 'type': 'object'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
-        'type': {'key': 'type', 'type': 'str'},
-        'collection': {'key': 'typeProperties.collection', 'type': 'object'},
-    }
-
-    def __init__(self, *, linked_service_name, collection, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None:
-        super(MongoDbV2CollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
-        self.collection = collection
-        self.type = 'MongoDbV2Collection'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service.py
deleted file mode 100644
index bb29fc767420..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service.py
+++ /dev/null
@@ -1,66 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .linked_service import LinkedService
-
-
-class MongoDbV2LinkedService(LinkedService):
-    """Linked service for MongoDB data source.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param connection_string: Required. The MongoDB connection string. Type:
-     string, SecureString or AzureKeyVaultSecretReference.
-    :type connection_string: object
-    :param database: Required. The name of the MongoDB database that you want
-     to access. Type: string (or Expression with resultType string).
-    :type database: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-        'connection_string': {'required': True},
-        'database': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
-        'description': {'key': 'description', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
-        'database': {'key': 'typeProperties.database', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(MongoDbV2LinkedService, self).__init__(**kwargs)
-        self.connection_string = kwargs.get('connection_string', None)
-        self.database = kwargs.get('database', None)
-        self.type = 'MongoDbV2'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service_py3.py
deleted file mode 100644
index d1388ce797a5..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service_py3.py
+++ /dev/null
@@ -1,66 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .linked_service_py3 import LinkedService
-
-
-class MongoDbV2LinkedService(LinkedService):
-    """Linked service for MongoDB data source.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param connection_string: Required. The MongoDB connection string. Type:
-     string, SecureString or AzureKeyVaultSecretReference.
-    :type connection_string: object
-    :param database: Required. The name of the MongoDB database that you want
-     to access. Type: string (or Expression with resultType string).
-    :type database: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-        'connection_string': {'required': True},
-        'database': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
-        'description': {'key': 'description', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
-        'database': {'key': 'typeProperties.database', 'type': 'object'},
-    }
-
-    def __init__(self, *, connection_string, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None:
-        super(MongoDbV2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
-        self.connection_string = connection_string
-        self.database = database
-        self.type = 'MongoDbV2'
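A sketch for the v2 models: the linked service above plus the v2 copy source
that follows, again with hypothetical values; note the connection string may
also be a SecureString or an AzureKeyVaultSecretReference:

    from azure.mgmt.datafactory.models import (
        MongoDbCursorMethodsProperties, MongoDbV2LinkedService, MongoDbV2Source)

    linked_service = MongoDbV2LinkedService(
        connection_string='mongodb://mongo.example.com:27017',  # hypothetical
        database='inventory',
    )

    source = MongoDbV2Source(
        filter='{"status": "shipped"}',  # query-operator filter document
        cursor_methods=MongoDbCursorMethodsProperties(
            sort='{"_id": 1}', limit=1000),
        batch_size=500,  # per-batch document count from the server
    )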
- :type database: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - 'database': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - } - - def __init__(self, *, connection_string, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: - super(MongoDbV2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.database = database - self.type = 'MongoDbV2' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source.py deleted file mode 100644 index e951674a8e22..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source.py +++ /dev/null @@ -1,71 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class MongoDbV2Source(CopySource): - """A copy activity source for a MongoDB database. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param filter: Specifies selection filter using query operators. To return - all documents in a collection, omit this parameter or pass an empty - document ({}). Type: string (or Expression with resultType string). - :type filter: object - :param cursor_methods: Cursor methods for Mongodb query - :type cursor_methods: - ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties - :param batch_size: Specifies the number of documents to return in each - batch of the response from MongoDB instance. 
In most cases, modifying the - batch size will not affect the user or the application. This property's - main purpose is to avoid hit the limitation of response size. Type: - integer (or Expression with resultType integer). - :type batch_size: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'filter': {'key': 'filter', 'type': 'object'}, - 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, - 'batch_size': {'key': 'batchSize', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MongoDbV2Source, self).__init__(**kwargs) - self.filter = kwargs.get('filter', None) - self.cursor_methods = kwargs.get('cursor_methods', None) - self.batch_size = kwargs.get('batch_size', None) - self.type = 'MongoDbV2Source' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py deleted file mode 100644 index 9b8eec114a06..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py +++ /dev/null @@ -1,71 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class MongoDbV2Source(CopySource): - """A copy activity source for a MongoDB database. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param filter: Specifies selection filter using query operators. To return - all documents in a collection, omit this parameter or pass an empty - document ({}). Type: string (or Expression with resultType string). - :type filter: object - :param cursor_methods: Cursor methods for Mongodb query - :type cursor_methods: - ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties - :param batch_size: Specifies the number of documents to return in each - batch of the response from MongoDB instance. 
In most cases, modifying the - batch size will not affect the user or the application. This property's - main purpose is to avoid hit the limitation of response size. Type: - integer (or Expression with resultType integer). - :type batch_size: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'filter': {'key': 'filter', 'type': 'object'}, - 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, - 'batch_size': {'key': 'batchSize', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None, **kwargs) -> None: - super(MongoDbV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.filter = filter - self.cursor_methods = cursor_methods - self.batch_size = batch_size - self.type = 'MongoDbV2Source' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger.py deleted file mode 100644 index 1be28aa1b6ab..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger.py +++ /dev/null @@ -1,68 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .trigger import Trigger - - -class MultiplePipelineTrigger(Trigger): - """Base class for all triggers that support one to many model for trigger to - pipeline. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: BlobEventsTrigger, BlobTrigger, ScheduleTrigger - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when - Start/Stop APIs are called on the Trigger. Possible values include: - 'Started', 'Stopped', 'Disabled' - :vartype runtime_state: str or - ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the - trigger. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param pipelines: Pipelines that need to be started. 
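# --- Illustrative usage (not part of this patch) ---------------------------
# A minimal sketch of how the MongoDbV2 models shown above can be
# constructed once re-exported from azure.mgmt.datafactory.models. The
# connection string, database name and filter are placeholders; the keyword
# arguments mirror the constructors in the deleted files. 'cursor_methods'
# would take a MongoDbCursorMethodsProperties instance (not shown here).
from azure.mgmt.datafactory.models import (
    MongoDbV2LinkedService, MongoDbV2Source)

mongo_ls = MongoDbV2LinkedService(
    connection_string='mongodb://host:27017',  # placeholder
    database='salesdb')                        # placeholder

# 'filter' uses MongoDB query-operator syntax; omit it (or pass '{}')
# to return every document in the collection.
mongo_source = MongoDbV2Source(
    filter='{"status": "active"}',
    batch_size=500)
# ---------------------------------------------------------------------------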
- :type pipelines: - list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - """ - - _validation = { - 'runtime_state': {'readonly': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, - } - - _subtype_map = { - 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} - } - - def __init__(self, **kwargs): - super(MultiplePipelineTrigger, self).__init__(**kwargs) - self.pipelines = kwargs.get('pipelines', None) - self.type = 'MultiplePipelineTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger_py3.py deleted file mode 100644 index 206ab74ef419..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger_py3.py +++ /dev/null @@ -1,68 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .trigger_py3 import Trigger - - -class MultiplePipelineTrigger(Trigger): - """Base class for all triggers that support one to many model for trigger to - pipeline. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: BlobEventsTrigger, BlobTrigger, ScheduleTrigger - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when - Start/Stop APIs are called on the Trigger. Possible values include: - 'Started', 'Stopped', 'Disabled' - :vartype runtime_state: str or - ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the - trigger. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param pipelines: Pipelines that need to be started. 
- :type pipelines: - list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - """ - - _validation = { - 'runtime_state': {'readonly': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, - } - - _subtype_map = { - 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} - } - - def __init__(self, *, additional_properties=None, description: str=None, annotations=None, pipelines=None, **kwargs) -> None: - super(MultiplePipelineTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) - self.pipelines = pipelines - self.type = 'MultiplePipelineTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service.py deleted file mode 100644 index ec85b0136714..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service.py +++ /dev/null @@ -1,70 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class MySqlLinkedService(LinkedService): - """Linked service for MySQL data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The connection string. - :type connection_string: ~azure.mgmt.datafactory.models.SecretBase - :param password: The Azure key vault secret reference of password in - connection string. - :type password: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
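# --- Illustrative usage (not part of this patch) ---------------------------
# MultiplePipelineTrigger above is a base class; in practice one of its
# known subtypes (ScheduleTrigger, BlobTrigger, BlobEventsTrigger) is
# created and given the shared 'pipelines' list. A hedged sketch, assuming
# ScheduleTrigger/ScheduleTriggerRecurrence keep their usual shape in this
# models package; pipeline name and parameters are placeholders.
from azure.mgmt.datafactory.models import (
    PipelineReference, ScheduleTrigger, ScheduleTriggerRecurrence,
    TriggerPipelineReference)

trigger = ScheduleTrigger(
    recurrence=ScheduleTriggerRecurrence(frequency='Hour', interval=1),
    pipelines=[TriggerPipelineReference(
        pipeline_reference=PipelineReference(reference_name='MyPipeline'),
        parameters={'window': '@trigger().scheduledTime'})])
# ---------------------------------------------------------------------------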
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'SecretBase'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MySqlLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'MySql' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service_py3.py deleted file mode 100644 index b8038df22fd6..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service_py3.py +++ /dev/null @@ -1,70 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class MySqlLinkedService(LinkedService): - """Linked service for MySQL data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The connection string. - :type connection_string: ~azure.mgmt.datafactory.models.SecretBase - :param password: The Azure key vault secret reference of password in - connection string. - :type password: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'SecretBase'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(MySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'MySql' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source.py deleted file mode 100644 index c2b0b66eabb1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class MySqlSource(CopySource): - """A copy activity source for MySQL databases. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). 
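# --- Illustrative usage (not part of this patch) ---------------------------
# Sketch of the MySqlLinkedService defined above. connection_string is a
# SecretBase, so a SecureString (or a Key Vault reference) is passed; all
# values below are placeholders, and the AzureKeyVaultSecretReference shape
# ('store' + 'secret_name') is assumed from the wider models package.
from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference, LinkedServiceReference, MySqlLinkedService,
    SecureString)

mysql_ls = MySqlLinkedService(
    connection_string=SecureString(
        value='Server=myserver;Database=mydb;UID=myuser'),  # placeholder
    # The password can be resolved from Key Vault at runtime:
    password=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='MyKeyVaultLS'),
        secret_name='mysql-password'))
# ---------------------------------------------------------------------------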
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MySqlSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'MySqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source_py3.py deleted file mode 100644 index 3a0315d83979..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class MySqlSource(CopySource): - """A copy activity source for MySQL databases. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'MySqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset.py deleted file mode 100644 index 3bb1584975d5..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class MySqlTableDataset(Dataset): - """The MySQL table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The MySQL table name. Type: string (or Expression with - resultType string). 
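# --- Illustrative usage (not part of this patch) ---------------------------
# The MySqlSource above only adds 'query' on top of the common CopySource
# knobs; a minimal sketch with placeholder values:
from azure.mgmt.datafactory.models import MySqlSource

mysql_source = MySqlSource(
    query='SELECT id, name FROM customers',  # placeholder query
    source_retry_count=3,
    source_retry_wait='00:00:30')  # matches the documented hh:mm:ss pattern
# ---------------------------------------------------------------------------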
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MySqlTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'MySqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset_py3.py deleted file mode 100644 index 33263561dfde..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class MySqlTableDataset(Dataset): - """The MySQL table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The MySQL table name. Type: string (or Expression with - resultType string). 
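# --- Illustrative usage (not part of this patch) ---------------------------
# Sketch of the MySqlTableDataset defined above; linked_service_name is the
# only required argument besides the server-filled 'type'. Names are
# placeholders.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, MySqlTableDataset)

mysql_dataset = MySqlTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='MySqlLS'),
    table_name='orders')
# ---------------------------------------------------------------------------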
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(MySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'MySqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service.py deleted file mode 100644 index 5d94bdecaf62..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class NetezzaLinkedService(LinkedService): - """Netezza linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection - string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. 
Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(NetezzaLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.pwd = kwargs.get('pwd', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Netezza' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service_py3.py deleted file mode 100644 index 2fcc288fd5b7..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service_py3.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class NetezzaLinkedService(LinkedService): - """Netezza linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection - string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: - super(NetezzaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.pwd = pwd - self.encrypted_credential = encrypted_credential - self.type = 'Netezza' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings.py deleted file mode 100644 index b6c1ca9ba5da..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings.py +++ /dev/null @@ -1,42 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class NetezzaPartitionSettings(Model): - """The settings that will be leveraged for Netezza source partitioning. - - :param partition_column_name: The name of the column in integer type that - will be used for proceeding range partitioning. Type: string (or - Expression with resultType string). - :type partition_column_name: object - :param partition_upper_bound: The maximum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_upper_bound: object - :param partition_lower_bound: The minimum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). 
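# --- Illustrative usage (not part of this patch) ---------------------------
# Sketch of the NetezzaLinkedService defined above. connection_string is a
# plain ODBC string here, while 'pwd' pulls the password from Key Vault so
# it is not embedded in the connection string. All names are placeholders.
from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference, LinkedServiceReference,
    NetezzaLinkedService)

netezza_ls = NetezzaLinkedService(
    connection_string='Server=nzhost;Port=5480;Database=db;UID=admin',
    pwd=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='MyKeyVaultLS'),
        secret_name='netezza-password'))
# ---------------------------------------------------------------------------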
- :type partition_lower_bound: object - """ - - _attribute_map = { - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, - 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, - 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(NetezzaPartitionSettings, self).__init__(**kwargs) - self.partition_column_name = kwargs.get('partition_column_name', None) - self.partition_upper_bound = kwargs.get('partition_upper_bound', None) - self.partition_lower_bound = kwargs.get('partition_lower_bound', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings_py3.py deleted file mode 100644 index 9f071eae60ff..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings_py3.py +++ /dev/null @@ -1,42 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class NetezzaPartitionSettings(Model): - """The settings that will be leveraged for Netezza source partitioning. - - :param partition_column_name: The name of the column in integer type that - will be used for proceeding range partitioning. Type: string (or - Expression with resultType string). - :type partition_column_name: object - :param partition_upper_bound: The maximum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_upper_bound: object - :param partition_lower_bound: The minimum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_lower_bound: object - """ - - _attribute_map = { - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, - 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, - 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, - } - - def __init__(self, *, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None: - super(NetezzaPartitionSettings, self).__init__(**kwargs) - self.partition_column_name = partition_column_name - self.partition_upper_bound = partition_upper_bound - self.partition_lower_bound = partition_lower_bound diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py deleted file mode 100644 index 3c66032bf48d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py +++ /dev/null @@ -1,70 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class NetezzaSource(CopySource): - """A copy activity Netezza source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - :param partition_option: The partition mechanism that will be used for - Netezza read in parallel. Possible values include: 'None', 'DataSlice', - 'DynamicRange' - :type partition_option: str or - ~azure.mgmt.datafactory.models.NetezzaPartitionOption - :param partition_settings: The settings that will be leveraged for Netezza - source partitioning. - :type partition_settings: - ~azure.mgmt.datafactory.models.NetezzaPartitionSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'}, - } - - def __init__(self, **kwargs): - super(NetezzaSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.partition_option = kwargs.get('partition_option', None) - self.partition_settings = kwargs.get('partition_settings', None) - self.type = 'NetezzaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py deleted file mode 100644 index f5dcc07e63d8..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py +++ /dev/null @@ -1,70 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class NetezzaSource(CopySource): - """A copy activity Netezza source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - :param partition_option: The partition mechanism that will be used for - Netezza read in parallel. Possible values include: 'None', 'DataSlice', - 'DynamicRange' - :type partition_option: str or - ~azure.mgmt.datafactory.models.NetezzaPartitionOption - :param partition_settings: The settings that will be leveraged for Netezza - source partitioning. - :type partition_settings: - ~azure.mgmt.datafactory.models.NetezzaPartitionSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, partition_option=None, partition_settings=None, **kwargs) -> None: - super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.partition_option = partition_option - self.partition_settings = partition_settings - self.type = 'NetezzaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset.py deleted file mode 100644 index cf3b9205846c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. 
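# --- Illustrative usage (not part of this patch) ---------------------------
# The NetezzaSource/NetezzaPartitionSettings pair defined above enables
# parallel reads: with 'DynamicRange' the service splits the integer column
# named in partitionColumnName into ranges between the given bounds. The
# query, column and bounds below are placeholders.
from azure.mgmt.datafactory.models import (
    NetezzaPartitionSettings, NetezzaSource)

netezza_source = NetezzaSource(
    query='SELECT * FROM sales',          # placeholder query
    partition_option='DynamicRange',      # or 'None' / 'DataSlice'
    partition_settings=NetezzaPartitionSettings(
        partition_column_name='sale_id',
        partition_lower_bound='1',
        partition_upper_bound='1000000'))
# ---------------------------------------------------------------------------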
-# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class NetezzaTableDataset(Dataset): - """Netezza dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(NetezzaTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'NetezzaTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset_py3.py deleted file mode 100644 index 39de0032e8c9..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class NetezzaTableDataset(Dataset): - """Netezza dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(NetezzaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'NetezzaTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service.py deleted file mode 100644 index 01db8d71e924..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service.py +++ /dev/null @@ -1,127 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
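# --- Illustrative usage (not part of this patch) ---------------------------
# Sketch of the NetezzaTableDataset defined above, mirroring the other
# table datasets in this patch. Names are placeholders.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, NetezzaTableDataset)

netezza_dataset = NetezzaTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='NetezzaLS'),
    table_name='inventory')
# ---------------------------------------------------------------------------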
-# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class ODataLinkedService(LinkedService): - """Open Data Protocol (OData) linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param url: Required. The URL of the OData service endpoint. Type: string - (or Expression with resultType string). - :type url: object - :param authentication_type: Type of authentication used to connect to the - OData service. Possible values include: 'Basic', 'Anonymous', 'Windows', - 'AadServicePrincipal', 'ManagedServiceIdentity' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.ODataAuthenticationType - :param user_name: User name of the OData service. Type: string (or - Expression with resultType string). - :type user_name: object - :param password: Password of the OData service. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: Specify the tenant information (domain name or tenant ID) - under which your application resides. Type: string (or Expression with - resultType string). - :type tenant: object - :param service_principal_id: Specify the application id of your - application registered in Azure Active Directory. Type: string (or - Expression with resultType string). - :type service_principal_id: object - :param aad_resource_id: Specify the resource you are requesting - authorization to use Directory. Type: string (or Expression with - resultType string). - :type aad_resource_id: object - :param aad_service_principal_credential_type: Specify the credential type - (key or cert) is used for service principal. Possible values include: - 'ServicePrincipalKey', 'ServicePrincipalCert' - :type aad_service_principal_credential_type: str or - ~azure.mgmt.datafactory.models.ODataAadServicePrincipalCredentialType - :param service_principal_key: Specify the secret of your application - registered in Azure Active Directory. Type: string (or Expression with - resultType string). - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param service_principal_embedded_cert: Specify the base64 encoded - certificate of your application registered in Azure Active Directory. - Type: string (or Expression with resultType string). - :type service_principal_embedded_cert: - ~azure.mgmt.datafactory.models.SecretBase - :param service_principal_embedded_cert_password: Specify the password of - your certificate if your certificate has a password and you are using - AadServicePrincipal authentication. Type: string (or Expression with - resultType string). 
- :type service_principal_embedded_cert_password: - ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, - 'aad_service_principal_credential_type': {'key': 'typeProperties.aadServicePrincipalCredentialType', 'type': 'str'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'service_principal_embedded_cert': {'key': 'typeProperties.servicePrincipalEmbeddedCert', 'type': 'SecretBase'}, - 'service_principal_embedded_cert_password': {'key': 'typeProperties.servicePrincipalEmbeddedCertPassword', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ODataLinkedService, self).__init__(**kwargs) - self.url = kwargs.get('url', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.tenant = kwargs.get('tenant', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.aad_resource_id = kwargs.get('aad_resource_id', None) - self.aad_service_principal_credential_type = kwargs.get('aad_service_principal_credential_type', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.service_principal_embedded_cert = kwargs.get('service_principal_embedded_cert', None) - self.service_principal_embedded_cert_password = kwargs.get('service_principal_embedded_cert_password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'OData' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service_py3.py deleted file mode 100644 index fcf2d8bb9819..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service_py3.py +++ /dev/null @@ -1,127 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
-# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class ODataLinkedService(LinkedService): - """Open Data Protocol (OData) linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param url: Required. The URL of the OData service endpoint. Type: string - (or Expression with resultType string). - :type url: object - :param authentication_type: Type of authentication used to connect to the - OData service. Possible values include: 'Basic', 'Anonymous', 'Windows', - 'AadServicePrincipal', 'ManagedServiceIdentity' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.ODataAuthenticationType - :param user_name: User name of the OData service. Type: string (or - Expression with resultType string). - :type user_name: object - :param password: Password of the OData service. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: Specify the tenant information (domain name or tenant ID) - under which your application resides. Type: string (or Expression with - resultType string). - :type tenant: object - :param service_principal_id: Specify the application id of your - application registered in Azure Active Directory. Type: string (or - Expression with resultType string). - :type service_principal_id: object - :param aad_resource_id: Specify the resource you are requesting - authorization to use Directory. Type: string (or Expression with - resultType string). - :type aad_resource_id: object - :param aad_service_principal_credential_type: Specify the credential type - (key or cert) is used for service principal. Possible values include: - 'ServicePrincipalKey', 'ServicePrincipalCert' - :type aad_service_principal_credential_type: str or - ~azure.mgmt.datafactory.models.ODataAadServicePrincipalCredentialType - :param service_principal_key: Specify the secret of your application - registered in Azure Active Directory. Type: string (or Expression with - resultType string). - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param service_principal_embedded_cert: Specify the base64 encoded - certificate of your application registered in Azure Active Directory. - Type: string (or Expression with resultType string). - :type service_principal_embedded_cert: - ~azure.mgmt.datafactory.models.SecretBase - :param service_principal_embedded_cert_password: Specify the password of - your certificate if your certificate has a password and you are using - AadServicePrincipal authentication. Type: string (or Expression with - resultType string). 
- :type service_principal_embedded_cert_password: - ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, - 'aad_service_principal_credential_type': {'key': 'typeProperties.aadServicePrincipalCredentialType', 'type': 'str'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'service_principal_embedded_cert': {'key': 'typeProperties.servicePrincipalEmbeddedCert', 'type': 'SecretBase'}, - 'service_principal_embedded_cert_password': {'key': 'typeProperties.servicePrincipalEmbeddedCertPassword', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, user_name=None, password=None, tenant=None, service_principal_id=None, aad_resource_id=None, aad_service_principal_credential_type=None, service_principal_key=None, service_principal_embedded_cert=None, service_principal_embedded_cert_password=None, encrypted_credential=None, **kwargs) -> None: - super(ODataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.url = url - self.authentication_type = authentication_type - self.user_name = user_name - self.password = password - self.tenant = tenant - self.service_principal_id = service_principal_id - self.aad_resource_id = aad_resource_id - self.aad_service_principal_credential_type = aad_service_principal_credential_type - self.service_principal_key = service_principal_key - self.service_principal_embedded_cert = service_principal_embedded_cert - self.service_principal_embedded_cert_password = service_principal_embedded_cert_password - self.encrypted_credential = encrypted_credential - self.type = 'OData' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset.py deleted file mode 100644 index 658cf40c8d2b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset.py +++ /dev/null @@ 
-1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class ODataResourceDataset(Dataset): - """The Open Data Protocol (OData) resource dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param path: The OData resource path. Type: string (or Expression with - resultType string). - :type path: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ODataResourceDataset, self).__init__(**kwargs) - self.path = kwargs.get('path', None) - self.type = 'ODataResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset_py3.py deleted file mode 100644 index 5951a2cf6d80..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
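A dataset over that linked service then only needs the entity-set path; a minimal sketch under the same adf_client/rg_name/df_name assumptions as above:

from azure.mgmt.datafactory.models import (
    LinkedServiceReference, ODataResourceDataset)

# Point the dataset at a single entity set of the OData service.
customers_ds = ODataResourceDataset(
    linked_service_name=LinkedServiceReference(reference_name='ODataLinkedService'),
    path='Customers')
adf_client.datasets.create_or_update(
    rg_name, df_name, 'ODataCustomers', customers_ds)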
See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class ODataResourceDataset(Dataset): - """The Open Data Protocol (OData) resource dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param path: The OData resource path. Type: string (or Expression with - resultType string). 
- :type path: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, path=None, **kwargs) -> None: - super(ODataResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.path = path - self.type = 'ODataResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source.py deleted file mode 100644 index c70f440ff6cb..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class ODataSource(CopySource): - """A copy activity source for OData source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: OData query. For example, "$top=1". Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ODataSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'ODataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source_py3.py deleted file mode 100644 index 83ba9bd7f2af..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class ODataSource(CopySource): - """A copy activity source for OData source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: OData query. For example, "$top=1". Type: string (or - Expression with resultType string). 
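In a copy activity, the query property of the ODataSource above is forwarded to the service as its query string; a one-line sketch (the expression is illustrative):

from azure.mgmt.datafactory.models import ODataSource

# Restrict the read to the first ten rows server-side instead of filtering after the copy.
top10_source = ODataSource(query='$top=10')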
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(ODataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'ODataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service.py deleted file mode 100644 index 53d21dee2def..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service.py +++ /dev/null @@ -1,86 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class OdbcLinkedService(LinkedService): - """Open Database Connectivity (ODBC) linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The non-access credential portion of - the connection string as well as an optional encrypted credential. Type: - string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param authentication_type: Type of authentication used to connect to the - ODBC data store. Possible values are: Anonymous and Basic. Type: string - (or Expression with resultType string). - :type authentication_type: object - :param credential: The access credential portion of the connection string - specified in driver-specific property-value format. - :type credential: ~azure.mgmt.datafactory.models.SecretBase - :param user_name: User name for Basic authentication. Type: string (or - Expression with resultType string). 
- :type user_name: object - :param password: Password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(OdbcLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.credential = kwargs.get('credential', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Odbc' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service_py3.py deleted file mode 100644 index 2e376d23c67a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service_py3.py +++ /dev/null @@ -1,86 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class OdbcLinkedService(LinkedService): - """Open Database Connectivity (ODBC) linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. 
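A minimal sketch of the OdbcLinkedService removed above, assuming a self-hosted integration runtime named 'MySelfHostedIR' is already registered (the ODBC connector runs on a self-hosted runtime); all connection values are placeholders:

from azure.mgmt.datafactory.models import (
    IntegrationRuntimeReference, OdbcLinkedService, SecureString)

odbc_ls = OdbcLinkedService(
    # Non-credential part of the connection string; per the docstring above this
    # may also be an AzureKeyVaultSecretReference (see the next sketch).
    connection_string=SecureString(
        value='Driver={SQL Server};Server=<host>;Database=<db>'),
    authentication_type='Basic',
    user_name='<user>',
    password=SecureString(value='<password>'),
    connect_via=IntegrationRuntimeReference(reference_name='MySelfHostedIR'))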
- :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The non-access credential portion of - the connection string as well as an optional encrypted credential. Type: - string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param authentication_type: Type of authentication used to connect to the - ODBC data store. Possible values are: Anonymous and Basic. Type: string - (or Expression with resultType string). - :type authentication_type: object - :param credential: The access credential portion of the connection string - specified in driver-specific property-value format. - :type credential: ~azure.mgmt.datafactory.models.SecretBase - :param user_name: User name for Basic authentication. Type: string (or - Expression with resultType string). - :type user_name: object - :param password: Password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, credential=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(OdbcLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.authentication_type = authentication_type - self.credential = credential - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'Odbc' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink.py deleted file mode 100644 index ced7e1dbd9e4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink.py +++ /dev/null @@ -1,66 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
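The credential property documented above can instead be resolved from Key Vault at runtime; a sketch continuing the odbc_ls object from the previous example, assuming an Azure Key Vault linked service named 'MyKeyVault' already exists:

from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference, LinkedServiceReference)

# Driver-specific 'UID=...;PWD=...' fragment kept in Key Vault rather than inline.
odbc_ls.credential = AzureKeyVaultSecretReference(
    store=LinkedServiceReference(reference_name='MyKeyVault'),
    secret_name='odbc-credential')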
See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink import CopySink - - -class OdbcSink(CopySink): - """A copy activity ODBC sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param pre_copy_script: A query to execute before starting the copy. Type: - string (or Expression with resultType string). - :type pre_copy_script: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(OdbcSink, self).__init__(**kwargs) - self.pre_copy_script = kwargs.get('pre_copy_script', None) - self.type = 'OdbcSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink_py3.py deleted file mode 100644 index 9a181f8df7e9..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink_py3.py +++ /dev/null @@ -1,66 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class OdbcSink(CopySink): - """A copy activity ODBC sink. - - All required parameters must be populated in order to send to Azure. 
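A minimal sketch of the OdbcSink above; the timeout must match the hh:mm:ss pattern given in the docstring, and the script and table names are placeholders:

from azure.mgmt.datafactory.models import OdbcSink

odbc_sink = OdbcSink(
    pre_copy_script='TRUNCATE TABLE dbo.orders_copy',  # runs once before the copy starts
    write_batch_size=1000,
    write_batch_timeout='00:05:00')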
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param pre_copy_script: A query to execute before starting the copy. Type: - string (or Expression with resultType string). - :type pre_copy_script: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: - super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.pre_copy_script = pre_copy_script - self.type = 'OdbcSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source.py deleted file mode 100644 index 9761d0c0aeb5..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class OdbcSource(CopySource): - """A copy activity source for ODBC databases. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(OdbcSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'OdbcSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source_py3.py deleted file mode 100644 index 52b059a8ad91..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class OdbcSource(CopySource): - """A copy activity source for ODBC databases. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'OdbcSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset.py deleted file mode 100644 index 2f4f4261f4fc..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class OdbcTableDataset(Dataset): - """The ODBC table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The ODBC table name. Type: string (or Expression with - resultType string). 
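Source and sink are wired together by a copy activity; a sketch under the same adf_client assumptions, with dataset names matching the OdbcTableDataset sketch further below:

from azure.mgmt.datafactory.models import (
    CopyActivity, DatasetReference, OdbcSink, OdbcSource, PipelineResource)

# Copy the result of an ODBC query into another ODBC table, truncating it first.
copy = CopyActivity(
    name='CopyOdbcToOdbc',
    inputs=[DatasetReference(reference_name='SourceOdbcTable')],
    outputs=[DatasetReference(reference_name='SinkOdbcTable')],
    source=OdbcSource(query='SELECT * FROM dbo.orders'),
    sink=OdbcSink(pre_copy_script='TRUNCATE TABLE dbo.orders_copy'))
adf_client.pipelines.create_or_update(
    rg_name, df_name, 'OdbcCopyPipeline', PipelineResource(activities=[copy]))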
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(OdbcTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'OdbcTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset_py3.py deleted file mode 100644 index 070ddccd180d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class OdbcTableDataset(Dataset): - """The ODBC table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The ODBC table name. Type: string (or Expression with - resultType string). 
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(OdbcTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'OdbcTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset.py deleted file mode 100644 index baa90666d669..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset.py +++ /dev/null @@ -1,79 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class Office365Dataset(Dataset): - """The Office365 account. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. 
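The dataset referenced by that copy activity is a thin wrapper over the ODBC linked service; a sketch (the table name is a placeholder):

from azure.mgmt.datafactory.models import LinkedServiceReference, OdbcTableDataset

orders_ds = OdbcTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='OdbcLinkedService'),
    table_name='dbo.orders')
adf_client.datasets.create_or_update(
    rg_name, df_name, 'SourceOdbcTable', orders_ds)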
- :type type: str - :param table_name: Required. Name of the dataset to extract from Office - 365. Type: string (or Expression with resultType string). - :type table_name: object - :param predicate: A predicate expression that can be used to filter the - specific rows to extract from Office 365. Type: string (or Expression with - resultType string). - :type predicate: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'table_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'predicate': {'key': 'typeProperties.predicate', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(Office365Dataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.predicate = kwargs.get('predicate', None) - self.type = 'Office365Table' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset_py3.py deleted file mode 100644 index 5517f7daf9e3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset_py3.py +++ /dev/null @@ -1,79 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class Office365Dataset(Dataset): - """The Office365 account. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. 
If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: Required. Name of the dataset to extract from Office - 365. Type: string (or Expression with resultType string). - :type table_name: object - :param predicate: A predicate expression that can be used to filter the - specific rows to extract from Office 365. Type: string (or Expression with - resultType string). - :type predicate: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'table_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'predicate': {'key': 'typeProperties.predicate', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, predicate=None, **kwargs) -> None: - super(Office365Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.predicate = predicate - self.type = 'Office365Table' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service.py deleted file mode 100644 index 2dc98897482a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service.py +++ /dev/null @@ -1,83 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class Office365LinkedService(LinkedService): - """Office365 linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
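A sketch of the Office365Dataset above; the table name follows the Microsoft Graph data connect dataset naming and the predicate is an optional row filter (both values are illustrative, not verified against this API version):

from azure.mgmt.datafactory.models import LinkedServiceReference, Office365Dataset

messages_ds = Office365Dataset(
    linked_service_name=LinkedServiceReference(reference_name='Office365LinkedService'),
    table_name='BasicDataSet_v0.Message_v0',
    predicate='CreatedDateTime >= 2019-01-01T00:00:00Z')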
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param office365_tenant_id: Required. Azure tenant ID to which the Office - 365 account belongs. Type: string (or Expression with resultType string). - :type office365_tenant_id: object - :param service_principal_tenant_id: Required. Specify the tenant - information under which your Azure AD web application resides. Type: - string (or Expression with resultType string). - :type service_principal_tenant_id: object - :param service_principal_id: Required. Specify the application's client - ID. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: Required. Specify the application's key. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'office365_tenant_id': {'required': True}, - 'service_principal_tenant_id': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'office365_tenant_id': {'key': 'typeProperties.office365TenantId', 'type': 'object'}, - 'service_principal_tenant_id': {'key': 'typeProperties.servicePrincipalTenantId', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(Office365LinkedService, self).__init__(**kwargs) - self.office365_tenant_id = kwargs.get('office365_tenant_id', None) - self.service_principal_tenant_id = kwargs.get('service_principal_tenant_id', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Office365' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service_py3.py deleted file mode 100644 index 5a69c0d895fa..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service_py3.py +++ /dev/null @@ -1,83 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
-# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class Office365LinkedService(LinkedService): - """Office365 linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param office365_tenant_id: Required. Azure tenant ID to which the Office - 365 account belongs. Type: string (or Expression with resultType string). - :type office365_tenant_id: object - :param service_principal_tenant_id: Required. Specify the tenant - information under which your Azure AD web application resides. Type: - string (or Expression with resultType string). - :type service_principal_tenant_id: object - :param service_principal_id: Required. Specify the application's client - ID. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: Required. Specify the application's key. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'office365_tenant_id': {'required': True}, - 'service_principal_tenant_id': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'office365_tenant_id': {'key': 'typeProperties.office365TenantId', 'type': 'object'}, - 'service_principal_tenant_id': {'key': 'typeProperties.servicePrincipalTenantId', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, office365_tenant_id, service_principal_tenant_id, service_principal_id, service_principal_key, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, encrypted_credential=None, **kwargs) -> None: - super(Office365LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.office365_tenant_id = office365_tenant_id - self.service_principal_tenant_id = service_principal_tenant_id - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.encrypted_credential = encrypted_credential - self.type = 'Office365' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source.py deleted file mode 100644 index de19818aaa7f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source.py +++ /dev/null @@ -1,78 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class Office365Source(CopySource): - """A copy activity source for an Office365 service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param allowed_groups: The groups containing all the users. Type: array of - strings (or Expression with resultType array of strings). - :type allowed_groups: object - :param user_scope_filter_uri: The user scope uri. Type: string (or - Expression with resultType string). - :type user_scope_filter_uri: object - :param date_filter_column: The column on which to apply the start time and - end time filters. Type: string (or Expression with resultType string). - :type date_filter_column: object - :param start_time: Start time of the requested range for this dataset. - Type: string (or Expression with resultType string). - :type start_time: object - :param end_time: End time of the requested range for this dataset. Type: - string (or Expression with resultType string). - :type end_time: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'allowed_groups': {'key': 'allowedGroups', 'type': 'object'}, - 'user_scope_filter_uri': {'key': 'userScopeFilterUri', 'type': 'object'}, - 'date_filter_column': {'key': 'dateFilterColumn', 'type': 'object'}, - 'start_time': {'key': 'startTime', 'type': 'object'}, - 'end_time': {'key': 'endTime', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(Office365Source, self).__init__(**kwargs) - self.allowed_groups = kwargs.get('allowed_groups', None) - self.user_scope_filter_uri = kwargs.get('user_scope_filter_uri', None) - self.date_filter_column = kwargs.get('date_filter_column', None) - self.start_time = kwargs.get('start_time', None) - self.end_time = kwargs.get('end_time', None) - self.type = 'Office365Source' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source_py3.py deleted file mode 100644 index fc2c4b095904..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source_py3.py +++ /dev/null @@ -1,78 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class Office365Source(CopySource): - """A copy activity source for an Office365 service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count.
Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param allowed_groups: The groups containing all the users. Type: array of - strings (or Expression with resultType array of strings). - :type allowed_groups: object - :param user_scope_filter_uri: The user scope uri. Type: string (or - Expression with resultType string). - :type user_scope_filter_uri: object - :param date_filter_column: The column on which to apply the start time and - end time filters. Type: string (or Expression with resultType string). - :type date_filter_column: object - :param start_time: Start time of the requested range for this dataset. - Type: string (or Expression with resultType string). - :type start_time: object - :param end_time: End time of the requested range for this dataset. Type: - string (or Expression with resultType string). - :type end_time: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'allowed_groups': {'key': 'allowedGroups', 'type': 'object'}, - 'user_scope_filter_uri': {'key': 'userScopeFilterUri', 'type': 'object'}, - 'date_filter_column': {'key': 'dateFilterColumn', 'type': 'object'}, - 'start_time': {'key': 'startTime', 'type': 'object'}, - 'end_time': {'key': 'endTime', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, allowed_groups=None, user_scope_filter_uri=None, date_filter_column=None, start_time=None, end_time=None, **kwargs) -> None: - super(Office365Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.allowed_groups = allowed_groups - self.user_scope_filter_uri = user_scope_filter_uri - self.date_filter_column = date_filter_column - self.start_time = start_time - self.end_time = end_time - self.type = 'Office365Source' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation.py deleted file mode 100644 index db8cde8db784..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation.py +++ /dev/null @@ -1,41 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated.
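[Review note] The Office365Source removed above scopes the extraction with dateFilterColumn plus startTime/endTime. An illustrative sketch (the column name and time range are placeholders; each value may also be an ADF Expression object):

    from azure.mgmt.datafactory.models import Office365Source

    # Extract one calendar month of data, filtered on a date column.
    source = Office365Source(
        date_filter_column='CreatedDateTime',
        start_time='2019-04-01T00:00:00Z',
        end_time='2019-05-01T00:00:00Z',
    )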
-# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class Operation(Model): - """Azure Data Factory API operation definition. - - :param name: Operation name: {provider}/{resource}/{operation} - :type name: str - :param origin: The intended executor of the operation. - :type origin: str - :param display: Metadata associated with the operation. - :type display: ~azure.mgmt.datafactory.models.OperationDisplay - :param service_specification: Details about a service operation. - :type service_specification: - ~azure.mgmt.datafactory.models.OperationServiceSpecification - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'origin': {'key': 'origin', 'type': 'str'}, - 'display': {'key': 'display', 'type': 'OperationDisplay'}, - 'service_specification': {'key': 'properties.serviceSpecification', 'type': 'OperationServiceSpecification'}, - } - - def __init__(self, **kwargs): - super(Operation, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.origin = kwargs.get('origin', None) - self.display = kwargs.get('display', None) - self.service_specification = kwargs.get('service_specification', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display.py deleted file mode 100644 index 1d96541c0581..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display.py +++ /dev/null @@ -1,41 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class OperationDisplay(Model): - """Metadata associated with the operation. - - :param description: The description of the operation. - :type description: str - :param provider: The name of the provider. - :type provider: str - :param resource: The name of the resource type on which the operation is - performed. - :type resource: str - :param operation: The type of operation: get, read, delete, etc. 
- :type operation: str - """ - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'provider': {'key': 'provider', 'type': 'str'}, - 'resource': {'key': 'resource', 'type': 'str'}, - 'operation': {'key': 'operation', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(OperationDisplay, self).__init__(**kwargs) - self.description = kwargs.get('description', None) - self.provider = kwargs.get('provider', None) - self.resource = kwargs.get('resource', None) - self.operation = kwargs.get('operation', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display_py3.py deleted file mode 100644 index dfbb782627f4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display_py3.py +++ /dev/null @@ -1,41 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class OperationDisplay(Model): - """Metadata associated with the operation. - - :param description: The description of the operation. - :type description: str - :param provider: The name of the provider. - :type provider: str - :param resource: The name of the resource type on which the operation is - performed. - :type resource: str - :param operation: The type of operation: get, read, delete, etc. - :type operation: str - """ - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'provider': {'key': 'provider', 'type': 'str'}, - 'resource': {'key': 'resource', 'type': 'str'}, - 'operation': {'key': 'operation', 'type': 'str'}, - } - - def __init__(self, *, description: str=None, provider: str=None, resource: str=None, operation: str=None, **kwargs) -> None: - super(OperationDisplay, self).__init__(**kwargs) - self.description = description - self.provider = provider - self.resource = resource - self.operation = operation diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification.py deleted file mode 100644 index 93bfaf4ed0de..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification.py +++ /dev/null @@ -1,37 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class OperationLogSpecification(Model): - """Details about an operation related to logs. - - :param name: The name of the log category. 
- :type name: str - :param display_name: Localized display name. - :type display_name: str - :param blob_duration: Blobs created in the customer storage account, per - hour. - :type blob_duration: str - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'blob_duration': {'key': 'blobDuration', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(OperationLogSpecification, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.display_name = kwargs.get('display_name', None) - self.blob_duration = kwargs.get('blob_duration', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification_py3.py deleted file mode 100644 index 2cdd941fab7b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification_py3.py +++ /dev/null @@ -1,37 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class OperationLogSpecification(Model): - """Details about an operation related to logs. - - :param name: The name of the log category. - :type name: str - :param display_name: Localized display name. - :type display_name: str - :param blob_duration: Blobs created in the customer storage account, per - hour. - :type blob_duration: str - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'blob_duration': {'key': 'blobDuration', 'type': 'str'}, - } - - def __init__(self, *, name: str=None, display_name: str=None, blob_duration: str=None, **kwargs) -> None: - super(OperationLogSpecification, self).__init__(**kwargs) - self.name = name - self.display_name = display_name - self.blob_duration = blob_duration diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability.py deleted file mode 100644 index 974e0cbf4b0b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability.py +++ /dev/null @@ -1,33 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class OperationMetricAvailability(Model): - """Defines how often data for a metric becomes available. - - :param time_grain: The granularity for the metric. 
- :type time_grain: str - :param blob_duration: Blob created in the customer storage account, per - hour. - :type blob_duration: str - """ - - _attribute_map = { - 'time_grain': {'key': 'timeGrain', 'type': 'str'}, - 'blob_duration': {'key': 'blobDuration', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(OperationMetricAvailability, self).__init__(**kwargs) - self.time_grain = kwargs.get('time_grain', None) - self.blob_duration = kwargs.get('blob_duration', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability_py3.py deleted file mode 100644 index 312b83a23701..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability_py3.py +++ /dev/null @@ -1,33 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class OperationMetricAvailability(Model): - """Defines how often data for a metric becomes available. - - :param time_grain: The granularity for the metric. - :type time_grain: str - :param blob_duration: Blob created in the customer storage account, per - hour. - :type blob_duration: str - """ - - _attribute_map = { - 'time_grain': {'key': 'timeGrain', 'type': 'str'}, - 'blob_duration': {'key': 'blobDuration', 'type': 'str'}, - } - - def __init__(self, *, time_grain: str=None, blob_duration: str=None, **kwargs) -> None: - super(OperationMetricAvailability, self).__init__(**kwargs) - self.time_grain = time_grain - self.blob_duration = blob_duration diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension.py deleted file mode 100644 index 24232e7b5470..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension.py +++ /dev/null @@ -1,37 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class OperationMetricDimension(Model): - """Defines the metric dimension. - - :param name: The name of the dimension for the metric. - :type name: str - :param display_name: The display name of the metric dimension. - :type display_name: str - :param to_be_exported_for_shoebox: Whether the dimension should be - exported to Azure Monitor. 
- :type to_be_exported_for_shoebox: bool - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'to_be_exported_for_shoebox': {'key': 'toBeExportedForShoebox', 'type': 'bool'}, - } - - def __init__(self, **kwargs): - super(OperationMetricDimension, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.display_name = kwargs.get('display_name', None) - self.to_be_exported_for_shoebox = kwargs.get('to_be_exported_for_shoebox', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension_py3.py deleted file mode 100644 index 1d8610b7fab8..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension_py3.py +++ /dev/null @@ -1,37 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class OperationMetricDimension(Model): - """Defines the metric dimension. - - :param name: The name of the dimension for the metric. - :type name: str - :param display_name: The display name of the metric dimension. - :type display_name: str - :param to_be_exported_for_shoebox: Whether the dimension should be - exported to Azure Monitor. - :type to_be_exported_for_shoebox: bool - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'to_be_exported_for_shoebox': {'key': 'toBeExportedForShoebox', 'type': 'bool'}, - } - - def __init__(self, *, name: str=None, display_name: str=None, to_be_exported_for_shoebox: bool=None, **kwargs) -> None: - super(OperationMetricDimension, self).__init__(**kwargs) - self.name = name - self.display_name = display_name - self.to_be_exported_for_shoebox = to_be_exported_for_shoebox diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification.py deleted file mode 100644 index 77f533fdcebf..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification.py +++ /dev/null @@ -1,68 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class OperationMetricSpecification(Model): - """Details about an operation related to metrics. - - :param name: The name of the metric. - :type name: str - :param display_name: Localized display name of the metric. 
- :type display_name: str - :param display_description: The description of the metric. - :type display_description: str - :param unit: The unit that the metric is measured in. - :type unit: str - :param aggregation_type: The type of metric aggregation. - :type aggregation_type: str - :param enable_regional_mdm_account: Whether or not the service is using - regional MDM accounts. - :type enable_regional_mdm_account: str - :param source_mdm_account: The name of the MDM account. - :type source_mdm_account: str - :param source_mdm_namespace: The name of the MDM namespace. - :type source_mdm_namespace: str - :param availabilities: Defines how often data for metrics becomes - available. - :type availabilities: - list[~azure.mgmt.datafactory.models.OperationMetricAvailability] - :param dimensions: Defines the metric dimension. - :type dimensions: - list[~azure.mgmt.datafactory.models.OperationMetricDimension] - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'display_description': {'key': 'displayDescription', 'type': 'str'}, - 'unit': {'key': 'unit', 'type': 'str'}, - 'aggregation_type': {'key': 'aggregationType', 'type': 'str'}, - 'enable_regional_mdm_account': {'key': 'enableRegionalMdmAccount', 'type': 'str'}, - 'source_mdm_account': {'key': 'sourceMdmAccount', 'type': 'str'}, - 'source_mdm_namespace': {'key': 'sourceMdmNamespace', 'type': 'str'}, - 'availabilities': {'key': 'availabilities', 'type': '[OperationMetricAvailability]'}, - 'dimensions': {'key': 'dimensions', 'type': '[OperationMetricDimension]'}, - } - - def __init__(self, **kwargs): - super(OperationMetricSpecification, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.display_name = kwargs.get('display_name', None) - self.display_description = kwargs.get('display_description', None) - self.unit = kwargs.get('unit', None) - self.aggregation_type = kwargs.get('aggregation_type', None) - self.enable_regional_mdm_account = kwargs.get('enable_regional_mdm_account', None) - self.source_mdm_account = kwargs.get('source_mdm_account', None) - self.source_mdm_namespace = kwargs.get('source_mdm_namespace', None) - self.availabilities = kwargs.get('availabilities', None) - self.dimensions = kwargs.get('dimensions', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification_py3.py deleted file mode 100644 index c1cc4ad39e72..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification_py3.py +++ /dev/null @@ -1,68 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class OperationMetricSpecification(Model): - """Details about an operation related to metrics. - - :param name: The name of the metric. - :type name: str - :param display_name: Localized display name of the metric. 
- :type display_name: str - :param display_description: The description of the metric. - :type display_description: str - :param unit: The unit that the metric is measured in. - :type unit: str - :param aggregation_type: The type of metric aggregation. - :type aggregation_type: str - :param enable_regional_mdm_account: Whether or not the service is using - regional MDM accounts. - :type enable_regional_mdm_account: str - :param source_mdm_account: The name of the MDM account. - :type source_mdm_account: str - :param source_mdm_namespace: The name of the MDM namespace. - :type source_mdm_namespace: str - :param availabilities: Defines how often data for metrics becomes - available. - :type availabilities: - list[~azure.mgmt.datafactory.models.OperationMetricAvailability] - :param dimensions: Defines the metric dimension. - :type dimensions: - list[~azure.mgmt.datafactory.models.OperationMetricDimension] - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'display_description': {'key': 'displayDescription', 'type': 'str'}, - 'unit': {'key': 'unit', 'type': 'str'}, - 'aggregation_type': {'key': 'aggregationType', 'type': 'str'}, - 'enable_regional_mdm_account': {'key': 'enableRegionalMdmAccount', 'type': 'str'}, - 'source_mdm_account': {'key': 'sourceMdmAccount', 'type': 'str'}, - 'source_mdm_namespace': {'key': 'sourceMdmNamespace', 'type': 'str'}, - 'availabilities': {'key': 'availabilities', 'type': '[OperationMetricAvailability]'}, - 'dimensions': {'key': 'dimensions', 'type': '[OperationMetricDimension]'}, - } - - def __init__(self, *, name: str=None, display_name: str=None, display_description: str=None, unit: str=None, aggregation_type: str=None, enable_regional_mdm_account: str=None, source_mdm_account: str=None, source_mdm_namespace: str=None, availabilities=None, dimensions=None, **kwargs) -> None: - super(OperationMetricSpecification, self).__init__(**kwargs) - self.name = name - self.display_name = display_name - self.display_description = display_description - self.unit = unit - self.aggregation_type = aggregation_type - self.enable_regional_mdm_account = enable_regional_mdm_account - self.source_mdm_account = source_mdm_account - self.source_mdm_namespace = source_mdm_namespace - self.availabilities = availabilities - self.dimensions = dimensions diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_paged.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_paged.py deleted file mode 100644 index d6eea01bbdb9..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_paged.py +++ /dev/null @@ -1,27 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
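[Review note] The OperationMetricSpecification family removed above nests availabilities and dimensions as lists. A hypothetical sketch (the metric and dimension names are invented for illustration):

    from azure.mgmt.datafactory.models import (
        OperationMetricAvailability,
        OperationMetricDimension,
        OperationMetricSpecification,
    )

    # One metric with a single availability window and a single dimension.
    metric = OperationMetricSpecification(
        name='PipelineSucceededRuns',
        display_name='Succeeded pipeline runs',
        unit='Count',
        aggregation_type='Total',
        availabilities=[OperationMetricAvailability(time_grain='PT1M', blob_duration='PT1H')],
        dimensions=[OperationMetricDimension(name='Name', display_name='Pipeline name')],
    )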
-# -------------------------------------------------------------------------- - -from msrest.paging import Paged - - -class OperationPaged(Paged): - """ - A paging container for iterating over a list of :class:`Operation <azure.mgmt.datafactory.models.Operation>` object - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'current_page': {'key': 'value', 'type': '[Operation]'} - } - - def __init__(self, *args, **kwargs): - - super(OperationPaged, self).__init__(*args, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_py3.py deleted file mode 100644 index 23305038a090..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_py3.py +++ /dev/null @@ -1,41 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class Operation(Model): - """Azure Data Factory API operation definition. - - :param name: Operation name: {provider}/{resource}/{operation} - :type name: str - :param origin: The intended executor of the operation. - :type origin: str - :param display: Metadata associated with the operation. - :type display: ~azure.mgmt.datafactory.models.OperationDisplay - :param service_specification: Details about a service operation. - :type service_specification: - ~azure.mgmt.datafactory.models.OperationServiceSpecification - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'origin': {'key': 'origin', 'type': 'str'}, - 'display': {'key': 'display', 'type': 'OperationDisplay'}, - 'service_specification': {'key': 'properties.serviceSpecification', 'type': 'OperationServiceSpecification'}, - } - - def __init__(self, *, name: str=None, origin: str=None, display=None, service_specification=None, **kwargs) -> None: - super(Operation, self).__init__(**kwargs) - self.name = name - self.origin = origin - self.display = display - self.service_specification = service_specification diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification.py deleted file mode 100644 index 82622a44af5a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification.py +++ /dev/null @@ -1,34 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class OperationServiceSpecification(Model): - """Details about a service operation.
- - :param log_specifications: Details about operations related to logs. - :type log_specifications: - list[~azure.mgmt.datafactory.models.OperationLogSpecification] - :param metric_specifications: Details about operations related to metrics. - :type metric_specifications: - list[~azure.mgmt.datafactory.models.OperationMetricSpecification] - """ - - _attribute_map = { - 'log_specifications': {'key': 'logSpecifications', 'type': '[OperationLogSpecification]'}, - 'metric_specifications': {'key': 'metricSpecifications', 'type': '[OperationMetricSpecification]'}, - } - - def __init__(self, **kwargs): - super(OperationServiceSpecification, self).__init__(**kwargs) - self.log_specifications = kwargs.get('log_specifications', None) - self.metric_specifications = kwargs.get('metric_specifications', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification_py3.py deleted file mode 100644 index 4215dac6eb7f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification_py3.py +++ /dev/null @@ -1,34 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class OperationServiceSpecification(Model): - """Details about a service operation. - - :param log_specifications: Details about operations related to logs. - :type log_specifications: - list[~azure.mgmt.datafactory.models.OperationLogSpecification] - :param metric_specifications: Details about operations related to metrics. - :type metric_specifications: - list[~azure.mgmt.datafactory.models.OperationMetricSpecification] - """ - - _attribute_map = { - 'log_specifications': {'key': 'logSpecifications', 'type': '[OperationLogSpecification]'}, - 'metric_specifications': {'key': 'metricSpecifications', 'type': '[OperationMetricSpecification]'}, - } - - def __init__(self, *, log_specifications=None, metric_specifications=None, **kwargs) -> None: - super(OperationServiceSpecification, self).__init__(**kwargs) - self.log_specifications = log_specifications - self.metric_specifications = metric_specifications diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service.py deleted file mode 100644 index 19f715dfd9e2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service.py +++ /dev/null @@ -1,71 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
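[Review note] Putting the removed operation models together: Operation nests an OperationDisplay and an OperationServiceSpecification, which in turn carries the log and metric specifications. A hypothetical composition (the operation name and display strings are placeholders):

    from azure.mgmt.datafactory.models import (
        Operation,
        OperationDisplay,
        OperationServiceSpecification,
    )

    op = Operation(
        name='Microsoft.DataFactory/factories/read',
        origin='user,system',
        display=OperationDisplay(
            description='Reads a Data Factory.',
            provider='Microsoft Data Factory',
            resource='Data Factory',
            operation='Read Data Factory',
        ),
        service_specification=OperationServiceSpecification(
            log_specifications=[],   # OperationLogSpecification instances
            metric_specifications=[],  # OperationMetricSpecification instances
        ),
    )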
-# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class OracleLinkedService(LinkedService): - """Oracle database. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in - connection string. - :type password: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(OracleLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Oracle' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service_py3.py deleted file mode 100644 index a46f0463afb5..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service_py3.py +++ /dev/null @@ -1,71 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
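[Review note] The removed OracleLinkedService keeps the password out of the connection string via an AzureKeyVaultSecretReference. An illustrative sketch (the connection string, vault linked-service name, and secret name are placeholders):

    from azure.mgmt.datafactory.models import (
        AzureKeyVaultSecretReference,
        LinkedServiceReference,
        OracleLinkedService,
    )

    oracle_ls = OracleLinkedService(
        connection_string='host=myserver;port=1521;serviceName=myservice;user id=myuser',
        # Password resolved at runtime from an Azure Key Vault secret.
        password=AzureKeyVaultSecretReference(
            store=LinkedServiceReference(reference_name='MyAzureKeyVault'),
            secret_name='oracle-password',
        ),
    )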
-# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class OracleLinkedService(LinkedService): - """Oracle database. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in - connection string. - :type password: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(OracleLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'Oracle' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py deleted file mode 100644 index b4e9aa1b92f3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
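[Review note] The OraclePartitionSettings model removed just below drives parallel reads by range-partitioning on an integer column. A hypothetical sketch (column name and bounds are placeholders):

    from azure.mgmt.datafactory.models import OraclePartitionSettings

    # Split the read into ranges over ORDER_ID between 1 and 1,000,000.
    partition_settings = OraclePartitionSettings(
        partition_column_name='ORDER_ID',
        partition_lower_bound='1',
        partition_upper_bound='1000000',
    )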
-# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class OraclePartitionSettings(Model): - """The settings that will be leveraged for Oracle source partitioning. - - :param partition_names: Names of the physical partitions of Oracle table. - :type partition_names: object - :param partition_column_name: The name of the column in integer type that - will be used for proceeding range partitioning. Type: string (or - Expression with resultType string). - :type partition_column_name: object - :param partition_upper_bound: The maximum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_upper_bound: object - :param partition_lower_bound: The minimum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_lower_bound: object - """ - - _attribute_map = { - 'partition_names': {'key': 'partitionNames', 'type': 'object'}, - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, - 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, - 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(OraclePartitionSettings, self).__init__(**kwargs) - self.partition_names = kwargs.get('partition_names', None) - self.partition_column_name = kwargs.get('partition_column_name', None) - self.partition_upper_bound = kwargs.get('partition_upper_bound', None) - self.partition_lower_bound = kwargs.get('partition_lower_bound', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py deleted file mode 100644 index 10641aab7f9f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class OraclePartitionSettings(Model): - """The settings that will be leveraged for Oracle source partitioning. - - :param partition_names: Names of the physical partitions of Oracle table. - :type partition_names: object - :param partition_column_name: The name of the column in integer type that - will be used for proceeding range partitioning. Type: string (or - Expression with resultType string). - :type partition_column_name: object - :param partition_upper_bound: The maximum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). 
- :type partition_upper_bound: object - :param partition_lower_bound: The minimum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_lower_bound: object - """ - - _attribute_map = { - 'partition_names': {'key': 'partitionNames', 'type': 'object'}, - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, - 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, - 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, - } - - def __init__(self, *, partition_names=None, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None: - super(OraclePartitionSettings, self).__init__(**kwargs) - self.partition_names = partition_names - self.partition_column_name = partition_column_name - self.partition_upper_bound = partition_upper_bound - self.partition_lower_bound = partition_lower_bound diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service.py deleted file mode 100644 index 44ce000868b7..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service.py +++ /dev/null @@ -1,95 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class OracleServiceCloudLinkedService(LinkedService): - """Oracle Service Cloud linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The URL of the Oracle Service Cloud instance. - :type host: object - :param username: Required. The user name that you use to access Oracle - Service Cloud server. - :type username: object - :param password: Required. The password corresponding to the user name - that you provided in the username key. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. Type: - boolean (or Expression with resultType boolean). 
- :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. Type: boolean (or - Expression with resultType boolean). - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. Type: - boolean (or Expression with resultType boolean). - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'username': {'required': True}, - 'password': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(OracleServiceCloudLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'OracleServiceCloud' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service_py3.py deleted file mode 100644 index 8732e2e82ca0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service_py3.py +++ /dev/null @@ -1,95 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
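[Review note] The removed OracleServiceCloudLinkedService requires host, username, and password; per the docstrings, the HTTPS/host/peer verification switches all default to true on the service side. An illustrative sketch (host and credentials are placeholders):

    from azure.mgmt.datafactory.models import (
        OracleServiceCloudLinkedService,
        SecureString,
    )

    osc_ls = OracleServiceCloudLinkedService(
        host='https://mycompany.rightnowdemo.com',
        username='integration.user@example.com',
        password=SecureString(value='placeholder-password'),
        use_encrypted_endpoints=True,
    )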
-# --------------------------------------------------------------------------
-
-from .linked_service_py3 import LinkedService
-
-
-class OracleServiceCloudLinkedService(LinkedService):
- """Oracle Service Cloud linked service.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param connect_via: The integration runtime reference.
- :type connect_via:
- ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
- :param description: Linked service description.
- :type description: str
- :param parameters: Parameters for linked service.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- linked service.
- :type annotations: list[object]
- :param type: Required. Constant filled by server.
- :type type: str
- :param host: Required. The URL of the Oracle Service Cloud instance.
- :type host: object
- :param username: Required. The user name that you use to access the Oracle
- Service Cloud server.
- :type username: object
- :param password: Required. The password corresponding to the user name
- that you provided in the username key.
- :type password: ~azure.mgmt.datafactory.models.SecretBase
- :param use_encrypted_endpoints: Specifies whether the data source
- endpoints are encrypted using HTTPS. The default value is true. Type:
- boolean (or Expression with resultType boolean).
- :type use_encrypted_endpoints: object
- :param use_host_verification: Specifies whether to require the host name
- in the server's certificate to match the host name of the server when
- connecting over SSL. The default value is true. Type: boolean (or
- Expression with resultType boolean).
- :type use_host_verification: object
- :param use_peer_verification: Specifies whether to verify the identity of
- the server when connecting over SSL. The default value is true. Type:
- boolean (or Expression with resultType boolean).
- :type use_peer_verification: object
- :param encrypted_credential: The encrypted credential used for
- authentication. Credentials are encrypted using the integration runtime
- credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'username': {'required': True}, - 'password': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, username, password, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(OracleServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.username = username - self.password = password - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'OracleServiceCloud' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset.py deleted file mode 100644 index 35ce3439d8a0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class OracleServiceCloudObjectDataset(Dataset): - """Oracle Service Cloud dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. 
- :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(OracleServiceCloudObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'OracleServiceCloudObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset_py3.py deleted file mode 100644 index a478e1abc828..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class OracleServiceCloudObjectDataset(Dataset): - """Oracle Service Cloud dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. 
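
# A short sketch of declaring the dataset documented above against an
# existing linked service; both reference and object names are hypothetical.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference,
    OracleServiceCloudObjectDataset,
)

osc_dataset = OracleServiceCloudObjectDataset(
    linked_service_name=LinkedServiceReference(
        reference_name='OracleServiceCloudLS'),  # hypothetical reference
    table_name='ServiceRequests',                # hypothetical object name
)
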
Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(OracleServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'OracleServiceCloudObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source.py deleted file mode 100644 index f42291941393..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class OracleServiceCloudSource(CopySource): - """A copy activity Oracle Service Cloud source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). 
- :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(OracleServiceCloudSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'OracleServiceCloudSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source_py3.py deleted file mode 100644 index 1fa5d6eb3748..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class OracleServiceCloudSource(CopySource): - """A copy activity Oracle Service Cloud source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
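
# A minimal sketch of the copy source documented above; the query text and
# retry values are illustrative.
from azure.mgmt.datafactory.models import OracleServiceCloudSource

osc_source = OracleServiceCloudSource(
    query='SELECT * FROM ServiceRequests',  # hypothetical query expression
    source_retry_count=3,
    source_retry_wait='00:00:30',           # matches the timespan pattern above
)
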
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'OracleServiceCloudSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink.py deleted file mode 100644 index 1f6c747c49db..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink.py +++ /dev/null @@ -1,66 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink import CopySink - - -class OracleSink(CopySink): - """A copy activity Oracle sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression - with resultType string). 
- :type pre_copy_script: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(OracleSink, self).__init__(**kwargs) - self.pre_copy_script = kwargs.get('pre_copy_script', None) - self.type = 'OracleSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink_py3.py deleted file mode 100644 index 3a571c66732a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink_py3.py +++ /dev/null @@ -1,66 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class OracleSink(CopySink): - """A copy activity Oracle sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression - with resultType string). 
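
# A minimal sketch of the Oracle sink documented above; the pre-copy script
# and batch size are illustrative.
from azure.mgmt.datafactory.models import OracleSink

oracle_sink = OracleSink(
    pre_copy_script='TRUNCATE TABLE STAGE_ORDERS',  # hypothetical SQL script
    write_batch_size=10000,
)
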
- :type pre_copy_script: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: - super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.pre_copy_script = pre_copy_script - self.type = 'OracleSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py deleted file mode 100644 index db436192eca1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py +++ /dev/null @@ -1,76 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class OracleSource(CopySource): - """A copy activity Oracle source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param oracle_reader_query: Oracle reader query. Type: string (or - Expression with resultType string). - :type oracle_reader_query: object - :param query_timeout: Query timeout. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param partition_option: The partition mechanism that will be used for - Oracle read in parallel. 
Possible values include: 'None', - 'PhysicalPartitionsOfTable', 'DynamicRange' - :type partition_option: str or - ~azure.mgmt.datafactory.models.OraclePartitionOption - :param partition_settings: The settings that will be leveraged for Oracle - source partitioning. - :type partition_settings: - ~azure.mgmt.datafactory.models.OraclePartitionSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, - } - - def __init__(self, **kwargs): - super(OracleSource, self).__init__(**kwargs) - self.oracle_reader_query = kwargs.get('oracle_reader_query', None) - self.query_timeout = kwargs.get('query_timeout', None) - self.partition_option = kwargs.get('partition_option', None) - self.partition_settings = kwargs.get('partition_settings', None) - self.type = 'OracleSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py deleted file mode 100644 index 0a871809896e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py +++ /dev/null @@ -1,76 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class OracleSource(CopySource): - """A copy activity Oracle source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param oracle_reader_query: Oracle reader query. Type: string (or - Expression with resultType string). - :type oracle_reader_query: object - :param query_timeout: Query timeout. 
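
# A hedged sketch of the parallel-read options documented above, pairing the
# DynamicRange partition option with OraclePartitionSettings; the column name
# and bounds are hypothetical.
from azure.mgmt.datafactory.models import (
    OraclePartitionSettings,
    OracleSource,
)

oracle_source = OracleSource(
    partition_option='DynamicRange',
    partition_settings=OraclePartitionSettings(
        partition_column_name='ORDER_ID',  # hypothetical numeric column
        partition_lower_bound='1',
        partition_upper_bound='1000000',
    ),
)
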
Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param partition_option: The partition mechanism that will be used for - Oracle read in parallel. Possible values include: 'None', - 'PhysicalPartitionsOfTable', 'DynamicRange' - :type partition_option: str or - ~azure.mgmt.datafactory.models.OraclePartitionOption - :param partition_settings: The settings that will be leveraged for Oracle - source partitioning. - :type partition_settings: - ~azure.mgmt.datafactory.models.OraclePartitionSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, oracle_reader_query=None, query_timeout=None, partition_option=None, partition_settings=None, **kwargs) -> None: - super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.oracle_reader_query = oracle_reader_query - self.query_timeout = query_timeout - self.partition_option = partition_option - self.partition_settings = partition_settings - self.type = 'OracleSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py deleted file mode 100644 index c76b5ced3e5c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class OracleTableDataset(Dataset): - """The on-premises Oracle database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. 
Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param oracle_table_dataset_schema: The schema name of the on-premises - Oracle database. Type: string (or Expression with resultType string). - :type oracle_table_dataset_schema: object - :param table: The table name of the on-premises Oracle database. Type: - string (or Expression with resultType string). - :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'oracle_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(OracleTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.oracle_table_dataset_schema = kwargs.get('oracle_table_dataset_schema', None) - self.table = kwargs.get('table', None) - self.type = 'OracleTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py deleted file mode 100644 index b588fbac5244..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class OracleTableDataset(Dataset): - """The on-premises Oracle database dataset. - - All required parameters must be populated in order to send to Azure. 
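
# Since table_name is being retired in favor of the schema + table pair (see
# the docstrings above), a brief sketch using the newer properties; all names
# are hypothetical.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference,
    OracleTableDataset,
)

oracle_table = OracleTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='OracleDbLS'),
    oracle_table_dataset_schema='SALES',  # serialized as typeProperties.schema
    table='ORDERS',
)
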
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param oracle_table_dataset_schema: The schema name of the on-premises - Oracle database. Type: string (or Expression with resultType string). - :type oracle_table_dataset_schema: object - :param table: The table name of the on-premises Oracle database. Type: - string (or Expression with resultType string). - :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'oracle_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, oracle_table_dataset_schema=None, table=None, **kwargs) -> None: - super(OracleTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.oracle_table_dataset_schema = oracle_table_dataset_schema - self.table = table - self.type = 'OracleTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format.py deleted file mode 100644 index 8f0a0322062c..000000000000 --- 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .dataset_storage_format import DatasetStorageFormat
-
-
-class OrcFormat(DatasetStorageFormat):
- """The data stored in Optimized Row Columnar (ORC) format.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param serializer: Serializer. Type: string (or Expression with resultType
- string).
- :type serializer: object
- :param deserializer: Deserializer. Type: string (or Expression with
- resultType string).
- :type deserializer: object
- :param type: Required. Constant filled by server.
- :type type: str
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'serializer': {'key': 'serializer', 'type': 'object'},
- 'deserializer': {'key': 'deserializer', 'type': 'object'},
- 'type': {'key': 'type', 'type': 'str'},
- }
-
- def __init__(self, **kwargs):
- super(OrcFormat, self).__init__(**kwargs)
- self.type = 'OrcFormat'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format_py3.py
deleted file mode 100644
index 40a0e389ccc3..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format_py3.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .dataset_storage_format_py3 import DatasetStorageFormat
-
-
-class OrcFormat(DatasetStorageFormat):
- """The data stored in Optimized Row Columnar (ORC) format.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param serializer: Serializer. Type: string (or Expression with resultType
- string).
- :type serializer: object
- :param deserializer: Deserializer. Type: string (or Expression with
- resultType string).
- :type deserializer: object
- :param type: Required. Constant filled by server.
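
# A minimal sketch of the storage format documented above; serializer and
# deserializer are optional expressions and are left unset here. The format
# would typically be attached via a file-based dataset's format property.
from azure.mgmt.datafactory.models import OrcFormat

orc_format = OrcFormat()
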
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, **kwargs) -> None: - super(OrcFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) - self.type = 'OrcFormat' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification.py deleted file mode 100644 index aef855d955f0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification.py +++ /dev/null @@ -1,39 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class ParameterSpecification(Model): - """Definition of a single parameter for an entity. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Parameter type. Possible values include: 'Object', - 'String', 'Int', 'Float', 'Bool', 'Array', 'SecureString' - :type type: str or ~azure.mgmt.datafactory.models.ParameterType - :param default_value: Default value of parameter. - :type default_value: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'default_value': {'key': 'defaultValue', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ParameterSpecification, self).__init__(**kwargs) - self.type = kwargs.get('type', None) - self.default_value = kwargs.get('default_value', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification_py3.py deleted file mode 100644 index d5b6f981d365..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification_py3.py +++ /dev/null @@ -1,39 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class ParameterSpecification(Model): - """Definition of a single parameter for an entity. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Parameter type. 
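
# A short sketch of declaring entity parameters with the model documented
# above; the parameter names and default values are illustrative.
from azure.mgmt.datafactory.models import ParameterSpecification

parameters = {
    'schemaName': ParameterSpecification(type='String', default_value='dbo'),
    'retryCount': ParameterSpecification(type='Int', default_value=3),
}
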
Possible values include: 'Object', - 'String', 'Int', 'Float', 'Bool', 'Array', 'SecureString' - :type type: str or ~azure.mgmt.datafactory.models.ParameterType - :param default_value: Default value of parameter. - :type default_value: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'default_value': {'key': 'defaultValue', 'type': 'object'}, - } - - def __init__(self, *, type, default_value=None, **kwargs) -> None: - super(ParameterSpecification, self).__init__(**kwargs) - self.type = type - self.default_value = default_value diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset.py deleted file mode 100644 index ffaf8e1f6d93..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset.py +++ /dev/null @@ -1,76 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class ParquetDataset(Dataset): - """Parquet dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param location: Required. The location of the parquet storage. 
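
# A hedged sketch of the Parquet dataset documented above; the location type
# discriminator, paths, and codec value are illustrative assumptions rather
# than values taken from this patch.
from azure.mgmt.datafactory.models import (
    DatasetLocation,
    LinkedServiceReference,
    ParquetDataset,
)

parquet_dataset = ParquetDataset(
    linked_service_name=LinkedServiceReference(reference_name='BlobStorageLS'),
    location=DatasetLocation(
        type='AzureBlobStorageLocation',  # hypothetical location kind
        folder_path='raw/events',
        file_name='events.parquet',
    ),
    compression_codec='snappy',           # hypothetical codec value
)
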
- :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param compression_codec: - :type compression_codec: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'location': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ParquetDataset, self).__init__(**kwargs) - self.location = kwargs.get('location', None) - self.compression_codec = kwargs.get('compression_codec', None) - self.type = 'Parquet' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset_py3.py deleted file mode 100644 index 4d754450ce15..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset_py3.py +++ /dev/null @@ -1,76 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class ParquetDataset(Dataset): - """Parquet dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param location: Required. 
The location of the parquet storage. - :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param compression_codec: - :type compression_codec: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'location': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, compression_codec=None, **kwargs) -> None: - super(ParquetDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.location = location - self.compression_codec = compression_codec - self.type = 'Parquet' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format.py deleted file mode 100644 index d742ff24b522..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_storage_format import DatasetStorageFormat - - -class ParquetFormat(DatasetStorageFormat): - """The data stored in Parquet format. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param serializer: Serializer. Type: string (or Expression with resultType - string). - :type serializer: object - :param deserializer: Deserializer. Type: string (or Expression with - resultType string). - :type deserializer: object - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(ParquetFormat, self).__init__(**kwargs) - self.type = 'ParquetFormat' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format_py3.py deleted file mode 100644 index 36a6f5c88c4d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format_py3.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_storage_format_py3 import DatasetStorageFormat - - -class ParquetFormat(DatasetStorageFormat): - """The data stored in Parquet format. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param serializer: Serializer. Type: string (or Expression with resultType - string). - :type serializer: object - :param deserializer: Deserializer. Type: string (or Expression with - resultType string). - :type deserializer: object - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, **kwargs) -> None: - super(ParquetFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) - self.type = 'ParquetFormat' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py deleted file mode 100644 index dea3e0f8fc52..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py +++ /dev/null @@ -1,65 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink import CopySink - - -class ParquetSink(CopySink): - """A copy activity Parquet sink. 
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: Parquet store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, - } - - def __init__(self, **kwargs): - super(ParquetSink, self).__init__(**kwargs) - self.store_settings = kwargs.get('store_settings', None) - self.type = 'ParquetSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py deleted file mode 100644 index 463044fef83f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py +++ /dev/null @@ -1,65 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class ParquetSink(CopySink): - """A copy activity Parquet sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. 
Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: Parquet store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: - super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.store_settings = store_settings - self.type = 'ParquetSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py deleted file mode 100644 index ab888c7361a2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py +++ /dev/null @@ -1,56 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class ParquetSource(CopySource): - """A copy activity Parquet source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. 
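# A minimal sketch of the ParquetSink model above. The retry-wait string
# follows the documented ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))
# pattern; store_settings (a StoreWriteSettings) is omitted here.
from azure.mgmt.datafactory.models import ParquetSink

parquet_sink = ParquetSink(
    write_batch_size=10000,        # integer, minimum 0
    sink_retry_count=3,
    sink_retry_wait='00:00:30',    # d.hh:mm:ss-style string or Expression
)
assert parquet_sink.type == 'ParquetSink'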
Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: Parquet store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - } - - def __init__(self, **kwargs): - super(ParquetSource, self).__init__(**kwargs) - self.store_settings = kwargs.get('store_settings', None) - self.type = 'ParquetSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py deleted file mode 100644 index 332a7b9b8c5e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py +++ /dev/null @@ -1,56 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class ParquetSource(CopySource): - """A copy activity Parquet source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: Parquet store settings. 
- :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: - super(ParquetSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.store_settings = store_settings - self.type = 'ParquetSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service.py deleted file mode 100644 index d7ae0bc075e7..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service.py +++ /dev/null @@ -1,92 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class PaypalLinkedService(LinkedService): - """Paypal Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The URL of the PayPal instance. (i.e. - api.sandbox.paypal.com) - :type host: object - :param client_id: Required. The client ID associated with your PayPal - application. - :type client_id: object - :param client_secret: The client secret associated with your PayPal - application. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. 
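# A sketch pairing the ParquetSource above with the ParquetSink from earlier
# in this patch inside a copy activity. It assumes the CopyActivity model
# defined elsewhere in this package requires name, source, and sink; dataset
# references and store settings are omitted for brevity.
from azure.mgmt.datafactory.models import CopyActivity, ParquetSink, ParquetSource

copy_parquet = CopyActivity(
    name='CopyParquetFiles',
    source=ParquetSource(max_concurrent_connections=4),
    sink=ParquetSink(write_batch_size=10000),
)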
- :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(PaypalLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Paypal' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service_py3.py deleted file mode 100644 index c11cda7a52f3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service_py3.py +++ /dev/null @@ -1,92 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class PaypalLinkedService(LinkedService): - """Paypal Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. 
- :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The URL of the PayPal instance. (i.e. - api.sandbox.paypal.com) - :type host: object - :param client_id: Required. The client ID associated with your PayPal - application. - :type client_id: object - :param client_secret: The client secret associated with your PayPal - application. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(PaypalLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.client_id = client_id - self.client_secret = client_secret - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - 
self.encrypted_credential = encrypted_credential - self.type = 'Paypal' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset.py deleted file mode 100644 index d0fdc678841b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class PaypalObjectDataset(Dataset): - """Paypal Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). 
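# A minimal sketch of the PaypalLinkedService above: host and client_id are
# required. SecureString (a SecretBase defined elsewhere in this package) is
# assumed for the secret; all values are placeholders.
from azure.mgmt.datafactory.models import PaypalLinkedService, SecureString

paypal_ls = PaypalLinkedService(
    host='api.sandbox.paypal.com',
    client_id='<paypal-client-id>',
    client_secret=SecureString(value='<paypal-client-secret>'),
    use_encrypted_endpoints=True,
)
assert paypal_ls.type == 'Paypal'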
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(PaypalObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'PaypalObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset_py3.py deleted file mode 100644 index 55df7c97166d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class PaypalObjectDataset(Dataset): - """Paypal Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). 
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(PaypalObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'PaypalObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source.py deleted file mode 100644 index 94cdbccae6ee..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class PaypalSource(CopySource): - """A copy activity Paypal Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
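# A minimal sketch of the PaypalObjectDataset above: linked_service_name is
# the only caller-supplied required argument; table_name is an
# Expression-or-string placeholder.
from azure.mgmt.datafactory.models import LinkedServiceReference, PaypalObjectDataset

paypal_ds = PaypalObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='PaypalLS'),
    table_name='transactions',     # serialized as typeProperties.tableName
)
assert paypal_ds.type == 'PaypalObject'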
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(PaypalSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'PaypalSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source_py3.py deleted file mode 100644 index 05730d0ae067..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class PaypalSource(CopySource): - """A copy activity Paypal Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'PaypalSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service.py deleted file mode 100644 index 308a8e4cf592..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service.py +++ /dev/null @@ -1,121 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class PhoenixLinkedService(LinkedService): - """Phoenix server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The IP address or host name of the Phoenix server. - (i.e. 192.168.222.160) - :type host: object - :param port: The TCP port that the Phoenix server uses to listen for - client connections. The default value is 8765. - :type port: object - :param http_path: The partial URL corresponding to the Phoenix server. - (i.e. /gateway/sandbox/phoenix/version). The default value is hbasephoenix - if using WindowsAzureHDInsightService. - :type http_path: object - :param authentication_type: Required. The authentication mechanism used to - connect to the Phoenix server. 
Possible values include: 'Anonymous', - 'UsernameAndPassword', 'WindowsAzureHDInsightService' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.PhoenixAuthenticationType - :param username: The user name used to connect to the Phoenix server. - :type username: object - :param password: The password corresponding to the user name. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are - encrypted using SSL. The default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing - trusted CA certificates for verifying the server when connecting over SSL. - This property can only be set when using SSL on self-hosted IR. The - default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate - from the system trust store or from a specified PEM file. The default - value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a - CA-issued SSL certificate name to match the host name of the server when - connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow - self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(PhoenixLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.port = kwargs.get('port', None) - self.http_path = kwargs.get('http_path', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.username = 
kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Phoenix' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service_py3.py deleted file mode 100644 index de8210c2cc89..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service_py3.py +++ /dev/null @@ -1,121 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class PhoenixLinkedService(LinkedService): - """Phoenix server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The IP address or host name of the Phoenix server. - (i.e. 192.168.222.160) - :type host: object - :param port: The TCP port that the Phoenix server uses to listen for - client connections. The default value is 8765. - :type port: object - :param http_path: The partial URL corresponding to the Phoenix server. - (i.e. /gateway/sandbox/phoenix/version). The default value is hbasephoenix - if using WindowsAzureHDInsightService. - :type http_path: object - :param authentication_type: Required. The authentication mechanism used to - connect to the Phoenix server. Possible values include: 'Anonymous', - 'UsernameAndPassword', 'WindowsAzureHDInsightService' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.PhoenixAuthenticationType - :param username: The user name used to connect to the Phoenix server. - :type username: object - :param password: The password corresponding to the user name. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are - encrypted using SSL. The default value is false. 
- :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing - trusted CA certificates for verifying the server when connecting over SSL. - This property can only be set when using SSL on self-hosted IR. The - default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate - from the system trust store or from a specified PEM file. The default - value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a - CA-issued SSL certificate name to match the host name of the server when - connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow - self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, http_path=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: - super(PhoenixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.port = port - self.http_path = http_path - self.authentication_type = authentication_type - self.username = username - self.password = password - self.enable_ssl = enable_ssl - self.trusted_cert_path = 
trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch - self.allow_self_signed_server_cert = allow_self_signed_server_cert - self.encrypted_credential = encrypted_credential - self.type = 'Phoenix' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset.py deleted file mode 100644 index ccaa2eb49abd..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class PhoenixObjectDataset(Dataset): - """Phoenix server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of the Phoenix. Type: string (or Expression - with resultType string). - :type table: object - :param phoenix_object_dataset_schema: The schema name of the Phoenix. - Type: string (or Expression with resultType string). 
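# A minimal sketch of the PhoenixLinkedService above: host and
# authentication_type are required; the other keywords mirror the documented
# defaults. SecureString is assumed for the password; values are placeholders.
from azure.mgmt.datafactory.models import PhoenixLinkedService, SecureString

phoenix_ls = PhoenixLinkedService(
    host='192.168.222.160',
    authentication_type='UsernameAndPassword',   # a PhoenixAuthenticationType value
    port=8765,                                   # the documented default port
    username='phoenix-user',
    password=SecureString(value='<password>'),
    enable_ssl=True,
)
assert phoenix_ls.type == 'Phoenix'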
- :type phoenix_object_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'phoenix_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(PhoenixObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.phoenix_object_dataset_schema = kwargs.get('phoenix_object_dataset_schema', None) - self.type = 'PhoenixObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset_py3.py deleted file mode 100644 index cda4dc41dc22..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class PhoenixObjectDataset(Dataset): - """Phoenix server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. 
- :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of the Phoenix. Type: string (or Expression - with resultType string). - :type table: object - :param phoenix_object_dataset_schema: The schema name of the Phoenix. - Type: string (or Expression with resultType string). - :type phoenix_object_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'phoenix_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, phoenix_object_dataset_schema=None, **kwargs) -> None: - super(PhoenixObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.table = table - self.phoenix_object_dataset_schema = phoenix_object_dataset_schema - self.type = 'PhoenixObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source.py deleted file mode 100644 index 30171c6177ff..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class PhoenixSource(CopySource): - """A copy activity Phoenix server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
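# A minimal sketch of the PhoenixObjectDataset above, using the preferred
# table + phoenix_object_dataset_schema pair instead of the retired
# table_name property. Identifier values are placeholders.
from azure.mgmt.datafactory.models import LinkedServiceReference, PhoenixObjectDataset

phoenix_ds = PhoenixObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='PhoenixLS'),
    table='WEB_LOGS',                           # typeProperties.table
    phoenix_object_dataset_schema='ANALYTICS',  # serialized as typeProperties.schema
)
assert phoenix_ds.type == 'PhoenixObject'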
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source.py
deleted file mode 100644
index 30171c6177ff..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .copy_source import CopySource
-
-
-class PhoenixSource(CopySource):
-    """A copy activity Phoenix server source.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param source_retry_count: Source retry count. Type: integer (or
-     Expression with resultType integer).
-    :type source_retry_count: object
-    :param source_retry_wait: Source retry wait. Type: string (or Expression
-     with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type source_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the source data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param query: A query to retrieve data from source. Type: string (or
-     Expression with resultType string).
-    :type query: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
-        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'query': {'key': 'query', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(PhoenixSource, self).__init__(**kwargs)
-        self.query = kwargs.get('query', None)
-        self.type = 'PhoenixSource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source_py3.py
deleted file mode 100644
index 1384f59e1aa4..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source_py3.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .copy_source_py3 import CopySource
-
-
-class PhoenixSource(CopySource):
-    """A copy activity Phoenix server source.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param source_retry_count: Source retry count. Type: integer (or
-     Expression with resultType integer).
-    :type source_retry_count: object
-    :param source_retry_wait: Source retry wait. Type: string (or Expression
-     with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type source_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the source data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param query: A query to retrieve data from source. Type: string (or
-     Expression with resultType string).
-    :type query: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
-        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'query': {'key': 'query', 'type': 'object'},
-    }
-
-    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
-        super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
-        self.query = query
-        self.type = 'PhoenixSource'
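A matching sketch for the source side; query accepts a literal string or an ADF expression object, and the retry values follow the pattern documented above:

    from azure.mgmt.datafactory.models import PhoenixSource

    # Push the filter down to Phoenix; retry twice with a 30-second wait.
    source = PhoenixSource(
        query='SELECT * FROM events',
        source_retry_count=2,
        source_retry_wait='00:00:30')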
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder.py
deleted file mode 100644
index bebc05cb1824..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class PipelineFolder(Model):
-    """The folder that this Pipeline is in. If not specified, Pipeline will appear
-    at the root level.
-
-    :param name: The name of the folder that this Pipeline is in.
-    :type name: str
-    """
-
-    _attribute_map = {
-        'name': {'key': 'name', 'type': 'str'},
-    }
-
-    def __init__(self, **kwargs):
-        super(PipelineFolder, self).__init__(**kwargs)
-        self.name = kwargs.get('name', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder_py3.py
deleted file mode 100644
index 02c9b8dbbff1..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder_py3.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class PipelineFolder(Model):
-    """The folder that this Pipeline is in. If not specified, Pipeline will appear
-    at the root level.
-
-    :param name: The name of the folder that this Pipeline is in.
-    :type name: str
-    """
-
-    _attribute_map = {
-        'name': {'key': 'name', 'type': 'str'},
-    }
-
-    def __init__(self, *, name: str=None, **kwargs) -> None:
-        super(PipelineFolder, self).__init__(**kwargs)
-        self.name = name
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference.py
deleted file mode 100644
index aa8b23e62932..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class PipelineReference(Model):
-    """Pipeline reference type.
-
-    Variables are only populated by the server, and will be ignored when
-    sending a request.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :ivar type: Required. Pipeline reference type. Default value:
-     "PipelineReference" .
-    :vartype type: str
-    :param reference_name: Required. Reference pipeline name.
-    :type reference_name: str
-    :param name: Reference name.
-    :type name: str
-    """
-
-    _validation = {
-        'type': {'required': True, 'constant': True},
-        'reference_name': {'required': True},
-    }
-
-    _attribute_map = {
-        'type': {'key': 'type', 'type': 'str'},
-        'reference_name': {'key': 'referenceName', 'type': 'str'},
-        'name': {'key': 'name', 'type': 'str'},
-    }
-
-    type = "PipelineReference"
-
-    def __init__(self, **kwargs):
-        super(PipelineReference, self).__init__(**kwargs)
-        self.reference_name = kwargs.get('reference_name', None)
-        self.name = kwargs.get('name', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference_py3.py
deleted file mode 100644
index ce63f06092d1..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference_py3.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class PipelineReference(Model):
-    """Pipeline reference type.
-
-    Variables are only populated by the server, and will be ignored when
-    sending a request.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :ivar type: Required. Pipeline reference type. Default value:
-     "PipelineReference" .
-    :vartype type: str
-    :param reference_name: Required. Reference pipeline name.
-    :type reference_name: str
-    :param name: Reference name.
-    :type name: str
-    """
-
-    _validation = {
-        'type': {'required': True, 'constant': True},
-        'reference_name': {'required': True},
-    }
-
-    _attribute_map = {
-        'type': {'key': 'type', 'type': 'str'},
-        'reference_name': {'key': 'referenceName', 'type': 'str'},
-        'name': {'key': 'name', 'type': 'str'},
-    }
-
-    type = "PipelineReference"
-
-    def __init__(self, *, reference_name: str, name: str=None, **kwargs) -> None:
-        super(PipelineReference, self).__init__(**kwargs)
-        self.reference_name = reference_name
-        self.name = name
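PipelineReference is how triggers and ExecutePipeline payloads point at a pipeline by name; the type field is a class-level constant, never passed in. A small sketch with a hypothetical pipeline name:

    from azure.mgmt.datafactory.models import PipelineReference

    ref = PipelineReference(reference_name='CopyFromPhoenix')
    assert ref.type == 'PipelineReference'  # constant filled by the model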
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource.py
deleted file mode 100644
index a39deaccc87b..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource.py
+++ /dev/null
@@ -1,84 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .sub_resource import SubResource
-
-
-class PipelineResource(SubResource):
-    """Pipeline resource type.
-
-    Variables are only populated by the server, and will be ignored when
-    sending a request.
-
-    :ivar id: The resource identifier.
-    :vartype id: str
-    :ivar name: The resource name.
-    :vartype name: str
-    :ivar type: The resource type.
-    :vartype type: str
-    :ivar etag: Etag identifies change in the resource.
-    :vartype etag: str
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param description: The description of the pipeline.
-    :type description: str
-    :param activities: List of activities in pipeline.
-    :type activities: list[~azure.mgmt.datafactory.models.Activity]
-    :param parameters: List of parameters for pipeline.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param variables: List of variables for pipeline.
-    :type variables: dict[str,
-     ~azure.mgmt.datafactory.models.VariableSpecification]
-    :param concurrency: The max number of concurrent runs for the pipeline.
-    :type concurrency: int
-    :param annotations: List of tags that can be used for describing the
-     Pipeline.
-    :type annotations: list[object]
-    :param folder: The folder that this Pipeline is in. If not specified,
-     Pipeline will appear at the root level.
-    :type folder: ~azure.mgmt.datafactory.models.PipelineFolder
-    """
-
-    _validation = {
-        'id': {'readonly': True},
-        'name': {'readonly': True},
-        'type': {'readonly': True},
-        'etag': {'readonly': True},
-        'concurrency': {'minimum': 1},
-    }
-
-    _attribute_map = {
-        'id': {'key': 'id', 'type': 'str'},
-        'name': {'key': 'name', 'type': 'str'},
-        'type': {'key': 'type', 'type': 'str'},
-        'etag': {'key': 'etag', 'type': 'str'},
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'description': {'key': 'properties.description', 'type': 'str'},
-        'activities': {'key': 'properties.activities', 'type': '[Activity]'},
-        'parameters': {'key': 'properties.parameters', 'type': '{ParameterSpecification}'},
-        'variables': {'key': 'properties.variables', 'type': '{VariableSpecification}'},
-        'concurrency': {'key': 'properties.concurrency', 'type': 'int'},
-        'annotations': {'key': 'properties.annotations', 'type': '[object]'},
-        'folder': {'key': 'properties.folder', 'type': 'PipelineFolder'},
-    }
-
-    def __init__(self, **kwargs):
-        super(PipelineResource, self).__init__(**kwargs)
-        self.additional_properties = kwargs.get('additional_properties', None)
-        self.description = kwargs.get('description', None)
-        self.activities = kwargs.get('activities', None)
-        self.parameters = kwargs.get('parameters', None)
-        self.variables = kwargs.get('variables', None)
-        self.concurrency = kwargs.get('concurrency', None)
-        self.annotations = kwargs.get('annotations', None)
-        self.folder = kwargs.get('folder', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource_paged.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource_paged.py
deleted file mode 100644
index a7c7ed553c07..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource_paged.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.paging import Paged
-
-
-class PipelineResourcePaged(Paged):
-    """
-    A paging container for iterating over a list of :class:`PipelineResource <azure.mgmt.datafactory.models.PipelineResource>` object
-    """
-
-    _attribute_map = {
-        'next_link': {'key': 'nextLink', 'type': 'str'},
-        'current_page': {'key': 'value', 'type': '[PipelineResource]'}
-    }
-
-    def __init__(self, *args, **kwargs):
-
-        super(PipelineResourcePaged, self).__init__(*args, **kwargs)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource_py3.py
deleted file mode 100644
index 8299cdb73887..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource_py3.py
+++ /dev/null
@@ -1,84 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .sub_resource_py3 import SubResource
-
-
-class PipelineResource(SubResource):
-    """Pipeline resource type.
-
-    Variables are only populated by the server, and will be ignored when
-    sending a request.
-
-    :ivar id: The resource identifier.
-    :vartype id: str
-    :ivar name: The resource name.
-    :vartype name: str
-    :ivar type: The resource type.
-    :vartype type: str
-    :ivar etag: Etag identifies change in the resource.
-    :vartype etag: str
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param description: The description of the pipeline.
-    :type description: str
-    :param activities: List of activities in pipeline.
-    :type activities: list[~azure.mgmt.datafactory.models.Activity]
-    :param parameters: List of parameters for pipeline.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param variables: List of variables for pipeline.
-    :type variables: dict[str,
-     ~azure.mgmt.datafactory.models.VariableSpecification]
-    :param concurrency: The max number of concurrent runs for the pipeline.
-    :type concurrency: int
-    :param annotations: List of tags that can be used for describing the
-     Pipeline.
-    :type annotations: list[object]
-    :param folder: The folder that this Pipeline is in. If not specified,
-     Pipeline will appear at the root level.
-    :type folder: ~azure.mgmt.datafactory.models.PipelineFolder
-    """
-
-    _validation = {
-        'id': {'readonly': True},
-        'name': {'readonly': True},
-        'type': {'readonly': True},
-        'etag': {'readonly': True},
-        'concurrency': {'minimum': 1},
-    }
-
-    _attribute_map = {
-        'id': {'key': 'id', 'type': 'str'},
-        'name': {'key': 'name', 'type': 'str'},
-        'type': {'key': 'type', 'type': 'str'},
-        'etag': {'key': 'etag', 'type': 'str'},
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'description': {'key': 'properties.description', 'type': 'str'},
-        'activities': {'key': 'properties.activities', 'type': '[Activity]'},
-        'parameters': {'key': 'properties.parameters', 'type': '{ParameterSpecification}'},
-        'variables': {'key': 'properties.variables', 'type': '{VariableSpecification}'},
-        'concurrency': {'key': 'properties.concurrency', 'type': 'int'},
-        'annotations': {'key': 'properties.annotations', 'type': '[object]'},
-        'folder': {'key': 'properties.folder', 'type': 'PipelineFolder'},
-    }
-
-    def __init__(self, *, additional_properties=None, description: str=None, activities=None, parameters=None, variables=None, concurrency: int=None, annotations=None, folder=None, **kwargs) -> None:
-        super(PipelineResource, self).__init__(**kwargs)
-        self.additional_properties = additional_properties
-        self.description = description
-        self.activities = activities
-        self.parameters = parameters
-        self.variables = variables
-        self.concurrency = concurrency
-        self.annotations = annotations
-        self.folder = folder
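Tying the last few models together: a PipelineResource wraps the activities plus the optional folder and concurrency settings, and is what the pipelines operations accept. A hedged sketch; the client variable and every name here are assumptions, not part of this patch:

    from azure.mgmt.datafactory.models import (
        PipelineFolder, PipelineResource)

    pipeline = PipelineResource(
        description='Nightly load',
        activities=[],                         # would hold Activity models
        concurrency=1,                         # minimum allowed value is 1
        folder=PipelineFolder(name='ingest'))  # shows under /ingest in the UX

    # client is an assumed DataFactoryManagementClient instance:
    # client.pipelines.create_or_update('my-rg', 'my-factory', 'nightly', pipeline)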
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run.py
deleted file mode 100644
index a2407bd9835f..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run.py
+++ /dev/null
@@ -1,99 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class PipelineRun(Model):
-    """Information about a pipeline run.
-
-    Variables are only populated by the server, and will be ignored when
-    sending a request.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :ivar run_id: Identifier of a run.
-    :vartype run_id: str
-    :ivar run_group_id: Identifier that correlates all the recovery runs of a
-     pipeline run.
-    :vartype run_group_id: str
-    :ivar is_latest: Indicates if the recovered pipeline run is the latest in
-     its group.
-    :vartype is_latest: bool
-    :ivar pipeline_name: The pipeline name.
-    :vartype pipeline_name: str
-    :ivar parameters: The full or partial list of parameter name, value pair
-     used in the pipeline run.
-    :vartype parameters: dict[str, str]
-    :ivar invoked_by: Entity that started the pipeline run.
-    :vartype invoked_by: ~azure.mgmt.datafactory.models.PipelineRunInvokedBy
-    :ivar last_updated: The last updated timestamp for the pipeline run event
-     in ISO8601 format.
-    :vartype last_updated: datetime
-    :ivar run_start: The start time of a pipeline run in ISO8601 format.
-    :vartype run_start: datetime
-    :ivar run_end: The end time of a pipeline run in ISO8601 format.
-    :vartype run_end: datetime
-    :ivar duration_in_ms: The duration of a pipeline run.
-    :vartype duration_in_ms: int
-    :ivar status: The status of a pipeline run.
-    :vartype status: str
-    :ivar message: The message from a pipeline run.
-    :vartype message: str
-    """
-
-    _validation = {
-        'run_id': {'readonly': True},
-        'run_group_id': {'readonly': True},
-        'is_latest': {'readonly': True},
-        'pipeline_name': {'readonly': True},
-        'parameters': {'readonly': True},
-        'invoked_by': {'readonly': True},
-        'last_updated': {'readonly': True},
-        'run_start': {'readonly': True},
-        'run_end': {'readonly': True},
-        'duration_in_ms': {'readonly': True},
-        'status': {'readonly': True},
-        'message': {'readonly': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'run_id': {'key': 'runId', 'type': 'str'},
-        'run_group_id': {'key': 'runGroupId', 'type': 'str'},
-        'is_latest': {'key': 'isLatest', 'type': 'bool'},
-        'pipeline_name': {'key': 'pipelineName', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{str}'},
-        'invoked_by': {'key': 'invokedBy', 'type': 'PipelineRunInvokedBy'},
-        'last_updated': {'key': 'lastUpdated', 'type': 'iso-8601'},
-        'run_start': {'key': 'runStart', 'type': 'iso-8601'},
-        'run_end': {'key': 'runEnd', 'type': 'iso-8601'},
-        'duration_in_ms': {'key': 'durationInMs', 'type': 'int'},
-        'status': {'key': 'status', 'type': 'str'},
-        'message': {'key': 'message', 'type': 'str'},
-    }
-
-    def __init__(self, **kwargs):
-        super(PipelineRun, self).__init__(**kwargs)
-        self.additional_properties = kwargs.get('additional_properties', None)
-        self.run_id = None
-        self.run_group_id = None
-        self.is_latest = None
-        self.pipeline_name = None
-        self.parameters = None
-        self.invoked_by = None
-        self.last_updated = None
-        self.run_start = None
-        self.run_end = None
-        self.duration_in_ms = None
-        self.status = None
-        self.message = None
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by.py
deleted file mode 100644
index acefb80fd078..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class PipelineRunInvokedBy(Model):
-    """Provides entity name and id that started the pipeline run.
-
-    Variables are only populated by the server, and will be ignored when
-    sending a request.
-
-    :ivar name: Name of the entity that started the pipeline run.
-    :vartype name: str
-    :ivar id: The ID of the entity that started the run.
-    :vartype id: str
-    :ivar invoked_by_type: The type of the entity that started the run.
-    :vartype invoked_by_type: str
-    """
-
-    _validation = {
-        'name': {'readonly': True},
-        'id': {'readonly': True},
-        'invoked_by_type': {'readonly': True},
-    }
-
-    _attribute_map = {
-        'name': {'key': 'name', 'type': 'str'},
-        'id': {'key': 'id', 'type': 'str'},
-        'invoked_by_type': {'key': 'invokedByType', 'type': 'str'},
-    }
-
-    def __init__(self, **kwargs):
-        super(PipelineRunInvokedBy, self).__init__(**kwargs)
-        self.name = None
-        self.id = None
-        self.invoked_by_type = None
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by_py3.py
deleted file mode 100644
index c954a18b8a67..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by_py3.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class PipelineRunInvokedBy(Model):
-    """Provides entity name and id that started the pipeline run.
-
-    Variables are only populated by the server, and will be ignored when
-    sending a request.
-
-    :ivar name: Name of the entity that started the pipeline run.
-    :vartype name: str
-    :ivar id: The ID of the entity that started the run.
-    :vartype id: str
-    :ivar invoked_by_type: The type of the entity that started the run.
-    :vartype invoked_by_type: str
-    """
-
-    _validation = {
-        'name': {'readonly': True},
-        'id': {'readonly': True},
-        'invoked_by_type': {'readonly': True},
-    }
-
-    _attribute_map = {
-        'name': {'key': 'name', 'type': 'str'},
-        'id': {'key': 'id', 'type': 'str'},
-        'invoked_by_type': {'key': 'invokedByType', 'type': 'str'},
-    }
-
-    def __init__(self, **kwargs) -> None:
-        super(PipelineRunInvokedBy, self).__init__(**kwargs)
-        self.name = None
-        self.id = None
-        self.invoked_by_type = None
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_py3.py
deleted file mode 100644
index 33e0f23f24ac..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_py3.py
+++ /dev/null
@@ -1,99 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class PipelineRun(Model):
-    """Information about a pipeline run.
-
-    Variables are only populated by the server, and will be ignored when
-    sending a request.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :ivar run_id: Identifier of a run.
-    :vartype run_id: str
-    :ivar run_group_id: Identifier that correlates all the recovery runs of a
-     pipeline run.
-    :vartype run_group_id: str
-    :ivar is_latest: Indicates if the recovered pipeline run is the latest in
-     its group.
-    :vartype is_latest: bool
-    :ivar pipeline_name: The pipeline name.
-    :vartype pipeline_name: str
-    :ivar parameters: The full or partial list of parameter name, value pair
-     used in the pipeline run.
-    :vartype parameters: dict[str, str]
-    :ivar invoked_by: Entity that started the pipeline run.
-    :vartype invoked_by: ~azure.mgmt.datafactory.models.PipelineRunInvokedBy
-    :ivar last_updated: The last updated timestamp for the pipeline run event
-     in ISO8601 format.
-    :vartype last_updated: datetime
-    :ivar run_start: The start time of a pipeline run in ISO8601 format.
-    :vartype run_start: datetime
-    :ivar run_end: The end time of a pipeline run in ISO8601 format.
-    :vartype run_end: datetime
-    :ivar duration_in_ms: The duration of a pipeline run.
-    :vartype duration_in_ms: int
-    :ivar status: The status of a pipeline run.
-    :vartype status: str
-    :ivar message: The message from a pipeline run.
-    :vartype message: str
-    """
-
-    _validation = {
-        'run_id': {'readonly': True},
-        'run_group_id': {'readonly': True},
-        'is_latest': {'readonly': True},
-        'pipeline_name': {'readonly': True},
-        'parameters': {'readonly': True},
-        'invoked_by': {'readonly': True},
-        'last_updated': {'readonly': True},
-        'run_start': {'readonly': True},
-        'run_end': {'readonly': True},
-        'duration_in_ms': {'readonly': True},
-        'status': {'readonly': True},
-        'message': {'readonly': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'run_id': {'key': 'runId', 'type': 'str'},
-        'run_group_id': {'key': 'runGroupId', 'type': 'str'},
-        'is_latest': {'key': 'isLatest', 'type': 'bool'},
-        'pipeline_name': {'key': 'pipelineName', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{str}'},
-        'invoked_by': {'key': 'invokedBy', 'type': 'PipelineRunInvokedBy'},
-        'last_updated': {'key': 'lastUpdated', 'type': 'iso-8601'},
-        'run_start': {'key': 'runStart', 'type': 'iso-8601'},
-        'run_end': {'key': 'runEnd', 'type': 'iso-8601'},
-        'duration_in_ms': {'key': 'durationInMs', 'type': 'int'},
-        'status': {'key': 'status', 'type': 'str'},
-        'message': {'key': 'message', 'type': 'str'},
-    }
-
-    def __init__(self, *, additional_properties=None, **kwargs) -> None:
-        super(PipelineRun, self).__init__(**kwargs)
-        self.additional_properties = additional_properties
-        self.run_id = None
-        self.run_group_id = None
-        self.is_latest = None
-        self.pipeline_name = None
-        self.parameters = None
-        self.invoked_by = None
-        self.last_updated = None
-        self.run_start = None
-        self.run_end = None
-        self.duration_in_ms = None
-        self.status = None
-        self.message = None
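Every field on PipelineRun (and PipelineRunInvokedBy) is server-populated, so instances normally come back from the run APIs rather than being built by hand. A sketch of reading one; the client and run id are assumed:

    # client is an assumed DataFactoryManagementClient; run_id would come
    # from a create_run call or a runs query.
    run = client.pipeline_runs.get('my-rg', 'my-factory', run_id)
    print(run.pipeline_name, run.status, run.duration_in_ms)
    if run.invoked_by:  # PipelineRunInvokedBy, also read-only
        print('started by', run.invoked_by.name, run.invoked_by.invoked_by_type)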
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response.py
deleted file mode 100644
index c4591c5467ba..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class PipelineRunsQueryResponse(Model):
-    """A list pipeline runs.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param value: Required. List of pipeline runs.
-    :type value: list[~azure.mgmt.datafactory.models.PipelineRun]
-    :param continuation_token: The continuation token for getting the next
-     page of results, if any remaining results exist, null otherwise.
-    :type continuation_token: str
-    """
-
-    _validation = {
-        'value': {'required': True},
-    }
-
-    _attribute_map = {
-        'value': {'key': 'value', 'type': '[PipelineRun]'},
-        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
-    }
-
-    def __init__(self, **kwargs):
-        super(PipelineRunsQueryResponse, self).__init__(**kwargs)
-        self.value = kwargs.get('value', None)
-        self.continuation_token = kwargs.get('continuation_token', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response_py3.py
deleted file mode 100644
index fbc689ec1632..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response_py3.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class PipelineRunsQueryResponse(Model):
-    """A list pipeline runs.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param value: Required. List of pipeline runs.
-    :type value: list[~azure.mgmt.datafactory.models.PipelineRun]
-    :param continuation_token: The continuation token for getting the next
-     page of results, if any remaining results exist, null otherwise.
-    :type continuation_token: str
-    """
-
-    _validation = {
-        'value': {'required': True},
-    }
-
-    _attribute_map = {
-        'value': {'key': 'value', 'type': '[PipelineRun]'},
-        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
-    }
-
-    def __init__(self, *, value, continuation_token: str=None, **kwargs) -> None:
-        super(PipelineRunsQueryResponse, self).__init__(**kwargs)
-        self.value = value
-        self.continuation_token = continuation_token
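This response model carries its own continuation token rather than using the Paged containers. A hedged sketch of draining it, assuming the RunFilterParameters model and query_by_factory operation from the same SDK version behave as their docstrings suggest:

    import datetime
    from azure.mgmt.datafactory.models import RunFilterParameters

    filters = RunFilterParameters(
        last_updated_after=datetime.datetime(2019, 6, 1),
        last_updated_before=datetime.datetime(2019, 6, 7))
    runs = []
    while True:
        page = client.pipeline_runs.query_by_factory('my-rg', 'my-factory', filters)
        runs.extend(page.value)
        if not page.continuation_token:
            break  # null token means no results remain
        filters.continuation_token = page.continuation_token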
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings.py
deleted file mode 100644
index 5a261d8fea84..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class PolybaseSettings(Model):
-    """PolyBase settings.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param reject_type: Reject type. Possible values include: 'value',
-     'percentage'
-    :type reject_type: str or
-     ~azure.mgmt.datafactory.models.PolybaseSettingsRejectType
-    :param reject_value: Specifies the value or the percentage of rows that
-     can be rejected before the query fails. Type: number (or Expression with
-     resultType number), minimum: 0.
-    :type reject_value: object
-    :param reject_sample_value: Determines the number of rows to attempt to
-     retrieve before the PolyBase recalculates the percentage of rejected rows.
-     Type: integer (or Expression with resultType integer), minimum: 0.
-    :type reject_sample_value: object
-    :param use_type_default: Specifies how to handle missing values in
-     delimited text files when PolyBase retrieves data from the text file.
-     Type: boolean (or Expression with resultType boolean).
-    :type use_type_default: object
-    """
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'reject_type': {'key': 'rejectType', 'type': 'str'},
-        'reject_value': {'key': 'rejectValue', 'type': 'object'},
-        'reject_sample_value': {'key': 'rejectSampleValue', 'type': 'object'},
-        'use_type_default': {'key': 'useTypeDefault', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(PolybaseSettings, self).__init__(**kwargs)
-        self.additional_properties = kwargs.get('additional_properties', None)
-        self.reject_type = kwargs.get('reject_type', None)
-        self.reject_value = kwargs.get('reject_value', None)
-        self.reject_sample_value = kwargs.get('reject_sample_value', None)
-        self.use_type_default = kwargs.get('use_type_default', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings_py3.py
deleted file mode 100644
index baae78b14c5f..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings_py3.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class PolybaseSettings(Model):
-    """PolyBase settings.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param reject_type: Reject type. Possible values include: 'value',
-     'percentage'
-    :type reject_type: str or
-     ~azure.mgmt.datafactory.models.PolybaseSettingsRejectType
-    :param reject_value: Specifies the value or the percentage of rows that
-     can be rejected before the query fails. Type: number (or Expression with
-     resultType number), minimum: 0.
-    :type reject_value: object
-    :param reject_sample_value: Determines the number of rows to attempt to
-     retrieve before the PolyBase recalculates the percentage of rejected rows.
-     Type: integer (or Expression with resultType integer), minimum: 0.
-    :type reject_sample_value: object
-    :param use_type_default: Specifies how to handle missing values in
-     delimited text files when PolyBase retrieves data from the text file.
-     Type: boolean (or Expression with resultType boolean).
-    :type use_type_default: object
-    """
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'reject_type': {'key': 'rejectType', 'type': 'str'},
-        'reject_value': {'key': 'rejectValue', 'type': 'object'},
-        'reject_sample_value': {'key': 'rejectSampleValue', 'type': 'object'},
-        'use_type_default': {'key': 'useTypeDefault', 'type': 'object'},
-    }
-
-    def __init__(self, *, additional_properties=None, reject_type=None, reject_value=None, reject_sample_value=None, use_type_default=None, **kwargs) -> None:
-        super(PolybaseSettings, self).__init__(**kwargs)
-        self.additional_properties = additional_properties
-        self.reject_type = reject_type
-        self.reject_value = reject_value
-        self.reject_sample_value = reject_sample_value
-        self.use_type_default = use_type_default
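PolybaseSettings only takes effect when attached to a SQL DW sink with PolyBase enabled; a sketch under that assumption (SqlDWSink's allow_poly_base/poly_base_settings fields come from the same models package):

    from azure.mgmt.datafactory.models import PolybaseSettings, SqlDWSink

    sink = SqlDWSink(
        allow_poly_base=True,
        poly_base_settings=PolybaseSettings(
            reject_type='percentage',  # or 'value'
            reject_value=5.0,          # fail once more than 5% of rows reject
            reject_sample_value=1000,  # rows read per percentage recalculation
            use_type_default=False))   # keep NULLs for missing values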
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service.py
deleted file mode 100644
index f8ce5bd0803e..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service.py
+++ /dev/null
@@ -1,70 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .linked_service import LinkedService
-
-
-class PostgreSqlLinkedService(LinkedService):
-    """Linked service for PostgreSQL data source.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param connection_string: Required. The connection string.
-    :type connection_string: ~azure.mgmt.datafactory.models.SecretBase
-    :param password: The Azure key vault secret reference of password in
-     connection string.
-    :type password:
-     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
-    :type encrypted_credential: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-        'connection_string': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
-        'description': {'key': 'description', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'SecretBase'},
-        'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'},
-        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(PostgreSqlLinkedService, self).__init__(**kwargs)
-        self.connection_string = kwargs.get('connection_string', None)
-        self.password = kwargs.get('password', None)
-        self.encrypted_credential = kwargs.get('encrypted_credential', None)
-        self.type = 'PostgreSql'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service_py3.py
deleted file mode 100644
index 0221aa620064..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service_py3.py
+++ /dev/null
@@ -1,70 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .linked_service_py3 import LinkedService
-
-
-class PostgreSqlLinkedService(LinkedService):
-    """Linked service for PostgreSQL data source.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param connection_string: Required. The connection string.
-    :type connection_string: ~azure.mgmt.datafactory.models.SecretBase
-    :param password: The Azure key vault secret reference of password in
-     connection string.
-    :type password:
-     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
-    :type encrypted_credential: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-        'connection_string': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
-        'description': {'key': 'description', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'SecretBase'},
-        'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'},
-        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
-    }
-
-    def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, encrypted_credential=None, **kwargs) -> None:
-        super(PostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
-        self.connection_string = connection_string
-        self.password = password
-        self.encrypted_credential = encrypted_credential
-        self.type = 'PostgreSql'
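connection_string is typed as SecretBase, so it can be an inline SecureString or an AzureKeyVaultSecretReference. A sketch with the inline form; all values and names are hypothetical:

    from azure.mgmt.datafactory.models import (
        PostgreSqlLinkedService, SecureString)

    ls = PostgreSqlLinkedService(
        connection_string=SecureString(
            value='host=pg.example.com port=5432 database=crm uid=loader'))

    # Wrapped in a LinkedServiceResource and pushed via an assumed client:
    # client.linked_services.create_or_update(
    #     'my-rg', 'my-factory', 'ls_postgres', LinkedServiceResource(properties=ls))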
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source.py
deleted file mode 100644
index 51dd25b25c60..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .copy_source import CopySource
-
-
-class PostgreSqlSource(CopySource):
-    """A copy activity source for PostgreSQL databases.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param source_retry_count: Source retry count. Type: integer (or
-     Expression with resultType integer).
-    :type source_retry_count: object
-    :param source_retry_wait: Source retry wait. Type: string (or Expression
-     with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type source_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the source data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param query: Database query. Type: string (or Expression with resultType
-     string).
-    :type query: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
-        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'query': {'key': 'query', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(PostgreSqlSource, self).__init__(**kwargs)
-        self.query = kwargs.get('query', None)
-        self.type = 'PostgreSqlSource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source_py3.py
deleted file mode 100644
index 8aa12e4bdf8d..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source_py3.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .copy_source_py3 import CopySource
-
-
-class PostgreSqlSource(CopySource):
-    """A copy activity source for PostgreSQL databases.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param source_retry_count: Source retry count. Type: integer (or
-     Expression with resultType integer).
-    :type source_retry_count: object
-    :param source_retry_wait: Source retry wait. Type: string (or Expression
-     with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type source_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the source data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param query: Database query. Type: string (or Expression with resultType
-     string).
-    :type query: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
-        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'query': {'key': 'query', 'type': 'object'},
-    }
-
-    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
-        super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
-        self.query = query
-        self.type = 'PostgreSqlSource'
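The matching copy source; a query here overrides whatever table the dataset points at:

    from azure.mgmt.datafactory.models import PostgreSqlSource

    source = PostgreSqlSource(
        query='SELECT id, email FROM public.customers',
        max_concurrent_connections=4)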
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset.py
deleted file mode 100644
index 1c61e808abc9..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset.py
+++ /dev/null
@@ -1,72 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .dataset import Dataset
-
-
-class PostgreSqlTableDataset(Dataset):
-    """The PostgreSQL table dataset.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param description: Dataset description.
-    :type description: str
-    :param structure: Columns that define the structure of the dataset. Type:
-     array (or Expression with resultType array), itemType: DatasetDataElement.
-    :type structure: object
-    :param schema: Columns that define the physical type schema of the
-     dataset. Type: array (or Expression with resultType array), itemType:
-     DatasetSchemaDataElement.
-    :type schema: object
-    :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param parameters: Parameters for dataset.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     Dataset.
-    :type annotations: list[object]
-    :param folder: The folder that this Dataset is in. If not specified,
-     Dataset will appear at the root level.
-    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param table_name: The PostgreSQL table name. Type: string (or Expression
-     with resultType string).
-    :type table_name: object
-    """
-
-    _validation = {
-        'linked_service_name': {'required': True},
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'description': {'key': 'description', 'type': 'str'},
-        'structure': {'key': 'structure', 'type': 'object'},
-        'schema': {'key': 'schema', 'type': 'object'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
-        'type': {'key': 'type', 'type': 'str'},
-        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(PostgreSqlTableDataset, self).__init__(**kwargs)
-        self.table_name = kwargs.get('table_name', None)
-        self.type = 'PostgreSqlTable'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset_py3.py
deleted file mode 100644
index 00cd65a84cd4..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset_py3.py
+++ /dev/null
@@ -1,72 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .dataset_py3 import Dataset
-
-
-class PostgreSqlTableDataset(Dataset):
-    """The PostgreSQL table dataset.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param description: Dataset description.
-    :type description: str
-    :param structure: Columns that define the structure of the dataset. Type:
-     array (or Expression with resultType array), itemType: DatasetDataElement.
-    :type structure: object
-    :param schema: Columns that define the physical type schema of the
-     dataset. Type: array (or Expression with resultType array), itemType:
-     DatasetSchemaDataElement.
-    :type schema: object
-    :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param parameters: Parameters for dataset.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     Dataset.
-    :type annotations: list[object]
-    :param folder: The folder that this Dataset is in. If not specified,
-     Dataset will appear at the root level.
-    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param table_name: The PostgreSQL table name. Type: string (or Expression
-     with resultType string).
-    :type table_name: object
-    """
-
-    _validation = {
-        'linked_service_name': {'required': True},
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'description': {'key': 'description', 'type': 'str'},
-        'structure': {'key': 'structure', 'type': 'object'},
-        'schema': {'key': 'schema', 'type': 'object'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
-        'type': {'key': 'type', 'type': 'str'},
-        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
-    }
-
-    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None:
-        super(PostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
-        self.table_name = table_name
-        self.type = 'PostgreSqlTable'
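And the table-shaped dataset the source reads from; unlike the Phoenix dataset above, this generation still models only the single table_name property (names hypothetical):

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, PostgreSqlTableDataset)

    dataset = PostgreSqlTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='ls_postgres'),
        table_name='public.customers')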
The default value is 8080. - :type port: object - :param authentication_type: Required. The authentication mechanism used to - connect to the Presto server. Possible values include: 'Anonymous', 'LDAP' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.PrestoAuthenticationType - :param username: The user name used to connect to the Presto server. - :type username: object - :param password: The password corresponding to the user name. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are - encrypted using SSL. The default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing - trusted CA certificates for verifying the server when connecting over SSL. - This property can only be set when using SSL on self-hosted IR. The - default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate - from the system trust store or from a specified PEM file. The default - value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a - CA-issued SSL certificate name to match the host name of the server when - connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow - self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object - :param time_zone_id: The local time zone used by the connection. Valid - values for this option are specified in the IANA Time Zone Database. The - default value is the system time zone. - :type time_zone_id: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'server_version': {'required': True}, - 'catalog': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'server_version': {'key': 'typeProperties.serverVersion', 'type': 'object'}, - 'catalog': {'key': 'typeProperties.catalog', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'time_zone_id': {'key': 'typeProperties.timeZoneID', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(PrestoLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.server_version = kwargs.get('server_version', None) - self.catalog = kwargs.get('catalog', None) - self.port = kwargs.get('port', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) - self.time_zone_id = kwargs.get('time_zone_id', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Presto' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service_py3.py deleted file mode 100644 index 75ab99d5a58f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service_py3.py +++ /dev/null @@ -1,132 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
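For reference, a minimal sketch of constructing this model through the keyword-only py3 signature that follows; the host, catalog, and credential values are placeholders, and SecureString is simply the most direct SecretBase implementation:

    from azure.mgmt.datafactory.models import PrestoLinkedService, SecureString

    presto_ls = PrestoLinkedService(
        host='192.168.222.160',      # IP address or host name of the Presto server
        server_version='0.148-t',    # Presto server version
        catalog='hive',              # catalog context for all requests
        authentication_type='LDAP',  # 'Anonymous' or 'LDAP'
        username='etl_reader',
        password=SecureString(value='<password>'),  # any SecretBase works here
        enable_ssl=True,
    )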
-# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class PrestoLinkedService(LinkedService): - """Presto server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The IP address or host name of the Presto server. - (i.e. 192.168.222.160) - :type host: object - :param server_version: Required. The version of the Presto server. (i.e. - 0.148-t) - :type server_version: object - :param catalog: Required. The catalog context for all request against the - server. - :type catalog: object - :param port: The TCP port that the Presto server uses to listen for client - connections. The default value is 8080. - :type port: object - :param authentication_type: Required. The authentication mechanism used to - connect to the Presto server. Possible values include: 'Anonymous', 'LDAP' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.PrestoAuthenticationType - :param username: The user name used to connect to the Presto server. - :type username: object - :param password: The password corresponding to the user name. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are - encrypted using SSL. The default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing - trusted CA certificates for verifying the server when connecting over SSL. - This property can only be set when using SSL on self-hosted IR. The - default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate - from the system trust store or from a specified PEM file. The default - value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a - CA-issued SSL certificate name to match the host name of the server when - connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow - self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object - :param time_zone_id: The local time zone used by the connection. Valid - values for this option are specified in the IANA Time Zone Database. The - default value is the system time zone. - :type time_zone_id: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'server_version': {'required': True}, - 'catalog': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'server_version': {'key': 'typeProperties.serverVersion', 'type': 'object'}, - 'catalog': {'key': 'typeProperties.catalog', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'time_zone_id': {'key': 'typeProperties.timeZoneID', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, server_version, catalog, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, time_zone_id=None, encrypted_credential=None, **kwargs) -> None: - super(PrestoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.server_version = server_version - self.catalog = catalog - self.port = port - self.authentication_type = authentication_type - self.username = username - self.password = password - self.enable_ssl = enable_ssl - self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch - self.allow_self_signed_server_cert = allow_self_signed_server_cert - self.time_zone_id = time_zone_id - self.encrypted_credential = encrypted_credential - self.type = 'Presto' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset.py deleted file mode 100644 index eb80e1a97750..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
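Note how the attribute maps in both copies of this class route the connector-specific properties through a typeProperties. prefix; msrest flattens those keys into a nested object on the wire. Continuing the sketch above with plain msrest serialization (this usage is standard msrest, not something defined in this patch):

    from msrest import Serializer
    from azure.mgmt.datafactory import models

    client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
    body = Serializer(client_models).body(presto_ls, 'PrestoLinkedService')
    # body == {'type': 'Presto',
    #          'typeProperties': {'host': '192.168.222.160', ...}}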
See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class PrestoObjectDataset(Dataset): - """Presto server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of the Presto. Type: string (or Expression - with resultType string). - :type table: object - :param presto_object_dataset_schema: The schema name of the Presto. Type: - string (or Expression with resultType string). 
- :type presto_object_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'presto_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(PrestoObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.presto_object_dataset_schema = kwargs.get('presto_object_dataset_schema', None) - self.type = 'PrestoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset_py3.py deleted file mode 100644 index e3bd2f7e36aa..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class PrestoObjectDataset(Dataset): - """Presto server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. 
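The docstring above is explicit that tableName is being retired in favour of the split table/schema pair, and that the Python attribute is named presto_object_dataset_schema so it cannot collide with the dataset-level schema member. A minimal sketch with placeholder names:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, PrestoObjectDataset)

    presto_ds = PrestoObjectDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='PrestoLinkedService'),  # placeholder name
        presto_object_dataset_schema='web',  # -> typeProperties.schema
        table='clicks',                      # -> typeProperties.table
    )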
- :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of the Presto. Type: string (or Expression - with resultType string). - :type table: object - :param presto_object_dataset_schema: The schema name of the Presto. Type: - string (or Expression with resultType string). - :type presto_object_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'presto_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, presto_object_dataset_schema=None, **kwargs) -> None: - super(PrestoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.table = table - self.presto_object_dataset_schema = presto_object_dataset_schema - self.type = 'PrestoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source.py deleted file mode 100644 index 9b7274011265..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class PrestoSource(CopySource): - """A copy activity Presto server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(PrestoSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'PrestoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source_py3.py deleted file mode 100644 index 47fe3eb5f790..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class PrestoSource(CopySource): - """A copy activity Presto server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
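As with every CopySource subclass in this package, the only Presto-specific knob is query; the retry and concurrency settings come from the base class. A sketch of a source for a copy activity, with an illustrative query:

    from azure.mgmt.datafactory.models import PrestoSource

    source = PrestoSource(
        query="SELECT * FROM web.clicks WHERE dt = '2019-06-01'",
        source_retry_count=3,
        source_retry_wait='00:00:30',  # matches the timespan pattern above
    )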
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'PrestoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service.py deleted file mode 100644 index 6353c1cda96a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service.py +++ /dev/null @@ -1,100 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class QuickBooksLinkedService(LinkedService): - """QuickBooks server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param endpoint: Required. The endpoint of the QuickBooks server. (i.e. - quickbooks.api.intuit.com) - :type endpoint: object - :param company_id: Required. The company ID of the QuickBooks company to - authorize. - :type company_id: object - :param consumer_key: Required. The consumer key for OAuth 1.0 - authentication. - :type consumer_key: object - :param consumer_secret: Required. The consumer secret for OAuth 1.0 - authentication. - :type consumer_secret: ~azure.mgmt.datafactory.models.SecretBase - :param access_token: Required. The access token for OAuth 1.0 - authentication. - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param access_token_secret: Required. The access token secret for OAuth - 1.0 authentication. 
- :type access_token_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'company_id': {'required': True}, - 'consumer_key': {'required': True}, - 'consumer_secret': {'required': True}, - 'access_token': {'required': True}, - 'access_token_secret': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'company_id': {'key': 'typeProperties.companyId', 'type': 'object'}, - 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'object'}, - 'consumer_secret': {'key': 'typeProperties.consumerSecret', 'type': 'SecretBase'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'access_token_secret': {'key': 'typeProperties.accessTokenSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(QuickBooksLinkedService, self).__init__(**kwargs) - self.endpoint = kwargs.get('endpoint', None) - self.company_id = kwargs.get('company_id', None) - self.consumer_key = kwargs.get('consumer_key', None) - self.consumer_secret = kwargs.get('consumer_secret', None) - self.access_token = kwargs.get('access_token', None) - self.access_token_secret = kwargs.get('access_token_secret', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'QuickBooks' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service_py3.py deleted file mode 100644 index be12fc5cfba5..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service_py3.py +++ /dev/null @@ -1,100 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class QuickBooksLinkedService(LinkedService): - """QuickBooks server linked service. 
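Six of the QuickBooks properties are required, and three of them are SecretBase-typed, so a construction sketch needs secret wrappers; SecureString is used below for brevity, though a Key Vault reference would serve equally. All values are placeholders:

    from azure.mgmt.datafactory.models import (
        QuickBooksLinkedService, SecureString)

    qb_ls = QuickBooksLinkedService(
        endpoint='quickbooks.api.intuit.com',
        company_id='<company-id>',
        consumer_key='<consumer-key>',
        consumer_secret=SecureString(value='<consumer-secret>'),
        access_token=SecureString(value='<access-token>'),
        access_token_secret=SecureString(value='<access-token-secret>'),
    )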
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param endpoint: Required. The endpoint of the QuickBooks server. (i.e. - quickbooks.api.intuit.com) - :type endpoint: object - :param company_id: Required. The company ID of the QuickBooks company to - authorize. - :type company_id: object - :param consumer_key: Required. The consumer key for OAuth 1.0 - authentication. - :type consumer_key: object - :param consumer_secret: Required. The consumer secret for OAuth 1.0 - authentication. - :type consumer_secret: ~azure.mgmt.datafactory.models.SecretBase - :param access_token: Required. The access token for OAuth 1.0 - authentication. - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param access_token_secret: Required. The access token secret for OAuth - 1.0 authentication. - :type access_token_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'company_id': {'required': True}, - 'consumer_key': {'required': True}, - 'consumer_secret': {'required': True}, - 'access_token': {'required': True}, - 'access_token_secret': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'company_id': {'key': 'typeProperties.companyId', 'type': 'object'}, - 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'object'}, - 'consumer_secret': {'key': 'typeProperties.consumerSecret', 'type': 'SecretBase'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'access_token_secret': {'key': 'typeProperties.accessTokenSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, endpoint, company_id, consumer_key, consumer_secret, access_token, access_token_secret, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, use_encrypted_endpoints=None, encrypted_credential=None, **kwargs) -> None: - super(QuickBooksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.endpoint = endpoint - self.company_id = company_id - self.consumer_key = consumer_key - self.consumer_secret = consumer_secret - self.access_token = access_token - self.access_token_secret = access_token_secret - self.use_encrypted_endpoints = use_encrypted_endpoints - self.encrypted_credential = encrypted_credential - self.type = 'QuickBooks' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset.py deleted file mode 100644 index 73446d0ed938..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class QuickBooksObjectDataset(Dataset): - """QuickBooks server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. 
- :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(QuickBooksObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'QuickBooksObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset_py3.py deleted file mode 100644 index 65f67d2b20af..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class QuickBooksObjectDataset(Dataset): - """QuickBooks server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. 
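Like the PostgreSQL dataset earlier in this patch, the QuickBooks dataset adds a single tableName type property on top of the shared Dataset members. A minimal sketch with placeholder names:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, QuickBooksObjectDataset)

    qb_ds = QuickBooksObjectDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='QuickBooksLinkedService'),  # placeholder name
        table_name='Invoice',  # -> typeProperties.tableName
    )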
- :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(QuickBooksObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'QuickBooksObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source.py deleted file mode 100644 index cce0a026ae5a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class QuickBooksSource(CopySource): - """A copy activity QuickBooks server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. 
Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(QuickBooksSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'QuickBooksSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source_py3.py deleted file mode 100644 index a00f35d4e1c1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class QuickBooksSource(CopySource): - """A copy activity QuickBooks server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
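The QuickBooks source mirrors the Presto source exactly: base-class retry settings plus an optional query. A one-line sketch with an illustrative query:

    from azure.mgmt.datafactory.models import QuickBooksSource

    qb_source = QuickBooksSource(
        query="SELECT * FROM Invoice WHERE TotalAmt > '100'",  # illustrative
    )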
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'QuickBooksSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule.py deleted file mode 100644 index f23d452392b0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule.py +++ /dev/null @@ -1,50 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class RecurrenceSchedule(Model): - """The recurrence schedule. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param minutes: The minutes. - :type minutes: list[int] - :param hours: The hours. - :type hours: list[int] - :param week_days: The days of the week. - :type week_days: list[str or ~azure.mgmt.datafactory.models.DaysOfWeek] - :param month_days: The month days. - :type month_days: list[int] - :param monthly_occurrences: The monthly occurrences. 
- :type monthly_occurrences: - list[~azure.mgmt.datafactory.models.RecurrenceScheduleOccurrence] - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'minutes': {'key': 'minutes', 'type': '[int]'}, - 'hours': {'key': 'hours', 'type': '[int]'}, - 'week_days': {'key': 'weekDays', 'type': '[DaysOfWeek]'}, - 'month_days': {'key': 'monthDays', 'type': '[int]'}, - 'monthly_occurrences': {'key': 'monthlyOccurrences', 'type': '[RecurrenceScheduleOccurrence]'}, - } - - def __init__(self, **kwargs): - super(RecurrenceSchedule, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.minutes = kwargs.get('minutes', None) - self.hours = kwargs.get('hours', None) - self.week_days = kwargs.get('week_days', None) - self.month_days = kwargs.get('month_days', None) - self.monthly_occurrences = kwargs.get('monthly_occurrences', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence.py deleted file mode 100644 index bbbe1fa28f17..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence.py +++ /dev/null @@ -1,38 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class RecurrenceScheduleOccurrence(Model): - """The recurrence schedule occurrence. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param day: The day of the week. Possible values include: 'Sunday', - 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday' - :type day: str or ~azure.mgmt.datafactory.models.DayOfWeek - :param occurrence: The occurrence. - :type occurrence: int - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'day': {'key': 'day', 'type': 'DayOfWeek'}, - 'occurrence': {'key': 'occurrence', 'type': 'int'}, - } - - def __init__(self, **kwargs): - super(RecurrenceScheduleOccurrence, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.day = kwargs.get('day', None) - self.occurrence = kwargs.get('occurrence', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence_py3.py deleted file mode 100644 index 10aea1f00163..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence_py3.py +++ /dev/null @@ -1,38 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
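RecurrenceSchedule and RecurrenceScheduleOccurrence combine to describe when a schedule trigger fires; for example, 06:00 on the first Monday of each month could be expressed as below (a sketch; the enum-typed day also accepts its plain string value):

    from azure.mgmt.datafactory.models import (
        RecurrenceSchedule, RecurrenceScheduleOccurrence)

    schedule = RecurrenceSchedule(
        hours=[6],
        minutes=[0],
        monthly_occurrences=[
            RecurrenceScheduleOccurrence(day='Monday', occurrence=1),
        ],
    )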
-# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class RecurrenceScheduleOccurrence(Model): - """The recurrence schedule occurrence. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param day: The day of the week. Possible values include: 'Sunday', - 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday' - :type day: str or ~azure.mgmt.datafactory.models.DayOfWeek - :param occurrence: The occurrence. - :type occurrence: int - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'day': {'key': 'day', 'type': 'DayOfWeek'}, - 'occurrence': {'key': 'occurrence', 'type': 'int'}, - } - - def __init__(self, *, additional_properties=None, day=None, occurrence: int=None, **kwargs) -> None: - super(RecurrenceScheduleOccurrence, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.day = day - self.occurrence = occurrence diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_py3.py deleted file mode 100644 index fbe44fa3f021..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_py3.py +++ /dev/null @@ -1,50 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class RecurrenceSchedule(Model): - """The recurrence schedule. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param minutes: The minutes. - :type minutes: list[int] - :param hours: The hours. - :type hours: list[int] - :param week_days: The days of the week. - :type week_days: list[str or ~azure.mgmt.datafactory.models.DaysOfWeek] - :param month_days: The month days. - :type month_days: list[int] - :param monthly_occurrences: The monthly occurrences. 
- :type monthly_occurrences: - list[~azure.mgmt.datafactory.models.RecurrenceScheduleOccurrence] - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'minutes': {'key': 'minutes', 'type': '[int]'}, - 'hours': {'key': 'hours', 'type': '[int]'}, - 'week_days': {'key': 'weekDays', 'type': '[DaysOfWeek]'}, - 'month_days': {'key': 'monthDays', 'type': '[int]'}, - 'monthly_occurrences': {'key': 'monthlyOccurrences', 'type': '[RecurrenceScheduleOccurrence]'}, - } - - def __init__(self, *, additional_properties=None, minutes=None, hours=None, week_days=None, month_days=None, monthly_occurrences=None, **kwargs) -> None: - super(RecurrenceSchedule, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.minutes = minutes - self.hours = hours - self.week_days = week_days - self.month_days = month_days - self.monthly_occurrences = monthly_occurrences diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings.py deleted file mode 100644 index a2e3bddb9425..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings.py +++ /dev/null @@ -1,47 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class RedirectIncompatibleRowSettings(Model): - """Redirect incompatible row settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param linked_service_name: Required. Name of the Azure Storage, Storage - SAS, or Azure Data Lake Store linked service used for redirecting - incompatible row. Must be specified if redirectIncompatibleRowSettings is - specified. Type: string (or Expression with resultType string). - :type linked_service_name: object - :param path: The path for storing the redirect incompatible row data. - Type: string (or Expression with resultType string). 
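One subtlety worth flagging: unlike most models in this patch, linked_service_name here is a plain object (the service name, or an expression resolving to one) rather than a LinkedServiceReference, as the attribute map below confirms. A sketch with placeholder values:

    from azure.mgmt.datafactory.models import RedirectIncompatibleRowSettings

    redirect = RedirectIncompatibleRowSettings(
        linked_service_name='AzureStorageLinkedService',  # name/expression, not a reference object
        path='copyerrors/incompatible',
    )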
- :type path: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'object'}, - 'path': {'key': 'path', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(RedirectIncompatibleRowSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.linked_service_name = kwargs.get('linked_service_name', None) - self.path = kwargs.get('path', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings_py3.py deleted file mode 100644 index b47878ef4354..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings_py3.py +++ /dev/null @@ -1,47 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class RedirectIncompatibleRowSettings(Model): - """Redirect incompatible row settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param linked_service_name: Required. Name of the Azure Storage, Storage - SAS, or Azure Data Lake Store linked service used for redirecting - incompatible row. Must be specified if redirectIncompatibleRowSettings is - specified. Type: string (or Expression with resultType string). - :type linked_service_name: object - :param path: The path for storing the redirect incompatible row data. - Type: string (or Expression with resultType string). - :type path: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'object'}, - 'path': {'key': 'path', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, path=None, **kwargs) -> None: - super(RedirectIncompatibleRowSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.linked_service_name = linked_service_name - self.path = path diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings.py deleted file mode 100644 index 7114b85e10db..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings.py +++ /dev/null @@ -1,48 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class RedshiftUnloadSettings(Model): - """The Amazon S3 settings needed for the interim Amazon S3 when copying from - Amazon Redshift with unload. With this, data from Amazon Redshift source - will be unloaded into S3 first and then copied into the targeted sink from - the interim S3. - - All required parameters must be populated in order to send to Azure. - - :param s3_linked_service_name: Required. The name of the Amazon S3 linked - service which will be used for the unload operation when copying from the - Amazon Redshift source. - :type s3_linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param bucket_name: Required. The bucket of the interim Amazon S3 which - will be used to store the unloaded data from Amazon Redshift source. The - bucket must be in the same region as the Amazon Redshift source. Type: - string (or Expression with resultType string). - :type bucket_name: object - """ - - _validation = { - 's3_linked_service_name': {'required': True}, - 'bucket_name': {'required': True}, - } - - _attribute_map = { - 's3_linked_service_name': {'key': 's3LinkedServiceName', 'type': 'LinkedServiceReference'}, - 'bucket_name': {'key': 'bucketName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(RedshiftUnloadSettings, self).__init__(**kwargs) - self.s3_linked_service_name = kwargs.get('s3_linked_service_name', None) - self.bucket_name = kwargs.get('bucket_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings_py3.py deleted file mode 100644 index a40d014a32f9..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings_py3.py +++ /dev/null @@ -1,48 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class RedshiftUnloadSettings(Model): - """The Amazon S3 settings needed for the interim Amazon S3 when copying from - Amazon Redshift with unload. With this, data from Amazon Redshift source - will be unloaded into S3 first and then copied into the targeted sink from - the interim S3. - - All required parameters must be populated in order to send to Azure. - - :param s3_linked_service_name: Required. The name of the Amazon S3 linked - service which will be used for the unload operation when copying from the - Amazon Redshift source. - :type s3_linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param bucket_name: Required. The bucket of the interim Amazon S3 which - will be used to store the unloaded data from Amazon Redshift source. 
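For reference, a minimal sketch of the removed RedshiftUnloadSettings model in use (the linked service and bucket names are hypothetical):

from azure.mgmt.datafactory.models import (
    LinkedServiceReference,
    RedshiftUnloadSettings,
)

unload_settings = RedshiftUnloadSettings(
    # Hypothetical Amazon S3 linked service used for the interim unload.
    s3_linked_service_name=LinkedServiceReference(reference_name='InterimS3'),
    # Hypothetical bucket; must be in the same region as the Redshift source.
    bucket_name='interim-unload-bucket',
)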
The - bucket must be in the same region as the Amazon Redshift source. Type: - string (or Expression with resultType string). - :type bucket_name: object - """ - - _validation = { - 's3_linked_service_name': {'required': True}, - 'bucket_name': {'required': True}, - } - - _attribute_map = { - 's3_linked_service_name': {'key': 's3LinkedServiceName', 'type': 'LinkedServiceReference'}, - 'bucket_name': {'key': 'bucketName', 'type': 'object'}, - } - - def __init__(self, *, s3_linked_service_name, bucket_name, **kwargs) -> None: - super(RedshiftUnloadSettings, self).__init__(**kwargs) - self.s3_linked_service_name = s3_linked_service_name - self.bucket_name = bucket_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source.py deleted file mode 100644 index 2450f31222df..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class RelationalSource(CopySource): - """A copy activity source for various relational databases. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(RelationalSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'RelationalSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source_py3.py deleted file mode 100644 index f88383cbd729..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class RelationalSource(CopySource): - """A copy activity source for various relational databases. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). 
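For reference, a minimal sketch of the removed RelationalSource model (the query and retry values are illustrative):

from azure.mgmt.datafactory.models import RelationalSource

source = RelationalSource(
    query='SELECT * FROM MyTable',  # illustrative query; string or expression
    source_retry_count=3,           # illustrative retry count
    source_retry_wait='00:00:30',   # illustrative wait, per the pattern above
)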
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'RelationalSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset.py deleted file mode 100644 index e5dd2e0786c8..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class RelationalTableDataset(Dataset): - """The relational table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The relational table name. Type: string (or Expression - with resultType string). 
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(RelationalTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'RelationalTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset_py3.py deleted file mode 100644 index 3c85d95f8033..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class RelationalTableDataset(Dataset): - """The relational table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The relational table name. Type: string (or Expression - with resultType string). 
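For reference, a minimal sketch of the removed RelationalTableDataset model (the linked service and table names are hypothetical):

from azure.mgmt.datafactory.models import (
    LinkedServiceReference,
    RelationalTableDataset,
)

dataset = RelationalTableDataset(
    # Hypothetical linked service pointing at the relational database.
    linked_service_name=LinkedServiceReference(reference_name='MyDatabase'),
    table_name='MyTable',  # hypothetical table name
)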
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(RelationalTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'RelationalTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource.py deleted file mode 100644 index 8de6a70ecc99..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource.py +++ /dev/null @@ -1,54 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .sub_resource import SubResource - - -class RerunTriggerResource(SubResource): - """RerunTrigger resource type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Properties of the rerun trigger. 
- :type properties:
- ~azure.mgmt.datafactory.models.RerunTumblingWindowTrigger
- """
-
- _validation = {
- 'id': {'readonly': True},
- 'name': {'readonly': True},
- 'type': {'readonly': True},
- 'etag': {'readonly': True},
- 'properties': {'required': True},
- }
-
- _attribute_map = {
- 'id': {'key': 'id', 'type': 'str'},
- 'name': {'key': 'name', 'type': 'str'},
- 'type': {'key': 'type', 'type': 'str'},
- 'etag': {'key': 'etag', 'type': 'str'},
- 'properties': {'key': 'properties', 'type': 'RerunTumblingWindowTrigger'},
- }
-
- def __init__(self, **kwargs):
- super(RerunTriggerResource, self).__init__(**kwargs)
- self.properties = kwargs.get('properties', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_paged.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_paged.py
deleted file mode 100644
index 23d971c1082e..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_paged.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.paging import Paged
-
-
-class RerunTriggerResourcePaged(Paged):
- """
- A paging container for iterating over a list of :class:`RerunTriggerResource <azure.mgmt.datafactory.models.RerunTriggerResource>` object
- """
-
- _attribute_map = {
- 'next_link': {'key': 'nextLink', 'type': 'str'},
- 'current_page': {'key': 'value', 'type': '[RerunTriggerResource]'}
- }
-
- def __init__(self, *args, **kwargs):
-
- super(RerunTriggerResourcePaged, self).__init__(*args, **kwargs)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_py3.py
deleted file mode 100644
index 19814ad0d76f..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_py3.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .sub_resource_py3 import SubResource
-
-
-class RerunTriggerResource(SubResource):
- """RerunTrigger resource type.
-
- Variables are only populated by the server, and will be ignored when
- sending a request.
-
- All required parameters must be populated in order to send to Azure.
-
- :ivar id: The resource identifier.
- :vartype id: str
- :ivar name: The resource name.
- :vartype name: str
- :ivar type: The resource type.
- :vartype type: str
- :ivar etag: Etag identifies change in the resource.
- :vartype etag: str
- :param properties: Required. Properties of the rerun trigger.
- :type properties: - ~azure.mgmt.datafactory.models.RerunTumblingWindowTrigger - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'RerunTumblingWindowTrigger'}, - } - - def __init__(self, *, properties, **kwargs) -> None: - super(RerunTriggerResource, self).__init__(**kwargs) - self.properties = properties diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger.py deleted file mode 100644 index 8c5ca2d67f3c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger.py +++ /dev/null @@ -1,78 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .trigger import Trigger - - -class RerunTumblingWindowTrigger(Trigger): - """Trigger that schedules pipeline reruns for all fixed time interval windows - from a requested start time to requested end time. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when - Start/Stop APIs are called on the Trigger. Possible values include: - 'Started', 'Stopped', 'Disabled' - :vartype runtime_state: str or - ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the - trigger. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param parent_trigger: The parent trigger reference. - :type parent_trigger: object - :param requested_start_time: Required. The start time for the time period - for which restatement is initiated. Only UTC time is currently supported. - :type requested_start_time: datetime - :param requested_end_time: Required. The end time for the time period for - which restatement is initiated. Only UTC time is currently supported. - :type requested_end_time: datetime - :param max_concurrency: Required. The max number of parallel time windows - (ready for execution) for which a rerun is triggered. 
- :type max_concurrency: int - """ - - _validation = { - 'runtime_state': {'readonly': True}, - 'type': {'required': True}, - 'requested_start_time': {'required': True}, - 'requested_end_time': {'required': True}, - 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, - 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, - 'requested_end_time': {'key': 'typeProperties.requestedEndTime', 'type': 'iso-8601'}, - 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, - } - - def __init__(self, **kwargs): - super(RerunTumblingWindowTrigger, self).__init__(**kwargs) - self.parent_trigger = kwargs.get('parent_trigger', None) - self.requested_start_time = kwargs.get('requested_start_time', None) - self.requested_end_time = kwargs.get('requested_end_time', None) - self.max_concurrency = kwargs.get('max_concurrency', None) - self.type = 'RerunTumblingWindowTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters.py deleted file mode 100644 index 4b87f070b6be..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters.py +++ /dev/null @@ -1,47 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class RerunTumblingWindowTriggerActionParameters(Model): - """Rerun tumbling window trigger Parameters. - - All required parameters must be populated in order to send to Azure. - - :param start_time: Required. The start time for the time period for which - restatement is initiated. Only UTC time is currently supported. - :type start_time: datetime - :param end_time: Required. The end time for the time period for which - restatement is initiated. Only UTC time is currently supported. - :type end_time: datetime - :param max_concurrency: Required. The max number of parallel time windows - (ready for execution) for which a rerun is triggered. 
- :type max_concurrency: int - """ - - _validation = { - 'start_time': {'required': True}, - 'end_time': {'required': True}, - 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, - } - - _attribute_map = { - 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, - 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, - 'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'}, - } - - def __init__(self, **kwargs): - super(RerunTumblingWindowTriggerActionParameters, self).__init__(**kwargs) - self.start_time = kwargs.get('start_time', None) - self.end_time = kwargs.get('end_time', None) - self.max_concurrency = kwargs.get('max_concurrency', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters_py3.py deleted file mode 100644 index 6fadecca588b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters_py3.py +++ /dev/null @@ -1,47 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class RerunTumblingWindowTriggerActionParameters(Model): - """Rerun tumbling window trigger Parameters. - - All required parameters must be populated in order to send to Azure. - - :param start_time: Required. The start time for the time period for which - restatement is initiated. Only UTC time is currently supported. - :type start_time: datetime - :param end_time: Required. The end time for the time period for which - restatement is initiated. Only UTC time is currently supported. - :type end_time: datetime - :param max_concurrency: Required. The max number of parallel time windows - (ready for execution) for which a rerun is triggered. 
- :type max_concurrency: int - """ - - _validation = { - 'start_time': {'required': True}, - 'end_time': {'required': True}, - 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, - } - - _attribute_map = { - 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, - 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, - 'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'}, - } - - def __init__(self, *, start_time, end_time, max_concurrency: int, **kwargs) -> None: - super(RerunTumblingWindowTriggerActionParameters, self).__init__(**kwargs) - self.start_time = start_time - self.end_time = end_time - self.max_concurrency = max_concurrency diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_py3.py deleted file mode 100644 index 4a7a20759c1b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_py3.py +++ /dev/null @@ -1,78 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .trigger_py3 import Trigger - - -class RerunTumblingWindowTrigger(Trigger): - """Trigger that schedules pipeline reruns for all fixed time interval windows - from a requested start time to requested end time. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when - Start/Stop APIs are called on the Trigger. Possible values include: - 'Started', 'Stopped', 'Disabled' - :vartype runtime_state: str or - ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the - trigger. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param parent_trigger: The parent trigger reference. - :type parent_trigger: object - :param requested_start_time: Required. The start time for the time period - for which restatement is initiated. Only UTC time is currently supported. - :type requested_start_time: datetime - :param requested_end_time: Required. The end time for the time period for - which restatement is initiated. Only UTC time is currently supported. - :type requested_end_time: datetime - :param max_concurrency: Required. The max number of parallel time windows - (ready for execution) for which a rerun is triggered. 
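For reference, a minimal sketch of the removed RerunTumblingWindowTrigger model (the time window is illustrative; only UTC times are supported):

import datetime

from azure.mgmt.datafactory.models import RerunTumblingWindowTrigger

rerun_trigger = RerunTumblingWindowTrigger(
    requested_start_time=datetime.datetime(2019, 6, 1),  # illustrative UTC start
    requested_end_time=datetime.datetime(2019, 6, 7),    # illustrative UTC end
    max_concurrency=10,  # must fall within the 1-50 range enforced above
)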
- :type max_concurrency: int - """ - - _validation = { - 'runtime_state': {'readonly': True}, - 'type': {'required': True}, - 'requested_start_time': {'required': True}, - 'requested_end_time': {'required': True}, - 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, - 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, - 'requested_end_time': {'key': 'typeProperties.requestedEndTime', 'type': 'iso-8601'}, - 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, - } - - def __init__(self, *, requested_start_time, requested_end_time, max_concurrency: int, additional_properties=None, description: str=None, annotations=None, parent_trigger=None, **kwargs) -> None: - super(RerunTumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) - self.parent_trigger = parent_trigger - self.requested_start_time = requested_start_time - self.requested_end_time = requested_end_time - self.max_concurrency = max_concurrency - self.type = 'RerunTumblingWindowTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource.py deleted file mode 100644 index f6b2d7d3b512..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource.py +++ /dev/null @@ -1,58 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class Resource(Model): - """Azure Data Factory top-level resource. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :param location: The resource location. - :type location: str - :param tags: The resource tags. - :type tags: dict[str, str] - :ivar e_tag: Etag identifies change in the resource. 
- :vartype e_tag: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'e_tag': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'e_tag': {'key': 'eTag', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(Resource, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.location = kwargs.get('location', None) - self.tags = kwargs.get('tags', None) - self.e_tag = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource_py3.py deleted file mode 100644 index cfc0e4b09aa5..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource_py3.py +++ /dev/null @@ -1,58 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class Resource(Model): - """Azure Data Factory top-level resource. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :param location: The resource location. - :type location: str - :param tags: The resource tags. - :type tags: dict[str, str] - :ivar e_tag: Etag identifies change in the resource. - :vartype e_tag: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'e_tag': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'e_tag': {'key': 'eTag', 'type': 'str'}, - } - - def __init__(self, *, location: str=None, tags=None, **kwargs) -> None: - super(Resource, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.location = location - self.tags = tags - self.e_tag = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service.py deleted file mode 100644 index 16d1af502787..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service.py +++ /dev/null @@ -1,94 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. 
-# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class ResponsysLinkedService(LinkedService): - """Responsys linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param endpoint: Required. The endpoint of the Responsys server. - :type endpoint: object - :param client_id: Required. The client ID associated with the Responsys - application. Type: string (or Expression with resultType string). - :type client_id: object - :param client_secret: The client secret associated with the Responsys - application. Type: string (or Expression with resultType string). - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. Type: - boolean (or Expression with resultType boolean). - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. Type: boolean (or - Expression with resultType boolean). - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. Type: - boolean (or Expression with resultType boolean). - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
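For reference, a minimal sketch of the removed ResponsysLinkedService model (the endpoint, client ID, and secret are placeholders; SecureString is assumed to be the SecretBase implementation exported by this package):

from azure.mgmt.datafactory.models import ResponsysLinkedService, SecureString

linked_service = ResponsysLinkedService(
    endpoint='https://example.responsys.net',  # placeholder endpoint
    client_id='my-client-id',                  # placeholder client ID
    client_secret=SecureString(value='my-client-secret'),  # placeholder secret
)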
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ResponsysLinkedService, self).__init__(**kwargs) - self.endpoint = kwargs.get('endpoint', None) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Responsys' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service_py3.py deleted file mode 100644 index 6d8a74a0a34b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service_py3.py +++ /dev/null @@ -1,94 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class ResponsysLinkedService(LinkedService): - """Responsys linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param endpoint: Required. 
The endpoint of the Responsys server. - :type endpoint: object - :param client_id: Required. The client ID associated with the Responsys - application. Type: string (or Expression with resultType string). - :type client_id: object - :param client_secret: The client secret associated with the Responsys - application. Type: string (or Expression with resultType string). - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. Type: - boolean (or Expression with resultType boolean). - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. Type: boolean (or - Expression with resultType boolean). - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. Type: - boolean (or Expression with resultType boolean). - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, endpoint, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(ResponsysLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.endpoint = endpoint - self.client_id = client_id - self.client_secret = client_secret - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'Responsys' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset.py deleted file mode 100644 index f459e69113a1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class ResponsysObjectDataset(Dataset): - """Responsys dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). 
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ResponsysObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'ResponsysObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset_py3.py deleted file mode 100644 index c5f375910aaf..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class ResponsysObjectDataset(Dataset): - """Responsys dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). 
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(ResponsysObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'ResponsysObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source.py deleted file mode 100644 index fd25b8e71377..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class ResponsysSource(CopySource): - """A copy activity Responsys source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ResponsysSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'ResponsysSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source_py3.py deleted file mode 100644 index 8d5e4ac091f7..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class ResponsysSource(CopySource): - """A copy activity Responsys source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
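# Usage sketch (illustrative, not from this diff): a ResponsysSource as it
# would sit in a copy activity, assuming the public import path; the query
# text is a placeholder.
from azure.mgmt.datafactory.models import ResponsysSource

source = ResponsysSource(
    query='SELECT * FROM Contacts',  # optional query pushed to Responsys
    max_concurrent_connections=4,    # optional cap on source connections
)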
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'ResponsysSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset.py deleted file mode 100644 index 9a5d41858e54..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset.py +++ /dev/null @@ -1,93 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class RestResourceDataset(Dataset): - """A Rest service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param relative_url: The relative URL to the resource that the RESTful API - provides. Type: string (or Expression with resultType string). - :type relative_url: object - :param request_method: The HTTP method used to call the RESTful API. The - default is GET. Type: string (or Expression with resultType string). 
- :type request_method: object - :param request_body: The HTTP request body to the RESTful API if - requestMethod is POST. Type: string (or Expression with resultType - string). - :type request_body: object - :param additional_headers: The additional HTTP headers in the request to - the RESTful API. Type: string (or Expression with resultType string). - :type additional_headers: object - :param pagination_rules: The pagination rules to compose next page - requests. Type: string (or Expression with resultType string). - :type pagination_rules: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, - 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, - 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, - 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, - 'pagination_rules': {'key': 'typeProperties.paginationRules', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(RestResourceDataset, self).__init__(**kwargs) - self.relative_url = kwargs.get('relative_url', None) - self.request_method = kwargs.get('request_method', None) - self.request_body = kwargs.get('request_body', None) - self.additional_headers = kwargs.get('additional_headers', None) - self.pagination_rules = kwargs.get('pagination_rules', None) - self.type = 'RestResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset_py3.py deleted file mode 100644 index 99f39c97f373..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset_py3.py +++ /dev/null @@ -1,93 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class RestResourceDataset(Dataset): - """A Rest service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. 
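# Usage sketch (illustrative, not from this diff): a RestResourceDataset built
# from the typeProperties documented above, assuming the public import path;
# the linked service name and relative URL are placeholders.
from azure.mgmt.datafactory.models import LinkedServiceReference, RestResourceDataset

rest_dataset = RestResourceDataset(
    linked_service_name=LinkedServiceReference(reference_name='MyRestService'),
    relative_url='api/items',  # resolved against the linked service base URL
    request_method='GET',      # GET is the documented default
)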
- :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param relative_url: The relative URL to the resource that the RESTful API - provides. Type: string (or Expression with resultType string). - :type relative_url: object - :param request_method: The HTTP method used to call the RESTful API. The - default is GET. Type: string (or Expression with resultType string). - :type request_method: object - :param request_body: The HTTP request body to the RESTful API if - requestMethod is POST. Type: string (or Expression with resultType - string). - :type request_body: object - :param additional_headers: The additional HTTP headers in the request to - the RESTful API. Type: string (or Expression with resultType string). - :type additional_headers: object - :param pagination_rules: The pagination rules to compose next page - requests. Type: string (or Expression with resultType string). - :type pagination_rules: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, - 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, - 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, - 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, - 'pagination_rules': {'key': 'typeProperties.paginationRules', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, relative_url=None, request_method=None, request_body=None, additional_headers=None, pagination_rules=None, **kwargs) -> None: - super(RestResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.relative_url = relative_url - self.request_method = request_method - self.request_body = request_body - self.additional_headers = additional_headers - self.pagination_rules = 
pagination_rules - self.type = 'RestResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service.py deleted file mode 100644 index 0fbb15654438..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service.py +++ /dev/null @@ -1,107 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class RestServiceLinkedService(LinkedService): - """Rest Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param url: Required. The base URL of the REST service. - :type url: object - :param enable_server_certificate_validation: Whether to validate server - side SSL certificate when connecting to the endpoint.The default value is - true. Type: boolean (or Expression with resultType boolean). - :type enable_server_certificate_validation: object - :param authentication_type: Required. Type of authentication used to - connect to the REST service. Possible values include: 'Anonymous', - 'Basic', 'AadServicePrincipal', 'ManagedServiceIdentity' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.RestServiceAuthenticationType - :param user_name: The user name used in Basic authentication type. - :type user_name: object - :param password: The password used in Basic authentication type. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param service_principal_id: The application's client ID used in - AadServicePrincipal authentication type. - :type service_principal_id: object - :param service_principal_key: The application's key used in - AadServicePrincipal authentication type. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The tenant information (domain name or tenant ID) used in - AadServicePrincipal authentication type under which your application - resides. - :type tenant: object - :param aad_resource_id: The resource you are requesting authorization to - use. - :type aad_resource_id: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(RestServiceLinkedService, self).__init__(**kwargs) - self.url = kwargs.get('url', None) - self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.aad_resource_id = kwargs.get('aad_resource_id', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'RestService' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service_py3.py deleted file mode 100644 index 9af9f609e52b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service_py3.py +++ /dev/null @@ -1,107 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class RestServiceLinkedService(LinkedService): - """Rest Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. 
- :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param url: Required. The base URL of the REST service. - :type url: object - :param enable_server_certificate_validation: Whether to validate server - side SSL certificate when connecting to the endpoint.The default value is - true. Type: boolean (or Expression with resultType boolean). - :type enable_server_certificate_validation: object - :param authentication_type: Required. Type of authentication used to - connect to the REST service. Possible values include: 'Anonymous', - 'Basic', 'AadServicePrincipal', 'ManagedServiceIdentity' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.RestServiceAuthenticationType - :param user_name: The user name used in Basic authentication type. - :type user_name: object - :param password: The password used in Basic authentication type. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param service_principal_id: The application's client ID used in - AadServicePrincipal authentication type. - :type service_principal_id: object - :param service_principal_key: The application's key used in - AadServicePrincipal authentication type. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The tenant information (domain name or tenant ID) used in - AadServicePrincipal authentication type under which your application - resides. - :type tenant: object - :param aad_resource_id: The resource you are requesting authorization to - use. - :type aad_resource_id: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
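# Usage sketch (illustrative, not from this diff): the minimal required
# configuration for RestServiceLinkedService, assuming the public import path;
# the base URL is a placeholder and 'Anonymous' is one of the documented
# authentication types.
from azure.mgmt.datafactory.models import RestServiceLinkedService

rest_service = RestServiceLinkedService(
    url='https://example.com/api',    # required base URL of the REST service
    authentication_type='Anonymous',  # required; see RestServiceAuthenticationType
)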
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, url, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, enable_server_certificate_validation=None, user_name=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, aad_resource_id=None, encrypted_credential=None, **kwargs) -> None: - super(RestServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.url = url - self.enable_server_certificate_validation = enable_server_certificate_validation - self.authentication_type = authentication_type - self.user_name = user_name - self.password = password - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.aad_resource_id = aad_resource_id - self.encrypted_credential = encrypted_credential - self.type = 'RestService' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source.py deleted file mode 100644 index f32d4d67e427..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source.py +++ /dev/null @@ -1,86 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class RestSource(CopySource): - """A copy activity Rest service source. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param request_method: The HTTP method used to call the RESTful API. The - default is GET. Type: string (or Expression with resultType string). - :type request_method: object - :param request_body: The HTTP request body to the RESTful API if - requestMethod is POST. Type: string (or Expression with resultType - string). - :type request_body: object - :param additional_headers: The additional HTTP headers in the request to - the RESTful API. Type: string (or Expression with resultType string). - :type additional_headers: object - :param pagination_rules: The pagination rules to compose next page - requests. Type: string (or Expression with resultType string). - :type pagination_rules: object - :param http_request_timeout: The timeout (TimeSpan) to get an HTTP - response. It is the timeout to get a response, not the timeout to read - response data. Default value: 00:01:40. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: object - :param request_interval: The time to await before sending next page - request. 
- :type request_interval: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'request_method': {'key': 'requestMethod', 'type': 'object'}, - 'request_body': {'key': 'requestBody', 'type': 'object'}, - 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, - 'pagination_rules': {'key': 'paginationRules', 'type': 'object'}, - 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, - 'request_interval': {'key': 'requestInterval', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(RestSource, self).__init__(**kwargs) - self.request_method = kwargs.get('request_method', None) - self.request_body = kwargs.get('request_body', None) - self.additional_headers = kwargs.get('additional_headers', None) - self.pagination_rules = kwargs.get('pagination_rules', None) - self.http_request_timeout = kwargs.get('http_request_timeout', None) - self.request_interval = kwargs.get('request_interval', None) - self.type = 'RestSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source_py3.py deleted file mode 100644 index 5fcbb2f7a76d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source_py3.py +++ /dev/null @@ -1,86 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class RestSource(CopySource): - """A copy activity Rest service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param request_method: The HTTP method used to call the RESTful API. The - default is GET. Type: string (or Expression with resultType string). - :type request_method: object - :param request_body: The HTTP request body to the RESTful API if - requestMethod is POST. Type: string (or Expression with resultType - string). 
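# Usage sketch (illustrative, not from this diff): a RestSource with the
# documented default method and timeout spelled out, assuming the public
# import path.
from azure.mgmt.datafactory.models import RestSource

rest_source = RestSource(
    request_method='GET',             # GET is the documented default
    http_request_timeout='00:01:40',  # TimeSpan pattern, documented default value
)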
- :type request_body: object - :param additional_headers: The additional HTTP headers in the request to - the RESTful API. Type: string (or Expression with resultType string). - :type additional_headers: object - :param pagination_rules: The pagination rules to compose next page - requests. Type: string (or Expression with resultType string). - :type pagination_rules: object - :param http_request_timeout: The timeout (TimeSpan) to get an HTTP - response. It is the timeout to get a response, not the timeout to read - response data. Default value: 00:01:40. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: object - :param request_interval: The time to await before sending next page - request. - :type request_interval: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'request_method': {'key': 'requestMethod', 'type': 'object'}, - 'request_body': {'key': 'requestBody', 'type': 'object'}, - 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, - 'pagination_rules': {'key': 'paginationRules', 'type': 'object'}, - 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, - 'request_interval': {'key': 'requestInterval', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, request_method=None, request_body=None, additional_headers=None, pagination_rules=None, http_request_timeout=None, request_interval=None, **kwargs) -> None: - super(RestSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.request_method = request_method - self.request_body = request_body - self.additional_headers = additional_headers - self.pagination_rules = pagination_rules - self.http_request_timeout = http_request_timeout - self.request_interval = request_interval - self.type = 'RestSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy.py deleted file mode 100644 index e6f5b1876259..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy.py +++ /dev/null @@ -1,38 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class RetryPolicy(Model): - """Execution policy for an activity. - - :param count: Maximum ordinary retry attempts. Default is 0. Type: integer - (or Expression with resultType integer), minimum: 0. 
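# Usage sketch (illustrative, not from this diff): a RetryPolicy that stays
# within the documented bounds (interval_in_seconds between 30 and 86400),
# assuming the public import path.
from azure.mgmt.datafactory.models import RetryPolicy

policy = RetryPolicy(count=3, interval_in_seconds=30)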
- :type count: object - :param interval_in_seconds: Interval between retries in seconds. Default - is 30. - :type interval_in_seconds: int - """ - - _validation = { - 'interval_in_seconds': {'maximum': 86400, 'minimum': 30}, - } - - _attribute_map = { - 'count': {'key': 'count', 'type': 'object'}, - 'interval_in_seconds': {'key': 'intervalInSeconds', 'type': 'int'}, - } - - def __init__(self, **kwargs): - super(RetryPolicy, self).__init__(**kwargs) - self.count = kwargs.get('count', None) - self.interval_in_seconds = kwargs.get('interval_in_seconds', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy_py3.py deleted file mode 100644 index b51b87a49938..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy_py3.py +++ /dev/null @@ -1,38 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class RetryPolicy(Model): - """Execution policy for an activity. - - :param count: Maximum ordinary retry attempts. Default is 0. Type: integer - (or Expression with resultType integer), minimum: 0. - :type count: object - :param interval_in_seconds: Interval between retries in seconds. Default - is 30. - :type interval_in_seconds: int - """ - - _validation = { - 'interval_in_seconds': {'maximum': 86400, 'minimum': 30}, - } - - _attribute_map = { - 'count': {'key': 'count', 'type': 'object'}, - 'interval_in_seconds': {'key': 'intervalInSeconds', 'type': 'int'}, - } - - def __init__(self, *, count=None, interval_in_seconds: int=None, **kwargs) -> None: - super(RetryPolicy, self).__init__(**kwargs) - self.count = count - self.interval_in_seconds = interval_in_seconds diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters.py deleted file mode 100644 index 9271f7adf029..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters.py +++ /dev/null @@ -1,54 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class RunFilterParameters(Model): - """Query parameters for listing runs. - - All required parameters must be populated in order to send to Azure. - - :param continuation_token: The continuation token for getting the next - page of results. Null for first page. - :type continuation_token: str - :param last_updated_after: Required. 
The time at or after which the run - event was updated in 'ISO 8601' format. - :type last_updated_after: datetime - :param last_updated_before: Required. The time at or before which the run - event was updated in 'ISO 8601' format. - :type last_updated_before: datetime - :param filters: List of filters. - :type filters: list[~azure.mgmt.datafactory.models.RunQueryFilter] - :param order_by: List of OrderBy option. - :type order_by: list[~azure.mgmt.datafactory.models.RunQueryOrderBy] - """ - - _validation = { - 'last_updated_after': {'required': True}, - 'last_updated_before': {'required': True}, - } - - _attribute_map = { - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, - 'last_updated_after': {'key': 'lastUpdatedAfter', 'type': 'iso-8601'}, - 'last_updated_before': {'key': 'lastUpdatedBefore', 'type': 'iso-8601'}, - 'filters': {'key': 'filters', 'type': '[RunQueryFilter]'}, - 'order_by': {'key': 'orderBy', 'type': '[RunQueryOrderBy]'}, - } - - def __init__(self, **kwargs): - super(RunFilterParameters, self).__init__(**kwargs) - self.continuation_token = kwargs.get('continuation_token', None) - self.last_updated_after = kwargs.get('last_updated_after', None) - self.last_updated_before = kwargs.get('last_updated_before', None) - self.filters = kwargs.get('filters', None) - self.order_by = kwargs.get('order_by', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters_py3.py deleted file mode 100644 index c96e64eb63b3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters_py3.py +++ /dev/null @@ -1,54 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class RunFilterParameters(Model): - """Query parameters for listing runs. - - All required parameters must be populated in order to send to Azure. - - :param continuation_token: The continuation token for getting the next - page of results. Null for first page. - :type continuation_token: str - :param last_updated_after: Required. The time at or after which the run - event was updated in 'ISO 8601' format. - :type last_updated_after: datetime - :param last_updated_before: Required. The time at or before which the run - event was updated in 'ISO 8601' format. - :type last_updated_before: datetime - :param filters: List of filters. - :type filters: list[~azure.mgmt.datafactory.models.RunQueryFilter] - :param order_by: List of OrderBy option. 
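# Usage sketch (illustrative, not from this diff): RunFilterParameters with
# the two required timestamps, assuming the public import path; the one-day
# window is an arbitrary placeholder.
from datetime import datetime, timedelta

from azure.mgmt.datafactory.models import RunFilterParameters

now = datetime.utcnow()
params = RunFilterParameters(
    last_updated_after=now - timedelta(days=1),  # required lower bound
    last_updated_before=now,                     # required upper bound
)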
- :type order_by: list[~azure.mgmt.datafactory.models.RunQueryOrderBy] - """ - - _validation = { - 'last_updated_after': {'required': True}, - 'last_updated_before': {'required': True}, - } - - _attribute_map = { - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, - 'last_updated_after': {'key': 'lastUpdatedAfter', 'type': 'iso-8601'}, - 'last_updated_before': {'key': 'lastUpdatedBefore', 'type': 'iso-8601'}, - 'filters': {'key': 'filters', 'type': '[RunQueryFilter]'}, - 'order_by': {'key': 'orderBy', 'type': '[RunQueryOrderBy]'}, - } - - def __init__(self, *, last_updated_after, last_updated_before, continuation_token: str=None, filters=None, order_by=None, **kwargs) -> None: - super(RunFilterParameters, self).__init__(**kwargs) - self.continuation_token = continuation_token - self.last_updated_after = last_updated_after - self.last_updated_before = last_updated_before - self.filters = filters - self.order_by = order_by diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter.py deleted file mode 100644 index 7d54150a6815..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter.py +++ /dev/null @@ -1,53 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class RunQueryFilter(Model): - """Query filter option for listing runs. - - All required parameters must be populated in order to send to Azure. - - :param operand: Required. Parameter name to be used for filter. The - allowed operands to query pipeline runs are PipelineName, RunStart, RunEnd - and Status; to query activity runs are ActivityName, ActivityRunStart, - ActivityRunEnd, ActivityType and Status, and to query trigger runs are - TriggerName, TriggerRunTimestamp and Status. Possible values include: - 'PipelineName', 'Status', 'RunStart', 'RunEnd', 'ActivityName', - 'ActivityRunStart', 'ActivityRunEnd', 'ActivityType', 'TriggerName', - 'TriggerRunTimestamp', 'RunGroupId', 'LatestOnly' - :type operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand - :param operator: Required. Operator to be used for filter. Possible values - include: 'Equals', 'NotEquals', 'In', 'NotIn' - :type operator: str or - ~azure.mgmt.datafactory.models.RunQueryFilterOperator - :param values: Required. List of filter values. 
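# Usage sketch (illustrative, not from this diff): a RunQueryFilter whose
# operand and operator come from the enumerations documented above, assuming
# the public import path; the pipeline name is a placeholder.
from azure.mgmt.datafactory.models import RunQueryFilter

name_filter = RunQueryFilter(
    operand='PipelineName',
    operator='Equals',
    values=['my_pipeline'],
)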
- :type values: list[str] - """ - - _validation = { - 'operand': {'required': True}, - 'operator': {'required': True}, - 'values': {'required': True}, - } - - _attribute_map = { - 'operand': {'key': 'operand', 'type': 'str'}, - 'operator': {'key': 'operator', 'type': 'str'}, - 'values': {'key': 'values', 'type': '[str]'}, - } - - def __init__(self, **kwargs): - super(RunQueryFilter, self).__init__(**kwargs) - self.operand = kwargs.get('operand', None) - self.operator = kwargs.get('operator', None) - self.values = kwargs.get('values', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter_py3.py deleted file mode 100644 index 814e7a4b499b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter_py3.py +++ /dev/null @@ -1,53 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class RunQueryFilter(Model): - """Query filter option for listing runs. - - All required parameters must be populated in order to send to Azure. - - :param operand: Required. Parameter name to be used for filter. The - allowed operands to query pipeline runs are PipelineName, RunStart, RunEnd - and Status; to query activity runs are ActivityName, ActivityRunStart, - ActivityRunEnd, ActivityType and Status, and to query trigger runs are - TriggerName, TriggerRunTimestamp and Status. Possible values include: - 'PipelineName', 'Status', 'RunStart', 'RunEnd', 'ActivityName', - 'ActivityRunStart', 'ActivityRunEnd', 'ActivityType', 'TriggerName', - 'TriggerRunTimestamp', 'RunGroupId', 'LatestOnly' - :type operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand - :param operator: Required. Operator to be used for filter. Possible values - include: 'Equals', 'NotEquals', 'In', 'NotIn' - :type operator: str or - ~azure.mgmt.datafactory.models.RunQueryFilterOperator - :param values: Required. List of filter values. - :type values: list[str] - """ - - _validation = { - 'operand': {'required': True}, - 'operator': {'required': True}, - 'values': {'required': True}, - } - - _attribute_map = { - 'operand': {'key': 'operand', 'type': 'str'}, - 'operator': {'key': 'operator', 'type': 'str'}, - 'values': {'key': 'values', 'type': '[str]'}, - } - - def __init__(self, *, operand, operator, values, **kwargs) -> None: - super(RunQueryFilter, self).__init__(**kwargs) - self.operand = operand - self.operator = operator - self.values = values diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by.py deleted file mode 100644 index 21afabcf215f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. 
All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class RunQueryOrderBy(Model): - """An object to provide order by options for listing runs. - - All required parameters must be populated in order to send to Azure. - - :param order_by: Required. Parameter name to be used for order by. The - allowed parameters to order by for pipeline runs are PipelineName, - RunStart, RunEnd and Status; for activity runs are ActivityName, - ActivityRunStart, ActivityRunEnd and Status; for trigger runs are - TriggerName, TriggerRunTimestamp and Status. Possible values include: - 'RunStart', 'RunEnd', 'PipelineName', 'Status', 'ActivityName', - 'ActivityRunStart', 'ActivityRunEnd', 'TriggerName', 'TriggerRunTimestamp' - :type order_by: str or ~azure.mgmt.datafactory.models.RunQueryOrderByField - :param order: Required. Sorting order of the parameter. Possible values - include: 'ASC', 'DESC' - :type order: str or ~azure.mgmt.datafactory.models.RunQueryOrder - """ - - _validation = { - 'order_by': {'required': True}, - 'order': {'required': True}, - } - - _attribute_map = { - 'order_by': {'key': 'orderBy', 'type': 'str'}, - 'order': {'key': 'order', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(RunQueryOrderBy, self).__init__(**kwargs) - self.order_by = kwargs.get('order_by', None) - self.order = kwargs.get('order', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by_py3.py deleted file mode 100644 index a3ddc8854d47..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by_py3.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class RunQueryOrderBy(Model): - """An object to provide order by options for listing runs. - - All required parameters must be populated in order to send to Azure. - - :param order_by: Required. Parameter name to be used for order by. The - allowed parameters to order by for pipeline runs are PipelineName, - RunStart, RunEnd and Status; for activity runs are ActivityName, - ActivityRunStart, ActivityRunEnd and Status; for trigger runs are - TriggerName, TriggerRunTimestamp and Status. Possible values include: - 'RunStart', 'RunEnd', 'PipelineName', 'Status', 'ActivityName', - 'ActivityRunStart', 'ActivityRunEnd', 'TriggerName', 'TriggerRunTimestamp' - :type order_by: str or ~azure.mgmt.datafactory.models.RunQueryOrderByField - :param order: Required. Sorting order of the parameter. 
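# Usage sketch (illustrative, not from this diff): a RunQueryOrderBy using
# values from the enumerations documented above, assuming the public import
# path.
from azure.mgmt.datafactory.models import RunQueryOrderBy

order = RunQueryOrderBy(order_by='RunStart', order='DESC')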
Possible values - include: 'ASC', 'DESC' - :type order: str or ~azure.mgmt.datafactory.models.RunQueryOrder - """ - - _validation = { - 'order_by': {'required': True}, - 'order': {'required': True}, - } - - _attribute_map = { - 'order_by': {'key': 'orderBy', 'type': 'str'}, - 'order': {'key': 'order', 'type': 'str'}, - } - - def __init__(self, *, order_by, order, **kwargs) -> None: - super(RunQueryOrderBy, self).__init__(**kwargs) - self.order_by = order_by - self.order = order diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service.py deleted file mode 100644 index c644ac664831..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class SalesforceLinkedService(LinkedService): - """Linked service for Salesforce. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param environment_url: The URL of Salesforce instance. Default is - 'https://login.salesforce.com'. To copy data from sandbox, specify - 'https://test.salesforce.com'. To copy data from custom domain, specify, - for example, 'https://[domain].my.salesforce.com'. Type: string (or - Expression with resultType string). - :type environment_url: object - :param username: The username for Basic authentication of the Salesforce - instance. Type: string (or Expression with resultType string). - :type username: object - :param password: The password for Basic authentication of the Salesforce - instance. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param security_token: The security token is required to remotely access - Salesforce instance. - :type security_token: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
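# Usage sketch (illustrative, not from this diff): SalesforceLinkedService
# pointed at the documented sandbox endpoint, assuming the public import path
# and that SecureString is the SecretBase implementation exported there; the
# credential values are placeholders.
from azure.mgmt.datafactory.models import SalesforceLinkedService, SecureString

salesforce = SalesforceLinkedService(
    environment_url='https://test.salesforce.com',  # sandbox URL per the docstring
    username='user@example.com',
    password=SecureString(value='<password>'),
    security_token=SecureString(value='<security-token>'),
)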
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SalesforceLinkedService, self).__init__(**kwargs) - self.environment_url = kwargs.get('environment_url', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.security_token = kwargs.get('security_token', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Salesforce' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service_py3.py deleted file mode 100644 index 05fcea7a3990..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class SalesforceLinkedService(LinkedService): - """Linked service for Salesforce. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param environment_url: The URL of Salesforce instance. Default is - 'https://login.salesforce.com'. To copy data from sandbox, specify - 'https://test.salesforce.com'. To copy data from custom domain, specify, - for example, 'https://[domain].my.salesforce.com'. Type: string (or - Expression with resultType string). - :type environment_url: object - :param username: The username for Basic authentication of the Salesforce - instance. 
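Because password and security_token are typed as SecretBase rather than plain strings, callers pass one of its concrete subclasses. A sketch of the two common options, assuming the SecureString and AzureKeyVaultSecretReference models generated elsewhere in this package; the linked service and secret names are hypothetical.

    from azure.mgmt.datafactory.models import (
        AzureKeyVaultSecretReference, LinkedServiceReference, SecureString)

    # Option 1: inline secret, stored encrypted by the service.
    password = SecureString(value='<password>')

    # Option 2: a reference to a secret held in Azure Key Vault.
    password = AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='MyKeyVaultLS'),
        secret_name='salesforce-password',
    )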
Type: string (or Expression with resultType string). - :type username: object - :param password: The password for Basic authentication of the Salesforce - instance. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param security_token: The security token is required to remotely access - Salesforce instance. - :type security_token: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, environment_url=None, username=None, password=None, security_token=None, encrypted_credential=None, **kwargs) -> None: - super(SalesforceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.environment_url = environment_url - self.username = username - self.password = password - self.security_token = security_token - self.encrypted_credential = encrypted_credential - self.type = 'Salesforce' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service.py deleted file mode 100644 index 93b4fcdb3d1f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service.py +++ /dev/null @@ -1,91 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class SalesforceMarketingCloudLinkedService(LinkedService): - """Salesforce Marketing Cloud linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. 
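A construction sketch for the SalesforceLinkedService model above, with all credential values hypothetical. Note that the dotted typeProperties.* keys in _attribute_map tell msrest to nest those fields under a typeProperties object on the wire, which serialize() should reflect.

    from azure.mgmt.datafactory.models import SalesforceLinkedService, SecureString

    sf_ls = SalesforceLinkedService(
        environment_url='https://test.salesforce.com',  # sandbox endpoint
        username='integration.user@example.com',
        password=SecureString(value='<password>'),
        security_token=SecureString(value='<security-token>'),
    )

    # Roughly: {'type': 'Salesforce',
    #           'typeProperties': {'environmentUrl': ..., 'username': ...}}
    body = sf_ls.serialize()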
- :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param client_id: Required. The client ID associated with the Salesforce - Marketing Cloud application. Type: string (or Expression with resultType - string). - :type client_id: object - :param client_secret: The client secret associated with the Salesforce - Marketing Cloud application. Type: string (or Expression with resultType - string). - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. Type: - boolean (or Expression with resultType boolean). - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. Type: boolean (or - Expression with resultType boolean). - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. Type: - boolean (or Expression with resultType boolean). - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SalesforceMarketingCloudLinkedService, self).__init__(**kwargs) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'SalesforceMarketingCloud' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service_py3.py deleted file mode 100644 index d7e09e27a43f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service_py3.py +++ /dev/null @@ -1,91 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class SalesforceMarketingCloudLinkedService(LinkedService): - """Salesforce Marketing Cloud linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param client_id: Required. 
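A sketch for the Marketing Cloud variant above; client_id is the only required type property, and the client secret again goes through a SecretBase subclass (all values hypothetical).

    from azure.mgmt.datafactory.models import (
        SalesforceMarketingCloudLinkedService, SecureString)

    sfmc_ls = SalesforceMarketingCloudLinkedService(
        client_id='<app-client-id>',                       # required
        client_secret=SecureString(value='<app-secret>'),
        use_encrypted_endpoints=True,  # these three default to true; shown
        use_host_verification=True,   # explicitly for illustration only
        use_peer_verification=True,
    )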
The client ID associated with the Salesforce - Marketing Cloud application. Type: string (or Expression with resultType - string). - :type client_id: object - :param client_secret: The client secret associated with the Salesforce - Marketing Cloud application. Type: string (or Expression with resultType - string). - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. Type: - boolean (or Expression with resultType boolean). - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. Type: boolean (or - Expression with resultType boolean). - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. Type: - boolean (or Expression with resultType boolean). - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(SalesforceMarketingCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.client_id = client_id - self.client_secret = client_secret - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'SalesforceMarketingCloud' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset.py deleted file mode 100644 index 20f581ce1c50..000000000000 --- 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class SalesforceMarketingCloudObjectDataset(Dataset): - """Salesforce Marketing Cloud dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). 
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SalesforceMarketingCloudObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'SalesforceMarketingCloudObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset_py3.py deleted file mode 100644 index 526ac806649f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class SalesforceMarketingCloudObjectDataset(Dataset): - """Salesforce Marketing Cloud dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). 
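A minimal dataset sketch for the model above; the linked service reference name and table are hypothetical.

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, SalesforceMarketingCloudObjectDataset)

    sfmc_ds = SalesforceMarketingCloudObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name='SfmcLS'),
        table_name='Campaign',  # Type: string or ADF expression
    )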
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(SalesforceMarketingCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'SalesforceMarketingCloudObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source.py deleted file mode 100644 index 09a0eca1758e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class SalesforceMarketingCloudSource(CopySource): - """A copy activity Salesforce Marketing Cloud source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SalesforceMarketingCloudSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'SalesforceMarketingCloudSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source_py3.py deleted file mode 100644 index 9b898af0c3a1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class SalesforceMarketingCloudSource(CopySource): - """A copy activity Salesforce Marketing Cloud source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
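A source sketch showing the optional query plus the retry settings inherited from CopySource; the query text is illustrative, and the wait value follows the timespan pattern documented above.

    from azure.mgmt.datafactory.models import SalesforceMarketingCloudSource

    sfmc_source = SalesforceMarketingCloudSource(
        query='SELECT * FROM Campaign',  # hypothetical query
        source_retry_count=3,
        source_retry_wait='00:00:30',
    )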
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'SalesforceMarketingCloudSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset.py deleted file mode 100644 index 10cfce97fe0f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class SalesforceObjectDataset(Dataset): - """The Salesforce object dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param object_api_name: The Salesforce object API name. Type: string (or - Expression with resultType string). 
- :type object_api_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SalesforceObjectDataset, self).__init__(**kwargs) - self.object_api_name = kwargs.get('object_api_name', None) - self.type = 'SalesforceObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset_py3.py deleted file mode 100644 index 3c3f75d6059e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class SalesforceObjectDataset(Dataset): - """The Salesforce object dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param object_api_name: The Salesforce object API name. Type: string (or - Expression with resultType string). 
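The Salesforce dataset keys off object_api_name rather than a table name. A sketch, with hypothetical names; a custom Salesforce object would use its __c API name.

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, SalesforceObjectDataset)

    account_ds = SalesforceObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name='SalesforceLS'),
        object_api_name='Account',
    )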
- :type object_api_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, object_api_name=None, **kwargs) -> None: - super(SalesforceObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.object_api_name = object_api_name - self.type = 'SalesforceObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service.py deleted file mode 100644 index fb6476ac9a30..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service.py +++ /dev/null @@ -1,87 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class SalesforceServiceCloudLinkedService(LinkedService): - """Linked service for Salesforce Service Cloud. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param environment_url: The URL of Salesforce Service Cloud instance. - Default is 'https://login.salesforce.com'. To copy data from sandbox, - specify 'https://test.salesforce.com'. To copy data from custom domain, - specify, for example, 'https://[domain].my.salesforce.com'. Type: string - (or Expression with resultType string). 
- :type environment_url: object - :param username: The username for Basic authentication of the Salesforce - instance. Type: string (or Expression with resultType string). - :type username: object - :param password: The password for Basic authentication of the Salesforce - instance. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param security_token: The security token is required to remotely access - Salesforce instance. - :type security_token: ~azure.mgmt.datafactory.models.SecretBase - :param extended_properties: Extended properties appended to the connection - string. Type: string (or Expression with resultType string). - :type extended_properties: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, - 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SalesforceServiceCloudLinkedService, self).__init__(**kwargs) - self.environment_url = kwargs.get('environment_url', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.security_token = kwargs.get('security_token', None) - self.extended_properties = kwargs.get('extended_properties', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'SalesforceServiceCloud' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service_py3.py deleted file mode 100644 index 3f0b3cc64d91..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service_py3.py +++ /dev/null @@ -1,87 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class SalesforceServiceCloudLinkedService(LinkedService): - """Linked service for Salesforce Service Cloud. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param environment_url: The URL of Salesforce Service Cloud instance. - Default is 'https://login.salesforce.com'. To copy data from sandbox, - specify 'https://test.salesforce.com'. To copy data from custom domain, - specify, for example, 'https://[domain].my.salesforce.com'. Type: string - (or Expression with resultType string). - :type environment_url: object - :param username: The username for Basic authentication of the Salesforce - instance. Type: string (or Expression with resultType string). - :type username: object - :param password: The password for Basic authentication of the Salesforce - instance. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param security_token: The security token is required to remotely access - Salesforce instance. - :type security_token: ~azure.mgmt.datafactory.models.SecretBase - :param extended_properties: Extended properties appended to the connection - string. Type: string (or Expression with resultType string). - :type extended_properties: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, - 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, environment_url=None, username=None, password=None, security_token=None, extended_properties=None, encrypted_credential=None, **kwargs) -> None: - super(SalesforceServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.environment_url = environment_url - self.username = username - self.password = password - self.security_token = security_token - self.extended_properties = extended_properties - self.encrypted_credential = encrypted_credential - self.type = 'SalesforceServiceCloud' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset.py deleted file mode 100644 index 1f5cb3bb5bf1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class SalesforceServiceCloudObjectDataset(Dataset): - """The Salesforce Service Cloud object dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
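A sketch for the Service Cloud linked service above, including registration in a factory. The client construction is omitted, the create_or_update signature is assumed from the generated operations in this package, and all resource names are hypothetical.

    from azure.mgmt.datafactory.models import (
        SalesforceServiceCloudLinkedService, SecureString)

    ssc_ls = SalesforceServiceCloudLinkedService(
        environment_url='https://login.salesforce.com',
        username='svc.user@example.com',
        password=SecureString(value='<password>'),
        security_token=SecureString(value='<security-token>'),
    )

    # adf_client: a DataFactoryManagementClient (construction omitted).
    adf_client.linked_services.create_or_update(
        'my-resource-group', 'my-factory', 'SalesforceServiceCloudLS', ssc_ls)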
- :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param object_api_name: The Salesforce Service Cloud object API name. - Type: string (or Expression with resultType string). - :type object_api_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SalesforceServiceCloudObjectDataset, self).__init__(**kwargs) - self.object_api_name = kwargs.get('object_api_name', None) - self.type = 'SalesforceServiceCloudObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset_py3.py deleted file mode 100644 index d215f5f0084d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class SalesforceServiceCloudObjectDataset(Dataset): - """The Salesforce Service Cloud object dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
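And the matching dataset sketch for the class above (reference name and object hypothetical):

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, SalesforceServiceCloudObjectDataset)

    case_ds = SalesforceServiceCloudObjectDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='SalesforceServiceCloudLS'),
        object_api_name='Case',
    )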
- :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param object_api_name: The Salesforce Service Cloud object API name. - Type: string (or Expression with resultType string). - :type object_api_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, object_api_name=None, **kwargs) -> None: - super(SalesforceServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.object_api_name = object_api_name - self.type = 'SalesforceServiceCloudObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink.py deleted file mode 100644 index 99e2b1a2c924..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink.py +++ /dev/null @@ -1,84 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink import CopySink - - -class SalesforceServiceCloudSink(CopySink): - """A copy activity Salesforce Service Cloud sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. 
Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param write_behavior: The write behavior for the operation. Default is - Insert. Possible values include: 'Insert', 'Upsert' - :type write_behavior: str or - ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior - :param external_id_field_name: The name of the external ID field for - upsert operation. Default value is 'Id' column. Type: string (or - Expression with resultType string). - :type external_id_field_name: object - :param ignore_null_values: The flag indicating whether to ignore null - values from the input dataset (except key fields) during a write - operation. Default value is false. When true, ADF leaves the data in the - destination object unchanged during upsert/update operations and inserts - the defined default value during insert operations; when false, ADF - updates the destination data to NULL during upsert/update operations and - inserts NULL during insert operations. Type: boolean (or Expression with - resultType boolean). - :type ignore_null_values: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, - 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SalesforceServiceCloudSink, self).__init__(**kwargs) - self.write_behavior = kwargs.get('write_behavior', None) - self.external_id_field_name = kwargs.get('external_id_field_name', None) - self.ignore_null_values = kwargs.get('ignore_null_values', None) - self.type = 'SalesforceServiceCloudSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink_py3.py deleted file mode 100644 index 2abfaa12d0e7..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink_py3.py +++ /dev/null @@ -1,84 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License.
See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class SalesforceServiceCloudSink(CopySink): - """A copy activity Salesforce Service Cloud sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized to this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param write_behavior: The write behavior for the operation. Default is - Insert. Possible values include: 'Insert', 'Upsert' - :type write_behavior: str or - ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior - :param external_id_field_name: The name of the external ID field for - upsert operation. Default value is 'Id' column. Type: string (or - Expression with resultType string). - :type external_id_field_name: object - :param ignore_null_values: The flag indicating whether to ignore null - values from the input dataset (except key fields) during a write - operation. Default value is false. When true, ADF leaves the data in the - destination object unchanged during upsert/update operations and inserts - the defined default value during insert operations; when false, ADF - updates the destination data to NULL during upsert/update operations and - inserts NULL during insert operations. Type: boolean (or Expression with - resultType boolean).
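For reference, the removed py3 variant above takes all of these fields as keyword-only arguments. A minimal construction sketch for an upsert configuration, assuming the pre-removal azure.mgmt.datafactory.models namespace (the external ID field name below is a hypothetical example):

    from azure.mgmt.datafactory.models import SalesforceServiceCloudSink

    sink = SalesforceServiceCloudSink(
        write_behavior='Upsert',                    # default is 'Insert'
        external_id_field_name='Account_ExtId__c',  # hypothetical external ID field; defaults to the 'Id' column
        ignore_null_values=True,                    # NULLs in the input leave destination values untouched
    )
    assert sink.type == 'SalesforceServiceCloudSink'  # discriminator constant set by the model itself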
- :type ignore_null_values: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, - 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None: - super(SalesforceServiceCloudSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.write_behavior = write_behavior - self.external_id_field_name = external_id_field_name - self.ignore_null_values = ignore_null_values - self.type = 'SalesforceServiceCloudSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source.py deleted file mode 100644 index 255bfab477bc..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source.py +++ /dev/null @@ -1,63 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class SalesforceServiceCloudSource(CopySource): - """A copy activity Salesforce Service Cloud source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). 
- :type query: object - :param read_behavior: The read behavior for the operation. Default is - Query. Possible values include: 'Query', 'QueryAll' - :type read_behavior: str or - ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(SalesforceServiceCloudSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.read_behavior = kwargs.get('read_behavior', None) - self.type = 'SalesforceServiceCloudSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source_py3.py deleted file mode 100644 index 77bb267f5a47..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source_py3.py +++ /dev/null @@ -1,63 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class SalesforceServiceCloudSource(CopySource): - """A copy activity Salesforce Service Cloud source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - :param read_behavior: The read behavior for the operation. Default is - Query. 
Possible values include: 'Query', 'QueryAll' - :type read_behavior: str or - ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, read_behavior=None, **kwargs) -> None: - super(SalesforceServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.read_behavior = read_behavior - self.type = 'SalesforceServiceCloudSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py deleted file mode 100644 index 9a1291bd4bfe..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py +++ /dev/null @@ -1,84 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink import CopySink - - -class SalesforceSink(CopySink): - """A copy activity Salesforce sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param write_behavior: The write behavior for the operation. Default is - Insert. 
Possible values include: 'Insert', 'Upsert' - :type write_behavior: str or - ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior - :param external_id_field_name: The name of the external ID field for - upsert operation. Default value is 'Id' column. Type: string (or - Expression with resultType string). - :type external_id_field_name: object - :param ignore_null_values: The flag indicating whether to ignore null - values from the input dataset (except key fields) during a write - operation. Default value is false. When true, ADF leaves the data in the - destination object unchanged during upsert/update operations and inserts - the defined default value during insert operations; when false, ADF - updates the destination data to NULL during upsert/update operations and - inserts NULL during insert operations. Type: boolean (or Expression with - resultType boolean). - :type ignore_null_values: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, - 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SalesforceSink, self).__init__(**kwargs) - self.write_behavior = kwargs.get('write_behavior', None) - self.external_id_field_name = kwargs.get('external_id_field_name', None) - self.ignore_null_values = kwargs.get('ignore_null_values', None) - self.type = 'SalesforceSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py deleted file mode 100644 index 54a56618d01e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py +++ /dev/null @@ -1,84 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class SalesforceSink(CopySink): - """A copy activity Salesforce sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized to this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout.
Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param write_behavior: The write behavior for the operation. Default is - Insert. Possible values include: 'Insert', 'Upsert' - :type write_behavior: str or - ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior - :param external_id_field_name: The name of the external ID field for - upsert operation. Default value is 'Id' column. Type: string (or - Expression with resultType string). - :type external_id_field_name: object - :param ignore_null_values: The flag indicating whether to ignore null - values from the input dataset (except key fields) during a write - operation. Default value is false. When true, ADF leaves the data in the - destination object unchanged during upsert/update operations and inserts - the defined default value during insert operations; when false, ADF - updates the destination data to NULL during upsert/update operations and - inserts NULL during insert operations. Type: boolean (or Expression with - resultType boolean).
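The _attribute_map above is what turns these snake_case attributes into the camelCase keys the service expects. A small sketch of the resulting wire shape, assuming msrest's standard Model.serialize() behavior (keys shown are taken from the map above):

    from azure.mgmt.datafactory.models import SalesforceSink

    sink = SalesforceSink(write_behavior='Upsert', ignore_null_values=False)
    payload = sink.serialize()
    # payload is a JSON-ready dict keyed per _attribute_map, roughly:
    # {'type': 'SalesforceSink', 'writeBehavior': 'Upsert', 'ignoreNullValues': False}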
- :type ignore_null_values: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, - 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None: - super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.write_behavior = write_behavior - self.external_id_field_name = external_id_field_name - self.ignore_null_values = ignore_null_values - self.type = 'SalesforceSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py deleted file mode 100644 index 4f2590c3ab9d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py +++ /dev/null @@ -1,63 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class SalesforceSource(CopySource): - """A copy activity Salesforce source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - :param read_behavior: The read behavior for the operation. 
Default is - Query. Possible values include: 'Query', 'QueryAll' - :type read_behavior: str or - ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(SalesforceSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.read_behavior = kwargs.get('read_behavior', None) - self.type = 'SalesforceSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py deleted file mode 100644 index 4441e92eaff3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py +++ /dev/null @@ -1,63 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class SalesforceSource(CopySource): - """A copy activity Salesforce source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - :param read_behavior: The read behavior for the operation. Default is - Query. 
Possible values include: 'Query', 'QueryAll' - :type read_behavior: str or - ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, read_behavior=None, **kwargs) -> None: - super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.read_behavior = read_behavior - self.type = 'SalesforceSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset.py deleted file mode 100644 index 048d26f85696..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset.py +++ /dev/null @@ -1,67 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class SapBwCubeDataset(Dataset): - """The SAP BW cube dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. 
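Both removed source models (Salesforce and its Service Cloud twin) add only query and read_behavior on top of CopySource. A minimal sketch, with a hypothetical SOQL query:

    from azure.mgmt.datafactory.models import SalesforceSource

    source = SalesforceSource(
        query="SELECT Id, Name FROM Account",  # hypothetical SOQL; if omitted, the whole object is copied
        read_behavior='QueryAll',              # 'Query' (default) skips soft-deleted rows; 'QueryAll' includes them
    )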
- :type type: str - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(SapBwCubeDataset, self).__init__(**kwargs) - self.type = 'SapBwCube' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset_py3.py deleted file mode 100644 index 08334a824ba4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset_py3.py +++ /dev/null @@ -1,67 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class SapBwCubeDataset(Dataset): - """The SAP BW cube dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: - super(SapBwCubeDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'SapBwCube' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service.py deleted file mode 100644 index a57164c7215d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class SapBWLinkedService(LinkedService): - """SAP Business Warehouse Linked Service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param server: Required. Host name of the SAP BW instance. Type: string - (or Expression with resultType string). - :type server: object - :param system_number: Required. System number of the BW system. (Usually a - two-digit decimal number represented as a string.) Type: string (or - Expression with resultType string). - :type system_number: object - :param client_id: Required. Client ID of the client on the BW system. - (Usually a three-digit decimal number represented as a string) Type: - string (or Expression with resultType string). 
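SapBwCubeDataset, deleted just above, carries no type properties of its own; the required linked service reference is the only distinguishing input. A minimal sketch (the reference name is hypothetical, and LinkedServiceReference is assumed from the same models namespace):

    from azure.mgmt.datafactory.models import LinkedServiceReference, SapBwCubeDataset

    cube = SapBwCubeDataset(
        linked_service_name=LinkedServiceReference(reference_name='SapBwLinkedService'),
    )
    assert cube.type == 'SapBwCube'  # constant filled in by the model, not by the caller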
- :type client_id: object - :param user_name: Username to access the SAP BW server. Type: string (or - Expression with resultType string). - :type user_name: object - :param password: Password to access the SAP BW server. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'system_number': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SapBWLinkedService, self).__init__(**kwargs) - self.server = kwargs.get('server', None) - self.system_number = kwargs.get('system_number', None) - self.client_id = kwargs.get('client_id', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'SapBW' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service_py3.py deleted file mode 100644 index 92aef25dc215..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service_py3.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class SapBWLinkedService(LinkedService): - """SAP Business Warehouse Linked Service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param server: Required. Host name of the SAP BW instance. Type: string - (or Expression with resultType string). - :type server: object - :param system_number: Required. System number of the BW system. (Usually a - two-digit decimal number represented as a string.) Type: string (or - Expression with resultType string). - :type system_number: object - :param client_id: Required. Client ID of the client on the BW system. - (Usually a three-digit decimal number represented as a string) Type: - string (or Expression with resultType string). - :type client_id: object - :param user_name: Username to access the SAP BW server. Type: string (or - Expression with resultType string). - :type user_name: object - :param password: Password to access the SAP BW server. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'system_number': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, server, system_number, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(SapBWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.server = server - self.system_number = system_number - self.client_id = client_id - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'SapBW' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source.py deleted file mode 100644 index e3762d8e694e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. 
-# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class SapBwSource(CopySource): - """A copy activity source for SapBW server via MDX. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: MDX query. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SapBwSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'SapBwSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source_py3.py deleted file mode 100644 index ed6ff734742d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class SapBwSource(CopySource): - """A copy activity source for SapBW server via MDX. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. 
Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: MDX query. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'SapBwSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service.py deleted file mode 100644 index 53d47ab8ae41..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service.py +++ /dev/null @@ -1,76 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class SapCloudForCustomerLinkedService(LinkedService): - """Linked service for SAP Cloud for Customer. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param url: Required. The URL of SAP Cloud for Customer OData API. For - example, '[https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1]'. Type: - string (or Expression with resultType string). 
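Taken together, the removed SapBWLinkedService and SapBwSource covered MDX extraction from BW. A minimal sketch in which the host, system number, client, credentials, and MDX text are all hypothetical placeholders:

    from azure.mgmt.datafactory.models import SapBWLinkedService, SapBwSource, SecureString

    bw = SapBWLinkedService(
        server='sapbw.contoso.local',             # host name of the BW instance
        system_number='00',                       # usually two digits, passed as a string
        client_id='800',                          # usually three digits, passed as a string
        user_name='bw_reader',
        password=SecureString(value='<secret>'),  # any SecretBase implementation works here
    )
    source = SapBwSource(query='SELECT [Measures].MEMBERS ON COLUMNS FROM [$0D_SALES]')  # MDX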
- :type url: object - :param username: The username for Basic authentication. Type: string (or - Expression with resultType string). - :type username: object - :param password: The password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Either encryptedCredential or username/password must - be provided. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SapCloudForCustomerLinkedService, self).__init__(**kwargs) - self.url = kwargs.get('url', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'SapCloudForCustomer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service_py3.py deleted file mode 100644 index 9e47fd696503..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service_py3.py +++ /dev/null @@ -1,76 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class SapCloudForCustomerLinkedService(LinkedService): - """Linked service for SAP Cloud for Customer. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. 
Constant filled by server. - :type type: str - :param url: Required. The URL of SAP Cloud for Customer OData API. For - example, '[https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1]'. Type: - string (or Expression with resultType string). - :type url: object - :param username: The username for Basic authentication. Type: string (or - Expression with resultType string). - :type username: object - :param password: The password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Either encryptedCredential or username/password must - be provided. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(SapCloudForCustomerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.url = url - self.username = username - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'SapCloudForCustomer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset.py deleted file mode 100644 index 436b251207a4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class SapCloudForCustomerResourceDataset(Dataset): - """The path of the SAP Cloud for Customer OData entity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. 
- :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param path: Required. The path of the SAP Cloud for Customer OData - entity. Type: string (or Expression with resultType string). - :type path: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SapCloudForCustomerResourceDataset, self).__init__(**kwargs) - self.path = kwargs.get('path', None) - self.type = 'SapCloudForCustomerResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset_py3.py deleted file mode 100644 index 455bad7c9095..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset_py3.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class SapCloudForCustomerResourceDataset(Dataset): - """The path of the SAP Cloud for Customer OData entity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. 
- :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param path: Required. The path of the SAP Cloud for Customer OData - entity. Type: string (or Expression with resultType string). - :type path: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, path, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: - super(SapCloudForCustomerResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.path = path - self.type = 'SapCloudForCustomerResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py deleted file mode 100644 index e5a37858abb5..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py +++ /dev/null @@ -1,67 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink import CopySink - - -class SapCloudForCustomerSink(CopySink): - """A copy activity SAP Cloud for Customer sink. 
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param write_behavior: The write behavior for the operation. Default is - 'Insert'. Possible values include: 'Insert', 'Update' - :type write_behavior: str or - ~azure.mgmt.datafactory.models.SapCloudForCustomerSinkWriteBehavior - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(SapCloudForCustomerSink, self).__init__(**kwargs) - self.write_behavior = kwargs.get('write_behavior', None) - self.type = 'SapCloudForCustomerSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py deleted file mode 100644 index 29f01fdd6891..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py +++ /dev/null @@ -1,67 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class SapCloudForCustomerSink(CopySink): - """A copy activity SAP Cloud for Customer sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. 
Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param write_behavior: The write behavior for the operation. Default is - 'Insert'. Possible values include: 'Insert', 'Update' - :type write_behavior: str or - ~azure.mgmt.datafactory.models.SapCloudForCustomerSinkWriteBehavior - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: - super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.write_behavior = write_behavior - self.type = 'SapCloudForCustomerSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source.py deleted file mode 100644 index 561c1b342f93..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class SapCloudForCustomerSource(CopySource): - """A copy activity source for SAP Cloud for Customer source. - - All required parameters must be populated in order to send to Azure. 
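The SapCloudForCustomerSink deleted just above adds a single type property, write_behavior, on top of the common CopySink fields; per its docstring it accepts 'Insert' (the default) or 'Update'. A short placeholder sketch:

from azure.mgmt.datafactory.models import SapCloudForCustomerSink

# write_behavior maps to SapCloudForCustomerSinkWriteBehavior values.
c4c_sink = SapCloudForCustomerSink(
    write_behavior='Update',
    write_batch_size=50,           # optional; integer expression, minimum 0
    max_concurrent_connections=4,  # optional
)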
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: SAP Cloud for Customer OData query. For example, "$top=1". - Type: string (or Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SapCloudForCustomerSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'SapCloudForCustomerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source_py3.py deleted file mode 100644 index e9dab6ad1899..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class SapCloudForCustomerSource(CopySource): - """A copy activity source for SAP Cloud for Customer source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: SAP Cloud for Customer OData query. 
For example, "$top=1". - Type: string (or Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'SapCloudForCustomerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service.py deleted file mode 100644 index 0ca69242055f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service.py +++ /dev/null @@ -1,76 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class SapEccLinkedService(LinkedService): - """Linked service for SAP ERP Central Component(SAP ECC). - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param url: Required. The URL of SAP ECC OData API. For example, - '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: string (or - Expression with resultType string). - :type url: str - :param username: The username for Basic authentication. Type: string (or - Expression with resultType string). - :type username: str - :param password: The password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Either encryptedCredential or username/password must - be provided. 
Type: string (or Expression with resultType string). - :type encrypted_credential: str - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'str'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(SapEccLinkedService, self).__init__(**kwargs) - self.url = kwargs.get('url', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'SapEcc' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service_py3.py deleted file mode 100644 index 7afd76b8fe09..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service_py3.py +++ /dev/null @@ -1,76 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class SapEccLinkedService(LinkedService): - """Linked service for SAP ERP Central Component(SAP ECC). - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param url: Required. The URL of SAP ECC OData API. For example, - '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: string (or - Expression with resultType string). - :type url: str - :param username: The username for Basic authentication. Type: string (or - Expression with resultType string). - :type username: str - :param password: The password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. 
Credentials are encrypted using the integration runtime - credential manager. Either encryptedCredential or username/password must - be provided. Type: string (or Expression with resultType string). - :type encrypted_credential: str - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'str'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, - } - - def __init__(self, *, url: str, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, username: str=None, password=None, encrypted_credential: str=None, **kwargs) -> None: - super(SapEccLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.url = url - self.username = username - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'SapEcc' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py deleted file mode 100644 index f79367f49b3d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class SapEccResourceDataset(Dataset): - """The path of the SAP ECC OData entity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
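Unlike the Cloud for Customer linked service earlier in this diff (which types these fields as object), the SapEccLinkedService removed above declares url, username and encrypted_credential as plain str. A hedged sketch with placeholder host and credentials:

from azure.mgmt.datafactory.models import SapEccLinkedService, SecureString

ecc_ls = SapEccLinkedService(
    url='https://hostname:port/sap/opu/odata/sap/servicename/',  # placeholder, as in the docstring
    username='ecc_user',                                         # placeholder
    password=SecureString(value='<secret>'),
)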
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param path: Required. The path of the SAP ECC OData entity. Type: string - (or Expression with resultType string). - :type path: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SapEccResourceDataset, self).__init__(**kwargs) - self.path = kwargs.get('path', None) - self.type = 'SapEccResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py deleted file mode 100644 index 76aaeb9bb9f2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class SapEccResourceDataset(Dataset): - """The path of the SAP ECC OData entity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param path: Required. The path of the SAP ECC OData entity. Type: string - (or Expression with resultType string). - :type path: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, path, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: - super(SapEccResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.path = path - self.type = 'SapEccResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py deleted file mode 100644 index 6379c33713d4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class SapEccSource(CopySource): - """A copy activity source for SAP ECC source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. 
Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: SAP ECC OData query. For example, "$top=1". Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SapEccSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'SapEccSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py deleted file mode 100644 index 4412cac39960..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class SapEccSource(CopySource): - """A copy activity source for SAP ECC source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: SAP ECC OData query. For example, "$top=1". Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'SapEccSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py deleted file mode 100644 index 14eda87b7be6..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py +++ /dev/null @@ -1,85 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class SapHanaLinkedService(LinkedService): - """SAP HANA Linked Service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: SAP HANA ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param server: Required. Host name of the SAP HANA server. Type: string - (or Expression with resultType string). - :type server: object - :param authentication_type: The authentication type to be used to connect - to the SAP HANA server. Possible values include: 'Basic', 'Windows' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.SapHanaAuthenticationType - :param user_name: Username to access the SAP HANA server. Type: string (or - Expression with resultType string). - :type user_name: object - :param password: Password to access the SAP HANA server. 
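The SapEccSource whose deletion just completed is what a copy activity's source property would reference; query is an optional OData expression string. A sketch under those assumptions (the query value is illustrative):

from azure.mgmt.datafactory.models import SapEccSource

# '$top=2' mirrors the docstring's "$top=1" example; any string expression works.
ecc_source = SapEccSource(query='$top=2', source_retry_count=3)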
- :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SapHanaLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.server = kwargs.get('server', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'SapHana' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py deleted file mode 100644 index de378a5b2bf3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py +++ /dev/null @@ -1,85 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class SapHanaLinkedService(LinkedService): - """SAP HANA Linked Service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. 
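In the SapHanaLinkedService being removed here, server is the only required type property; access is configured either through connection_string or through the authentication_type/user_name/password trio. A placeholder sketch:

from azure.mgmt.datafactory.models import SapHanaLinkedService, SecureString

hana_ls = SapHanaLinkedService(
    server='myhanahost:30015',     # placeholder host:port
    authentication_type='Basic',   # or 'Windows'
    user_name='hana_user',         # placeholder
    password=SecureString(value='<secret>'),
)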
- :type type: str - :param connection_string: SAP HANA ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param server: Required. Host name of the SAP HANA server. Type: string - (or Expression with resultType string). - :type server: object - :param authentication_type: The authentication type to be used to connect - to the SAP HANA server. Possible values include: 'Basic', 'Windows' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.SapHanaAuthenticationType - :param user_name: Username to access the SAP HANA server. Type: string (or - Expression with resultType string). - :type user_name: object - :param password: Password to access the SAP HANA server. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, server, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(SapHanaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.server = server - self.authentication_type = authentication_type - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'SapHana' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source.py deleted file mode 100644 index e946dbcd9a50..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source.py +++ /dev/null @@ -1,62 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class SapHanaSource(CopySource): - """A copy activity source for SAP HANA source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: SAP HANA Sql query. Type: string (or Expression with - resultType string). - :type query: object - :param packet_size: The packet size of data read from SAP HANA. Type: - integer(or Expression with resultType integer). - :type packet_size: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'packet_size': {'key': 'packetSize', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SapHanaSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.packet_size = kwargs.get('packet_size', None) - self.type = 'SapHanaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source_py3.py deleted file mode 100644 index 730326c19183..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source_py3.py +++ /dev/null @@ -1,62 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class SapHanaSource(CopySource): - """A copy activity source for SAP HANA source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. 
Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: SAP HANA Sql query. Type: string (or Expression with - resultType string). - :type query: object - :param packet_size: The packet size of data read from SAP HANA. Type: - integer(or Expression with resultType integer). - :type packet_size: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'packet_size': {'key': 'packetSize', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, packet_size=None, **kwargs) -> None: - super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.packet_size = packet_size - self.type = 'SapHanaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset.py deleted file mode 100644 index 6ff1ae31cd22..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset.py +++ /dev/null @@ -1,77 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class SapHanaTableDataset(Dataset): - """SAP HANA Table properties. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param sap_hana_table_dataset_schema: The schema name of SAP HANA. Type: - string (or Expression with resultType string). - :type sap_hana_table_dataset_schema: object - :param table: The table name of SAP HANA. Type: string (or Expression with - resultType string). - :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sap_hana_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SapHanaTableDataset, self).__init__(**kwargs) - self.sap_hana_table_dataset_schema = kwargs.get('sap_hana_table_dataset_schema', None) - self.table = kwargs.get('table', None) - self.type = 'SapHanaTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset_py3.py deleted file mode 100644 index 6dc5c48ba21d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset_py3.py +++ /dev/null @@ -1,77 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class SapHanaTableDataset(Dataset): - """SAP HANA Table properties. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. 
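Note how the deleted SapHanaTableDataset surfaces typeProperties.schema as sap_hana_table_dataset_schema, apparently to avoid colliding with the base Dataset's schema field. A placeholder sketch:

from azure.mgmt.datafactory.models import (
    LinkedServiceReference,
    SapHanaTableDataset,
)

hana_table = SapHanaTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='SapHanaLS'),  # placeholder name
    sap_hana_table_dataset_schema='MYSCHEMA',  # serialized as typeProperties.schema
    table='MYTABLE',                           # placeholder
)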
Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param sap_hana_table_dataset_schema: The schema name of SAP HANA. Type: - string (or Expression with resultType string). - :type sap_hana_table_dataset_schema: object - :param table: The table name of SAP HANA. Type: string (or Expression with - resultType string). - :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sap_hana_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, sap_hana_table_dataset_schema=None, table=None, **kwargs) -> None: - super(SapHanaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.sap_hana_table_dataset_schema = sap_hana_table_dataset_schema - self.table = table - self.type = 'SapHanaTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service.py deleted file mode 100644 index bfe9c323d302..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service.py +++ /dev/null @@ -1,99 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class SapOpenHubLinkedService(LinkedService): - """SAP Business Warehouse Open Hub Destination Linked Service. 
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param server: Required. Host name of the SAP BW instance where the open - hub destination is located. Type: string (or Expression with resultType - string). - :type server: object - :param system_number: Required. System number of the BW system where the - open hub destination is located. (Usually a two-digit decimal number - represented as a string.) Type: string (or Expression with resultType - string). - :type system_number: object - :param client_id: Required. Client ID of the client on the BW system where - the open hub destination is located. (Usually a three-digit decimal number - represented as a string) Type: string (or Expression with resultType - string). - :type client_id: object - :param language: Language of the BW system where the open hub destination - is located. The default value is EN. Type: string (or Expression with - resultType string). - :type language: object - :param user_name: Username to access the SAP BW server where the open hub - destination is located. Type: string (or Expression with resultType - string). - :type user_name: object - :param password: Password to access the SAP BW server where the open hub - destination is located. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'system_number': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'language': {'key': 'typeProperties.language', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SapOpenHubLinkedService, self).__init__(**kwargs) - self.server = kwargs.get('server', None) - self.system_number = kwargs.get('system_number', None) - self.client_id = kwargs.get('client_id', None) - self.language = kwargs.get('language', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'SapOpenHub' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service_py3.py deleted file mode 100644 index eddc50b0f1c5..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service_py3.py +++ /dev/null @@ -1,99 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class SapOpenHubLinkedService(LinkedService): - """SAP Business Warehouse Open Hub Destination Linked Service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param server: Required. Host name of the SAP BW instance where the open - hub destination is located. 
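Aside — a sketch of constructing the SapOpenHubLinkedService defined above. Every connection value is a placeholder, and SecureString (a SecretBase implementation defined elsewhere in this models package) is assumed for the password:

    from azure.mgmt.datafactory.models import (
        SapOpenHubLinkedService,
        SecureString,
    )

    # server, system_number and client_id are the three required fields.
    open_hub_ls = SapOpenHubLinkedService(
        server="sapbw.contoso.com",
        system_number="00",
        client_id="100",
        language="EN",
        user_name="bw_reader",
        password=SecureString(value="<secret>"),
    )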
Type: string (or Expression with resultType - string). - :type server: object - :param system_number: Required. System number of the BW system where the - open hub destination is located. (Usually a two-digit decimal number - represented as a string.) Type: string (or Expression with resultType - string). - :type system_number: object - :param client_id: Required. Client ID of the client on the BW system where - the open hub destination is located. (Usually a three-digit decimal number - represented as a string) Type: string (or Expression with resultType - string). - :type client_id: object - :param language: Language of the BW system where the open hub destination - is located. The default value is EN. Type: string (or Expression with - resultType string). - :type language: object - :param user_name: Username to access the SAP BW server where the open hub - destination is located. Type: string (or Expression with resultType - string). - :type user_name: object - :param password: Password to access the SAP BW server where the open hub - destination is located. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'system_number': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'language': {'key': 'typeProperties.language', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, server, system_number, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, language=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(SapOpenHubLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.server = server - self.system_number = system_number - self.client_id = client_id - self.language = language - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'SapOpenHub' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source.py deleted file mode 100644 index d6dcbda60e36..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source.py +++ /dev/null @@ -1,66 +0,0 
@@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class SapOpenHubSource(CopySource): - """A copy activity source for SAP Business Warehouse Open Hub Destination - source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param exclude_last_request: Whether to exclude the records of the last - request. The default value is true. Type: boolean (or Expression with - resultType boolean). - :type exclude_last_request: object - :param base_request_id: The ID of request for delta loading. Once it is - set, only data with requestId larger than the value of this property will - be retrieved. The default value is 0. Type: integer (or Expression with - resultType integer ). - :type base_request_id: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, - 'base_request_id': {'key': 'baseRequestId', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SapOpenHubSource, self).__init__(**kwargs) - self.exclude_last_request = kwargs.get('exclude_last_request', None) - self.base_request_id = kwargs.get('base_request_id', None) - self.type = 'SapOpenHubSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source_py3.py deleted file mode 100644 index 752ffd8554b0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source_py3.py +++ /dev/null @@ -1,66 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. 
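Aside — a sketch of the SapOpenHubSource copy-activity source deleted above, using only the two type-specific fields it adds; the request id is an arbitrary example value:

    from azure.mgmt.datafactory.models import SapOpenHubSource

    # Delta load: skip the last (possibly still-open) request and read only
    # requests with an id greater than 40.
    source = SapOpenHubSource(
        exclude_last_request=True,
        base_request_id=40,
    )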
-# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class SapOpenHubSource(CopySource): - """A copy activity source for SAP Business Warehouse Open Hub Destination - source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param exclude_last_request: Whether to exclude the records of the last - request. The default value is true. Type: boolean (or Expression with - resultType boolean). - :type exclude_last_request: object - :param base_request_id: The ID of request for delta loading. Once it is - set, only data with requestId larger than the value of this property will - be retrieved. The default value is 0. Type: integer (or Expression with - resultType integer ). - :type base_request_id: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, - 'base_request_id': {'key': 'baseRequestId', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, exclude_last_request=None, base_request_id=None, **kwargs) -> None: - super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.exclude_last_request = exclude_last_request - self.base_request_id = base_request_id - self.type = 'SapOpenHubSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset.py deleted file mode 100644 index 2682969c5016..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset.py +++ /dev/null @@ -1,87 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. 
-# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class SapOpenHubTableDataset(Dataset): - """Sap Business Warehouse Open Hub Destination Table properties. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param open_hub_destination_name: Required. The name of the Open Hub - Destination with destination type as Database Table. Type: string (or - Expression with resultType string). - :type open_hub_destination_name: object - :param exclude_last_request: Whether to exclude the records of the last - request. The default value is true. Type: boolean (or Expression with - resultType boolean). - :type exclude_last_request: object - :param base_request_id: The ID of request for delta loading. Once it is - set, only data with requestId larger than the value of this property will - be retrieved. The default value is 0. Type: integer (or Expression with - resultType integer ). 
- :type base_request_id: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'open_hub_destination_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'open_hub_destination_name': {'key': 'typeProperties.openHubDestinationName', 'type': 'object'}, - 'exclude_last_request': {'key': 'typeProperties.excludeLastRequest', 'type': 'object'}, - 'base_request_id': {'key': 'typeProperties.baseRequestId', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SapOpenHubTableDataset, self).__init__(**kwargs) - self.open_hub_destination_name = kwargs.get('open_hub_destination_name', None) - self.exclude_last_request = kwargs.get('exclude_last_request', None) - self.base_request_id = kwargs.get('base_request_id', None) - self.type = 'SapOpenHubTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset_py3.py deleted file mode 100644 index b06a53c10db3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset_py3.py +++ /dev/null @@ -1,87 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class SapOpenHubTableDataset(Dataset): - """Sap Business Warehouse Open Hub Destination Table properties. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. 
If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param open_hub_destination_name: Required. The name of the Open Hub - Destination with destination type as Database Table. Type: string (or - Expression with resultType string). - :type open_hub_destination_name: object - :param exclude_last_request: Whether to exclude the records of the last - request. The default value is true. Type: boolean (or Expression with - resultType boolean). - :type exclude_last_request: object - :param base_request_id: The ID of request for delta loading. Once it is - set, only data with requestId larger than the value of this property will - be retrieved. The default value is 0. Type: integer (or Expression with - resultType integer ). - :type base_request_id: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'open_hub_destination_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'open_hub_destination_name': {'key': 'typeProperties.openHubDestinationName', 'type': 'object'}, - 'exclude_last_request': {'key': 'typeProperties.excludeLastRequest', 'type': 'object'}, - 'base_request_id': {'key': 'typeProperties.baseRequestId', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, open_hub_destination_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, exclude_last_request=None, base_request_id=None, **kwargs) -> None: - super(SapOpenHubTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.open_hub_destination_name = open_hub_destination_name - self.exclude_last_request = exclude_last_request - self.base_request_id = base_request_id - self.type = 'SapOpenHubTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service.py deleted file mode 100644 index 83b76d0a4fdd..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service.py +++ /dev/null @@ -1,140 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
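Aside — a sketch of the SapOpenHubTableDataset shown above, whose only required type property is the Open Hub destination name. The reference name and destination name are invented placeholders:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        SapOpenHubTableDataset,
    )

    open_hub_ds = SapOpenHubTableDataset(
        linked_service_name=LinkedServiceReference(reference_name="SapOpenHubLS"),
        open_hub_destination_name="ZOHD_SALES",  # assumed destination name
        exclude_last_request=True,
    )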
-# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class SapTableLinkedService(LinkedService): - """SAP Table Linked Service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param server: Host name of the SAP instance where the table is located. - Type: string (or Expression with resultType string). - :type server: object - :param system_number: System number of the SAP system where the table is - located. (Usually a two-digit decimal number represented as a string.) - Type: string (or Expression with resultType string). - :type system_number: object - :param client_id: Client ID of the client on the SAP system where the - table is located. (Usually a three-digit decimal number represented as a - string) Type: string (or Expression with resultType string). - :type client_id: object - :param language: Language of the SAP system where the table is located. - The default value is EN. Type: string (or Expression with resultType - string). - :type language: object - :param system_id: SystemID of the SAP system where the table is located. - Type: string (or Expression with resultType string). - :type system_id: object - :param user_name: Username to access the SAP server where the table is - located. Type: string (or Expression with resultType string). - :type user_name: object - :param password: Password to access the SAP server where the table is - located. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param message_server: The hostname of the SAP Message Server. Type: - string (or Expression with resultType string). - :type message_server: object - :param message_server_service: The service name or port number of the - Message Server. Type: string (or Expression with resultType string). - :type message_server_service: object - :param snc_mode: SNC activation indicator to access the SAP server where - the table is located. Must be either 0 (off) or 1 (on). Type: string (or - Expression with resultType string). - :type snc_mode: object - :param snc_my_name: Initiator's SNC name to access the SAP server where - the table is located. Type: string (or Expression with resultType string). - :type snc_my_name: object - :param snc_partner_name: Communication partner's SNC name to access the - SAP server where the table is located. Type: string (or Expression with - resultType string). - :type snc_partner_name: object - :param snc_library_path: External security product's library to access the - SAP server where the table is located. Type: string (or Expression with - resultType string). - :type snc_library_path: object - :param snc_qop: SNC Quality of Protection. Allowed value include: 1, 2, 3, - 8, 9. Type: string (or Expression with resultType string). 
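Aside — a sketch of the SapTableLinkedService being defined here. Unlike the Open Hub linked service, none of its connection fields are marked required, since a message-server connection (message_server, message_server_service, logon_group) can be used instead of a direct application-server connection. All values below are placeholders, and SecureString is again assumed for the password:

    from azure.mgmt.datafactory.models import (
        SapTableLinkedService,
        SecureString,
    )

    # Direct application-server connection; illustrative values only.
    sap_table_ls = SapTableLinkedService(
        server="sapapp.contoso.com",
        system_number="00",
        client_id="100",
        language="EN",
        user_name="rfc_user",
        password=SecureString(value="<secret>"),
    )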
- :type snc_qop: object - :param logon_group: The Logon Group for the SAP System. Type: string (or - Expression with resultType string). - :type logon_group: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'language': {'key': 'typeProperties.language', 'type': 'object'}, - 'system_id': {'key': 'typeProperties.systemId', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'message_server': {'key': 'typeProperties.messageServer', 'type': 'object'}, - 'message_server_service': {'key': 'typeProperties.messageServerService', 'type': 'object'}, - 'snc_mode': {'key': 'typeProperties.sncMode', 'type': 'object'}, - 'snc_my_name': {'key': 'typeProperties.sncMyName', 'type': 'object'}, - 'snc_partner_name': {'key': 'typeProperties.sncPartnerName', 'type': 'object'}, - 'snc_library_path': {'key': 'typeProperties.sncLibraryPath', 'type': 'object'}, - 'snc_qop': {'key': 'typeProperties.sncQop', 'type': 'object'}, - 'logon_group': {'key': 'typeProperties.logonGroup', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SapTableLinkedService, self).__init__(**kwargs) - self.server = kwargs.get('server', None) - self.system_number = kwargs.get('system_number', None) - self.client_id = kwargs.get('client_id', None) - self.language = kwargs.get('language', None) - self.system_id = kwargs.get('system_id', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.message_server = kwargs.get('message_server', None) - self.message_server_service = kwargs.get('message_server_service', None) - self.snc_mode = kwargs.get('snc_mode', None) - self.snc_my_name = kwargs.get('snc_my_name', None) - self.snc_partner_name = kwargs.get('snc_partner_name', None) - self.snc_library_path = kwargs.get('snc_library_path', None) - self.snc_qop = kwargs.get('snc_qop', None) - self.logon_group = kwargs.get('logon_group', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'SapTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service_py3.py deleted file mode 100644 index d098acc1bbda..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service_py3.py +++ /dev/null @@ -1,140 +0,0 @@ -# coding=utf-8 -# 
-------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class SapTableLinkedService(LinkedService): - """SAP Table Linked Service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param server: Host name of the SAP instance where the table is located. - Type: string (or Expression with resultType string). - :type server: object - :param system_number: System number of the SAP system where the table is - located. (Usually a two-digit decimal number represented as a string.) - Type: string (or Expression with resultType string). - :type system_number: object - :param client_id: Client ID of the client on the SAP system where the - table is located. (Usually a three-digit decimal number represented as a - string) Type: string (or Expression with resultType string). - :type client_id: object - :param language: Language of the SAP system where the table is located. - The default value is EN. Type: string (or Expression with resultType - string). - :type language: object - :param system_id: SystemID of the SAP system where the table is located. - Type: string (or Expression with resultType string). - :type system_id: object - :param user_name: Username to access the SAP server where the table is - located. Type: string (or Expression with resultType string). - :type user_name: object - :param password: Password to access the SAP server where the table is - located. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param message_server: The hostname of the SAP Message Server. Type: - string (or Expression with resultType string). - :type message_server: object - :param message_server_service: The service name or port number of the - Message Server. Type: string (or Expression with resultType string). - :type message_server_service: object - :param snc_mode: SNC activation indicator to access the SAP server where - the table is located. Must be either 0 (off) or 1 (on). Type: string (or - Expression with resultType string). - :type snc_mode: object - :param snc_my_name: Initiator's SNC name to access the SAP server where - the table is located. Type: string (or Expression with resultType string). - :type snc_my_name: object - :param snc_partner_name: Communication partner's SNC name to access the - SAP server where the table is located. Type: string (or Expression with - resultType string). 
- :type snc_partner_name: object - :param snc_library_path: External security product's library to access the - SAP server where the table is located. Type: string (or Expression with - resultType string). - :type snc_library_path: object - :param snc_qop: SNC Quality of Protection. Allowed value include: 1, 2, 3, - 8, 9. Type: string (or Expression with resultType string). - :type snc_qop: object - :param logon_group: The Logon Group for the SAP System. Type: string (or - Expression with resultType string). - :type logon_group: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'language': {'key': 'typeProperties.language', 'type': 'object'}, - 'system_id': {'key': 'typeProperties.systemId', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'message_server': {'key': 'typeProperties.messageServer', 'type': 'object'}, - 'message_server_service': {'key': 'typeProperties.messageServerService', 'type': 'object'}, - 'snc_mode': {'key': 'typeProperties.sncMode', 'type': 'object'}, - 'snc_my_name': {'key': 'typeProperties.sncMyName', 'type': 'object'}, - 'snc_partner_name': {'key': 'typeProperties.sncPartnerName', 'type': 'object'}, - 'snc_library_path': {'key': 'typeProperties.sncLibraryPath', 'type': 'object'}, - 'snc_qop': {'key': 'typeProperties.sncQop', 'type': 'object'}, - 'logon_group': {'key': 'typeProperties.logonGroup', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, server=None, system_number=None, client_id=None, language=None, system_id=None, user_name=None, password=None, message_server=None, message_server_service=None, snc_mode=None, snc_my_name=None, snc_partner_name=None, snc_library_path=None, snc_qop=None, logon_group=None, encrypted_credential=None, **kwargs) -> None: - super(SapTableLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.server = server - self.system_number = system_number - self.client_id = client_id - self.language = language - self.system_id = system_id - self.user_name = user_name - self.password = password - self.message_server = message_server - self.message_server_service = message_server_service - self.snc_mode = snc_mode - self.snc_my_name = snc_my_name - self.snc_partner_name = snc_partner_name - self.snc_library_path = snc_library_path - self.snc_qop = 
snc_qop - self.logon_group = logon_group - self.encrypted_credential = encrypted_credential - self.type = 'SapTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings.py deleted file mode 100644 index b688fe16683b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings.py +++ /dev/null @@ -1,47 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SapTablePartitionSettings(Model): - """The settings that will be leveraged for SAP table source partitioning. - - :param partition_column_name: The name of the column that will be used for - proceeding range partitioning. Type: string (or Expression with resultType - string). - :type partition_column_name: object - :param partition_upper_bound: The maximum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_upper_bound: object - :param partition_lower_bound: The minimum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_lower_bound: object - :param max_partitions_number: The maximum value of partitions the table - will be split into. Type: integer (or Expression with resultType string). - :type max_partitions_number: object - """ - - _attribute_map = { - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, - 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, - 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, - 'max_partitions_number': {'key': 'maxPartitionsNumber', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SapTablePartitionSettings, self).__init__(**kwargs) - self.partition_column_name = kwargs.get('partition_column_name', None) - self.partition_upper_bound = kwargs.get('partition_upper_bound', None) - self.partition_lower_bound = kwargs.get('partition_lower_bound', None) - self.max_partitions_number = kwargs.get('max_partitions_number', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings_py3.py deleted file mode 100644 index 37bdf610ab35..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings_py3.py +++ /dev/null @@ -1,47 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. 
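Aside — a sketch of the SapTablePartitionSettings model deleted above, which carries the four range-partitioning knobs referenced by SapTableSource further below. Column name and bounds are invented examples:

    from azure.mgmt.datafactory.models import SapTablePartitionSettings

    # Range-partition reads on a date-like column into at most 4 partitions.
    partition_settings = SapTablePartitionSettings(
        partition_column_name="ERSDA",      # assumed column
        partition_lower_bound="20190101",
        partition_upper_bound="20191231",
        max_partitions_number=4,
    )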
-# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SapTablePartitionSettings(Model): - """The settings that will be leveraged for SAP table source partitioning. - - :param partition_column_name: The name of the column that will be used for - proceeding range partitioning. Type: string (or Expression with resultType - string). - :type partition_column_name: object - :param partition_upper_bound: The maximum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_upper_bound: object - :param partition_lower_bound: The minimum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_lower_bound: object - :param max_partitions_number: The maximum value of partitions the table - will be split into. Type: integer (or Expression with resultType string). - :type max_partitions_number: object - """ - - _attribute_map = { - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, - 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, - 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, - 'max_partitions_number': {'key': 'maxPartitionsNumber', 'type': 'object'}, - } - - def __init__(self, *, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, max_partitions_number=None, **kwargs) -> None: - super(SapTablePartitionSettings, self).__init__(**kwargs) - self.partition_column_name = partition_column_name - self.partition_upper_bound = partition_upper_bound - self.partition_lower_bound = partition_lower_bound - self.max_partitions_number = max_partitions_number diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset.py deleted file mode 100644 index 24601ba6b793..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class SapTableResourceDataset(Dataset): - """SAP Table Resource properties. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. 
Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: Required. The name of the SAP Table. Type: string (or - Expression with resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'table_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SapTableResourceDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'SapTableResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset_py3.py deleted file mode 100644 index 7b034ccd3a91..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset_py3.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class SapTableResourceDataset(Dataset): - """SAP Table Resource properties. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. 
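Aside — a sketch of the SapTableResourceDataset shown here; table_name is its one required type property. The reference and table names are placeholders:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        SapTableResourceDataset,
    )

    sap_table_ds = SapTableResourceDataset(
        linked_service_name=LinkedServiceReference(reference_name="SapTableLS"),
        table_name="MARA",  # assumed SAP table name
    )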
- :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: Required. The name of the SAP Table. Type: string (or - Expression with resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'table_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: - super(SapTableResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'SapTableResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source.py deleted file mode 100644 index 35799515440e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source.py +++ /dev/null @@ -1,100 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class SapTableSource(CopySource): - """A copy activity source for SAP Table source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. 
Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param row_count: The number of rows to be retrieved. Type: integer(or - Expression with resultType integer). - :type row_count: object - :param row_skips: The number of rows that will be skipped. Type: integer - (or Expression with resultType integer). - :type row_skips: object - :param rfc_table_fields: The fields of the SAP table that will be - retrieved. For example, column0, column1. Type: string (or Expression with - resultType string). - :type rfc_table_fields: object - :param rfc_table_options: The options for the filtering of the SAP Table. - For example, COLUMN0 EQ SOME VALUE. Type: string (or Expression with - resultType string). - :type rfc_table_options: object - :param batch_size: Specifies the maximum number of rows that will be - retrieved at a time when retrieving data from SAP Table. Type: integer (or - Expression with resultType integer). - :type batch_size: object - :param custom_rfc_read_table_function_module: Specifies the custom RFC - function module that will be used to read data from SAP Table. Type: - string (or Expression with resultType string). - :type custom_rfc_read_table_function_module: object - :param partition_option: The partition mechanism that will be used for SAP - table read in parallel. Possible values include: 'None', 'PartitionOnInt', - 'PartitionOnCalendarYear', 'PartitionOnCalendarMonth', - 'PartitionOnCalendarDate', 'PartitionOnTime' - :type partition_option: str or - ~azure.mgmt.datafactory.models.SapTablePartitionOption - :param partition_settings: The settings that will be leveraged for SAP - table source partitioning. 
- :type partition_settings: - ~azure.mgmt.datafactory.models.SapTablePartitionSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'row_count': {'key': 'rowCount', 'type': 'object'}, - 'row_skips': {'key': 'rowSkips', 'type': 'object'}, - 'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'}, - 'rfc_table_options': {'key': 'rfcTableOptions', 'type': 'object'}, - 'batch_size': {'key': 'batchSize', 'type': 'object'}, - 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, - } - - def __init__(self, **kwargs): - super(SapTableSource, self).__init__(**kwargs) - self.row_count = kwargs.get('row_count', None) - self.row_skips = kwargs.get('row_skips', None) - self.rfc_table_fields = kwargs.get('rfc_table_fields', None) - self.rfc_table_options = kwargs.get('rfc_table_options', None) - self.batch_size = kwargs.get('batch_size', None) - self.custom_rfc_read_table_function_module = kwargs.get('custom_rfc_read_table_function_module', None) - self.partition_option = kwargs.get('partition_option', None) - self.partition_settings = kwargs.get('partition_settings', None) - self.type = 'SapTableSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source_py3.py deleted file mode 100644 index bed7bbb93932..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source_py3.py +++ /dev/null @@ -1,100 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class SapTableSource(CopySource): - """A copy activity source for SAP Table source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. 
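Aside — a sketch tying the pieces together: a SapTableSource configured for a filtered, batched, date-partitioned read, with partition_option taken from the documented value list above. Field names, the filter expression, and all bounds are illustrative assumptions:

    from azure.mgmt.datafactory.models import (
        SapTablePartitionSettings,
        SapTableSource,
    )

    # Read two columns with a row filter, batched and partitioned by date.
    source = SapTableSource(
        rfc_table_fields="MATNR, ERSDA",
        rfc_table_options="ERSDA GE '20190101'",
        batch_size=10000,
        partition_option="PartitionOnCalendarDate",
        partition_settings=SapTablePartitionSettings(
            partition_column_name="ERSDA",
            partition_lower_bound="20190101",
            partition_upper_bound="20191231",
            max_partitions_number=4,
        ),
    )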
- :type type: str - :param row_count: The number of rows to be retrieved. Type: integer(or - Expression with resultType integer). - :type row_count: object - :param row_skips: The number of rows that will be skipped. Type: integer - (or Expression with resultType integer). - :type row_skips: object - :param rfc_table_fields: The fields of the SAP table that will be - retrieved. For example, column0, column1. Type: string (or Expression with - resultType string). - :type rfc_table_fields: object - :param rfc_table_options: The options for the filtering of the SAP Table. - For example, COLUMN0 EQ SOME VALUE. Type: string (or Expression with - resultType string). - :type rfc_table_options: object - :param batch_size: Specifies the maximum number of rows that will be - retrieved at a time when retrieving data from SAP Table. Type: integer (or - Expression with resultType integer). - :type batch_size: object - :param custom_rfc_read_table_function_module: Specifies the custom RFC - function module that will be used to read data from SAP Table. Type: - string (or Expression with resultType string). - :type custom_rfc_read_table_function_module: object - :param partition_option: The partition mechanism that will be used for SAP - table read in parallel. Possible values include: 'None', 'PartitionOnInt', - 'PartitionOnCalendarYear', 'PartitionOnCalendarMonth', - 'PartitionOnCalendarDate', 'PartitionOnTime' - :type partition_option: str or - ~azure.mgmt.datafactory.models.SapTablePartitionOption - :param partition_settings: The settings that will be leveraged for SAP - table source partitioning. - :type partition_settings: - ~azure.mgmt.datafactory.models.SapTablePartitionSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'row_count': {'key': 'rowCount', 'type': 'object'}, - 'row_skips': {'key': 'rowSkips', 'type': 'object'}, - 'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'}, - 'rfc_table_options': {'key': 'rfcTableOptions', 'type': 'object'}, - 'batch_size': {'key': 'batchSize', 'type': 'object'}, - 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, row_count=None, row_skips=None, rfc_table_fields=None, rfc_table_options=None, batch_size=None, custom_rfc_read_table_function_module=None, partition_option=None, partition_settings=None, **kwargs) -> None: - super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.row_count = row_count - self.row_skips = row_skips - self.rfc_table_fields = rfc_table_fields - self.rfc_table_options = rfc_table_options - self.batch_size = batch_size - self.custom_rfc_read_table_function_module = custom_rfc_read_table_function_module - self.partition_option = partition_option - 
self.partition_settings = partition_settings - self.type = 'SapTableSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger.py deleted file mode 100644 index b9ea331b8c6e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger.py +++ /dev/null @@ -1,64 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .multiple_pipeline_trigger import MultiplePipelineTrigger - - -class ScheduleTrigger(MultiplePipelineTrigger): - """Trigger that creates pipeline runs periodically, on schedule. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when - Start/Stop APIs are called on the Trigger. Possible values include: - 'Started', 'Stopped', 'Disabled' - :vartype runtime_state: str or - ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the - trigger. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param pipelines: Pipelines that need to be started. - :type pipelines: - list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - :param recurrence: Required. Recurrence schedule configuration. - :type recurrence: ~azure.mgmt.datafactory.models.ScheduleTriggerRecurrence - """ - - _validation = { - 'runtime_state': {'readonly': True}, - 'type': {'required': True}, - 'recurrence': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, - 'recurrence': {'key': 'typeProperties.recurrence', 'type': 'ScheduleTriggerRecurrence'}, - } - - def __init__(self, **kwargs): - super(ScheduleTrigger, self).__init__(**kwargs) - self.recurrence = kwargs.get('recurrence', None) - self.type = 'ScheduleTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_py3.py deleted file mode 100644 index f13f01c7fa13..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_py3.py +++ /dev/null @@ -1,64 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. 
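# A minimal sketch, assuming the models are re-exported from
# azure.mgmt.datafactory.models: constructing the SapTableSource deleted
# above with a few of its documented parameters. The filter and partition
# values are placeholders, not values from this patch.
from azure.mgmt.datafactory.models import SapTableSource

source = SapTableSource(
    row_count=10000,                            # cap on rows retrieved
    rfc_table_fields='COLUMN0, COLUMN1',        # SAP table columns to read
    rfc_table_options="COLUMN0 EQ 'X'",         # server-side filter
    batch_size=1000,                            # rows per retrieval round trip
    partition_option='PartitionOnCalendarYear') # parallel-read mechanism
assert source.type == 'SapTableSource'          # discriminator pinned by __init__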
All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .multiple_pipeline_trigger_py3 import MultiplePipelineTrigger - - -class ScheduleTrigger(MultiplePipelineTrigger): - """Trigger that creates pipeline runs periodically, on schedule. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when - Start/Stop APIs are called on the Trigger. Possible values include: - 'Started', 'Stopped', 'Disabled' - :vartype runtime_state: str or - ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the - trigger. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param pipelines: Pipelines that need to be started. - :type pipelines: - list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - :param recurrence: Required. Recurrence schedule configuration. - :type recurrence: ~azure.mgmt.datafactory.models.ScheduleTriggerRecurrence - """ - - _validation = { - 'runtime_state': {'readonly': True}, - 'type': {'required': True}, - 'recurrence': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, - 'recurrence': {'key': 'typeProperties.recurrence', 'type': 'ScheduleTriggerRecurrence'}, - } - - def __init__(self, *, recurrence, additional_properties=None, description: str=None, annotations=None, pipelines=None, **kwargs) -> None: - super(ScheduleTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) - self.recurrence = recurrence - self.type = 'ScheduleTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence.py deleted file mode 100644 index 85408c45547b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence.py +++ /dev/null @@ -1,54 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
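# A minimal sketch of the ScheduleTrigger removed above: one pipeline run
# every 15 minutes. 'myPipeline' is a placeholder name, and
# ScheduleTriggerRecurrence is the model deleted in the next hunk.
from azure.mgmt.datafactory.models import (
    PipelineReference, ScheduleTrigger, ScheduleTriggerRecurrence,
    TriggerPipelineReference)

trigger = ScheduleTrigger(
    recurrence=ScheduleTriggerRecurrence(frequency='Minute', interval=15),
    pipelines=[TriggerPipelineReference(
        pipeline_reference=PipelineReference(reference_name='myPipeline'))])
assert trigger.type == 'ScheduleTrigger'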
-# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class ScheduleTriggerRecurrence(Model): - """The workflow trigger recurrence. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param frequency: The frequency. Possible values include: 'NotSpecified', - 'Minute', 'Hour', 'Day', 'Week', 'Month', 'Year' - :type frequency: str or ~azure.mgmt.datafactory.models.RecurrenceFrequency - :param interval: The interval. - :type interval: int - :param start_time: The start time. - :type start_time: datetime - :param end_time: The end time. - :type end_time: datetime - :param time_zone: The time zone. - :type time_zone: str - :param schedule: The recurrence schedule. - :type schedule: ~azure.mgmt.datafactory.models.RecurrenceSchedule - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'frequency': {'key': 'frequency', 'type': 'str'}, - 'interval': {'key': 'interval', 'type': 'int'}, - 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, - 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, - 'time_zone': {'key': 'timeZone', 'type': 'str'}, - 'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'}, - } - - def __init__(self, **kwargs): - super(ScheduleTriggerRecurrence, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.frequency = kwargs.get('frequency', None) - self.interval = kwargs.get('interval', None) - self.start_time = kwargs.get('start_time', None) - self.end_time = kwargs.get('end_time', None) - self.time_zone = kwargs.get('time_zone', None) - self.schedule = kwargs.get('schedule', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence_py3.py deleted file mode 100644 index a9b6eded7b96..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence_py3.py +++ /dev/null @@ -1,54 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class ScheduleTriggerRecurrence(Model): - """The workflow trigger recurrence. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param frequency: The frequency. Possible values include: 'NotSpecified', - 'Minute', 'Hour', 'Day', 'Week', 'Month', 'Year' - :type frequency: str or ~azure.mgmt.datafactory.models.RecurrenceFrequency - :param interval: The interval. - :type interval: int - :param start_time: The start time. - :type start_time: datetime - :param end_time: The end time. - :type end_time: datetime - :param time_zone: The time zone. - :type time_zone: str - :param schedule: The recurrence schedule. 
- :type schedule: ~azure.mgmt.datafactory.models.RecurrenceSchedule - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'frequency': {'key': 'frequency', 'type': 'str'}, - 'interval': {'key': 'interval', 'type': 'int'}, - 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, - 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, - 'time_zone': {'key': 'timeZone', 'type': 'str'}, - 'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'}, - } - - def __init__(self, *, additional_properties=None, frequency=None, interval: int=None, start_time=None, end_time=None, time_zone: str=None, schedule=None, **kwargs) -> None: - super(ScheduleTriggerRecurrence, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.frequency = frequency - self.interval = interval - self.start_time = start_time - self.end_time = end_time - self.time_zone = time_zone - self.schedule = schedule diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action.py deleted file mode 100644 index 50bc0131a5cf..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action.py +++ /dev/null @@ -1,49 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class ScriptAction(Model): - """Custom script action to run on HDI ondemand cluster once it's up. - - All required parameters must be populated in order to send to Azure. - - :param name: Required. The user provided name of the script action. - :type name: str - :param uri: Required. The URI for the script action. - :type uri: str - :param roles: Required. The node types on which the script action should - be executed. - :type roles: object - :param parameters: The parameters for the script action. - :type parameters: str - """ - - _validation = { - 'name': {'required': True}, - 'uri': {'required': True}, - 'roles': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'roles': {'key': 'roles', 'type': 'object'}, - 'parameters': {'key': 'parameters', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(ScriptAction, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.uri = kwargs.get('uri', None) - self.roles = kwargs.get('roles', None) - self.parameters = kwargs.get('parameters', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action_py3.py deleted file mode 100644 index c0e278073219..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action_py3.py +++ /dev/null @@ -1,49 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
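# A minimal sketch of the recurrence model removed above: a daily schedule
# bounded by start and end times; datetimes are serialized as ISO-8601 per
# the attribute map, and the window values here are placeholders.
from datetime import datetime
from azure.mgmt.datafactory.models import ScheduleTriggerRecurrence

recurrence = ScheduleTriggerRecurrence(
    frequency='Day',
    interval=1,
    start_time=datetime(2019, 6, 1),
    end_time=datetime(2019, 12, 31),
    time_zone='UTC')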
See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class ScriptAction(Model): - """Custom script action to run on HDI ondemand cluster once it's up. - - All required parameters must be populated in order to send to Azure. - - :param name: Required. The user provided name of the script action. - :type name: str - :param uri: Required. The URI for the script action. - :type uri: str - :param roles: Required. The node types on which the script action should - be executed. - :type roles: object - :param parameters: The parameters for the script action. - :type parameters: str - """ - - _validation = { - 'name': {'required': True}, - 'uri': {'required': True}, - 'roles': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'roles': {'key': 'roles', 'type': 'object'}, - 'parameters': {'key': 'parameters', 'type': 'str'}, - } - - def __init__(self, *, name: str, uri: str, roles, parameters: str=None, **kwargs) -> None: - super(ScriptAction, self).__init__(**kwargs) - self.name = name - self.uri = uri - self.roles = roles - self.parameters = parameters diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base.py deleted file mode 100644 index 3d9475dd4382..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base.py +++ /dev/null @@ -1,41 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SecretBase(Model): - """The base definition of a secret type. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SecureString, AzureKeyVaultSecretReference - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SecureString': 'SecureString', 'AzureKeyVaultSecret': 'AzureKeyVaultSecretReference'} - } - - def __init__(self, **kwargs): - super(SecretBase, self).__init__(**kwargs) - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base_py3.py deleted file mode 100644 index 29403e61b245..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base_py3.py +++ /dev/null @@ -1,41 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. 
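# A minimal sketch of the ScriptAction model removed above, as it might be
# attached to an on-demand HDInsight cluster; the name, URI, role value, and
# parameter string are all placeholders.
from azure.mgmt.datafactory.models import ScriptAction

action = ScriptAction(
    name='install-deps',                    # user-provided action name
    uri='https://example.com/install.sh',   # location of the script
    roles='headnode',                       # node types to run the script on
    parameters='--quiet')                   # optional argument string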
-# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SecretBase(Model): - """The base definition of a secret type. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SecureString, AzureKeyVaultSecretReference - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SecureString': 'SecureString', 'AzureKeyVaultSecret': 'AzureKeyVaultSecretReference'} - } - - def __init__(self, **kwargs) -> None: - super(SecretBase, self).__init__(**kwargs) - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string.py deleted file mode 100644 index bec430fdf8a4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string.py +++ /dev/null @@ -1,40 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .secret_base import SecretBase - - -class SecureString(SecretBase): - """Azure Data Factory secure string definition. The string value will be - masked with asterisks '*' during Get or List API calls. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. - :type type: str - :param value: Required. Value of secure string. - :type value: str - """ - - _validation = { - 'type': {'required': True}, - 'value': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(SecureString, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.type = 'SecureString' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string_py3.py deleted file mode 100644 index d7ebd5e13e78..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string_py3.py +++ /dev/null @@ -1,40 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
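# A minimal sketch of the subtype dispatch declared in SecretBase._subtype_map
# above: the 'type' discriminator ('SecureString' or 'AzureKeyVaultSecret')
# selects the concrete class during deserialization, and each subclass pins
# the constant in its __init__.
from azure.mgmt.datafactory.models import SecureString

secret = SecureString(value='placeholder-password')  # masked on Get/List calls
assert secret.type == 'SecureString'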
-# -------------------------------------------------------------------------- - -from .secret_base_py3 import SecretBase - - -class SecureString(SecretBase): - """Azure Data Factory secure string definition. The string value will be - masked with asterisks '*' during Get or List API calls. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. - :type type: str - :param value: Required. Value of secure string. - :type value: str - """ - - _validation = { - 'type': {'required': True}, - 'value': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, - } - - def __init__(self, *, value: str, **kwargs) -> None: - super(SecureString, self).__init__(**kwargs) - self.value = value - self.type = 'SecureString' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference.py deleted file mode 100644 index fc56f8e8a799..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dependency_reference import DependencyReference - - -class SelfDependencyTumblingWindowTriggerReference(DependencyReference): - """Self referenced tumbling window trigger dependency. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. - :type type: str - :param offset: Required. Timespan applied to the start time of a tumbling - window when evaluating dependency. - :type offset: str - :param size: The size of the window when evaluating the dependency. If - undefined the frequency of the tumbling window will be used. 
- :type size: str - """ - - _validation = { - 'type': {'required': True}, - 'offset': {'required': True, 'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, - 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'offset': {'key': 'offset', 'type': 'str'}, - 'size': {'key': 'size', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(SelfDependencyTumblingWindowTriggerReference, self).__init__(**kwargs) - self.offset = kwargs.get('offset', None) - self.size = kwargs.get('size', None) - self.type = 'SelfDependencyTumblingWindowTriggerReference' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference_py3.py deleted file mode 100644 index 1dd1e575c2e8..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference_py3.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dependency_reference_py3 import DependencyReference - - -class SelfDependencyTumblingWindowTriggerReference(DependencyReference): - """Self referenced tumbling window trigger dependency. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. - :type type: str - :param offset: Required. Timespan applied to the start time of a tumbling - window when evaluating dependency. - :type offset: str - :param size: The size of the window when evaluating the dependency. If - undefined the frequency of the tumbling window will be used. 
- :type size: str - """ - - _validation = { - 'type': {'required': True}, - 'offset': {'required': True, 'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, - 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'offset': {'key': 'offset', 'type': 'str'}, - 'size': {'key': 'size', 'type': 'str'}, - } - - def __init__(self, *, offset: str, size: str=None, **kwargs) -> None: - super(SelfDependencyTumblingWindowTriggerReference, self).__init__(**kwargs) - self.offset = offset - self.size = size - self.type = 'SelfDependencyTumblingWindowTriggerReference' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime.py deleted file mode 100644 index 20744f02306d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .integration_runtime import IntegrationRuntime - - -class SelfHostedIntegrationRuntime(IntegrationRuntime): - """Self-hosted integration runtime. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Integration runtime description. - :type description: str - :param type: Required. Constant filled by server. - :type type: str - :param linked_info: - :type linked_info: - ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeType - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_info': {'key': 'typeProperties.linkedInfo', 'type': 'LinkedIntegrationRuntimeType'}, - } - - def __init__(self, **kwargs): - super(SelfHostedIntegrationRuntime, self).__init__(**kwargs) - self.linked_info = kwargs.get('linked_info', None) - self.type = 'SelfHosted' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node.py deleted file mode 100644 index 1491a80dc19a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node.py +++ /dev/null @@ -1,139 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
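# A minimal sketch of the self-dependency reference removed above; offset and
# size must satisfy the timespan pattern in _validation (8-15 characters with
# an optional day prefix), so a one-hour lookback is written as '01:00:00'.
from azure.mgmt.datafactory.models import (
    SelfDependencyTumblingWindowTriggerReference)

dependency = SelfDependencyTumblingWindowTriggerReference(
    offset='01:00:00',   # shift applied to the window start time
    size='01:00:00')     # window size; defaults to the trigger frequency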
-# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SelfHostedIntegrationRuntimeNode(Model): - """Properties of Self-hosted integration runtime node. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar node_name: Name of the integration runtime node. - :vartype node_name: str - :ivar machine_name: Machine name of the integration runtime node. - :vartype machine_name: str - :ivar host_service_uri: URI for the host machine of the integration - runtime. - :vartype host_service_uri: str - :ivar status: Status of the integration runtime node. Possible values - include: 'NeedRegistration', 'Online', 'Limited', 'Offline', 'Upgrading', - 'Initializing', 'InitializeFailed' - :vartype status: str or - ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNodeStatus - :ivar capabilities: The integration runtime capabilities dictionary - :vartype capabilities: dict[str, str] - :ivar version_status: Status of the integration runtime node version. - :vartype version_status: str - :ivar version: Version of the integration runtime node. - :vartype version: str - :ivar register_time: The time at which the integration runtime node was - registered in ISO8601 format. - :vartype register_time: datetime - :ivar last_connect_time: The most recent time at which the integration - runtime was connected in ISO8601 format. - :vartype last_connect_time: datetime - :ivar expiry_time: The time at which the integration runtime will expire - in ISO8601 format. - :vartype expiry_time: datetime - :ivar last_start_time: The time the node last started up. - :vartype last_start_time: datetime - :ivar last_stop_time: The integration runtime node last stop time. - :vartype last_stop_time: datetime - :ivar last_update_result: The result of the last integration runtime node - update. Possible values include: 'None', 'Succeed', 'Fail' - :vartype last_update_result: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeUpdateResult - :ivar last_start_update_time: The last time for the integration runtime - node update start. - :vartype last_start_update_time: datetime - :ivar last_end_update_time: The last time for the integration runtime node - update end. - :vartype last_end_update_time: datetime - :ivar is_active_dispatcher: Indicates whether this node is the active - dispatcher for integration runtime requests. - :vartype is_active_dispatcher: bool - :ivar concurrent_jobs_limit: Maximum concurrent jobs on the integration - runtime node. - :vartype concurrent_jobs_limit: int - :ivar max_concurrent_jobs: The maximum concurrent jobs in this integration - runtime. 
- :vartype max_concurrent_jobs: int - """ - - _validation = { - 'node_name': {'readonly': True}, - 'machine_name': {'readonly': True}, - 'host_service_uri': {'readonly': True}, - 'status': {'readonly': True}, - 'capabilities': {'readonly': True}, - 'version_status': {'readonly': True}, - 'version': {'readonly': True}, - 'register_time': {'readonly': True}, - 'last_connect_time': {'readonly': True}, - 'expiry_time': {'readonly': True}, - 'last_start_time': {'readonly': True}, - 'last_stop_time': {'readonly': True}, - 'last_update_result': {'readonly': True}, - 'last_start_update_time': {'readonly': True}, - 'last_end_update_time': {'readonly': True}, - 'is_active_dispatcher': {'readonly': True}, - 'concurrent_jobs_limit': {'readonly': True}, - 'max_concurrent_jobs': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'node_name': {'key': 'nodeName', 'type': 'str'}, - 'machine_name': {'key': 'machineName', 'type': 'str'}, - 'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'capabilities': {'key': 'capabilities', 'type': '{str}'}, - 'version_status': {'key': 'versionStatus', 'type': 'str'}, - 'version': {'key': 'version', 'type': 'str'}, - 'register_time': {'key': 'registerTime', 'type': 'iso-8601'}, - 'last_connect_time': {'key': 'lastConnectTime', 'type': 'iso-8601'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'last_start_time': {'key': 'lastStartTime', 'type': 'iso-8601'}, - 'last_stop_time': {'key': 'lastStopTime', 'type': 'iso-8601'}, - 'last_update_result': {'key': 'lastUpdateResult', 'type': 'str'}, - 'last_start_update_time': {'key': 'lastStartUpdateTime', 'type': 'iso-8601'}, - 'last_end_update_time': {'key': 'lastEndUpdateTime', 'type': 'iso-8601'}, - 'is_active_dispatcher': {'key': 'isActiveDispatcher', 'type': 'bool'}, - 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, - 'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'}, - } - - def __init__(self, **kwargs): - super(SelfHostedIntegrationRuntimeNode, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.node_name = None - self.machine_name = None - self.host_service_uri = None - self.status = None - self.capabilities = None - self.version_status = None - self.version = None - self.register_time = None - self.last_connect_time = None - self.expiry_time = None - self.last_start_time = None - self.last_stop_time = None - self.last_update_result = None - self.last_start_update_time = None - self.last_end_update_time = None - self.is_active_dispatcher = None - self.concurrent_jobs_limit = None - self.max_concurrent_jobs = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node_py3.py deleted file mode 100644 index 59b703737a5d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node_py3.py +++ /dev/null @@ -1,139 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. 
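# A minimal sketch showing that every field on the node model above is
# read-only: a locally constructed instance carries only None values until
# the service populates it in a response.
from azure.mgmt.datafactory.models import SelfHostedIntegrationRuntimeNode

node = SelfHostedIntegrationRuntimeNode()
assert node.node_name is None and node.status is None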
-# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SelfHostedIntegrationRuntimeNode(Model): - """Properties of Self-hosted integration runtime node. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar node_name: Name of the integration runtime node. - :vartype node_name: str - :ivar machine_name: Machine name of the integration runtime node. - :vartype machine_name: str - :ivar host_service_uri: URI for the host machine of the integration - runtime. - :vartype host_service_uri: str - :ivar status: Status of the integration runtime node. Possible values - include: 'NeedRegistration', 'Online', 'Limited', 'Offline', 'Upgrading', - 'Initializing', 'InitializeFailed' - :vartype status: str or - ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNodeStatus - :ivar capabilities: The integration runtime capabilities dictionary - :vartype capabilities: dict[str, str] - :ivar version_status: Status of the integration runtime node version. - :vartype version_status: str - :ivar version: Version of the integration runtime node. - :vartype version: str - :ivar register_time: The time at which the integration runtime node was - registered in ISO8601 format. - :vartype register_time: datetime - :ivar last_connect_time: The most recent time at which the integration - runtime was connected in ISO8601 format. - :vartype last_connect_time: datetime - :ivar expiry_time: The time at which the integration runtime will expire - in ISO8601 format. - :vartype expiry_time: datetime - :ivar last_start_time: The time the node last started up. - :vartype last_start_time: datetime - :ivar last_stop_time: The integration runtime node last stop time. - :vartype last_stop_time: datetime - :ivar last_update_result: The result of the last integration runtime node - update. Possible values include: 'None', 'Succeed', 'Fail' - :vartype last_update_result: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeUpdateResult - :ivar last_start_update_time: The last time for the integration runtime - node update start. - :vartype last_start_update_time: datetime - :ivar last_end_update_time: The last time for the integration runtime node - update end. - :vartype last_end_update_time: datetime - :ivar is_active_dispatcher: Indicates whether this node is the active - dispatcher for integration runtime requests. - :vartype is_active_dispatcher: bool - :ivar concurrent_jobs_limit: Maximum concurrent jobs on the integration - runtime node. - :vartype concurrent_jobs_limit: int - :ivar max_concurrent_jobs: The maximum concurrent jobs in this integration - runtime. 
- :vartype max_concurrent_jobs: int - """ - - _validation = { - 'node_name': {'readonly': True}, - 'machine_name': {'readonly': True}, - 'host_service_uri': {'readonly': True}, - 'status': {'readonly': True}, - 'capabilities': {'readonly': True}, - 'version_status': {'readonly': True}, - 'version': {'readonly': True}, - 'register_time': {'readonly': True}, - 'last_connect_time': {'readonly': True}, - 'expiry_time': {'readonly': True}, - 'last_start_time': {'readonly': True}, - 'last_stop_time': {'readonly': True}, - 'last_update_result': {'readonly': True}, - 'last_start_update_time': {'readonly': True}, - 'last_end_update_time': {'readonly': True}, - 'is_active_dispatcher': {'readonly': True}, - 'concurrent_jobs_limit': {'readonly': True}, - 'max_concurrent_jobs': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'node_name': {'key': 'nodeName', 'type': 'str'}, - 'machine_name': {'key': 'machineName', 'type': 'str'}, - 'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'capabilities': {'key': 'capabilities', 'type': '{str}'}, - 'version_status': {'key': 'versionStatus', 'type': 'str'}, - 'version': {'key': 'version', 'type': 'str'}, - 'register_time': {'key': 'registerTime', 'type': 'iso-8601'}, - 'last_connect_time': {'key': 'lastConnectTime', 'type': 'iso-8601'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'last_start_time': {'key': 'lastStartTime', 'type': 'iso-8601'}, - 'last_stop_time': {'key': 'lastStopTime', 'type': 'iso-8601'}, - 'last_update_result': {'key': 'lastUpdateResult', 'type': 'str'}, - 'last_start_update_time': {'key': 'lastStartUpdateTime', 'type': 'iso-8601'}, - 'last_end_update_time': {'key': 'lastEndUpdateTime', 'type': 'iso-8601'}, - 'is_active_dispatcher': {'key': 'isActiveDispatcher', 'type': 'bool'}, - 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, - 'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'}, - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(SelfHostedIntegrationRuntimeNode, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.node_name = None - self.machine_name = None - self.host_service_uri = None - self.status = None - self.capabilities = None - self.version_status = None - self.version = None - self.register_time = None - self.last_connect_time = None - self.expiry_time = None - self.last_start_time = None - self.last_stop_time = None - self.last_update_result = None - self.last_start_update_time = None - self.last_end_update_time = None - self.is_active_dispatcher = None - self.concurrent_jobs_limit = None - self.max_concurrent_jobs = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_py3.py deleted file mode 100644 index a25d04373849..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_py3.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. 
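# A minimal sketch of the empty-key entry in _attribute_map above: payload
# properties that match no declared field are routed into
# additional_properties, the one client-settable field on the node model.
from azure.mgmt.datafactory.models import SelfHostedIntegrationRuntimeNode

node = SelfHostedIntegrationRuntimeNode(
    additional_properties={'customTag': 'value'})   # placeholder extras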
-# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .integration_runtime_py3 import IntegrationRuntime - - -class SelfHostedIntegrationRuntime(IntegrationRuntime): - """Self-hosted integration runtime. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Integration runtime description. - :type description: str - :param type: Required. Constant filled by server. - :type type: str - :param linked_info: - :type linked_info: - ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeType - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_info': {'key': 'typeProperties.linkedInfo', 'type': 'LinkedIntegrationRuntimeType'}, - } - - def __init__(self, *, additional_properties=None, description: str=None, linked_info=None, **kwargs) -> None: - super(SelfHostedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description, **kwargs) - self.linked_info = linked_info - self.type = 'SelfHosted' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status.py deleted file mode 100644 index 5dd9995987d9..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status.py +++ /dev/null @@ -1,146 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .integration_runtime_status import IntegrationRuntimeStatus - - -class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): - """Self-hosted integration runtime status. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar data_factory_name: The data factory name which the integration - runtime belong to. - :vartype data_factory_name: str - :ivar state: The state of integration runtime. Possible values include: - 'Initial', 'Stopped', 'Started', 'Starting', 'Stopping', - 'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied' - :vartype state: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeState - :param type: Required. Constant filled by server. - :type type: str - :ivar create_time: The time at which the integration runtime was created, - in ISO8601 format. 
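# A minimal sketch of the self-hosted runtime resource removed above;
# linked_info, which would reference a runtime shared from another factory,
# is omitted here, and the description is a placeholder.
from azure.mgmt.datafactory.models import SelfHostedIntegrationRuntime

runtime = SelfHostedIntegrationRuntime(description='on-premises runtime')
assert runtime.type == 'SelfHosted'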
- :vartype create_time: datetime - :ivar task_queue_id: The task queue id of the integration runtime. - :vartype task_queue_id: str - :ivar internal_channel_encryption: It is used to set the encryption mode - for node-node communication channel (when more than 2 self-hosted - integration runtime nodes exist). Possible values include: 'NotSet', - 'SslEncrypted', 'NotEncrypted' - :vartype internal_channel_encryption: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeInternalChannelEncryptionMode - :ivar version: Version of the integration runtime. - :vartype version: str - :param nodes: The list of nodes for this integration runtime. - :type nodes: - list[~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode] - :ivar scheduled_update_date: The date at which the integration runtime - will be scheduled to update, in ISO8601 format. - :vartype scheduled_update_date: datetime - :ivar update_delay_offset: The time in the date scheduled by service to - update the integration runtime, e.g., PT03H is 3 hours - :vartype update_delay_offset: str - :ivar local_time_zone_offset: The local time zone offset in hours. - :vartype local_time_zone_offset: str - :ivar capabilities: Object with additional information about integration - runtime capabilities. - :vartype capabilities: dict[str, str] - :ivar service_urls: The URLs for the services used in integration runtime - backend service. - :vartype service_urls: list[str] - :ivar auto_update: Whether Self-hosted integration runtime auto update has - been turned on. Possible values include: 'On', 'Off' - :vartype auto_update: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate - :ivar version_status: Status of the integration runtime version. - :vartype version_status: str - :param links: The list of linked integration runtimes that are created to - share with this integration runtime. - :type links: list[~azure.mgmt.datafactory.models.LinkedIntegrationRuntime] - :ivar pushed_version: The version that the integration runtime is going to - update to. - :vartype pushed_version: str - :ivar latest_version: The latest version on download center. - :vartype latest_version: str - :ivar auto_update_eta: The estimated time when the self-hosted integration - runtime will be updated. 
- :vartype auto_update_eta: datetime - """ - - _validation = { - 'data_factory_name': {'readonly': True}, - 'state': {'readonly': True}, - 'type': {'required': True}, - 'create_time': {'readonly': True}, - 'task_queue_id': {'readonly': True}, - 'internal_channel_encryption': {'readonly': True}, - 'version': {'readonly': True}, - 'scheduled_update_date': {'readonly': True}, - 'update_delay_offset': {'readonly': True}, - 'local_time_zone_offset': {'readonly': True}, - 'capabilities': {'readonly': True}, - 'service_urls': {'readonly': True}, - 'auto_update': {'readonly': True}, - 'version_status': {'readonly': True}, - 'pushed_version': {'readonly': True}, - 'latest_version': {'readonly': True}, - 'auto_update_eta': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, - 'state': {'key': 'state', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'create_time': {'key': 'typeProperties.createTime', 'type': 'iso-8601'}, - 'task_queue_id': {'key': 'typeProperties.taskQueueId', 'type': 'str'}, - 'internal_channel_encryption': {'key': 'typeProperties.internalChannelEncryption', 'type': 'str'}, - 'version': {'key': 'typeProperties.version', 'type': 'str'}, - 'nodes': {'key': 'typeProperties.nodes', 'type': '[SelfHostedIntegrationRuntimeNode]'}, - 'scheduled_update_date': {'key': 'typeProperties.scheduledUpdateDate', 'type': 'iso-8601'}, - 'update_delay_offset': {'key': 'typeProperties.updateDelayOffset', 'type': 'str'}, - 'local_time_zone_offset': {'key': 'typeProperties.localTimeZoneOffset', 'type': 'str'}, - 'capabilities': {'key': 'typeProperties.capabilities', 'type': '{str}'}, - 'service_urls': {'key': 'typeProperties.serviceUrls', 'type': '[str]'}, - 'auto_update': {'key': 'typeProperties.autoUpdate', 'type': 'str'}, - 'version_status': {'key': 'typeProperties.versionStatus', 'type': 'str'}, - 'links': {'key': 'typeProperties.links', 'type': '[LinkedIntegrationRuntime]'}, - 'pushed_version': {'key': 'typeProperties.pushedVersion', 'type': 'str'}, - 'latest_version': {'key': 'typeProperties.latestVersion', 'type': 'str'}, - 'auto_update_eta': {'key': 'typeProperties.autoUpdateETA', 'type': 'iso-8601'}, - } - - def __init__(self, **kwargs): - super(SelfHostedIntegrationRuntimeStatus, self).__init__(**kwargs) - self.create_time = None - self.task_queue_id = None - self.internal_channel_encryption = None - self.version = None - self.nodes = kwargs.get('nodes', None) - self.scheduled_update_date = None - self.update_delay_offset = None - self.local_time_zone_offset = None - self.capabilities = None - self.service_urls = None - self.auto_update = None - self.version_status = None - self.links = kwargs.get('links', None) - self.pushed_version = None - self.latest_version = None - self.auto_update_eta = None - self.type = 'SelfHosted' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status_py3.py deleted file mode 100644 index acad7661fc15..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status_py3.py +++ /dev/null @@ -1,146 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
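# A minimal sketch of the status model removed above: only 'nodes' and
# 'links' accept client values; the remaining typeProperties.* fields are
# read-only service output and stay None until returned by the service.
from azure.mgmt.datafactory.models import SelfHostedIntegrationRuntimeStatus

status = SelfHostedIntegrationRuntimeStatus(nodes=[], links=[])
assert status.type == 'SelfHosted' and status.auto_update is None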
See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .integration_runtime_status_py3 import IntegrationRuntimeStatus - - -class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): - """Self-hosted integration runtime status. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar data_factory_name: The data factory name which the integration - runtime belong to. - :vartype data_factory_name: str - :ivar state: The state of integration runtime. Possible values include: - 'Initial', 'Stopped', 'Started', 'Starting', 'Stopping', - 'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied' - :vartype state: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeState - :param type: Required. Constant filled by server. - :type type: str - :ivar create_time: The time at which the integration runtime was created, - in ISO8601 format. - :vartype create_time: datetime - :ivar task_queue_id: The task queue id of the integration runtime. - :vartype task_queue_id: str - :ivar internal_channel_encryption: It is used to set the encryption mode - for node-node communication channel (when more than 2 self-hosted - integration runtime nodes exist). Possible values include: 'NotSet', - 'SslEncrypted', 'NotEncrypted' - :vartype internal_channel_encryption: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeInternalChannelEncryptionMode - :ivar version: Version of the integration runtime. - :vartype version: str - :param nodes: The list of nodes for this integration runtime. - :type nodes: - list[~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode] - :ivar scheduled_update_date: The date at which the integration runtime - will be scheduled to update, in ISO8601 format. - :vartype scheduled_update_date: datetime - :ivar update_delay_offset: The time in the date scheduled by service to - update the integration runtime, e.g., PT03H is 3 hours - :vartype update_delay_offset: str - :ivar local_time_zone_offset: The local time zone offset in hours. - :vartype local_time_zone_offset: str - :ivar capabilities: Object with additional information about integration - runtime capabilities. - :vartype capabilities: dict[str, str] - :ivar service_urls: The URLs for the services used in integration runtime - backend service. - :vartype service_urls: list[str] - :ivar auto_update: Whether Self-hosted integration runtime auto update has - been turned on. Possible values include: 'On', 'Off' - :vartype auto_update: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate - :ivar version_status: Status of the integration runtime version. - :vartype version_status: str - :param links: The list of linked integration runtimes that are created to - share with this integration runtime. - :type links: list[~azure.mgmt.datafactory.models.LinkedIntegrationRuntime] - :ivar pushed_version: The version that the integration runtime is going to - update to. - :vartype pushed_version: str - :ivar latest_version: The latest version on download center. 
- :vartype latest_version: str - :ivar auto_update_eta: The estimated time when the self-hosted integration - runtime will be updated. - :vartype auto_update_eta: datetime - """ - - _validation = { - 'data_factory_name': {'readonly': True}, - 'state': {'readonly': True}, - 'type': {'required': True}, - 'create_time': {'readonly': True}, - 'task_queue_id': {'readonly': True}, - 'internal_channel_encryption': {'readonly': True}, - 'version': {'readonly': True}, - 'scheduled_update_date': {'readonly': True}, - 'update_delay_offset': {'readonly': True}, - 'local_time_zone_offset': {'readonly': True}, - 'capabilities': {'readonly': True}, - 'service_urls': {'readonly': True}, - 'auto_update': {'readonly': True}, - 'version_status': {'readonly': True}, - 'pushed_version': {'readonly': True}, - 'latest_version': {'readonly': True}, - 'auto_update_eta': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, - 'state': {'key': 'state', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'create_time': {'key': 'typeProperties.createTime', 'type': 'iso-8601'}, - 'task_queue_id': {'key': 'typeProperties.taskQueueId', 'type': 'str'}, - 'internal_channel_encryption': {'key': 'typeProperties.internalChannelEncryption', 'type': 'str'}, - 'version': {'key': 'typeProperties.version', 'type': 'str'}, - 'nodes': {'key': 'typeProperties.nodes', 'type': '[SelfHostedIntegrationRuntimeNode]'}, - 'scheduled_update_date': {'key': 'typeProperties.scheduledUpdateDate', 'type': 'iso-8601'}, - 'update_delay_offset': {'key': 'typeProperties.updateDelayOffset', 'type': 'str'}, - 'local_time_zone_offset': {'key': 'typeProperties.localTimeZoneOffset', 'type': 'str'}, - 'capabilities': {'key': 'typeProperties.capabilities', 'type': '{str}'}, - 'service_urls': {'key': 'typeProperties.serviceUrls', 'type': '[str]'}, - 'auto_update': {'key': 'typeProperties.autoUpdate', 'type': 'str'}, - 'version_status': {'key': 'typeProperties.versionStatus', 'type': 'str'}, - 'links': {'key': 'typeProperties.links', 'type': '[LinkedIntegrationRuntime]'}, - 'pushed_version': {'key': 'typeProperties.pushedVersion', 'type': 'str'}, - 'latest_version': {'key': 'typeProperties.latestVersion', 'type': 'str'}, - 'auto_update_eta': {'key': 'typeProperties.autoUpdateETA', 'type': 'iso-8601'}, - } - - def __init__(self, *, additional_properties=None, nodes=None, links=None, **kwargs) -> None: - super(SelfHostedIntegrationRuntimeStatus, self).__init__(additional_properties=additional_properties, **kwargs) - self.create_time = None - self.task_queue_id = None - self.internal_channel_encryption = None - self.version = None - self.nodes = nodes - self.scheduled_update_date = None - self.update_delay_offset = None - self.local_time_zone_offset = None - self.capabilities = None - self.service_urls = None - self.auto_update = None - self.version_status = None - self.links = links - self.pushed_version = None - self.latest_version = None - self.auto_update_eta = None - self.type = 'SelfHosted' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service.py deleted file mode 100644 index 4d42f575e769..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service.py +++ /dev/null @@ -1,106 +0,0 @@ -# coding=utf-8 -# 
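For orientation, everything in the status model above except `nodes` and `links` is server-populated: a client never sends those fields, it only reads them back. A minimal sketch of fetching the status through the management client of this SDK generation; subscription, resource group, factory, and runtime names are placeholders:

from azure.common.credentials import ServicePrincipalCredentials
from azure.mgmt.datafactory import DataFactoryManagementClient

# Placeholder service-principal credentials; any msrestazure credential works here.
credentials = ServicePrincipalCredentials(
    client_id='<app-id>', secret='<app-secret>', tenant='<tenant-id>')
client = DataFactoryManagementClient(credentials, '<subscription-id>')

# get_status returns an IntegrationRuntimeStatusResponse; its .properties is the
# SelfHostedIntegrationRuntimeStatus shown above, with the read-only
# typeProperties fields (version, auto_update, nodes, ...) filled in by the service.
status = client.integration_runtimes.get_status(
    '<resource-group>', '<factory-name>', '<self-hosted-ir-name>')
print(status.properties.state, status.properties.version)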
-------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class ServiceNowLinkedService(LinkedService): - """ServiceNow server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param endpoint: Required. The endpoint of the ServiceNow server. (i.e. - .service-now.com) - :type endpoint: object - :param authentication_type: Required. The authentication type to use. - Possible values include: 'Basic', 'OAuth2' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.ServiceNowAuthenticationType - :param username: The user name used to connect to the ServiceNow server - for Basic and OAuth2 authentication. - :type username: object - :param password: The password corresponding to the user name for Basic and - OAuth2 authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param client_id: The client id for OAuth2 authentication. - :type client_id: object - :param client_secret: The client secret for OAuth2 authentication. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ServiceNowLinkedService, self).__init__(**kwargs) - self.endpoint = kwargs.get('endpoint', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'ServiceNow' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service_py3.py deleted file mode 100644 index b9d166f241d6..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service_py3.py +++ /dev/null @@ -1,106 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class ServiceNowLinkedService(LinkedService): - """ServiceNow server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. 
- :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param endpoint: Required. The endpoint of the ServiceNow server. (i.e. - .service-now.com) - :type endpoint: object - :param authentication_type: Required. The authentication type to use. - Possible values include: 'Basic', 'OAuth2' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.ServiceNowAuthenticationType - :param username: The user name used to connect to the ServiceNow server - for Basic and OAuth2 authentication. - :type username: object - :param password: The password corresponding to the user name for Basic and - OAuth2 authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param client_id: The client id for OAuth2 authentication. - :type client_id: object - :param client_secret: The client secret for OAuth2 authentication. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, endpoint, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, username=None, password=None, client_id=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(ServiceNowLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.endpoint = endpoint - self.authentication_type = authentication_type - self.username = username - self.password = password - self.client_id = client_id - self.client_secret = client_secret - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'ServiceNow' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset.py deleted file mode 100644 index a9821ba0fd10..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class ServiceNowObjectDataset(Dataset): - """ServiceNow server dataset. - - All required parameters must be populated in order to send to Azure. 
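A minimal construction sketch for the ServiceNow linked service deleted above, following its keyword-only py3 signature; the endpoint, user, and secret values are placeholders, and SecureString is the in-line SecretBase implementation (a key vault reference would be preferred in practice):

from azure.mgmt.datafactory.models import SecureString, ServiceNowLinkedService

service_now_ls = ServiceNowLinkedService(
    endpoint='https://<instance>.service-now.com',  # placeholder instance URL
    authentication_type='Basic',                    # or 'OAuth2' with client_id/client_secret
    username='integration.user',
    password=SecureString(value='<password>'),
)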
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ServiceNowObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'ServiceNowObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset_py3.py deleted file mode 100644 index fcd2fd537a31..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class ServiceNowObjectDataset(Dataset): - """ServiceNow server dataset. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(ServiceNowObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'ServiceNowObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source.py deleted file mode 100644 index 16b10bb8de5e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
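The dataset pairs with that linked service through a by-name reference. A sketch, assuming a linked service named 'ServiceNowLinkedService' is already registered in the factory:

from azure.mgmt.datafactory.models import (
    LinkedServiceReference, ServiceNowObjectDataset)

incidents_ds = ServiceNowObjectDataset(
    linked_service_name=LinkedServiceReference(
        reference_name='ServiceNowLinkedService'),
    table_name='incident',  # illustrative ServiceNow table
)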
-# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class ServiceNowSource(CopySource): - """A copy activity ServiceNow server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ServiceNowSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'ServiceNowSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source_py3.py deleted file mode 100644 index 20d1a64d04d3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class ServiceNowSource(CopySource): - """A copy activity ServiceNow server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. 
Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'ServiceNowSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity.py deleted file mode 100644 index e8dd1690862d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity.py +++ /dev/null @@ -1,59 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .control_activity import ControlActivity - - -class SetVariableActivity(ControlActivity): - """Set value for a Variable. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param variable_name: Name of the variable whose value needs to be set. - :type variable_name: str - :param value: Value to be set. 
Could be a static value or Expression - :type value: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, - 'value': {'key': 'typeProperties.value', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SetVariableActivity, self).__init__(**kwargs) - self.variable_name = kwargs.get('variable_name', None) - self.value = kwargs.get('value', None) - self.type = 'SetVariable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity_py3.py deleted file mode 100644 index e045abee3dfb..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity_py3.py +++ /dev/null @@ -1,59 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .control_activity_py3 import ControlActivity - - -class SetVariableActivity(ControlActivity): - """Set value for a Variable. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param variable_name: Name of the variable whose value needs to be set. - :type variable_name: str - :param value: Value to be set. 
Could be a static value or Expression - :type value: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, - 'value': {'key': 'typeProperties.value', 'type': 'object'}, - } - - def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, variable_name: str=None, value=None, **kwargs) -> None: - super(SetVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.variable_name = variable_name - self.value = value - self.type = 'SetVariable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location.py deleted file mode 100644 index 5b8fd4e42ba2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_location import DatasetLocation - - -class SftpLocation(DatasetLocation): - """The location of SFTP dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). 
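The two activity models deleted above compose as follows: a copy activity reads through ServiceNowSource into some sink, and a SetVariableActivity records a value once the copy succeeds. A sketch, assuming datasets named 'ServiceNowIncidents' and 'BlobStaging' already exist and that a blob sink is an acceptable target; the query string is illustrative:

from azure.mgmt.datafactory.models import (
    ActivityDependency, BlobSink, CopyActivity, DatasetReference,
    PipelineResource, ServiceNowSource, SetVariableActivity,
    VariableSpecification)

copy = CopyActivity(
    name='CopyIncidents',
    inputs=[DatasetReference(reference_name='ServiceNowIncidents')],
    outputs=[DatasetReference(reference_name='BlobStaging')],
    source=ServiceNowSource(query='SELECT * FROM incident'),  # connector-specific syntax
    sink=BlobSink(),
)

mark_done = SetVariableActivity(
    name='MarkDone',
    variable_name='copy_state',
    value='done',  # may also be an expression string such as '@pipeline().RunId'
    depends_on=[ActivityDependency(
        activity='CopyIncidents', dependency_conditions=['Succeeded'])],
)

pipeline = PipelineResource(
    activities=[copy, mark_done],
    variables={'copy_state': VariableSpecification(type='String')},
)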
- :type file_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SftpLocation, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location_py3.py deleted file mode 100644 index c5e2feafa971..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location_py3.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_location_py3 import DatasetLocation - - -class SftpLocation(DatasetLocation): - """The location of SFTP dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). - :type file_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: - super(SftpLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings.py deleted file mode 100644 index 5e7b4faf77ad..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings.py +++ /dev/null @@ -1,68 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .store_read_settings import StoreReadSettings - - -class SftpReadSettings(StoreReadSettings): - """Sftp read settings. 
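In this generation of the models the location's discriminator is an explicit required string rather than a server-filled constant, so a sketch of building the SFTP location passes it by hand ('SftpLocation' is assumed to be the REST type name); the paths are placeholders:

from azure.mgmt.datafactory.models import SftpLocation

sftp_location = SftpLocation(
    type='SftpLocation',           # discriminator; explicit in this signature
    folder_path='outbound/daily',  # placeholder folder on the server
    file_name='report.csv',
)

The resulting object is what a format dataset (for example, a delimited-text dataset) carries in its location property.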
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - :param wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or - Expression with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: Sftp wildcardFileName. Type: string (or - Expression with resultType string). - :type wildcard_file_name: object - :param modified_datetime_start: The start of file's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: - string (or Expression with resultType string). - :type modified_datetime_end: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SftpReadSettings, self).__init__(**kwargs) - self.recursive = kwargs.get('recursive', None) - self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) - self.wildcard_file_name = kwargs.get('wildcard_file_name', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings_py3.py deleted file mode 100644 index e6c27e3ad08a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings_py3.py +++ /dev/null @@ -1,68 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .store_read_settings_py3 import StoreReadSettings - - -class SftpReadSettings(StoreReadSettings): - """Sftp read settings. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - :param wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or - Expression with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: Sftp wildcardFileName. Type: string (or - Expression with resultType string). - :type wildcard_file_name: object - :param modified_datetime_start: The start of file's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: - string (or Expression with resultType string). - :type modified_datetime_end: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: - super(SftpReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.recursive = recursive - self.wildcard_folder_path = wildcard_folder_path - self.wildcard_file_name = wildcard_file_name - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service.py deleted file mode 100644 index aa4c535fc514..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service.py +++ /dev/null @@ -1,119 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
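A sketch of the read settings, again passing the discriminator explicitly ('SftpReadSettings' is assumed to be the REST type name); such an object is handed to a copy source as its store settings:

from azure.mgmt.datafactory.models import SftpReadSettings

sftp_read = SftpReadSettings(
    type='SftpReadSettings',            # discriminator; explicit in this signature
    recursive=True,                     # walk subfolders under the folder path
    wildcard_folder_path='outbound/*',  # placeholder patterns
    wildcard_file_name='*.csv',
    modified_datetime_start='2019-06-01T00:00:00Z',
)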
-# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class SftpServerLinkedService(LinkedService): - """A linked service for an SSH File Transfer Protocol (SFTP) server. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized to this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The SFTP server host name. Type: string (or - Expression with resultType string). - :type host: object - :param port: The TCP port number that the SFTP server uses to listen for - client connections. Default value is 22. Type: integer (or Expression with - resultType integer), minimum: 0. - :type port: object - :param authentication_type: The authentication type to be used to connect - to the SFTP server. Possible values include: 'Basic', 'SshPublicKey' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.SftpAuthenticationType - :param user_name: The username used to log on to the SFTP server. Type: - string (or Expression with resultType string). - :type user_name: object - :param password: Password to log on to the SFTP server for Basic - authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - :param private_key_path: The SSH private key file path for SshPublicKey - authentication. Only valid for on-premises copy. For on-premises copy with - SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent - should be specified. The SSH private key should be in OpenSSH format. Type: - string (or Expression with resultType string). - :type private_key_path: object - :param private_key_content: Base64 encoded SSH private key content for - SshPublicKey authentication. For on-premises copy with SshPublicKey - authentication, either PrivateKeyPath or PrivateKeyContent should be - specified. The SSH private key should be in OpenSSH format. - :type private_key_content: ~azure.mgmt.datafactory.models.SecretBase - :param pass_phrase: The password to decrypt the SSH private key if the SSH - private key is encrypted. - :type pass_phrase: ~azure.mgmt.datafactory.models.SecretBase - :param skip_host_key_validation: If true, skip the SSH host key - validation. Default value is false. Type: boolean (or Expression with - resultType boolean). - :type skip_host_key_validation: object - :param host_key_fingerprint: The host key fingerprint of the SFTP server. - When SkipHostKeyValidation is false, HostKeyFingerprint should be - specified. Type: string (or Expression with resultType string).
- :type host_key_fingerprint: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'private_key_path': {'key': 'typeProperties.privateKeyPath', 'type': 'object'}, - 'private_key_content': {'key': 'typeProperties.privateKeyContent', 'type': 'SecretBase'}, - 'pass_phrase': {'key': 'typeProperties.passPhrase', 'type': 'SecretBase'}, - 'skip_host_key_validation': {'key': 'typeProperties.skipHostKeyValidation', 'type': 'object'}, - 'host_key_fingerprint': {'key': 'typeProperties.hostKeyFingerprint', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SftpServerLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.port = kwargs.get('port', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.private_key_path = kwargs.get('private_key_path', None) - self.private_key_content = kwargs.get('private_key_content', None) - self.pass_phrase = kwargs.get('pass_phrase', None) - self.skip_host_key_validation = kwargs.get('skip_host_key_validation', None) - self.host_key_fingerprint = kwargs.get('host_key_fingerprint', None) - self.type = 'Sftp' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service_py3.py deleted file mode 100644 index 7decd7781348..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service_py3.py +++ /dev/null @@ -1,119 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class SftpServerLinkedService(LinkedService): - """A linked service for an SSH File Transfer Protocol (SFTP) server. . - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. 
- :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The SFTP server host name. Type: string (or - Expression with resultType string). - :type host: object - :param port: The TCP port number that the SFTP server uses to listen for - client connections. Default value is 22. Type: integer (or Expression with - resultType integer), minimum: 0. - :type port: object - :param authentication_type: The authentication type to be used to connect - to the FTP server. Possible values include: 'Basic', 'SshPublicKey' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.SftpAuthenticationType - :param user_name: The username used to log on to the SFTP server. Type: - string (or Expression with resultType string). - :type user_name: object - :param password: Password to logon the SFTP server for Basic - authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - :param private_key_path: The SSH private key file path for SshPublicKey - authentication. Only valid for on-premises copy. For on-premises copy with - SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent - should be specified. SSH private key should be OpenSSH format. Type: - string (or Expression with resultType string). - :type private_key_path: object - :param private_key_content: Base64 encoded SSH private key content for - SshPublicKey authentication. For on-premises copy with SshPublicKey - authentication, either PrivateKeyPath or PrivateKeyContent should be - specified. SSH private key should be OpenSSH format. - :type private_key_content: ~azure.mgmt.datafactory.models.SecretBase - :param pass_phrase: The password to decrypt the SSH private key if the SSH - private key is encrypted. - :type pass_phrase: ~azure.mgmt.datafactory.models.SecretBase - :param skip_host_key_validation: If true, skip the SSH host key - validation. Default value is false. Type: boolean (or Expression with - resultType boolean). - :type skip_host_key_validation: object - :param host_key_fingerprint: The host key finger-print of the SFTP server. - When SkipHostKeyValidation is false, HostKeyFingerprint should be - specified. Type: string (or Expression with resultType string). 
- :type host_key_fingerprint: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'private_key_path': {'key': 'typeProperties.privateKeyPath', 'type': 'object'}, - 'private_key_content': {'key': 'typeProperties.privateKeyContent', 'type': 'SecretBase'}, - 'pass_phrase': {'key': 'typeProperties.passPhrase', 'type': 'SecretBase'}, - 'skip_host_key_validation': {'key': 'typeProperties.skipHostKeyValidation', 'type': 'object'}, - 'host_key_fingerprint': {'key': 'typeProperties.hostKeyFingerprint', 'type': 'object'}, - } - - def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, private_key_path=None, private_key_content=None, pass_phrase=None, skip_host_key_validation=None, host_key_fingerprint=None, **kwargs) -> None: - super(SftpServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.port = port - self.authentication_type = authentication_type - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential - self.private_key_path = private_key_path - self.private_key_content = private_key_content - self.pass_phrase = pass_phrase - self.skip_host_key_validation = skip_host_key_validation - self.host_key_fingerprint = host_key_fingerprint - self.type = 'Sftp' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service.py deleted file mode 100644 index ee5311dceb7a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service.py +++ /dev/null @@ -1,86 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class ShopifyLinkedService(LinkedService): - """Shopify Service linked service. - - All required parameters must be populated in order to send to Azure. 
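A construction sketch for the SFTP linked service deleted above, using SshPublicKey authentication; host, user, key material, and fingerprint values are placeholders:

from azure.mgmt.datafactory.models import SecureString, SftpServerLinkedService

sftp_ls = SftpServerLinkedService(
    host='sftp.example.com',
    port=22,
    authentication_type='SshPublicKey',
    user_name='transfer',
    private_key_content=SecureString(value='<base64 OpenSSH private key>'),
    pass_phrase=SecureString(value='<key passphrase>'),
    skip_host_key_validation=False,
    host_key_fingerprint='<host key fingerprint>',  # required when validation is on
)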
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param host: Required. The endpoint of the Shopify server. (i.e.
-     mystore.myshopify.com)
-    :type host: object
-    :param access_token: The API access token that can be used to access
-     Shopify’s data. The token won't expire if it is in offline mode.
-    :type access_token: ~azure.mgmt.datafactory.models.SecretBase
-    :param use_encrypted_endpoints: Specifies whether the data source
-     endpoints are encrypted using HTTPS. The default value is true.
-    :type use_encrypted_endpoints: object
-    :param use_host_verification: Specifies whether to require the host name
-     in the server's certificate to match the host name of the server when
-     connecting over SSL. The default value is true.
-    :type use_host_verification: object
-    :param use_peer_verification: Specifies whether to verify the identity of
-     the server when connecting over SSL. The default value is true.
-    :type use_peer_verification: object
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ShopifyLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.access_token = kwargs.get('access_token', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Shopify' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service_py3.py deleted file mode 100644 index ea6189277552..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service_py3.py +++ /dev/null @@ -1,86 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class ShopifyLinkedService(LinkedService): - """Shopify Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The endpoint of the Shopify server. (i.e. - mystore.myshopify.com) - :type host: object - :param access_token: The API access token that can be used to access - Shopify’s data. 
The token won't expire if it is in offline mode.
-    :type access_token: ~azure.mgmt.datafactory.models.SecretBase
-    :param use_encrypted_endpoints: Specifies whether the data source
-     endpoints are encrypted using HTTPS. The default value is true.
-    :type use_encrypted_endpoints: object
-    :param use_host_verification: Specifies whether to require the host name
-     in the server's certificate to match the host name of the server when
-     connecting over SSL. The default value is true.
-    :type use_host_verification: object
-    :param use_peer_verification: Specifies whether to verify the identity of
-     the server when connecting over SSL. The default value is true.
-    :type use_peer_verification: object
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
-    :type encrypted_credential: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-        'host': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
-        'description': {'key': 'description', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'host': {'key': 'typeProperties.host', 'type': 'object'},
-        'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'},
-        'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
-        'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
-        'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
-        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
-    }
-
-    def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None:
-        super(ShopifyLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
-        self.host = host
-        self.access_token = access_token
-        self.use_encrypted_endpoints = use_encrypted_endpoints
-        self.use_host_verification = use_host_verification
-        self.use_peer_verification = use_peer_verification
-        self.encrypted_credential = encrypted_credential
-        self.type = 'Shopify'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset.py
deleted file mode 100644
index ab3e475b9c97..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset.py
+++ /dev/null
@@ -1,72 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
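A minimal usage sketch of the ShopifyLinkedService deleted above (not part of this patch); the store endpoint and token are hypothetical, and the secret is wrapped in the SDK's SecureString implementation of SecretBase.

```python
# Minimal sketch (hypothetical store and token): the ShopifyLinkedService
# model removed above.
from azure.mgmt.datafactory.models import SecureString, ShopifyLinkedService

shopify_ls = ShopifyLinkedService(
    host='mystore.myshopify.com',                   # endpoint, no scheme
    access_token=SecureString(value='<api-access-token>'),
    use_encrypted_endpoints=True,                   # default is true anyway
)
```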
-# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class ShopifyObjectDataset(Dataset): - """Shopify Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ShopifyObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'ShopifyObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset_py3.py deleted file mode 100644 index 98b9c43c21e8..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class ShopifyObjectDataset(Dataset): - """Shopify Service dataset. 
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(ShopifyObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'ShopifyObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source.py deleted file mode 100644 index d4596976d459..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
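A usage sketch for the ShopifyObjectDataset deleted above (not part of this patch): the dataset binds to an already-deployed linked service by reference name. The reference and object names are hypothetical.

```python
# Minimal sketch (hypothetical names): a ShopifyObjectDataset bound to a
# deployed linked service via LinkedServiceReference.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference,
    ShopifyObjectDataset,
)

orders_ds = ShopifyObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='ShopifyLS'),
    table_name='Orders',   # hypothetical Shopify object name
)
```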
-# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class ShopifySource(CopySource): - """A copy activity Shopify Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ShopifySource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'ShopifySource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source_py3.py deleted file mode 100644 index 6b56edd62904..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class ShopifySource(CopySource): - """A copy activity Shopify Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). 
- :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'ShopifySource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service.py deleted file mode 100644 index 4f9ab49a7bba..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service.py +++ /dev/null @@ -1,131 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class SparkLinkedService(LinkedService): - """Spark Server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. IP address or host name of the Spark server - :type host: object - :param port: Required. The TCP port that the Spark server uses to listen - for client connections. - :type port: object - :param server_type: The type of Spark server. Possible values include: - 'SharkServer', 'SharkServer2', 'SparkThriftServer' - :type server_type: str or ~azure.mgmt.datafactory.models.SparkServerType - :param thrift_transport_protocol: The transport protocol to use in the - Thrift layer. 
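To round out the Shopify model family deleted above, a sketch (not part of this patch) of how the ShopifySource slots into a copy activity; the activity, dataset, and query names are hypothetical, and the blob sink stands in for any supported sink.

```python
# Minimal sketch (hypothetical names): wiring the removed ShopifySource
# into a copy activity.
from azure.mgmt.datafactory.models import (
    BlobSink,
    CopyActivity,
    DatasetReference,
    ShopifySource,
)

copy = CopyActivity(
    name='CopyShopifyOrders',
    inputs=[DatasetReference(reference_name='ShopifyOrders')],
    outputs=[DatasetReference(reference_name='OrdersBlob')],
    source=ShopifySource(query='SELECT * FROM Orders'),  # query is optional
    sink=BlobSink(),
)
```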
Possible values include: 'Binary', 'SASL', 'HTTP ' - :type thrift_transport_protocol: str or - ~azure.mgmt.datafactory.models.SparkThriftTransportProtocol - :param authentication_type: Required. The authentication method used to - access the Spark server. Possible values include: 'Anonymous', 'Username', - 'UsernameAndPassword', 'WindowsAzureHDInsightService' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.SparkAuthenticationType - :param username: The user name that you use to access Spark Server. - :type username: object - :param password: The password corresponding to the user name that you - provided in the Username field - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param http_path: The partial URL corresponding to the Spark server. - :type http_path: object - :param enable_ssl: Specifies whether the connections to the server are - encrypted using SSL. The default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing - trusted CA certificates for verifying the server when connecting over SSL. - This property can only be set when using SSL on self-hosted IR. The - default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate - from the system trust store or from a specified PEM file. The default - value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a - CA-issued SSL certificate name to match the host name of the server when - connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow - self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'port': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, - 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SparkLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.port = kwargs.get('port', None) - self.server_type = kwargs.get('server_type', None) - self.thrift_transport_protocol = kwargs.get('thrift_transport_protocol', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.http_path = kwargs.get('http_path', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Spark' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service_py3.py deleted file mode 100644 index f6433b6ab187..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service_py3.py +++ /dev/null @@ -1,131 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class SparkLinkedService(LinkedService): - """Spark Server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. IP address or host name of the Spark server - :type host: object - :param port: Required. The TCP port that the Spark server uses to listen - for client connections. - :type port: object - :param server_type: The type of Spark server. Possible values include: - 'SharkServer', 'SharkServer2', 'SparkThriftServer' - :type server_type: str or ~azure.mgmt.datafactory.models.SparkServerType - :param thrift_transport_protocol: The transport protocol to use in the - Thrift layer. Possible values include: 'Binary', 'SASL', 'HTTP ' - :type thrift_transport_protocol: str or - ~azure.mgmt.datafactory.models.SparkThriftTransportProtocol - :param authentication_type: Required. The authentication method used to - access the Spark server. Possible values include: 'Anonymous', 'Username', - 'UsernameAndPassword', 'WindowsAzureHDInsightService' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.SparkAuthenticationType - :param username: The user name that you use to access Spark Server. - :type username: object - :param password: The password corresponding to the user name that you - provided in the Username field - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param http_path: The partial URL corresponding to the Spark server. - :type http_path: object - :param enable_ssl: Specifies whether the connections to the server are - encrypted using SSL. The default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing - trusted CA certificates for verifying the server when connecting over SSL. - This property can only be set when using SSL on self-hosted IR. The - default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate - from the system trust store or from a specified PEM file. The default - value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a - CA-issued SSL certificate name to match the host name of the server when - connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow - self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. 
Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'port': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, - 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, port, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, server_type=None, thrift_transport_protocol=None, username=None, password=None, http_path=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: - super(SparkLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.port = port - self.server_type = server_type - self.thrift_transport_protocol = thrift_transport_protocol - self.authentication_type = authentication_type - self.username = username - self.password = password - self.http_path = http_path - self.enable_ssl = enable_ssl - self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch - self.allow_self_signed_server_cert = allow_self_signed_server_cert - self.encrypted_credential = encrypted_credential - self.type = 'Spark' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset.py deleted file mode 100644 index bdbdf067e1ea..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. 
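A usage sketch of the SparkLinkedService deleted above (not part of this patch), using the documented 'UsernameAndPassword' authentication and 'Binary' Thrift transport; endpoint and credentials are hypothetical.

```python
# Minimal sketch (hypothetical endpoint and credentials): the
# SparkLinkedService model removed above, against a Spark Thrift server.
from azure.mgmt.datafactory.models import SecureString, SparkLinkedService

spark_ls = SparkLinkedService(
    host='spark.contoso.com',            # hypothetical Spark server
    port=10001,                          # required alongside host
    authentication_type='UsernameAndPassword',
    server_type='SparkThriftServer',
    thrift_transport_protocol='Binary',
    username='analyst',
    password=SecureString(value='<password>'),
    enable_ssl=True,
)
```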
All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class SparkObjectDataset(Dataset): - """Spark Server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of the Spark. Type: string (or Expression - with resultType string). - :type table: object - :param spark_object_dataset_schema: The schema name of the Spark. Type: - string (or Expression with resultType string). 
- :type spark_object_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'spark_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SparkObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.spark_object_dataset_schema = kwargs.get('spark_object_dataset_schema', None) - self.type = 'SparkObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset_py3.py deleted file mode 100644 index afe383951f1c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class SparkObjectDataset(Dataset): - """Spark Server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. 
- :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of the Spark. Type: string (or Expression - with resultType string). - :type table: object - :param spark_object_dataset_schema: The schema name of the Spark. Type: - string (or Expression with resultType string). - :type spark_object_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'spark_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, spark_object_dataset_schema=None, **kwargs) -> None: - super(SparkObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.table = table - self.spark_object_dataset_schema = spark_object_dataset_schema - self.type = 'SparkObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source.py deleted file mode 100644 index 6d670c1c6b2a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class SparkSource(CopySource): - """A copy activity Spark Server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
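A sketch (not part of this patch) of the SparkObjectDataset deleted above. Note the naming quirk visible in the attribute map: because the Dataset base class already uses `schema` for the physical column schema, the database schema under typeProperties.schema surfaces as `spark_object_dataset_schema`. Names below are hypothetical.

```python
# Minimal sketch (hypothetical names): a SparkObjectDataset. The database
# schema is passed as spark_object_dataset_schema, which serializes to
# typeProperties.schema.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference,
    SparkObjectDataset,
)

events_ds = SparkObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='SparkLS'),
    spark_object_dataset_schema='default',   # hypothetical schema name
    table='events',                          # hypothetical table name
)
```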
- :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SparkSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'SparkSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source_py3.py deleted file mode 100644 index 8da01b0cd823..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class SparkSource(CopySource): - """A copy activity Spark Server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'SparkSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink.py deleted file mode 100644 index 6b4785b91ab4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink.py +++ /dev/null @@ -1,77 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink import CopySink - - -class SqlDWSink(CopySink): - """A copy activity SQL Data Warehouse sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression - with resultType string). - :type pre_copy_script: object - :param allow_poly_base: Indicates to use PolyBase to copy data into SQL - Data Warehouse when applicable. Type: boolean (or Expression with - resultType boolean). - :type allow_poly_base: object - :param poly_base_settings: Specifies PolyBase-related settings when - allowPolyBase is true. 
- :type poly_base_settings: ~azure.mgmt.datafactory.models.PolybaseSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, - 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, - } - - def __init__(self, **kwargs): - super(SqlDWSink, self).__init__(**kwargs) - self.pre_copy_script = kwargs.get('pre_copy_script', None) - self.allow_poly_base = kwargs.get('allow_poly_base', None) - self.poly_base_settings = kwargs.get('poly_base_settings', None) - self.type = 'SqlDWSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink_py3.py deleted file mode 100644 index efe63dcf788a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink_py3.py +++ /dev/null @@ -1,77 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class SqlDWSink(CopySink): - """A copy activity SQL Data Warehouse sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression - with resultType string). - :type pre_copy_script: object - :param allow_poly_base: Indicates to use PolyBase to copy data into SQL - Data Warehouse when applicable. 
Type: boolean (or Expression with - resultType boolean). - :type allow_poly_base: object - :param poly_base_settings: Specifies PolyBase-related settings when - allowPolyBase is true. - :type poly_base_settings: ~azure.mgmt.datafactory.models.PolybaseSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, - 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, allow_poly_base=None, poly_base_settings=None, **kwargs) -> None: - super(SqlDWSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.pre_copy_script = pre_copy_script - self.allow_poly_base = allow_poly_base - self.poly_base_settings = poly_base_settings - self.type = 'SqlDWSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source.py deleted file mode 100644 index 1a020672f7c2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source.py +++ /dev/null @@ -1,70 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class SqlDWSource(CopySource): - """A copy activity SQL Data Warehouse source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. 
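A usage sketch of the SqlDWSink deleted above (not part of this patch), with PolyBase enabled for bulk loading; the staging table and reject settings are hypothetical.

```python
# Minimal sketch (hypothetical staging table): the removed SqlDWSink with
# PolyBase turned on for bulk loads into SQL Data Warehouse.
from azure.mgmt.datafactory.models import PolybaseSettings, SqlDWSink

dw_sink = SqlDWSink(
    pre_copy_script='TRUNCATE TABLE stg.Orders',   # hypothetical pre-copy step
    allow_poly_base=True,
    poly_base_settings=PolybaseSettings(
        reject_type='value',   # reject a fixed number of bad rows
        reject_value=0,
    ),
)
```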
- :type type: str - :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or - Expression with resultType string). - :type sql_reader_query: object - :param sql_reader_stored_procedure_name: Name of the stored procedure for - a SQL Data Warehouse source. This cannot be used at the same time as - SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: object - :param stored_procedure_parameters: Value and type setting for stored - procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". - Type: object (or Expression with resultType object), itemType: - StoredProcedureParameter. - :type stored_procedure_parameters: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, - 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SqlDWSource, self).__init__(**kwargs) - self.sql_reader_query = kwargs.get('sql_reader_query', None) - self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - self.type = 'SqlDWSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source_py3.py deleted file mode 100644 index ae8fe605024f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source_py3.py +++ /dev/null @@ -1,70 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class SqlDWSource(CopySource): - """A copy activity SQL Data Warehouse source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. 
Constant filled by server. - :type type: str - :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or - Expression with resultType string). - :type sql_reader_query: object - :param sql_reader_stored_procedure_name: Name of the stored procedure for - a SQL Data Warehouse source. This cannot be used at the same time as - SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: object - :param stored_procedure_parameters: Value and type setting for stored - procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". - Type: object (or Expression with resultType object), itemType: - StoredProcedureParameter. - :type stored_procedure_parameters: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, - 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: - super(SqlDWSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.sql_reader_query = sql_reader_query - self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name - self.stored_procedure_parameters = stored_procedure_parameters - self.type = 'SqlDWSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink.py deleted file mode 100644 index bb03e3e48da9..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink.py +++ /dev/null @@ -1,87 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink import CopySink - - -class SqlMISink(CopySink): - """A copy activity Azure SQL Managed Instance sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. 
Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_writer_stored_procedure_name: SQL writer stored procedure name. - Type: string (or Expression with resultType string). - :type sql_writer_stored_procedure_name: object - :param sql_writer_table_type: SQL writer table type. Type: string (or - Expression with resultType string). - :type sql_writer_table_type: object - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression - with resultType string). - :type pre_copy_script: object - :param stored_procedure_parameters: SQL stored procedure parameters. - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param stored_procedure_table_type_parameter_name: The stored procedure - parameter name of the table type. Type: string (or Expression with - resultType string). - :type stored_procedure_table_type_parameter_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, - 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SqlMISink, self).__init__(**kwargs) - self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) - self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) - self.pre_copy_script = kwargs.get('pre_copy_script', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) - self.type = 'SqlMISink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink_py3.py deleted file mode 100644 index 27d22f47d8a4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink_py3.py +++ /dev/null @@ -1,87 +0,0 @@ -# 
coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class SqlMISink(CopySink): - """A copy activity Azure SQL Managed Instance sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_writer_stored_procedure_name: SQL writer stored procedure name. - Type: string (or Expression with resultType string). - :type sql_writer_stored_procedure_name: object - :param sql_writer_table_type: SQL writer table type. Type: string (or - Expression with resultType string). - :type sql_writer_table_type: object - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression - with resultType string). - :type pre_copy_script: object - :param stored_procedure_parameters: SQL stored procedure parameters. - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param stored_procedure_table_type_parameter_name: The stored procedure - parameter name of the table type. Type: string (or Expression with - resultType string). 
- :type stored_procedure_table_type_parameter_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, - 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None: - super(SqlMISink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name - self.sql_writer_table_type = sql_writer_table_type - self.pre_copy_script = pre_copy_script - self.stored_procedure_parameters = stored_procedure_parameters - self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name - self.type = 'SqlMISink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source.py deleted file mode 100644 index 4d4db9b09281..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class SqlMISource(CopySource): - """A copy activity Azure SQL Managed Instance source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. 
Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_reader_query: SQL reader query. Type: string (or Expression - with resultType string). - :type sql_reader_query: object - :param sql_reader_stored_procedure_name: Name of the stored procedure for - a Azure SQL Managed Instance source. This cannot be used at the same time - as SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: object - :param stored_procedure_parameters: Value and type setting for stored - procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param produce_additional_types: Which additional types to produce. - :type produce_additional_types: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, - 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SqlMISource, self).__init__(**kwargs) - self.sql_reader_query = kwargs.get('sql_reader_query', None) - self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - self.produce_additional_types = kwargs.get('produce_additional_types', None) - self.type = 'SqlMISource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source_py3.py deleted file mode 100644 index 952bc7b4da4f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source_py3.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class SqlMISource(CopySource): - """A copy activity Azure SQL Managed Instance source. - - All required parameters must be populated in order to send to Azure. 
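As a usage sketch for the SqlMISource model removed in this hunk, a stored-procedure read might be assembled as follows. The procedure and parameter names are illustrative, and StoredProcedureParameter is the model the docstring above references:

    from azure.mgmt.datafactory.models import SqlMISource, StoredProcedureParameter

    # Illustrative sketch only, not part of this patch.
    source = SqlMISource(
        sql_reader_stored_procedure_name='dbo.uspGetRows',  # hypothetical procedure
        stored_procedure_parameters={
            'Parameter1': StoredProcedureParameter(value='1', type='Int'),
        },
    )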
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_reader_query: SQL reader query. Type: string (or Expression - with resultType string). - :type sql_reader_query: object - :param sql_reader_stored_procedure_name: Name of the stored procedure for - a Azure SQL Managed Instance source. This cannot be used at the same time - as SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: object - :param stored_procedure_parameters: Value and type setting for stored - procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param produce_additional_types: Which additional types to produce. - :type produce_additional_types: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, - 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: - super(SqlMISource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.sql_reader_query = sql_reader_query - self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name - self.stored_procedure_parameters = stored_procedure_parameters - self.produce_additional_types = produce_additional_types - self.type = 'SqlMISource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service.py deleted file mode 100644 index 45d342212ea4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service.py +++ /dev/null @@ -1,74 +0,0 @@ -# coding=utf-8 -# 
-------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class SqlServerLinkedService(LinkedService): - """SQL Server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param user_name: The on-premises Windows authentication user name. Type: - string (or Expression with resultType string). - :type user_name: object - :param password: The on-premises Windows authentication password. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SqlServerLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'SqlServer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service_py3.py deleted file mode 100644 index 3eb8c5063dc1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service_py3.py +++ /dev/null @@ -1,74 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class SqlServerLinkedService(LinkedService): - """SQL Server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param user_name: The on-premises Windows authentication user name. Type: - string (or Expression with resultType string). - :type user_name: object - :param password: The on-premises Windows authentication password. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. 
Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(SqlServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'SqlServer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink.py deleted file mode 100644 index 45b1f1273903..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink.py +++ /dev/null @@ -1,87 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink import CopySink - - -class SqlServerSink(CopySink): - """A copy activity SQL server sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_writer_stored_procedure_name: SQL writer stored procedure name. - Type: string (or Expression with resultType string). - :type sql_writer_stored_procedure_name: object - :param sql_writer_table_type: SQL writer table type. Type: string (or - Expression with resultType string). - :type sql_writer_table_type: object - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression - with resultType string). - :type pre_copy_script: object - :param stored_procedure_parameters: SQL stored procedure parameters. - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param stored_procedure_table_type_parameter_name: The stored procedure - parameter name of the table type. Type: string (or Expression with - resultType string). - :type stored_procedure_table_type_parameter_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, - 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SqlServerSink, self).__init__(**kwargs) - self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) - self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) - self.pre_copy_script = kwargs.get('pre_copy_script', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) - self.type = 'SqlServerSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink_py3.py deleted file mode 100644 index dbe1bf44e418..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink_py3.py +++ /dev/null @@ -1,87 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
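For context, a short sketch pairing the SqlServerLinkedService and SqlServerSink models that this patch deletes. SecureString is assumed to come from the same models package, and the connection values are illustrative:

    from azure.mgmt.datafactory.models import (
        SecureString,
        SqlServerLinkedService,
        SqlServerSink,
    )

    # Illustrative sketch only, not part of this patch.
    linked_service = SqlServerLinkedService(
        connection_string='Server=myserver;Database=mydb;',  # hypothetical
        user_name='CONTOSO\\etl_user',  # on-premises Windows auth user
        password=SecureString(value='<secret>'),  # assumed secret model
    )
    sink = SqlServerSink(pre_copy_script='TRUNCATE TABLE dbo.Target')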
-# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class SqlServerSink(CopySink): - """A copy activity SQL server sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_writer_stored_procedure_name: SQL writer stored procedure name. - Type: string (or Expression with resultType string). - :type sql_writer_stored_procedure_name: object - :param sql_writer_table_type: SQL writer table type. Type: string (or - Expression with resultType string). - :type sql_writer_table_type: object - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression - with resultType string). - :type pre_copy_script: object - :param stored_procedure_parameters: SQL stored procedure parameters. - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param stored_procedure_table_type_parameter_name: The stored procedure - parameter name of the table type. Type: string (or Expression with - resultType string). 
- :type stored_procedure_table_type_parameter_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, - 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None: - super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name - self.sql_writer_table_type = sql_writer_table_type - self.pre_copy_script = pre_copy_script - self.stored_procedure_parameters = stored_procedure_parameters - self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name - self.type = 'SqlServerSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source.py deleted file mode 100644 index f9aa011047ea..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class SqlServerSource(CopySource): - """A copy activity SQL server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. 
Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_reader_query: SQL reader query. Type: string (or Expression - with resultType string). - :type sql_reader_query: object - :param sql_reader_stored_procedure_name: Name of the stored procedure for - a SQL Database source. This cannot be used at the same time as - SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: object - :param stored_procedure_parameters: Value and type setting for stored - procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param produce_additional_types: Which additional types to produce. - :type produce_additional_types: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, - 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SqlServerSource, self).__init__(**kwargs) - self.sql_reader_query = kwargs.get('sql_reader_query', None) - self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - self.produce_additional_types = kwargs.get('produce_additional_types', None) - self.type = 'SqlServerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source_py3.py deleted file mode 100644 index 27d12985e595..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source_py3.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class SqlServerSource(CopySource): - """A copy activity SQL server source. - - All required parameters must be populated in order to send to Azure. 
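A corresponding sketch for the SqlServerSource model removed here, using a plain reader query; per the docstring above, this property cannot be combined with sqlReaderStoredProcedureName. The query text is illustrative:

    from azure.mgmt.datafactory.models import SqlServerSource

    # Illustrative sketch only, not part of this patch.
    source = SqlServerSource(sql_reader_query='SELECT * FROM dbo.MyTable')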
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_reader_query: SQL reader query. Type: string (or Expression - with resultType string). - :type sql_reader_query: object - :param sql_reader_stored_procedure_name: Name of the stored procedure for - a SQL Database source. This cannot be used at the same time as - SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: object - :param stored_procedure_parameters: Value and type setting for stored - procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param produce_additional_types: Which additional types to produce. - :type produce_additional_types: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, - 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: - super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.sql_reader_query = sql_reader_query - self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name - self.stored_procedure_parameters = stored_procedure_parameters - self.produce_additional_types = produce_additional_types - self.type = 'SqlServerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity.py deleted file mode 100644 index 6f31002f32d1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity.py +++ /dev/null @@ -1,70 +0,0 @@ -# coding=utf-8 -# 
-------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity import ExecutionActivity - - -class SqlServerStoredProcedureActivity(ExecutionActivity): - """SQL stored procedure activity type. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param stored_procedure_name: Required. Stored procedure name. Type: - string (or Expression with resultType string). - :type stored_procedure_name: object - :param stored_procedure_parameters: Value and type setting for stored - procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". 
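As a sketch of the SqlServerStoredProcedureActivity model deleted in this hunk, with its two required properties (name and storedProcedureName) populated. LinkedServiceReference is assumed from the same package, and the names are illustrative:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        SqlServerStoredProcedureActivity,
        StoredProcedureParameter,
    )

    # Illustrative sketch only, not part of this patch.
    activity = SqlServerStoredProcedureActivity(
        name='RunNightlyProc',  # hypothetical activity name
        stored_procedure_name='dbo.uspNightly',
        linked_service_name=LinkedServiceReference(reference_name='SqlServerLS'),
        stored_procedure_parameters={
            'Parameter1': StoredProcedureParameter(value='1', type='Int'),
        },
    )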
- :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'stored_procedure_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'stored_procedure_name': {'key': 'typeProperties.storedProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'typeProperties.storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - } - - def __init__(self, **kwargs): - super(SqlServerStoredProcedureActivity, self).__init__(**kwargs) - self.stored_procedure_name = kwargs.get('stored_procedure_name', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - self.type = 'SqlServerStoredProcedure' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity_py3.py deleted file mode 100644 index 477f0c6c775c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity_py3.py +++ /dev/null @@ -1,70 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class SqlServerStoredProcedureActivity(ExecutionActivity): - """SQL stored procedure activity type. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param stored_procedure_name: Required. Stored procedure name. Type: - string (or Expression with resultType string). - :type stored_procedure_name: object - :param stored_procedure_parameters: Value and type setting for stored - procedure parameters. 
Example: "{Parameter1: {value: "1", type: "int"}}". - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'stored_procedure_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'stored_procedure_name': {'key': 'typeProperties.storedProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'typeProperties.storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - } - - def __init__(self, *, name: str, stored_procedure_name, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, stored_procedure_parameters=None, **kwargs) -> None: - super(SqlServerStoredProcedureActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.stored_procedure_name = stored_procedure_name - self.stored_procedure_parameters = stored_procedure_parameters - self.type = 'SqlServerStoredProcedure' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py deleted file mode 100644 index 3998671ee8ae..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class SqlServerTableDataset(Dataset): - """The on-premises SQL Server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param sql_server_table_dataset_schema: The schema name of the SQL Server - dataset. Type: string (or Expression with resultType string). - :type sql_server_table_dataset_schema: object - :param table: The table name of the SQL Server dataset. Type: string (or - Expression with resultType string). - :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'sql_server_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SqlServerTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.sql_server_table_dataset_schema = kwargs.get('sql_server_table_dataset_schema', None) - self.table = kwargs.get('table', None) - self.type = 'SqlServerTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py deleted file mode 100644 index 989780c9bfda..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class SqlServerTableDataset(Dataset): - """The on-premises SQL Server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. 
Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param sql_server_table_dataset_schema: The schema name of the SQL Server - dataset. Type: string (or Expression with resultType string). - :type sql_server_table_dataset_schema: object - :param table: The table name of the SQL Server dataset. Type: string (or - Expression with resultType string). - :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'sql_server_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, sql_server_table_dataset_schema=None, table=None, **kwargs) -> None: - super(SqlServerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.sql_server_table_dataset_schema = sql_server_table_dataset_schema - self.table = table - self.type = 'SqlServerTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py deleted file mode 100644 index 7ec0313aab4b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py +++ /dev/null @@ -1,87 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
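# ---------------------------------------------------------------------------
# Editor's sketch (not part of the patch): per the docstring just removed,
# table_name is being retired in favour of the split schema + table
# properties. A minimal SqlServerTableDataset in the new style, with a
# hypothetical linked service name:
from azure.mgmt.datafactory.models import (
    LinkedServiceReference,
    SqlServerTableDataset,
)

dataset = SqlServerTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='SqlServerLS'),
    sql_server_table_dataset_schema='dbo',  # serialized as typeProperties.schema
    table='SalesOrderHeader',               # serialized as typeProperties.table
)
# ---------------------------------------------------------------------------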
See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink import CopySink - - -class SqlSink(CopySink): - """A copy activity SQL sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_writer_stored_procedure_name: SQL writer stored procedure name. - Type: string (or Expression with resultType string). - :type sql_writer_stored_procedure_name: object - :param sql_writer_table_type: SQL writer table type. Type: string (or - Expression with resultType string). - :type sql_writer_table_type: object - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression - with resultType string). - :type pre_copy_script: object - :param stored_procedure_parameters: SQL stored procedure parameters. - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param stored_procedure_table_type_parameter_name: The stored procedure - parameter name of the table type. Type: string (or Expression with - resultType string). 
- :type stored_procedure_table_type_parameter_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, - 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SqlSink, self).__init__(**kwargs) - self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) - self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) - self.pre_copy_script = kwargs.get('pre_copy_script', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) - self.type = 'SqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py deleted file mode 100644 index 1f6bb9685082..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py +++ /dev/null @@ -1,87 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class SqlSink(CopySink): - """A copy activity SQL sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_writer_stored_procedure_name: SQL writer stored procedure name. - Type: string (or Expression with resultType string). - :type sql_writer_stored_procedure_name: object - :param sql_writer_table_type: SQL writer table type. Type: string (or - Expression with resultType string). - :type sql_writer_table_type: object - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression - with resultType string). - :type pre_copy_script: object - :param stored_procedure_parameters: SQL stored procedure parameters. - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param stored_procedure_table_type_parameter_name: The stored procedure - parameter name of the table type. Type: string (or Expression with - resultType string). - :type stored_procedure_table_type_parameter_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, - 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None: - super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name - self.sql_writer_table_type = sql_writer_table_type - self.pre_copy_script = pre_copy_script - self.stored_procedure_parameters = stored_procedure_parameters - self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name - self.type = 'SqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source.py deleted file mode 100644 index bb31474b1f7c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source.py +++ /dev/null @@ -1,69 +0,0 @@ -# 
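# ---------------------------------------------------------------------------
# Editor's sketch (not part of the patch): every SqlSink constructor argument
# is optional. The timeout below follows the hh:mm:ss pattern given in the
# docstring; the pre-copy script is a hypothetical staging-table reset.
from azure.mgmt.datafactory.models import SqlSink

sink = SqlSink(
    write_batch_size=10000,          # integer, minimum 0
    write_batch_timeout='00:30:00',  # matches ((\d+)\.)?(\d\d):mm:ss pattern
    pre_copy_script='TRUNCATE TABLE dbo.Staging',
)
# ---------------------------------------------------------------------------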
coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class SqlSource(CopySource): - """A copy activity SQL source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_reader_query: SQL reader query. Type: string (or Expression - with resultType string). - :type sql_reader_query: object - :param sql_reader_stored_procedure_name: Name of the stored procedure for - a SQL Database source. This cannot be used at the same time as - SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: object - :param stored_procedure_parameters: Value and type setting for stored - procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". 
- :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, - 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - } - - def __init__(self, **kwargs): - super(SqlSource, self).__init__(**kwargs) - self.sql_reader_query = kwargs.get('sql_reader_query', None) - self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - self.type = 'SqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source_py3.py deleted file mode 100644 index dcad458fd4a6..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source_py3.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class SqlSource(CopySource): - """A copy activity SQL source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_reader_query: SQL reader query. Type: string (or Expression - with resultType string). - :type sql_reader_query: object - :param sql_reader_stored_procedure_name: Name of the stored procedure for - a SQL Database source. This cannot be used at the same time as - SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: object - :param stored_procedure_parameters: Value and type setting for stored - procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". 
- :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, - 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: - super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.sql_reader_query = sql_reader_query - self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name - self.stored_procedure_parameters = stored_procedure_parameters - self.type = 'SqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service.py deleted file mode 100644 index 4edfc8b211f7..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service.py +++ /dev/null @@ -1,98 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class SquareLinkedService(LinkedService): - """Square Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The URL of the Square instance. (i.e. - mystore.mysquare.com) - :type host: object - :param client_id: Required. The client ID associated with your Square - application. 
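# ---------------------------------------------------------------------------
# Editor's sketch (not part of the patch): the docstring just removed notes
# that sql_reader_query and sql_reader_stored_procedure_name cannot be used
# together, so a SqlSource is built one way or the other. The query text and
# procedure name here are hypothetical.
from azure.mgmt.datafactory.models import SqlSource, StoredProcedureParameter

query_source = SqlSource(sql_reader_query='SELECT * FROM dbo.SalesOrderHeader')

procedure_source = SqlSource(
    sql_reader_stored_procedure_name='[dbo].[usp_GetOrders]',
    stored_procedure_parameters={
        'Parameter1': StoredProcedureParameter(value='1', type='Int'),
    },
)
# ---------------------------------------------------------------------------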
- :type client_id: object - :param client_secret: The client secret associated with your Square - application. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param redirect_uri: Required. The redirect URL assigned in the Square - application dashboard. (i.e. http://localhost:2500) - :type redirect_uri: object - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'client_id': {'required': True}, - 'redirect_uri': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'redirect_uri': {'key': 'typeProperties.redirectUri', 'type': 'object'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SquareLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.redirect_uri = kwargs.get('redirect_uri', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Square' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service_py3.py deleted file mode 100644 index 40719f600a18..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service_py3.py +++ /dev/null @@ -1,98 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. 
-# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class SquareLinkedService(LinkedService): - """Square Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The URL of the Square instance. (i.e. - mystore.mysquare.com) - :type host: object - :param client_id: Required. The client ID associated with your Square - application. - :type client_id: object - :param client_secret: The client secret associated with your Square - application. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param redirect_uri: Required. The redirect URL assigned in the Square - application dashboard. (i.e. http://localhost:2500) - :type redirect_uri: object - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'client_id': {'required': True}, - 'redirect_uri': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'redirect_uri': {'key': 'typeProperties.redirectUri', 'type': 'object'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, client_id, redirect_uri, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(SquareLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.client_id = client_id - self.client_secret = client_secret - self.redirect_uri = redirect_uri - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'Square' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset.py deleted file mode 100644 index 3903382d2e3a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class SquareObjectDataset(Dataset): - """Square Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. 
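# ---------------------------------------------------------------------------
# Editor's sketch (not part of the patch): host, client_id and redirect_uri
# are the required SquareLinkedService fields, and the client secret is
# wrapped in a SecureString. All concrete values below are placeholders.
from azure.mgmt.datafactory.models import SecureString, SquareLinkedService

square_ls = SquareLinkedService(
    host='mystore.mysquare.com',           # required
    client_id='<client-id>',               # required
    redirect_uri='http://localhost:2500',  # required
    client_secret=SecureString(value='<client-secret>'),
)
# ---------------------------------------------------------------------------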
- :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SquareObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'SquareObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset_py3.py deleted file mode 100644 index 6d624dc6feef..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class SquareObjectDataset(Dataset): - """Square Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. 
- :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(SquareObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'SquareObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source.py deleted file mode 100644 index f083df43f13a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class SquareSource(CopySource): - """A copy activity Square Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. 
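# ---------------------------------------------------------------------------
# Editor's sketch (not part of the patch): only the linked service reference
# is required for a SquareObjectDataset; the table name below is a
# hypothetical Square object.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference,
    SquareObjectDataset,
)

square_dataset = SquareObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='SquareLS'),
    table_name='Payments',
)
# ---------------------------------------------------------------------------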
Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SquareSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'SquareSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source_py3.py deleted file mode 100644 index ec8a741d564c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class SquareSource(CopySource): - """A copy activity Square Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'SquareSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential.py deleted file mode 100644 index 63512fdec4d8..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential.py +++ /dev/null @@ -1,44 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SSISAccessCredential(Model): - """SSIS access credential. - - All required parameters must be populated in order to send to Azure. - - :param domain: Required. Domain for windows authentication. - :type domain: object - :param user_name: Required. UseName for windows authentication. - :type user_name: object - :param password: Required. Password for windows authentication. - :type password: ~azure.mgmt.datafactory.models.SecureString - """ - - _validation = { - 'domain': {'required': True}, - 'user_name': {'required': True}, - 'password': {'required': True}, - } - - _attribute_map = { - 'domain': {'key': 'domain', 'type': 'object'}, - 'user_name': {'key': 'userName', 'type': 'object'}, - 'password': {'key': 'password', 'type': 'SecureString'}, - } - - def __init__(self, **kwargs): - super(SSISAccessCredential, self).__init__(**kwargs) - self.domain = kwargs.get('domain', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential_py3.py deleted file mode 100644 index 5df0fc8941da..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential_py3.py +++ /dev/null @@ -1,44 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
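# ---------------------------------------------------------------------------
# Editor's sketch (not part of the patch): a SquareSource and a SqlSink are
# typically paired inside a CopyActivity. CopyActivity itself is only
# touched, not shown, in this patch, so treat its keyword names as an
# assumption; dataset and activity names are hypothetical.
from azure.mgmt.datafactory.models import (
    CopyActivity,
    DatasetReference,
    SqlSink,
    SquareSource,
)

copy_activity = CopyActivity(
    name='CopySquareToSql',
    source=SquareSource(query='SELECT * FROM Payments'),
    sink=SqlSink(pre_copy_script='TRUNCATE TABLE dbo.Payments'),
    inputs=[DatasetReference(reference_name='SquareDataset')],
    outputs=[DatasetReference(reference_name='SqlTableDataset')],
)
# ---------------------------------------------------------------------------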
-# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SSISAccessCredential(Model): - """SSIS access credential. - - All required parameters must be populated in order to send to Azure. - - :param domain: Required. Domain for windows authentication. - :type domain: object - :param user_name: Required. UseName for windows authentication. - :type user_name: object - :param password: Required. Password for windows authentication. - :type password: ~azure.mgmt.datafactory.models.SecureString - """ - - _validation = { - 'domain': {'required': True}, - 'user_name': {'required': True}, - 'password': {'required': True}, - } - - _attribute_map = { - 'domain': {'key': 'domain', 'type': 'object'}, - 'user_name': {'key': 'userName', 'type': 'object'}, - 'password': {'key': 'password', 'type': 'SecureString'}, - } - - def __init__(self, *, domain, user_name, password, **kwargs) -> None: - super(SSISAccessCredential, self).__init__(**kwargs) - self.domain = domain - self.user_name = user_name - self.password = password diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment.py deleted file mode 100644 index 5dff9764e2a2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment.py +++ /dev/null @@ -1,51 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .ssis_object_metadata import SsisObjectMetadata - - -class SsisEnvironment(SsisObjectMetadata): - """Ssis environment. - - All required parameters must be populated in order to send to Azure. - - :param id: Metadata id. - :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. - :type description: str - :param type: Required. Constant filled by server. - :type type: str - :param folder_id: Folder id which contains environment. 
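# ---------------------------------------------------------------------------
# Editor's sketch (not part of the patch): all three SSISAccessCredential
# fields are required, user_name is the Windows user name, and the password
# must be a SecureString. Values below are placeholders.
from azure.mgmt.datafactory.models import SecureString, SSISAccessCredential

access_credential = SSISAccessCredential(
    domain='CONTOSO',
    user_name='ssis_reader',
    password=SecureString(value='<password>'),
)
# ---------------------------------------------------------------------------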
- :type folder_id: long - :param variables: Variable in environment - :type variables: list[~azure.mgmt.datafactory.models.SsisVariable] - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_id': {'key': 'folderId', 'type': 'long'}, - 'variables': {'key': 'variables', 'type': '[SsisVariable]'}, - } - - def __init__(self, **kwargs): - super(SsisEnvironment, self).__init__(**kwargs) - self.folder_id = kwargs.get('folder_id', None) - self.variables = kwargs.get('variables', None) - self.type = 'Environment' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_py3.py deleted file mode 100644 index 43697ba62146..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_py3.py +++ /dev/null @@ -1,51 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .ssis_object_metadata_py3 import SsisObjectMetadata - - -class SsisEnvironment(SsisObjectMetadata): - """Ssis environment. - - All required parameters must be populated in order to send to Azure. - - :param id: Metadata id. - :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. - :type description: str - :param type: Required. Constant filled by server. - :type type: str - :param folder_id: Folder id which contains environment. - :type folder_id: long - :param variables: Variable in environment - :type variables: list[~azure.mgmt.datafactory.models.SsisVariable] - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_id': {'key': 'folderId', 'type': 'long'}, - 'variables': {'key': 'variables', 'type': '[SsisVariable]'}, - } - - def __init__(self, *, id: int=None, name: str=None, description: str=None, folder_id: int=None, variables=None, **kwargs) -> None: - super(SsisEnvironment, self).__init__(id=id, name=name, description=description, **kwargs) - self.folder_id = folder_id - self.variables = variables - self.type = 'Environment' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference.py deleted file mode 100644 index e7d31d369392..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference.py +++ /dev/null @@ -1,40 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SsisEnvironmentReference(Model): - """Ssis environment reference. - - :param id: Environment reference id. - :type id: long - :param environment_folder_name: Environment folder name. - :type environment_folder_name: str - :param environment_name: Environment name. - :type environment_name: str - :param reference_type: Reference type - :type reference_type: str - """ - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'environment_folder_name': {'key': 'environmentFolderName', 'type': 'str'}, - 'environment_name': {'key': 'environmentName', 'type': 'str'}, - 'reference_type': {'key': 'referenceType', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(SsisEnvironmentReference, self).__init__(**kwargs) - self.id = kwargs.get('id', None) - self.environment_folder_name = kwargs.get('environment_folder_name', None) - self.environment_name = kwargs.get('environment_name', None) - self.reference_type = kwargs.get('reference_type', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference_py3.py deleted file mode 100644 index 14cbfca99d4f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference_py3.py +++ /dev/null @@ -1,40 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SsisEnvironmentReference(Model): - """Ssis environment reference. - - :param id: Environment reference id. - :type id: long - :param environment_folder_name: Environment folder name. - :type environment_folder_name: str - :param environment_name: Environment name. 
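# ---------------------------------------------------------------------------
# Editor's sketch (not part of the patch): SsisEnvironment and
# SsisEnvironmentReference are metadata models the service normally returns;
# constructing them by hand, as below with placeholder values, is mainly
# useful in tests.
from azure.mgmt.datafactory.models import (
    SsisEnvironment,
    SsisEnvironmentReference,
)

environment = SsisEnvironment(id=1, name='Prod', folder_id=10)
reference = SsisEnvironmentReference(
    id=1,
    environment_folder_name='Envs',
    environment_name='Prod',
    reference_type='Relative',  # assumption: SSIS-catalog-style relative reference
)
# ---------------------------------------------------------------------------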
-    :type environment_name: str
-    :param reference_type: Reference type
-    :type reference_type: str
-    """
-
-    _attribute_map = {
-        'id': {'key': 'id', 'type': 'long'},
-        'environment_folder_name': {'key': 'environmentFolderName', 'type': 'str'},
-        'environment_name': {'key': 'environmentName', 'type': 'str'},
-        'reference_type': {'key': 'referenceType', 'type': 'str'},
-    }
-
-    def __init__(self, *, id: int=None, environment_folder_name: str=None, environment_name: str=None, reference_type: str=None, **kwargs) -> None:
-        super(SsisEnvironmentReference, self).__init__(**kwargs)
-        self.id = id
-        self.environment_folder_name = environment_folder_name
-        self.environment_name = environment_name
-        self.reference_type = reference_type
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential.py
deleted file mode 100644
index c090694416a9..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class SSISExecutionCredential(Model):
-    """SSIS package execution credential.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param domain: Required. Domain for windows authentication.
-    :type domain: object
-    :param user_name: Required. UserName for windows authentication.
-    :type user_name: object
-    :param password: Required. Password for windows authentication.
-    :type password: ~azure.mgmt.datafactory.models.SecureString
-    """
-
-    _validation = {
-        'domain': {'required': True},
-        'user_name': {'required': True},
-        'password': {'required': True},
-    }
-
-    _attribute_map = {
-        'domain': {'key': 'domain', 'type': 'object'},
-        'user_name': {'key': 'userName', 'type': 'object'},
-        'password': {'key': 'password', 'type': 'SecureString'},
-    }
-
-    def __init__(self, **kwargs):
-        super(SSISExecutionCredential, self).__init__(**kwargs)
-        self.domain = kwargs.get('domain', None)
-        self.user_name = kwargs.get('user_name', None)
-        self.password = kwargs.get('password', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential_py3.py
deleted file mode 100644
index 051eaffa2bf2..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential_py3.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class SSISExecutionCredential(Model):
-    """SSIS package execution credential.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param domain: Required. Domain for windows authentication.
-    :type domain: object
-    :param user_name: Required. UserName for windows authentication.
-    :type user_name: object
-    :param password: Required. Password for windows authentication.
-    :type password: ~azure.mgmt.datafactory.models.SecureString
-    """
-
-    _validation = {
-        'domain': {'required': True},
-        'user_name': {'required': True},
-        'password': {'required': True},
-    }
-
-    _attribute_map = {
-        'domain': {'key': 'domain', 'type': 'object'},
-        'user_name': {'key': 'userName', 'type': 'object'},
-        'password': {'key': 'password', 'type': 'SecureString'},
-    }
-
-    def __init__(self, *, domain, user_name, password, **kwargs) -> None:
-        super(SSISExecutionCredential, self).__init__(**kwargs)
-        self.domain = domain
-        self.user_name = user_name
-        self.password = password
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter.py
deleted file mode 100644
index 36f295c5a4aa..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class SSISExecutionParameter(Model):
-    """SSIS execution parameter.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param value: Required. SSIS package execution parameter value. Type:
-     string (or Expression with resultType string).
-    :type value: object
-    """
-
-    _validation = {
-        'value': {'required': True},
-    }
-
-    _attribute_map = {
-        'value': {'key': 'value', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(SSISExecutionParameter, self).__init__(**kwargs)
-        self.value = kwargs.get('value', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter_py3.py
deleted file mode 100644
index cd10dd457a42..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter_py3.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SSISExecutionParameter(Model): - """SSIS execution parameter. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. SSIS package execution parameter value. Type: - string (or Expression with resultType string). - :type value: object - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': 'object'}, - } - - def __init__(self, *, value, **kwargs) -> None: - super(SSISExecutionParameter, self).__init__(**kwargs) - self.value = value diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder.py deleted file mode 100644 index 350b0d92852b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder.py +++ /dev/null @@ -1,43 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .ssis_object_metadata import SsisObjectMetadata - - -class SsisFolder(SsisObjectMetadata): - """Ssis folder. - - All required parameters must be populated in order to send to Azure. - - :param id: Metadata id. - :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. - :type description: str - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(SsisFolder, self).__init__(**kwargs) - self.type = 'Folder' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder_py3.py deleted file mode 100644 index d6483fda2c08..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder_py3.py +++ /dev/null @@ -1,43 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .ssis_object_metadata_py3 import SsisObjectMetadata - - -class SsisFolder(SsisObjectMetadata): - """Ssis folder. - - All required parameters must be populated in order to send to Azure. - - :param id: Metadata id. - :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. - :type description: str - :param type: Required. 
Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, *, id: int=None, name: str=None, description: str=None, **kwargs) -> None: - super(SsisFolder, self).__init__(id=id, name=name, description=description, **kwargs) - self.type = 'Folder' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location.py deleted file mode 100644 index cfdebe717541..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SSISLogLocation(Model): - """SSIS package execution log location. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param log_path: Required. The SSIS package execution log path. Type: - string (or Expression with resultType string). - :type log_path: object - :ivar type: Required. The type of SSIS log location. Default value: "File" - . - :vartype type: str - :param access_credential: The package execution log access credential. - :type access_credential: - ~azure.mgmt.datafactory.models.SSISAccessCredential - :param log_refresh_interval: Specifies the interval to refresh log. The - default interval is 5 minutes. Type: string (or Expression with resultType - string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type log_refresh_interval: object - """ - - _validation = { - 'log_path': {'required': True}, - 'type': {'required': True, 'constant': True}, - } - - _attribute_map = { - 'log_path': {'key': 'logPath', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, - 'log_refresh_interval': {'key': 'typeProperties.logRefreshInterval', 'type': 'object'}, - } - - type = "File" - - def __init__(self, **kwargs): - super(SSISLogLocation, self).__init__(**kwargs) - self.log_path = kwargs.get('log_path', None) - self.access_credential = kwargs.get('access_credential', None) - self.log_refresh_interval = kwargs.get('log_refresh_interval', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location_py3.py deleted file mode 100644 index de4fbe35dcb3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SSISLogLocation(Model): - """SSIS package execution log location. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param log_path: Required. The SSIS package execution log path. Type: - string (or Expression with resultType string). - :type log_path: object - :ivar type: Required. The type of SSIS log location. Default value: "File" - . - :vartype type: str - :param access_credential: The package execution log access credential. - :type access_credential: - ~azure.mgmt.datafactory.models.SSISAccessCredential - :param log_refresh_interval: Specifies the interval to refresh log. The - default interval is 5 minutes. Type: string (or Expression with resultType - string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type log_refresh_interval: object - """ - - _validation = { - 'log_path': {'required': True}, - 'type': {'required': True, 'constant': True}, - } - - _attribute_map = { - 'log_path': {'key': 'logPath', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, - 'log_refresh_interval': {'key': 'typeProperties.logRefreshInterval', 'type': 'object'}, - } - - type = "File" - - def __init__(self, *, log_path, access_credential=None, log_refresh_interval=None, **kwargs) -> None: - super(SSISLogLocation, self).__init__(**kwargs) - self.log_path = log_path - self.access_credential = access_credential - self.log_refresh_interval = log_refresh_interval diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata.py deleted file mode 100644 index 811075137f41..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata.py +++ /dev/null @@ -1,53 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SsisObjectMetadata(Model): - """SSIS object metadata. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SsisEnvironment, SsisPackage, SsisProject, SsisFolder - - All required parameters must be populated in order to send to Azure. - - :param id: Metadata id. - :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. - :type description: str - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'Environment': 'SsisEnvironment', 'Package': 'SsisPackage', 'Project': 'SsisProject', 'Folder': 'SsisFolder'} - } - - def __init__(self, **kwargs): - super(SsisObjectMetadata, self).__init__(**kwargs) - self.id = kwargs.get('id', None) - self.name = kwargs.get('name', None) - self.description = kwargs.get('description', None) - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response.py deleted file mode 100644 index a029c9f7ebc4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response.py +++ /dev/null @@ -1,33 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
-# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SsisObjectMetadataListResponse(Model): - """A list of SSIS object metadata. - - :param value: List of SSIS object metadata. - :type value: list[~azure.mgmt.datafactory.models.SsisObjectMetadata] - :param next_link: The link to the next page of results, if any remaining - results exist. - :type next_link: str - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[SsisObjectMetadata]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(SsisObjectMetadataListResponse, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.next_link = kwargs.get('next_link', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response_py3.py deleted file mode 100644 index 79931e1ceaf7..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response_py3.py +++ /dev/null @@ -1,33 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SsisObjectMetadataListResponse(Model): - """A list of SSIS object metadata. - - :param value: List of SSIS object metadata. - :type value: list[~azure.mgmt.datafactory.models.SsisObjectMetadata] - :param next_link: The link to the next page of results, if any remaining - results exist. - :type next_link: str - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[SsisObjectMetadata]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__(self, *, value=None, next_link: str=None, **kwargs) -> None: - super(SsisObjectMetadataListResponse, self).__init__(**kwargs) - self.value = value - self.next_link = next_link diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_py3.py deleted file mode 100644 index 45f7e15af4fa..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_py3.py +++ /dev/null @@ -1,53 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SsisObjectMetadata(Model): - """SSIS object metadata. 
- - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SsisEnvironment, SsisPackage, SsisProject, SsisFolder - - All required parameters must be populated in order to send to Azure. - - :param id: Metadata id. - :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. - :type description: str - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'Environment': 'SsisEnvironment', 'Package': 'SsisPackage', 'Project': 'SsisProject', 'Folder': 'SsisFolder'} - } - - def __init__(self, *, id: int=None, name: str=None, description: str=None, **kwargs) -> None: - super(SsisObjectMetadata, self).__init__(**kwargs) - self.id = id - self.name = name - self.description = description - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response.py deleted file mode 100644 index 9b782613ee08..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response.py +++ /dev/null @@ -1,40 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SsisObjectMetadataStatusResponse(Model): - """The status of the operation. - - :param status: The status of the operation. - :type status: str - :param name: The operation name. - :type name: str - :param properties: The operation properties. - :type properties: str - :param error: The operation error message. - :type error: str - """ - - _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(SsisObjectMetadataStatusResponse, self).__init__(**kwargs) - self.status = kwargs.get('status', None) - self.name = kwargs.get('name', None) - self.properties = kwargs.get('properties', None) - self.error = kwargs.get('error', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response_py3.py deleted file mode 100644 index a4b82b8f6bcd..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response_py3.py +++ /dev/null @@ -1,40 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SsisObjectMetadataStatusResponse(Model): - """The status of the operation. - - :param status: The status of the operation. - :type status: str - :param name: The operation name. - :type name: str - :param properties: The operation properties. - :type properties: str - :param error: The operation error message. - :type error: str - """ - - _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'str'}, - } - - def __init__(self, *, status: str=None, name: str=None, properties: str=None, error: str=None, **kwargs) -> None: - super(SsisObjectMetadataStatusResponse, self).__init__(**kwargs) - self.status = status - self.name = name - self.properties = properties - self.error = error diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package.py deleted file mode 100644 index b04fc1138797..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package.py +++ /dev/null @@ -1,59 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .ssis_object_metadata import SsisObjectMetadata - - -class SsisPackage(SsisObjectMetadata): - """Ssis Package. - - All required parameters must be populated in order to send to Azure. - - :param id: Metadata id. - :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. - :type description: str - :param type: Required. Constant filled by server. - :type type: str - :param folder_id: Folder id which contains package. - :type folder_id: long - :param project_version: Project version which contains package. - :type project_version: long - :param project_id: Project id which contains package. 
- :type project_id: long - :param parameters: Parameters in package - :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_id': {'key': 'folderId', 'type': 'long'}, - 'project_version': {'key': 'projectVersion', 'type': 'long'}, - 'project_id': {'key': 'projectId', 'type': 'long'}, - 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, - } - - def __init__(self, **kwargs): - super(SsisPackage, self).__init__(**kwargs) - self.folder_id = kwargs.get('folder_id', None) - self.project_version = kwargs.get('project_version', None) - self.project_id = kwargs.get('project_id', None) - self.parameters = kwargs.get('parameters', None) - self.type = 'Package' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location.py deleted file mode 100644 index 248d0aa9b8ae..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location.py +++ /dev/null @@ -1,54 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SSISPackageLocation(Model): - """SSIS package location. - - All required parameters must be populated in order to send to Azure. - - :param package_path: Required. The SSIS package path. Type: string (or - Expression with resultType string). - :type package_path: object - :param type: The type of SSIS package location. Possible values include: - 'SSISDB', 'File' - :type type: str or ~azure.mgmt.datafactory.models.SsisPackageLocationType - :param package_password: Password of the package. - :type package_password: ~azure.mgmt.datafactory.models.SecureString - :param access_credential: The package access credential. - :type access_credential: - ~azure.mgmt.datafactory.models.SSISAccessCredential - :param configuration_path: The configuration file of the package - execution. Type: string (or Expression with resultType string). 
- :type configuration_path: object - """ - - _validation = { - 'package_path': {'required': True}, - } - - _attribute_map = { - 'package_path': {'key': 'packagePath', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'package_password': {'key': 'typeProperties.packagePassword', 'type': 'SecureString'}, - 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, - 'configuration_path': {'key': 'typeProperties.configurationPath', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SSISPackageLocation, self).__init__(**kwargs) - self.package_path = kwargs.get('package_path', None) - self.type = kwargs.get('type', None) - self.package_password = kwargs.get('package_password', None) - self.access_credential = kwargs.get('access_credential', None) - self.configuration_path = kwargs.get('configuration_path', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location_py3.py deleted file mode 100644 index cc442d8d35b8..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location_py3.py +++ /dev/null @@ -1,54 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SSISPackageLocation(Model): - """SSIS package location. - - All required parameters must be populated in order to send to Azure. - - :param package_path: Required. The SSIS package path. Type: string (or - Expression with resultType string). - :type package_path: object - :param type: The type of SSIS package location. Possible values include: - 'SSISDB', 'File' - :type type: str or ~azure.mgmt.datafactory.models.SsisPackageLocationType - :param package_password: Password of the package. - :type package_password: ~azure.mgmt.datafactory.models.SecureString - :param access_credential: The package access credential. - :type access_credential: - ~azure.mgmt.datafactory.models.SSISAccessCredential - :param configuration_path: The configuration file of the package - execution. Type: string (or Expression with resultType string). 
- :type configuration_path: object - """ - - _validation = { - 'package_path': {'required': True}, - } - - _attribute_map = { - 'package_path': {'key': 'packagePath', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'package_password': {'key': 'typeProperties.packagePassword', 'type': 'SecureString'}, - 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, - 'configuration_path': {'key': 'typeProperties.configurationPath', 'type': 'object'}, - } - - def __init__(self, *, package_path, type=None, package_password=None, access_credential=None, configuration_path=None, **kwargs) -> None: - super(SSISPackageLocation, self).__init__(**kwargs) - self.package_path = package_path - self.type = type - self.package_password = package_password - self.access_credential = access_credential - self.configuration_path = configuration_path diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_py3.py deleted file mode 100644 index e1e932e97ae6..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_py3.py +++ /dev/null @@ -1,59 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .ssis_object_metadata_py3 import SsisObjectMetadata - - -class SsisPackage(SsisObjectMetadata): - """Ssis Package. - - All required parameters must be populated in order to send to Azure. - - :param id: Metadata id. - :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. - :type description: str - :param type: Required. Constant filled by server. - :type type: str - :param folder_id: Folder id which contains package. - :type folder_id: long - :param project_version: Project version which contains package. - :type project_version: long - :param project_id: Project id which contains package. 
- :type project_id: long - :param parameters: Parameters in package - :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_id': {'key': 'folderId', 'type': 'long'}, - 'project_version': {'key': 'projectVersion', 'type': 'long'}, - 'project_id': {'key': 'projectId', 'type': 'long'}, - 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, - } - - def __init__(self, *, id: int=None, name: str=None, description: str=None, folder_id: int=None, project_version: int=None, project_id: int=None, parameters=None, **kwargs) -> None: - super(SsisPackage, self).__init__(id=id, name=name, description=description, **kwargs) - self.folder_id = folder_id - self.project_version = project_version - self.project_id = project_id - self.parameters = parameters - self.type = 'Package' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter.py deleted file mode 100644 index c456af0bab48..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SsisParameter(Model): - """Ssis parameter. - - :param id: Parameter id. - :type id: long - :param name: Parameter name. - :type name: str - :param description: Parameter description. - :type description: str - :param data_type: Parameter type. - :type data_type: str - :param required: Whether parameter is required. - :type required: bool - :param sensitive: Whether parameter is sensitive. - :type sensitive: bool - :param design_default_value: Design default value of parameter. - :type design_default_value: str - :param default_value: Default value of parameter. - :type default_value: str - :param sensitive_default_value: Default sensitive value of parameter. - :type sensitive_default_value: str - :param value_type: Parameter value type. - :type value_type: str - :param value_set: Parameter value set. - :type value_set: bool - :param variable: Parameter reference variable. 
- :type variable: str - """ - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'required': {'key': 'required', 'type': 'bool'}, - 'sensitive': {'key': 'sensitive', 'type': 'bool'}, - 'design_default_value': {'key': 'designDefaultValue', 'type': 'str'}, - 'default_value': {'key': 'defaultValue', 'type': 'str'}, - 'sensitive_default_value': {'key': 'sensitiveDefaultValue', 'type': 'str'}, - 'value_type': {'key': 'valueType', 'type': 'str'}, - 'value_set': {'key': 'valueSet', 'type': 'bool'}, - 'variable': {'key': 'variable', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(SsisParameter, self).__init__(**kwargs) - self.id = kwargs.get('id', None) - self.name = kwargs.get('name', None) - self.description = kwargs.get('description', None) - self.data_type = kwargs.get('data_type', None) - self.required = kwargs.get('required', None) - self.sensitive = kwargs.get('sensitive', None) - self.design_default_value = kwargs.get('design_default_value', None) - self.default_value = kwargs.get('default_value', None) - self.sensitive_default_value = kwargs.get('sensitive_default_value', None) - self.value_type = kwargs.get('value_type', None) - self.value_set = kwargs.get('value_set', None) - self.variable = kwargs.get('variable', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter_py3.py deleted file mode 100644 index 6a4ff73768f0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SsisParameter(Model): - """Ssis parameter. - - :param id: Parameter id. - :type id: long - :param name: Parameter name. - :type name: str - :param description: Parameter description. - :type description: str - :param data_type: Parameter type. - :type data_type: str - :param required: Whether parameter is required. - :type required: bool - :param sensitive: Whether parameter is sensitive. - :type sensitive: bool - :param design_default_value: Design default value of parameter. - :type design_default_value: str - :param default_value: Default value of parameter. - :type default_value: str - :param sensitive_default_value: Default sensitive value of parameter. - :type sensitive_default_value: str - :param value_type: Parameter value type. - :type value_type: str - :param value_set: Parameter value set. - :type value_set: bool - :param variable: Parameter reference variable. 
- :type variable: str - """ - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'required': {'key': 'required', 'type': 'bool'}, - 'sensitive': {'key': 'sensitive', 'type': 'bool'}, - 'design_default_value': {'key': 'designDefaultValue', 'type': 'str'}, - 'default_value': {'key': 'defaultValue', 'type': 'str'}, - 'sensitive_default_value': {'key': 'sensitiveDefaultValue', 'type': 'str'}, - 'value_type': {'key': 'valueType', 'type': 'str'}, - 'value_set': {'key': 'valueSet', 'type': 'bool'}, - 'variable': {'key': 'variable', 'type': 'str'}, - } - - def __init__(self, *, id: int=None, name: str=None, description: str=None, data_type: str=None, required: bool=None, sensitive: bool=None, design_default_value: str=None, default_value: str=None, sensitive_default_value: str=None, value_type: str=None, value_set: bool=None, variable: str=None, **kwargs) -> None: - super(SsisParameter, self).__init__(**kwargs) - self.id = id - self.name = name - self.description = description - self.data_type = data_type - self.required = required - self.sensitive = sensitive - self.design_default_value = design_default_value - self.default_value = default_value - self.sensitive_default_value = sensitive_default_value - self.value_type = value_type - self.value_set = value_set - self.variable = variable diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project.py deleted file mode 100644 index c29a36fb628e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project.py +++ /dev/null @@ -1,60 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .ssis_object_metadata import SsisObjectMetadata - - -class SsisProject(SsisObjectMetadata): - """Ssis project. - - All required parameters must be populated in order to send to Azure. - - :param id: Metadata id. - :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. - :type description: str - :param type: Required. Constant filled by server. - :type type: str - :param folder_id: Folder id which contains project. - :type folder_id: long - :param version: Project version. 
- :type version: long - :param environment_refs: Environment reference in project - :type environment_refs: - list[~azure.mgmt.datafactory.models.SsisEnvironmentReference] - :param parameters: Parameters in project - :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_id': {'key': 'folderId', 'type': 'long'}, - 'version': {'key': 'version', 'type': 'long'}, - 'environment_refs': {'key': 'environmentRefs', 'type': '[SsisEnvironmentReference]'}, - 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, - } - - def __init__(self, **kwargs): - super(SsisProject, self).__init__(**kwargs) - self.folder_id = kwargs.get('folder_id', None) - self.version = kwargs.get('version', None) - self.environment_refs = kwargs.get('environment_refs', None) - self.parameters = kwargs.get('parameters', None) - self.type = 'Project' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project_py3.py deleted file mode 100644 index 11b95a644e2f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project_py3.py +++ /dev/null @@ -1,60 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .ssis_object_metadata_py3 import SsisObjectMetadata - - -class SsisProject(SsisObjectMetadata): - """Ssis project. - - All required parameters must be populated in order to send to Azure. - - :param id: Metadata id. - :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. - :type description: str - :param type: Required. Constant filled by server. - :type type: str - :param folder_id: Folder id which contains project. - :type folder_id: long - :param version: Project version. 
- :type version: long - :param environment_refs: Environment reference in project - :type environment_refs: - list[~azure.mgmt.datafactory.models.SsisEnvironmentReference] - :param parameters: Parameters in project - :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_id': {'key': 'folderId', 'type': 'long'}, - 'version': {'key': 'version', 'type': 'long'}, - 'environment_refs': {'key': 'environmentRefs', 'type': '[SsisEnvironmentReference]'}, - 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, - } - - def __init__(self, *, id: int=None, name: str=None, description: str=None, folder_id: int=None, version: int=None, environment_refs=None, parameters=None, **kwargs) -> None: - super(SsisProject, self).__init__(id=id, name=name, description=description, **kwargs) - self.folder_id = folder_id - self.version = version - self.environment_refs = environment_refs - self.parameters = parameters - self.type = 'Project' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override.py deleted file mode 100644 index 30b78594e6ab..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override.py +++ /dev/null @@ -1,40 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SSISPropertyOverride(Model): - """SSIS property override. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. SSIS package property override value. Type: string - (or Expression with resultType string). - :type value: object - :param is_sensitive: Whether SSIS package property override value is - sensitive data. Value will be encrypted in SSISDB if it is true - :type is_sensitive: bool - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': 'object'}, - 'is_sensitive': {'key': 'isSensitive', 'type': 'bool'}, - } - - def __init__(self, **kwargs): - super(SSISPropertyOverride, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.is_sensitive = kwargs.get('is_sensitive', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override_py3.py deleted file mode 100644 index b425a19adc7e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override_py3.py +++ /dev/null @@ -1,40 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. 
-# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SSISPropertyOverride(Model): - """SSIS property override. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. SSIS package property override value. Type: string - (or Expression with resultType string). - :type value: object - :param is_sensitive: Whether SSIS package property override value is - sensitive data. Value will be encrypted in SSISDB if it is true - :type is_sensitive: bool - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': 'object'}, - 'is_sensitive': {'key': 'isSensitive', 'type': 'bool'}, - } - - def __init__(self, *, value, is_sensitive: bool=None, **kwargs) -> None: - super(SSISPropertyOverride, self).__init__(**kwargs) - self.value = value - self.is_sensitive = is_sensitive diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable.py deleted file mode 100644 index 73fda3b27967..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable.py +++ /dev/null @@ -1,52 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SsisVariable(Model): - """Ssis variable. - - :param id: Variable id. - :type id: long - :param name: Variable name. - :type name: str - :param description: Variable description. - :type description: str - :param data_type: Variable type. - :type data_type: str - :param sensitive: Whether variable is sensitive. - :type sensitive: bool - :param value: Variable value. - :type value: str - :param sensitive_value: Variable sensitive value. 
- :type sensitive_value: str - """ - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'sensitive': {'key': 'sensitive', 'type': 'bool'}, - 'value': {'key': 'value', 'type': 'str'}, - 'sensitive_value': {'key': 'sensitiveValue', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(SsisVariable, self).__init__(**kwargs) - self.id = kwargs.get('id', None) - self.name = kwargs.get('name', None) - self.description = kwargs.get('description', None) - self.data_type = kwargs.get('data_type', None) - self.sensitive = kwargs.get('sensitive', None) - self.value = kwargs.get('value', None) - self.sensitive_value = kwargs.get('sensitive_value', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable_py3.py deleted file mode 100644 index e709842ff465..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable_py3.py +++ /dev/null @@ -1,52 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SsisVariable(Model): - """Ssis variable. - - :param id: Variable id. - :type id: long - :param name: Variable name. - :type name: str - :param description: Variable description. - :type description: str - :param data_type: Variable type. - :type data_type: str - :param sensitive: Whether variable is sensitive. - :type sensitive: bool - :param value: Variable value. - :type value: str - :param sensitive_value: Variable sensitive value. - :type sensitive_value: str - """ - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'sensitive': {'key': 'sensitive', 'type': 'bool'}, - 'value': {'key': 'value', 'type': 'str'}, - 'sensitive_value': {'key': 'sensitiveValue', 'type': 'str'}, - } - - def __init__(self, *, id: int=None, name: str=None, description: str=None, data_type: str=None, sensitive: bool=None, value: str=None, sensitive_value: str=None, **kwargs) -> None: - super(SsisVariable, self).__init__(**kwargs) - self.id = id - self.name = name - self.description = description - self.data_type = data_type - self.sensitive = sensitive - self.value = value - self.sensitive_value = sensitive_value diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings.py deleted file mode 100644 index 05ca8dff2c52..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings.py +++ /dev/null @@ -1,51 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. 
-# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class StagingSettings(Model): - """Staging settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param linked_service_name: Required. Staging linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param path: The path to storage for storing the interim data. Type: - string (or Expression with resultType string). - :type path: object - :param enable_compression: Specifies whether to use compression when - copying data via an interim staging. Default value is false. Type: boolean - (or Expression with resultType boolean). - :type enable_compression: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'path': {'key': 'path', 'type': 'object'}, - 'enable_compression': {'key': 'enableCompression', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(StagingSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.linked_service_name = kwargs.get('linked_service_name', None) - self.path = kwargs.get('path', None) - self.enable_compression = kwargs.get('enable_compression', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings_py3.py deleted file mode 100644 index 13b4353963a3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings_py3.py +++ /dev/null @@ -1,51 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class StagingSettings(Model): - """Staging settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param linked_service_name: Required. Staging linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param path: The path to storage for storing the interim data. Type: - string (or Expression with resultType string). - :type path: object - :param enable_compression: Specifies whether to use compression when - copying data via an interim staging. Default value is false. 
Type: boolean - (or Expression with resultType boolean). - :type enable_compression: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'path': {'key': 'path', 'type': 'object'}, - 'enable_compression': {'key': 'enableCompression', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, path=None, enable_compression=None, **kwargs) -> None: - super(StagingSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.linked_service_name = linked_service_name - self.path = path - self.enable_compression = enable_compression diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings.py deleted file mode 100644 index c12c0ce8860d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class StoreReadSettings(Model): - """Connector read setting. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(StoreReadSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = kwargs.get('type', None) - self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings_py3.py deleted file mode 100644 index e2026fd52b93..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings_py3.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
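# A sketch of constructing the StagingSettings model above for a staged copy.
# LinkedServiceReference(reference_name=...) is assumed from the same models
# namespace; the service name and path are placeholders.
from azure.mgmt.datafactory.models import LinkedServiceReference, StagingSettings

staging = StagingSettings(
    linked_service_name=LinkedServiceReference(reference_name='MyBlobStaging'),
    path='staging-container/interim',
    enable_compression=True,
)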
-# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class StoreReadSettings(Model): - """Connector read setting. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, **kwargs) -> None: - super(StoreReadSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type = type - self.max_concurrent_connections = max_concurrent_connections diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings.py deleted file mode 100644 index aeaebc1a190e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings.py +++ /dev/null @@ -1,49 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class StoreWriteSettings(Model): - """Connector write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param copy_behavior: The type of copy behavior for copy sink. 
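# A sketch of the StoreReadSettings base model above. Concrete connectors
# normally supply the "type" discriminator via a subclass; the value used
# here is only an assumed example.
from azure.mgmt.datafactory.models import StoreReadSettings

read_settings = StoreReadSettings(
    type='AzureBlobFSReadSettings',  # assumed discriminator value
    max_concurrent_connections=4,
)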
- :type copy_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(StoreWriteSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = kwargs.get('type', None) - self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) - self.copy_behavior = kwargs.get('copy_behavior', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings_py3.py deleted file mode 100644 index d3dfa76faddc..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings_py3.py +++ /dev/null @@ -1,49 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class StoreWriteSettings(Model): - """Connector write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: - super(StoreWriteSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type = type - self.max_concurrent_connections = max_concurrent_connections - self.copy_behavior = copy_behavior diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py deleted file mode 100644 index ff16595aa8c7..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py +++ /dev/null @@ -1,35 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. 
All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class StoredProcedureParameter(Model): - """SQL stored procedure parameter. - - :param value: Stored procedure parameter value. Type: string (or - Expression with resultType string). - :type value: object - :param type: Stored procedure parameter type. Possible values include: - 'String', 'Int', 'Int64', 'Decimal', 'Guid', 'Boolean', 'Date' - :type type: str or - ~azure.mgmt.datafactory.models.StoredProcedureParameterType - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(StoredProcedureParameter, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.type = kwargs.get('type', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py deleted file mode 100644 index 2842ef9ae35c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py +++ /dev/null @@ -1,35 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class StoredProcedureParameter(Model): - """SQL stored procedure parameter. - - :param value: Stored procedure parameter value. Type: string (or - Expression with resultType string). - :type value: object - :param type: Stored procedure parameter type. Possible values include: - 'String', 'Int', 'Int64', 'Decimal', 'Guid', 'Boolean', 'Date' - :type type: str or - ~azure.mgmt.datafactory.models.StoredProcedureParameterType - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, *, value=None, type=None, **kwargs) -> None: - super(StoredProcedureParameter, self).__init__(**kwargs) - self.value = value - self.type = type diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource.py deleted file mode 100644 index c80b531db7d1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource.py +++ /dev/null @@ -1,50 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
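# A short sketch of the StoredProcedureParameter model above; the parameter
# name and value are placeholders, and 'Date' is one of the documented
# StoredProcedureParameterType values.
from azure.mgmt.datafactory.models import StoredProcedureParameter

sp_params = {
    'WindowStart': StoredProcedureParameter(value='2019-06-01', type='Date'),
}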
-# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SubResource(Model): - """Azure Data Factory nested resource, which belongs to a factory. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(SubResource, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.etag = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource_py3.py deleted file mode 100644 index 3b2d9ec62366..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource_py3.py +++ /dev/null @@ -1,50 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SubResource(Model): - """Azure Data Factory nested resource, which belongs to a factory. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - } - - def __init__(self, **kwargs) -> None: - super(SubResource, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.etag = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service.py deleted file mode 100644 index 83de0e6f61f2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service.py +++ /dev/null @@ -1,91 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
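# SubResource above exposes only server-populated, read-only metadata, so it
# is usually read from a deserialized response rather than built by hand; a
# minimal sketch:
from azure.mgmt.datafactory.models import SubResource

resource = SubResource()
assert resource.etag is None  # set only when the server returns the resource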
-# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class SybaseLinkedService(LinkedService): - """Linked service for Sybase data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param server: Required. Server name for connection. Type: string (or - Expression with resultType string). - :type server: object - :param database: Required. Database name for connection. Type: string (or - Expression with resultType string). - :type database: object - :param schema: Schema name for connection. Type: string (or Expression - with resultType string). - :type schema: object - :param authentication_type: AuthenticationType to be used for connection. - Possible values include: 'Basic', 'Windows' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.SybaseAuthenticationType - :param username: Username for authentication. Type: string (or Expression - with resultType string). - :type username: object - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'database': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SybaseLinkedService, self).__init__(**kwargs) - self.server = kwargs.get('server', None) - self.database = kwargs.get('database', None) - self.schema = kwargs.get('schema', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Sybase' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service_py3.py deleted file mode 100644 index 5b6cc0ce6ded..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service_py3.py +++ /dev/null @@ -1,91 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class SybaseLinkedService(LinkedService): - """Linked service for Sybase data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param server: Required. Server name for connection. Type: string (or - Expression with resultType string). - :type server: object - :param database: Required. Database name for connection. 
Type: string (or - Expression with resultType string). - :type database: object - :param schema: Schema name for connection. Type: string (or Expression - with resultType string). - :type schema: object - :param authentication_type: AuthenticationType to be used for connection. - Possible values include: 'Basic', 'Windows' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.SybaseAuthenticationType - :param username: Username for authentication. Type: string (or Expression - with resultType string). - :type username: object - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'database': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, server, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, schema=None, authentication_type=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(SybaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.server = server - self.database = database - self.schema = schema - self.authentication_type = authentication_type - self.username = username - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'Sybase' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source.py deleted file mode 100644 index 02f89a8fca25..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
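# A hedged sketch of the SybaseLinkedService defined above; the server,
# database and credentials are placeholders, and SecureString is assumed to
# be the in-line secret type from the same models namespace.
from azure.mgmt.datafactory.models import SecureString, SybaseLinkedService

sybase_ls = SybaseLinkedService(
    server='sybase01.contoso.local',
    database='Sales',
    authentication_type='Basic',
    username='etl_user',
    password=SecureString(value='<secret>'),
)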
-# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class SybaseSource(CopySource): - """A copy activity source for Sybase databases. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SybaseSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'SybaseSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source_py3.py deleted file mode 100644 index c11e96174349..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class SybaseSource(CopySource): - """A copy activity source for Sybase databases. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). 
- :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'SybaseSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset.py deleted file mode 100644 index ff2dfd5471fb..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class SybaseTableDataset(Dataset): - """The Sybase table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The Sybase table name. Type: string (or Expression with - resultType string). 
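# A minimal sketch of the SybaseSource above as it might appear in a copy
# activity; the query text is a placeholder.
from azure.mgmt.datafactory.models import SybaseSource

sybase_source = SybaseSource(query='select * from dbo.Orders')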
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SybaseTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'SybaseTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset_py3.py deleted file mode 100644 index 88e9d3c287fe..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class SybaseTableDataset(Dataset): - """The Sybase table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The Sybase table name. Type: string (or Expression with - resultType string). 
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(SybaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'SybaseTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py deleted file mode 100644 index 6e02b0d389ab..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py +++ /dev/null @@ -1,84 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class TeradataLinkedService(LinkedService): - """Linked service for Teradata data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Teradata ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param server: Server name for connection. Type: string (or Expression - with resultType string). - :type server: object - :param authentication_type: AuthenticationType to be used for connection. 
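# A sketch of the SybaseTableDataset above; the linked service name and
# table name are placeholders, and LinkedServiceReference is assumed from
# the same models namespace.
from azure.mgmt.datafactory.models import LinkedServiceReference, SybaseTableDataset

sybase_table = SybaseTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='SybaseLS'),
    table_name='dbo.Orders',
)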
- Possible values include: 'Basic', 'Windows' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.TeradataAuthenticationType - :param username: Username for authentication. Type: string (or Expression - with resultType string). - :type username: object - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(TeradataLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.server = kwargs.get('server', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Teradata' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py deleted file mode 100644 index aac40efe69e0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py +++ /dev/null @@ -1,84 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class TeradataLinkedService(LinkedService): - """Linked service for Teradata data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Teradata ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param server: Server name for connection. Type: string (or Expression - with resultType string). - :type server: object - :param authentication_type: AuthenticationType to be used for connection. - Possible values include: 'Basic', 'Windows' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.TeradataAuthenticationType - :param username: Username for authentication. Type: string (or Expression - with resultType string). - :type username: object - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, server=None, authentication_type=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(TeradataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.server = server - self.authentication_type = authentication_type - self.username = username - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'Teradata' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings.py deleted file mode 100644 index 0f9c023f9553..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings.py +++ /dev/null @@ -1,42 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class TeradataPartitionSettings(Model): - """The settings that will be leveraged for teradata source partitioning. - - :param partition_column_name: The name of the column that will be used for - proceeding range or hash partitioning. Type: string (or Expression with - resultType string). - :type partition_column_name: object - :param partition_upper_bound: The maximum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_upper_bound: object - :param partition_lower_bound: The minimum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_lower_bound: object - """ - - _attribute_map = { - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, - 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, - 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(TeradataPartitionSettings, self).__init__(**kwargs) - self.partition_column_name = kwargs.get('partition_column_name', None) - self.partition_upper_bound = kwargs.get('partition_upper_bound', None) - self.partition_lower_bound = kwargs.get('partition_lower_bound', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings_py3.py deleted file mode 100644 index 04824e614ff2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings_py3.py +++ /dev/null @@ -1,42 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class TeradataPartitionSettings(Model): - """The settings that will be leveraged for teradata source partitioning. - - :param partition_column_name: The name of the column that will be used for - proceeding range or hash partitioning. Type: string (or Expression with - resultType string). - :type partition_column_name: object - :param partition_upper_bound: The maximum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_upper_bound: object - :param partition_lower_bound: The minimum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). 
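# A hedged sketch of the TeradataLinkedService defined above, using the
# server/authentication properties rather than a raw ODBC connection string;
# all values are placeholders.
from azure.mgmt.datafactory.models import SecureString, TeradataLinkedService

teradata_ls = TeradataLinkedService(
    server='teradata01.contoso.local',
    authentication_type='Basic',
    username='loader',
    password=SecureString(value='<secret>'),
)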
- :type partition_lower_bound: object - """ - - _attribute_map = { - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, - 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, - 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, - } - - def __init__(self, *, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None: - super(TeradataPartitionSettings, self).__init__(**kwargs) - self.partition_column_name = partition_column_name - self.partition_upper_bound = partition_upper_bound - self.partition_lower_bound = partition_lower_bound diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source.py deleted file mode 100644 index 81d1c8e202c1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source.py +++ /dev/null @@ -1,70 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class TeradataSource(CopySource): - """A copy activity Teradata source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Teradata query. Type: string (or Expression with resultType - string). - :type query: object - :param partition_option: The partition mechanism that will be used for - teradata read in parallel. Possible values include: 'None', 'Hash', - 'DynamicRange' - :type partition_option: str or - ~azure.mgmt.datafactory.models.TeradataPartitionOption - :param partition_settings: The settings that will be leveraged for - teradata source partitioning. 
- :type partition_settings: - ~azure.mgmt.datafactory.models.TeradataPartitionSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, - } - - def __init__(self, **kwargs): - super(TeradataSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.partition_option = kwargs.get('partition_option', None) - self.partition_settings = kwargs.get('partition_settings', None) - self.type = 'TeradataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source_py3.py deleted file mode 100644 index 79d8ccb01f14..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source_py3.py +++ /dev/null @@ -1,70 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class TeradataSource(CopySource): - """A copy activity Teradata source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Teradata query. Type: string (or Expression with resultType - string). - :type query: object - :param partition_option: The partition mechanism that will be used for - teradata read in parallel. Possible values include: 'None', 'Hash', - 'DynamicRange' - :type partition_option: str or - ~azure.mgmt.datafactory.models.TeradataPartitionOption - :param partition_settings: The settings that will be leveraged for - teradata source partitioning. 
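A minimal usage sketch of the two Teradata classes deleted above, assuming they remain importable from the consolidated azure.mgmt.datafactory.models namespace; the query, column, and bound values are illustrative.

from azure.mgmt.datafactory.models import (
    TeradataPartitionSettings, TeradataSource)

# Range-partition the read on an integer column; the bounds are
# Expression-typed (declared as `object`), so plain strings are accepted.
settings = TeradataPartitionSettings(
    partition_column_name='order_id',
    partition_lower_bound='1',
    partition_upper_bound='1000000')

source = TeradataSource(
    query='SELECT * FROM orders',
    partition_option='DynamicRange',  # 'None', 'Hash', or 'DynamicRange'
    partition_settings=settings)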
- :type partition_settings: - ~azure.mgmt.datafactory.models.TeradataPartitionSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, partition_option=None, partition_settings=None, **kwargs) -> None: - super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.partition_option = partition_option - self.partition_settings = partition_settings - self.type = 'TeradataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset.py deleted file mode 100644 index e396bfd6fb15..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset.py +++ /dev/null @@ -1,77 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class TeradataTableDataset(Dataset): - """The Teradata database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. 
Constant filled by server. - :type type: str - :param database: The database name of Teradata. Type: string (or - Expression with resultType string). - :type database: object - :param table: The table name of Teradata. Type: string (or Expression with - resultType string). - :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(TeradataTableDataset, self).__init__(**kwargs) - self.database = kwargs.get('database', None) - self.table = kwargs.get('table', None) - self.type = 'TeradataTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset_py3.py deleted file mode 100644 index 892707b7f133..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset_py3.py +++ /dev/null @@ -1,77 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class TeradataTableDataset(Dataset): - """The Teradata database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. 
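A minimal sketch of constructing the dataset above, assuming LinkedServiceReference (defined elsewhere in this package) takes a reference_name keyword; the linked-service, database, and table names are illustrative.

from azure.mgmt.datafactory.models import (
    LinkedServiceReference, TeradataTableDataset)

dataset = TeradataTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='TeradataLS'),
    database='sales',   # Expression-typed, so a plain string is accepted
    table='orders')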
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param database: The database name of Teradata. Type: string (or - Expression with resultType string). - :type database: object - :param table: The table name of Teradata. Type: string (or Expression with - resultType string). - :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, database=None, table=None, **kwargs) -> None: - super(TeradataTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.database = database - self.table = table - self.type = 'TeradataTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format.py deleted file mode 100644 index 48f32bf10133..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format.py +++ /dev/null @@ -1,99 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_storage_format import DatasetStorageFormat - - -class TextFormat(DatasetStorageFormat): - """The data stored in text format. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param serializer: Serializer. Type: string (or Expression with resultType - string). - :type serializer: object - :param deserializer: Deserializer. Type: string (or Expression with - resultType string). - :type deserializer: object - :param type: Required. Constant filled by server. - :type type: str - :param column_delimiter: The column delimiter. Type: string (or Expression - with resultType string). - :type column_delimiter: object - :param row_delimiter: The row delimiter. Type: string (or Expression with - resultType string). 
- :type row_delimiter: object
- :param escape_char: The escape character. Type: string (or Expression with
- resultType string).
- :type escape_char: object
- :param quote_char: The quote character. Type: string (or Expression with
- resultType string).
- :type quote_char: object
- :param null_value: The null value string. Type: string (or Expression with
- resultType string).
- :type null_value: object
- :param encoding_name: The code page name of the preferred encoding. If
- missing, the default value is "utf-8", unless BOM denotes another Unicode
- encoding. Refer to the "Name" column of the table in the following
- link to set supported values:
- https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string
- (or Expression with resultType string).
- :type encoding_name: object
- :param treat_empty_as_null: Treat empty column values in the text file as
- null. The default value is true. Type: boolean (or Expression with
- resultType boolean).
- :type treat_empty_as_null: object
- :param skip_line_count: The number of lines/rows to be skipped when
- parsing text files. The default value is 0. Type: integer (or Expression
- with resultType integer).
- :type skip_line_count: object
- :param first_row_as_header: When used as input, treat the first row of
- data as headers. When used as output, write the headers into the output as
- the first row of data. The default value is false. Type: boolean (or
- Expression with resultType boolean).
- :type first_row_as_header: object
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'serializer': {'key': 'serializer', 'type': 'object'},
- 'deserializer': {'key': 'deserializer', 'type': 'object'},
- 'type': {'key': 'type', 'type': 'str'},
- 'column_delimiter': {'key': 'columnDelimiter', 'type': 'object'},
- 'row_delimiter': {'key': 'rowDelimiter', 'type': 'object'},
- 'escape_char': {'key': 'escapeChar', 'type': 'object'},
- 'quote_char': {'key': 'quoteChar', 'type': 'object'},
- 'null_value': {'key': 'nullValue', 'type': 'object'},
- 'encoding_name': {'key': 'encodingName', 'type': 'object'},
- 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'},
- 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'},
- 'first_row_as_header': {'key': 'firstRowAsHeader', 'type': 'object'},
- }
-
- def __init__(self, **kwargs):
- super(TextFormat, self).__init__(**kwargs)
- self.column_delimiter = kwargs.get('column_delimiter', None)
- self.row_delimiter = kwargs.get('row_delimiter', None)
- self.escape_char = kwargs.get('escape_char', None)
- self.quote_char = kwargs.get('quote_char', None)
- self.null_value = kwargs.get('null_value', None)
- self.encoding_name = kwargs.get('encoding_name', None)
- self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None)
- self.skip_line_count = kwargs.get('skip_line_count', None)
- self.first_row_as_header = kwargs.get('first_row_as_header', None)
- self.type = 'TextFormat'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format_py3.py
deleted file mode 100644
index 0d876f62b112..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format_py3.py
+++ /dev/null
@@ -1,99 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation.
All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .dataset_storage_format_py3 import DatasetStorageFormat
-
-
-class TextFormat(DatasetStorageFormat):
- """The data stored in text format.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param serializer: Serializer. Type: string (or Expression with resultType
- string).
- :type serializer: object
- :param deserializer: Deserializer. Type: string (or Expression with
- resultType string).
- :type deserializer: object
- :param type: Required. Constant filled by server.
- :type type: str
- :param column_delimiter: The column delimiter. Type: string (or Expression
- with resultType string).
- :type column_delimiter: object
- :param row_delimiter: The row delimiter. Type: string (or Expression with
- resultType string).
- :type row_delimiter: object
- :param escape_char: The escape character. Type: string (or Expression with
- resultType string).
- :type escape_char: object
- :param quote_char: The quote character. Type: string (or Expression with
- resultType string).
- :type quote_char: object
- :param null_value: The null value string. Type: string (or Expression with
- resultType string).
- :type null_value: object
- :param encoding_name: The code page name of the preferred encoding. If
- missing, the default value is "utf-8", unless BOM denotes another Unicode
- encoding. Refer to the "Name" column of the table in the following
- link to set supported values:
- https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string
- (or Expression with resultType string).
- :type encoding_name: object
- :param treat_empty_as_null: Treat empty column values in the text file as
- null. The default value is true. Type: boolean (or Expression with
- resultType boolean).
- :type treat_empty_as_null: object
- :param skip_line_count: The number of lines/rows to be skipped when
- parsing text files. The default value is 0. Type: integer (or Expression
- with resultType integer).
- :type skip_line_count: object
- :param first_row_as_header: When used as input, treat the first row of
- data as headers. When used as output, write the headers into the output as
- the first row of data. The default value is false. Type: boolean (or
- Expression with resultType boolean).
- :type first_row_as_header: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'column_delimiter': {'key': 'columnDelimiter', 'type': 'object'}, - 'row_delimiter': {'key': 'rowDelimiter', 'type': 'object'}, - 'escape_char': {'key': 'escapeChar', 'type': 'object'}, - 'quote_char': {'key': 'quoteChar', 'type': 'object'}, - 'null_value': {'key': 'nullValue', 'type': 'object'}, - 'encoding_name': {'key': 'encodingName', 'type': 'object'}, - 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, - 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, - 'first_row_as_header': {'key': 'firstRowAsHeader', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, column_delimiter=None, row_delimiter=None, escape_char=None, quote_char=None, null_value=None, encoding_name=None, treat_empty_as_null=None, skip_line_count=None, first_row_as_header=None, **kwargs) -> None: - super(TextFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) - self.column_delimiter = column_delimiter - self.row_delimiter = row_delimiter - self.escape_char = escape_char - self.quote_char = quote_char - self.null_value = null_value - self.encoding_name = encoding_name - self.treat_empty_as_null = treat_empty_as_null - self.skip_line_count = skip_line_count - self.first_row_as_header = first_row_as_header - self.type = 'TextFormat' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py deleted file mode 100644 index 728ffc32bcb5..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py +++ /dev/null @@ -1,68 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class Trigger(Model): - """Azure data factory nested object which contains information about creating - pipeline run. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: RerunTumblingWindowTrigger, TumblingWindowTrigger, - MultiplePipelineTrigger - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when - Start/Stop APIs are called on the Trigger. 
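A minimal sketch of the TextFormat defaults documented above, assuming the class stays exported from the models namespace; all values are illustrative.

from azure.mgmt.datafactory.models import TextFormat

fmt = TextFormat(
    column_delimiter=',',
    row_delimiter='\n',
    quote_char='"',
    encoding_name='utf-8',      # the documented default when omitted
    treat_empty_as_null=True,   # matches the documented default of true
    first_row_as_header=True)   # overrides the documented default of false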
Possible values include: - 'Started', 'Stopped', 'Disabled' - :vartype runtime_state: str or - ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the - trigger. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'runtime_state': {'readonly': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} - } - - def __init__(self, **kwargs): - super(Trigger, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.description = kwargs.get('description', None) - self.runtime_state = None - self.annotations = kwargs.get('annotations', None) - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference.py deleted file mode 100644 index 089aa9a3e5fc..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dependency_reference import DependencyReference - - -class TriggerDependencyReference(DependencyReference): - """Trigger referenced dependency. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: TumblingWindowTriggerDependencyReference - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. - :type type: str - :param reference_trigger: Required. Referenced trigger. 
- :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference - """ - - _validation = { - 'type': {'required': True}, - 'reference_trigger': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'}, - } - - _subtype_map = { - 'type': {'TumblingWindowTriggerDependencyReference': 'TumblingWindowTriggerDependencyReference'} - } - - def __init__(self, **kwargs): - super(TriggerDependencyReference, self).__init__(**kwargs) - self.reference_trigger = kwargs.get('reference_trigger', None) - self.type = 'TriggerDependencyReference' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference_py3.py deleted file mode 100644 index 716a0d926f8b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference_py3.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dependency_reference_py3 import DependencyReference - - -class TriggerDependencyReference(DependencyReference): - """Trigger referenced dependency. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: TumblingWindowTriggerDependencyReference - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. - :type type: str - :param reference_trigger: Required. Referenced trigger. - :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference - """ - - _validation = { - 'type': {'required': True}, - 'reference_trigger': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'}, - } - - _subtype_map = { - 'type': {'TumblingWindowTriggerDependencyReference': 'TumblingWindowTriggerDependencyReference'} - } - - def __init__(self, *, reference_trigger, **kwargs) -> None: - super(TriggerDependencyReference, self).__init__(**kwargs) - self.reference_trigger = reference_trigger - self.type = 'TriggerDependencyReference' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference.py deleted file mode 100644 index 70c9f2904347..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference.py +++ /dev/null @@ -1,32 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class TriggerPipelineReference(Model): - """Pipeline that needs to be triggered with the given parameters. - - :param pipeline_reference: Pipeline reference. - :type pipeline_reference: ~azure.mgmt.datafactory.models.PipelineReference - :param parameters: Pipeline parameters. - :type parameters: dict[str, object] - """ - - _attribute_map = { - 'pipeline_reference': {'key': 'pipelineReference', 'type': 'PipelineReference'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, - } - - def __init__(self, **kwargs): - super(TriggerPipelineReference, self).__init__(**kwargs) - self.pipeline_reference = kwargs.get('pipeline_reference', None) - self.parameters = kwargs.get('parameters', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference_py3.py deleted file mode 100644 index e32af8006326..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference_py3.py +++ /dev/null @@ -1,32 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class TriggerPipelineReference(Model): - """Pipeline that needs to be triggered with the given parameters. - - :param pipeline_reference: Pipeline reference. - :type pipeline_reference: ~azure.mgmt.datafactory.models.PipelineReference - :param parameters: Pipeline parameters. - :type parameters: dict[str, object] - """ - - _attribute_map = { - 'pipeline_reference': {'key': 'pipelineReference', 'type': 'PipelineReference'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, - } - - def __init__(self, *, pipeline_reference=None, parameters=None, **kwargs) -> None: - super(TriggerPipelineReference, self).__init__(**kwargs) - self.pipeline_reference = pipeline_reference - self.parameters = parameters diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py deleted file mode 100644 index 862973544ab4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py +++ /dev/null @@ -1,68 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class Trigger(Model): - """Azure data factory nested object which contains information about creating - pipeline run. 
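The `_subtype_map` on Trigger above is what lets msrest resolve the wire-side `type` discriminator to a concrete subclass during deserialization. Below is a minimal sketch of the pipeline-reference wrapper, assuming PipelineReference (defined elsewhere in this package) takes a reference_name keyword; the pipeline name and parameter are illustrative.

from azure.mgmt.datafactory.models import (
    PipelineReference, TriggerPipelineReference)

ref = TriggerPipelineReference(
    pipeline_reference=PipelineReference(reference_name='CopyOrdersPipeline'),
    parameters={'windowStart': '2019-06-01T00:00:00Z'})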
- - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: RerunTumblingWindowTrigger, TumblingWindowTrigger, - MultiplePipelineTrigger - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when - Start/Stop APIs are called on the Trigger. Possible values include: - 'Started', 'Stopped', 'Disabled' - :vartype runtime_state: str or - ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the - trigger. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'runtime_state': {'readonly': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} - } - - def __init__(self, *, additional_properties=None, description: str=None, annotations=None, **kwargs) -> None: - super(Trigger, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.description = description - self.runtime_state = None - self.annotations = annotations - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference.py deleted file mode 100644 index a4f952dac85f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference.py +++ /dev/null @@ -1,44 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class TriggerReference(Model): - """Trigger reference type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Trigger reference type. Default value: - "TriggerReference" . - :vartype type: str - :param reference_name: Required. Reference trigger name. 
- :type reference_name: str - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - } - - type = "TriggerReference" - - def __init__(self, **kwargs): - super(TriggerReference, self).__init__(**kwargs) - self.reference_name = kwargs.get('reference_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference_py3.py deleted file mode 100644 index 805e407e80a7..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference_py3.py +++ /dev/null @@ -1,44 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class TriggerReference(Model): - """Trigger reference type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Trigger reference type. Default value: - "TriggerReference" . - :vartype type: str - :param reference_name: Required. Reference trigger name. - :type reference_name: str - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - } - - type = "TriggerReference" - - def __init__(self, *, reference_name: str, **kwargs) -> None: - super(TriggerReference, self).__init__(**kwargs) - self.reference_name = reference_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource.py deleted file mode 100644 index 539ac4775350..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource.py +++ /dev/null @@ -1,53 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .sub_resource import SubResource - - -class TriggerResource(SubResource): - """Trigger resource type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. 
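A minimal sketch of TriggerReference above: `type` is a constant class attribute, so only the trigger name (illustrative here) is supplied.

from azure.mgmt.datafactory.models import TriggerReference

ref = TriggerReference(reference_name='DailyTrigger')
assert ref.type == 'TriggerReference'  # class-level constant, not a parameter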
- :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Properties of the trigger. - :type properties: ~azure.mgmt.datafactory.models.Trigger - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'Trigger'}, - } - - def __init__(self, **kwargs): - super(TriggerResource, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource_paged.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource_paged.py deleted file mode 100644 index 1a7a003f4a6e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource_paged.py +++ /dev/null @@ -1,27 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.paging import Paged - - -class TriggerResourcePaged(Paged): - """ - A paging container for iterating over a list of :class:`TriggerResource ` object - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'current_page': {'key': 'value', 'type': '[TriggerResource]'} - } - - def __init__(self, *args, **kwargs): - - super(TriggerResourcePaged, self).__init__(*args, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource_py3.py deleted file mode 100644 index ae6a04ac3128..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource_py3.py +++ /dev/null @@ -1,53 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .sub_resource_py3 import SubResource - - -class TriggerResource(SubResource): - """Trigger resource type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Properties of the trigger. 
- :type properties: ~azure.mgmt.datafactory.models.Trigger - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'Trigger'}, - } - - def __init__(self, *, properties, **kwargs) -> None: - super(TriggerResource, self).__init__(**kwargs) - self.properties = properties diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run.py deleted file mode 100644 index 9fad7bbfd9fa..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run.py +++ /dev/null @@ -1,78 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class TriggerRun(Model): - """Trigger runs. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar trigger_run_id: Trigger run id. - :vartype trigger_run_id: str - :ivar trigger_name: Trigger name. - :vartype trigger_name: str - :ivar trigger_type: Trigger type. - :vartype trigger_type: str - :ivar trigger_run_timestamp: Trigger run start time. - :vartype trigger_run_timestamp: datetime - :ivar status: Trigger run status. Possible values include: 'Succeeded', - 'Failed', 'Inprogress' - :vartype status: str or ~azure.mgmt.datafactory.models.TriggerRunStatus - :ivar message: Trigger error message. - :vartype message: str - :ivar properties: List of property name and value related to trigger run. - Name, value pair depends on type of trigger. - :vartype properties: dict[str, str] - :ivar triggered_pipelines: List of pipeline name and run Id triggered by - the trigger run. 
- :vartype triggered_pipelines: dict[str, str] - """ - - _validation = { - 'trigger_run_id': {'readonly': True}, - 'trigger_name': {'readonly': True}, - 'trigger_type': {'readonly': True}, - 'trigger_run_timestamp': {'readonly': True}, - 'status': {'readonly': True}, - 'message': {'readonly': True}, - 'properties': {'readonly': True}, - 'triggered_pipelines': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'trigger_run_id': {'key': 'triggerRunId', 'type': 'str'}, - 'trigger_name': {'key': 'triggerName', 'type': 'str'}, - 'trigger_type': {'key': 'triggerType', 'type': 'str'}, - 'trigger_run_timestamp': {'key': 'triggerRunTimestamp', 'type': 'iso-8601'}, - 'status': {'key': 'status', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'triggered_pipelines': {'key': 'triggeredPipelines', 'type': '{str}'}, - } - - def __init__(self, **kwargs): - super(TriggerRun, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.trigger_run_id = None - self.trigger_name = None - self.trigger_type = None - self.trigger_run_timestamp = None - self.status = None - self.message = None - self.properties = None - self.triggered_pipelines = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run_py3.py deleted file mode 100644 index 5a9fe50f6894..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run_py3.py +++ /dev/null @@ -1,78 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class TriggerRun(Model): - """Trigger runs. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar trigger_run_id: Trigger run id. - :vartype trigger_run_id: str - :ivar trigger_name: Trigger name. - :vartype trigger_name: str - :ivar trigger_type: Trigger type. - :vartype trigger_type: str - :ivar trigger_run_timestamp: Trigger run start time. - :vartype trigger_run_timestamp: datetime - :ivar status: Trigger run status. Possible values include: 'Succeeded', - 'Failed', 'Inprogress' - :vartype status: str or ~azure.mgmt.datafactory.models.TriggerRunStatus - :ivar message: Trigger error message. - :vartype message: str - :ivar properties: List of property name and value related to trigger run. - Name, value pair depends on type of trigger. - :vartype properties: dict[str, str] - :ivar triggered_pipelines: List of pipeline name and run Id triggered by - the trigger run. 
- :vartype triggered_pipelines: dict[str, str] - """ - - _validation = { - 'trigger_run_id': {'readonly': True}, - 'trigger_name': {'readonly': True}, - 'trigger_type': {'readonly': True}, - 'trigger_run_timestamp': {'readonly': True}, - 'status': {'readonly': True}, - 'message': {'readonly': True}, - 'properties': {'readonly': True}, - 'triggered_pipelines': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'trigger_run_id': {'key': 'triggerRunId', 'type': 'str'}, - 'trigger_name': {'key': 'triggerName', 'type': 'str'}, - 'trigger_type': {'key': 'triggerType', 'type': 'str'}, - 'trigger_run_timestamp': {'key': 'triggerRunTimestamp', 'type': 'iso-8601'}, - 'status': {'key': 'status', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'triggered_pipelines': {'key': 'triggeredPipelines', 'type': '{str}'}, - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(TriggerRun, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.trigger_run_id = None - self.trigger_name = None - self.trigger_type = None - self.trigger_run_timestamp = None - self.status = None - self.message = None - self.properties = None - self.triggered_pipelines = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response.py deleted file mode 100644 index 7684fe7eb7dc..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response.py +++ /dev/null @@ -1,39 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class TriggerRunsQueryResponse(Model): - """A list of trigger runs. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of trigger runs. - :type value: list[~azure.mgmt.datafactory.models.TriggerRun] - :param continuation_token: The continuation token for getting the next - page of results, if any remaining results exist, null otherwise. 
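Every field on TriggerRun above is read-only, so instances normally come back from the service rather than being built from keyword arguments. A minimal sketch, assuming msrest's Model.deserialize and illustrative payload values:

from azure.mgmt.datafactory.models import TriggerRun

payload = {
    'triggerRunId': 'example-run-id',
    'triggerName': 'DailyTrigger',
    'status': 'Succeeded',
}
run = TriggerRun.deserialize(payload)  # read-only attrs are filled from the wire keys
print(run.trigger_name, run.status)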
- :type continuation_token: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[TriggerRun]'}, - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(TriggerRunsQueryResponse, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.continuation_token = kwargs.get('continuation_token', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response_py3.py deleted file mode 100644 index 391a2441b3d1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response_py3.py +++ /dev/null @@ -1,39 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class TriggerRunsQueryResponse(Model): - """A list of trigger runs. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of trigger runs. - :type value: list[~azure.mgmt.datafactory.models.TriggerRun] - :param continuation_token: The continuation token for getting the next - page of results, if any remaining results exist, null otherwise. - :type continuation_token: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[TriggerRun]'}, - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, - } - - def __init__(self, *, value, continuation_token: str=None, **kwargs) -> None: - super(TriggerRunsQueryResponse, self).__init__(**kwargs) - self.value = value - self.continuation_token = continuation_token diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger.py deleted file mode 100644 index 939624ae5042..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger.py +++ /dev/null @@ -1,112 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .trigger import Trigger - - -class TumblingWindowTrigger(Trigger): - """Trigger that schedules pipeline runs for all fixed time interval windows - from a start time without gaps and also supports backfill scenarios (when - start time is in the past). - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. 
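A paging sketch for the query response above. The `trigger_runs.query_by_factory` operation, the RunFilterParameters model, and the pre-built `client` are assumptions not shown in this patch; resource names and dates are illustrative.

import datetime

from azure.mgmt.datafactory.models import RunFilterParameters

token = None
while True:
    # `client` is assumed to be an already-constructed DataFactoryManagementClient.
    page = client.trigger_runs.query_by_factory(
        'example-rg', 'example-factory',
        RunFilterParameters(
            last_updated_after=datetime.datetime(2019, 6, 1),
            last_updated_before=datetime.datetime(2019, 6, 2),
            continuation_token=token))
    for run in page.value:
        print(run.trigger_name, run.status)
    token = page.continuation_token
    if not token:  # null once no results remain
        break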
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when - Start/Stop APIs are called on the Trigger. Possible values include: - 'Started', 'Stopped', 'Disabled' - :vartype runtime_state: str or - ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the - trigger. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param pipeline: Required. Pipeline for which runs are created when an - event is fired for trigger window that is ready. - :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference - :param frequency: Required. The frequency of the time windows. Possible - values include: 'Minute', 'Hour' - :type frequency: str or - ~azure.mgmt.datafactory.models.TumblingWindowFrequency - :param interval: Required. The interval of the time windows. The minimum - interval allowed is 15 Minutes. - :type interval: int - :param start_time: Required. The start time for the time period for the - trigger during which events are fired for windows that are ready. Only UTC - time is currently supported. - :type start_time: datetime - :param end_time: The end time for the time period for the trigger during - which events are fired for windows that are ready. Only UTC time is - currently supported. - :type end_time: datetime - :param delay: Specifies how long the trigger waits past due time before - triggering new run. It doesn't alter window start and end time. The - default is 0. Type: string (or Expression with resultType string), - pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type delay: object - :param max_concurrency: Required. The max number of parallel time windows - (ready for execution) for which a new run is triggered. - :type max_concurrency: int - :param retry_policy: Retry policy that will be applied for failed pipeline - runs. - :type retry_policy: ~azure.mgmt.datafactory.models.RetryPolicy - :param depends_on: Triggers that this trigger depends on. Only tumbling - window triggers are supported. 
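A minimal construction sketch for the tumbling window trigger documented above, wrapped in the TriggerResource shown earlier; pipeline and trigger values are illustrative.

import datetime

from azure.mgmt.datafactory.models import (
    PipelineReference, TriggerPipelineReference, TriggerResource,
    TumblingWindowTrigger)

trigger = TumblingWindowTrigger(
    pipeline=TriggerPipelineReference(
        pipeline_reference=PipelineReference(reference_name='CopyOrdersPipeline')),
    frequency='Hour',                          # 'Minute' or 'Hour'
    interval=1,                                # window size; minimum allowed is 15 minutes
    start_time=datetime.datetime(2019, 6, 1),  # UTC; a past start enables backfill
    max_concurrency=10)                        # validated to the 1..50 range

resource = TriggerResource(properties=trigger)  # id/name/etag are server-populated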
- :type depends_on: list[~azure.mgmt.datafactory.models.DependencyReference] - """ - - _validation = { - 'runtime_state': {'readonly': True}, - 'type': {'required': True}, - 'pipeline': {'required': True}, - 'frequency': {'required': True}, - 'interval': {'required': True}, - 'start_time': {'required': True}, - 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, - 'frequency': {'key': 'typeProperties.frequency', 'type': 'str'}, - 'interval': {'key': 'typeProperties.interval', 'type': 'int'}, - 'start_time': {'key': 'typeProperties.startTime', 'type': 'iso-8601'}, - 'end_time': {'key': 'typeProperties.endTime', 'type': 'iso-8601'}, - 'delay': {'key': 'typeProperties.delay', 'type': 'object'}, - 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, - 'retry_policy': {'key': 'typeProperties.retryPolicy', 'type': 'RetryPolicy'}, - 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[DependencyReference]'}, - } - - def __init__(self, **kwargs): - super(TumblingWindowTrigger, self).__init__(**kwargs) - self.pipeline = kwargs.get('pipeline', None) - self.frequency = kwargs.get('frequency', None) - self.interval = kwargs.get('interval', None) - self.start_time = kwargs.get('start_time', None) - self.end_time = kwargs.get('end_time', None) - self.delay = kwargs.get('delay', None) - self.max_concurrency = kwargs.get('max_concurrency', None) - self.retry_policy = kwargs.get('retry_policy', None) - self.depends_on = kwargs.get('depends_on', None) - self.type = 'TumblingWindowTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference.py deleted file mode 100644 index 89dcefbc8c09..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference.py +++ /dev/null @@ -1,50 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .trigger_dependency_reference import TriggerDependencyReference - - -class TumblingWindowTriggerDependencyReference(TriggerDependencyReference): - """Referenced tumbling window trigger dependency. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. - :type type: str - :param reference_trigger: Required. Referenced trigger. - :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference - :param offset: Timespan applied to the start time of a tumbling window - when evaluating dependency. - :type offset: str - :param size: The size of the window when evaluating the dependency. 
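
As a usage sketch for the TumblingWindowTrigger defined above: hourly-frequency windows of 24 hours each, starting in the past so earlier windows are backfilled. The PipelineReference and TriggerPipelineReference models are assumed from this same package, and all names and settings are illustrative.

from datetime import datetime

from azure.mgmt.datafactory.models import (
    PipelineReference, TriggerPipelineReference, TumblingWindowTrigger)

# One 24-hour window per day; max_concurrency must stay within 1..50.
trigger = TumblingWindowTrigger(
    pipeline=TriggerPipelineReference(
        pipeline_reference=PipelineReference(reference_name='examplePipeline')),
    frequency='Hour',
    interval=24,
    start_time=datetime(2019, 6, 1),
    delay='00:10:00',  # wait 10 minutes past each window's due time
    max_concurrency=10)
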
If - undefined the frequency of the tumbling window will be used. - :type size: str - """ - - _validation = { - 'type': {'required': True}, - 'reference_trigger': {'required': True}, - 'offset': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, - 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'}, - 'offset': {'key': 'offset', 'type': 'str'}, - 'size': {'key': 'size', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(TumblingWindowTriggerDependencyReference, self).__init__(**kwargs) - self.offset = kwargs.get('offset', None) - self.size = kwargs.get('size', None) - self.type = 'TumblingWindowTriggerDependencyReference' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference_py3.py deleted file mode 100644 index 648f25e59937..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference_py3.py +++ /dev/null @@ -1,50 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .trigger_dependency_reference_py3 import TriggerDependencyReference - - -class TumblingWindowTriggerDependencyReference(TriggerDependencyReference): - """Referenced tumbling window trigger dependency. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. - :type type: str - :param reference_trigger: Required. Referenced trigger. - :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference - :param offset: Timespan applied to the start time of a tumbling window - when evaluating dependency. - :type offset: str - :param size: The size of the window when evaluating the dependency. If - undefined the frequency of the tumbling window will be used. 
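
The dependency reference above attaches to a tumbling window trigger's depends_on list; a minimal sketch, assuming an upstream trigger named 'exampleUpstream' and the TriggerReference model from this same package. offset and size must match the timespan pattern in the validation block above.

from azure.mgmt.datafactory.models import (
    TriggerReference, TumblingWindowTriggerDependencyReference)

# Evaluate against the upstream window shifted back two hours; size
# defaults to the depending trigger's own window when omitted.
dependency = TumblingWindowTriggerDependencyReference(
    reference_trigger=TriggerReference(reference_name='exampleUpstream'),
    offset='02:00:00',
    size='24:00:00')
# e.g. TumblingWindowTrigger(..., depends_on=[dependency])
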
- :type size: str - """ - - _validation = { - 'type': {'required': True}, - 'reference_trigger': {'required': True}, - 'offset': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, - 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'}, - 'offset': {'key': 'offset', 'type': 'str'}, - 'size': {'key': 'size', 'type': 'str'}, - } - - def __init__(self, *, reference_trigger, offset: str=None, size: str=None, **kwargs) -> None: - super(TumblingWindowTriggerDependencyReference, self).__init__(reference_trigger=reference_trigger, **kwargs) - self.offset = offset - self.size = size - self.type = 'TumblingWindowTriggerDependencyReference' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_py3.py deleted file mode 100644 index 6856629c8b91..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_py3.py +++ /dev/null @@ -1,112 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .trigger_py3 import Trigger - - -class TumblingWindowTrigger(Trigger): - """Trigger that schedules pipeline runs for all fixed time interval windows - from a start time without gaps and also supports backfill scenarios (when - start time is in the past). - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when - Start/Stop APIs are called on the Trigger. Possible values include: - 'Started', 'Stopped', 'Disabled' - :vartype runtime_state: str or - ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the - trigger. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param pipeline: Required. Pipeline for which runs are created when an - event is fired for trigger window that is ready. - :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference - :param frequency: Required. The frequency of the time windows. Possible - values include: 'Minute', 'Hour' - :type frequency: str or - ~azure.mgmt.datafactory.models.TumblingWindowFrequency - :param interval: Required. The interval of the time windows. The minimum - interval allowed is 15 Minutes. - :type interval: int - :param start_time: Required. 
The start time for the time period for the - trigger during which events are fired for windows that are ready. Only UTC - time is currently supported. - :type start_time: datetime - :param end_time: The end time for the time period for the trigger during - which events are fired for windows that are ready. Only UTC time is - currently supported. - :type end_time: datetime - :param delay: Specifies how long the trigger waits past due time before - triggering new run. It doesn't alter window start and end time. The - default is 0. Type: string (or Expression with resultType string), - pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type delay: object - :param max_concurrency: Required. The max number of parallel time windows - (ready for execution) for which a new run is triggered. - :type max_concurrency: int - :param retry_policy: Retry policy that will be applied for failed pipeline - runs. - :type retry_policy: ~azure.mgmt.datafactory.models.RetryPolicy - :param depends_on: Triggers that this trigger depends on. Only tumbling - window triggers are supported. - :type depends_on: list[~azure.mgmt.datafactory.models.DependencyReference] - """ - - _validation = { - 'runtime_state': {'readonly': True}, - 'type': {'required': True}, - 'pipeline': {'required': True}, - 'frequency': {'required': True}, - 'interval': {'required': True}, - 'start_time': {'required': True}, - 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, - 'frequency': {'key': 'typeProperties.frequency', 'type': 'str'}, - 'interval': {'key': 'typeProperties.interval', 'type': 'int'}, - 'start_time': {'key': 'typeProperties.startTime', 'type': 'iso-8601'}, - 'end_time': {'key': 'typeProperties.endTime', 'type': 'iso-8601'}, - 'delay': {'key': 'typeProperties.delay', 'type': 'object'}, - 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, - 'retry_policy': {'key': 'typeProperties.retryPolicy', 'type': 'RetryPolicy'}, - 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[DependencyReference]'}, - } - - def __init__(self, *, pipeline, frequency, interval: int, start_time, max_concurrency: int, additional_properties=None, description: str=None, annotations=None, end_time=None, delay=None, retry_policy=None, depends_on=None, **kwargs) -> None: - super(TumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) - self.pipeline = pipeline - self.frequency = frequency - self.interval = interval - self.start_time = start_time - self.end_time = end_time - self.delay = delay - self.max_concurrency = max_concurrency - self.retry_policy = retry_policy - self.depends_on = depends_on - self.type = 'TumblingWindowTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity.py deleted file mode 100644 index eede36501d6c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# 
--------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .control_activity import ControlActivity
-
-
-class UntilActivity(ControlActivity):
-    """This activity executes inner activities until the specified boolean
-    expression results to true or timeout is reached, whichever is earlier.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param name: Required. Activity name.
-    :type name: str
-    :param description: Activity description.
-    :type description: str
-    :param depends_on: Activity depends on condition.
-    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
-    :param user_properties: Activity user properties.
-    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param expression: Required. An expression that would evaluate to Boolean.
-     The loop will continue until this expression evaluates to true
-    :type expression: ~azure.mgmt.datafactory.models.Expression
-    :param timeout: Specifies the timeout for the activity to run. If there is
-     no value specified, it takes the value of TimeSpan.FromDays(7) which is 1
-     week as default. Type: string (or Expression with resultType string),
-     pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type timeout: object
-    :param activities: Required. List of activities to execute.
-    :type activities: list[~azure.mgmt.datafactory.models.Activity]
-    """
-
-    _validation = {
-        'name': {'required': True},
-        'type': {'required': True},
-        'expression': {'required': True},
-        'activities': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'name': {'key': 'name', 'type': 'str'},
-        'description': {'key': 'description', 'type': 'str'},
-        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
-        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'expression': {'key': 'typeProperties.expression', 'type': 'Expression'},
-        'timeout': {'key': 'typeProperties.timeout', 'type': 'object'},
-        'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'},
-    }
-
-    def __init__(self, **kwargs):
-        super(UntilActivity, self).__init__(**kwargs)
-        self.expression = kwargs.get('expression', None)
-        self.timeout = kwargs.get('timeout', None)
-        self.activities = kwargs.get('activities', None)
-        self.type = 'Until'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity_py3.py
deleted file mode 100644
index 40c03ce18591..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity_py3.py
+++ /dev/null
@@ -1,72 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .control_activity_py3 import ControlActivity
-
-
-class UntilActivity(ControlActivity):
-    """This activity executes inner activities until the specified boolean
-    expression results to true or timeout is reached, whichever is earlier.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param name: Required. Activity name.
-    :type name: str
-    :param description: Activity description.
-    :type description: str
-    :param depends_on: Activity depends on condition.
-    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
-    :param user_properties: Activity user properties.
-    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param expression: Required. An expression that would evaluate to Boolean.
-     The loop will continue until this expression evaluates to true
-    :type expression: ~azure.mgmt.datafactory.models.Expression
-    :param timeout: Specifies the timeout for the activity to run. If there is
-     no value specified, it takes the value of TimeSpan.FromDays(7) which is 1
-     week as default. Type: string (or Expression with resultType string),
-     pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type timeout: object
-    :param activities: Required.
List of activities to execute. - :type activities: list[~azure.mgmt.datafactory.models.Activity] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'expression': {'required': True}, - 'activities': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'expression': {'key': 'typeProperties.expression', 'type': 'Expression'}, - 'timeout': {'key': 'typeProperties.timeout', 'type': 'object'}, - 'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'}, - } - - def __init__(self, *, name: str, expression, activities, additional_properties=None, description: str=None, depends_on=None, user_properties=None, timeout=None, **kwargs) -> None: - super(UntilActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.expression = expression - self.timeout = timeout - self.activities = activities - self.type = 'Until' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request.py deleted file mode 100644 index c6460310225a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request.py +++ /dev/null @@ -1,34 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class UpdateIntegrationRuntimeNodeRequest(Model): - """Update integration runtime node request. - - :param concurrent_jobs_limit: The number of concurrent jobs permitted to - run on the integration runtime node. Values between 1 and - maxConcurrentJobs(inclusive) are allowed. 
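
A construction sketch for the UntilActivity removed above, assuming the Expression and WaitActivity models from this same package; the expression text and names are illustrative.

from azure.mgmt.datafactory.models import (
    Expression, UntilActivity, WaitActivity)

# Re-check every 30 seconds until the 'done' variable becomes true,
# giving up after 10 minutes instead of the 7-day default timeout.
until = UntilActivity(
    name='WaitUntilDone',
    expression=Expression(value="@equals(variables('done'), true)"),
    activities=[WaitActivity(name='Pause', wait_time_in_seconds=30)],
    timeout='0.00:10:00')
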
- :type concurrent_jobs_limit: int - """ - - _validation = { - 'concurrent_jobs_limit': {'minimum': 1}, - } - - _attribute_map = { - 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, - } - - def __init__(self, **kwargs): - super(UpdateIntegrationRuntimeNodeRequest, self).__init__(**kwargs) - self.concurrent_jobs_limit = kwargs.get('concurrent_jobs_limit', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request_py3.py deleted file mode 100644 index de1605885139..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request_py3.py +++ /dev/null @@ -1,34 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class UpdateIntegrationRuntimeNodeRequest(Model): - """Update integration runtime node request. - - :param concurrent_jobs_limit: The number of concurrent jobs permitted to - run on the integration runtime node. Values between 1 and - maxConcurrentJobs(inclusive) are allowed. - :type concurrent_jobs_limit: int - """ - - _validation = { - 'concurrent_jobs_limit': {'minimum': 1}, - } - - _attribute_map = { - 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, - } - - def __init__(self, *, concurrent_jobs_limit: int=None, **kwargs) -> None: - super(UpdateIntegrationRuntimeNodeRequest, self).__init__(**kwargs) - self.concurrent_jobs_limit = concurrent_jobs_limit diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request.py deleted file mode 100644 index bd5e332b50f5..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request.py +++ /dev/null @@ -1,38 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class UpdateIntegrationRuntimeRequest(Model): - """Update integration runtime request. - - :param auto_update: Enables or disables the auto-update feature of the - self-hosted integration runtime. See - https://go.microsoft.com/fwlink/?linkid=854189. Possible values include: - 'On', 'Off' - :type auto_update: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate - :param update_delay_offset: The time offset (in hours) in the day, e.g., - PT03H is 3 hours. 
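
A minimal sketch of the node update request above; the request object forms the body of the integration runtime node update operation, and the limit shown is illustrative.

from azure.mgmt.datafactory.models import UpdateIntegrationRuntimeNodeRequest

# Cap a self-hosted integration runtime node at four concurrent jobs;
# the value must be at least 1 and no more than the node's
# maxConcurrentJobs.
request = UpdateIntegrationRuntimeNodeRequest(concurrent_jobs_limit=4)
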
The integration runtime auto update will happen on that - time. - :type update_delay_offset: str - """ - - _attribute_map = { - 'auto_update': {'key': 'autoUpdate', 'type': 'str'}, - 'update_delay_offset': {'key': 'updateDelayOffset', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(UpdateIntegrationRuntimeRequest, self).__init__(**kwargs) - self.auto_update = kwargs.get('auto_update', None) - self.update_delay_offset = kwargs.get('update_delay_offset', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request_py3.py deleted file mode 100644 index 731cb942b472..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request_py3.py +++ /dev/null @@ -1,38 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class UpdateIntegrationRuntimeRequest(Model): - """Update integration runtime request. - - :param auto_update: Enables or disables the auto-update feature of the - self-hosted integration runtime. See - https://go.microsoft.com/fwlink/?linkid=854189. Possible values include: - 'On', 'Off' - :type auto_update: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate - :param update_delay_offset: The time offset (in hours) in the day, e.g., - PT03H is 3 hours. The integration runtime auto update will happen on that - time. - :type update_delay_offset: str - """ - - _attribute_map = { - 'auto_update': {'key': 'autoUpdate', 'type': 'str'}, - 'update_delay_offset': {'key': 'updateDelayOffset', 'type': 'str'}, - } - - def __init__(self, *, auto_update=None, update_delay_offset: str=None, **kwargs) -> None: - super(UpdateIntegrationRuntimeRequest, self).__init__(**kwargs) - self.auto_update = auto_update - self.update_delay_offset = update_delay_offset diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy.py deleted file mode 100644 index b51e313b6f0c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy.py +++ /dev/null @@ -1,51 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class UserAccessPolicy(Model): - """Get Data Plane read only token request definition. - - :param permissions: The string with permissions for Data Plane access. 
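
And the corresponding sketch for the UpdateIntegrationRuntimeRequest above, with the offset format taken from the docstring:

from azure.mgmt.datafactory.models import UpdateIntegrationRuntimeRequest

# Keep auto-update enabled and schedule it three hours into the day.
request = UpdateIntegrationRuntimeRequest(
    auto_update='On',
    update_delay_offset='PT03H')
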
- Currently only 'r' is supported which grants read only access. - :type permissions: str - :param access_resource_path: The resource path to get access relative to - factory. Currently only empty string is supported which corresponds to the - factory resource. - :type access_resource_path: str - :param profile_name: The name of the profile. Currently only the default - is supported. The default value is DefaultProfile. - :type profile_name: str - :param start_time: Start time for the token. If not specified the current - time will be used. - :type start_time: str - :param expire_time: Expiration time for the token. Maximum duration for - the token is eight hours and by default the token will expire in eight - hours. - :type expire_time: str - """ - - _attribute_map = { - 'permissions': {'key': 'permissions', 'type': 'str'}, - 'access_resource_path': {'key': 'accessResourcePath', 'type': 'str'}, - 'profile_name': {'key': 'profileName', 'type': 'str'}, - 'start_time': {'key': 'startTime', 'type': 'str'}, - 'expire_time': {'key': 'expireTime', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(UserAccessPolicy, self).__init__(**kwargs) - self.permissions = kwargs.get('permissions', None) - self.access_resource_path = kwargs.get('access_resource_path', None) - self.profile_name = kwargs.get('profile_name', None) - self.start_time = kwargs.get('start_time', None) - self.expire_time = kwargs.get('expire_time', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy_py3.py deleted file mode 100644 index 26e2a7639a09..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy_py3.py +++ /dev/null @@ -1,51 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class UserAccessPolicy(Model): - """Get Data Plane read only token request definition. - - :param permissions: The string with permissions for Data Plane access. - Currently only 'r' is supported which grants read only access. - :type permissions: str - :param access_resource_path: The resource path to get access relative to - factory. Currently only empty string is supported which corresponds to the - factory resource. - :type access_resource_path: str - :param profile_name: The name of the profile. Currently only the default - is supported. The default value is DefaultProfile. - :type profile_name: str - :param start_time: Start time for the token. If not specified the current - time will be used. - :type start_time: str - :param expire_time: Expiration time for the token. Maximum duration for - the token is eight hours and by default the token will expire in eight - hours. 
- :type expire_time: str - """ - - _attribute_map = { - 'permissions': {'key': 'permissions', 'type': 'str'}, - 'access_resource_path': {'key': 'accessResourcePath', 'type': 'str'}, - 'profile_name': {'key': 'profileName', 'type': 'str'}, - 'start_time': {'key': 'startTime', 'type': 'str'}, - 'expire_time': {'key': 'expireTime', 'type': 'str'}, - } - - def __init__(self, *, permissions: str=None, access_resource_path: str=None, profile_name: str=None, start_time: str=None, expire_time: str=None, **kwargs) -> None: - super(UserAccessPolicy, self).__init__(**kwargs) - self.permissions = permissions - self.access_resource_path = access_resource_path - self.profile_name = profile_name - self.start_time = start_time - self.expire_time = expire_time diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property.py deleted file mode 100644 index 30692d2960ec..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property.py +++ /dev/null @@ -1,40 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class UserProperty(Model): - """User property. - - All required parameters must be populated in order to send to Azure. - - :param name: Required. User property name. - :type name: str - :param value: Required. User property value. Type: string (or Expression - with resultType string). - :type value: object - """ - - _validation = { - 'name': {'required': True}, - 'value': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(UserProperty, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.value = kwargs.get('value', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property_py3.py deleted file mode 100644 index 7b4f3beb0195..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property_py3.py +++ /dev/null @@ -1,40 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class UserProperty(Model): - """User property. - - All required parameters must be populated in order to send to Azure. - - :param name: Required. User property name. - :type name: str - :param value: Required. User property value. Type: string (or Expression - with resultType string). 
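
A sketch of filling in the UserAccessPolicy above for a read-only, factory-wide data plane token; all values follow the constraints in the docstring and the timestamps are illustrative.

from azure.mgmt.datafactory.models import UserAccessPolicy

# 'r' is the only supported permission; an empty resource path means
# the factory itself, and eight hours is the maximum token lifetime.
policy = UserAccessPolicy(
    permissions='r',
    access_resource_path='',
    profile_name='DefaultProfile',
    start_time='2019-06-07T00:00:00.000Z',
    expire_time='2019-06-07T08:00:00.000Z')
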
- :type value: object - """ - - _validation = { - 'name': {'required': True}, - 'value': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'object'}, - } - - def __init__(self, *, name: str, value, **kwargs) -> None: - super(UserProperty, self).__init__(**kwargs) - self.name = name - self.value = value diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity.py deleted file mode 100644 index 0d92d32c12b0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity.py +++ /dev/null @@ -1,81 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .control_activity import ControlActivity - - -class ValidationActivity(ControlActivity): - """This activity verifies that an external resource exists. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param timeout: Specifies the timeout for the activity to run. If there is - no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 - week as default. Type: string (or Expression with resultType string), - pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type timeout: object - :param sleep: A delay in seconds between validation attempts. If no value - is specified, 10 seconds will be used as the default. Type: integer (or - Expression with resultType integer). - :type sleep: object - :param minimum_size: Can be used if dataset points to a file. The file - must be greater than or equal in size to the value specified. Type: - integer (or Expression with resultType integer). - :type minimum_size: object - :param child_items: Can be used if dataset points to a folder. If set to - true, the folder must have at least one file. If set to false, the folder - must be empty. Type: boolean (or Expression with resultType boolean). - :type child_items: object - :param dataset: Required. Validation activity dataset reference. 
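
UserProperty values surface on activities in monitoring views; a minimal sketch with illustrative names:

from azure.mgmt.datafactory.models import UserProperty

# Values may be literals or Data Factory expressions.
user_properties = [
    UserProperty(name='Source', value="@pipeline().parameters.source"),
    UserProperty(name='Destination', value='outputContainer'),
]
# Passed as user_properties=... on any activity, e.g. a copy activity.
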
- :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'dataset': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'timeout': {'key': 'typeProperties.timeout', 'type': 'object'}, - 'sleep': {'key': 'typeProperties.sleep', 'type': 'object'}, - 'minimum_size': {'key': 'typeProperties.minimumSize', 'type': 'object'}, - 'child_items': {'key': 'typeProperties.childItems', 'type': 'object'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, - } - - def __init__(self, **kwargs): - super(ValidationActivity, self).__init__(**kwargs) - self.timeout = kwargs.get('timeout', None) - self.sleep = kwargs.get('sleep', None) - self.minimum_size = kwargs.get('minimum_size', None) - self.child_items = kwargs.get('child_items', None) - self.dataset = kwargs.get('dataset', None) - self.type = 'Validation' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity_py3.py deleted file mode 100644 index f4680400b447..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity_py3.py +++ /dev/null @@ -1,81 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .control_activity_py3 import ControlActivity - - -class ValidationActivity(ControlActivity): - """This activity verifies that an external resource exists. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param timeout: Specifies the timeout for the activity to run. If there is - no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 - week as default. Type: string (or Expression with resultType string), - pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type timeout: object - :param sleep: A delay in seconds between validation attempts. If no value - is specified, 10 seconds will be used as the default. Type: integer (or - Expression with resultType integer). 
- :type sleep: object - :param minimum_size: Can be used if dataset points to a file. The file - must be greater than or equal in size to the value specified. Type: - integer (or Expression with resultType integer). - :type minimum_size: object - :param child_items: Can be used if dataset points to a folder. If set to - true, the folder must have at least one file. If set to false, the folder - must be empty. Type: boolean (or Expression with resultType boolean). - :type child_items: object - :param dataset: Required. Validation activity dataset reference. - :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'dataset': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'timeout': {'key': 'typeProperties.timeout', 'type': 'object'}, - 'sleep': {'key': 'typeProperties.sleep', 'type': 'object'}, - 'minimum_size': {'key': 'typeProperties.minimumSize', 'type': 'object'}, - 'child_items': {'key': 'typeProperties.childItems', 'type': 'object'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, - } - - def __init__(self, *, name: str, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, timeout=None, sleep=None, minimum_size=None, child_items=None, **kwargs) -> None: - super(ValidationActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.timeout = timeout - self.sleep = sleep - self.minimum_size = minimum_size - self.child_items = child_items - self.dataset = dataset - self.type = 'Validation' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification.py deleted file mode 100644 index 6d7fd808fa44..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification.py +++ /dev/null @@ -1,39 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class VariableSpecification(Model): - """Definition of a single variable for a Pipeline. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Variable type. Possible values include: 'String', - 'Bool', 'Array' - :type type: str or ~azure.mgmt.datafactory.models.VariableType - :param default_value: Default value of variable. 
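
A construction sketch for the ValidationActivity above, assuming the DatasetReference model from this same package and an illustrative dataset name:

from azure.mgmt.datafactory.models import DatasetReference, ValidationActivity

# Block until the referenced file exists and is at least 1 KiB,
# polling every 30 seconds for up to 30 minutes.
validation = ValidationActivity(
    name='WaitForInputFile',
    dataset=DatasetReference(reference_name='InputFileDataset'),
    timeout='0.00:30:00',
    sleep=30,
    minimum_size=1024)
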
- :type default_value: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'default_value': {'key': 'defaultValue', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(VariableSpecification, self).__init__(**kwargs) - self.type = kwargs.get('type', None) - self.default_value = kwargs.get('default_value', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification_py3.py deleted file mode 100644 index d60b3b4b1591..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification_py3.py +++ /dev/null @@ -1,39 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class VariableSpecification(Model): - """Definition of a single variable for a Pipeline. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Variable type. Possible values include: 'String', - 'Bool', 'Array' - :type type: str or ~azure.mgmt.datafactory.models.VariableType - :param default_value: Default value of variable. - :type default_value: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'default_value': {'key': 'defaultValue', 'type': 'object'}, - } - - def __init__(self, *, type, default_value=None, **kwargs) -> None: - super(VariableSpecification, self).__init__(**kwargs) - self.type = type - self.default_value = default_value diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service.py deleted file mode 100644 index 6b5e8d0103f5..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class VerticaLinkedService(LinkedService): - """Vertica linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. 
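
VariableSpecification entries map variable names to their types when defining a pipeline; a minimal sketch with illustrative names:

from azure.mgmt.datafactory.models import VariableSpecification

# A Bool with a default and an Array left unset until runtime.
variables = {
    'done': VariableSpecification(type='Bool', default_value=False),
    'processedFiles': VariableSpecification(type='Array'),
}
# Passed as variables=... when building the pipeline resource.
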
- :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection - string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(VerticaLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.pwd = kwargs.get('pwd', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Vertica' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service_py3.py deleted file mode 100644 index 3aee3a5ae0f6..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service_py3.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class VerticaLinkedService(LinkedService): - """Vertica linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection - string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: - super(VerticaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.pwd = pwd - self.encrypted_credential = encrypted_credential - self.type = 'Vertica' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source.py deleted file mode 100644 index d0b642f15d38..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class VerticaSource(CopySource): - """A copy activity Vertica source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. 
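
A sketch of the VerticaLinkedService above that keeps the password out of the connection string via Key Vault; the AzureKeyVaultSecretReference and LinkedServiceReference models are assumed from this same package, and the server and secret names are placeholders.

from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference, LinkedServiceReference,
    VerticaLinkedService)

# The pwd secret is resolved from the referenced Key Vault at runtime.
vertica = VerticaLinkedService(
    connection_string='Server=vertica.example.com;Port=5433;'
                      'Database=sales;UID=loader',
    pwd=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='exampleKeyVault'),
        secret_name='verticaPassword'))
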
Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(VerticaSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'VerticaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source_py3.py deleted file mode 100644 index a1c4d755f2b4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class VerticaSource(CopySource): - """A copy activity Vertica source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source_py3.py
deleted file mode 100644
index a1c4d755f2b4..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source_py3.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .copy_source_py3 import CopySource
-
-
-class VerticaSource(CopySource):
- """A copy activity Vertica source.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param source_retry_count: Source retry count. Type: integer (or
- Expression with resultType integer).
- :type source_retry_count: object
- :param source_retry_wait: Source retry wait. Type: string (or Expression
- with resultType string), pattern:
- ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type source_retry_wait: object
- :param max_concurrent_connections: The maximum concurrent connection count
- for the source data store. Type: integer (or Expression with resultType
- integer).
- :type max_concurrent_connections: object
- :param type: Required. Constant filled by server.
- :type type: str
- :param query: A query to retrieve data from source. Type: string (or
- Expression with resultType string).
- :type query: object
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
- 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
- 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
- 'type': {'key': 'type', 'type': 'str'},
- 'query': {'key': 'query', 'type': 'object'},
- }
-
- def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
- super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
- self.query = query
- self.type = 'VerticaSource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset.py
deleted file mode 100644
index 151a0d000e3f..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .dataset import Dataset
-
-
-class VerticaTableDataset(Dataset):
- """Vertica dataset.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param description: Dataset description.
- :type description: str
- :param structure: Columns that define the structure of the dataset. Type:
- array (or Expression with resultType array), itemType: DatasetDataElement.
- :type structure: object
- :param schema: Columns that define the physical type schema of the
- dataset. Type: array (or Expression with resultType array), itemType:
- DatasetSchemaDataElement.
- :type schema: object
- :param linked_service_name: Required. Linked service reference.
- :type linked_service_name:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param parameters: Parameters for dataset.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- Dataset.
- :type annotations: list[object]
- :param folder: The folder that this Dataset is in. If not specified,
- Dataset will appear at the root level.
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
- :param type: Required. Constant filled by server.
- :type type: str
- :param table_name: This property will be retired. Please consider using
- schema + table properties instead.
- :type table_name: object
- :param table: The table name of the Vertica. Type: string (or Expression
- with resultType string).
- :type table: object
- :param vertica_table_dataset_schema: The schema name of the Vertica.
Type: - string (or Expression with resultType string). - :type vertica_table_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'vertica_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(VerticaTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.vertica_table_dataset_schema = kwargs.get('vertica_table_dataset_schema', None) - self.type = 'VerticaTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset_py3.py deleted file mode 100644 index 4c2fc8da32ad..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class VerticaTableDataset(Dataset): - """Vertica dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. 
Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of the Vertica. Type: string (or Expression - with resultType string). - :type table: object - :param vertica_table_dataset_schema: The schema name of the Vertica. Type: - string (or Expression with resultType string). - :type vertica_table_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'vertica_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, vertica_table_dataset_schema=None, **kwargs) -> None: - super(VerticaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.table = table - self.vertica_table_dataset_schema = vertica_table_dataset_schema - self.type = 'VerticaTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity.py deleted file mode 100644 index 91f3decc7473..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity.py +++ /dev/null @@ -1,56 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .control_activity import ControlActivity - - -class WaitActivity(ControlActivity): - """This activity suspends pipeline execution for the specified interval. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. 
- :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
- :param user_properties: Activity user properties.
- :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
- :param type: Required. Constant filled by server.
- :type type: str
- :param wait_time_in_seconds: Required. Duration in seconds.
- :type wait_time_in_seconds: int
- """
-
- _validation = {
- 'name': {'required': True},
- 'type': {'required': True},
- 'wait_time_in_seconds': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'name': {'key': 'name', 'type': 'str'},
- 'description': {'key': 'description', 'type': 'str'},
- 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
- 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'wait_time_in_seconds': {'key': 'typeProperties.waitTimeInSeconds', 'type': 'int'},
- }
-
- def __init__(self, **kwargs):
- super(WaitActivity, self).__init__(**kwargs)
- self.wait_time_in_seconds = kwargs.get('wait_time_in_seconds', None)
- self.type = 'Wait'
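
The WaitActivity removed above is the simplest of these models: beyond the
activity name, only wait_time_in_seconds is required, and it is a plain int
rather than an expression object. A usage sketch (the activity names,
including the dependency on the hypothetical "CopyFromVertica" activity from
the earlier sketch, are illustrative):

    from azure.mgmt.datafactory.models import ActivityDependency, WaitActivity

    # Pause the pipeline for five minutes once the copy step succeeds.
    wait = WaitActivity(
        name="WaitFiveMinutes",  # illustrative
        wait_time_in_seconds=300,
        depends_on=[ActivityDependency(
            activity="CopyFromVertica",  # hypothetical upstream activity
            dependency_conditions=["Succeeded"],
        )],
    )
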
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity_py3.py
deleted file mode 100644
index ff85c9d16733..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity_py3.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .control_activity_py3 import ControlActivity
-
-
-class WaitActivity(ControlActivity):
- """This activity suspends pipeline execution for the specified interval.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param name: Required. Activity name.
- :type name: str
- :param description: Activity description.
- :type description: str
- :param depends_on: Activity depends on condition.
- :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
- :param user_properties: Activity user properties.
- :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
- :param type: Required. Constant filled by server.
- :type type: str
- :param wait_time_in_seconds: Required. Duration in seconds.
- :type wait_time_in_seconds: int
- """
-
- _validation = {
- 'name': {'required': True},
- 'type': {'required': True},
- 'wait_time_in_seconds': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'name': {'key': 'name', 'type': 'str'},
- 'description': {'key': 'description', 'type': 'str'},
- 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
- 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'wait_time_in_seconds': {'key': 'typeProperties.waitTimeInSeconds', 'type': 'int'},
- }
-
- def __init__(self, *, name: str, wait_time_in_seconds: int, additional_properties=None, description: str=None, depends_on=None, user_properties=None, **kwargs) -> None:
- super(WaitActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs)
- self.wait_time_in_seconds = wait_time_in_seconds
- self.type = 'Wait'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity.py
deleted file mode 100644
index 70264719d52e..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity.py
+++ /dev/null
@@ -1,98 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .execution_activity import ExecutionActivity
-
-
-class WebActivity(ExecutionActivity):
- """Web activity.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param name: Required. Activity name.
- :type name: str
- :param description: Activity description.
- :type description: str
- :param depends_on: Activity depends on condition.
- :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
- :param user_properties: Activity user properties.
- :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
- :param type: Required. Constant filled by server.
- :type type: str
- :param linked_service_name: Linked service reference.
- :type linked_service_name:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param policy: Activity policy.
- :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
- :param method: Required. Rest API method for target endpoint. Possible
- values include: 'GET', 'POST', 'PUT', 'DELETE'
- :type method: str or ~azure.mgmt.datafactory.models.WebActivityMethod
- :param url: Required. Web activity target endpoint and path. Type: string
- (or Expression with resultType string).
- :type url: object
- :param headers: Represents the headers that will be sent to the request.
- For example, to set the language and type on a request: "headers" : {
- "Accept-Language": "en-us", "Content-Type": "application/json" }. Type:
- string (or Expression with resultType string).
- :type headers: object
- :param body: Represents the payload that will be sent to the endpoint.
- Required for POST/PUT method, not allowed for GET method Type: string (or
- Expression with resultType string).
- :type body: object
- :param authentication: Authentication method used for calling the
- endpoint.
- :type authentication:
- ~azure.mgmt.datafactory.models.WebActivityAuthentication
- :param datasets: List of datasets passed to web endpoint.
- :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference]
- :param linked_services: List of linked services passed to web endpoint.
- :type linked_services:
- list[~azure.mgmt.datafactory.models.LinkedServiceReference]
- """
-
- _validation = {
- 'name': {'required': True},
- 'type': {'required': True},
- 'method': {'required': True},
- 'url': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'name': {'key': 'name', 'type': 'str'},
- 'description': {'key': 'description', 'type': 'str'},
- 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
- 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
- 'method': {'key': 'typeProperties.method', 'type': 'str'},
- 'url': {'key': 'typeProperties.url', 'type': 'object'},
- 'headers': {'key': 'typeProperties.headers', 'type': 'object'},
- 'body': {'key': 'typeProperties.body', 'type': 'object'},
- 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'},
- 'datasets': {'key': 'typeProperties.datasets', 'type': '[DatasetReference]'},
- 'linked_services': {'key': 'typeProperties.linkedServices', 'type': '[LinkedServiceReference]'},
- }
-
- def __init__(self, **kwargs):
- super(WebActivity, self).__init__(**kwargs)
- self.method = kwargs.get('method', None)
- self.url = kwargs.get('url', None)
- self.headers = kwargs.get('headers', None)
- self.body = kwargs.get('body', None)
- self.authentication = kwargs.get('authentication', None)
- self.datasets = kwargs.get('datasets', None)
- self.linked_services = kwargs.get('linked_services', None)
- self.type = 'WebActivity'
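
For the WebActivity deleted above, 'method' and 'url' are the only required
type-specific fields; 'headers' and 'body' accept literals or ADF expressions,
and per the docstring 'body' is required for POST/PUT and disallowed for GET.
A sketch (the endpoint and payload are placeholders, not part of the patch):

    from azure.mgmt.datafactory.models import WebActivity

    web = WebActivity(
        name="NotifyOnCompletion",  # illustrative
        method="POST",  # a WebActivityMethod value
        url="https://example.invalid/api/notify",  # placeholder endpoint
        headers={"Content-Type": "application/json"},
        body='{"status": "done"}',  # required because method is POST
    )
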
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication.py
deleted file mode 100644
index 6ebb193ae5e9..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class WebActivityAuthentication(Model):
- """Web activity authentication properties.
-
- All required parameters must be populated in order to send to Azure.
-
- :param type: Required. Web activity authentication
- (Basic/ClientCertificate/MSI)
- :type type: str
- :param pfx: Base64-encoded contents of a PFX file.
- :type pfx: ~azure.mgmt.datafactory.models.SecureString
- :param username: Web activity authentication user name for basic
- authentication.
- :type username: str
- :param password: Password for the PFX file or basic authentication.
- :type password: ~azure.mgmt.datafactory.models.SecureString
- :param resource: Resource for which Azure Auth token will be requested
- when using MSI Authentication.
- :type resource: str
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'type': {'key': 'type', 'type': 'str'},
- 'pfx': {'key': 'pfx', 'type': 'SecureString'},
- 'username': {'key': 'username', 'type': 'str'},
- 'password': {'key': 'password', 'type': 'SecureString'},
- 'resource': {'key': 'resource', 'type': 'str'},
- }
-
- def __init__(self, **kwargs):
- super(WebActivityAuthentication, self).__init__(**kwargs)
- self.type = kwargs.get('type', None)
- self.pfx = kwargs.get('pfx', None)
- self.username = kwargs.get('username', None)
- self.password = kwargs.get('password', None)
- self.resource = kwargs.get('resource', None)
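
WebActivityAuthentication, whose kwargs variant ends above, is not used on
its own; it is attached to a WebActivity, with 'type' selecting Basic,
ClientCertificate or MSI and the remaining fields applying only to the chosen
scheme. A sketch for the Basic case (credentials are placeholders; 'web'
refers to the WebActivity sketch earlier):

    from azure.mgmt.datafactory.models import (
        SecureString, WebActivityAuthentication)

    auth = WebActivityAuthentication(
        type="Basic",
        username="svc-user",  # placeholder
        password=SecureString(value="not-a-real-secret"),  # placeholder
    )
    web.authentication = auth

    # For MSI one would instead pass type="MSI" plus 'resource'; for
    # ClientCertificate, 'pfx' plus 'password'.
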
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication_py3.py
deleted file mode 100644
index 4c2b68ba7161..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication_py3.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class WebActivityAuthentication(Model):
- """Web activity authentication properties.
-
- All required parameters must be populated in order to send to Azure.
-
- :param type: Required. Web activity authentication
- (Basic/ClientCertificate/MSI)
- :type type: str
- :param pfx: Base64-encoded contents of a PFX file.
- :type pfx: ~azure.mgmt.datafactory.models.SecureString
- :param username: Web activity authentication user name for basic
- authentication.
- :type username: str
- :param password: Password for the PFX file or basic authentication.
- :type password: ~azure.mgmt.datafactory.models.SecureString
- :param resource: Resource for which Azure Auth token will be requested
- when using MSI Authentication.
- :type resource: str
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'type': {'key': 'type', 'type': 'str'},
- 'pfx': {'key': 'pfx', 'type': 'SecureString'},
- 'username': {'key': 'username', 'type': 'str'},
- 'password': {'key': 'password', 'type': 'SecureString'},
- 'resource': {'key': 'resource', 'type': 'str'},
- }
-
- def __init__(self, *, type: str, pfx=None, username: str=None, password=None, resource: str=None, **kwargs) -> None:
- super(WebActivityAuthentication, self).__init__(**kwargs)
- self.type = type
- self.pfx = pfx
- self.username = username
- self.password = password
- self.resource = resource
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_py3.py
deleted file mode 100644
index 9a64114a00c6..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_py3.py
+++ /dev/null
@@ -1,98 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .execution_activity_py3 import ExecutionActivity
-
-
-class WebActivity(ExecutionActivity):
- """Web activity.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param name: Required. Activity name.
- :type name: str
- :param description: Activity description.
- :type description: str
- :param depends_on: Activity depends on condition.
- :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
- :param user_properties: Activity user properties.
- :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
- :param type: Required. Constant filled by server.
- :type type: str
- :param linked_service_name: Linked service reference.
- :type linked_service_name:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param policy: Activity policy.
- :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
- :param method: Required. Rest API method for target endpoint. Possible
- values include: 'GET', 'POST', 'PUT', 'DELETE'
- :type method: str or ~azure.mgmt.datafactory.models.WebActivityMethod
- :param url: Required. Web activity target endpoint and path. Type: string
- (or Expression with resultType string).
- :type url: object
- :param headers: Represents the headers that will be sent to the request.
- For example, to set the language and type on a request: "headers" : {
- "Accept-Language": "en-us", "Content-Type": "application/json" }. Type:
- string (or Expression with resultType string).
- :type headers: object
- :param body: Represents the payload that will be sent to the endpoint.
- Required for POST/PUT method, not allowed for GET method Type: string (or
- Expression with resultType string).
- :type body: object
- :param authentication: Authentication method used for calling the
- endpoint.
- :type authentication: - ~azure.mgmt.datafactory.models.WebActivityAuthentication - :param datasets: List of datasets passed to web endpoint. - :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference] - :param linked_services: List of linked services passed to web endpoint. - :type linked_services: - list[~azure.mgmt.datafactory.models.LinkedServiceReference] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'method': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'method': {'key': 'typeProperties.method', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, - 'body': {'key': 'typeProperties.body', 'type': 'object'}, - 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'}, - 'datasets': {'key': 'typeProperties.datasets', 'type': '[DatasetReference]'}, - 'linked_services': {'key': 'typeProperties.linkedServices', 'type': '[LinkedServiceReference]'}, - } - - def __init__(self, *, name: str, method, url, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, headers=None, body=None, authentication=None, datasets=None, linked_services=None, **kwargs) -> None: - super(WebActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.method = method - self.url = url - self.headers = headers - self.body = body - self.authentication = authentication - self.datasets = datasets - self.linked_services = linked_services - self.type = 'WebActivity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication.py deleted file mode 100644 index d3bd2f2594ab..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication.py +++ /dev/null @@ -1,41 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .web_linked_service_type_properties import WebLinkedServiceTypeProperties - - -class WebAnonymousAuthentication(WebLinkedServiceTypeProperties): - """A WebLinkedService that uses anonymous authentication to communicate with - an HTTP endpoint. - - All required parameters must be populated in order to send to Azure. - - :param url: Required. 
The URL of the web service endpoint, e.g. - http://www.microsoft.com . Type: string (or Expression with resultType - string). - :type url: object - :param authentication_type: Required. Constant filled by server. - :type authentication_type: str - """ - - _validation = { - 'url': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'url': {'key': 'url', 'type': 'object'}, - 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(WebAnonymousAuthentication, self).__init__(**kwargs) - self.authentication_type = 'Anonymous' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication_py3.py deleted file mode 100644 index ee7a4e780a1f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication_py3.py +++ /dev/null @@ -1,41 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .web_linked_service_type_properties_py3 import WebLinkedServiceTypeProperties - - -class WebAnonymousAuthentication(WebLinkedServiceTypeProperties): - """A WebLinkedService that uses anonymous authentication to communicate with - an HTTP endpoint. - - All required parameters must be populated in order to send to Azure. - - :param url: Required. The URL of the web service endpoint, e.g. - http://www.microsoft.com . Type: string (or Expression with resultType - string). - :type url: object - :param authentication_type: Required. Constant filled by server. - :type authentication_type: str - """ - - _validation = { - 'url': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'url': {'key': 'url', 'type': 'object'}, - 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, - } - - def __init__(self, *, url, **kwargs) -> None: - super(WebAnonymousAuthentication, self).__init__(url=url, **kwargs) - self.authentication_type = 'Anonymous' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication.py deleted file mode 100644 index 90050f7dae28..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication.py +++ /dev/null @@ -1,52 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from .web_linked_service_type_properties import WebLinkedServiceTypeProperties - - -class WebBasicAuthentication(WebLinkedServiceTypeProperties): - """A WebLinkedService that uses basic authentication to communicate with an - HTTP endpoint. - - All required parameters must be populated in order to send to Azure. - - :param url: Required. The URL of the web service endpoint, e.g. - http://www.microsoft.com . Type: string (or Expression with resultType - string). - :type url: object - :param authentication_type: Required. Constant filled by server. - :type authentication_type: str - :param username: Required. User name for Basic authentication. Type: - string (or Expression with resultType string). - :type username: object - :param password: Required. The password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - """ - - _validation = { - 'url': {'required': True}, - 'authentication_type': {'required': True}, - 'username': {'required': True}, - 'password': {'required': True}, - } - - _attribute_map = { - 'url': {'key': 'url', 'type': 'object'}, - 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, - 'username': {'key': 'username', 'type': 'object'}, - 'password': {'key': 'password', 'type': 'SecretBase'}, - } - - def __init__(self, **kwargs): - super(WebBasicAuthentication, self).__init__(**kwargs) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.authentication_type = 'Basic' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication_py3.py deleted file mode 100644 index 71577ec86565..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication_py3.py +++ /dev/null @@ -1,52 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .web_linked_service_type_properties_py3 import WebLinkedServiceTypeProperties - - -class WebBasicAuthentication(WebLinkedServiceTypeProperties): - """A WebLinkedService that uses basic authentication to communicate with an - HTTP endpoint. - - All required parameters must be populated in order to send to Azure. - - :param url: Required. The URL of the web service endpoint, e.g. - http://www.microsoft.com . Type: string (or Expression with resultType - string). - :type url: object - :param authentication_type: Required. Constant filled by server. - :type authentication_type: str - :param username: Required. User name for Basic authentication. Type: - string (or Expression with resultType string). - :type username: object - :param password: Required. The password for Basic authentication. 
- :type password: ~azure.mgmt.datafactory.models.SecretBase - """ - - _validation = { - 'url': {'required': True}, - 'authentication_type': {'required': True}, - 'username': {'required': True}, - 'password': {'required': True}, - } - - _attribute_map = { - 'url': {'key': 'url', 'type': 'object'}, - 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, - 'username': {'key': 'username', 'type': 'object'}, - 'password': {'key': 'password', 'type': 'SecretBase'}, - } - - def __init__(self, *, url, username, password, **kwargs) -> None: - super(WebBasicAuthentication, self).__init__(url=url, **kwargs) - self.username = username - self.password = password - self.authentication_type = 'Basic' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication.py deleted file mode 100644 index 671808ca85d1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication.py +++ /dev/null @@ -1,53 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .web_linked_service_type_properties import WebLinkedServiceTypeProperties - - -class WebClientCertificateAuthentication(WebLinkedServiceTypeProperties): - """A WebLinkedService that uses client certificate based authentication to - communicate with an HTTP endpoint. This scheme follows mutual - authentication; the server must also provide valid credentials to the - client. - - All required parameters must be populated in order to send to Azure. - - :param url: Required. The URL of the web service endpoint, e.g. - http://www.microsoft.com . Type: string (or Expression with resultType - string). - :type url: object - :param authentication_type: Required. Constant filled by server. - :type authentication_type: str - :param pfx: Required. Base64-encoded contents of a PFX file. - :type pfx: ~azure.mgmt.datafactory.models.SecretBase - :param password: Required. Password for the PFX file. 
- :type password: ~azure.mgmt.datafactory.models.SecretBase - """ - - _validation = { - 'url': {'required': True}, - 'authentication_type': {'required': True}, - 'pfx': {'required': True}, - 'password': {'required': True}, - } - - _attribute_map = { - 'url': {'key': 'url', 'type': 'object'}, - 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, - 'pfx': {'key': 'pfx', 'type': 'SecretBase'}, - 'password': {'key': 'password', 'type': 'SecretBase'}, - } - - def __init__(self, **kwargs): - super(WebClientCertificateAuthentication, self).__init__(**kwargs) - self.pfx = kwargs.get('pfx', None) - self.password = kwargs.get('password', None) - self.authentication_type = 'ClientCertificate' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication_py3.py deleted file mode 100644 index 7ac859b677a8..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication_py3.py +++ /dev/null @@ -1,53 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .web_linked_service_type_properties_py3 import WebLinkedServiceTypeProperties - - -class WebClientCertificateAuthentication(WebLinkedServiceTypeProperties): - """A WebLinkedService that uses client certificate based authentication to - communicate with an HTTP endpoint. This scheme follows mutual - authentication; the server must also provide valid credentials to the - client. - - All required parameters must be populated in order to send to Azure. - - :param url: Required. The URL of the web service endpoint, e.g. - http://www.microsoft.com . Type: string (or Expression with resultType - string). - :type url: object - :param authentication_type: Required. Constant filled by server. - :type authentication_type: str - :param pfx: Required. Base64-encoded contents of a PFX file. - :type pfx: ~azure.mgmt.datafactory.models.SecretBase - :param password: Required. Password for the PFX file. 
- :type password: ~azure.mgmt.datafactory.models.SecretBase - """ - - _validation = { - 'url': {'required': True}, - 'authentication_type': {'required': True}, - 'pfx': {'required': True}, - 'password': {'required': True}, - } - - _attribute_map = { - 'url': {'key': 'url', 'type': 'object'}, - 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, - 'pfx': {'key': 'pfx', 'type': 'SecretBase'}, - 'password': {'key': 'password', 'type': 'SecretBase'}, - } - - def __init__(self, *, url, pfx, password, **kwargs) -> None: - super(WebClientCertificateAuthentication, self).__init__(url=url, **kwargs) - self.pfx = pfx - self.password = password - self.authentication_type = 'ClientCertificate' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity.py deleted file mode 100644 index 1c648c42c3e2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity.py +++ /dev/null @@ -1,92 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .control_activity import ControlActivity - - -class WebHookActivity(ControlActivity): - """WebHook activity. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :ivar method: Required. Rest API method for target endpoint. Default - value: "POST" . - :vartype method: str - :param url: Required. WebHook activity target endpoint and path. Type: - string (or Expression with resultType string). - :type url: object - :param timeout: The timeout within which the webhook should be called - back. If there is no value specified, it defaults to 10 minutes. Type: - string. Pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type timeout: str - :param headers: Represents the headers that will be sent to the request. - For example, to set the language and type on a request: "headers" : { - "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: - string (or Expression with resultType string). - :type headers: object - :param body: Represents the payload that will be sent to the endpoint. - Required for POST/PUT method, not allowed for GET method Type: string (or - Expression with resultType string). - :type body: object - :param authentication: Authentication method used for calling the - endpoint. 
- :type authentication:
- ~azure.mgmt.datafactory.models.WebActivityAuthentication
- """
-
- _validation = {
- 'name': {'required': True},
- 'type': {'required': True},
- 'method': {'required': True, 'constant': True},
- 'url': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'name': {'key': 'name', 'type': 'str'},
- 'description': {'key': 'description', 'type': 'str'},
- 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
- 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'method': {'key': 'typeProperties.method', 'type': 'str'},
- 'url': {'key': 'typeProperties.url', 'type': 'object'},
- 'timeout': {'key': 'typeProperties.timeout', 'type': 'str'},
- 'headers': {'key': 'typeProperties.headers', 'type': 'object'},
- 'body': {'key': 'typeProperties.body', 'type': 'object'},
- 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'},
- }
-
- method = "POST"
-
- def __init__(self, **kwargs):
- super(WebHookActivity, self).__init__(**kwargs)
- self.url = kwargs.get('url', None)
- self.timeout = kwargs.get('timeout', None)
- self.headers = kwargs.get('headers', None)
- self.body = kwargs.get('body', None)
- self.authentication = kwargs.get('authentication', None)
- self.type = 'WebHook'
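
Unlike WebActivity, the WebHookActivity whose kwargs variant ends above pins
'method' to the class-level constant "POST", so a caller supplies only the
callback endpoint plus the optional timeout/headers/body. A sketch (the URL
and payload are placeholders; the timeout string follows the documented
d.hh:mm:ss pattern):

    from azure.mgmt.datafactory.models import WebHookActivity

    hook = WebHookActivity(
        name="AwaitExternalApproval",  # illustrative
        url="https://example.invalid/api/approval",  # placeholder endpoint
        timeout="0.00:20:00",  # wait up to 20 minutes for the callback
        body='{"runId": "@{pipeline().RunId}"}',
    )
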
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity_py3.py
deleted file mode 100644
index 40cdc6f732da..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity_py3.py
+++ /dev/null
@@ -1,92 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .control_activity_py3 import ControlActivity
-
-
-class WebHookActivity(ControlActivity):
- """WebHook activity.
-
- Variables are only populated by the server, and will be ignored when
- sending a request.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param name: Required. Activity name.
- :type name: str
- :param description: Activity description.
- :type description: str
- :param depends_on: Activity depends on condition.
- :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
- :param user_properties: Activity user properties.
- :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
- :param type: Required. Constant filled by server.
- :type type: str
- :ivar method: Required. Rest API method for target endpoint. Default
- value: "POST" .
- :vartype method: str
- :param url: Required. WebHook activity target endpoint and path. Type:
- string (or Expression with resultType string).
- :type url: object
- :param timeout: The timeout within which the webhook should be called
- back. If there is no value specified, it defaults to 10 minutes. Type:
- string. Pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type timeout: str
- :param headers: Represents the headers that will be sent to the request.
- For example, to set the language and type on a request: "headers" : {
- "Accept-Language": "en-us", "Content-Type": "application/json" }. Type:
- string (or Expression with resultType string).
- :type headers: object
- :param body: Represents the payload that will be sent to the endpoint.
- Required for POST/PUT method, not allowed for GET method Type: string (or
- Expression with resultType string).
- :type body: object
- :param authentication: Authentication method used for calling the
- endpoint.
- :type authentication:
- ~azure.mgmt.datafactory.models.WebActivityAuthentication
- """
-
- _validation = {
- 'name': {'required': True},
- 'type': {'required': True},
- 'method': {'required': True, 'constant': True},
- 'url': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'name': {'key': 'name', 'type': 'str'},
- 'description': {'key': 'description', 'type': 'str'},
- 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
- 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'method': {'key': 'typeProperties.method', 'type': 'str'},
- 'url': {'key': 'typeProperties.url', 'type': 'object'},
- 'timeout': {'key': 'typeProperties.timeout', 'type': 'str'},
- 'headers': {'key': 'typeProperties.headers', 'type': 'object'},
- 'body': {'key': 'typeProperties.body', 'type': 'object'},
- 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'},
- }
-
- method = "POST"
-
- def __init__(self, *, name: str, url, additional_properties=None, description: str=None, depends_on=None, user_properties=None, timeout: str=None, headers=None, body=None, authentication=None, **kwargs) -> None:
- super(WebHookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs)
- self.url = url
- self.timeout = timeout
- self.headers = headers
- self.body = body
- self.authentication = authentication
- self.type = 'WebHook'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service.py
deleted file mode 100644
index 18fadba3f3ee..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .linked_service import LinkedService
-
-
-class WebLinkedService(LinkedService):
- """Web linked service.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param connect_via: The integration runtime reference.
- :type connect_via:
- ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
- :param description: Linked service description.
- :type description: str
- :param parameters: Parameters for linked service.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- linked service.
- :type annotations: list[object]
- :param type: Required. Constant filled by server.
- :type type: str
- :param type_properties: Required. Web linked service properties.
- :type type_properties:
- ~azure.mgmt.datafactory.models.WebLinkedServiceTypeProperties
- """
-
- _validation = {
- 'type': {'required': True},
- 'type_properties': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
- 'description': {'key': 'description', 'type': 'str'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'type_properties': {'key': 'typeProperties', 'type': 'WebLinkedServiceTypeProperties'},
- }
-
- def __init__(self, **kwargs):
- super(WebLinkedService, self).__init__(**kwargs)
- self.type_properties = kwargs.get('type_properties', None)
- self.type = 'Web'
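
Because the typeProperties of the WebLinkedService ending above stay
unflattened (they are polymorphic on authenticationType, as the
WebLinkedServiceTypeProperties base class below notes), a caller builds one
of the concrete authentication classes and passes it whole. A sketch using
basic authentication (URL and credentials are placeholders):

    from azure.mgmt.datafactory.models import (
        SecureString, WebBasicAuthentication, WebLinkedService)

    props = WebBasicAuthentication(
        url="https://example.invalid/data",  # placeholder endpoint
        username="svc-user",  # placeholder
        password=SecureString(value="not-a-real-secret"),  # placeholder
    )
    linked = WebLinkedService(type_properties=props)
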
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_py3.py
deleted file mode 100644
index 3e491b0fac4d..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_py3.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .linked_service_py3 import LinkedService
-
-
-class WebLinkedService(LinkedService):
- """Web linked service.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param connect_via: The integration runtime reference.
- :type connect_via:
- ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
- :param description: Linked service description.
- :type description: str
- :param parameters: Parameters for linked service.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- linked service.
- :type annotations: list[object]
- :param type: Required. Constant filled by server.
- :type type: str
- :param type_properties: Required. Web linked service properties.
- :type type_properties:
- ~azure.mgmt.datafactory.models.WebLinkedServiceTypeProperties
- """
-
- _validation = {
- 'type': {'required': True},
- 'type_properties': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
- 'description': {'key': 'description', 'type': 'str'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'type_properties': {'key': 'typeProperties', 'type': 'WebLinkedServiceTypeProperties'},
- }
-
- def __init__(self, *, type_properties, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None:
- super(WebLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
- self.type_properties = type_properties
- self.type = 'Web'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties.py
deleted file mode 100644
index 22290e80b19f..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class WebLinkedServiceTypeProperties(Model):
- """Base definition of WebLinkedServiceTypeProperties, this typeProperties is
- polymorphic based on authenticationType, so not flattened in SDK models.
-
- You probably want to use the sub-classes and not this class directly. Known
- sub-classes are: WebClientCertificateAuthentication,
- WebBasicAuthentication, WebAnonymousAuthentication
-
- All required parameters must be populated in order to send to Azure.
-
- :param url: Required. The URL of the web service endpoint, e.g.
- http://www.microsoft.com . Type: string (or Expression with resultType
- string).
- :type url: object
- :param authentication_type: Required. Constant filled by server.
- :type authentication_type: str - """ - - _validation = { - 'url': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'url': {'key': 'url', 'type': 'object'}, - 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, - } - - _subtype_map = { - 'authentication_type': {'ClientCertificate': 'WebClientCertificateAuthentication', 'Basic': 'WebBasicAuthentication', 'Anonymous': 'WebAnonymousAuthentication'} - } - - def __init__(self, **kwargs): - super(WebLinkedServiceTypeProperties, self).__init__(**kwargs) - self.url = kwargs.get('url', None) - self.authentication_type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties_py3.py deleted file mode 100644 index 1c162c2f1004..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties_py3.py +++ /dev/null @@ -1,50 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class WebLinkedServiceTypeProperties(Model): - """Base definition of WebLinkedServiceTypeProperties, this typeProperties is - polymorphic based on authenticationType, so not flattened in SDK models. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: WebClientCertificateAuthentication, - WebBasicAuthentication, WebAnonymousAuthentication - - All required parameters must be populated in order to send to Azure. - - :param url: Required. The URL of the web service endpoint, e.g. - http://www.microsoft.com . Type: string (or Expression with resultType - string). - :type url: object - :param authentication_type: Required. Constant filled by server. - :type authentication_type: str - """ - - _validation = { - 'url': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'url': {'key': 'url', 'type': 'object'}, - 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, - } - - _subtype_map = { - 'authentication_type': {'ClientCertificate': 'WebClientCertificateAuthentication', 'Basic': 'WebBasicAuthentication', 'Anonymous': 'WebAnonymousAuthentication'} - } - - def __init__(self, *, url, **kwargs) -> None: - super(WebLinkedServiceTypeProperties, self).__init__(**kwargs) - self.url = url - self.authentication_type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source.py deleted file mode 100644 index c5d3a2a8f00a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source.py +++ /dev/null @@ -1,52 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
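The _subtype_map shown just above is what makes typeProperties polymorphic: on deserialization, msrest reads the authenticationType discriminator from the payload and instantiates the matching subclass, which is why the base class leaves authentication_type as None and each subclass fills in its constant. A minimal construction sketch under that model (it assumes WebBasicAuthentication keeps the url/username/password signature and that SecureString is the usual SecretBase implementation; all values are placeholders, not taken from this patch):

    from azure.mgmt.datafactory.models import (
        WebLinkedService,
        WebBasicAuthentication,
        SecureString,
    )

    # The subclass carries the discriminator, so serialization emits
    # authenticationType='Basic' without the caller ever setting it.
    props = WebBasicAuthentication(
        url='https://example.com/api',            # placeholder endpoint
        username='svc-user',                      # placeholder
        password=SecureString(value='<secret>'))  # placeholder secret
    web_ls = WebLinkedService(type_properties=props)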
-# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class WebSource(CopySource): - """A copy activity source for web page table. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(WebSource, self).__init__(**kwargs) - self.type = 'WebSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source_py3.py deleted file mode 100644 index 684e1d4233cc..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source_py3.py +++ /dev/null @@ -1,52 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class WebSource(CopySource): - """A copy activity source for web page table. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: - super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type = 'WebSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset.py deleted file mode 100644 index 3980fe3d885a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset.py +++ /dev/null @@ -1,78 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class WebTableDataset(Dataset): - """The dataset points to a HTML table in the web page. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param index: Required. The zero-based index of the table in the web page. - Type: integer (or Expression with resultType integer), minimum: 0. - :type index: object - :param path: The relative URL to the web page from the linked service URL. - Type: string (or Expression with resultType string). 
- :type path: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'index': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'index': {'key': 'typeProperties.index', 'type': 'object'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(WebTableDataset, self).__init__(**kwargs) - self.index = kwargs.get('index', None) - self.path = kwargs.get('path', None) - self.type = 'WebTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset_py3.py deleted file mode 100644 index edb2344c35d2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset_py3.py +++ /dev/null @@ -1,78 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class WebTableDataset(Dataset): - """The dataset points to a HTML table in the web page. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param index: Required. The zero-based index of the table in the web page. - Type: integer (or Expression with resultType integer), minimum: 0. 
- :type index: object - :param path: The relative URL to the web page from the linked service URL. - Type: string (or Expression with resultType string). - :type path: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'index': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'index': {'key': 'typeProperties.index', 'type': 'object'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, index, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, path=None, **kwargs) -> None: - super(WebTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.index = index - self.path = path - self.type = 'WebTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service.py deleted file mode 100644 index 24973f577133..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service.py +++ /dev/null @@ -1,93 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class XeroLinkedService(LinkedService): - """Xero Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The endpoint of the Xero server. (i.e. - api.xero.com) - :type host: object - :param consumer_key: The consumer key associated with the Xero - application. 
- :type consumer_key: ~azure.mgmt.datafactory.models.SecretBase
- :param private_key: The private key from the .pem file that was generated
- for your Xero private application. You must include all the text from the
- .pem file, including the Unix line endings (\n).
- :type private_key: ~azure.mgmt.datafactory.models.SecretBase
- :param use_encrypted_endpoints: Specifies whether the data source
- endpoints are encrypted using HTTPS. The default value is true.
- :type use_encrypted_endpoints: object
- :param use_host_verification: Specifies whether to require the host name
- in the server's certificate to match the host name of the server when
- connecting over SSL. The default value is true.
- :type use_host_verification: object
- :param use_peer_verification: Specifies whether to verify the identity of
- the server when connecting over SSL. The default value is true.
- :type use_peer_verification: object
- :param encrypted_credential: The encrypted credential used for
- authentication. Credentials are encrypted using the integration runtime
- credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object
- """
-
- _validation = {
- 'type': {'required': True},
- 'host': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
- 'description': {'key': 'description', 'type': 'str'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'host': {'key': 'typeProperties.host', 'type': 'object'},
- 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'SecretBase'},
- 'private_key': {'key': 'typeProperties.privateKey', 'type': 'SecretBase'},
- 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
- 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
- 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
- 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
- }
-
- def __init__(self, **kwargs):
- super(XeroLinkedService, self).__init__(**kwargs)
- self.host = kwargs.get('host', None)
- self.consumer_key = kwargs.get('consumer_key', None)
- self.private_key = kwargs.get('private_key', None)
- self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None)
- self.use_host_verification = kwargs.get('use_host_verification', None)
- self.use_peer_verification = kwargs.get('use_peer_verification', None)
- self.encrypted_credential = kwargs.get('encrypted_credential', None)
- self.type = 'Xero'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service_py3.py
deleted file mode 100644
index 433c65ade739..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service_py3.py
+++ /dev/null
@@ -1,93 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
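The 'typeProperties.host' style keys in the _attribute_map just above drive msrest's attribute flattening: the model exposes flat Python attributes while the wire format nests them under typeProperties. A rough sketch of the round trip (the JSON shape is illustrative, reconstructed from the map rather than captured from SDK output):

    from azure.mgmt.datafactory.models import XeroLinkedService

    ls = XeroLinkedService(host='api.xero.com')
    # Serializes to approximately:
    # {
    #     "type": "Xero",
    #     "typeProperties": {"host": "api.xero.com"}
    # }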
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------

-from .linked_service_py3 import LinkedService


-class XeroLinkedService(LinkedService):
- """Xero Service linked service.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param connect_via: The integration runtime reference.
- :type connect_via:
- ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
- :param description: Linked service description.
- :type description: str
- :param parameters: Parameters for linked service.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- linked service.
- :type annotations: list[object]
- :param type: Required. Constant filled by server.
- :type type: str
- :param host: Required. The endpoint of the Xero server. (i.e.
- api.xero.com)
- :type host: object
- :param consumer_key: The consumer key associated with the Xero
- application.
- :type consumer_key: ~azure.mgmt.datafactory.models.SecretBase
- :param private_key: The private key from the .pem file that was generated
- for your Xero private application. You must include all the text from the
- .pem file, including the Unix line endings (\n).
- :type private_key: ~azure.mgmt.datafactory.models.SecretBase
- :param use_encrypted_endpoints: Specifies whether the data source
- endpoints are encrypted using HTTPS. The default value is true.
- :type use_encrypted_endpoints: object
- :param use_host_verification: Specifies whether to require the host name
- in the server's certificate to match the host name of the server when
- connecting over SSL. The default value is true.
- :type use_host_verification: object
- :param use_peer_verification: Specifies whether to verify the identity of
- the server when connecting over SSL. The default value is true.
- :type use_peer_verification: object
- :param encrypted_credential: The encrypted credential used for
- authentication. Credentials are encrypted using the integration runtime
- credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'SecretBase'}, - 'private_key': {'key': 'typeProperties.privateKey', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, consumer_key=None, private_key=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(XeroLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.consumer_key = consumer_key - self.private_key = private_key - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'Xero' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset.py deleted file mode 100644 index 53c5edd44cec..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class XeroObjectDataset(Dataset): - """Xero Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. 
- :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(XeroObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'XeroObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset_py3.py deleted file mode 100644 index 673d41e1771e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class XeroObjectDataset(Dataset): - """Xero Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(XeroObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'XeroObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source.py deleted file mode 100644 index a37852a5b419..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class XeroSource(CopySource): - """A copy activity Xero Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). 
- :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(XeroSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'XeroSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source_py3.py deleted file mode 100644 index bbee6c6fa1f0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class XeroSource(CopySource): - """A copy activity Xero Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'XeroSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service.py deleted file mode 100644 index fe34dff77ea9..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service.py +++ /dev/null @@ -1,85 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service import LinkedService - - -class ZohoLinkedService(LinkedService): - """Zoho server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param endpoint: Required. The endpoint of the Zoho server. (i.e. - crm.zoho.com/crm/private) - :type endpoint: object - :param access_token: The access token for Zoho authentication. - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. 
- :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ZohoLinkedService, self).__init__(**kwargs) - self.endpoint = kwargs.get('endpoint', None) - self.access_token = kwargs.get('access_token', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Zoho' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service_py3.py deleted file mode 100644 index f82f6221592b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service_py3.py +++ /dev/null @@ -1,85 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class ZohoLinkedService(LinkedService): - """Zoho server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. 
Constant filled by server. - :type type: str - :param endpoint: Required. The endpoint of the Zoho server. (i.e. - crm.zoho.com/crm/private) - :type endpoint: object - :param access_token: The access token for Zoho authentication. - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, endpoint, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(ZohoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.endpoint = endpoint - self.access_token = access_token - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'Zoho' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset.py deleted file mode 100644 index 062d508860a6..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset import Dataset - - -class ZohoObjectDataset(Dataset): - """Zoho server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ZohoObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'ZohoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset_py3.py deleted file mode 100644 index ef5a67d4fe35..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class ZohoObjectDataset(Dataset): - """Zoho server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(ZohoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'ZohoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source.py deleted file mode 100644 index 274c6fc09f19..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
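All of these generated object datasets share one construction pattern: linked_service_name is the only required argument beyond the server-filled type, passed as a LinkedServiceReference, while table_name is an optional string-or-Expression. A hedged sketch using the Zoho dataset above (the reference name is invented for illustration, and the LinkedServiceReference signature is assumed from its usage elsewhere in the package):

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        ZohoObjectDataset,
    )

    ds = ZohoObjectDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='ZohoLinkedService1'),  # hypothetical name
        table_name='Accounts')                     # optional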
-# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source import CopySource - - -class ZohoSource(CopySource): - """A copy activity Zoho server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ZohoSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'ZohoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source_py3.py deleted file mode 100644 index 6d7dc29bdf8a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class ZohoSource(CopySource): - """A copy activity Zoho server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'ZohoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py index 826179f5fb63..013675bd0200 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py @@ -9,20 +9,20 @@ # regenerated. 
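The matching ZohoSource (both the Python 2 and the keyword-only Python 3 variant are deleted in this move) carries one type-specific field, query, on top of the common CopySource retry settings. A minimal sketch with a placeholder query:

from azure.mgmt.datafactory.models import ZohoSource

# query accepts a string or an ADF expression with resultType string.
source = ZohoSource(query="SELECT * FROM Accounts")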
# -------------------------------------------------------------------------- -from .operations import Operations -from .factories_operations import FactoriesOperations -from .exposure_control_operations import ExposureControlOperations -from .integration_runtimes_operations import IntegrationRuntimesOperations -from .integration_runtime_object_metadata_operations import IntegrationRuntimeObjectMetadataOperations -from .integration_runtime_nodes_operations import IntegrationRuntimeNodesOperations -from .linked_services_operations import LinkedServicesOperations -from .datasets_operations import DatasetsOperations -from .pipelines_operations import PipelinesOperations -from .pipeline_runs_operations import PipelineRunsOperations -from .activity_runs_operations import ActivityRunsOperations -from .triggers_operations import TriggersOperations -from .trigger_runs_operations import TriggerRunsOperations -from .rerun_triggers_operations import RerunTriggersOperations +from ._operations import Operations +from ._factories_operations import FactoriesOperations +from ._exposure_control_operations import ExposureControlOperations +from ._integration_runtimes_operations import IntegrationRuntimesOperations +from ._integration_runtime_object_metadata_operations import IntegrationRuntimeObjectMetadataOperations +from ._integration_runtime_nodes_operations import IntegrationRuntimeNodesOperations +from ._linked_services_operations import LinkedServicesOperations +from ._datasets_operations import DatasetsOperations +from ._pipelines_operations import PipelinesOperations +from ._pipeline_runs_operations import PipelineRunsOperations +from ._activity_runs_operations import ActivityRunsOperations +from ._triggers_operations import TriggersOperations +from ._trigger_runs_operations import TriggerRunsOperations +from ._rerun_triggers_operations import RerunTriggersOperations __all__ = [ 'Operations', diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/activity_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_activity_runs_operations.py similarity index 97% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/activity_runs_operations.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_activity_runs_operations.py index f338a1a9c835..4d9d0775cb0f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/activity_runs_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_activity_runs_operations.py @@ -19,6 +19,8 @@ class ActivityRunsOperations(object): """ActivityRunsOperations operations. + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. 
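The import rewrite above moves every operations module behind an underscore prefix while the package's __init__ keeps re-exporting the same public class names, so callers that import from the package namespace are unaffected. A short sketch of the supported import path under that assumption:

# Public, supported import path -- unchanged by the rename:
from azure.mgmt.datafactory.operations import DatasetsOperations

# The module that actually defines the class is now private:
#   azure.mgmt.datafactory.operations._datasets_operations
# Importing it directly still works today, but the underscore marks it
# as an internal detail that may change between releases.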
@@ -98,7 +100,6 @@ def query_by_pipeline_run( raise exp deserialized = None - if response.status_code == 200: deserialized = self._deserialize('ActivityRunsQueryResponse', response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/datasets_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_datasets_operations.py similarity index 97% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/datasets_operations.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_datasets_operations.py index 278815d03479..89feb52cc2cd 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/datasets_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_datasets_operations.py @@ -19,6 +19,8 @@ class DatasetsOperations(object): """DatasetsOperations operations. + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -55,8 +57,7 @@ def list_by_factory( ~azure.mgmt.datafactory.models.DatasetResourcePaged[~azure.mgmt.datafactory.models.DatasetResource] :raises: :class:`CloudError` """ - def internal_paging(next_link=None, raw=False): - + def prepare_request(next_link=None): if not next_link: # Construct URL url = self.list_by_factory.metadata['url'] @@ -87,6 +88,11 @@ def internal_paging(next_link=None, raw=False): # Construct and send request request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: @@ -97,12 +103,10 @@ def internal_paging(next_link=None, raw=False): return response # Deserialize response - deserialized = models.DatasetResourcePaged(internal_paging, self._deserialize.dependencies) - + header_dict = None if raw: header_dict = {} - client_raw_response = models.DatasetResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) - return client_raw_response + deserialized = models.DatasetResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) return deserialized list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets'} @@ -175,7 +179,6 @@ def create_or_update( raise exp deserialized = None - if response.status_code == 200: deserialized = self._deserialize('DatasetResource', response) @@ -246,7 +249,6 @@ def get( raise exp deserialized = None - if response.status_code == 200: deserialized = self._deserialize('DatasetResource', response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/exposure_control_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_exposure_control_operations.py similarity index 98% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/exposure_control_operations.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_exposure_control_operations.py index 4a648d96586c..443a826821a5 100644 --- 
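The list_by_factory change above is the same mechanical refactor applied to every paged operation in this patch: the request-building half of internal_paging is hoisted into a prepare_request helper, and the Paged result is always built with a header_dict argument (left as None unless raw output was requested) instead of wrapping a separate client_raw_response. A stripped-down sketch of that shape with the transport stubbed out; make_pager, send, and page_factory are illustrative names, not SDK API:

def make_pager(send, first_url, page_factory, raw=False):
    """Sketch of the refactored paging shape (stubbed transport)."""

    def prepare_request(next_link=None):
        # First call targets the operation URL; subsequent calls follow
        # the server-provided next_link.
        return next_link or first_url

    def internal_paging(next_link=None):
        request = prepare_request(next_link)
        response = send(request)
        if response.status_code not in [200]:
            raise RuntimeError("unexpected status: %s" % response.status_code)
        return response

    # header_dict stays None unless raw output was requested; either way it
    # is passed straight into the Paged object rather than into a separate
    # raw-response wrapper.
    header_dict = {} if raw else None
    return page_factory(internal_paging, header_dict)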
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/exposure_control_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_exposure_control_operations.py @@ -19,6 +19,8 @@ class ExposureControlOperations(object): """ExposureControlOperations operations. + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -95,7 +97,6 @@ def get_feature_value( raise exp deserialized = None - if response.status_code == 200: deserialized = self._deserialize('ExposureControlResponse', response) @@ -167,7 +168,6 @@ def get_feature_value_by_factory( raise exp deserialized = None - if response.status_code == 200: deserialized = self._deserialize('ExposureControlResponse', response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/factories_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_factories_operations.py similarity index 98% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/factories_operations.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_factories_operations.py index b06c12f3e8c5..828834a91c49 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/factories_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_factories_operations.py @@ -19,6 +19,8 @@ class FactoriesOperations(object): """FactoriesOperations operations. + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. 
@@ -51,8 +53,7 @@ def list( ~azure.mgmt.datafactory.models.FactoryPaged[~azure.mgmt.datafactory.models.Factory] :raises: :class:`CloudError` """ - def internal_paging(next_link=None, raw=False): - + def prepare_request(next_link=None): if not next_link: # Construct URL url = self.list.metadata['url'] @@ -81,6 +82,11 @@ def internal_paging(next_link=None, raw=False): # Construct and send request request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: @@ -91,12 +97,10 @@ def internal_paging(next_link=None, raw=False): return response # Deserialize response - deserialized = models.FactoryPaged(internal_paging, self._deserialize.dependencies) - + header_dict = None if raw: header_dict = {} - client_raw_response = models.FactoryPaged(internal_paging, self._deserialize.dependencies, header_dict) - return client_raw_response + deserialized = models.FactoryPaged(internal_paging, self._deserialize.dependencies, header_dict) return deserialized list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/factories'} @@ -160,7 +164,6 @@ def configure_factory_repo( raise exp deserialized = None - if response.status_code == 200: deserialized = self._deserialize('Factory', response) @@ -187,8 +190,7 @@ def list_by_resource_group( ~azure.mgmt.datafactory.models.FactoryPaged[~azure.mgmt.datafactory.models.Factory] :raises: :class:`CloudError` """ - def internal_paging(next_link=None, raw=False): - + def prepare_request(next_link=None): if not next_link: # Construct URL url = self.list_by_resource_group.metadata['url'] @@ -218,6 +220,11 @@ def internal_paging(next_link=None, raw=False): # Construct and send request request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: @@ -228,12 +235,10 @@ def internal_paging(next_link=None, raw=False): return response # Deserialize response - deserialized = models.FactoryPaged(internal_paging, self._deserialize.dependencies) - + header_dict = None if raw: header_dict = {} - client_raw_response = models.FactoryPaged(internal_paging, self._deserialize.dependencies, header_dict) - return client_raw_response + deserialized = models.FactoryPaged(internal_paging, self._deserialize.dependencies, header_dict) return deserialized list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories'} @@ -301,7 +306,6 @@ def create_or_update( raise exp deserialized = None - if response.status_code == 200: deserialized = self._deserialize('Factory', response) @@ -373,7 +377,6 @@ def update( raise exp deserialized = None - if response.status_code == 200: deserialized = self._deserialize('Factory', response) @@ -441,7 +444,6 @@ def get( raise exp deserialized = None - if response.status_code == 200: deserialized = self._deserialize('Factory', response) @@ -564,7 +566,6 @@ def get_git_hub_access_token( raise exp deserialized = None - if response.status_code == 200: deserialized = self._deserialize('GitHubAccessTokenResponse', response) @@ -632,7 +633,6 @@ def get_data_plane_access( raise exp deserialized = None - if response.status_code == 
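From the caller's side the common path is unchanged: iterating the returned Paged object drives internal_paging lazily, one request per page. A hedged usage sketch; the credential values, resource group, and subscription id are placeholders:

from azure.common.credentials import ServicePrincipalCredentials
from azure.mgmt.datafactory import DataFactoryManagementClient

credentials = ServicePrincipalCredentials(
    client_id="<app-id>",
    secret="<secret>",
    tenant="<tenant-id>",
)
client = DataFactoryManagementClient(credentials, "<subscription-id>")

# Each new page is fetched with a request built by prepare_request.
for factory in client.factories.list_by_resource_group("<resource-group>"):
    print(factory.name, factory.location)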
200: deserialized = self._deserialize('AccessPolicyResponse', response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_nodes_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_nodes_operations.py similarity index 99% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_nodes_operations.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_nodes_operations.py index 81467b9e3385..870f990c1f10 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_nodes_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_nodes_operations.py @@ -19,6 +19,8 @@ class IntegrationRuntimeNodesOperations(object): """IntegrationRuntimeNodesOperations operations. + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -96,7 +98,6 @@ def get( raise exp deserialized = None - if response.status_code == 200: deserialized = self._deserialize('SelfHostedIntegrationRuntimeNode', response) @@ -235,7 +236,6 @@ def update( raise exp deserialized = None - if response.status_code == 200: deserialized = self._deserialize('SelfHostedIntegrationRuntimeNode', response) @@ -304,7 +304,6 @@ def get_ip_address( raise exp deserialized = None - if response.status_code == 200: deserialized = self._deserialize('IntegrationRuntimeNodeIpAddress', response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_object_metadata_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_object_metadata_operations.py similarity index 98% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_object_metadata_operations.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_object_metadata_operations.py index 230f12d023c3..aa8b795123ef 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_object_metadata_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_object_metadata_operations.py @@ -21,6 +21,8 @@ class IntegrationRuntimeObjectMetadataOperations(object): """IntegrationRuntimeObjectMetadataOperations operations. + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. 
@@ -206,7 +208,6 @@ def get( raise exp deserialized = None - if response.status_code == 200: deserialized = self._deserialize('SsisObjectMetadataListResponse', response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtimes_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtimes_operations.py similarity index 99% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtimes_operations.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtimes_operations.py index 0a64be3b1441..f31eeeb0952f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtimes_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtimes_operations.py @@ -21,6 +21,8 @@ class IntegrationRuntimesOperations(object): """IntegrationRuntimesOperations operations. + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -57,8 +59,7 @@ def list_by_factory( ~azure.mgmt.datafactory.models.IntegrationRuntimeResourcePaged[~azure.mgmt.datafactory.models.IntegrationRuntimeResource] :raises: :class:`CloudError` """ - def internal_paging(next_link=None, raw=False): - + def prepare_request(next_link=None): if not next_link: # Construct URL url = self.list_by_factory.metadata['url'] @@ -89,6 +90,11 @@ def internal_paging(next_link=None, raw=False): # Construct and send request request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: @@ -99,12 +105,10 @@ def internal_paging(next_link=None, raw=False): return response # Deserialize response - deserialized = models.IntegrationRuntimeResourcePaged(internal_paging, self._deserialize.dependencies) - + header_dict = None if raw: header_dict = {} - client_raw_response = models.IntegrationRuntimeResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) - return client_raw_response + deserialized = models.IntegrationRuntimeResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) return deserialized list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes'} @@ -177,7 +181,6 @@ def create_or_update( raise exp deserialized = None - if response.status_code == 200: deserialized = self._deserialize('IntegrationRuntimeResource', response) @@ -248,7 +251,6 @@ def get( raise exp deserialized = None - if response.status_code == 200: deserialized = self._deserialize('IntegrationRuntimeResource', response) @@ -329,7 +331,6 @@ def update( raise exp deserialized = None - if response.status_code == 200: deserialized = self._deserialize('IntegrationRuntimeResource', response) @@ -452,7 +453,6 @@ def get_status( raise exp deserialized = None - if response.status_code == 200: deserialized = self._deserialize('IntegrationRuntimeStatusResponse', response) @@ -520,7 +520,6 @@ def get_connection_info( raise exp deserialized = None - 
if response.status_code == 200: deserialized = self._deserialize('IntegrationRuntimeConnectionInfo', response) @@ -595,7 +594,6 @@ def regenerate_auth_key( raise exp deserialized = None - if response.status_code == 200: deserialized = self._deserialize('IntegrationRuntimeAuthKeys', response) @@ -660,7 +658,6 @@ def list_auth_keys( raise exp deserialized = None - if response.status_code == 200: deserialized = self._deserialize('IntegrationRuntimeAuthKeys', response) @@ -970,7 +967,6 @@ def get_monitoring_data( raise exp deserialized = None - if response.status_code == 200: deserialized = self._deserialize('IntegrationRuntimeMonitoringData', response) @@ -1169,7 +1165,6 @@ def create_linked_integration_runtime( raise exp deserialized = None - if response.status_code == 200: deserialized = self._deserialize('IntegrationRuntimeStatusResponse', response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/linked_services_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_linked_services_operations.py similarity index 97% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/linked_services_operations.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_linked_services_operations.py index e6878336df91..5e7d32bd357a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/linked_services_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_linked_services_operations.py @@ -19,6 +19,8 @@ class LinkedServicesOperations(object): """LinkedServicesOperations operations. + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. 
@@ -55,8 +57,7 @@ def list_by_factory( ~azure.mgmt.datafactory.models.LinkedServiceResourcePaged[~azure.mgmt.datafactory.models.LinkedServiceResource] :raises: :class:`CloudError` """ - def internal_paging(next_link=None, raw=False): - + def prepare_request(next_link=None): if not next_link: # Construct URL url = self.list_by_factory.metadata['url'] @@ -87,6 +88,11 @@ def internal_paging(next_link=None, raw=False): # Construct and send request request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: @@ -97,12 +103,10 @@ def internal_paging(next_link=None, raw=False): return response # Deserialize response - deserialized = models.LinkedServiceResourcePaged(internal_paging, self._deserialize.dependencies) - + header_dict = None if raw: header_dict = {} - client_raw_response = models.LinkedServiceResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) - return client_raw_response + deserialized = models.LinkedServiceResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) return deserialized list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices'} @@ -175,7 +179,6 @@ def create_or_update( raise exp deserialized = None - if response.status_code == 200: deserialized = self._deserialize('LinkedServiceResource', response) @@ -246,7 +249,6 @@ def get( raise exp deserialized = None - if response.status_code == 200: deserialized = self._deserialize('LinkedServiceResource', response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_operations.py similarity index 90% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/operations.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_operations.py index 2273e12d5ada..2363a74cd143 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_operations.py @@ -19,6 +19,8 @@ class Operations(object): """Operations operations. + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. 
@@ -51,8 +53,7 @@ def list( ~azure.mgmt.datafactory.models.OperationPaged[~azure.mgmt.datafactory.models.Operation] :raises: :class:`CloudError` """ - def internal_paging(next_link=None, raw=False): - + def prepare_request(next_link=None): if not next_link: # Construct URL url = self.list.metadata['url'] @@ -77,6 +78,11 @@ def internal_paging(next_link=None, raw=False): # Construct and send request request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: @@ -87,12 +93,10 @@ def internal_paging(next_link=None, raw=False): return response # Deserialize response - deserialized = models.OperationPaged(internal_paging, self._deserialize.dependencies) - + header_dict = None if raw: header_dict = {} - client_raw_response = models.OperationPaged(internal_paging, self._deserialize.dependencies, header_dict) - return client_raw_response + deserialized = models.OperationPaged(internal_paging, self._deserialize.dependencies, header_dict) return deserialized list.metadata = {'url': '/providers/Microsoft.DataFactory/operations'} diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipeline_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipeline_runs_operations.py similarity index 98% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipeline_runs_operations.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipeline_runs_operations.py index de8744612d20..4fe443938ef5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipeline_runs_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipeline_runs_operations.py @@ -19,6 +19,8 @@ class PipelineRunsOperations(object): """PipelineRunsOperations operations. + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -95,7 +97,6 @@ def query_by_factory( raise exp deserialized = None - if response.status_code == 200: deserialized = self._deserialize('PipelineRunsQueryResponse', response) @@ -160,7 +161,6 @@ def get( raise exp deserialized = None - if response.status_code == 200: deserialized = self._deserialize('PipelineRun', response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipelines_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py similarity index 98% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipelines_operations.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py index 343396e705ac..00201749beee 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipelines_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py @@ -19,6 +19,8 @@ class PipelinesOperations(object): """PipelinesOperations operations. 
+ You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -55,8 +57,7 @@ def list_by_factory( ~azure.mgmt.datafactory.models.PipelineResourcePaged[~azure.mgmt.datafactory.models.PipelineResource] :raises: :class:`CloudError` """ - def internal_paging(next_link=None, raw=False): - + def prepare_request(next_link=None): if not next_link: # Construct URL url = self.list_by_factory.metadata['url'] @@ -87,6 +88,11 @@ def internal_paging(next_link=None, raw=False): # Construct and send request request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: @@ -97,12 +103,10 @@ def internal_paging(next_link=None, raw=False): return response # Deserialize response - deserialized = models.PipelineResourcePaged(internal_paging, self._deserialize.dependencies) - + header_dict = None if raw: header_dict = {} - client_raw_response = models.PipelineResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) - return client_raw_response + deserialized = models.PipelineResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) return deserialized list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines'} @@ -173,7 +177,6 @@ def create_or_update( raise exp deserialized = None - if response.status_code == 200: deserialized = self._deserialize('PipelineResource', response) @@ -244,7 +247,6 @@ def get( raise exp deserialized = None - if response.status_code == 200: deserialized = self._deserialize('PipelineResource', response) @@ -392,7 +394,6 @@ def create_run( raise exp deserialized = None - if response.status_code == 200: deserialized = self._deserialize('CreateRunResponse', response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/rerun_triggers_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_rerun_triggers_operations.py similarity index 98% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/rerun_triggers_operations.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_rerun_triggers_operations.py index 58e0066a60dd..6d5f8e9831de 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/rerun_triggers_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_rerun_triggers_operations.py @@ -21,6 +21,8 @@ class RerunTriggersOperations(object): """RerunTriggersOperations operations. + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. 
@@ -104,7 +106,6 @@ def create( raise exp deserialized = None - if response.status_code == 200: deserialized = self._deserialize('TriggerResource', response) @@ -396,8 +397,7 @@ def list_by_trigger( ~azure.mgmt.datafactory.models.RerunTriggerResourcePaged[~azure.mgmt.datafactory.models.RerunTriggerResource] :raises: :class:`CloudError` """ - def internal_paging(next_link=None, raw=False): - + def prepare_request(next_link=None): if not next_link: # Construct URL url = self.list_by_trigger.metadata['url'] @@ -429,6 +429,11 @@ def internal_paging(next_link=None, raw=False): # Construct and send request request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: @@ -439,12 +444,10 @@ def internal_paging(next_link=None, raw=False): return response # Deserialize response - deserialized = models.RerunTriggerResourcePaged(internal_paging, self._deserialize.dependencies) - + header_dict = None if raw: header_dict = {} - client_raw_response = models.RerunTriggerResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) - return client_raw_response + deserialized = models.RerunTriggerResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) return deserialized list_by_trigger.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/rerunTriggers'} diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/trigger_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_trigger_runs_operations.py similarity index 98% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/trigger_runs_operations.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_trigger_runs_operations.py index e4e4774ae3bc..6cbf968008a1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/trigger_runs_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_trigger_runs_operations.py @@ -19,6 +19,8 @@ class TriggerRunsOperations(object): """TriggerRunsOperations operations. + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. 
@@ -154,7 +156,6 @@ def query_by_factory( raise exp deserialized = None - if response.status_code == 200: deserialized = self._deserialize('TriggerRunsQueryResponse', response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/triggers_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py similarity index 98% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/triggers_operations.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py index f80cfcb2870b..caeda2fcdc91 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/triggers_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py @@ -21,6 +21,8 @@ class TriggersOperations(object): """TriggersOperations operations. + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -57,8 +59,7 @@ def list_by_factory( ~azure.mgmt.datafactory.models.TriggerResourcePaged[~azure.mgmt.datafactory.models.TriggerResource] :raises: :class:`CloudError` """ - def internal_paging(next_link=None, raw=False): - + def prepare_request(next_link=None): if not next_link: # Construct URL url = self.list_by_factory.metadata['url'] @@ -89,6 +90,11 @@ def internal_paging(next_link=None, raw=False): # Construct and send request request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: @@ -99,12 +105,10 @@ def internal_paging(next_link=None, raw=False): return response # Deserialize response - deserialized = models.TriggerResourcePaged(internal_paging, self._deserialize.dependencies) - + header_dict = None if raw: header_dict = {} - client_raw_response = models.TriggerResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) - return client_raw_response + deserialized = models.TriggerResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) return deserialized list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers'} @@ -177,7 +181,6 @@ def create_or_update( raise exp deserialized = None - if response.status_code == 200: deserialized = self._deserialize('TriggerResource', response) @@ -248,7 +251,6 @@ def get( raise exp deserialized = None - if response.status_code == 200: deserialized = self._deserialize('TriggerResource', response) From e41dd17fa9c57bd7b92ffc9ad3c6d294c410379c Mon Sep 17 00:00:00 2001 From: Azure SDK for Python bot Date: Wed, 14 Aug 2019 14:28:37 -0700 Subject: [PATCH 20/30] Generated from d22072afd73683450b42a2d626e10013330ab31b (#6795) event triggers subcription apis --- .../azure/mgmt/datafactory/models/__init__.py | 5 + .../_data_factory_management_client_enums.py | 9 + .../azure/mgmt/datafactory/models/_models.py | 191 +++++++++++ .../mgmt/datafactory/models/_models_py3.py | 317 ++++++++++++++---- .../operations/_triggers_operations.py | 262 +++++++++++++++ 5 files changed, 721 insertions(+), 63 
deletions(-) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index aae612d71bb6..b76d84d11b17 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -491,6 +491,7 @@ from ._models_py3 import TriggerResource from ._models_py3 import TriggerRun from ._models_py3 import TriggerRunsQueryResponse + from ._models_py3 import TriggerSubscriptionOperationStatus from ._models_py3 import TumblingWindowTrigger from ._models_py3 import TumblingWindowTriggerDependencyReference from ._models_py3 import UntilActivity @@ -1002,6 +1003,7 @@ from ._models import TriggerResource from ._models import TriggerRun from ._models import TriggerRunsQueryResponse + from ._models import TriggerSubscriptionOperationStatus from ._models import TumblingWindowTrigger from ._models import TumblingWindowTriggerDependencyReference from ._models import UntilActivity @@ -1046,6 +1048,7 @@ DependencyCondition, VariableType, TriggerRuntimeState, + EventSubscriptionStatus, RunQueryFilterOperand, RunQueryFilterOperator, RunQueryOrderByField, @@ -1594,6 +1597,7 @@ 'TriggerResource', 'TriggerRun', 'TriggerRunsQueryResponse', + 'TriggerSubscriptionOperationStatus', 'TumblingWindowTrigger', 'TumblingWindowTriggerDependencyReference', 'UntilActivity', @@ -1637,6 +1641,7 @@ 'DependencyCondition', 'VariableType', 'TriggerRuntimeState', + 'EventSubscriptionStatus', 'RunQueryFilterOperand', 'RunQueryFilterOperator', 'RunQueryOrderByField', diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py index 45448073f831..eda785276fdb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py @@ -65,6 +65,15 @@ class TriggerRuntimeState(str, Enum): disabled = "Disabled" +class EventSubscriptionStatus(str, Enum): + + enabled = "Enabled" + provisioning = "Provisioning" + deprovisioning = "Deprovisioning" + disabled = "Disabled" + unknown = "Unknown" + + class RunQueryFilterOperand(str, Enum): pipeline_name = "PipelineName" diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py index bbb7b343f03d..b9f2574d0dc2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py @@ -1565,6 +1565,10 @@ class CopySink(Model): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object :param type: Required. Constant filled by server. 
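The second commit in this range wires up event-trigger subscription support: a new TriggerSubscriptionOperationStatus model, additional trigger operations, and the EventSubscriptionStatus enum above, which tracks the Event Grid subscription lifecycle. A small consumption sketch; the status attribute on the operation-status model is an assumption, since the model body is not shown in these hunks:

from azure.mgmt.datafactory.models import EventSubscriptionStatus

def is_ready(status):
    # `status` is assumed to be the status field of a
    # TriggerSubscriptionOperationStatus; only the enum itself is
    # visible in this patch.
    return status == EventSubscriptionStatus.enabled

# Transitional states while the Event Grid wiring is in flight:
in_flight = {
    EventSubscriptionStatus.provisioning,
    EventSubscriptionStatus.deprovisioning,
}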
:type type: str """ @@ -1580,6 +1584,7 @@ class CopySink(Model): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } @@ -1595,6 +1600,7 @@ def __init__(self, **kwargs): self.sink_retry_count = kwargs.get('sink_retry_count', None) self.sink_retry_wait = kwargs.get('sink_retry_wait', None) self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + self.table_option = kwargs.get('table_option', None) self.type = None @@ -1624,6 +1630,10 @@ class AvroSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object :param type: Required. Constant filled by server. :type type: str :param store_settings: Avro store settings. @@ -1643,6 +1653,7 @@ class AvroSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'}, @@ -2196,6 +2207,10 @@ class AzureBlobFSSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object :param type: Required. Constant filled by server. :type type: str :param copy_behavior: The type of copy behavior for copy sink. @@ -2213,6 +2228,7 @@ class AzureBlobFSSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } @@ -2909,6 +2925,10 @@ class AzureDataExplorerSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object :param type: Required. Constant filled by server. 
:type type: str :param ingestion_mapping_name: A name of a pre-created csv mapping that @@ -2933,6 +2953,7 @@ class AzureDataExplorerSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'}, 'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'}, @@ -3432,6 +3453,10 @@ class AzureDataLakeStoreSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object :param type: Required. Constant filled by server. :type type: str :param copy_behavior: The type of copy behavior for copy sink. @@ -3451,6 +3476,7 @@ class AzureDataLakeStoreSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, @@ -4287,6 +4313,10 @@ class AzureMySqlSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. Type: @@ -4305,6 +4335,7 @@ class AzureMySqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -4504,6 +4535,10 @@ class AzurePostgreSqlSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. 
Type: @@ -4522,6 +4557,7 @@ class AzurePostgreSqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -4675,6 +4711,10 @@ class AzureQueueSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object :param type: Required. Constant filled by server. :type type: str """ @@ -4690,6 +4730,7 @@ class AzureQueueSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } @@ -4785,6 +4826,10 @@ class AzureSearchIndexSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: Specify the write behavior when upserting documents @@ -4804,6 +4849,7 @@ class AzureSearchIndexSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } @@ -5263,6 +5309,10 @@ class AzureSqlSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object :param type: Required. Constant filled by server. :type type: str :param sql_writer_stored_procedure_name: SQL writer stored procedure name. @@ -5294,6 +5344,7 @@ class AzureSqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, @@ -5601,6 +5652,10 @@ class AzureTableSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param table_option: The option to handle sink table, such as autoCreate. 
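Every sink in this hunk picks up the same pass-through property from the CopySink base: tableOption, whose only documented value today is 'autoCreate', presumably asking the service to create the sink table when it is missing. A hedged sketch using AzureSqlSink inside a copy activity; the activity and dataset names are placeholders:

from azure.mgmt.datafactory.models import (
    AzureSqlSink,
    CopyActivity,
    DatasetReference,
    SqlSource,
)

# 'autoCreate' is the only tableOption value the docstrings list today.
sink = AzureSqlSink(table_option="autoCreate")

copy = CopyActivity(
    name="CopyToAzureSql",                                       # placeholder
    source=SqlSource(),
    sink=sink,
    inputs=[DatasetReference(reference_name="InputDataset")],    # placeholder
    outputs=[DatasetReference(reference_name="OutputDataset")],  # placeholder
)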
+ For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object :param type: Required. Constant filled by server. :type type: str :param azure_table_default_partition_key_value: Azure Table default @@ -5628,6 +5683,7 @@ class AzureTableSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'}, 'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'}, @@ -5857,6 +5913,10 @@ class BinarySink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object :param type: Required. Constant filled by server. :type type: str :param store_settings: Binary store settings. @@ -5874,6 +5934,7 @@ class BinarySink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, } @@ -6139,6 +6200,10 @@ class BlobSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object :param type: Required. Constant filled by server. :type type: str :param blob_writer_overwrite_files: Blob writer overwrite files. Type: @@ -6165,6 +6230,7 @@ class BlobSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, @@ -6738,6 +6804,10 @@ class CommonDataServiceForAppsSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object :param type: Required. Constant filled by server. :type type: str :ivar write_behavior: Required. The write behavior for the operation. 
@@ -6761,6 +6831,7 @@ class CommonDataServiceForAppsSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, @@ -7318,6 +7389,10 @@ class CosmosDbMongoDbApiSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: Specifies whether the document with same key to be @@ -7338,6 +7413,7 @@ class CosmosDbMongoDbApiSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } @@ -8845,6 +8921,10 @@ class DelimitedTextSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object :param type: Required. Constant filled by server. :type type: str :param store_settings: DelimitedText store settings. @@ -8865,6 +8945,7 @@ class DelimitedTextSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'}, @@ -9117,6 +9198,10 @@ class DocumentDbCollectionSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object :param type: Required. Constant filled by server. :type type: str :param nesting_separator: Nested properties separator. Default is . (dot). 
@@ -9138,6 +9223,7 @@ class DocumentDbCollectionSink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'},
         'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
@@ -9748,6 +9834,10 @@ class DynamicsCrmSink(CopySink):
      for the sink data store. Type: integer (or Expression with resultType
      integer).
     :type max_concurrent_connections: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :ivar write_behavior: Required. The write behavior for the operation.
@@ -9771,6 +9861,7 @@ class DynamicsCrmSink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
         'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
@@ -10016,6 +10107,10 @@ class DynamicsSink(CopySink):
      for the sink data store. Type: integer (or Expression with resultType
      integer).
     :type max_concurrent_connections: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :ivar write_behavior: Required. The write behavior for the operation.
@@ -10039,6 +10134,7 @@ class DynamicsSink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
         'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
@@ -11194,6 +11290,10 @@ class FileSystemSink(CopySink):
      for the sink data store. Type: integer (or Expression with resultType
      integer).
     :type max_concurrent_connections: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param copy_behavior: The type of copy behavior for copy sink.
@@ -11211,6 +11311,7 @@ class FileSystemSink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
     }
@@ -14590,6 +14691,10 @@ class InformixSink(CopySink):
      for the sink data store. Type: integer (or Expression with resultType
      integer).
     :type max_concurrent_connections: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param pre_copy_script: A query to execute before starting the copy. Type:
@@ -14608,6 +14713,7 @@ class InformixSink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
     }
@@ -16833,6 +16939,10 @@ class MicrosoftAccessSink(CopySink):
      for the sink data store. Type: integer (or Expression with resultType
      integer).
     :type max_concurrent_connections: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param pre_copy_script: A query to execute before starting the copy. Type:
@@ -16851,6 +16961,7 @@ class MicrosoftAccessSink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
     }
@@ -18082,6 +18193,10 @@ class OdbcSink(CopySink):
      for the sink data store. Type: integer (or Expression with resultType
      integer).
     :type max_concurrent_connections: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param pre_copy_script: A query to execute before starting the copy. Type:
@@ -18100,6 +18215,7 @@ class OdbcSink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
     }
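Informix, Microsoft Access, and ODBC sinks all gain the new property next to the existing preCopyScript. A hypothetical pairing of the two (the table name and script are invented, and nothing in this patch specifies how the service orders table creation relative to the pre-copy script):

    from azure.mgmt.datafactory.models import OdbcSink

    # Sketch only: both properties are plain sink settings that the SDK
    # forwards as-is; 'staging_orders' is a made-up table name.
    odbc_sink = OdbcSink(
        pre_copy_script='TRUNCATE TABLE staging_orders',  # runs before the copy
        table_option='autoCreate')                        # new in this change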
@@ -18933,6 +19049,10 @@ class OracleSink(CopySink):
      for the sink data store. Type: integer (or Expression with resultType
      integer).
     :type max_concurrent_connections: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param pre_copy_script: SQL pre-copy script. Type: string (or Expression
@@ -18951,6 +19071,7 @@ class OracleSink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
     }
@@ -19280,6 +19401,10 @@ class ParquetSink(CopySink):
      for the sink data store. Type: integer (or Expression with resultType
      integer).
     :type max_concurrent_connections: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param store_settings: Parquet store settings.
@@ -19297,6 +19422,7 @@ class ParquetSink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
     }
@@ -22112,6 +22238,10 @@ class SalesforceServiceCloudSink(CopySink):
      for the sink data store. Type: integer (or Expression with resultType
      integer).
     :type max_concurrent_connections: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param write_behavior: The write behavior for the operation. Default is
@@ -22144,6 +22274,7 @@ class SalesforceServiceCloudSink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
         'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'},
@@ -22235,6 +22366,10 @@ class SalesforceSink(CopySink):
      for the sink data store. Type: integer (or Expression with resultType
      integer).
     :type max_concurrent_connections: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param write_behavior: The write behavior for the operation. Default is
@@ -22267,6 +22402,7 @@ class SalesforceSink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
         'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'},
@@ -22659,6 +22795,10 @@ class SapCloudForCustomerSink(CopySink):
      for the sink data store. Type: integer (or Expression with resultType
      integer).
     :type max_concurrent_connections: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param write_behavior: The write behavior for the operation. Default is
@@ -22678,6 +22818,7 @@ class SapCloudForCustomerSink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
     }
@@ -24988,6 +25129,10 @@ class SqlDWSink(CopySink):
      for the sink data store. Type: integer (or Expression with resultType
      integer).
     :type max_concurrent_connections: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param pre_copy_script: SQL pre-copy script. Type: string (or Expression
@@ -25013,6 +25158,7 @@ class SqlDWSink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
         'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'},
@@ -25111,6 +25257,10 @@ class SqlMISink(CopySink):
      for the sink data store. Type: integer (or Expression with resultType
      integer).
     :type max_concurrent_connections: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param sql_writer_stored_procedure_name: SQL writer stored procedure name.
@@ -25142,6 +25292,7 @@ class SqlMISink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
         'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
@@ -25309,6 +25460,10 @@ class SqlServerSink(CopySink):
      for the sink data store. Type: integer (or Expression with resultType
      integer).
     :type max_concurrent_connections: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param sql_writer_stored_procedure_name: SQL writer stored procedure name.
@@ -25340,6 +25495,7 @@ class SqlServerSink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
         'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
@@ -25573,6 +25729,10 @@ class SqlSink(CopySink):
      for the sink data store. Type: integer (or Expression with resultType
      integer).
     :type max_concurrent_connections: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param sql_writer_stored_procedure_name: SQL writer stored procedure name.
@@ -25604,6 +25764,7 @@ class SqlSink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
         'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
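Every sink above repeats the same four docstring lines and one attribute-map entry; for callers the net effect is a single new keyword argument. A minimal usage sketch (pipeline wiring and dataset names are invented; only table_option/tableOption comes from this patch):

    from azure.mgmt.datafactory.models import (
        CopyActivity, DatasetReference, SqlServerSource, SqlSink)

    copy_to_sql = CopyActivity(
        name='CopyToSqlAutoCreate',                                  # hypothetical
        inputs=[DatasetReference(reference_name='SourceDataset')],   # hypothetical
        outputs=[DatasetReference(reference_name='SinkDataset')],    # hypothetical
        source=SqlServerSource(),
        # 'autoCreate' is the only supported value for now; an expression
        # resolving to a string is also allowed, per the docstrings above.
        sink=SqlSink(table_option='autoCreate'))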
@@ -27233,6 +27394,36 @@ def __init__(self, **kwargs):
         self.continuation_token = kwargs.get('continuation_token', None)
 
+
+class TriggerSubscriptionOperationStatus(Model):
+    """Defines the response of a trigger subscription operation.
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    :ivar trigger_name: Trigger name.
+    :vartype trigger_name: str
+    :ivar status: Event Subscription Status. Possible values include:
+     'Enabled', 'Provisioning', 'Deprovisioning', 'Disabled', 'Unknown'
+    :vartype status: str or
+     ~azure.mgmt.datafactory.models.EventSubscriptionStatus
+    """
+
+    _validation = {
+        'trigger_name': {'readonly': True},
+        'status': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'trigger_name': {'key': 'triggerName', 'type': 'str'},
+        'status': {'key': 'status', 'type': 'str'},
+    }
+
+    def __init__(self, **kwargs):
+        super(TriggerSubscriptionOperationStatus, self).__init__(**kwargs)
+        self.trigger_name = None
+        self.status = None
+
+
 class TumblingWindowTrigger(Trigger):
     """Trigger that schedules pipeline runs for all fixed time interval windows
     from a start time without gaps and also supports backfill scenarios (when
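TriggerSubscriptionOperationStatus is read-only and only ever populated from a service response; the operations that return it sit outside these hunks. A sketch of how msrest would materialize it from a hypothetical payload, using the same deserializer pattern the generated client uses:

    from msrest import Deserializer

    from azure.mgmt.datafactory import models

    client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
    deserialize = Deserializer(client_models)

    payload = {'triggerName': 'myEventTrigger', 'status': 'Provisioning'}  # invented
    status = deserialize('TriggerSubscriptionOperationStatus', payload)
    assert status.trigger_name == 'myEventTrigger'  # readonly: ignored on requests
    assert status.status == 'Provisioning'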
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py
index fb632f37b204..2417a41c42eb 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py
@@ -1565,6 +1565,10 @@ class CopySink(Model):
      for the sink data store. Type: integer (or Expression with resultType
      integer).
     :type max_concurrent_connections: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     """
@@ -1580,6 +1584,7 @@ class CopySink(Model):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
     }
@@ -1587,7 +1592,7 @@ class CopySink(Model):
         'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'DelimitedTextSink': 'DelimitedTextSink'}
     }
 
-    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None:
+    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, **kwargs) -> None:
         super(CopySink, self).__init__(**kwargs)
         self.additional_properties = additional_properties
         self.write_batch_size = write_batch_size
@@ -1595,6 +1600,7 @@ def __init__(self, *, additional_properties=None, write_batch_size=None, write_b
         self.sink_retry_count = sink_retry_count
         self.sink_retry_wait = sink_retry_wait
         self.max_concurrent_connections = max_concurrent_connections
+        self.table_option = table_option
         self.type = None
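Because CopySink itself now owns table_option, every polymorphic sink accepts the keyword and serializes it under the same tableOption REST key. A quick round-trip sketch (the output shape is inferred from the attribute map above; msrest omits None-valued properties):

    from azure.mgmt.datafactory.models import BlobSink

    sink = BlobSink(table_option='autoCreate')
    print(sink.serialize())
    # expected: {'type': 'BlobSink', 'tableOption': 'autoCreate'}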
@@ -1624,6 +1630,10 @@ class AvroSink(CopySink):
      for the sink data store. Type: integer (or Expression with resultType
      integer).
     :type max_concurrent_connections: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param store_settings: Avro store settings.
@@ -1643,13 +1653,14 @@ class AvroSink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
         'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'},
     }
 
-    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None:
-        super(AvroSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, store_settings=None, format_settings=None, **kwargs) -> None:
+        super(AvroSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs)
         self.store_settings = store_settings
         self.format_settings = format_settings
         self.type = 'AvroSink'
@@ -2196,6 +2207,10 @@ class AzureBlobFSSink(CopySink):
      for the sink data store. Type: integer (or Expression with resultType
      integer).
     :type max_concurrent_connections: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param copy_behavior: The type of copy behavior for copy sink.
@@ -2213,12 +2228,13 @@ class AzureBlobFSSink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
     }
 
-    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None:
-        super(AzureBlobFSSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, copy_behavior=None, **kwargs) -> None:
+        super(AzureBlobFSSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs)
         self.copy_behavior = copy_behavior
         self.type = 'AzureBlobFSSink'
@@ -2909,6 +2925,10 @@ class AzureDataExplorerSink(CopySink):
      for the sink data store. Type: integer (or Expression with resultType
      integer).
     :type max_concurrent_connections: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param ingestion_mapping_name: A name of a pre-created csv mapping that
@@ -2933,14 +2953,15 @@ class AzureDataExplorerSink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'},
         'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'},
         'flush_immediately': {'key': 'flushImmediately', 'type': 'object'},
     }
 
-    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ingestion_mapping_name=None, ingestion_mapping_as_json=None, flush_immediately=None, **kwargs) -> None:
-        super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, ingestion_mapping_name=None, ingestion_mapping_as_json=None, flush_immediately=None, **kwargs) -> None:
+        super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs)
         self.ingestion_mapping_name = ingestion_mapping_name
         self.ingestion_mapping_as_json = ingestion_mapping_as_json
         self.flush_immediately = flush_immediately
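For AzureDataExplorerSink the new base-class keyword rides along with the Kusto-specific settings. A hypothetical configuration (the mapping name is invented; table_option is inherited from CopySink and could be passed here too):

    from azure.mgmt.datafactory.models import AzureDataExplorerSink

    adx_sink = AzureDataExplorerSink(
        ingestion_mapping_name='orders_csv_mapping_v1',  # pre-created csv mapping
        flush_immediately=True)  # optional ingestion tuning flag from this class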
@@ -3432,6 +3453,10 @@ class AzureDataLakeStoreSink(CopySink):
      for the sink data store. Type: integer (or Expression with resultType
      integer).
     :type max_concurrent_connections: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param copy_behavior: The type of copy behavior for copy sink.
@@ -3451,13 +3476,14 @@ class AzureDataLakeStoreSink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
         'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'},
     }
 
-    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, enable_adls_single_file_parallel=None, **kwargs) -> None:
-        super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, copy_behavior=None, enable_adls_single_file_parallel=None, **kwargs) -> None:
+        super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs)
         self.copy_behavior = copy_behavior
         self.enable_adls_single_file_parallel = enable_adls_single_file_parallel
         self.type = 'AzureDataLakeStoreSink'
@@ -4287,6 +4313,10 @@ class AzureMySqlSink(CopySink):
      for the sink data store. Type: integer (or Expression with resultType
      integer).
     :type max_concurrent_connections: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param pre_copy_script: A query to execute before starting the copy. Type:
@@ -4305,12 +4335,13 @@ class AzureMySqlSink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
     }
 
-    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None:
-        super(AzureMySqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, pre_copy_script=None, **kwargs) -> None:
+        super(AzureMySqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs)
         self.pre_copy_script = pre_copy_script
         self.type = 'AzureMySqlSink'
@@ -4504,6 +4535,10 @@ class AzurePostgreSqlSink(CopySink):
      for the sink data store. Type: integer (or Expression with resultType
      integer).
     :type max_concurrent_connections: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param pre_copy_script: A query to execute before starting the copy. Type:
@@ -4522,12 +4557,13 @@ class AzurePostgreSqlSink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
     }
 
-    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None:
-        super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, pre_copy_script=None, **kwargs) -> None:
+        super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs)
         self.pre_copy_script = pre_copy_script
         self.type = 'AzurePostgreSqlSink'
@@ -4675,6 +4711,10 @@ class AzureQueueSink(CopySink):
      for the sink data store. Type: integer (or Expression with resultType
      integer).
     :type max_concurrent_connections: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     """
@@ -4690,11 +4730,12 @@ class AzureQueueSink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
     }
 
-    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None:
-        super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, **kwargs) -> None:
+        super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs)
         self.type = 'AzureQueueSink'
@@ -4785,6 +4826,10 @@ class AzureSearchIndexSink(CopySink):
      for the sink data store. Type: integer (or Expression with resultType
      integer).
     :type max_concurrent_connections: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param write_behavior: Specify the write behavior when upserting documents
@@ -4804,12 +4849,13 @@ class AzureSearchIndexSink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
     }
 
-    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None:
-        super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, write_behavior=None, **kwargs) -> None:
+        super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs)
         self.write_behavior = write_behavior
         self.type = 'AzureSearchIndexSink'
@@ -5263,6 +5309,10 @@ class AzureSqlSink(CopySink):
      for the sink data store. Type: integer (or Expression with resultType
      integer).
     :type max_concurrent_connections: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param sql_writer_stored_procedure_name: SQL writer stored procedure name.
@@ -5294,6 +5344,7 @@ class AzureSqlSink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
         'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
@@ -5302,8 +5353,8 @@ class AzureSqlSink(CopySink):
         'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'},
     }
 
-    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None:
-        super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None:
+        super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs)
         self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name
         self.sql_writer_table_type = sql_writer_table_type
         self.pre_copy_script = pre_copy_script
@@ -5601,6 +5652,10 @@ class AzureTableSink(CopySink):
      for the sink data store. Type: integer (or Expression with resultType
      integer).
     :type max_concurrent_connections: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param azure_table_default_partition_key_value: Azure Table default
@@ -5628,6 +5683,7 @@ class AzureTableSink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'},
         'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'},
@@ -5635,8 +5691,8 @@ class AzureTableSink(CopySink):
         'azure_table_insert_type': {'key': 'azureTableInsertType', 'type': 'object'},
     }
 
-    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, azure_table_default_partition_key_value=None, azure_table_partition_key_name=None, azure_table_row_key_name=None, azure_table_insert_type=None, **kwargs) -> None:
-        super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, azure_table_default_partition_key_value=None, azure_table_partition_key_name=None, azure_table_row_key_name=None, azure_table_insert_type=None, **kwargs) -> None:
+        super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs)
         self.azure_table_default_partition_key_value = azure_table_default_partition_key_value
         self.azure_table_partition_key_name = azure_table_partition_key_name
         self.azure_table_row_key_name = azure_table_row_key_name
@@ -5857,6 +5913,10 @@ class BinarySink(CopySink):
      for the sink data store. Type: integer (or Expression with resultType
      integer).
     :type max_concurrent_connections: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param store_settings: Binary store settings.
@@ -5874,12 +5934,13 @@ class BinarySink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
     }
 
-    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None:
-        super(BinarySink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, store_settings=None, **kwargs) -> None:
+        super(BinarySink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs)
         self.store_settings = store_settings
         self.type = 'BinarySink'
@@ -6139,6 +6200,10 @@ class BlobSink(CopySink):
      for the sink data store. Type: integer (or Expression with resultType
      integer).
     :type max_concurrent_connections: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param blob_writer_overwrite_files: Blob writer overwrite files. Type:
@@ -6165,6 +6230,7 @@ class BlobSink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'},
         'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'},
@@ -6172,8 +6238,8 @@ class BlobSink(CopySink):
         'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
     }
 
-    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, blob_writer_overwrite_files=None, blob_writer_date_time_format=None, blob_writer_add_header=None, copy_behavior=None, **kwargs) -> None:
-        super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, blob_writer_overwrite_files=None, blob_writer_date_time_format=None, blob_writer_add_header=None, copy_behavior=None, **kwargs) -> None:
+        super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs)
         self.blob_writer_overwrite_files = blob_writer_overwrite_files
         self.blob_writer_date_time_format = blob_writer_date_time_format
         self.blob_writer_add_header = blob_writer_add_header
@@ -6738,6 +6804,10 @@ class CommonDataServiceForAppsSink(CopySink):
      for the sink data store. Type: integer (or Expression with resultType
      integer).
     :type max_concurrent_connections: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :ivar write_behavior: Required. The write behavior for the operation.
@@ -6761,6 +6831,7 @@ class CommonDataServiceForAppsSink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
         'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
@@ -6768,8 +6839,8 @@ class CommonDataServiceForAppsSink(CopySink):
 
     write_behavior = "Upsert"
 
-    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None:
-        super(CommonDataServiceForAppsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, ignore_null_values=None, **kwargs) -> None:
+        super(CommonDataServiceForAppsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs)
         self.ignore_null_values = ignore_null_values
         self.type = 'CommonDataServiceForAppsSink'
@@ -7318,6 +7389,10 @@ class CosmosDbMongoDbApiSink(CopySink):
      for the sink data store. Type: integer (or Expression with resultType
      integer).
     :type max_concurrent_connections: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param write_behavior: Specifies whether the document with same key to be
@@ -7338,12 +7413,13 @@ class CosmosDbMongoDbApiSink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
     }
 
-    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None:
-        super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, write_behavior=None, **kwargs) -> None:
+        super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs)
         self.write_behavior = write_behavior
         self.type = 'CosmosDbMongoDbApiSink'
@@ -8845,6 +8921,10 @@ class DelimitedTextSink(CopySink):
      for the sink data store. Type: integer (or Expression with resultType
      integer).
     :type max_concurrent_connections: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param store_settings: DelimitedText store settings.
@@ -8865,13 +8945,14 @@ class DelimitedTextSink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
         'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'},
     }
 
-    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None:
-        super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, store_settings=None, format_settings=None, **kwargs) -> None:
+        super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs)
         self.store_settings = store_settings
         self.format_settings = format_settings
         self.type = 'DelimitedTextSink'
@@ -9117,6 +9198,10 @@ class DocumentDbCollectionSink(CopySink):
      for the sink data store. Type: integer (or Expression with resultType
      integer).
     :type max_concurrent_connections: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param nesting_separator: Nested properties separator. Default is . (dot).
@@ -9138,13 +9223,14 @@ class DocumentDbCollectionSink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'},
         'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
     }
 
-    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, nesting_separator=None, write_behavior=None, **kwargs) -> None:
-        super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, nesting_separator=None, write_behavior=None, **kwargs) -> None:
+        super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs)
         self.nesting_separator = nesting_separator
         self.write_behavior = write_behavior
         self.type = 'DocumentDbCollectionSink'
@@ -9748,6 +9834,10 @@ class DynamicsCrmSink(CopySink):
      for the sink data store. Type: integer (or Expression with resultType
      integer).
     :type max_concurrent_connections: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :ivar write_behavior: Required. The write behavior for the operation.
@@ -9771,6 +9861,7 @@ class DynamicsCrmSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, @@ -9778,8 +9869,8 @@ class DynamicsCrmSink(CopySink): write_behavior = "Upsert" - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None: - super(DynamicsCrmSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, ignore_null_values=None, **kwargs) -> None: + super(DynamicsCrmSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) self.ignore_null_values = ignore_null_values self.type = 'DynamicsCrmSink' @@ -10016,6 +10107,10 @@ class DynamicsSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object :param type: Required. Constant filled by server. :type type: str :ivar write_behavior: Required. The write behavior for the operation. 
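Note that for the Dynamics-family sinks the write behavior is not a constructor argument: write_behavior = "Upsert" is a class-level constant (declared :ivar, "Constant filled by server"), so __init__ only accepts ignore_null_values plus the newly threaded table_option. A short sketch against the real model (argument values are illustrative only):

from azure.mgmt.datafactory.models import DynamicsCrmSink

sink = DynamicsCrmSink(ignore_null_values=True, table_option='autoCreate')
print(sink.write_behavior)  # 'Upsert' -- read from the class constant, never passed in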
@@ -10039,6 +10134,7 @@ class DynamicsSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, @@ -10046,8 +10142,8 @@ class DynamicsSink(CopySink): write_behavior = "Upsert" - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None: - super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, ignore_null_values=None, **kwargs) -> None: + super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) self.ignore_null_values = ignore_null_values self.type = 'DynamicsSink' @@ -11194,6 +11290,10 @@ class FileSystemSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object :param type: Required. Constant filled by server. :type type: str :param copy_behavior: The type of copy behavior for copy sink. 
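On the wire, the attribute maps above serialize table_option to the tableOption JSON key, so a copy activity's sink block gains one extra optional property. An illustrative payload fragment (the surrounding values are made up; only the key names come from the attribute maps):

copy_sink_payload = {
    'type': 'SqlSink',
    'writeBatchSize': 10000,
    'maxConcurrentConnections': 4,
    # Only 'autoCreate' is supported for now; the value may also be an
    # Expression object with resultType string.
    'tableOption': 'autoCreate',
}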
@@ -11211,12 +11311,13 @@ class FileSystemSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: - super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, copy_behavior=None, **kwargs) -> None: + super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) self.copy_behavior = copy_behavior self.type = 'FileSystemSink' @@ -14590,6 +14691,10 @@ class InformixSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. 
Type: @@ -14608,12 +14713,13 @@ class InformixSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: - super(InformixSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, pre_copy_script=None, **kwargs) -> None: + super(InformixSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) self.pre_copy_script = pre_copy_script self.type = 'InformixSink' @@ -16833,6 +16939,10 @@ class MicrosoftAccessSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. 
Type: @@ -16851,12 +16961,13 @@ class MicrosoftAccessSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: - super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, pre_copy_script=None, **kwargs) -> None: + super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) self.pre_copy_script = pre_copy_script self.type = 'MicrosoftAccessSink' @@ -18082,6 +18193,10 @@ class OdbcSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. 
Type: @@ -18100,12 +18215,13 @@ class OdbcSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: - super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, pre_copy_script=None, **kwargs) -> None: + super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) self.pre_copy_script = pre_copy_script self.type = 'OdbcSink' @@ -18933,6 +19049,10 @@ class OracleSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: SQL pre-copy script. 
Type: string (or Expression @@ -18951,12 +19071,13 @@ class OracleSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: - super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, pre_copy_script=None, **kwargs) -> None: + super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) self.pre_copy_script = pre_copy_script self.type = 'OracleSink' @@ -19280,6 +19401,10 @@ class ParquetSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object :param type: Required. Constant filled by server. :type type: str :param store_settings: Parquet store settings. 
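Per the regenerated OracleSink signature above, the new argument combines with the existing pre-copy script. A hedged sketch (the script text is an arbitrary example, not anything the SDK prescribes):

from azure.mgmt.datafactory.models import OracleSink

sink = OracleSink(
    pre_copy_script='DELETE FROM stg_orders',  # example script only
    table_option='autoCreate',
)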
@@ -19297,12 +19422,13 @@ class ParquetSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: - super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, store_settings=None, **kwargs) -> None: + super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) self.store_settings = store_settings self.type = 'ParquetSink' @@ -22112,6 +22238,10 @@ class SalesforceServiceCloudSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. 
Default is @@ -22144,14 +22274,15 @@ class SalesforceServiceCloudSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None: - super(SalesforceServiceCloudSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None: + super(SalesforceServiceCloudSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) self.write_behavior = write_behavior self.external_id_field_name = external_id_field_name self.ignore_null_values = ignore_null_values @@ -22235,6 +22366,10 @@ class SalesforceSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. 
Default is @@ -22267,14 +22402,15 @@ class SalesforceSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None: - super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None: + super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) self.write_behavior = write_behavior self.external_id_field_name = external_id_field_name self.ignore_null_values = ignore_null_values @@ -22659,6 +22795,10 @@ class SapCloudForCustomerSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. 
Default is @@ -22678,12 +22818,13 @@ class SapCloudForCustomerSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: - super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, write_behavior=None, **kwargs) -> None: + super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) self.write_behavior = write_behavior self.type = 'SapCloudForCustomerSink' @@ -24988,6 +25129,10 @@ class SqlDWSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: SQL pre-copy script. 
Type: string (or Expression @@ -25013,14 +25158,15 @@ class SqlDWSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, allow_poly_base=None, poly_base_settings=None, **kwargs) -> None: - super(SqlDWSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, pre_copy_script=None, allow_poly_base=None, poly_base_settings=None, **kwargs) -> None: + super(SqlDWSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) self.pre_copy_script = pre_copy_script self.allow_poly_base = allow_poly_base self.poly_base_settings = poly_base_settings @@ -25111,6 +25257,10 @@ class SqlMISink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object :param type: Required. Constant filled by server. :type type: str :param sql_writer_stored_procedure_name: SQL writer stored procedure name. 
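SqlDWSink keeps its PolyBase-specific arguments and now threads table_option as well. A sketch combining them; it assumes PolybaseSettings exposes reject_type/reject_value parameters (those names are not shown in this hunk), with 'percentage' taken from the PolybaseSettingsRejectType enum:

from azure.mgmt.datafactory.models import PolybaseSettings, SqlDWSink

sink = SqlDWSink(
    allow_poly_base=True,
    poly_base_settings=PolybaseSettings(reject_type='percentage', reject_value=5.0),
    table_option='autoCreate',  # only 'autoCreate' is supported for now
)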
@@ -25142,6 +25292,7 @@ class SqlMISink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, @@ -25150,8 +25301,8 @@ class SqlMISink(CopySink): 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None: - super(SqlMISink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None: + super(SqlMISink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type self.pre_copy_script = pre_copy_script @@ -25309,6 +25460,10 @@ class SqlServerSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object :param type: Required. Constant filled by server. :type type: str :param sql_writer_stored_procedure_name: SQL writer stored procedure name. 
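The next hunk applies the same constructor change to SqlServerSink. A hedged sketch of its stored-procedure write path; StoredProcedureParameter and its value/type arguments are assumed from the wider SDK rather than shown in this patch, and every name below is illustrative:

from azure.mgmt.datafactory.models import SqlServerSink, StoredProcedureParameter

sink = SqlServerSink(
    sql_writer_stored_procedure_name='spUpsertTarget',  # hypothetical proc name
    sql_writer_table_type='TargetTableType',            # hypothetical table type
    stored_procedure_parameters={
        'batchId': StoredProcedureParameter(value='1', type='Int'),
    },
    table_option='autoCreate',
)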
@@ -25340,6 +25495,7 @@ class SqlServerSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, @@ -25348,8 +25504,8 @@ class SqlServerSink(CopySink): 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None: - super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None: + super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type self.pre_copy_script = pre_copy_script @@ -25573,6 +25729,10 @@ class SqlSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object :param type: Required. Constant filled by server. :type type: str :param sql_writer_stored_procedure_name: SQL writer stored procedure name. 
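Further below, this patch also adds trigger event-subscription operations to _triggers_operations.py: subscribe_to_events and unsubscribe_from_events return an LROPoller over TriggerSubscriptionOperationStatus, while get_event_subscription_status is a plain call. A hedged sketch of driving them; it assumes an already-built DataFactoryManagementClient, and the resource names are made up:

poller = client.triggers.subscribe_to_events('my-rg', 'my-factory', 'my-event-trigger')
status = poller.result()  # TriggerSubscriptionOperationStatus
print(status.trigger_name, status.status)  # e.g. 'my-event-trigger' 'Enabled'

current = client.triggers.get_event_subscription_status(
    'my-rg', 'my-factory', 'my-event-trigger')
if current.status == 'Enabled':
    client.triggers.unsubscribe_from_events(
        'my-rg', 'my-factory', 'my-event-trigger').result()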
@@ -25604,6 +25764,7 @@ class SqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, @@ -25612,8 +25773,8 @@ class SqlSink(CopySink): 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None: - super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None: + super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type self.pre_copy_script = pre_copy_script @@ -27233,6 +27394,36 @@ def __init__(self, *, value, continuation_token: str=None, **kwargs) -> None: self.continuation_token = continuation_token +class TriggerSubscriptionOperationStatus(Model): + """Defines the response of a trigger subscription operation. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar trigger_name: Trigger name. + :vartype trigger_name: str + :ivar status: Event Subscription Status. 
Possible values include: + 'Enabled', 'Provisioning', 'Deprovisioning', 'Disabled', 'Unknown' + :vartype status: str or + ~azure.mgmt.datafactory.models.EventSubscriptionStatus + """ + + _validation = { + 'trigger_name': {'readonly': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'trigger_name': {'key': 'triggerName', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(TriggerSubscriptionOperationStatus, self).__init__(**kwargs) + self.trigger_name = None + self.status = None + + class TumblingWindowTrigger(Trigger): """Trigger that schedules pipeline runs for all fixed time interval windows from a start time without gaps and also supports backfill scenarios (when diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py index caeda2fcdc91..57e31b1bd8c9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py @@ -318,6 +318,268 @@ def delete( delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} + def _subscribe_to_events_initial( + self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.subscribe_to_events.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 202]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('TriggerSubscriptionOperationStatus', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def subscribe_to_events( + self, resource_group_name, 
factory_name, trigger_name, custom_headers=None, raw=False, polling=True, **operation_config): + """Subscribe event trigger to events. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns + TriggerSubscriptionOperationStatus or + ClientRawResponse if raw==True + :rtype: + ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus] + or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus]] + :raises: :class:`CloudError` + """ + raw_result = self._subscribe_to_events_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + deserialized = self._deserialize('TriggerSubscriptionOperationStatus', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + subscribe_to_events.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} + + def get_event_subscription_status( + self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, **operation_config): + """Get a trigger's event subscription status. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: TriggerSubscriptionOperationStatus or ClientRawResponse if + raw=true + :rtype: + ~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.get_event_subscription_status.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('TriggerSubscriptionOperationStatus', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get_event_subscription_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/getEventSubscriptionStatus'} + + + def _unsubscribe_from_events_initial( + self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.unsubscribe_from_events.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if 
self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 202]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('TriggerSubscriptionOperationStatus', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def unsubscribe_from_events( + self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, polling=True, **operation_config): + """Unsubscribe event trigger from events. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns + TriggerSubscriptionOperationStatus or + ClientRawResponse if raw==True + :rtype: + ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus] + or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus]] + :raises: :class:`CloudError` + """ + raw_result = self._unsubscribe_from_events_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + deserialized = self._deserialize('TriggerSubscriptionOperationStatus', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + unsubscribe_from_events.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} + + def _start_initial( self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, **operation_config): # Construct URL From 214041b7073388bdd8b85d89b3ce27bd3bbe0f41 Mon Sep 17 00:00:00 2001 From: Azure SDK for Python bot Date: Wed, 14 Aug 2019 23:01:20 -0700 Subject: [PATCH 21/30] Generated from 
6ca38e062bb3184e7207e058d4aa05656e9a755f (#6800) chore: jsonfmt datafactory --- .../azure/mgmt/datafactory/models/__init__.py | 14 ++ .../_data_factory_management_client_enums.py | 6 + .../azure/mgmt/datafactory/models/_models.py | 225 +++++++++++++++++- .../mgmt/datafactory/models/_models_py3.py | 225 +++++++++++++++++- 4 files changed, 458 insertions(+), 12 deletions(-) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index b76d84d11b17..298f059389b8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -271,7 +271,11 @@ from ._models_py3 import JiraLinkedService from ._models_py3 import JiraObjectDataset from ._models_py3 import JiraSource + from ._models_py3 import JsonDataset from ._models_py3 import JsonFormat + from ._models_py3 import JsonSink + from ._models_py3 import JsonSource + from ._models_py3 import JsonWriteSettings from ._models_py3 import LinkedIntegrationRuntime from ._models_py3 import LinkedIntegrationRuntimeKeyAuthorization from ._models_py3 import LinkedIntegrationRuntimeRbacAuthorization @@ -783,7 +787,11 @@ from ._models import JiraLinkedService from ._models import JiraObjectDataset from ._models import JiraSource + from ._models import JsonDataset from ._models import JsonFormat + from ._models import JsonSink + from ._models import JsonSource + from ._models import JsonWriteSettings from ._models import LinkedIntegrationRuntime from ._models import LinkedIntegrationRuntimeKeyAuthorization from ._models import LinkedIntegrationRuntimeRbacAuthorization @@ -1100,6 +1108,7 @@ SalesforceSinkWriteBehavior, AzureSearchIndexWriteBehaviorType, PolybaseSettingsRejectType, + JsonWriteFilePattern, SapCloudForCustomerSinkWriteBehavior, WebHookActivityMethod, IntegrationRuntimeType, @@ -1377,7 +1386,11 @@ 'JiraLinkedService', 'JiraObjectDataset', 'JiraSource', + 'JsonDataset', 'JsonFormat', + 'JsonSink', + 'JsonSource', + 'JsonWriteSettings', 'LinkedIntegrationRuntime', 'LinkedIntegrationRuntimeKeyAuthorization', 'LinkedIntegrationRuntimeRbacAuthorization', @@ -1693,6 +1706,7 @@ 'SalesforceSinkWriteBehavior', 'AzureSearchIndexWriteBehaviorType', 'PolybaseSettingsRejectType', + 'JsonWriteFilePattern', 'SapCloudForCustomerSinkWriteBehavior', 'WebHookActivityMethod', 'IntegrationRuntimeType', diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py index eda785276fdb..8b3e08f32768 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py @@ -463,6 +463,12 @@ class PolybaseSettingsRejectType(str, Enum): percentage = "percentage" +class JsonWriteFilePattern(str, Enum): + + set_of_objects = "setOfObjects" + array_of_objects = "arrayOfObjects" + + class SapCloudForCustomerSinkWriteBehavior(str, Enum): insert = "Insert" diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py index b9f2574d0dc2..f0fadde3e1ed 100644 --- 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py @@ -504,7 +504,8 @@ class Dataset(Model): DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, AzureSqlTableDataset, AzureTableDataset, AzureBlobDataset, BinaryDataset, - DelimitedTextDataset, ParquetDataset, AvroDataset, AmazonS3Dataset + JsonDataset, DelimitedTextDataset, ParquetDataset, AvroDataset, + AmazonS3Dataset All required parameters must be populated in order to send to Azure. @@ -554,7 +555,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 
'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 
'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'Json': 'JsonDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, **kwargs): @@ -654,7 +655,7 @@ class CopySource(Model): MicrosoftAccessSource, InformixSource, RelationalSource, CommonDataServiceForAppsSource, DynamicsCrmSource, DynamicsSource, DocumentDbCollectionSource, BlobSource, AzureTableSource, BinarySource, - DelimitedTextSource, ParquetSource, AvroSource + JsonSource, DelimitedTextSource, ParquetSource, AvroSource All required parameters must be populated in order to send to Azure. @@ -689,7 +690,7 @@ class CopySource(Model): } _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 
'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'ODataSource': 'ODataSource', 'SapBwSource': 'SapBwSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'BinarySource': 'BinarySource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'AvroSource': 'AvroSource'} + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'ODataSource': 'ODataSource', 'SapBwSource': 'SapBwSource', 
'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'BinarySource': 'BinarySource', 'JsonSource': 'JsonSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'AvroSource': 'AvroSource'} } def __init__(self, **kwargs): @@ -1539,7 +1540,7 @@ class CopySink(Model): SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, DocumentDbCollectionSink, FileSystemSink, BlobSink, BinarySink, ParquetSink, AvroSink, AzureTableSink, AzureQueueSink, - SapCloudForCustomerSink, AzureMySqlSink, AzurePostgreSqlSink, + SapCloudForCustomerSink, AzureMySqlSink, AzurePostgreSqlSink, JsonSink, DelimitedTextSink All required parameters must be populated in order to send to Azure. @@ -1589,7 +1590,7 @@ class CopySink(Model): } _subtype_map = { - 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'DelimitedTextSink': 'DelimitedTextSink'} + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 
'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} } def __init__(self, **kwargs): @@ -15663,6 +15664,79 @@ def __init__(self, **kwargs): self.type = 'JiraSource' + +class JsonDataset(Dataset): + """Json dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the json data storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param encoding_name: The code page name of the preferred encoding. If not + specified, the default value is UTF-8, unless BOM denotes another Unicode + encoding. Refer to the name column of the table in the following link to + set supported values: + https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string + (or Expression with resultType string). + :type encoding_name: object + :param compression: The data compression method used for the json dataset.
+ :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, **kwargs): + super(JsonDataset, self).__init__(**kwargs) + self.location = kwargs.get('location', None) + self.encoding_name = kwargs.get('encoding_name', None) + self.compression = kwargs.get('compression', None) + self.type = 'Json' + + class JsonFormat(DatasetStorageFormat): """The data stored in JSON format. @@ -15733,6 +15807,145 @@ def __init__(self, **kwargs): self.type = 'JsonFormat' +class JsonSink(CopySink): + """A copy activity Json sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Json store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :param format_settings: Json format settings.
+ :type format_settings: ~azure.mgmt.datafactory.models.JsonWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'JsonWriteSettings'}, + } + + def __init__(self, **kwargs): + super(JsonSink, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) + self.type = 'JsonSink' + + +class JsonSource(CopySource): + """A copy activity Json source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Json store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__(self, **kwargs): + super(JsonSource, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.type = 'JsonSource' + + +class JsonWriteSettings(FormatWriteSettings): + """Json write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param file_pattern: File pattern of JSON. This setting controls the way a + collection of JSON objects will be treated. The default value is + 'setOfObjects'. It is case-sensitive.
Possible values include: + 'setOfObjects', 'arrayOfObjects' + :type file_pattern: str or + ~azure.mgmt.datafactory.models.JsonWriteFilePattern + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'file_pattern': {'key': 'filePattern', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(JsonWriteSettings, self).__init__(**kwargs) + self.file_pattern = kwargs.get('file_pattern', None) + + class LinkedIntegrationRuntime(Model): """The linked integration runtime information. diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py index 2417a41c42eb..bc2d0a34afc6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py @@ -504,7 +504,8 @@ class Dataset(Model): DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, AzureSqlTableDataset, AzureTableDataset, AzureBlobDataset, BinaryDataset, - DelimitedTextDataset, ParquetDataset, AvroDataset, AmazonS3Dataset + JsonDataset, DelimitedTextDataset, ParquetDataset, AvroDataset, + AmazonS3Dataset All required parameters must be populated in order to send to Azure. @@ -554,7 +555,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 
'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 
'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'Json': 'JsonDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: @@ -654,7 +655,7 @@ class CopySource(Model): MicrosoftAccessSource, InformixSource, RelationalSource, CommonDataServiceForAppsSource, DynamicsCrmSource, DynamicsSource, DocumentDbCollectionSource, BlobSource, AzureTableSource, BinarySource, - DelimitedTextSource, ParquetSource, AvroSource + JsonSource, DelimitedTextSource, ParquetSource, AvroSource All required parameters must be populated in order to send to Azure. 
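A minimal sketch of what the regenerated map enables: with 'Json' registered in Dataset._subtype_map, msrest's polymorphic deserialization resolves a payload whose discriminator is 'Json' to the new JsonDataset class. The linked-service name 'MyStorageLS' and the blob path below are illustrative assumptions, not part of this patch; Dataset.deserialize is the msrest Model.deserialize helper inherited by every generated model.

    from azure.mgmt.datafactory.models import Dataset, JsonDataset

    raw = {
        'type': 'Json',  # discriminator looked up in Dataset._subtype_map
        'linkedServiceName': {'referenceName': 'MyStorageLS',
                              'type': 'LinkedServiceReference'},
        'typeProperties': {
            'location': {'type': 'AzureBlobStorageLocation',
                         'folderPath': 'input', 'fileName': 'data.json'},
            'encodingName': 'UTF-8',
        },
    }

    ds = Dataset.deserialize(raw)       # msrest picks the subclass from the map
    assert isinstance(ds, JsonDataset)
    assert ds.encoding_name == 'UTF-8'  # read via the flattened typeProperties.encodingName key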
@@ -689,7 +690,7 @@ class CopySource(Model): } _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'ODataSource': 'ODataSource', 'SapBwSource': 'SapBwSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'BinarySource': 'BinarySource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'AvroSource': 'AvroSource'} + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 
'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'ODataSource': 'ODataSource', 'SapBwSource': 'SapBwSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'BinarySource': 'BinarySource', 'JsonSource': 'JsonSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'AvroSource': 'AvroSource'} } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: @@ -1539,7 +1540,7 @@ class CopySink(Model): SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, DocumentDbCollectionSink, FileSystemSink, BlobSink, BinarySink, ParquetSink, AvroSink, AzureTableSink, AzureQueueSink, - SapCloudForCustomerSink, AzureMySqlSink, AzurePostgreSqlSink, + SapCloudForCustomerSink, AzureMySqlSink, AzurePostgreSqlSink, JsonSink, DelimitedTextSink All required parameters must be populated in order to send to Azure. 
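The write path mirrors this: once JsonSource and JsonSink are registered in the CopySource and CopySink maps, they can be wired into a copy activity like any other source/sink pair. A minimal sketch, assuming JSON input and output datasets already exist under the reference names used below; JsonWriteSettings selects the non-default 'arrayOfObjects' file pattern.

    from azure.mgmt.datafactory.models import (
        CopyActivity, DatasetReference, JsonSink, JsonSource, JsonWriteSettings)

    copy = CopyActivity(
        name='CopyJson',
        inputs=[DatasetReference(reference_name='InputJsonDataset')],
        outputs=[DatasetReference(reference_name='OutputJsonDataset')],
        source=JsonSource(),
        sink=JsonSink(format_settings=JsonWriteSettings(
            type='JsonWriteSettings',         # 'type' is required on FormatWriteSettings
            file_pattern='arrayOfObjects')),  # default would be 'setOfObjects'
    )
    assert copy.sink.type == 'JsonSink'       # constant filled in by the generated model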
@@ -1589,7 +1590,7 @@ class CopySink(Model): } _subtype_map = { - 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'DelimitedTextSink': 'DelimitedTextSink'} + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, **kwargs) -> None: @@ -15663,6 +15664,79 @@ def __init__(self, *, additional_properties=None, source_retry_count=None, sourc self.type = 'JiraSource' +class JsonDataset(Dataset): + """Json dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement.
+ :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the json data storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param encoding_name: The code page name of the preferred encoding. If not + specified, the default value is UTF-8, unless BOM denotes another Unicode + encoding. Refer to the name column of the table in the following link to + set supported values: + https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string + (or Expression with resultType string). + :type encoding_name: object + :param compression: The data compression method used for the json dataset. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, encoding_name=None, compression=None, **kwargs) -> None: + super(JsonDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.location = location + self.encoding_name = encoding_name + self.compression = compression + self.type = 'Json' + + class JsonFormat(DatasetStorageFormat): """The data stored in JSON format. @@ -15733,6 +15807,145 @@ def __init__(self, *, additional_properties=None, serializer=None, deserializer= self.type = 'JsonFormat' +class JsonSink(CopySink): + """A copy activity Json sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0.
+ :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Json store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :param format_settings: Json format settings. + :type format_settings: ~azure.mgmt.datafactory.models.JsonWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'JsonWriteSettings'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, store_settings=None, format_settings=None, **kwargs) -> None: + super(JsonSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + self.store_settings = store_settings + self.format_settings = format_settings + self.type = 'JsonSink' + + +class JsonSource(CopySource): + """A copy activity Json source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer).
+ :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Json store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + super(JsonSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.type = 'JsonSource' + + +class JsonWriteSettings(FormatWriteSettings): + """Json write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param file_pattern: File pattern of JSON. This setting controls the way a + collection of JSON objects will be treated. The default value is + 'setOfObjects'. It is case-sensitive. Possible values include: + 'setOfObjects', 'arrayOfObjects' + :type file_pattern: str or + ~azure.mgmt.datafactory.models.JsonWriteFilePattern + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'file_pattern': {'key': 'filePattern', 'type': 'str'}, + } + + def __init__(self, *, type: str, additional_properties=None, file_pattern=None, **kwargs) -> None: + super(JsonWriteSettings, self).__init__(additional_properties=additional_properties, type=type, **kwargs) + self.file_pattern = file_pattern + + class LinkedIntegrationRuntime(Model): """The linked integration runtime information. From 4842bc44fb32e78089f303ad097ef587b0f8e1a9 Mon Sep 17 00:00:00 2001 From: Azure SDK for Python bot Date: Fri, 16 Aug 2019 01:18:11 -0700 Subject: [PATCH 22/30] Generated from 3c745e4716094361aaa9e683d3e6ec582af89f9d (#6815) refactor table option --- .../azure/mgmt/datafactory/models/_models.py | 196 ++--- .../mgmt/datafactory/models/_models_py3.py | 326 +++++------------- 2 files changed, 125 insertions(+), 397 deletions(-) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py index f0fadde3e1ed..ef7778d653b5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py @@ -1566,10 +1566,6 @@ class CopySink(Model): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate.
- For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str """ @@ -1585,7 +1581,6 @@ class CopySink(Model): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } @@ -1601,7 +1596,6 @@ def __init__(self, **kwargs): self.sink_retry_count = kwargs.get('sink_retry_count', None) self.sink_retry_wait = kwargs.get('sink_retry_wait', None) self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) - self.table_option = kwargs.get('table_option', None) self.type = None @@ -1631,10 +1625,6 @@ class AvroSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param store_settings: Avro store settings. @@ -1654,7 +1644,6 @@ class AvroSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'}, @@ -2208,10 +2197,6 @@ class AzureBlobFSSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param copy_behavior: The type of copy behavior for copy sink. @@ -2229,7 +2214,6 @@ class AzureBlobFSSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } @@ -2926,10 +2910,6 @@ class AzureDataExplorerSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. 
:type type: str :param ingestion_mapping_name: A name of a pre-created csv mapping that @@ -2954,7 +2934,6 @@ class AzureDataExplorerSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'}, 'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'}, @@ -3454,10 +3433,6 @@ class AzureDataLakeStoreSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param copy_behavior: The type of copy behavior for copy sink. @@ -3477,7 +3452,6 @@ class AzureDataLakeStoreSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, @@ -4314,10 +4288,6 @@ class AzureMySqlSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. Type: @@ -4336,7 +4306,6 @@ class AzureMySqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -4536,10 +4505,6 @@ class AzurePostgreSqlSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. 
Type: @@ -4558,7 +4523,6 @@ class AzurePostgreSqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -4712,10 +4676,6 @@ class AzureQueueSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str """ @@ -4731,7 +4691,6 @@ class AzureQueueSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } @@ -4827,10 +4786,6 @@ class AzureSearchIndexSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: Specify the write behavior when upserting documents @@ -4850,7 +4805,6 @@ class AzureSearchIndexSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } @@ -5310,10 +5264,6 @@ class AzureSqlSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param sql_writer_stored_procedure_name: SQL writer stored procedure name. @@ -5332,6 +5282,10 @@ class AzureSqlSink(CopySink): parameter name of the table type. Type: string (or Expression with resultType string). :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). 
+ :type table_option: object """ _validation = { @@ -5345,13 +5299,13 @@ class AzureSqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, } def __init__(self, **kwargs): @@ -5361,6 +5315,7 @@ def __init__(self, **kwargs): self.pre_copy_script = kwargs.get('pre_copy_script', None) self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.table_option = kwargs.get('table_option', None) self.type = 'AzureSqlSink' @@ -5653,10 +5608,6 @@ class AzureTableSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param azure_table_default_partition_key_value: Azure Table default @@ -5684,7 +5635,6 @@ class AzureTableSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'}, 'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'}, @@ -5914,10 +5864,6 @@ class BinarySink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param store_settings: Binary store settings. @@ -5935,7 +5881,6 @@ class BinarySink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, } @@ -6201,10 +6146,6 @@ class BlobSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). 
:type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param blob_writer_overwrite_files: Blob writer overwrite files. Type: @@ -6231,7 +6172,6 @@ class BlobSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, @@ -6805,10 +6745,6 @@ class CommonDataServiceForAppsSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :ivar write_behavior: Required. The write behavior for the operation. @@ -6832,7 +6768,6 @@ class CommonDataServiceForAppsSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, @@ -7390,10 +7325,6 @@ class CosmosDbMongoDbApiSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: Specifies whether the document with same key to be @@ -7414,7 +7345,6 @@ class CosmosDbMongoDbApiSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } @@ -8922,10 +8852,6 @@ class DelimitedTextSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param store_settings: DelimitedText store settings. 
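For illustration, a minimal sketch of constructing one of the slimmed-down file-based sinks after this change, assuming azure-mgmt-datafactory at this revision is installed; the store/format settings objects are omitted for brevity:

from azure.mgmt.datafactory.models import DelimitedTextSink

# Only the generic CopySink knobs plus the sink's own settings remain;
# table_option is no longer a recognized attribute on file-based sinks.
sink = DelimitedTextSink(
    write_batch_size=10000,
    sink_retry_count=3,
    max_concurrent_connections=4,
)
print(sink.type)  # polymorphic discriminator, set to 'DelimitedTextSink'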
@@ -8946,7 +8872,6 @@ class DelimitedTextSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'}, @@ -9199,10 +9124,6 @@ class DocumentDbCollectionSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param nesting_separator: Nested properties separator. Default is . (dot). @@ -9224,7 +9145,6 @@ class DocumentDbCollectionSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, @@ -9835,10 +9755,6 @@ class DynamicsCrmSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :ivar write_behavior: Required. The write behavior for the operation. @@ -9862,7 +9778,6 @@ class DynamicsCrmSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, @@ -10108,10 +10023,6 @@ class DynamicsSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :ivar write_behavior: Required. The write behavior for the operation. 
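A sketch of the write_behavior pattern shared by the Dynamics-family sinks, assuming the constant is the class-level 'Upsert' value shown for CommonDataServiceForAppsSink later in this patch:

from azure.mgmt.datafactory.models import DynamicsSink

# write_behavior is an :ivar filled by the class, not a constructor
# argument; only the optional knobs are passed in.
sink = DynamicsSink(ignore_null_values=True, write_batch_size=100)
print(sink.write_behavior)  # class-level constant, e.g. 'Upsert'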
@@ -10135,7 +10046,6 @@ class DynamicsSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, @@ -11291,10 +11201,6 @@ class FileSystemSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param copy_behavior: The type of copy behavior for copy sink. @@ -11312,7 +11218,6 @@ class FileSystemSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } @@ -14692,10 +14597,6 @@ class InformixSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. Type: @@ -14714,7 +14615,6 @@ class InformixSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -15833,10 +15733,6 @@ class JsonSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param store_settings: Json store settings. @@ -15856,7 +15752,6 @@ class JsonSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'JsonWriteSettings'}, @@ -17152,10 +17047,6 @@ class MicrosoftAccessSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). 
:type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. Type: @@ -17174,7 +17065,6 @@ class MicrosoftAccessSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -18406,10 +18296,6 @@ class OdbcSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. Type: @@ -18428,7 +18314,6 @@ class OdbcSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -19262,10 +19147,6 @@ class OracleSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: SQL pre-copy script. Type: string (or Expression @@ -19284,7 +19165,6 @@ class OracleSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -19614,10 +19494,6 @@ class ParquetSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param store_settings: Parquet store settings. 
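A short sketch of the pre-copy hook on OracleSink; the table name in the script is hypothetical:

from azure.mgmt.datafactory.models import OracleSink

# pre_copy_script runs on the sink before the copy starts; after this
# change OracleSink no longer exposes table_option.
sink = OracleSink(pre_copy_script="TRUNCATE TABLE staging_orders")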
@@ -19635,7 +19511,6 @@ class ParquetSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, } @@ -22451,10 +22326,6 @@ class SalesforceServiceCloudSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. Default is @@ -22487,7 +22358,6 @@ class SalesforceServiceCloudSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, @@ -22579,10 +22449,6 @@ class SalesforceSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. Default is @@ -22615,7 +22481,6 @@ class SalesforceSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, @@ -23008,10 +22873,6 @@ class SapCloudForCustomerSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. 
Default is @@ -23031,7 +22892,6 @@ class SapCloudForCustomerSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } @@ -25342,10 +25202,6 @@ class SqlDWSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: SQL pre-copy script. Type: string (or Expression @@ -25358,6 +25214,10 @@ class SqlDWSink(CopySink): :param poly_base_settings: Specifies PolyBase-related settings when allowPolyBase is true. :type poly_base_settings: ~azure.mgmt.datafactory.models.PolybaseSettings + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object """ _validation = { @@ -25371,11 +25231,11 @@ class SqlDWSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, } def __init__(self, **kwargs): @@ -25383,6 +25243,7 @@ def __init__(self, **kwargs): self.pre_copy_script = kwargs.get('pre_copy_script', None) self.allow_poly_base = kwargs.get('allow_poly_base', None) self.poly_base_settings = kwargs.get('poly_base_settings', None) + self.table_option = kwargs.get('table_option', None) self.type = 'SqlDWSink' @@ -25470,10 +25331,6 @@ class SqlMISink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param sql_writer_stored_procedure_name: SQL writer stored procedure name. @@ -25492,6 +25349,10 @@ class SqlMISink(CopySink): parameter name of the table type. Type: string (or Expression with resultType string). :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). 
+ :type table_option: object """ _validation = { @@ -25505,13 +25366,13 @@ class SqlMISink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, } def __init__(self, **kwargs): @@ -25521,6 +25382,7 @@ def __init__(self, **kwargs): self.pre_copy_script = kwargs.get('pre_copy_script', None) self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.table_option = kwargs.get('table_option', None) self.type = 'SqlMISink' @@ -25673,10 +25535,6 @@ class SqlServerSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param sql_writer_stored_procedure_name: SQL writer stored procedure name. @@ -25695,6 +25553,10 @@ class SqlServerSink(CopySink): parameter name of the table type. Type: string (or Expression with resultType string). :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). 
+ :type table_option: object """ _validation = { @@ -25708,13 +25570,13 @@ class SqlServerSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, } def __init__(self, **kwargs): @@ -25724,6 +25586,7 @@ def __init__(self, **kwargs): self.pre_copy_script = kwargs.get('pre_copy_script', None) self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.table_option = kwargs.get('table_option', None) self.type = 'SqlServerSink' @@ -25942,10 +25805,6 @@ class SqlSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param sql_writer_stored_procedure_name: SQL writer stored procedure name. @@ -25964,6 +25823,10 @@ class SqlSink(CopySink): parameter name of the table type. Type: string (or Expression with resultType string). :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). 
+ :type table_option: object """ _validation = { @@ -25977,13 +25840,13 @@ class SqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, } def __init__(self, **kwargs): @@ -25993,6 +25856,7 @@ def __init__(self, **kwargs): self.pre_copy_script = kwargs.get('pre_copy_script', None) self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.table_option = kwargs.get('table_option', None) self.type = 'SqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py index bc2d0a34afc6..295f650553e3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py @@ -1566,10 +1566,6 @@ class CopySink(Model): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. 
:type type: str """ @@ -1585,7 +1581,6 @@ class CopySink(Model): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } @@ -1593,7 +1588,7 @@ class CopySink(Model): 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: super(CopySink, self).__init__(**kwargs) self.additional_properties = additional_properties self.write_batch_size = write_batch_size @@ -1601,7 +1596,6 @@ def __init__(self, *, additional_properties=None, write_batch_size=None, write_b self.sink_retry_count = sink_retry_count self.sink_retry_wait = sink_retry_wait self.max_concurrent_connections = max_concurrent_connections - self.table_option = table_option self.type = None @@ -1631,10 +1625,6 @@ class AvroSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param store_settings: Avro store settings. 
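Under the new keyword-only signatures in _models_py3.py, a base-class sketch via one of the concrete sinks; the timeout value is an illustrative timespan string, since the parameter is typed as object:

from azure.mgmt.datafactory.models import AvroSink

# table_option has been dropped from the keyword-only parameter list; the
# remaining base kwargs are forwarded to CopySink unchanged.
sink = AvroSink(write_batch_timeout="00:05:00", sink_retry_count=2)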
@@ -1654,14 +1644,13 @@ class AvroSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, store_settings=None, format_settings=None, **kwargs) -> None: - super(AvroSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: + super(AvroSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.store_settings = store_settings self.format_settings = format_settings self.type = 'AvroSink' @@ -2208,10 +2197,6 @@ class AzureBlobFSSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param copy_behavior: The type of copy behavior for copy sink. 
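A sketch of the ADLS Gen2 sink this PR introduces; 'PreserveHierarchy' is shown as an assumed example value, since copy_behavior is typed as object and accepts any expression:

from azure.mgmt.datafactory.models import AzureBlobFSSink

# copy_behavior is typed as object so an Expression is also allowed; a
# plain string is the common case.
sink = AzureBlobFSSink(
    copy_behavior="PreserveHierarchy",
    max_concurrent_connections=8,
)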
@@ -2229,13 +2214,12 @@ class AzureBlobFSSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, copy_behavior=None, **kwargs) -> None: - super(AzureBlobFSSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(AzureBlobFSSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.copy_behavior = copy_behavior self.type = 'AzureBlobFSSink' @@ -2926,10 +2910,6 @@ class AzureDataExplorerSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. 
:type type: str :param ingestion_mapping_name: A name of a pre-created csv mapping that @@ -2954,15 +2934,14 @@ class AzureDataExplorerSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'}, 'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'}, 'flush_immediately': {'key': 'flushImmediately', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, ingestion_mapping_name=None, ingestion_mapping_as_json=None, flush_immediately=None, **kwargs) -> None: - super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ingestion_mapping_name=None, ingestion_mapping_as_json=None, flush_immediately=None, **kwargs) -> None: + super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.ingestion_mapping_name = ingestion_mapping_name self.ingestion_mapping_as_json = ingestion_mapping_as_json self.flush_immediately = flush_immediately @@ -3454,10 +3433,6 @@ class AzureDataLakeStoreSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param copy_behavior: The type of copy behavior for copy sink. 
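A sketch of the Kusto sink options; the mapping name is hypothetical and must match a pre-created csv mapping on the target table:

from azure.mgmt.datafactory.models import AzureDataExplorerSink

# ingestion_mapping_name references a pre-created csv mapping; setting
# flush_immediately skips the service-side aggregation window.
sink = AzureDataExplorerSink(
    ingestion_mapping_name="orders_csv_mapping",
    flush_immediately=True,
)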
@@ -3477,14 +3452,13 @@ class AzureDataLakeStoreSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, copy_behavior=None, enable_adls_single_file_parallel=None, **kwargs) -> None: - super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, enable_adls_single_file_parallel=None, **kwargs) -> None: + super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.copy_behavior = copy_behavior self.enable_adls_single_file_parallel = enable_adls_single_file_parallel self.type = 'AzureDataLakeStoreSink' @@ -4314,10 +4288,6 @@ class AzureMySqlSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. 
Type: @@ -4336,13 +4306,12 @@ class AzureMySqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, pre_copy_script=None, **kwargs) -> None: - super(AzureMySqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(AzureMySqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.pre_copy_script = pre_copy_script self.type = 'AzureMySqlSink' @@ -4536,10 +4505,6 @@ class AzurePostgreSqlSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. 
Type: @@ -4558,13 +4523,12 @@ class AzurePostgreSqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, pre_copy_script=None, **kwargs) -> None: - super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.pre_copy_script = pre_copy_script self.type = 'AzurePostgreSqlSink' @@ -4712,10 +4676,6 @@ class AzureQueueSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. 
:type type: str """ @@ -4731,12 +4691,11 @@ class AzureQueueSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, **kwargs) -> None: - super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: + super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'AzureQueueSink' @@ -4827,10 +4786,6 @@ class AzureSearchIndexSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. 
:type type: str :param write_behavior: Specify the write behavior when upserting documents @@ -4850,13 +4805,12 @@ class AzureSearchIndexSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, write_behavior=None, **kwargs) -> None: - super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: + super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.write_behavior = write_behavior self.type = 'AzureSearchIndexSink' @@ -5310,10 +5264,6 @@ class AzureSqlSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param sql_writer_stored_procedure_name: SQL writer stored procedure name. @@ -5332,6 +5282,10 @@ class AzureSqlSink(CopySink): parameter name of the table type. Type: string (or Expression with resultType string). :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). 
+ :type table_option: object """ _validation = { @@ -5345,22 +5299,23 @@ class AzureSqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None: - super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None, **kwargs) -> None: + super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type self.pre_copy_script = pre_copy_script self.stored_procedure_parameters = stored_procedure_parameters self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.table_option = table_option self.type = 'AzureSqlSink' @@ -5653,10 +5608,6 @@ class AzureTableSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. 
:type type: str :param azure_table_default_partition_key_value: Azure Table default @@ -5684,7 +5635,6 @@ class AzureTableSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'}, 'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'}, @@ -5692,8 +5642,8 @@ class AzureTableSink(CopySink): 'azure_table_insert_type': {'key': 'azureTableInsertType', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, azure_table_default_partition_key_value=None, azure_table_partition_key_name=None, azure_table_row_key_name=None, azure_table_insert_type=None, **kwargs) -> None: - super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, azure_table_default_partition_key_value=None, azure_table_partition_key_name=None, azure_table_row_key_name=None, azure_table_insert_type=None, **kwargs) -> None: + super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.azure_table_default_partition_key_value = azure_table_default_partition_key_value self.azure_table_partition_key_name = azure_table_partition_key_name self.azure_table_row_key_name = azure_table_row_key_name @@ -5914,10 +5864,6 @@ class BinarySink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param store_settings: Binary store settings. 
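The net effect for the SQL-family sinks, sketched with assumed values; table_option now arrives as the last keyword of the subclass signature rather than through CopySink:

from azure.mgmt.datafactory.models import AzureSqlSink

# 'autoCreate' is the only table_option value the service supports for
# now; the pre-copy script is a hypothetical example.
sink = AzureSqlSink(
    pre_copy_script="TRUNCATE TABLE dbo.staging",
    table_option="autoCreate",
)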
@@ -5935,13 +5881,12 @@ class BinarySink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, store_settings=None, **kwargs) -> None: - super(BinarySink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + super(BinarySink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.store_settings = store_settings self.type = 'BinarySink' @@ -6201,10 +6146,6 @@ class BlobSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param blob_writer_overwrite_files: Blob writer overwrite files. 
Type: @@ -6231,7 +6172,6 @@ class BlobSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, @@ -6239,8 +6179,8 @@ class BlobSink(CopySink): 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, blob_writer_overwrite_files=None, blob_writer_date_time_format=None, blob_writer_add_header=None, copy_behavior=None, **kwargs) -> None: - super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, blob_writer_overwrite_files=None, blob_writer_date_time_format=None, blob_writer_add_header=None, copy_behavior=None, **kwargs) -> None: + super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.blob_writer_overwrite_files = blob_writer_overwrite_files self.blob_writer_date_time_format = blob_writer_date_time_format self.blob_writer_add_header = blob_writer_add_header @@ -6805,10 +6745,6 @@ class CommonDataServiceForAppsSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :ivar write_behavior: Required. The write behavior for the operation. 
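
The BlobSink constructor above now forwards only the base CopySink parameters plus the blob writer settings. A construction sketch (values illustrative):

    from azure.mgmt.datafactory.models import BlobSink

    sink = BlobSink(
        blob_writer_overwrite_files=True,
        blob_writer_date_time_format='yyyy-MM-dd-HH',  # assumed .NET-style format
        blob_writer_add_header=True,
    )
    assert sink.type == 'BlobSink'
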
@@ -6832,7 +6768,6 @@ class CommonDataServiceForAppsSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, @@ -6840,8 +6775,8 @@ class CommonDataServiceForAppsSink(CopySink): write_behavior = "Upsert" - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, ignore_null_values=None, **kwargs) -> None: - super(CommonDataServiceForAppsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None: + super(CommonDataServiceForAppsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.ignore_null_values = ignore_null_values self.type = 'CommonDataServiceForAppsSink' @@ -7390,10 +7325,6 @@ class CosmosDbMongoDbApiSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. 
:type type: str :param write_behavior: Specifies whether the document with same key to be @@ -7414,13 +7345,12 @@ class CosmosDbMongoDbApiSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, write_behavior=None, **kwargs) -> None: - super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: + super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.write_behavior = write_behavior self.type = 'CosmosDbMongoDbApiSink' @@ -8922,10 +8852,6 @@ class DelimitedTextSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param store_settings: DelimitedText store settings. 
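
For CosmosDbMongoDbApiSink, write_behavior stays an Expression-capable object rather than a fixed constant, so literals and expressions are both accepted. A sketch, assuming the documented values insert (default) and upsert:

    from azure.mgmt.datafactory.models import CosmosDbMongoDbApiSink

    sink = CosmosDbMongoDbApiSink(write_behavior='upsert')  # overwrite on same key
    assert sink.type == 'CosmosDbMongoDbApiSink'
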
@@ -8946,14 +8872,13 @@ class DelimitedTextSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, store_settings=None, format_settings=None, **kwargs) -> None: - super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: + super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.store_settings = store_settings self.format_settings = format_settings self.type = 'DelimitedTextSink' @@ -9199,10 +9124,6 @@ class DocumentDbCollectionSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param nesting_separator: Nested properties separator. Default is . (dot). 
@@ -9224,14 +9145,13 @@ class DocumentDbCollectionSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, nesting_separator=None, write_behavior=None, **kwargs) -> None: - super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, nesting_separator=None, write_behavior=None, **kwargs) -> None: + super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.nesting_separator = nesting_separator self.write_behavior = write_behavior self.type = 'DocumentDbCollectionSink' @@ -9835,10 +9755,6 @@ class DynamicsCrmSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :ivar write_behavior: Required. The write behavior for the operation. 
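
DocumentDbCollectionSink likewise keeps its own writer knobs after losing table_option. A sketch (separator value illustrative):

    from azure.mgmt.datafactory.models import DocumentDbCollectionSink

    sink = DocumentDbCollectionSink(
        nesting_separator='|',   # default is '.' per the docstring above
        write_behavior='upsert',
    )
    assert sink.type == 'DocumentDbCollectionSink'
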
@@ -9862,7 +9778,6 @@ class DynamicsCrmSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, @@ -9870,8 +9785,8 @@ class DynamicsCrmSink(CopySink): write_behavior = "Upsert" - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, ignore_null_values=None, **kwargs) -> None: - super(DynamicsCrmSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None: + super(DynamicsCrmSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.ignore_null_values = ignore_null_values self.type = 'DynamicsCrmSink' @@ -10108,10 +10023,6 @@ class DynamicsSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :ivar write_behavior: Required. The write behavior for the operation. 
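
Note that write_behavior on DynamicsCrmSink is the class-level constant "Upsert" (declared as an ivar, not an __init__ parameter), so it is read but never passed:

    from azure.mgmt.datafactory.models import DynamicsCrmSink

    sink = DynamicsCrmSink(ignore_null_values=True)
    assert sink.write_behavior == 'Upsert'  # fixed by the model, not settable
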
@@ -10135,7 +10046,6 @@ class DynamicsSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, @@ -10143,8 +10053,8 @@ class DynamicsSink(CopySink): write_behavior = "Upsert" - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, ignore_null_values=None, **kwargs) -> None: - super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None: + super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.ignore_null_values = ignore_null_values self.type = 'DynamicsSink' @@ -11291,10 +11201,6 @@ class FileSystemSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param copy_behavior: The type of copy behavior for copy sink. 
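
DynamicsSink mirrors DynamicsCrmSink exactly; FileSystemSink, whose hunks follow, keeps only copy_behavior as its extra knob. A sketch with an assumed copyBehavior value:

    from azure.mgmt.datafactory.models import FileSystemSink

    sink = FileSystemSink(copy_behavior='PreserveHierarchy')  # illustrative value
    assert sink.type == 'FileSystemSink'
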
@@ -11312,13 +11218,12 @@ class FileSystemSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, copy_behavior=None, **kwargs) -> None: - super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.copy_behavior = copy_behavior self.type = 'FileSystemSink' @@ -14692,10 +14597,6 @@ class InformixSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. 
Type: @@ -14714,13 +14615,12 @@ class InformixSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, pre_copy_script=None, **kwargs) -> None: - super(InformixSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(InformixSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.pre_copy_script = pre_copy_script self.type = 'InformixSink' @@ -15833,10 +15733,6 @@ class JsonSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param store_settings: Json store settings. 
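
The _attribute_map entries above drive msrest serialization, so removing the 'tableOption' key also removes it from request bodies. A round-trip sketch, with client_models built the way the generated client does:

    from msrest import Serializer
    from azure.mgmt.datafactory import models

    client_models = {k: v for k, v in vars(models).items() if isinstance(v, type)}
    body = Serializer(client_models).body(
        models.InformixSink(pre_copy_script='DELETE FROM staging'), 'InformixSink')
    assert body['preCopyScript'] == 'DELETE FROM staging'
    assert 'tableOption' not in body  # dropped from this sink by the patch
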
@@ -15856,14 +15752,13 @@ class JsonSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'JsonWriteSettings'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, store_settings=None, format_settings=None, **kwargs) -> None: - super(JsonSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: + super(JsonSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.store_settings = store_settings self.format_settings = format_settings self.type = 'JsonSink' @@ -17152,10 +17047,6 @@ class MicrosoftAccessSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. 
Type: @@ -17174,13 +17065,12 @@ class MicrosoftAccessSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, pre_copy_script=None, **kwargs) -> None: - super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.pre_copy_script = pre_copy_script self.type = 'MicrosoftAccessSink' @@ -18406,10 +18296,6 @@ class OdbcSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. 
Type: @@ -18428,13 +18314,12 @@ class OdbcSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, pre_copy_script=None, **kwargs) -> None: - super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.pre_copy_script = pre_copy_script self.type = 'OdbcSink' @@ -19262,10 +19147,6 @@ class OracleSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: SQL pre-copy script. 
Type: string (or Expression @@ -19284,13 +19165,12 @@ class OracleSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, pre_copy_script=None, **kwargs) -> None: - super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.pre_copy_script = pre_copy_script self.type = 'OracleSink' @@ -19614,10 +19494,6 @@ class ParquetSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param store_settings: Parquet store settings. 
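
table_option is not deleted outright: the SqlDWSink, SqlMISink, SqlServerSink, and SqlSink hunks below re-add it as a sink-specific property, since only the SQL-family writers honor autoCreate. A sketch of the resulting surface (script value illustrative):

    from azure.mgmt.datafactory.models import SqlServerSink

    sink = SqlServerSink(
        pre_copy_script='TRUNCATE TABLE dbo.Stage',
        table_option='autoCreate',  # the only documented value for now
    )
    assert sink.table_option == 'autoCreate'
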
@@ -19635,13 +19511,12 @@ class ParquetSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, store_settings=None, **kwargs) -> None: - super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.store_settings = store_settings self.type = 'ParquetSink' @@ -22451,10 +22326,6 @@ class SalesforceServiceCloudSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. 
Default is @@ -22487,15 +22358,14 @@ class SalesforceServiceCloudSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None: - super(SalesforceServiceCloudSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None: + super(SalesforceServiceCloudSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.write_behavior = write_behavior self.external_id_field_name = external_id_field_name self.ignore_null_values = ignore_null_values @@ -22579,10 +22449,6 @@ class SalesforceSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. 
Default is @@ -22615,15 +22481,14 @@ class SalesforceSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None: - super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None: + super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.write_behavior = write_behavior self.external_id_field_name = external_id_field_name self.ignore_null_values = ignore_null_values @@ -23008,10 +22873,6 @@ class SapCloudForCustomerSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. 
Default is @@ -23031,13 +22892,12 @@ class SapCloudForCustomerSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, write_behavior=None, **kwargs) -> None: - super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: + super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.write_behavior = write_behavior self.type = 'SapCloudForCustomerSink' @@ -25342,10 +25202,6 @@ class SqlDWSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: SQL pre-copy script. Type: string (or Expression @@ -25358,6 +25214,10 @@ class SqlDWSink(CopySink): :param poly_base_settings: Specifies PolyBase-related settings when allowPolyBase is true. :type poly_base_settings: ~azure.mgmt.datafactory.models.PolybaseSettings + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). 
+ :type table_option: object """ _validation = { @@ -25371,18 +25231,19 @@ class SqlDWSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, pre_copy_script=None, allow_poly_base=None, poly_base_settings=None, **kwargs) -> None: - super(SqlDWSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, allow_poly_base=None, poly_base_settings=None, table_option=None, **kwargs) -> None: + super(SqlDWSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.pre_copy_script = pre_copy_script self.allow_poly_base = allow_poly_base self.poly_base_settings = poly_base_settings + self.table_option = table_option self.type = 'SqlDWSink' @@ -25470,10 +25331,6 @@ class SqlMISink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param sql_writer_stored_procedure_name: SQL writer stored procedure name. @@ -25492,6 +25349,10 @@ class SqlMISink(CopySink): parameter name of the table type. Type: string (or Expression with resultType string). :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). 
+ :type table_option: object """ _validation = { @@ -25505,22 +25366,23 @@ class SqlMISink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None: - super(SqlMISink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None, **kwargs) -> None: + super(SqlMISink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type self.pre_copy_script = pre_copy_script self.stored_procedure_parameters = stored_procedure_parameters self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.table_option = table_option self.type = 'SqlMISink' @@ -25673,10 +25535,6 @@ class SqlServerSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param sql_writer_stored_procedure_name: SQL writer stored procedure name. @@ -25695,6 +25553,10 @@ class SqlServerSink(CopySink): parameter name of the table type. Type: string (or Expression with resultType string). :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. 
Type: string (or Expression + with resultType string). + :type table_option: object """ _validation = { @@ -25708,22 +25570,23 @@ class SqlServerSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None: - super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None, **kwargs) -> None: + super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type self.pre_copy_script = pre_copy_script self.stored_procedure_parameters = stored_procedure_parameters self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.table_option = table_option self.type = 'SqlServerSink' @@ -25942,10 +25805,6 @@ class SqlSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param sql_writer_stored_procedure_name: SQL writer stored procedure name. @@ -25964,6 +25823,10 @@ class SqlSink(CopySink): parameter name of the table type. Type: string (or Expression with resultType string). :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. 
+ For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object """ _validation = { @@ -25977,22 +25840,23 @@ class SqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None: - super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None, **kwargs) -> None: + super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type self.pre_copy_script = pre_copy_script self.stored_procedure_parameters = stored_procedure_parameters self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.table_option = table_option self.type = 'SqlSink' From 635e69bab3732d00e3033f4b598310557becdfd7 Mon Sep 17 00:00:00 2001 From: Azure SDK for Python bot Date: Tue, 20 Aug 2019 00:04:48 -0700 Subject: [PATCH 23/30] Generated from 2658bfcd4e5ede36535616ef4e44125701d14366 (#6832) remove redundant property --- .../azure/mgmt/datafactory/models/_models.py | 54 +++++++++----- .../mgmt/datafactory/models/_models_py3.py | 72 ++++++++++++------- 2 files changed, 85 insertions(+), 41 deletions(-) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py index ef7778d653b5..a056f9732314 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py @@ -2283,19 +2283,23 @@ def __init__(self, **kwargs): class StoreWriteSettings(Model): """Connector write settings. + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: FileServerWriteSettings, AzureDataLakeStoreWriteSettings, + AzureBlobFSWriteSettings, AzureBlobStorageWriteSettings + All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object + :param type: Required. Constant filled by server. + :type type: str """ _validation = { @@ -2304,17 +2308,21 @@ class StoreWriteSettings(Model): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'FileServerWriteSettings': 'FileServerWriteSettings', 'AzureDataLakeStoreWriteSettings': 'AzureDataLakeStoreWriteSettings', 'AzureBlobFSWriteSettings': 'AzureBlobFSWriteSettings', 'AzureBlobStorageWriteSettings': 'AzureBlobStorageWriteSettings'} } def __init__(self, **kwargs): super(StoreWriteSettings, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) - self.type = kwargs.get('type', None) self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) self.copy_behavior = kwargs.get('copy_behavior', None) + self.type = None class AzureBlobFSWriteSettings(StoreWriteSettings): @@ -2325,14 +2333,17 @@ class AzureBlobFSWriteSettings(StoreWriteSettings): :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object + :param type: Required. Constant filled by server. + :type type: str + :param block_size_in_mb: Indicates the block size(MB) when writing data to + blob. Type: integer (or Expression with resultType integer). 
+ :type block_size_in_mb: object """ _validation = { @@ -2341,13 +2352,16 @@ class AzureBlobFSWriteSettings(StoreWriteSettings): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, } def __init__(self, **kwargs): super(AzureBlobFSWriteSettings, self).__init__(**kwargs) + self.block_size_in_mb = kwargs.get('block_size_in_mb', None) + self.type = 'AzureBlobFSWriteSettings' class AzureBlobStorageLinkedService(LinkedService): @@ -2549,14 +2563,17 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings): :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object + :param type: Required. Constant filled by server. + :type type: str + :param block_size_in_mb: Indicates the block size(MB) when writing data to + blob. Type: integer (or Expression with resultType integer). + :type block_size_in_mb: object """ _validation = { @@ -2565,13 +2582,16 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, } def __init__(self, **kwargs): super(AzureBlobStorageWriteSettings, self).__init__(**kwargs) + self.block_size_in_mb = kwargs.get('block_size_in_mb', None) + self.type = 'AzureBlobStorageWriteSettings' class AzureDatabricksLinkedService(LinkedService): @@ -3518,14 +3538,14 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object + :param type: Required. Constant filled by server. 
+ :type type: str """ _validation = { @@ -3534,13 +3554,14 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, } def __init__(self, **kwargs): super(AzureDataLakeStoreWriteSettings, self).__init__(**kwargs) + self.type = 'AzureDataLakeStoreWriteSettings' class AzureFunctionActivity(ExecutionActivity): @@ -11061,14 +11082,14 @@ class FileServerWriteSettings(StoreWriteSettings): :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object + :param type: Required. Constant filled by server. + :type type: str """ _validation = { @@ -11077,13 +11098,14 @@ class FileServerWriteSettings(StoreWriteSettings): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, } def __init__(self, **kwargs): super(FileServerWriteSettings, self).__init__(**kwargs) + self.type = 'FileServerWriteSettings' class FileShareDataset(Dataset): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py index 295f650553e3..6449023a7260 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py @@ -2283,19 +2283,23 @@ def __init__(self, *, additional_properties=None, source_retry_count=None, sourc class StoreWriteSettings(Model): """Connector write settings. + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: FileServerWriteSettings, AzureDataLakeStoreWriteSettings, + AzureBlobFSWriteSettings, AzureBlobStorageWriteSettings + All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object + :param type: Required. Constant filled by server. 
+ :type type: str """ _validation = { @@ -2304,17 +2308,21 @@ class StoreWriteSettings(Model): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'FileServerWriteSettings': 'FileServerWriteSettings', 'AzureDataLakeStoreWriteSettings': 'AzureDataLakeStoreWriteSettings', 'AzureBlobFSWriteSettings': 'AzureBlobFSWriteSettings', 'AzureBlobStorageWriteSettings': 'AzureBlobStorageWriteSettings'} } - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: super(StoreWriteSettings, self).__init__(**kwargs) self.additional_properties = additional_properties - self.type = type self.max_concurrent_connections = max_concurrent_connections self.copy_behavior = copy_behavior + self.type = None class AzureBlobFSWriteSettings(StoreWriteSettings): @@ -2325,14 +2333,17 @@ class AzureBlobFSWriteSettings(StoreWriteSettings): :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object + :param type: Required. Constant filled by server. + :type type: str + :param block_size_in_mb: Indicates the block size(MB) when writing data to + blob. Type: integer (or Expression with resultType integer). 
+ :type block_size_in_mb: object """ _validation = { @@ -2341,13 +2352,16 @@ class AzureBlobFSWriteSettings(StoreWriteSettings): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, } - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: - super(AzureBlobFSWriteSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, block_size_in_mb=None, **kwargs) -> None: + super(AzureBlobFSWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + self.block_size_in_mb = block_size_in_mb + self.type = 'AzureBlobFSWriteSettings' class AzureBlobStorageLinkedService(LinkedService): @@ -2549,14 +2563,17 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings): :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object + :param type: Required. Constant filled by server. + :type type: str + :param block_size_in_mb: Indicates the block size(MB) when writing data to + blob. Type: integer (or Expression with resultType integer). 
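A minimal usage sketch of the refactored write settings, assuming this patch is applied (the connection count, copy behavior, and block size values below are illustrative placeholders, not part of this diff):

from azure.mgmt.datafactory.models import AzureBlobStorageWriteSettings

# After this patch the subclasses pin `type` themselves, so callers no
# longer pass type= to __init__; block_size_in_mb is the only new knob.
settings = AzureBlobStorageWriteSettings(
    max_concurrent_connections=8,       # literal or Expression; typed as object
    copy_behavior='PreserveHierarchy',  # illustrative copy behavior value
    block_size_in_mb=32,                # serialized as 'blockSizeInMB'
)
assert settings.type == 'AzureBlobStorageWriteSettings'

The new StoreWriteSettings._subtype_map also lets msrest deserialize any of the four write-settings payloads from their 'type' discriminator instead of treating the base class as concrete.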
+ :type block_size_in_mb: object """ _validation = { @@ -2565,13 +2582,16 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, } - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: - super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, block_size_in_mb=None, **kwargs) -> None: + super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + self.block_size_in_mb = block_size_in_mb + self.type = 'AzureBlobStorageWriteSettings' class AzureDatabricksLinkedService(LinkedService): @@ -3518,14 +3538,14 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object + :param type: Required. Constant filled by server. + :type type: str """ _validation = { @@ -3534,13 +3554,14 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, } - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: - super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + self.type = 'AzureDataLakeStoreWriteSettings' class AzureFunctionActivity(ExecutionActivity): @@ -11061,14 +11082,14 @@ class FileServerWriteSettings(StoreWriteSettings): :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. 
Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object + :param type: Required. Constant filled by server. + :type type: str """ _validation = { @@ -11077,13 +11098,14 @@ class FileServerWriteSettings(StoreWriteSettings): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, } - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: - super(FileServerWriteSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(FileServerWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + self.type = 'FileServerWriteSettings' class FileShareDataset(Dataset): From e99ffb75dbfa525a20bd221e32f180719e6dc43d Mon Sep 17 00:00:00 2001 From: Azure SDK for Python bot Date: Thu, 29 Aug 2019 20:27:36 -0700 Subject: [PATCH 24/30] Generated from 5e1bb35d5c3314d8f4fead76c3d69a2522be026b (#7005) Update review comments --- .../azure/mgmt/datafactory/models/__init__.py | 3 + .../azure/mgmt/datafactory/models/_models.py | 89 +++++++++++++++++- .../mgmt/datafactory/models/_models_py3.py | 91 ++++++++++++++++++- 3 files changed, 176 insertions(+), 7 deletions(-) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index 298f059389b8..f45fba3e8bbf 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -105,6 +105,7 @@ from ._models_py3 import CassandraLinkedService from ._models_py3 import CassandraSource from ._models_py3 import CassandraTableDataset + from ._models_py3 import ChainingTrigger from ._models_py3 import CommonDataServiceForAppsEntityDataset from ._models_py3 import CommonDataServiceForAppsLinkedService from ._models_py3 import CommonDataServiceForAppsSink @@ -621,6 +622,7 @@ from ._models import CassandraLinkedService from ._models import CassandraSource from ._models import CassandraTableDataset + from ._models import ChainingTrigger from ._models import CommonDataServiceForAppsEntityDataset from ._models import CommonDataServiceForAppsLinkedService from ._models import CommonDataServiceForAppsSink @@ -1220,6 +1222,7 @@ 'CassandraLinkedService', 'CassandraSource', 'CassandraTableDataset', + 'ChainingTrigger', 'CommonDataServiceForAppsEntityDataset', 'CommonDataServiceForAppsLinkedService', 'CommonDataServiceForAppsSink', diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py index a056f9732314..aeb32319f20a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py @@ -5961,8 +5961,8 @@ class Trigger(Model): pipeline run. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: RerunTumblingWindowTrigger, TumblingWindowTrigger, - MultiplePipelineTrigger + sub-classes are: RerunTumblingWindowTrigger, ChainingTrigger, + TumblingWindowTrigger, MultiplePipelineTrigger Variables are only populated by the server, and will be ignored when sending a request. @@ -6000,7 +6000,7 @@ class Trigger(Model): } _subtype_map = { - 'type': {'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} + 'type': {'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'ChainingTrigger': 'ChainingTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} } def __init__(self, **kwargs): @@ -6526,6 +6526,70 @@ def __init__(self, **kwargs): self.type = 'CassandraTable' +class ChainingTrigger(Trigger): + """Trigger that allows the referenced pipeline to depend on other pipeline + runs based on runDimension Name/Value pairs. Upstream pipelines should + declare the same runDimension Name and their runs should have the values + for those runDimensions. The referenced pipeline run would be triggered if + the values for the runDimension match for all upstream pipeline runs. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param pipeline: Required. Pipeline for which runs are created when all + upstream pipelines complete successfully. + :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference + :param depends_on: Required. Upstream Pipelines. + :type depends_on: list[~azure.mgmt.datafactory.models.PipelineReference] + :param run_dimension: Required. Run Dimension property that needs to be + emitted by upstream pipelines. 
+ :type run_dimension: str + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'pipeline': {'required': True}, + 'depends_on': {'required': True}, + 'run_dimension': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, + 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[PipelineReference]'}, + 'run_dimension': {'key': 'typeProperties.runDimension', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ChainingTrigger, self).__init__(**kwargs) + self.pipeline = kwargs.get('pipeline', None) + self.depends_on = kwargs.get('depends_on', None) + self.run_dimension = kwargs.get('run_dimension', None) + self.type = 'ChainingTrigger' + + class CloudError(Model): """The object that defines the structure of an Azure Data Factory error response. @@ -20081,6 +20145,8 @@ class PipelineResource(SubResource): :param annotations: List of tags that can be used for describing the Pipeline. :type annotations: list[object] + :param run_dimensions: Dimensions emitted by Pipeline. + :type run_dimensions: dict[str, object] :param folder: The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.PipelineFolder @@ -20106,6 +20172,7 @@ class PipelineResource(SubResource): 'variables': {'key': 'properties.variables', 'type': '{VariableSpecification}'}, 'concurrency': {'key': 'properties.concurrency', 'type': 'int'}, 'annotations': {'key': 'properties.annotations', 'type': '[object]'}, + 'run_dimensions': {'key': 'properties.runDimensions', 'type': '{object}'}, 'folder': {'key': 'properties.folder', 'type': 'PipelineFolder'}, } @@ -20118,6 +20185,7 @@ def __init__(self, **kwargs): self.variables = kwargs.get('variables', None) self.concurrency = kwargs.get('concurrency', None) self.annotations = kwargs.get('annotations', None) + self.run_dimensions = kwargs.get('run_dimensions', None) self.folder = kwargs.get('folder', None) @@ -20143,6 +20211,8 @@ class PipelineRun(Model): :ivar parameters: The full or partial list of parameter name, value pair used in the pipeline run. :vartype parameters: dict[str, str] + :ivar run_dimensions: Run dimensions emitted by Pipeline run. + :vartype run_dimensions: dict[str, str] :ivar invoked_by: Entity that started the pipeline run. 
:vartype invoked_by: ~azure.mgmt.datafactory.models.PipelineRunInvokedBy :ivar last_updated: The last updated timestamp for the pipeline run event @@ -20166,6 +20236,7 @@ class PipelineRun(Model): 'is_latest': {'readonly': True}, 'pipeline_name': {'readonly': True}, 'parameters': {'readonly': True}, + 'run_dimensions': {'readonly': True}, 'invoked_by': {'readonly': True}, 'last_updated': {'readonly': True}, 'run_start': {'readonly': True}, @@ -20182,6 +20253,7 @@ class PipelineRun(Model): 'is_latest': {'key': 'isLatest', 'type': 'bool'}, 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{str}'}, + 'run_dimensions': {'key': 'runDimensions', 'type': '{str}'}, 'invoked_by': {'key': 'invokedBy', 'type': 'PipelineRunInvokedBy'}, 'last_updated': {'key': 'lastUpdated', 'type': 'iso-8601'}, 'run_start': {'key': 'runStart', 'type': 'iso-8601'}, @@ -20199,6 +20271,7 @@ def __init__(self, **kwargs): self.is_latest = None self.pipeline_name = None self.parameters = None + self.run_dimensions = None self.invoked_by = None self.last_updated = None self.run_start = None @@ -27428,6 +27501,10 @@ class TriggerRun(Model): :ivar triggered_pipelines: List of pipeline name and run Id triggered by the trigger run. :vartype triggered_pipelines: dict[str, str] + :ivar run_dimension: Run dimension for which trigger was fired. + :vartype run_dimension: dict[str, str] + :ivar dependency_status: Status of the upstream pipelines. + :vartype dependency_status: dict[str, object] """ _validation = { @@ -27439,6 +27516,8 @@ class TriggerRun(Model): 'message': {'readonly': True}, 'properties': {'readonly': True}, 'triggered_pipelines': {'readonly': True}, + 'run_dimension': {'readonly': True}, + 'dependency_status': {'readonly': True}, } _attribute_map = { @@ -27451,6 +27530,8 @@ class TriggerRun(Model): 'message': {'key': 'message', 'type': 'str'}, 'properties': {'key': 'properties', 'type': '{str}'}, 'triggered_pipelines': {'key': 'triggeredPipelines', 'type': '{str}'}, + 'run_dimension': {'key': 'runDimension', 'type': '{str}'}, + 'dependency_status': {'key': 'dependencyStatus', 'type': '{object}'}, } def __init__(self, **kwargs): @@ -27464,6 +27545,8 @@ def __init__(self, **kwargs): self.message = None self.properties = None self.triggered_pipelines = None + self.run_dimension = None + self.dependency_status = None class TriggerRunsQueryResponse(Model): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py index 6449023a7260..7d89d02a63b8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py @@ -5961,8 +5961,8 @@ class Trigger(Model): pipeline run. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: RerunTumblingWindowTrigger, TumblingWindowTrigger, - MultiplePipelineTrigger + sub-classes are: RerunTumblingWindowTrigger, ChainingTrigger, + TumblingWindowTrigger, MultiplePipelineTrigger Variables are only populated by the server, and will be ignored when sending a request. 
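A minimal sketch of how the new ChainingTrigger and run-dimension surface compose, assuming this patch is applied; the pipeline names, dimension key, and expression below are illustrative placeholders, and persisting the trigger through the management client is out of scope here:

from azure.mgmt.datafactory.models import (
    ChainingTrigger,
    PipelineReference,
    PipelineResource,
    TriggerPipelineReference,
)

# Upstream pipelines declare the dimension they emit via the new
# run_dimensions property; the expression value is illustrative.
upstream = PipelineResource(
    run_dimensions={'Date': "@pipeline().parameters.Date"},
)

# The trigger runs the referenced pipeline once every pipeline in
# depends_on has a run whose 'Date' run-dimension values all match.
chain = ChainingTrigger(
    pipeline=TriggerPipelineReference(
        pipeline_reference=PipelineReference(reference_name='Downstream')),
    depends_on=[
        PipelineReference(reference_name='UpstreamA'),
        PipelineReference(reference_name='UpstreamB'),
    ],
    run_dimension='Date',  # must match the key each upstream emits
)
assert chain.type == 'ChainingTrigger'  # constant filled by the subclass

The matching read-only surface added in this patch (PipelineRun.run_dimensions, TriggerRun.run_dimension, TriggerRun.dependency_status) is populated by the service on query responses and is never sent by the client.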
@@ -6000,7 +6000,7 @@ class Trigger(Model): } _subtype_map = { - 'type': {'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} + 'type': {'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'ChainingTrigger': 'ChainingTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} } def __init__(self, *, additional_properties=None, description: str=None, annotations=None, **kwargs) -> None: @@ -6526,6 +6526,70 @@ def __init__(self, *, linked_service_name, additional_properties=None, descripti self.type = 'CassandraTable' +class ChainingTrigger(Trigger): + """Trigger that allows the referenced pipeline to depend on other pipeline + runs based on runDimension Name/Value pairs. Upstream pipelines should + declare the same runDimension Name and their runs should have the values + for those runDimensions. The referenced pipeline run would be triggered if + the values for the runDimension match for all upstream pipeline runs. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param pipeline: Required. Pipeline for which runs are created when all + upstream pipelines complete successfully. + :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference + :param depends_on: Required. Upstream Pipelines. + :type depends_on: list[~azure.mgmt.datafactory.models.PipelineReference] + :param run_dimension: Required. Run Dimension property that needs to be + emitted by upstream pipelines. 
+ :type run_dimension: str + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'pipeline': {'required': True}, + 'depends_on': {'required': True}, + 'run_dimension': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, + 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[PipelineReference]'}, + 'run_dimension': {'key': 'typeProperties.runDimension', 'type': 'str'}, + } + + def __init__(self, *, pipeline, depends_on, run_dimension: str, additional_properties=None, description: str=None, annotations=None, **kwargs) -> None: + super(ChainingTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) + self.pipeline = pipeline + self.depends_on = depends_on + self.run_dimension = run_dimension + self.type = 'ChainingTrigger' + + class CloudError(Model): """The object that defines the structure of an Azure Data Factory error response. @@ -20081,6 +20145,8 @@ class PipelineResource(SubResource): :param annotations: List of tags that can be used for describing the Pipeline. :type annotations: list[object] + :param run_dimensions: Dimensions emitted by Pipeline. + :type run_dimensions: dict[str, object] :param folder: The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.PipelineFolder @@ -20106,10 +20172,11 @@ class PipelineResource(SubResource): 'variables': {'key': 'properties.variables', 'type': '{VariableSpecification}'}, 'concurrency': {'key': 'properties.concurrency', 'type': 'int'}, 'annotations': {'key': 'properties.annotations', 'type': '[object]'}, + 'run_dimensions': {'key': 'properties.runDimensions', 'type': '{object}'}, 'folder': {'key': 'properties.folder', 'type': 'PipelineFolder'}, } - def __init__(self, *, additional_properties=None, description: str=None, activities=None, parameters=None, variables=None, concurrency: int=None, annotations=None, folder=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, description: str=None, activities=None, parameters=None, variables=None, concurrency: int=None, annotations=None, run_dimensions=None, folder=None, **kwargs) -> None: super(PipelineResource, self).__init__(**kwargs) self.additional_properties = additional_properties self.description = description @@ -20118,6 +20185,7 @@ def __init__(self, *, additional_properties=None, description: str=None, activit self.variables = variables self.concurrency = concurrency self.annotations = annotations + self.run_dimensions = run_dimensions self.folder = folder @@ -20143,6 +20211,8 @@ class PipelineRun(Model): :ivar parameters: The full or partial list of parameter name, value pair used in the pipeline run. :vartype parameters: dict[str, str] + :ivar run_dimensions: Run dimensions emitted by Pipeline run. + :vartype run_dimensions: dict[str, str] :ivar invoked_by: Entity that started the pipeline run. 
:vartype invoked_by: ~azure.mgmt.datafactory.models.PipelineRunInvokedBy :ivar last_updated: The last updated timestamp for the pipeline run event @@ -20166,6 +20236,7 @@ class PipelineRun(Model): 'is_latest': {'readonly': True}, 'pipeline_name': {'readonly': True}, 'parameters': {'readonly': True}, + 'run_dimensions': {'readonly': True}, 'invoked_by': {'readonly': True}, 'last_updated': {'readonly': True}, 'run_start': {'readonly': True}, @@ -20182,6 +20253,7 @@ class PipelineRun(Model): 'is_latest': {'key': 'isLatest', 'type': 'bool'}, 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{str}'}, + 'run_dimensions': {'key': 'runDimensions', 'type': '{str}'}, 'invoked_by': {'key': 'invokedBy', 'type': 'PipelineRunInvokedBy'}, 'last_updated': {'key': 'lastUpdated', 'type': 'iso-8601'}, 'run_start': {'key': 'runStart', 'type': 'iso-8601'}, @@ -20199,6 +20271,7 @@ def __init__(self, *, additional_properties=None, **kwargs) -> None: self.is_latest = None self.pipeline_name = None self.parameters = None + self.run_dimensions = None self.invoked_by = None self.last_updated = None self.run_start = None @@ -27428,6 +27501,10 @@ class TriggerRun(Model): :ivar triggered_pipelines: List of pipeline name and run Id triggered by the trigger run. :vartype triggered_pipelines: dict[str, str] + :ivar run_dimension: Run dimension for which trigger was fired. + :vartype run_dimension: dict[str, str] + :ivar dependency_status: Status of the upstream pipelines. + :vartype dependency_status: dict[str, object] """ _validation = { @@ -27439,6 +27516,8 @@ class TriggerRun(Model): 'message': {'readonly': True}, 'properties': {'readonly': True}, 'triggered_pipelines': {'readonly': True}, + 'run_dimension': {'readonly': True}, + 'dependency_status': {'readonly': True}, } _attribute_map = { @@ -27451,6 +27530,8 @@ class TriggerRun(Model): 'message': {'key': 'message', 'type': 'str'}, 'properties': {'key': 'properties', 'type': '{str}'}, 'triggered_pipelines': {'key': 'triggeredPipelines', 'type': '{str}'}, + 'run_dimension': {'key': 'runDimension', 'type': '{str}'}, + 'dependency_status': {'key': 'dependencyStatus', 'type': '{object}'}, } def __init__(self, *, additional_properties=None, **kwargs) -> None: @@ -27464,6 +27545,8 @@ def __init__(self, *, additional_properties=None, **kwargs) -> None: self.message = None self.properties = None self.triggered_pipelines = None + self.run_dimension = None + self.dependency_status = None class TriggerRunsQueryResponse(Model): From 4276f1b236b203c6228e4ab40a24b7648acd9e97 Mon Sep 17 00:00:00 2001 From: Zim Kalinowski Date: Fri, 30 Aug 2019 11:23:34 +0000 Subject: [PATCH 25/30] using old version of autorest --- .../azure/mgmt/datafactory/__init__.py | 7 +- .../azure/mgmt/datafactory/_configuration.py | 48 - ...t.py => data_factory_management_client.py} | 64 +- .../azure/mgmt/datafactory/models/__init__.py | 3027 +- .../azure/mgmt/datafactory/models/_models.py | 29103 ---------------- .../mgmt/datafactory/models/_models_py3.py | 29103 ---------------- .../mgmt/datafactory/models/_paged_models.py | 118 - .../models/access_policy_response.py | 36 + .../models/access_policy_response_py3.py | 36 + .../azure/mgmt/datafactory/models/activity.py | 63 + .../datafactory/models/activity_dependency.py | 46 + .../models/activity_dependency_py3.py | 46 + .../datafactory/models/activity_policy.py | 59 + .../datafactory/models/activity_policy_py3.py | 59 + .../mgmt/datafactory/models/activity_py3.py | 63 + 
.../mgmt/datafactory/models/activity_run.py | 102 + .../datafactory/models/activity_run_py3.py | 102 + .../models/activity_runs_query_response.py | 39 + .../activity_runs_query_response_py3.py | 39 + .../models/amazon_mws_linked_service.py | 106 + .../models/amazon_mws_linked_service_py3.py | 106 + .../models/amazon_mws_object_dataset.py | 72 + .../models/amazon_mws_object_dataset_py3.py | 72 + .../datafactory/models/amazon_mws_source.py | 57 + .../models/amazon_mws_source_py3.py | 57 + .../models/amazon_redshift_linked_service.py | 86 + .../amazon_redshift_linked_service_py3.py | 86 + .../models/amazon_redshift_source.py | 65 + .../models/amazon_redshift_source_py3.py | 65 + .../models/amazon_redshift_table_dataset.py | 82 + .../amazon_redshift_table_dataset_py3.py | 82 + .../datafactory/models/amazon_s3_dataset.py | 107 + .../models/amazon_s3_dataset_py3.py | 107 + .../models/amazon_s3_linked_service.py | 77 + .../models/amazon_s3_linked_service_py3.py | 77 + .../datafactory/models/amazon_s3_location.py | 55 + .../models/amazon_s3_location_py3.py | 55 + .../models/amazon_s3_read_settings.py | 78 + .../models/amazon_s3_read_settings_py3.py | 78 + .../models/append_variable_activity.py | 60 + .../models/append_variable_activity_py3.py | 60 + .../mgmt/datafactory/models/avro_dataset.py | 83 + .../datafactory/models/avro_dataset_py3.py | 83 + .../mgmt/datafactory/models/avro_format.py | 46 + .../datafactory/models/avro_format_py3.py | 46 + .../mgmt/datafactory/models/avro_sink.py | 69 + .../mgmt/datafactory/models/avro_sink_py3.py | 69 + .../mgmt/datafactory/models/avro_source.py | 56 + .../datafactory/models/avro_source_py3.py | 56 + .../datafactory/models/avro_write_settings.py | 46 + .../models/avro_write_settings_py3.py | 46 + .../models/azure_batch_linked_service.py | 88 + .../models/azure_batch_linked_service_py3.py | 88 + .../datafactory/models/azure_blob_dataset.py | 100 + .../models/azure_blob_dataset_py3.py | 100 + .../models/azure_blob_fs_dataset.py | 85 + .../models/azure_blob_fs_dataset_py3.py | 85 + .../models/azure_blob_fs_linked_service.py | 86 + .../azure_blob_fs_linked_service_py3.py | 86 + .../models/azure_blob_fs_location.py | 50 + .../models/azure_blob_fs_location_py3.py | 50 + .../models/azure_blob_fs_read_settings.py | 73 + .../models/azure_blob_fs_read_settings_py3.py | 73 + .../datafactory/models/azure_blob_fs_sink.py | 65 + .../models/azure_blob_fs_sink_py3.py | 65 + .../models/azure_blob_fs_source.py | 68 + .../models/azure_blob_fs_source_py3.py | 68 + .../models/azure_blob_fs_write_settings.py | 51 + .../azure_blob_fs_write_settings_py3.py | 51 + .../azure_blob_storage_linked_service.py | 104 + .../azure_blob_storage_linked_service_py3.py | 104 + .../models/azure_blob_storage_location.py | 50 + .../models/azure_blob_storage_location_py3.py | 50 + .../azure_blob_storage_read_settings.py | 73 + .../azure_blob_storage_read_settings_py3.py | 73 + .../azure_blob_storage_write_settings.py | 51 + .../azure_blob_storage_write_settings_py3.py | 51 + .../azure_data_explorer_command_activity.py | 71 + ...zure_data_explorer_command_activity_py3.py | 71 + .../azure_data_explorer_linked_service.py | 86 + .../azure_data_explorer_linked_service_py3.py | 86 + .../models/azure_data_explorer_sink.py | 76 + .../models/azure_data_explorer_sink_py3.py | 76 + .../models/azure_data_explorer_source.py | 70 + .../models/azure_data_explorer_source_py3.py | 70 + .../azure_data_explorer_table_dataset.py | 72 + .../azure_data_explorer_table_dataset_py3.py | 72 + 
...zure_data_lake_analytics_linked_service.py | 99 + ..._data_lake_analytics_linked_service_py3.py | 99 + .../models/azure_data_lake_store_dataset.py | 86 + .../azure_data_lake_store_dataset_py3.py | 86 + .../azure_data_lake_store_linked_service.py | 98 + ...zure_data_lake_store_linked_service_py3.py | 98 + .../models/azure_data_lake_store_location.py | 45 + .../azure_data_lake_store_location_py3.py | 45 + .../azure_data_lake_store_read_settings.py | 73 + ...azure_data_lake_store_read_settings_py3.py | 73 + .../models/azure_data_lake_store_sink.py | 69 + .../models/azure_data_lake_store_sink_py3.py | 69 + .../models/azure_data_lake_store_source.py | 58 + .../azure_data_lake_store_source_py3.py | 58 + .../azure_data_lake_store_write_settings.py | 46 + ...zure_data_lake_store_write_settings_py3.py | 46 + .../models/azure_databricks_linked_service.py | 126 + .../azure_databricks_linked_service_py3.py | 126 + .../models/azure_function_activity.py | 85 + .../models/azure_function_activity_py3.py | 85 + .../models/azure_function_linked_service.py | 69 + .../azure_function_linked_service_py3.py | 69 + .../models/azure_key_vault_linked_service.py | 60 + .../azure_key_vault_linked_service_py3.py | 60 + .../azure_key_vault_secret_reference.py | 51 + .../azure_key_vault_secret_reference_py3.py | 51 + .../models/azure_maria_db_linked_service.py | 69 + .../azure_maria_db_linked_service_py3.py | 69 + .../models/azure_maria_db_source.py | 57 + .../models/azure_maria_db_source_py3.py | 57 + .../models/azure_maria_db_table_dataset.py | 72 + .../azure_maria_db_table_dataset_py3.py | 72 + .../azure_ml_batch_execution_activity.py | 82 + .../azure_ml_batch_execution_activity_py3.py | 82 + .../models/azure_ml_linked_service.py | 94 + .../models/azure_ml_linked_service_py3.py | 94 + .../azure_ml_update_resource_activity.py | 81 + .../azure_ml_update_resource_activity_py3.py | 81 + .../models/azure_ml_web_service_file.py | 43 + .../models/azure_ml_web_service_file_py3.py | 43 + .../models/azure_my_sql_linked_service.py | 71 + .../models/azure_my_sql_linked_service_py3.py | 71 + .../datafactory/models/azure_my_sql_sink.py | 66 + .../models/azure_my_sql_sink_py3.py | 66 + .../datafactory/models/azure_my_sql_source.py | 57 + .../models/azure_my_sql_source_py3.py | 57 + .../models/azure_my_sql_table_dataset.py | 72 + .../models/azure_my_sql_table_dataset_py3.py | 72 + .../azure_postgre_sql_linked_service.py | 70 + .../azure_postgre_sql_linked_service_py3.py | 70 + .../models/azure_postgre_sql_sink.py | 66 + .../models/azure_postgre_sql_sink_py3.py | 66 + .../models/azure_postgre_sql_source.py | 57 + .../models/azure_postgre_sql_source_py3.py | 57 + .../models/azure_postgre_sql_table_dataset.py | 84 + .../azure_postgre_sql_table_dataset_py3.py | 84 + .../datafactory/models/azure_queue_sink.py | 61 + .../models/azure_queue_sink_py3.py | 61 + .../models/azure_search_index_dataset.py | 73 + .../models/azure_search_index_dataset_py3.py | 73 + .../models/azure_search_index_sink.py | 67 + .../models/azure_search_index_sink_py3.py | 67 + .../models/azure_search_linked_service.py | 69 + .../models/azure_search_linked_service_py3.py | 69 + .../azure_sql_database_linked_service.py | 87 + .../azure_sql_database_linked_service_py3.py | 87 + .../models/azure_sql_dw_linked_service.py | 88 + .../models/azure_sql_dw_linked_service_py3.py | 88 + .../models/azure_sql_dw_table_dataset.py | 82 + .../models/azure_sql_dw_table_dataset_py3.py | 82 + .../models/azure_sql_mi_linked_service.py | 87 + 
.../models/azure_sql_mi_linked_service_py3.py | 87 + .../models/azure_sql_mi_table_dataset.py | 82 + .../models/azure_sql_mi_table_dataset_py3.py | 82 + .../mgmt/datafactory/models/azure_sql_sink.py | 93 + .../datafactory/models/azure_sql_sink_py3.py | 93 + .../datafactory/models/azure_sql_source.py | 73 + .../models/azure_sql_source_py3.py | 73 + .../models/azure_sql_table_dataset.py | 82 + .../models/azure_sql_table_dataset_py3.py | 82 + .../models/azure_storage_linked_service.py | 83 + .../azure_storage_linked_service_py3.py | 83 + .../datafactory/models/azure_table_dataset.py | 73 + .../models/azure_table_dataset_py3.py | 73 + .../datafactory/models/azure_table_sink.py | 81 + .../models/azure_table_sink_py3.py | 81 + .../datafactory/models/azure_table_source.py | 63 + .../models/azure_table_source_py3.py | 63 + .../azure_table_storage_linked_service.py | 83 + .../azure_table_storage_linked_service_py3.py | 83 + .../mgmt/datafactory/models/binary_dataset.py | 77 + .../datafactory/models/binary_dataset_py3.py | 77 + .../mgmt/datafactory/models/binary_sink.py | 65 + .../datafactory/models/binary_sink_py3.py | 65 + .../mgmt/datafactory/models/binary_source.py | 56 + .../datafactory/models/binary_source_py3.py | 56 + .../datafactory/models/blob_events_trigger.py | 85 + .../models/blob_events_trigger_py3.py | 85 + .../mgmt/datafactory/models/blob_sink.py | 80 + .../mgmt/datafactory/models/blob_sink_py3.py | 80 + .../mgmt/datafactory/models/blob_source.py | 68 + .../datafactory/models/blob_source_py3.py | 68 + .../mgmt/datafactory/models/blob_trigger.py | 78 + .../datafactory/models/blob_trigger_py3.py | 78 + .../models/cassandra_linked_service.py | 84 + .../models/cassandra_linked_service_py3.py | 84 + .../datafactory/models/cassandra_source.py | 70 + .../models/cassandra_source_py3.py | 70 + .../models/cassandra_table_dataset.py | 77 + .../models/cassandra_table_dataset_py3.py | 77 + ...on_data_service_for_apps_entity_dataset.py | 72 + ...ata_service_for_apps_entity_dataset_py3.py | 72 + ...on_data_service_for_apps_linked_service.py | 115 + ...ata_service_for_apps_linked_service_py3.py | 115 + .../common_data_service_for_apps_sink.py | 77 + .../common_data_service_for_apps_sink_py3.py | 77 + .../common_data_service_for_apps_source.py | 58 + ...common_data_service_for_apps_source_py3.py | 58 + .../models/concur_linked_service.py | 92 + .../models/concur_linked_service_py3.py | 92 + .../models/concur_object_dataset.py | 72 + .../models/concur_object_dataset_py3.py | 72 + .../mgmt/datafactory/models/concur_source.py | 57 + .../datafactory/models/concur_source_py3.py | 57 + .../datafactory/models/control_activity.py | 60 + .../models/control_activity_py3.py | 60 + .../mgmt/datafactory/models/copy_activity.py | 124 + .../datafactory/models/copy_activity_py3.py | 124 + .../mgmt/datafactory/models/copy_sink.py | 82 + .../mgmt/datafactory/models/copy_sink_py3.py | 82 + .../mgmt/datafactory/models/copy_source.py | 83 + .../datafactory/models/copy_source_py3.py | 83 + .../models/cosmos_db_linked_service.py | 71 + .../models/cosmos_db_linked_service_py3.py | 71 + ...smos_db_mongo_db_api_collection_dataset.py | 73 + ..._db_mongo_db_api_collection_dataset_py3.py | 73 + .../cosmos_db_mongo_db_api_linked_service.py | 67 + ...smos_db_mongo_db_api_linked_service_py3.py | 67 + .../models/cosmos_db_mongo_db_api_sink.py | 68 + .../models/cosmos_db_mongo_db_api_sink_py3.py | 68 + .../models/cosmos_db_mongo_db_api_source.py | 71 + .../cosmos_db_mongo_db_api_source_py3.py | 71 + 
.../models/couchbase_linked_service.py | 70 + .../models/couchbase_linked_service_py3.py | 70 + .../datafactory/models/couchbase_source.py | 57 + .../models/couchbase_source_py3.py | 57 + .../models/couchbase_table_dataset.py | 72 + .../models/couchbase_table_dataset_py3.py | 72 + ...eate_linked_integration_runtime_request.py | 43 + ..._linked_integration_runtime_request_py3.py | 43 + .../datafactory/models/create_run_response.py | 34 + .../models/create_run_response_py3.py | 34 + .../datafactory/models/custom_activity.py | 91 + .../datafactory/models/custom_activity_py3.py | 91 + .../custom_activity_reference_object.py | 33 + .../custom_activity_reference_object_py3.py | 33 + .../custom_data_source_linked_service.py | 58 + .../custom_data_source_linked_service_py3.py | 58 + .../mgmt/datafactory/models/custom_dataset.py | 71 + .../datafactory/models/custom_dataset_py3.py | 71 + ...> data_factory_management_client_enums.py} | 0 .../data_lake_analytics_usql_activity.py | 98 + .../data_lake_analytics_usql_activity_py3.py | 98 + .../models/databricks_notebook_activity.py | 76 + .../databricks_notebook_activity_py3.py | 76 + .../models/databricks_spark_jar_activity.py | 75 + .../databricks_spark_jar_activity_py3.py | 75 + .../databricks_spark_python_activity.py | 75 + .../databricks_spark_python_activity_py3.py | 75 + .../azure/mgmt/datafactory/models/dataset.py | 113 + .../models/dataset_bzip2_compression.py | 38 + .../models/dataset_bzip2_compression_py3.py | 38 + .../datafactory/models/dataset_compression.py | 47 + .../models/dataset_compression_py3.py | 47 + .../models/dataset_deflate_compression.py | 42 + .../models/dataset_deflate_compression_py3.py | 42 + .../mgmt/datafactory/models/dataset_folder.py | 29 + .../datafactory/models/dataset_folder_py3.py | 29 + .../models/dataset_gzip_compression.py | 42 + .../models/dataset_gzip_compression_py3.py | 42 + .../datafactory/models/dataset_location.py | 49 + .../models/dataset_location_py3.py | 49 + .../mgmt/datafactory/models/dataset_py3.py | 113 + .../datafactory/models/dataset_reference.py | 48 + .../models/dataset_reference_py3.py | 48 + .../datafactory/models/dataset_resource.py | 53 + .../models/dataset_resource_paged.py | 27 + .../models/dataset_resource_py3.py | 53 + .../models/dataset_storage_format.py | 57 + .../models/dataset_storage_format_py3.py | 57 + .../models/dataset_zip_deflate_compression.py | 42 + .../dataset_zip_deflate_compression_py3.py | 42 + .../datafactory/models/db2_linked_service.py | 86 + .../models/db2_linked_service_py3.py | 86 + .../mgmt/datafactory/models/db2_source.py | 57 + .../mgmt/datafactory/models/db2_source_py3.py | 57 + .../datafactory/models/db2_table_dataset.py | 82 + .../models/db2_table_dataset_py3.py | 82 + .../datafactory/models/delete_activity.py | 87 + .../datafactory/models/delete_activity_py3.py | 87 + .../models/delimited_text_dataset.py | 122 + .../models/delimited_text_dataset_py3.py | 122 + .../models/delimited_text_read_settings.py | 43 + .../delimited_text_read_settings_py3.py | 43 + .../datafactory/models/delimited_text_sink.py | 70 + .../models/delimited_text_sink_py3.py | 70 + .../models/delimited_text_source.py | 61 + .../models/delimited_text_source_py3.py | 61 + .../models/delimited_text_write_settings.py | 49 + .../delimited_text_write_settings_py3.py | 49 + .../models/dependency_reference.py | 42 + .../models/dependency_reference_py3.py | 42 + .../datafactory/models/distcp_settings.py | 49 + .../datafactory/models/distcp_settings_py3.py | 49 + 
.../models/document_db_collection_dataset.py | 73 + .../document_db_collection_dataset_py3.py | 73 + .../models/document_db_collection_sink.py | 71 + .../models/document_db_collection_sink_py3.py | 71 + .../models/document_db_collection_source.py | 62 + .../document_db_collection_source_py3.py | 62 + .../models/drill_linked_service.py | 69 + .../models/drill_linked_service_py3.py | 69 + .../mgmt/datafactory/models/drill_source.py | 57 + .../datafactory/models/drill_source_py3.py | 57 + .../datafactory/models/drill_table_dataset.py | 82 + .../models/drill_table_dataset_py3.py | 82 + .../models/dynamics_ax_linked_service.py | 93 + .../models/dynamics_ax_linked_service_py3.py | 93 + .../models/dynamics_ax_resource_dataset.py | 73 + .../dynamics_ax_resource_dataset_py3.py | 73 + .../datafactory/models/dynamics_ax_source.py | 57 + .../models/dynamics_ax_source_py3.py | 57 + .../models/dynamics_crm_entity_dataset.py | 72 + .../models/dynamics_crm_entity_dataset_py3.py | 72 + .../models/dynamics_crm_linked_service.py | 112 + .../models/dynamics_crm_linked_service_py3.py | 112 + .../datafactory/models/dynamics_crm_sink.py | 77 + .../models/dynamics_crm_sink_py3.py | 77 + .../datafactory/models/dynamics_crm_source.py | 58 + .../models/dynamics_crm_source_py3.py | 58 + .../models/dynamics_entity_dataset.py | 72 + .../models/dynamics_entity_dataset_py3.py | 72 + .../models/dynamics_linked_service.py | 109 + .../models/dynamics_linked_service_py3.py | 109 + .../mgmt/datafactory/models/dynamics_sink.py | 77 + .../datafactory/models/dynamics_sink_py3.py | 77 + .../datafactory/models/dynamics_source.py | 58 + .../datafactory/models/dynamics_source_py3.py | 58 + .../models/eloqua_linked_service.py | 91 + .../models/eloqua_linked_service_py3.py | 91 + .../models/eloqua_object_dataset.py | 72 + .../models/eloqua_object_dataset_py3.py | 72 + .../mgmt/datafactory/models/eloqua_source.py | 57 + .../datafactory/models/eloqua_source_py3.py | 57 + .../datafactory/models/entity_reference.py | 34 + .../models/entity_reference_py3.py | 34 + .../models/execute_pipeline_activity.py | 65 + .../models/execute_pipeline_activity_py3.py | 65 + .../models/execute_ssis_package_activity.py | 124 + .../execute_ssis_package_activity_py3.py | 124 + .../datafactory/models/execution_activity.py | 75 + .../models/execution_activity_py3.py | 75 + .../models/exposure_control_request.py | 32 + .../models/exposure_control_request_py3.py | 32 + .../models/exposure_control_response.py | 40 + .../models/exposure_control_response_py3.py | 40 + .../mgmt/datafactory/models/expression.py | 43 + .../mgmt/datafactory/models/expression_py3.py | 43 + .../azure/mgmt/datafactory/models/factory.py | 81 + .../models/factory_git_hub_configuration.py | 58 + .../factory_git_hub_configuration_py3.py | 58 + .../datafactory/models/factory_identity.py | 49 + .../models/factory_identity_py3.py | 49 + .../mgmt/datafactory/models/factory_paged.py | 27 + .../mgmt/datafactory/models/factory_py3.py | 81 + .../models/factory_repo_configuration.py | 65 + .../models/factory_repo_configuration_py3.py | 65 + .../datafactory/models/factory_repo_update.py | 33 + .../models/factory_repo_update_py3.py | 33 + .../models/factory_update_parameters.py | 32 + .../models/factory_update_parameters_py3.py | 32 + .../models/factory_vsts_configuration.py | 62 + .../models/factory_vsts_configuration_py3.py | 62 + .../models/file_server_linked_service.py | 74 + .../models/file_server_linked_service_py3.py | 74 + .../models/file_server_location.py | 45 + 
.../models/file_server_location_py3.py | 45 + .../models/file_server_read_settings.py | 73 + .../models/file_server_read_settings_py3.py | 73 + .../models/file_server_write_settings.py | 46 + .../models/file_server_write_settings_py3.py | 46 + .../datafactory/models/file_share_dataset.py | 101 + .../models/file_share_dataset_py3.py | 101 + .../datafactory/models/file_system_sink.py | 65 + .../models/file_system_sink_py3.py | 65 + .../datafactory/models/file_system_source.py | 58 + .../models/file_system_source_py3.py | 58 + .../datafactory/models/filter_activity.py | 61 + .../datafactory/models/filter_activity_py3.py | 61 + .../datafactory/models/for_each_activity.py | 73 + .../models/for_each_activity_py3.py | 73 + .../models/format_read_settings.py | 39 + .../models/format_read_settings_py3.py | 39 + .../models/format_write_settings.py | 39 + .../models/format_write_settings_py3.py | 39 + .../datafactory/models/ftp_read_settings.py | 63 + .../models/ftp_read_settings_py3.py | 63 + .../models/ftp_server_linked_service.py | 98 + .../models/ftp_server_linked_service_py3.py | 98 + .../datafactory/models/ftp_server_location.py | 45 + .../models/ftp_server_location_py3.py | 45 + .../models/get_metadata_activity.py | 67 + .../models/get_metadata_activity_py3.py | 67 + .../get_ssis_object_metadata_request.py | 28 + .../get_ssis_object_metadata_request_py3.py | 28 + .../models/git_hub_access_token_request.py | 44 + .../git_hub_access_token_request_py3.py | 44 + .../models/git_hub_access_token_response.py | 28 + .../git_hub_access_token_response_py3.py | 28 + .../models/google_ad_words_linked_service.py | 119 + .../google_ad_words_linked_service_py3.py | 119 + .../models/google_ad_words_object_dataset.py | 72 + .../google_ad_words_object_dataset_py3.py | 72 + .../models/google_ad_words_source.py | 57 + .../models/google_ad_words_source_py3.py | 57 + .../models/google_big_query_linked_service.py | 124 + .../google_big_query_linked_service_py3.py | 124 + .../models/google_big_query_object_dataset.py | 82 + .../google_big_query_object_dataset_py3.py | 82 + .../models/google_big_query_source.py | 57 + .../models/google_big_query_source_py3.py | 57 + .../models/greenplum_linked_service.py | 69 + .../models/greenplum_linked_service_py3.py | 69 + .../datafactory/models/greenplum_source.py | 57 + .../models/greenplum_source_py3.py | 57 + .../models/greenplum_table_dataset.py | 82 + .../models/greenplum_table_dataset_py3.py | 82 + .../models/hbase_linked_service.py | 114 + .../models/hbase_linked_service_py3.py | 114 + .../models/hbase_object_dataset.py | 72 + .../models/hbase_object_dataset_py3.py | 72 + .../mgmt/datafactory/models/hbase_source.py | 57 + .../datafactory/models/hbase_source_py3.py | 57 + .../models/hd_insight_hive_activity.py | 96 + .../models/hd_insight_hive_activity_py3.py | 96 + .../models/hd_insight_linked_service.py | 96 + .../models/hd_insight_linked_service_py3.py | 96 + .../models/hd_insight_map_reduce_activity.py | 99 + .../hd_insight_map_reduce_activity_py3.py | 99 + .../hd_insight_on_demand_linked_service.py | 237 + ...hd_insight_on_demand_linked_service_py3.py | 237 + .../models/hd_insight_pig_activity.py | 87 + .../models/hd_insight_pig_activity_py3.py | 87 + .../models/hd_insight_spark_activity.py | 100 + .../models/hd_insight_spark_activity_py3.py | 100 + .../models/hd_insight_streaming_activity.py | 122 + .../hd_insight_streaming_activity_py3.py | 122 + .../datafactory/models/hdfs_linked_service.py | 81 + .../models/hdfs_linked_service_py3.py | 81 + 
.../mgmt/datafactory/models/hdfs_location.py | 45 + .../datafactory/models/hdfs_location_py3.py | 45 + .../datafactory/models/hdfs_read_settings.py | 77 + .../models/hdfs_read_settings_py3.py | 77 + .../mgmt/datafactory/models/hdfs_source.py | 62 + .../datafactory/models/hdfs_source_py3.py | 62 + .../datafactory/models/hive_linked_service.py | 147 + .../models/hive_linked_service_py3.py | 147 + .../datafactory/models/hive_object_dataset.py | 82 + .../models/hive_object_dataset_py3.py | 82 + .../mgmt/datafactory/models/hive_source.py | 57 + .../datafactory/models/hive_source_py3.py | 57 + .../mgmt/datafactory/models/http_dataset.py | 99 + .../datafactory/models/http_dataset_py3.py | 99 + .../datafactory/models/http_linked_service.py | 105 + .../models/http_linked_service_py3.py | 105 + .../datafactory/models/http_read_settings.py | 63 + .../models/http_read_settings_py3.py | 63 + .../models/http_server_location.py | 50 + .../models/http_server_location_py3.py | 50 + .../mgmt/datafactory/models/http_source.py | 60 + .../datafactory/models/http_source_py3.py | 60 + .../models/hubspot_linked_service.py | 96 + .../models/hubspot_linked_service_py3.py | 96 + .../models/hubspot_object_dataset.py | 72 + .../models/hubspot_object_dataset_py3.py | 72 + .../mgmt/datafactory/models/hubspot_source.py | 57 + .../datafactory/models/hubspot_source_py3.py | 57 + .../models/if_condition_activity.py | 72 + .../models/if_condition_activity_py3.py | 72 + .../models/impala_linked_service.py | 117 + .../models/impala_linked_service_py3.py | 117 + .../models/impala_object_dataset.py | 82 + .../models/impala_object_dataset_py3.py | 82 + .../mgmt/datafactory/models/impala_source.py | 57 + .../datafactory/models/impala_source_py3.py | 57 + .../models/informix_linked_service.py | 86 + .../models/informix_linked_service_py3.py | 86 + .../mgmt/datafactory/models/informix_sink.py | 66 + .../datafactory/models/informix_sink_py3.py | 66 + .../datafactory/models/informix_source.py | 57 + .../datafactory/models/informix_source_py3.py | 57 + .../models/informix_table_dataset.py | 72 + .../models/informix_table_dataset_py3.py | 72 + .../datafactory/models/integration_runtime.py | 51 + .../models/integration_runtime_auth_keys.py | 32 + .../integration_runtime_auth_keys_py3.py | 32 + .../integration_runtime_compute_properties.py | 60 + ...egration_runtime_compute_properties_py3.py | 60 + .../integration_runtime_connection_info.py | 70 + ...integration_runtime_connection_info_py3.py | 70 + ..._runtime_custom_setup_script_properties.py | 33 + ...time_custom_setup_script_properties_py3.py | 33 + ...tegration_runtime_data_proxy_properties.py | 37 + ...ation_runtime_data_proxy_properties_py3.py | 37 + .../integration_runtime_monitoring_data.py | 33 + ...integration_runtime_monitoring_data_py3.py | 33 + .../integration_runtime_node_ip_address.py | 35 + ...integration_runtime_node_ip_address_py3.py | 35 + ...ntegration_runtime_node_monitoring_data.py | 79 + ...ration_runtime_node_monitoring_data_py3.py | 79 + .../models/integration_runtime_py3.py | 51 + .../models/integration_runtime_reference.py | 48 + .../integration_runtime_reference_py3.py | 48 + ...ation_runtime_regenerate_key_parameters.py | 30 + ...n_runtime_regenerate_key_parameters_py3.py | 30 + .../models/integration_runtime_resource.py | 53 + .../integration_runtime_resource_paged.py | 27 + .../integration_runtime_resource_py3.py | 53 + .../integration_runtime_ssis_catalog_info.py | 55 + ...tegration_runtime_ssis_catalog_info_py3.py | 55 + 
 .../integration_runtime_ssis_properties.py | 59 +
 ...integration_runtime_ssis_properties_py3.py | 59 +
 .../models/integration_runtime_status.py | 64 +
 ...ntegration_runtime_status_list_response.py | 40 +
 ...ration_runtime_status_list_response_py3.py | 40 +
 .../models/integration_runtime_status_py3.py | 64 +
 .../integration_runtime_status_response.py | 42 +
 ...integration_runtime_status_response_py3.py | 42 +
 .../integration_runtime_vnet_properties.py | 38 +
 ...integration_runtime_vnet_properties_py3.py | 38 +
 .../datafactory/models/jira_linked_service.py | 98 +
 .../models/jira_linked_service_py3.py | 98 +
 .../datafactory/models/jira_object_dataset.py | 72 +
 .../models/jira_object_dataset_py3.py | 72 +
 .../mgmt/datafactory/models/jira_source.py | 57 +
 .../datafactory/models/jira_source_py3.py | 57 +
 .../mgmt/datafactory/models/json_dataset.py | 85 +
 .../datafactory/models/json_dataset_py3.py | 85 +
 .../mgmt/datafactory/models/json_format.py | 82 +
 .../datafactory/models/json_format_py3.py | 82 +
 .../mgmt/datafactory/models/json_sink.py | 69 +
 .../mgmt/datafactory/models/json_sink_py3.py | 69 +
 .../mgmt/datafactory/models/json_source.py | 56 +
 .../datafactory/models/json_source_py3.py | 56 +
 .../datafactory/models/json_write_settings.py | 45 +
 .../models/json_write_settings_py3.py | 45 +
 .../models/linked_integration_runtime.py | 58 +
 ...d_integration_runtime_key_authorization.py | 39 +
 ...tegration_runtime_key_authorization_py3.py | 39 +
 .../models/linked_integration_runtime_py3.py | 58 +
 ..._integration_runtime_rbac_authorization.py | 41 +
 ...egration_runtime_rbac_authorization_py3.py | 41 +
 .../linked_integration_runtime_request.py | 35 +
 .../linked_integration_runtime_request_py3.py | 35 +
 .../models/linked_integration_runtime_type.py | 42 +
 .../linked_integration_runtime_type_py3.py | 42 +
 .../mgmt/datafactory/models/linked_service.py | 102 +
 .../datafactory/models/linked_service_py3.py | 102 +
 .../models/linked_service_reference.py | 48 +
 .../models/linked_service_reference_py3.py | 48 +
 .../models/linked_service_resource.py | 53 +
 .../models/linked_service_resource_paged.py | 27 +
 .../models/linked_service_resource_py3.py | 53 +
 .../models/log_storage_settings.py | 46 +
 .../models/log_storage_settings_py3.py | 46 +
 .../datafactory/models/lookup_activity.py | 74 +
 .../datafactory/models/lookup_activity_py3.py | 74 +
 .../models/magento_linked_service.py | 85 +
 .../models/magento_linked_service_py3.py | 85 +
 .../models/magento_object_dataset.py | 72 +
 .../models/magento_object_dataset_py3.py | 72 +
 .../mgmt/datafactory/models/magento_source.py | 57 +
 .../datafactory/models/magento_source_py3.py | 57 +
 .../models/managed_integration_runtime.py | 65 +
 .../managed_integration_runtime_error.py | 55 +
 .../managed_integration_runtime_error_py3.py | 55 +
 .../managed_integration_runtime_node.py | 52 +
 .../managed_integration_runtime_node_py3.py | 52 +
 ...ed_integration_runtime_operation_result.py | 65 +
 ...ntegration_runtime_operation_result_py3.py | 65 +
 .../models/managed_integration_runtime_py3.py | 65 +
 .../managed_integration_runtime_status.py | 78 +
 .../managed_integration_runtime_status_py3.py | 78 +
 .../models/maria_db_linked_service.py | 69 +
 .../models/maria_db_linked_service_py3.py | 69 +
 .../datafactory/models/maria_db_source.py | 57 +
 .../datafactory/models/maria_db_source_py3.py | 57 +
 .../models/maria_db_table_dataset.py | 72 +
 .../models/maria_db_table_dataset_py3.py | 72 +
 .../models/marketo_linked_service.py | 90 +
 .../models/marketo_linked_service_py3.py | 90 +
 .../models/marketo_object_dataset.py | 72 +
 .../models/marketo_object_dataset_py3.py | 72 +
 .../mgmt/datafactory/models/marketo_source.py | 57 +
 .../datafactory/models/marketo_source_py3.py | 57 +
 .../models/microsoft_access_linked_service.py | 86 +
 .../microsoft_access_linked_service_py3.py | 86 +
 .../models/microsoft_access_sink.py | 66 +
 .../models/microsoft_access_sink_py3.py | 66 +
 .../models/microsoft_access_source.py | 57 +
 .../models/microsoft_access_source_py3.py | 57 +
 .../models/microsoft_access_table_dataset.py | 72 +
 .../microsoft_access_table_dataset_py3.py | 72 +
 .../models/mongo_db_collection_dataset.py | 73 +
 .../models/mongo_db_collection_dataset_py3.py | 73 +
 .../mongo_db_cursor_methods_properties.py | 53 +
 .../mongo_db_cursor_methods_properties_py3.py | 53 +
 .../models/mongo_db_linked_service.py | 109 +
 .../models/mongo_db_linked_service_py3.py | 109 +
 .../datafactory/models/mongo_db_source.py | 57 +
 .../datafactory/models/mongo_db_source_py3.py | 57 +
 .../models/mongo_db_v2_collection_dataset.py | 73 +
 .../mongo_db_v2_collection_dataset_py3.py | 73 +
 .../models/mongo_db_v2_linked_service.py | 66 +
 .../models/mongo_db_v2_linked_service_py3.py | 66 +
 .../datafactory/models/mongo_db_v2_source.py | 71 +
 .../models/mongo_db_v2_source_py3.py | 71 +
 .../models/multiple_pipeline_trigger.py | 68 +
 .../models/multiple_pipeline_trigger_py3.py | 68 +
 .../models/my_sql_linked_service.py | 70 +
 .../models/my_sql_linked_service_py3.py | 70 +
 .../mgmt/datafactory/models/my_sql_source.py | 57 +
 .../datafactory/models/my_sql_source_py3.py | 57 +
 .../models/my_sql_table_dataset.py | 72 +
 .../models/my_sql_table_dataset_py3.py | 72 +
 .../models/netezza_linked_service.py | 69 +
 .../models/netezza_linked_service_py3.py | 69 +
 .../models/netezza_partition_settings.py | 42 +
 .../models/netezza_partition_settings_py3.py | 42 +
 .../mgmt/datafactory/models/netezza_source.py | 70 +
 .../datafactory/models/netezza_source_py3.py | 70 +
 .../models/netezza_table_dataset.py | 82 +
 .../models/netezza_table_dataset_py3.py | 82 +
 .../models/odata_linked_service.py | 127 +
 .../models/odata_linked_service_py3.py | 127 +
 .../models/odata_resource_dataset.py | 72 +
 .../models/odata_resource_dataset_py3.py | 72 +
 .../mgmt/datafactory/models/odata_source.py | 57 +
 .../datafactory/models/odata_source_py3.py | 57 +
 .../datafactory/models/odbc_linked_service.py | 86 +
 .../models/odbc_linked_service_py3.py | 86 +
 .../mgmt/datafactory/models/odbc_sink.py | 66 +
 .../mgmt/datafactory/models/odbc_sink_py3.py | 66 +
 .../mgmt/datafactory/models/odbc_source.py | 57 +
 .../datafactory/models/odbc_source_py3.py | 57 +
 .../datafactory/models/odbc_table_dataset.py | 72 +
 .../models/odbc_table_dataset_py3.py | 72 +
 .../datafactory/models/office365_dataset.py | 79 +
 .../models/office365_dataset_py3.py | 79 +
 .../models/office365_linked_service.py | 83 +
 .../models/office365_linked_service_py3.py | 83 +
 .../datafactory/models/office365_source.py | 78 +
 .../models/office365_source_py3.py | 78 +
 .../mgmt/datafactory/models/operation.py | 41 +
 .../datafactory/models/operation_display.py | 41 +
 .../models/operation_display_py3.py | 41 +
 .../models/operation_log_specification.py | 37 +
 .../models/operation_log_specification_py3.py | 37 +
 .../models/operation_metric_availability.py | 33 +
 .../operation_metric_availability_py3.py | 33 +
 .../models/operation_metric_dimension.py | 37 +
 .../models/operation_metric_dimension_py3.py | 37 +
 .../models/operation_metric_specification.py | 68 +
 .../operation_metric_specification_py3.py | 68 +
 .../datafactory/models/operation_paged.py | 27 +
 .../mgmt/datafactory/models/operation_py3.py | 41 +
 .../models/operation_service_specification.py | 34 +
 .../operation_service_specification_py3.py | 34 +
 .../models/oracle_linked_service.py | 71 +
 .../models/oracle_linked_service_py3.py | 71 +
 .../models/oracle_partition_settings.py | 46 +
 .../models/oracle_partition_settings_py3.py | 46 +
 .../oracle_service_cloud_linked_service.py | 95 +
 ...oracle_service_cloud_linked_service_py3.py | 95 +
 .../oracle_service_cloud_object_dataset.py | 72 +
 ...oracle_service_cloud_object_dataset_py3.py | 72 +
 .../models/oracle_service_cloud_source.py | 57 +
 .../models/oracle_service_cloud_source_py3.py | 57 +
 .../mgmt/datafactory/models/oracle_sink.py | 66 +
 .../datafactory/models/oracle_sink_py3.py | 66 +
 .../mgmt/datafactory/models/oracle_source.py | 76 +
 .../datafactory/models/oracle_source_py3.py | 76 +
 .../models/oracle_table_dataset.py | 82 +
 .../models/oracle_table_dataset_py3.py | 82 +
 .../mgmt/datafactory/models/orc_format.py | 46 +
 .../mgmt/datafactory/models/orc_format_py3.py | 46 +
 .../models/parameter_specification.py | 39 +
 .../models/parameter_specification_py3.py | 39 +
 .../datafactory/models/parquet_dataset.py | 76 +
 .../datafactory/models/parquet_dataset_py3.py | 76 +
 .../mgmt/datafactory/models/parquet_format.py | 46 +
 .../datafactory/models/parquet_format_py3.py | 46 +
 .../mgmt/datafactory/models/parquet_sink.py | 65 +
 .../datafactory/models/parquet_sink_py3.py | 65 +
 .../mgmt/datafactory/models/parquet_source.py | 56 +
 .../datafactory/models/parquet_source_py3.py | 56 +
 .../models/paypal_linked_service.py | 92 +
 .../models/paypal_linked_service_py3.py | 92 +
 .../models/paypal_object_dataset.py | 72 +
 .../models/paypal_object_dataset_py3.py | 72 +
 .../mgmt/datafactory/models/paypal_source.py | 57 +
 .../datafactory/models/paypal_source_py3.py | 57 +
 .../models/phoenix_linked_service.py | 121 +
 .../models/phoenix_linked_service_py3.py | 121 +
 .../models/phoenix_object_dataset.py | 82 +
 .../models/phoenix_object_dataset_py3.py | 82 +
 .../mgmt/datafactory/models/phoenix_source.py | 57 +
 .../datafactory/models/phoenix_source_py3.py | 57 +
 .../datafactory/models/pipeline_folder.py | 29 +
 .../datafactory/models/pipeline_folder_py3.py | 29 +
 .../datafactory/models/pipeline_reference.py | 48 +
 .../models/pipeline_reference_py3.py | 48 +
 .../datafactory/models/pipeline_resource.py | 84 +
 .../models/pipeline_resource_paged.py | 27 +
 .../models/pipeline_resource_py3.py | 84 +
 .../mgmt/datafactory/models/pipeline_run.py | 99 +
 .../models/pipeline_run_invoked_by.py | 45 +
 .../models/pipeline_run_invoked_by_py3.py | 45 +
 .../datafactory/models/pipeline_run_py3.py | 99 +
 .../models/pipeline_runs_query_response.py | 39 +
 .../pipeline_runs_query_response_py3.py | 39 +
 .../datafactory/models/polybase_settings.py | 53 +
 .../models/polybase_settings_py3.py | 53 +
 .../models/postgre_sql_linked_service.py | 70 +
 .../models/postgre_sql_linked_service_py3.py | 70 +
 .../datafactory/models/postgre_sql_source.py | 57 +
 .../models/postgre_sql_source_py3.py | 57 +
 .../models/postgre_sql_table_dataset.py | 82 +
 .../models/postgre_sql_table_dataset_py3.py | 82 +
 .../models/presto_linked_service.py | 132 +
 .../models/presto_linked_service_py3.py | 132 +
 .../models/presto_object_dataset.py | 82 +
 .../models/presto_object_dataset_py3.py | 82 +
 .../mgmt/datafactory/models/presto_source.py | 57 +
 .../datafactory/models/presto_source_py3.py | 57 +
 .../models/quick_books_linked_service.py | 100 +
 .../models/quick_books_linked_service_py3.py | 100 +
 .../models/quick_books_object_dataset.py | 72 +
 .../models/quick_books_object_dataset_py3.py | 72 +
 .../datafactory/models/quick_books_source.py | 57 +
 .../models/quick_books_source_py3.py | 57 +
 .../datafactory/models/recurrence_schedule.py | 50 +
 .../models/recurrence_schedule_occurrence.py | 38 +
 .../recurrence_schedule_occurrence_py3.py | 38 +
 .../models/recurrence_schedule_py3.py | 50 +
 .../redirect_incompatible_row_settings.py | 47 +
 .../redirect_incompatible_row_settings_py3.py | 47 +
 .../models/redshift_unload_settings.py | 48 +
 .../models/redshift_unload_settings_py3.py | 48 +
 .../datafactory/models/relational_source.py | 57 +
 .../models/relational_source_py3.py | 57 +
 .../models/relational_table_dataset.py | 72 +
 .../models/relational_table_dataset_py3.py | 72 +
 .../models/rerun_trigger_resource.py | 54 +
 .../models/rerun_trigger_resource_paged.py | 27 +
 .../models/rerun_trigger_resource_py3.py | 54 +
 .../models/rerun_tumbling_window_trigger.py | 78 +
 ...mbling_window_trigger_action_parameters.py | 47 +
 ...ng_window_trigger_action_parameters_py3.py | 47 +
 .../rerun_tumbling_window_trigger_py3.py | 78 +
 .../azure/mgmt/datafactory/models/resource.py | 58 +
 .../mgmt/datafactory/models/resource_py3.py | 58 +
 .../models/responsys_linked_service.py | 94 +
 .../models/responsys_linked_service_py3.py | 94 +
 .../models/responsys_object_dataset.py | 72 +
 .../models/responsys_object_dataset_py3.py | 72 +
 .../datafactory/models/responsys_source.py | 57 +
 .../models/responsys_source_py3.py | 57 +
 .../models/rest_resource_dataset.py | 93 +
 .../models/rest_resource_dataset_py3.py | 93 +
 .../models/rest_service_linked_service.py | 107 +
 .../models/rest_service_linked_service_py3.py | 107 +
 .../mgmt/datafactory/models/rest_source.py | 86 +
 .../datafactory/models/rest_source_py3.py | 86 +
 .../mgmt/datafactory/models/retry_policy.py | 38 +
 .../datafactory/models/retry_policy_py3.py | 38 +
 .../models/run_filter_parameters.py | 54 +
 .../models/run_filter_parameters_py3.py | 54 +
 .../datafactory/models/run_query_filter.py | 53 +
 .../models/run_query_filter_py3.py | 53 +
 .../datafactory/models/run_query_order_by.py | 46 +
 .../models/run_query_order_by_py3.py | 46 +
 .../models/salesforce_linked_service.py | 82 +
 .../models/salesforce_linked_service_py3.py | 82 +
 ...lesforce_marketing_cloud_linked_service.py | 91 +
 ...orce_marketing_cloud_linked_service_py3.py | 91 +
 ...lesforce_marketing_cloud_object_dataset.py | 72 +
 ...orce_marketing_cloud_object_dataset_py3.py | 72 +
 .../salesforce_marketing_cloud_source.py | 57 +
 .../salesforce_marketing_cloud_source_py3.py | 57 +
 .../models/salesforce_object_dataset.py | 72 +
 .../models/salesforce_object_dataset_py3.py | 72 +
 ...salesforce_service_cloud_linked_service.py | 87 +
 ...sforce_service_cloud_linked_service_py3.py | 87 +
 ...salesforce_service_cloud_object_dataset.py | 72 +
 ...sforce_service_cloud_object_dataset_py3.py | 72 +
 .../models/salesforce_service_cloud_sink.py | 84 +
 .../salesforce_service_cloud_sink_py3.py | 84 +
 .../models/salesforce_service_cloud_source.py | 63 +
 .../salesforce_service_cloud_source_py3.py | 63 +
 .../datafactory/models/salesforce_sink.py | 84 +
 .../datafactory/models/salesforce_sink_py3.py | 84 +
 .../datafactory/models/salesforce_source.py | 63 +
 .../models/salesforce_source_py3.py | 63 +
 .../datafactory/models/sap_bw_cube_dataset.py | 67 +
 .../models/sap_bw_cube_dataset_py3.py | 67 +
 .../models/sap_bw_linked_service.py | 88 +
 .../models/sap_bw_linked_service_py3.py | 88 +
 .../mgmt/datafactory/models/sap_bw_source.py | 57 +
 .../datafactory/models/sap_bw_source_py3.py | 57 +
 .../sap_cloud_for_customer_linked_service.py | 76 +
 ...p_cloud_for_customer_linked_service_py3.py | 76 +
 ...sap_cloud_for_customer_resource_dataset.py | 73 +
 ...cloud_for_customer_resource_dataset_py3.py | 73 +
 .../models/sap_cloud_for_customer_sink.py | 67 +
 .../models/sap_cloud_for_customer_sink_py3.py | 67 +
 .../models/sap_cloud_for_customer_source.py | 57 +
 .../sap_cloud_for_customer_source_py3.py | 57 +
 .../models/sap_ecc_linked_service.py | 76 +
 .../models/sap_ecc_linked_service_py3.py | 76 +
 .../models/sap_ecc_resource_dataset.py | 73 +
 .../models/sap_ecc_resource_dataset_py3.py | 73 +
 .../mgmt/datafactory/models/sap_ecc_source.py | 57 +
 .../datafactory/models/sap_ecc_source_py3.py | 57 +
 .../models/sap_hana_linked_service.py | 85 +
 .../models/sap_hana_linked_service_py3.py | 85 +
 .../datafactory/models/sap_hana_source.py | 62 +
 .../datafactory/models/sap_hana_source_py3.py | 62 +
 .../models/sap_hana_table_dataset.py | 77 +
 .../models/sap_hana_table_dataset_py3.py | 77 +
 .../models/sap_open_hub_linked_service.py | 99 +
 .../models/sap_open_hub_linked_service_py3.py | 99 +
 .../datafactory/models/sap_open_hub_source.py | 66 +
 .../models/sap_open_hub_source_py3.py | 66 +
 .../models/sap_open_hub_table_dataset.py | 87 +
 .../models/sap_open_hub_table_dataset_py3.py | 87 +
 .../models/sap_table_linked_service.py | 140 +
 .../models/sap_table_linked_service_py3.py | 140 +
 .../models/sap_table_partition_settings.py | 47 +
 .../sap_table_partition_settings_py3.py | 47 +
 .../models/sap_table_resource_dataset.py | 73 +
 .../models/sap_table_resource_dataset_py3.py | 73 +
 .../datafactory/models/sap_table_source.py | 100 +
 .../models/sap_table_source_py3.py | 100 +
 .../datafactory/models/schedule_trigger.py | 64 +
 .../models/schedule_trigger_py3.py | 64 +
 .../models/schedule_trigger_recurrence.py | 54 +
 .../models/schedule_trigger_recurrence_py3.py | 54 +
 .../mgmt/datafactory/models/script_action.py | 49 +
 .../datafactory/models/script_action_py3.py | 49 +
 .../mgmt/datafactory/models/secret_base.py | 41 +
 .../datafactory/models/secret_base_py3.py | 41 +
 .../mgmt/datafactory/models/secure_string.py | 40 +
 .../datafactory/models/secure_string_py3.py | 40 +
 ...dency_tumbling_window_trigger_reference.py | 46 +
 ...y_tumbling_window_trigger_reference_py3.py | 46 +
 .../models/self_hosted_integration_runtime.py | 46 +
 .../self_hosted_integration_runtime_node.py | 139 +
 ...elf_hosted_integration_runtime_node_py3.py | 139 +
 .../self_hosted_integration_runtime_py3.py | 46 +
 .../self_hosted_integration_runtime_status.py | 146 +
 ...f_hosted_integration_runtime_status_py3.py | 146 +
 .../models/service_now_linked_service.py | 106 +
 .../models/service_now_linked_service_py3.py | 106 +
 .../models/service_now_object_dataset.py | 72 +
 .../models/service_now_object_dataset_py3.py | 72 +
 .../datafactory/models/service_now_source.py | 57 +
 .../models/service_now_source_py3.py | 57 +
 .../models/set_variable_activity.py | 59 +
 .../models/set_variable_activity_py3.py | 59 +
 .../mgmt/datafactory/models/sftp_location.py | 45 +
 .../datafactory/models/sftp_location_py3.py | 45 +
 .../datafactory/models/sftp_read_settings.py | 68 +
 .../models/sftp_read_settings_py3.py | 68 +
 .../models/sftp_server_linked_service.py | 119 +
 .../models/sftp_server_linked_service_py3.py | 119 +
 .../models/shopify_linked_service.py | 86 +
 .../models/shopify_linked_service_py3.py | 86 +
 .../models/shopify_object_dataset.py | 72 +
 .../models/shopify_object_dataset_py3.py | 72 +
 .../mgmt/datafactory/models/shopify_source.py | 57 +
 .../datafactory/models/shopify_source_py3.py | 57 +
 .../models/spark_linked_service.py | 131 +
 .../models/spark_linked_service_py3.py | 131 +
 .../models/spark_object_dataset.py | 82 +
 .../models/spark_object_dataset_py3.py | 82 +
 .../mgmt/datafactory/models/spark_source.py | 57 +
 .../datafactory/models/spark_source_py3.py | 57 +
 .../mgmt/datafactory/models/sql_dw_sink.py | 83 +
 .../datafactory/models/sql_dw_sink_py3.py | 83 +
 .../mgmt/datafactory/models/sql_dw_source.py | 70 +
 .../datafactory/models/sql_dw_source_py3.py | 70 +
 .../mgmt/datafactory/models/sql_mi_sink.py | 93 +
 .../datafactory/models/sql_mi_sink_py3.py | 93 +
 .../mgmt/datafactory/models/sql_mi_source.py | 73 +
 .../datafactory/models/sql_mi_source_py3.py | 73 +
 .../models/sql_server_linked_service.py | 74 +
 .../models/sql_server_linked_service_py3.py | 74 +
 .../datafactory/models/sql_server_sink.py | 93 +
 .../datafactory/models/sql_server_sink_py3.py | 93 +
 .../datafactory/models/sql_server_source.py | 73 +
 .../models/sql_server_source_py3.py | 73 +
 .../sql_server_stored_procedure_activity.py | 70 +
 ...ql_server_stored_procedure_activity_py3.py | 70 +
 .../models/sql_server_table_dataset.py | 82 +
 .../models/sql_server_table_dataset_py3.py | 82 +
 .../azure/mgmt/datafactory/models/sql_sink.py | 93 +
 .../mgmt/datafactory/models/sql_sink_py3.py | 93 +
 .../mgmt/datafactory/models/sql_source.py | 69 +
 .../mgmt/datafactory/models/sql_source_py3.py | 69 +
 .../models/square_linked_service.py | 98 +
 .../models/square_linked_service_py3.py | 98 +
 .../models/square_object_dataset.py | 72 +
 .../models/square_object_dataset_py3.py | 72 +
 .../mgmt/datafactory/models/square_source.py | 57 +
 .../datafactory/models/square_source_py3.py | 57 +
 .../models/ssis_access_credential.py | 44 +
 .../models/ssis_access_credential_py3.py | 44 +
 .../datafactory/models/ssis_environment.py | 51 +
 .../models/ssis_environment_py3.py | 51 +
 .../models/ssis_environment_reference.py | 40 +
 .../models/ssis_environment_reference_py3.py | 40 +
 .../models/ssis_execution_credential.py | 44 +
 .../models/ssis_execution_credential_py3.py | 44 +
 .../models/ssis_execution_parameter.py | 35 +
 .../models/ssis_execution_parameter_py3.py | 35 +
 .../mgmt/datafactory/models/ssis_folder.py | 43 +
 .../datafactory/models/ssis_folder_py3.py | 43 +
 .../datafactory/models/ssis_log_location.py | 57 +
 .../models/ssis_log_location_py3.py | 57 +
 .../models/ssis_object_metadata.py | 53 +
 .../ssis_object_metadata_list_response.py | 33 +
 .../ssis_object_metadata_list_response_py3.py | 33 +
 .../models/ssis_object_metadata_py3.py | 53 +
 .../ssis_object_metadata_status_response.py | 40 +
 ...sis_object_metadata_status_response_py3.py | 40 +
 .../mgmt/datafactory/models/ssis_package.py | 59 +
 .../models/ssis_package_location.py | 54 +
 .../models/ssis_package_location_py3.py | 54 +
 .../datafactory/models/ssis_package_py3.py | 59 +
 .../mgmt/datafactory/models/ssis_parameter.py | 72 +
 .../datafactory/models/ssis_parameter_py3.py | 72 +
 .../mgmt/datafactory/models/ssis_project.py | 60 +
 .../datafactory/models/ssis_project_py3.py | 60 +
 .../models/ssis_property_override.py | 40 +
 .../models/ssis_property_override_py3.py | 40 +
 .../mgmt/datafactory/models/ssis_variable.py | 52 +
 .../datafactory/models/ssis_variable_py3.py | 52 +
 .../datafactory/models/staging_settings.py | 51 +
 .../models/staging_settings_py3.py | 51 +
 .../datafactory/models/store_read_settings.py | 45 +
 .../models/store_read_settings_py3.py | 45 +
 .../models/store_write_settings.py | 57 +
 .../models/store_write_settings_py3.py | 57 +
 .../models/stored_procedure_parameter.py | 35 +
 .../models/stored_procedure_parameter_py3.py | 35 +
 .../mgmt/datafactory/models/sub_resource.py | 50 +
 .../datafactory/models/sub_resource_py3.py | 50 +
 .../models/sybase_linked_service.py | 91 +
 .../models/sybase_linked_service_py3.py | 91 +
 .../mgmt/datafactory/models/sybase_source.py | 57 +
 .../datafactory/models/sybase_source_py3.py | 57 +
 .../models/sybase_table_dataset.py | 72 +
 .../models/sybase_table_dataset_py3.py | 72 +
 .../models/teradata_linked_service.py | 84 +
 .../models/teradata_linked_service_py3.py | 84 +
 .../models/teradata_partition_settings.py | 42 +
 .../models/teradata_partition_settings_py3.py | 42 +
 .../datafactory/models/teradata_source.py | 70 +
 .../datafactory/models/teradata_source_py3.py | 70 +
 .../models/teradata_table_dataset.py | 77 +
 .../models/teradata_table_dataset_py3.py | 77 +
 .../mgmt/datafactory/models/text_format.py | 99 +
 .../datafactory/models/text_format_py3.py | 99 +
 .../azure/mgmt/datafactory/models/trigger.py | 68 +
 .../models/trigger_dependency_reference.py | 46 +
 .../trigger_dependency_reference_py3.py | 46 +
 .../models/trigger_pipeline_reference.py | 32 +
 .../models/trigger_pipeline_reference_py3.py | 32 +
 .../mgmt/datafactory/models/trigger_py3.py | 68 +
 .../datafactory/models/trigger_reference.py | 44 +
 .../models/trigger_reference_py3.py | 44 +
 .../datafactory/models/trigger_resource.py | 53 +
 .../models/trigger_resource_paged.py | 27 +
 .../models/trigger_resource_py3.py | 53 +
 .../mgmt/datafactory/models/trigger_run.py | 78 +
 .../datafactory/models/trigger_run_py3.py | 78 +
 .../models/trigger_runs_query_response.py | 39 +
 .../models/trigger_runs_query_response_py3.py | 39 +
 .../trigger_subscription_operation_status.py | 42 +
 ...igger_subscription_operation_status_py3.py | 42 +
 .../models/tumbling_window_trigger.py | 112 +
 ...ing_window_trigger_dependency_reference.py | 50 +
 ...window_trigger_dependency_reference_py3.py | 50 +
 .../models/tumbling_window_trigger_py3.py | 112 +
 .../mgmt/datafactory/models/until_activity.py | 72 +
 .../datafactory/models/until_activity_py3.py | 72 +
 ...update_integration_runtime_node_request.py | 34 +
 ...te_integration_runtime_node_request_py3.py | 34 +
 .../update_integration_runtime_request.py | 38 +
 .../update_integration_runtime_request_py3.py | 38 +
 .../datafactory/models/user_access_policy.py | 51 +
 .../models/user_access_policy_py3.py | 51 +
 .../mgmt/datafactory/models/user_property.py | 40 +
 .../datafactory/models/user_property_py3.py | 40 +
 .../datafactory/models/validation_activity.py | 81 +
 .../models/validation_activity_py3.py | 81 +
 .../models/variable_specification.py | 39 +
 .../models/variable_specification_py3.py | 39 +
 .../models/vertica_linked_service.py | 69 +
 .../models/vertica_linked_service_py3.py | 69 +
 .../mgmt/datafactory/models/vertica_source.py | 57 +
 .../datafactory/models/vertica_source_py3.py | 57 +
 .../models/vertica_table_dataset.py | 82 +
 .../models/vertica_table_dataset_py3.py | 82 +
 .../mgmt/datafactory/models/wait_activity.py | 56 +
 .../datafactory/models/wait_activity_py3.py | 56 +
 .../mgmt/datafactory/models/web_activity.py | 98 +
 .../models/web_activity_authentication.py | 53 +
 .../models/web_activity_authentication_py3.py | 53 +
 .../datafactory/models/web_activity_py3.py | 98 +
 .../models/web_anonymous_authentication.py | 41 +
 .../web_anonymous_authentication_py3.py | 41 +
 .../models/web_basic_authentication.py | 52 +
 .../models/web_basic_authentication_py3.py | 52 +
 .../web_client_certificate_authentication.py | 53 +
 ...b_client_certificate_authentication_py3.py | 53 +
 .../datafactory/models/web_hook_activity.py | 92 +
 .../models/web_hook_activity_py3.py | 92 +
 .../datafactory/models/web_linked_service.py | 59 +
 .../models/web_linked_service_py3.py | 59 +
 .../web_linked_service_type_properties.py | 50 +
 .../web_linked_service_type_properties_py3.py | 50 +
 .../mgmt/datafactory/models/web_source.py | 52 +
 .../mgmt/datafactory/models/web_source_py3.py | 52 +
 .../datafactory/models/web_table_dataset.py | 78 +
 .../models/web_table_dataset_py3.py | 78 +
 .../datafactory/models/xero_linked_service.py | 93 +
 .../models/xero_linked_service_py3.py | 93 +
 .../datafactory/models/xero_object_dataset.py | 72 +
 .../models/xero_object_dataset_py3.py | 72 +
 .../mgmt/datafactory/models/xero_source.py | 57 +
 .../datafactory/models/xero_source_py3.py | 57 +
 .../datafactory/models/zoho_linked_service.py | 85 +
 .../models/zoho_linked_service_py3.py | 85 +
 .../datafactory/models/zoho_object_dataset.py | 72 +
 .../models/zoho_object_dataset_py3.py | 72 +
 .../mgmt/datafactory/models/zoho_source.py | 57 +
 .../datafactory/models/zoho_source_py3.py | 57 +
 .../mgmt/datafactory/operations/__init__.py | 28 +-
 ...rations.py => activity_runs_operations.py} | 3 +-
 ...s_operations.py => datasets_operations.py} | 18 +-
 ...ions.py => exposure_control_operations.py} | 4 +-
 ..._operations.py => factories_operations.py} | 36 +-
 ...> integration_runtime_nodes_operations.py} | 5 +-
 ...ion_runtime_object_metadata_operations.py} | 3 +-
 ....py => integration_runtimes_operations.py} | 25 +-
 ...tions.py => linked_services_operations.py} | 18 +-
 .../{_operations.py => operations.py} | 16 +-
 ...rations.py => pipeline_runs_operations.py} | 4 +-
 ..._operations.py => pipelines_operations.py} | 19 +-
 ...ations.py => rerun_triggers_operations.py} | 17 +-
 ...erations.py => trigger_runs_operations.py} | 3 +-
 ...s_operations.py => triggers_operations.py} | 19 +-
 .../azure/mgmt/datafactory/version.py | 2 +-
 1062 files changed, 72162 insertions(+), 60022 deletions(-)
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_configuration.py
 rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/{_data_factory_management_client.py => data_factory_management_client.py} (70%)
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_paged_models.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_table_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_sink.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset_py3.py
 rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/{_data_factory_management_client_enums.py => data_factory_management_client_enums.py} (100%)
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource_paged.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_table_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity.py
 create mode 100644
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_paged.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location_py3.py create mode 100644 
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request_py3.py create mode 100644 
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py create mode 100644 
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location.py create mode 100644 
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info.py create mode 
100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource_paged.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_py3.py create mode 100644 
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_sink.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_sink_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_write_settings.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_write_settings_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type_py3.py create mode 100644 
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource_paged.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source.py create mode 100644 
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source.py create mode 100644 
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset_py3.py create mode 100644 
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_paged.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source.py create mode 100644 
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder.py create mode 100644 
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource_paged.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source.py create mode 100644 
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_paged.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource_paged.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_subscription_operation_status.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_subscription_operation_status_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source_py3.py
 rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/{_activity_runs_operations.py => activity_runs_operations.py} (97%)
 rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/{_datasets_operations.py => datasets_operations.py} (97%)
 rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/{_exposure_control_operations.py => exposure_control_operations.py} (98%)
 rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/{_factories_operations.py => factories_operations.py} (98%)
 rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/{_integration_runtime_nodes_operations.py => integration_runtime_nodes_operations.py} (99%)
 rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/{_integration_runtime_object_metadata_operations.py => integration_runtime_object_metadata_operations.py} (98%)
 rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/{_integration_runtimes_operations.py => integration_runtimes_operations.py} (99%)
 rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/{_linked_services_operations.py => linked_services_operations.py} (97%)
 rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/{_operations.py => operations.py} (90%)
 rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/{_pipeline_runs_operations.py => pipeline_runs_operations.py} (98%)
 rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/{_pipelines_operations.py => pipelines_operations.py} (98%)
 rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/{_rerun_triggers_operations.py => rerun_triggers_operations.py} (98%)
 rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/{_trigger_runs_operations.py => trigger_runs_operations.py} (98%)
 rename sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/{_triggers_operations.py => triggers_operations.py} (98%)
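Editor's note: the renames above drop the leading underscore from every operations module, so each one resolves at its public (non-underscored) path; the client diff further below wires these same classes up explicitly. A minimal sketch, illustrative only and assuming the package is installed at this revision:

# Illustrative only -- not part of the patch. After the rename, the
# operations module is importable at its non-underscored path; in normal
# use the class is reached through DataFactoryManagementClient.factories
# rather than imported directly.
from azure.mgmt.datafactory.operations.factories_operations import FactoriesOperations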
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/__init__.py
index 5ab3f5226cb2..db14f5d7f4f6 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/__init__.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/__init__.py
@@ -9,11 +9,10 @@
 # regenerated.
 # --------------------------------------------------------------------------
-from ._configuration import DataFactoryManagementClientConfiguration
-from ._data_factory_management_client import DataFactoryManagementClient
-__all__ = ['DataFactoryManagementClient', 'DataFactoryManagementClientConfiguration']
-
+from .data_factory_management_client import DataFactoryManagementClient
 from .version import VERSION
+__all__ = ['DataFactoryManagementClient']
+
 __version__ = VERSION
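Editor's note: together with the renames, this hunk restores the flat, non-underscored package layout; the client is imported from the package root either way. A minimal usage sketch under that assumption (illustrative only; the credential values are placeholders, and ServicePrincipalCredentials is just one of the msrestazure-era credential types the client accepts):

# Illustrative only -- not part of the patch.
from azure.common.credentials import ServicePrincipalCredentials
from azure.mgmt.datafactory import DataFactoryManagementClient

credentials = ServicePrincipalCredentials(
    client_id='<client-id>',      # placeholder
    secret='<client-secret>',     # placeholder
    tenant='<tenant-id>',         # placeholder
)
client = DataFactoryManagementClient(credentials, '<subscription-id>')
factories = client.factories.list()  # list factories in the subscription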
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_configuration.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_configuration.py
deleted file mode 100644
index 80666808edb1..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_configuration.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-from msrestazure import AzureConfiguration
-
-from .version import VERSION
-
-
-class DataFactoryManagementClientConfiguration(AzureConfiguration):
-    """Configuration for DataFactoryManagementClient
-    Note that all parameters used to create this instance are saved as instance
-    attributes.
-
-    :param credentials: Credentials needed for the client to connect to Azure.
-    :type credentials: :mod:`A msrestazure Credentials
-     object`
-    :param subscription_id: The subscription identifier.
-    :type subscription_id: str
-    :param str base_url: Service URL
-    """
-
-    def __init__(
-            self, credentials, subscription_id, base_url=None):
-
-        if credentials is None:
-            raise ValueError("Parameter 'credentials' must not be None.")
-        if subscription_id is None:
-            raise ValueError("Parameter 'subscription_id' must not be None.")
-        if not base_url:
-            base_url = 'https://management.azure.com'
-
-        super(DataFactoryManagementClientConfiguration, self).__init__(base_url)
-
-        # Starting Autorest.Python 4.0.64, make connection pool activated by default
-        self.keep_alive = True
-
-        self.add_user_agent('azure-mgmt-datafactory/{}'.format(VERSION))
-        self.add_user_agent('Azure-SDK-For-Python')
-
-        self.credentials = credentials
-        self.subscription_id = subscription_id
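Editor's note: the configuration class deleted here is re-added nearly verbatim inside data_factory_management_client.py in the next hunk; the only behavioral difference visible in the diff is that the relocated copy drops the keep_alive connection-pool default. The argument validation is unchanged, as this sketch (illustrative only) shows:

# Illustrative only -- not part of the patch. After this change the
# configuration class lives in the public client module.
from azure.mgmt.datafactory.data_factory_management_client import (
    DataFactoryManagementClientConfiguration,
)

try:
    DataFactoryManagementClientConfiguration(None, '<subscription-id>')
except ValueError as exc:
    print(exc)  # Parameter 'credentials' must not be None.
# When base_url is omitted, it defaults to 'https://management.azure.com'.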
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/data_factory_management_client.py
similarity index 70%
rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py
rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/data_factory_management_client.py
index 14cee0777347..bb8a2a22fd77 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/data_factory_management_client.py
@@ -11,25 +11,57 @@
 from msrest.service_client import SDKClient
 from msrest import Serializer, Deserializer
-
-from ._configuration import DataFactoryManagementClientConfiguration
-from .operations import Operations
-from .operations import FactoriesOperations
-from .operations import ExposureControlOperations
-from .operations import IntegrationRuntimesOperations
-from .operations import IntegrationRuntimeObjectMetadataOperations
-from .operations import IntegrationRuntimeNodesOperations
-from .operations import LinkedServicesOperations
-from .operations import DatasetsOperations
-from .operations import PipelinesOperations
-from .operations import PipelineRunsOperations
-from .operations import ActivityRunsOperations
-from .operations import TriggersOperations
-from .operations import TriggerRunsOperations
-from .operations import RerunTriggersOperations
+from msrestazure import AzureConfiguration
+from .version import VERSION
+from .operations.operations import Operations
+from .operations.factories_operations import FactoriesOperations
+from .operations.exposure_control_operations import ExposureControlOperations
+from .operations.integration_runtimes_operations import IntegrationRuntimesOperations
+from .operations.integration_runtime_object_metadata_operations import IntegrationRuntimeObjectMetadataOperations
+from .operations.integration_runtime_nodes_operations import IntegrationRuntimeNodesOperations
+from .operations.linked_services_operations import LinkedServicesOperations
+from .operations.datasets_operations import DatasetsOperations
+from .operations.pipelines_operations import PipelinesOperations
+from .operations.pipeline_runs_operations import PipelineRunsOperations
+from .operations.activity_runs_operations import ActivityRunsOperations
+from .operations.triggers_operations import TriggersOperations
+from .operations.trigger_runs_operations import TriggerRunsOperations
+from .operations.rerun_triggers_operations import RerunTriggersOperations
 from . import models
+
+
+class DataFactoryManagementClientConfiguration(AzureConfiguration):
+    """Configuration for DataFactoryManagementClient
+    Note that all parameters used to create this instance are saved as instance
+    attributes.
+
+    :param credentials: Credentials needed for the client to connect to Azure.
+    :type credentials: :mod:`A msrestazure Credentials
+     object`
+    :param subscription_id: The subscription identifier.
+    :type subscription_id: str
+    :param str base_url: Service URL
+    """
+
+    def __init__(
+            self, credentials, subscription_id, base_url=None):
+
+        if credentials is None:
+            raise ValueError("Parameter 'credentials' must not be None.")
+        if subscription_id is None:
+            raise ValueError("Parameter 'subscription_id' must not be None.")
+        if not base_url:
+            base_url = 'https://management.azure.com'
+
+        super(DataFactoryManagementClientConfiguration, self).__init__(base_url)
+
+        self.add_user_agent('azure-mgmt-datafactory/{}'.format(VERSION))
+        self.add_user_agent('Azure-SDK-For-Python')
+
+        self.credentials = credentials
+        self.subscription_id = subscription_id
+
+
 class DataFactoryManagementClient(SDKClient):
     """The Azure Data Factory V2 management API provides a RESTful set of web
     services that interact with Azure Data Factory V2 services.
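Editor's note: the models/__init__.py hunk below swaps the consolidated _models_py3 imports for one import per model module, but the public surface is untouched: every model is still re-exported from azure.mgmt.datafactory.models. A sketch under that assumption (illustrative only; the values are placeholders):

# Illustrative only -- not part of the patch. These imports resolve the
# same way before and after the regeneration.
from azure.mgmt.datafactory.models import LinkedServiceReference, SecureString

secret = SecureString(value='<connection-string>')              # placeholder secret
ref = LinkedServiceReference(reference_name='MyLinkedService')  # placeholder name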
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
index f45fba3e8bbf..395f7908afbd 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
@@ -10,1048 +10,1046 @@
 # --------------------------------------------------------------------------
 try:
-    from ._models_py3 import AccessPolicyResponse
-    from ._models_py3 import Activity
-    from ._models_py3 import ActivityDependency
-    from ._models_py3 import ActivityPolicy
-    from ._models_py3 import ActivityRun
-    from ._models_py3 import ActivityRunsQueryResponse
-    from ._models_py3 import AmazonMWSLinkedService
-    from ._models_py3 import AmazonMWSObjectDataset
-    from ._models_py3 import AmazonMWSSource
-    from ._models_py3 import AmazonRedshiftLinkedService
-    from ._models_py3 import AmazonRedshiftSource
-    from ._models_py3 import AmazonRedshiftTableDataset
-    from ._models_py3 import AmazonS3Dataset
-    from ._models_py3 import AmazonS3LinkedService
-    from ._models_py3 import AmazonS3Location
-    from ._models_py3 import AmazonS3ReadSettings
-    from ._models_py3 import AppendVariableActivity
-    from ._models_py3 import AvroDataset
-    from ._models_py3 import AvroFormat
-    from ._models_py3 import AvroSink
-    from ._models_py3 import AvroSource
-    from ._models_py3 import AvroWriteSettings
-    from ._models_py3 import AzureBatchLinkedService
-    from ._models_py3 import AzureBlobDataset
-    from ._models_py3 import AzureBlobFSDataset
-    from ._models_py3 import AzureBlobFSLinkedService
-    from ._models_py3 import AzureBlobFSLocation
-    from ._models_py3 import AzureBlobFSReadSettings
-    from ._models_py3 import AzureBlobFSSink
-    from ._models_py3 import AzureBlobFSSource
-    from ._models_py3 import AzureBlobFSWriteSettings
-    from ._models_py3 import AzureBlobStorageLinkedService
-    from ._models_py3 import AzureBlobStorageLocation
-    from ._models_py3 import AzureBlobStorageReadSettings
-    from ._models_py3 import AzureBlobStorageWriteSettings
-    from ._models_py3 import AzureDatabricksLinkedService
-    from ._models_py3 import AzureDataExplorerCommandActivity
-    from ._models_py3 import AzureDataExplorerLinkedService
-    from ._models_py3 import AzureDataExplorerSink
-    from ._models_py3 import AzureDataExplorerSource
-    from ._models_py3 import AzureDataExplorerTableDataset
-    from ._models_py3 import AzureDataLakeAnalyticsLinkedService
-    from ._models_py3 import AzureDataLakeStoreDataset
-    from ._models_py3 import AzureDataLakeStoreLinkedService
-    from ._models_py3 import AzureDataLakeStoreLocation
-    from ._models_py3 import AzureDataLakeStoreReadSettings
-    from ._models_py3 import AzureDataLakeStoreSink
-    from ._models_py3 import AzureDataLakeStoreSource
-    from ._models_py3 import AzureDataLakeStoreWriteSettings
-    from ._models_py3 import AzureFunctionActivity
-    from ._models_py3 import AzureFunctionLinkedService
-    from ._models_py3 import AzureKeyVaultLinkedService
-    from ._models_py3 import AzureKeyVaultSecretReference
-    from ._models_py3 import AzureMariaDBLinkedService
-    from ._models_py3 import AzureMariaDBSource
-    from ._models_py3 import AzureMariaDBTableDataset
-    from ._models_py3 import AzureMLBatchExecutionActivity
-    from ._models_py3 import AzureMLLinkedService
-    from ._models_py3 import AzureMLUpdateResourceActivity
-    from ._models_py3 import AzureMLWebServiceFile
-    from ._models_py3 import AzureMySqlLinkedService
-    from ._models_py3 import AzureMySqlSink
-    from ._models_py3 import AzureMySqlSource
-    from ._models_py3 import AzureMySqlTableDataset
-    from ._models_py3 import AzurePostgreSqlLinkedService
-    from ._models_py3 import AzurePostgreSqlSink
-    from ._models_py3 import AzurePostgreSqlSource
-    from ._models_py3 import AzurePostgreSqlTableDataset
-    from ._models_py3 import AzureQueueSink
-    from ._models_py3 import AzureSearchIndexDataset
-    from ._models_py3 import AzureSearchIndexSink
-    from ._models_py3 import AzureSearchLinkedService
-    from ._models_py3 import AzureSqlDatabaseLinkedService
-    from ._models_py3 import AzureSqlDWLinkedService
-    from ._models_py3 import AzureSqlDWTableDataset
-    from ._models_py3 import AzureSqlMILinkedService
-    from ._models_py3 import AzureSqlMITableDataset
-    from ._models_py3 import AzureSqlSink
-    from ._models_py3 import AzureSqlSource
-    from ._models_py3 import AzureSqlTableDataset
-    from ._models_py3 import AzureStorageLinkedService
-    from ._models_py3 import AzureTableDataset
-    from ._models_py3 import AzureTableSink
-    from ._models_py3 import AzureTableSource
-    from ._models_py3 import AzureTableStorageLinkedService
-    from ._models_py3 import BinaryDataset
-    from ._models_py3 import BinarySink
-    from ._models_py3 import BinarySource
-    from ._models_py3 import BlobEventsTrigger
-    from ._models_py3 import BlobSink
-    from ._models_py3 import BlobSource
-    from ._models_py3 import BlobTrigger
-    from ._models_py3 import CassandraLinkedService
-    from ._models_py3 import CassandraSource
-    from ._models_py3 import CassandraTableDataset
-    from ._models_py3 import ChainingTrigger
-    from ._models_py3 import CommonDataServiceForAppsEntityDataset
-    from ._models_py3 import CommonDataServiceForAppsLinkedService
-    from ._models_py3 import CommonDataServiceForAppsSink
-    from ._models_py3 import CommonDataServiceForAppsSource
-    from ._models_py3 import ConcurLinkedService
-    from ._models_py3 import ConcurObjectDataset
-    from ._models_py3 import ConcurSource
-    from ._models_py3 import ControlActivity
-    from ._models_py3 import CopyActivity
-    from ._models_py3 import CopySink
-    from ._models_py3 import CopySource
-    from ._models_py3 import CosmosDbLinkedService
-    from ._models_py3 import CosmosDbMongoDbApiCollectionDataset
-    from ._models_py3 import CosmosDbMongoDbApiLinkedService
-    from ._models_py3 import CosmosDbMongoDbApiSink
-    from ._models_py3 import CosmosDbMongoDbApiSource
-    from ._models_py3 import CouchbaseLinkedService
-    from ._models_py3 import CouchbaseSource
-    from ._models_py3 import CouchbaseTableDataset
-    from ._models_py3 import CreateLinkedIntegrationRuntimeRequest
-    from ._models_py3 import CreateRunResponse
-    from ._models_py3 import CustomActivity
-    from ._models_py3 import CustomActivityReferenceObject
-    from ._models_py3 import CustomDataset
-    from ._models_py3 import CustomDataSourceLinkedService
-    from ._models_py3 import DatabricksNotebookActivity
-    from ._models_py3 import DatabricksSparkJarActivity
-    from ._models_py3 import DatabricksSparkPythonActivity
-    from ._models_py3 import DataLakeAnalyticsUSQLActivity
-    from ._models_py3 import Dataset
-    from ._models_py3 import DatasetBZip2Compression
-    from ._models_py3 import DatasetCompression
-    from ._models_py3 import DatasetDeflateCompression
-    from ._models_py3 import DatasetFolder
-    from ._models_py3 import DatasetGZipCompression
-    from ._models_py3 import DatasetLocation
-    from ._models_py3 import DatasetReference
-    from ._models_py3 import DatasetResource
-    from ._models_py3 import DatasetStorageFormat
-    from ._models_py3 import DatasetZipDeflateCompression
-    from ._models_py3 import Db2LinkedService
-    from ._models_py3 import Db2Source
-    from ._models_py3 import Db2TableDataset
-    from ._models_py3 import DeleteActivity
-    from ._models_py3 import DelimitedTextDataset
-    from ._models_py3 import DelimitedTextReadSettings
-    from ._models_py3 import DelimitedTextSink
-    from ._models_py3 import DelimitedTextSource
-    from ._models_py3 import DelimitedTextWriteSettings
-    from ._models_py3 import DependencyReference
-    from ._models_py3 import DistcpSettings
-    from ._models_py3 import DocumentDbCollectionDataset
-    from ._models_py3 import DocumentDbCollectionSink
-    from ._models_py3 import DocumentDbCollectionSource
-    from ._models_py3 import DrillLinkedService
-    from ._models_py3 import DrillSource
-    from ._models_py3 import DrillTableDataset
-    from ._models_py3 import DynamicsAXLinkedService
-    from ._models_py3 import DynamicsAXResourceDataset
-    from ._models_py3 import DynamicsAXSource
-    from ._models_py3 import DynamicsCrmEntityDataset
-    from ._models_py3 import DynamicsCrmLinkedService
-    from ._models_py3 import DynamicsCrmSink
-    from ._models_py3 import DynamicsCrmSource
-    from ._models_py3 import DynamicsEntityDataset
-    from ._models_py3 import DynamicsLinkedService
-    from ._models_py3 import DynamicsSink
-    from ._models_py3 import DynamicsSource
-    from ._models_py3 import EloquaLinkedService
-    from ._models_py3 import EloquaObjectDataset
-    from ._models_py3 import EloquaSource
-    from ._models_py3 import EntityReference
-    from ._models_py3 import ExecutePipelineActivity
-    from ._models_py3 import ExecuteSSISPackageActivity
-    from ._models_py3 import ExecutionActivity
-    from ._models_py3 import ExposureControlRequest
-    from ._models_py3 import ExposureControlResponse
-    from ._models_py3 import Expression
-    from ._models_py3 import Factory
-    from ._models_py3 import FactoryGitHubConfiguration
-    from ._models_py3 import FactoryIdentity
-    from ._models_py3 import FactoryRepoConfiguration
-    from ._models_py3 import FactoryRepoUpdate
-    from ._models_py3 import FactoryUpdateParameters
-    from ._models_py3 import FactoryVSTSConfiguration
-    from ._models_py3 import FileServerLinkedService
-    from ._models_py3 import FileServerLocation
-    from ._models_py3 import FileServerReadSettings
-    from ._models_py3 import FileServerWriteSettings
-    from ._models_py3 import FileShareDataset
-    from ._models_py3 import FileSystemSink
-    from ._models_py3 import FileSystemSource
-    from ._models_py3 import FilterActivity
-    from ._models_py3 import ForEachActivity
-    from ._models_py3 import FormatReadSettings
-    from ._models_py3 import FormatWriteSettings
-    from ._models_py3 import FtpReadSettings
-    from ._models_py3 import FtpServerLinkedService
-    from ._models_py3 import FtpServerLocation
-    from ._models_py3 import GetMetadataActivity
-    from ._models_py3 import GetSsisObjectMetadataRequest
-    from ._models_py3 import GitHubAccessTokenRequest
-    from ._models_py3 import GitHubAccessTokenResponse
-    from ._models_py3 import GoogleAdWordsLinkedService
-    from ._models_py3 import GoogleAdWordsObjectDataset
-    from ._models_py3 import GoogleAdWordsSource
-    from ._models_py3 import GoogleBigQueryLinkedService
-    from ._models_py3 import GoogleBigQueryObjectDataset
-    from ._models_py3 import GoogleBigQuerySource
-    from ._models_py3 import GreenplumLinkedService
-    from ._models_py3 import GreenplumSource
-    from ._models_py3 import GreenplumTableDataset
-    from ._models_py3 import HBaseLinkedService
-    from ._models_py3 import HBaseObjectDataset
-    from ._models_py3 import HBaseSource
-    from ._models_py3 import HdfsLinkedService
-    from ._models_py3 import HdfsLocation
-    from ._models_py3 import HdfsReadSettings
-    from ._models_py3 import HdfsSource
-    from ._models_py3 import HDInsightHiveActivity
-    from ._models_py3 import HDInsightLinkedService
-    from ._models_py3 import HDInsightMapReduceActivity
-    from ._models_py3 import HDInsightOnDemandLinkedService
-    from ._models_py3 import HDInsightPigActivity
-    from ._models_py3 import HDInsightSparkActivity
-    from ._models_py3 import HDInsightStreamingActivity
-    from ._models_py3 import HiveLinkedService
-    from ._models_py3 import HiveObjectDataset
-    from ._models_py3 import HiveSource
-    from ._models_py3 import HttpDataset
-    from ._models_py3 import HttpLinkedService
-    from ._models_py3 import HttpReadSettings
-    from ._models_py3 import HttpServerLocation
-    from ._models_py3 import HttpSource
-    from ._models_py3 import HubspotLinkedService
-    from ._models_py3 import HubspotObjectDataset
-    from ._models_py3 import HubspotSource
-    from ._models_py3 import IfConditionActivity
-    from ._models_py3 import ImpalaLinkedService
-    from ._models_py3 import ImpalaObjectDataset
-    from ._models_py3 import ImpalaSource
-    from ._models_py3 import InformixLinkedService
-    from ._models_py3 import InformixSink
-    from ._models_py3 import InformixSource
-    from ._models_py3 import InformixTableDataset
-    from ._models_py3 import IntegrationRuntime
-    from ._models_py3 import IntegrationRuntimeAuthKeys
-    from ._models_py3 import IntegrationRuntimeComputeProperties
-    from ._models_py3 import IntegrationRuntimeConnectionInfo
-    from ._models_py3 import IntegrationRuntimeCustomSetupScriptProperties
-    from ._models_py3 import IntegrationRuntimeDataProxyProperties
-    from ._models_py3 import IntegrationRuntimeMonitoringData
-    from ._models_py3 import IntegrationRuntimeNodeIpAddress
-    from ._models_py3 import IntegrationRuntimeNodeMonitoringData
-    from ._models_py3 import IntegrationRuntimeReference
-    from ._models_py3 import IntegrationRuntimeRegenerateKeyParameters
-    from ._models_py3 import IntegrationRuntimeResource
-    from ._models_py3 import IntegrationRuntimeSsisCatalogInfo
-    from ._models_py3 import IntegrationRuntimeSsisProperties
-    from ._models_py3 import IntegrationRuntimeStatus
-    from ._models_py3 import IntegrationRuntimeStatusListResponse
-    from ._models_py3 import IntegrationRuntimeStatusResponse
-    from ._models_py3 import IntegrationRuntimeVNetProperties
-    from ._models_py3 import JiraLinkedService
-    from ._models_py3 import JiraObjectDataset
-    from ._models_py3 import JiraSource
-    from ._models_py3 import JsonDataset
-    from ._models_py3 import JsonFormat
-    from ._models_py3 import JsonSink
-    from ._models_py3 import JsonSource
-    from ._models_py3 import JsonWriteSettings
-    from ._models_py3 import LinkedIntegrationRuntime
-    from ._models_py3 import LinkedIntegrationRuntimeKeyAuthorization
-    from ._models_py3 import LinkedIntegrationRuntimeRbacAuthorization
-    from ._models_py3 import LinkedIntegrationRuntimeRequest
-    from ._models_py3 import LinkedIntegrationRuntimeType
-    from ._models_py3 import LinkedService
-    from ._models_py3 import LinkedServiceReference
-    from ._models_py3 import LinkedServiceResource
-    from ._models_py3 import LogStorageSettings
-    from ._models_py3 import LookupActivity
-    from ._models_py3 import MagentoLinkedService
-    from ._models_py3 import MagentoObjectDataset
-    from ._models_py3 import MagentoSource
-    from ._models_py3 import ManagedIntegrationRuntime
-    from ._models_py3 import ManagedIntegrationRuntimeError
-    from ._models_py3 import ManagedIntegrationRuntimeNode
-    from ._models_py3 import ManagedIntegrationRuntimeOperationResult
-    from ._models_py3 import ManagedIntegrationRuntimeStatus
-    from ._models_py3 import MariaDBLinkedService
-    from ._models_py3 import MariaDBSource
-    from ._models_py3 import MariaDBTableDataset
-    from ._models_py3 import MarketoLinkedService
-    from ._models_py3 import MarketoObjectDataset
-    from ._models_py3 import MarketoSource
-    from ._models_py3 import MicrosoftAccessLinkedService
-    from ._models_py3 import MicrosoftAccessSink
-    from ._models_py3 import MicrosoftAccessSource
-    from ._models_py3 import MicrosoftAccessTableDataset
-    from ._models_py3 import MongoDbCollectionDataset
-    from ._models_py3 import MongoDbCursorMethodsProperties
-    from ._models_py3 import MongoDbLinkedService
-    from ._models_py3 import MongoDbSource
-    from ._models_py3 import MongoDbV2CollectionDataset
-    from ._models_py3 import MongoDbV2LinkedService
-    from ._models_py3 import MongoDbV2Source
-    from ._models_py3 import MultiplePipelineTrigger
-    from ._models_py3 import MySqlLinkedService
-    from ._models_py3 import MySqlSource
-    from ._models_py3 import MySqlTableDataset
-    from ._models_py3 import NetezzaLinkedService
-    from ._models_py3 import NetezzaPartitionSettings
-    from ._models_py3 import NetezzaSource
-    from ._models_py3 import NetezzaTableDataset
-    from ._models_py3 import ODataLinkedService
-    from ._models_py3 import ODataResourceDataset
-    from ._models_py3 import ODataSource
-    from ._models_py3 import OdbcLinkedService
-    from ._models_py3 import OdbcSink
-    from ._models_py3 import OdbcSource
-    from ._models_py3 import OdbcTableDataset
-    from ._models_py3 import Office365Dataset
-    from ._models_py3 import Office365LinkedService
-    from ._models_py3 import Office365Source
-    from ._models_py3 import Operation
-    from ._models_py3 import OperationDisplay
-    from ._models_py3 import OperationLogSpecification
-    from ._models_py3 import OperationMetricAvailability
-    from ._models_py3 import OperationMetricDimension
-    from ._models_py3 import OperationMetricSpecification
-    from ._models_py3 import OperationServiceSpecification
-    from ._models_py3 import OracleLinkedService
-    from ._models_py3 import OraclePartitionSettings
-    from ._models_py3 import OracleServiceCloudLinkedService
-    from ._models_py3 import OracleServiceCloudObjectDataset
-    from ._models_py3 import OracleServiceCloudSource
-    from ._models_py3 import OracleSink
-    from ._models_py3 import OracleSource
-    from ._models_py3 import OracleTableDataset
-    from ._models_py3 import OrcFormat
-    from ._models_py3 import ParameterSpecification
-    from ._models_py3 import ParquetDataset
-    from ._models_py3 import ParquetFormat
-    from ._models_py3 import ParquetSink
-    from ._models_py3 import ParquetSource
-    from ._models_py3 import PaypalLinkedService
-    from ._models_py3 import PaypalObjectDataset
-    from ._models_py3 import PaypalSource
-    from ._models_py3 import PhoenixLinkedService
-    from ._models_py3 import PhoenixObjectDataset
-    from ._models_py3 import PhoenixSource
-    from ._models_py3 import PipelineFolder
-    from ._models_py3 import PipelineReference
-    from ._models_py3 import PipelineResource
-    from ._models_py3 import PipelineRun
-    from ._models_py3 import PipelineRunInvokedBy
-    from ._models_py3 import PipelineRunsQueryResponse
-    from ._models_py3 import PolybaseSettings
-    from ._models_py3 import PostgreSqlLinkedService
-    from ._models_py3 import PostgreSqlSource
-    from ._models_py3 import PostgreSqlTableDataset
-    from ._models_py3 import PrestoLinkedService
-    from ._models_py3 import PrestoObjectDataset
-    from ._models_py3 import PrestoSource
-    from ._models_py3 import QuickBooksLinkedService
-    from ._models_py3 import QuickBooksObjectDataset
-    from ._models_py3 import QuickBooksSource
-    from ._models_py3 import RecurrenceSchedule
-    from ._models_py3 import RecurrenceScheduleOccurrence
-    from ._models_py3 import RedirectIncompatibleRowSettings
-    from ._models_py3 import RedshiftUnloadSettings
-    from ._models_py3 import RelationalSource
-    from ._models_py3 import RelationalTableDataset
-    from ._models_py3 import RerunTriggerResource
-    from ._models_py3 import RerunTumblingWindowTrigger
-    from ._models_py3 import RerunTumblingWindowTriggerActionParameters
-    from ._models_py3 import Resource
-    from ._models_py3 import ResponsysLinkedService
-    from ._models_py3 import ResponsysObjectDataset
-    from ._models_py3 import ResponsysSource
-    from ._models_py3 import RestResourceDataset
-    from ._models_py3 import RestServiceLinkedService
-    from ._models_py3 import RestSource
-    from ._models_py3 import RetryPolicy
-    from ._models_py3 import RunFilterParameters
-    from ._models_py3 import RunQueryFilter
-    from ._models_py3 import RunQueryOrderBy
-    from ._models_py3 import SalesforceLinkedService
-    from ._models_py3 import SalesforceMarketingCloudLinkedService
-    from ._models_py3 import SalesforceMarketingCloudObjectDataset
-    from ._models_py3 import SalesforceMarketingCloudSource
-    from ._models_py3 import SalesforceObjectDataset
-    from ._models_py3 import SalesforceServiceCloudLinkedService
-    from ._models_py3 import SalesforceServiceCloudObjectDataset
-    from ._models_py3 import SalesforceServiceCloudSink
-    from ._models_py3 import SalesforceServiceCloudSource
-    from ._models_py3 import SalesforceSink
-    from ._models_py3 import SalesforceSource
-    from ._models_py3 import SapBwCubeDataset
-    from ._models_py3 import SapBWLinkedService
-    from ._models_py3 import SapBwSource
-    from ._models_py3 import SapCloudForCustomerLinkedService
-    from ._models_py3 import SapCloudForCustomerResourceDataset
-    from ._models_py3 import SapCloudForCustomerSink
-    from ._models_py3 import SapCloudForCustomerSource
-    from ._models_py3 import SapEccLinkedService
-    from ._models_py3 import SapEccResourceDataset
-    from ._models_py3 import SapEccSource
-    from ._models_py3 import SapHanaLinkedService
-    from ._models_py3 import SapHanaSource
-    from ._models_py3 import SapHanaTableDataset
-    from ._models_py3 import SapOpenHubLinkedService
-    from ._models_py3 import SapOpenHubSource
-    from ._models_py3 import SapOpenHubTableDataset
-    from ._models_py3 import SapTableLinkedService
-    from ._models_py3 import SapTablePartitionSettings
-    from ._models_py3 import SapTableResourceDataset
-    from ._models_py3 import SapTableSource
-    from ._models_py3 import ScheduleTrigger
-    from ._models_py3 import ScheduleTriggerRecurrence
-    from ._models_py3 import ScriptAction
-    from ._models_py3 import SecretBase
-    from ._models_py3 import SecureString
-    from ._models_py3 import SelfDependencyTumblingWindowTriggerReference
-    from ._models_py3 import SelfHostedIntegrationRuntime
-    from ._models_py3 import SelfHostedIntegrationRuntimeNode
-    from ._models_py3 import SelfHostedIntegrationRuntimeStatus
-    from ._models_py3 import ServiceNowLinkedService
-    from ._models_py3 import ServiceNowObjectDataset
-    from ._models_py3 import ServiceNowSource
-    from ._models_py3 import SetVariableActivity
-    from ._models_py3 import SftpLocation
-    from ._models_py3 import SftpReadSettings
-    from ._models_py3 import SftpServerLinkedService
-    from ._models_py3 import ShopifyLinkedService
-    from ._models_py3 import ShopifyObjectDataset
-    from ._models_py3 import ShopifySource
-    from ._models_py3 import SparkLinkedService
-    from ._models_py3 import SparkObjectDataset
-    from ._models_py3 import SparkSource
-    from ._models_py3 import SqlDWSink
-    from ._models_py3 import SqlDWSource
-    from ._models_py3 import SqlMISink
-    from ._models_py3 import SqlMISource
-    from ._models_py3 import SqlServerLinkedService
-    from ._models_py3 import SqlServerSink
-    from ._models_py3 import SqlServerSource
-    from ._models_py3 import SqlServerStoredProcedureActivity
-    from ._models_py3 import SqlServerTableDataset
-    from ._models_py3 import SqlSink
-    from ._models_py3 import SqlSource
-    from ._models_py3 import SquareLinkedService
-    from ._models_py3 import SquareObjectDataset
-    from ._models_py3 import SquareSource
-    from ._models_py3 import SSISAccessCredential
-    from ._models_py3 import SsisEnvironment
-    from ._models_py3 import SsisEnvironmentReference
-    from ._models_py3 import SSISExecutionCredential
-    from ._models_py3 import SSISExecutionParameter
-    from ._models_py3 import SsisFolder
-    from ._models_py3 import SSISLogLocation
-    from ._models_py3 import SsisObjectMetadata
-    from ._models_py3 import SsisObjectMetadataListResponse
-    from ._models_py3 import SsisObjectMetadataStatusResponse
-    from ._models_py3 import SsisPackage
-    from ._models_py3 import SSISPackageLocation
-    from ._models_py3 import SsisParameter
-    from ._models_py3 import SsisProject
-    from ._models_py3 import SSISPropertyOverride
-    from ._models_py3 import SsisVariable
-    from ._models_py3 import StagingSettings
-    from ._models_py3 import StoredProcedureParameter
-    from ._models_py3 import StoreReadSettings
-    from ._models_py3 import StoreWriteSettings
-    from ._models_py3 import SubResource
-    from ._models_py3 import SybaseLinkedService
-    from ._models_py3 import SybaseSource
-    from ._models_py3 import SybaseTableDataset
-    from ._models_py3 import TeradataLinkedService
-    from ._models_py3 import TeradataPartitionSettings
-    from ._models_py3 import TeradataSource
-    from ._models_py3 import TeradataTableDataset
-    from ._models_py3 import TextFormat
-    from ._models_py3 import Trigger
-    from ._models_py3 import TriggerDependencyReference
-    from ._models_py3 import TriggerPipelineReference
-    from ._models_py3 import TriggerReference
-    from ._models_py3 import TriggerResource
-    from ._models_py3 import TriggerRun
-    from ._models_py3 import TriggerRunsQueryResponse
-    from ._models_py3 import TriggerSubscriptionOperationStatus
-    from ._models_py3 import TumblingWindowTrigger
-    from ._models_py3 import TumblingWindowTriggerDependencyReference
-    from ._models_py3 import UntilActivity
-    from ._models_py3 import UpdateIntegrationRuntimeNodeRequest
-    from ._models_py3 import UpdateIntegrationRuntimeRequest
-    from ._models_py3 import UserAccessPolicy
-    from ._models_py3 import UserProperty
-    from ._models_py3 import ValidationActivity
-    from ._models_py3 import VariableSpecification
-    from ._models_py3 import VerticaLinkedService
-    from ._models_py3 import VerticaSource
-    from ._models_py3 import VerticaTableDataset
-    from ._models_py3 import WaitActivity
-    from ._models_py3 import WebActivity
-    from ._models_py3 import WebActivityAuthentication
-    from ._models_py3 import WebAnonymousAuthentication
-    from ._models_py3 import WebBasicAuthentication
-    from ._models_py3 import WebClientCertificateAuthentication
-    from ._models_py3 import WebHookActivity
-    from ._models_py3 import WebLinkedService
-    from ._models_py3 import WebLinkedServiceTypeProperties
-    from ._models_py3 import WebSource
-    from ._models_py3 import WebTableDataset
-    from ._models_py3 import XeroLinkedService
-    from ._models_py3 import XeroObjectDataset
-    from ._models_py3 import XeroSource
-    from ._models_py3 import ZohoLinkedService
-    from ._models_py3 import ZohoObjectDataset
-    from ._models_py3 import ZohoSource
+    from .resource_py3 import Resource
+    from .sub_resource_py3 import SubResource
+    from .expression_py3 import Expression
+    from .secure_string_py3 import SecureString
+    from .linked_service_reference_py3 import LinkedServiceReference
+    from .azure_key_vault_secret_reference_py3 import AzureKeyVaultSecretReference
+    from .secret_base_py3 import SecretBase
+    from .factory_identity_py3 import FactoryIdentity
+    from .factory_repo_configuration_py3 import FactoryRepoConfiguration
+    from .factory_py3 import Factory
+    from .integration_runtime_py3 import IntegrationRuntime
+    from .integration_runtime_resource_py3 import IntegrationRuntimeResource
+    from .integration_runtime_reference_py3 import IntegrationRuntimeReference
+    from .integration_runtime_status_py3 import IntegrationRuntimeStatus
+    from .integration_runtime_status_response_py3 import IntegrationRuntimeStatusResponse
+    from .integration_runtime_status_list_response_py3 import IntegrationRuntimeStatusListResponse
+    from .update_integration_runtime_request_py3 import UpdateIntegrationRuntimeRequest
+    from .update_integration_runtime_node_request_py3 import UpdateIntegrationRuntimeNodeRequest
+    from .linked_integration_runtime_request_py3 import LinkedIntegrationRuntimeRequest
+    from .create_linked_integration_runtime_request_py3 import CreateLinkedIntegrationRuntimeRequest
+    from .parameter_specification_py3 import ParameterSpecification
+    from .linked_service_py3 import LinkedService
+    from .linked_service_resource_py3 import LinkedServiceResource
+    from .dataset_folder_py3 import DatasetFolder
+    from .dataset_py3 import Dataset
+    from .dataset_resource_py3 import DatasetResource
+    from .activity_dependency_py3 import ActivityDependency
+    from .user_property_py3 import UserProperty
+
from .activity_py3 import Activity + from .variable_specification_py3 import VariableSpecification + from .pipeline_folder_py3 import PipelineFolder + from .pipeline_resource_py3 import PipelineResource + from .trigger_py3 import Trigger + from .trigger_resource_py3 import TriggerResource + from .create_run_response_py3 import CreateRunResponse + from .trigger_subscription_operation_status_py3 import TriggerSubscriptionOperationStatus + from .factory_vsts_configuration_py3 import FactoryVSTSConfiguration + from .factory_git_hub_configuration_py3 import FactoryGitHubConfiguration + from .factory_repo_update_py3 import FactoryRepoUpdate + from .git_hub_access_token_request_py3 import GitHubAccessTokenRequest + from .git_hub_access_token_response_py3 import GitHubAccessTokenResponse + from .user_access_policy_py3 import UserAccessPolicy + from .access_policy_response_py3 import AccessPolicyResponse + from .pipeline_reference_py3 import PipelineReference + from .trigger_pipeline_reference_py3 import TriggerPipelineReference + from .factory_update_parameters_py3 import FactoryUpdateParameters + from .dataset_reference_py3 import DatasetReference + from .run_query_filter_py3 import RunQueryFilter + from .run_query_order_by_py3 import RunQueryOrderBy + from .run_filter_parameters_py3 import RunFilterParameters + from .pipeline_run_invoked_by_py3 import PipelineRunInvokedBy + from .pipeline_run_py3 import PipelineRun + from .pipeline_runs_query_response_py3 import PipelineRunsQueryResponse + from .activity_run_py3 import ActivityRun + from .activity_runs_query_response_py3 import ActivityRunsQueryResponse + from .trigger_run_py3 import TriggerRun + from .trigger_runs_query_response_py3 import TriggerRunsQueryResponse + from .rerun_tumbling_window_trigger_action_parameters_py3 import RerunTumblingWindowTriggerActionParameters + from .rerun_tumbling_window_trigger_py3 import RerunTumblingWindowTrigger + from .rerun_trigger_resource_py3 import RerunTriggerResource + from .operation_display_py3 import OperationDisplay + from .operation_log_specification_py3 import OperationLogSpecification + from .operation_metric_availability_py3 import OperationMetricAvailability + from .operation_metric_dimension_py3 import OperationMetricDimension + from .operation_metric_specification_py3 import OperationMetricSpecification + from .operation_service_specification_py3 import OperationServiceSpecification + from .operation_py3 import Operation + from .get_ssis_object_metadata_request_py3 import GetSsisObjectMetadataRequest + from .ssis_object_metadata_status_response_py3 import SsisObjectMetadataStatusResponse + from .exposure_control_request_py3 import ExposureControlRequest + from .exposure_control_response_py3 import ExposureControlResponse + from .self_dependency_tumbling_window_trigger_reference_py3 import SelfDependencyTumblingWindowTriggerReference + from .trigger_reference_py3 import TriggerReference + from .tumbling_window_trigger_dependency_reference_py3 import TumblingWindowTriggerDependencyReference + from .trigger_dependency_reference_py3 import TriggerDependencyReference + from .dependency_reference_py3 import DependencyReference + from .retry_policy_py3 import RetryPolicy + from .tumbling_window_trigger_py3 import TumblingWindowTrigger + from .blob_events_trigger_py3 import BlobEventsTrigger + from .blob_trigger_py3 import BlobTrigger + from .recurrence_schedule_occurrence_py3 import RecurrenceScheduleOccurrence + from .recurrence_schedule_py3 import RecurrenceSchedule + from 
.schedule_trigger_recurrence_py3 import ScheduleTriggerRecurrence + from .schedule_trigger_py3 import ScheduleTrigger + from .multiple_pipeline_trigger_py3 import MultiplePipelineTrigger + from .azure_function_linked_service_py3 import AzureFunctionLinkedService + from .azure_data_explorer_linked_service_py3 import AzureDataExplorerLinkedService + from .sap_table_linked_service_py3 import SapTableLinkedService + from .google_ad_words_linked_service_py3 import GoogleAdWordsLinkedService + from .oracle_service_cloud_linked_service_py3 import OracleServiceCloudLinkedService + from .dynamics_ax_linked_service_py3 import DynamicsAXLinkedService + from .responsys_linked_service_py3 import ResponsysLinkedService + from .azure_databricks_linked_service_py3 import AzureDatabricksLinkedService + from .azure_data_lake_analytics_linked_service_py3 import AzureDataLakeAnalyticsLinkedService + from .script_action_py3 import ScriptAction + from .hd_insight_on_demand_linked_service_py3 import HDInsightOnDemandLinkedService + from .salesforce_marketing_cloud_linked_service_py3 import SalesforceMarketingCloudLinkedService + from .netezza_linked_service_py3 import NetezzaLinkedService + from .vertica_linked_service_py3 import VerticaLinkedService + from .zoho_linked_service_py3 import ZohoLinkedService + from .xero_linked_service_py3 import XeroLinkedService + from .square_linked_service_py3 import SquareLinkedService + from .spark_linked_service_py3 import SparkLinkedService + from .shopify_linked_service_py3 import ShopifyLinkedService + from .service_now_linked_service_py3 import ServiceNowLinkedService + from .quick_books_linked_service_py3 import QuickBooksLinkedService + from .presto_linked_service_py3 import PrestoLinkedService + from .phoenix_linked_service_py3 import PhoenixLinkedService + from .paypal_linked_service_py3 import PaypalLinkedService + from .marketo_linked_service_py3 import MarketoLinkedService + from .azure_maria_db_linked_service_py3 import AzureMariaDBLinkedService + from .maria_db_linked_service_py3 import MariaDBLinkedService + from .magento_linked_service_py3 import MagentoLinkedService + from .jira_linked_service_py3 import JiraLinkedService + from .impala_linked_service_py3 import ImpalaLinkedService + from .hubspot_linked_service_py3 import HubspotLinkedService + from .hive_linked_service_py3 import HiveLinkedService + from .hbase_linked_service_py3 import HBaseLinkedService + from .greenplum_linked_service_py3 import GreenplumLinkedService + from .google_big_query_linked_service_py3 import GoogleBigQueryLinkedService + from .eloqua_linked_service_py3 import EloquaLinkedService + from .drill_linked_service_py3 import DrillLinkedService + from .couchbase_linked_service_py3 import CouchbaseLinkedService + from .concur_linked_service_py3 import ConcurLinkedService + from .azure_postgre_sql_linked_service_py3 import AzurePostgreSqlLinkedService + from .amazon_mws_linked_service_py3 import AmazonMWSLinkedService + from .sap_hana_linked_service_py3 import SapHanaLinkedService + from .sap_bw_linked_service_py3 import SapBWLinkedService + from .sftp_server_linked_service_py3 import SftpServerLinkedService + from .ftp_server_linked_service_py3 import FtpServerLinkedService + from .http_linked_service_py3 import HttpLinkedService + from .azure_search_linked_service_py3 import AzureSearchLinkedService + from .custom_data_source_linked_service_py3 import CustomDataSourceLinkedService + from .amazon_redshift_linked_service_py3 import AmazonRedshiftLinkedService + from 
.amazon_s3_linked_service_py3 import AmazonS3LinkedService + from .rest_service_linked_service_py3 import RestServiceLinkedService + from .sap_open_hub_linked_service_py3 import SapOpenHubLinkedService + from .sap_ecc_linked_service_py3 import SapEccLinkedService + from .sap_cloud_for_customer_linked_service_py3 import SapCloudForCustomerLinkedService + from .salesforce_service_cloud_linked_service_py3 import SalesforceServiceCloudLinkedService + from .salesforce_linked_service_py3 import SalesforceLinkedService + from .office365_linked_service_py3 import Office365LinkedService + from .azure_blob_fs_linked_service_py3 import AzureBlobFSLinkedService + from .azure_data_lake_store_linked_service_py3 import AzureDataLakeStoreLinkedService + from .cosmos_db_mongo_db_api_linked_service_py3 import CosmosDbMongoDbApiLinkedService + from .mongo_db_v2_linked_service_py3 import MongoDbV2LinkedService + from .mongo_db_linked_service_py3 import MongoDbLinkedService + from .cassandra_linked_service_py3 import CassandraLinkedService + from .web_client_certificate_authentication_py3 import WebClientCertificateAuthentication + from .web_basic_authentication_py3 import WebBasicAuthentication + from .web_anonymous_authentication_py3 import WebAnonymousAuthentication + from .web_linked_service_type_properties_py3 import WebLinkedServiceTypeProperties + from .web_linked_service_py3 import WebLinkedService + from .odata_linked_service_py3 import ODataLinkedService + from .hdfs_linked_service_py3 import HdfsLinkedService + from .microsoft_access_linked_service_py3 import MicrosoftAccessLinkedService + from .informix_linked_service_py3 import InformixLinkedService + from .odbc_linked_service_py3 import OdbcLinkedService + from .azure_ml_linked_service_py3 import AzureMLLinkedService + from .teradata_linked_service_py3 import TeradataLinkedService + from .db2_linked_service_py3 import Db2LinkedService + from .sybase_linked_service_py3 import SybaseLinkedService + from .postgre_sql_linked_service_py3 import PostgreSqlLinkedService + from .my_sql_linked_service_py3 import MySqlLinkedService + from .azure_my_sql_linked_service_py3 import AzureMySqlLinkedService + from .oracle_linked_service_py3 import OracleLinkedService + from .file_server_linked_service_py3 import FileServerLinkedService + from .hd_insight_linked_service_py3 import HDInsightLinkedService + from .common_data_service_for_apps_linked_service_py3 import CommonDataServiceForAppsLinkedService + from .dynamics_crm_linked_service_py3 import DynamicsCrmLinkedService + from .dynamics_linked_service_py3 import DynamicsLinkedService + from .cosmos_db_linked_service_py3 import CosmosDbLinkedService + from .azure_key_vault_linked_service_py3 import AzureKeyVaultLinkedService + from .azure_batch_linked_service_py3 import AzureBatchLinkedService + from .azure_sql_mi_linked_service_py3 import AzureSqlMILinkedService + from .azure_sql_database_linked_service_py3 import AzureSqlDatabaseLinkedService + from .sql_server_linked_service_py3 import SqlServerLinkedService + from .azure_sql_dw_linked_service_py3 import AzureSqlDWLinkedService + from .azure_table_storage_linked_service_py3 import AzureTableStorageLinkedService + from .azure_blob_storage_linked_service_py3 import AzureBlobStorageLinkedService + from .azure_storage_linked_service_py3 import AzureStorageLinkedService + from .google_ad_words_object_dataset_py3 import GoogleAdWordsObjectDataset + from .azure_data_explorer_table_dataset_py3 import AzureDataExplorerTableDataset + from 
.oracle_service_cloud_object_dataset_py3 import OracleServiceCloudObjectDataset + from .dynamics_ax_resource_dataset_py3 import DynamicsAXResourceDataset + from .responsys_object_dataset_py3 import ResponsysObjectDataset + from .salesforce_marketing_cloud_object_dataset_py3 import SalesforceMarketingCloudObjectDataset + from .vertica_table_dataset_py3 import VerticaTableDataset + from .netezza_table_dataset_py3 import NetezzaTableDataset + from .zoho_object_dataset_py3 import ZohoObjectDataset + from .xero_object_dataset_py3 import XeroObjectDataset + from .square_object_dataset_py3 import SquareObjectDataset + from .spark_object_dataset_py3 import SparkObjectDataset + from .shopify_object_dataset_py3 import ShopifyObjectDataset + from .service_now_object_dataset_py3 import ServiceNowObjectDataset + from .quick_books_object_dataset_py3 import QuickBooksObjectDataset + from .presto_object_dataset_py3 import PrestoObjectDataset + from .phoenix_object_dataset_py3 import PhoenixObjectDataset + from .paypal_object_dataset_py3 import PaypalObjectDataset + from .marketo_object_dataset_py3 import MarketoObjectDataset + from .azure_maria_db_table_dataset_py3 import AzureMariaDBTableDataset + from .maria_db_table_dataset_py3 import MariaDBTableDataset + from .magento_object_dataset_py3 import MagentoObjectDataset + from .jira_object_dataset_py3 import JiraObjectDataset + from .impala_object_dataset_py3 import ImpalaObjectDataset + from .hubspot_object_dataset_py3 import HubspotObjectDataset + from .hive_object_dataset_py3 import HiveObjectDataset + from .hbase_object_dataset_py3 import HBaseObjectDataset + from .greenplum_table_dataset_py3 import GreenplumTableDataset + from .google_big_query_object_dataset_py3 import GoogleBigQueryObjectDataset + from .eloqua_object_dataset_py3 import EloquaObjectDataset + from .drill_table_dataset_py3 import DrillTableDataset + from .couchbase_table_dataset_py3 import CouchbaseTableDataset + from .concur_object_dataset_py3 import ConcurObjectDataset + from .azure_postgre_sql_table_dataset_py3 import AzurePostgreSqlTableDataset + from .amazon_mws_object_dataset_py3 import AmazonMWSObjectDataset + from .dataset_zip_deflate_compression_py3 import DatasetZipDeflateCompression + from .dataset_deflate_compression_py3 import DatasetDeflateCompression + from .dataset_gzip_compression_py3 import DatasetGZipCompression + from .dataset_bzip2_compression_py3 import DatasetBZip2Compression + from .dataset_compression_py3 import DatasetCompression + from .parquet_format_py3 import ParquetFormat + from .orc_format_py3 import OrcFormat + from .avro_format_py3 import AvroFormat + from .json_format_py3 import JsonFormat + from .text_format_py3 import TextFormat + from .dataset_storage_format_py3 import DatasetStorageFormat + from .http_dataset_py3 import HttpDataset + from .azure_search_index_dataset_py3 import AzureSearchIndexDataset + from .web_table_dataset_py3 import WebTableDataset + from .sap_table_resource_dataset_py3 import SapTableResourceDataset + from .rest_resource_dataset_py3 import RestResourceDataset + from .sql_server_table_dataset_py3 import SqlServerTableDataset + from .sap_open_hub_table_dataset_py3 import SapOpenHubTableDataset + from .sap_hana_table_dataset_py3 import SapHanaTableDataset + from .sap_ecc_resource_dataset_py3 import SapEccResourceDataset + from .sap_cloud_for_customer_resource_dataset_py3 import SapCloudForCustomerResourceDataset + from .sap_bw_cube_dataset_py3 import SapBwCubeDataset + from .sybase_table_dataset_py3 import SybaseTableDataset + 
from .salesforce_service_cloud_object_dataset_py3 import SalesforceServiceCloudObjectDataset + from .salesforce_object_dataset_py3 import SalesforceObjectDataset + from .microsoft_access_table_dataset_py3 import MicrosoftAccessTableDataset + from .postgre_sql_table_dataset_py3 import PostgreSqlTableDataset + from .my_sql_table_dataset_py3 import MySqlTableDataset + from .odbc_table_dataset_py3 import OdbcTableDataset + from .informix_table_dataset_py3 import InformixTableDataset + from .relational_table_dataset_py3 import RelationalTableDataset + from .db2_table_dataset_py3 import Db2TableDataset + from .amazon_redshift_table_dataset_py3 import AmazonRedshiftTableDataset + from .azure_my_sql_table_dataset_py3 import AzureMySqlTableDataset + from .teradata_table_dataset_py3 import TeradataTableDataset + from .oracle_table_dataset_py3 import OracleTableDataset + from .odata_resource_dataset_py3 import ODataResourceDataset + from .cosmos_db_mongo_db_api_collection_dataset_py3 import CosmosDbMongoDbApiCollectionDataset + from .mongo_db_v2_collection_dataset_py3 import MongoDbV2CollectionDataset + from .mongo_db_collection_dataset_py3 import MongoDbCollectionDataset + from .file_share_dataset_py3 import FileShareDataset + from .office365_dataset_py3 import Office365Dataset + from .azure_blob_fs_dataset_py3 import AzureBlobFSDataset + from .azure_data_lake_store_dataset_py3 import AzureDataLakeStoreDataset + from .common_data_service_for_apps_entity_dataset_py3 import CommonDataServiceForAppsEntityDataset + from .dynamics_crm_entity_dataset_py3 import DynamicsCrmEntityDataset + from .dynamics_entity_dataset_py3 import DynamicsEntityDataset + from .document_db_collection_dataset_py3 import DocumentDbCollectionDataset + from .custom_dataset_py3 import CustomDataset + from .cassandra_table_dataset_py3 import CassandraTableDataset + from .azure_sql_dw_table_dataset_py3 import AzureSqlDWTableDataset + from .azure_sql_mi_table_dataset_py3 import AzureSqlMITableDataset + from .azure_sql_table_dataset_py3 import AzureSqlTableDataset + from .azure_table_dataset_py3 import AzureTableDataset + from .azure_blob_dataset_py3 import AzureBlobDataset + from .hdfs_location_py3 import HdfsLocation + from .http_server_location_py3 import HttpServerLocation + from .sftp_location_py3 import SftpLocation + from .ftp_server_location_py3 import FtpServerLocation + from .file_server_location_py3 import FileServerLocation + from .amazon_s3_location_py3 import AmazonS3Location + from .azure_data_lake_store_location_py3 import AzureDataLakeStoreLocation + from .azure_blob_fs_location_py3 import AzureBlobFSLocation + from .azure_blob_storage_location_py3 import AzureBlobStorageLocation + from .dataset_location_py3 import DatasetLocation + from .binary_dataset_py3 import BinaryDataset + from .json_dataset_py3 import JsonDataset + from .delimited_text_dataset_py3 import DelimitedTextDataset + from .parquet_dataset_py3 import ParquetDataset + from .avro_dataset_py3 import AvroDataset + from .amazon_s3_dataset_py3 import AmazonS3Dataset + from .activity_policy_py3 import ActivityPolicy + from .azure_function_activity_py3 import AzureFunctionActivity + from .databricks_spark_python_activity_py3 import DatabricksSparkPythonActivity + from .databricks_spark_jar_activity_py3 import DatabricksSparkJarActivity + from .databricks_notebook_activity_py3 import DatabricksNotebookActivity + from .data_lake_analytics_usql_activity_py3 import DataLakeAnalyticsUSQLActivity + from .azure_ml_update_resource_activity_py3 import 
AzureMLUpdateResourceActivity + from .azure_ml_web_service_file_py3 import AzureMLWebServiceFile + from .azure_ml_batch_execution_activity_py3 import AzureMLBatchExecutionActivity + from .get_metadata_activity_py3 import GetMetadataActivity + from .web_activity_authentication_py3 import WebActivityAuthentication + from .web_activity_py3 import WebActivity + from .redshift_unload_settings_py3 import RedshiftUnloadSettings + from .amazon_redshift_source_py3 import AmazonRedshiftSource + from .google_ad_words_source_py3 import GoogleAdWordsSource + from .oracle_service_cloud_source_py3 import OracleServiceCloudSource + from .dynamics_ax_source_py3 import DynamicsAXSource + from .responsys_source_py3 import ResponsysSource + from .salesforce_marketing_cloud_source_py3 import SalesforceMarketingCloudSource + from .vertica_source_py3 import VerticaSource + from .netezza_partition_settings_py3 import NetezzaPartitionSettings + from .netezza_source_py3 import NetezzaSource + from .zoho_source_py3 import ZohoSource + from .xero_source_py3 import XeroSource + from .square_source_py3 import SquareSource + from .spark_source_py3 import SparkSource + from .shopify_source_py3 import ShopifySource + from .service_now_source_py3 import ServiceNowSource + from .quick_books_source_py3 import QuickBooksSource + from .presto_source_py3 import PrestoSource + from .phoenix_source_py3 import PhoenixSource + from .paypal_source_py3 import PaypalSource + from .marketo_source_py3 import MarketoSource + from .azure_maria_db_source_py3 import AzureMariaDBSource + from .maria_db_source_py3 import MariaDBSource + from .magento_source_py3 import MagentoSource + from .jira_source_py3 import JiraSource + from .impala_source_py3 import ImpalaSource + from .hubspot_source_py3 import HubspotSource + from .hive_source_py3 import HiveSource + from .hbase_source_py3 import HBaseSource + from .greenplum_source_py3 import GreenplumSource + from .google_big_query_source_py3 import GoogleBigQuerySource + from .eloqua_source_py3 import EloquaSource + from .drill_source_py3 import DrillSource + from .couchbase_source_py3 import CouchbaseSource + from .concur_source_py3 import ConcurSource + from .azure_postgre_sql_source_py3 import AzurePostgreSqlSource + from .amazon_mws_source_py3 import AmazonMWSSource + from .http_source_py3 import HttpSource + from .azure_blob_fs_source_py3 import AzureBlobFSSource + from .azure_data_lake_store_source_py3 import AzureDataLakeStoreSource + from .office365_source_py3 import Office365Source + from .mongo_db_cursor_methods_properties_py3 import MongoDbCursorMethodsProperties + from .cosmos_db_mongo_db_api_source_py3 import CosmosDbMongoDbApiSource + from .mongo_db_v2_source_py3 import MongoDbV2Source + from .mongo_db_source_py3 import MongoDbSource + from .cassandra_source_py3 import CassandraSource + from .web_source_py3 import WebSource + from .teradata_partition_settings_py3 import TeradataPartitionSettings + from .teradata_source_py3 import TeradataSource + from .oracle_partition_settings_py3 import OraclePartitionSettings + from .oracle_source_py3 import OracleSource + from .azure_data_explorer_source_py3 import AzureDataExplorerSource + from .azure_my_sql_source_py3 import AzureMySqlSource + from .distcp_settings_py3 import DistcpSettings + from .hdfs_source_py3 import HdfsSource + from .file_system_source_py3 import FileSystemSource + from .sql_dw_source_py3 import SqlDWSource + from .stored_procedure_parameter_py3 import StoredProcedureParameter + from .sql_mi_source_py3 import SqlMISource + 
from .azure_sql_source_py3 import AzureSqlSource + from .sql_server_source_py3 import SqlServerSource + from .sql_source_py3 import SqlSource + from .rest_source_py3 import RestSource + from .sap_table_partition_settings_py3 import SapTablePartitionSettings + from .sap_table_source_py3 import SapTableSource + from .sap_open_hub_source_py3 import SapOpenHubSource + from .sap_hana_source_py3 import SapHanaSource + from .sap_ecc_source_py3 import SapEccSource + from .sap_cloud_for_customer_source_py3 import SapCloudForCustomerSource + from .salesforce_service_cloud_source_py3 import SalesforceServiceCloudSource + from .salesforce_source_py3 import SalesforceSource + from .odata_source_py3 import ODataSource + from .sap_bw_source_py3 import SapBwSource + from .sybase_source_py3 import SybaseSource + from .postgre_sql_source_py3 import PostgreSqlSource + from .my_sql_source_py3 import MySqlSource + from .odbc_source_py3 import OdbcSource + from .db2_source_py3 import Db2Source + from .microsoft_access_source_py3 import MicrosoftAccessSource + from .informix_source_py3 import InformixSource + from .relational_source_py3 import RelationalSource + from .common_data_service_for_apps_source_py3 import CommonDataServiceForAppsSource + from .dynamics_crm_source_py3 import DynamicsCrmSource + from .dynamics_source_py3 import DynamicsSource + from .document_db_collection_source_py3 import DocumentDbCollectionSource + from .blob_source_py3 import BlobSource + from .azure_table_source_py3 import AzureTableSource + from .hdfs_read_settings_py3 import HdfsReadSettings + from .http_read_settings_py3 import HttpReadSettings + from .sftp_read_settings_py3 import SftpReadSettings + from .ftp_read_settings_py3 import FtpReadSettings + from .file_server_read_settings_py3 import FileServerReadSettings + from .amazon_s3_read_settings_py3 import AmazonS3ReadSettings + from .azure_data_lake_store_read_settings_py3 import AzureDataLakeStoreReadSettings + from .azure_blob_fs_read_settings_py3 import AzureBlobFSReadSettings + from .azure_blob_storage_read_settings_py3 import AzureBlobStorageReadSettings + from .store_read_settings_py3 import StoreReadSettings + from .binary_source_py3 import BinarySource + from .json_source_py3 import JsonSource + from .format_read_settings_py3 import FormatReadSettings + from .delimited_text_read_settings_py3 import DelimitedTextReadSettings + from .delimited_text_source_py3 import DelimitedTextSource + from .parquet_source_py3 import ParquetSource + from .avro_source_py3 import AvroSource + from .copy_source_py3 import CopySource + from .lookup_activity_py3 import LookupActivity + from .azure_data_explorer_command_activity_py3 import AzureDataExplorerCommandActivity + from .log_storage_settings_py3 import LogStorageSettings + from .delete_activity_py3 import DeleteActivity + from .sql_server_stored_procedure_activity_py3 import SqlServerStoredProcedureActivity + from .custom_activity_reference_object_py3 import CustomActivityReferenceObject + from .custom_activity_py3 import CustomActivity + from .ssis_access_credential_py3 import SSISAccessCredential + from .ssis_log_location_py3 import SSISLogLocation + from .ssis_property_override_py3 import SSISPropertyOverride + from .ssis_execution_parameter_py3 import SSISExecutionParameter + from .ssis_execution_credential_py3 import SSISExecutionCredential + from .ssis_package_location_py3 import SSISPackageLocation + from .execute_ssis_package_activity_py3 import ExecuteSSISPackageActivity + from .hd_insight_spark_activity_py3 import 
HDInsightSparkActivity + from .hd_insight_streaming_activity_py3 import HDInsightStreamingActivity + from .hd_insight_map_reduce_activity_py3 import HDInsightMapReduceActivity + from .hd_insight_pig_activity_py3 import HDInsightPigActivity + from .hd_insight_hive_activity_py3 import HDInsightHiveActivity + from .redirect_incompatible_row_settings_py3 import RedirectIncompatibleRowSettings + from .staging_settings_py3 import StagingSettings + from .cosmos_db_mongo_db_api_sink_py3 import CosmosDbMongoDbApiSink + from .salesforce_service_cloud_sink_py3 import SalesforceServiceCloudSink + from .salesforce_sink_py3 import SalesforceSink + from .azure_data_explorer_sink_py3 import AzureDataExplorerSink + from .common_data_service_for_apps_sink_py3 import CommonDataServiceForAppsSink + from .dynamics_crm_sink_py3 import DynamicsCrmSink + from .dynamics_sink_py3 import DynamicsSink + from .microsoft_access_sink_py3 import MicrosoftAccessSink + from .informix_sink_py3 import InformixSink + from .odbc_sink_py3 import OdbcSink + from .azure_search_index_sink_py3 import AzureSearchIndexSink + from .azure_blob_fs_sink_py3 import AzureBlobFSSink + from .azure_data_lake_store_sink_py3 import AzureDataLakeStoreSink + from .oracle_sink_py3 import OracleSink + from .polybase_settings_py3 import PolybaseSettings + from .sql_dw_sink_py3 import SqlDWSink + from .sql_mi_sink_py3 import SqlMISink + from .azure_sql_sink_py3 import AzureSqlSink + from .sql_server_sink_py3 import SqlServerSink + from .sql_sink_py3 import SqlSink + from .document_db_collection_sink_py3 import DocumentDbCollectionSink + from .file_system_sink_py3 import FileSystemSink + from .blob_sink_py3 import BlobSink + from .file_server_write_settings_py3 import FileServerWriteSettings + from .azure_data_lake_store_write_settings_py3 import AzureDataLakeStoreWriteSettings + from .azure_blob_fs_write_settings_py3 import AzureBlobFSWriteSettings + from .azure_blob_storage_write_settings_py3 import AzureBlobStorageWriteSettings + from .store_write_settings_py3 import StoreWriteSettings + from .binary_sink_py3 import BinarySink + from .parquet_sink_py3 import ParquetSink + from .json_write_settings_py3 import JsonWriteSettings + from .delimited_text_write_settings_py3 import DelimitedTextWriteSettings + from .format_write_settings_py3 import FormatWriteSettings + from .avro_write_settings_py3 import AvroWriteSettings + from .avro_sink_py3 import AvroSink + from .azure_table_sink_py3 import AzureTableSink + from .azure_queue_sink_py3 import AzureQueueSink + from .sap_cloud_for_customer_sink_py3 import SapCloudForCustomerSink + from .azure_my_sql_sink_py3 import AzureMySqlSink + from .azure_postgre_sql_sink_py3 import AzurePostgreSqlSink + from .json_sink_py3 import JsonSink + from .delimited_text_sink_py3 import DelimitedTextSink + from .copy_sink_py3 import CopySink + from .copy_activity_py3 import CopyActivity + from .execution_activity_py3 import ExecutionActivity + from .web_hook_activity_py3 import WebHookActivity + from .append_variable_activity_py3 import AppendVariableActivity + from .set_variable_activity_py3 import SetVariableActivity + from .filter_activity_py3 import FilterActivity + from .validation_activity_py3 import ValidationActivity + from .until_activity_py3 import UntilActivity + from .wait_activity_py3 import WaitActivity + from .for_each_activity_py3 import ForEachActivity + from .if_condition_activity_py3 import IfConditionActivity + from .execute_pipeline_activity_py3 import ExecutePipelineActivity + from .control_activity_py3 
import ControlActivity + from .linked_integration_runtime_py3 import LinkedIntegrationRuntime + from .self_hosted_integration_runtime_node_py3 import SelfHostedIntegrationRuntimeNode + from .self_hosted_integration_runtime_status_py3 import SelfHostedIntegrationRuntimeStatus + from .managed_integration_runtime_operation_result_py3 import ManagedIntegrationRuntimeOperationResult + from .managed_integration_runtime_error_py3 import ManagedIntegrationRuntimeError + from .managed_integration_runtime_node_py3 import ManagedIntegrationRuntimeNode + from .managed_integration_runtime_status_py3 import ManagedIntegrationRuntimeStatus + from .linked_integration_runtime_rbac_authorization_py3 import LinkedIntegrationRuntimeRbacAuthorization + from .linked_integration_runtime_key_authorization_py3 import LinkedIntegrationRuntimeKeyAuthorization + from .linked_integration_runtime_type_py3 import LinkedIntegrationRuntimeType + from .self_hosted_integration_runtime_py3 import SelfHostedIntegrationRuntime + from .entity_reference_py3 import EntityReference + from .integration_runtime_data_proxy_properties_py3 import IntegrationRuntimeDataProxyProperties + from .integration_runtime_custom_setup_script_properties_py3 import IntegrationRuntimeCustomSetupScriptProperties + from .integration_runtime_ssis_catalog_info_py3 import IntegrationRuntimeSsisCatalogInfo + from .integration_runtime_ssis_properties_py3 import IntegrationRuntimeSsisProperties + from .integration_runtime_vnet_properties_py3 import IntegrationRuntimeVNetProperties + from .integration_runtime_compute_properties_py3 import IntegrationRuntimeComputeProperties + from .managed_integration_runtime_py3 import ManagedIntegrationRuntime + from .integration_runtime_node_ip_address_py3 import IntegrationRuntimeNodeIpAddress + from .ssis_variable_py3 import SsisVariable + from .ssis_environment_py3 import SsisEnvironment + from .ssis_parameter_py3 import SsisParameter + from .ssis_package_py3 import SsisPackage + from .ssis_environment_reference_py3 import SsisEnvironmentReference + from .ssis_project_py3 import SsisProject + from .ssis_folder_py3 import SsisFolder + from .ssis_object_metadata_py3 import SsisObjectMetadata + from .ssis_object_metadata_list_response_py3 import SsisObjectMetadataListResponse + from .integration_runtime_node_monitoring_data_py3 import IntegrationRuntimeNodeMonitoringData + from .integration_runtime_monitoring_data_py3 import IntegrationRuntimeMonitoringData + from .integration_runtime_auth_keys_py3 import IntegrationRuntimeAuthKeys + from .integration_runtime_regenerate_key_parameters_py3 import IntegrationRuntimeRegenerateKeyParameters + from .integration_runtime_connection_info_py3 import IntegrationRuntimeConnectionInfo except (SyntaxError, ImportError): - from ._models import AccessPolicyResponse - from ._models import Activity - from ._models import ActivityDependency - from ._models import ActivityPolicy - from ._models import ActivityRun - from ._models import ActivityRunsQueryResponse - from ._models import AmazonMWSLinkedService - from ._models import AmazonMWSObjectDataset - from ._models import AmazonMWSSource - from ._models import AmazonRedshiftLinkedService - from ._models import AmazonRedshiftSource - from ._models import AmazonRedshiftTableDataset - from ._models import AmazonS3Dataset - from ._models import AmazonS3LinkedService - from ._models import AmazonS3Location - from ._models import AmazonS3ReadSettings - from ._models import AppendVariableActivity - from ._models import AvroDataset - from 
._models import AvroFormat - from ._models import AvroSink - from ._models import AvroSource - from ._models import AvroWriteSettings - from ._models import AzureBatchLinkedService - from ._models import AzureBlobDataset - from ._models import AzureBlobFSDataset - from ._models import AzureBlobFSLinkedService - from ._models import AzureBlobFSLocation - from ._models import AzureBlobFSReadSettings - from ._models import AzureBlobFSSink - from ._models import AzureBlobFSSource - from ._models import AzureBlobFSWriteSettings - from ._models import AzureBlobStorageLinkedService - from ._models import AzureBlobStorageLocation - from ._models import AzureBlobStorageReadSettings - from ._models import AzureBlobStorageWriteSettings - from ._models import AzureDatabricksLinkedService - from ._models import AzureDataExplorerCommandActivity - from ._models import AzureDataExplorerLinkedService - from ._models import AzureDataExplorerSink - from ._models import AzureDataExplorerSource - from ._models import AzureDataExplorerTableDataset - from ._models import AzureDataLakeAnalyticsLinkedService - from ._models import AzureDataLakeStoreDataset - from ._models import AzureDataLakeStoreLinkedService - from ._models import AzureDataLakeStoreLocation - from ._models import AzureDataLakeStoreReadSettings - from ._models import AzureDataLakeStoreSink - from ._models import AzureDataLakeStoreSource - from ._models import AzureDataLakeStoreWriteSettings - from ._models import AzureFunctionActivity - from ._models import AzureFunctionLinkedService - from ._models import AzureKeyVaultLinkedService - from ._models import AzureKeyVaultSecretReference - from ._models import AzureMariaDBLinkedService - from ._models import AzureMariaDBSource - from ._models import AzureMariaDBTableDataset - from ._models import AzureMLBatchExecutionActivity - from ._models import AzureMLLinkedService - from ._models import AzureMLUpdateResourceActivity - from ._models import AzureMLWebServiceFile - from ._models import AzureMySqlLinkedService - from ._models import AzureMySqlSink - from ._models import AzureMySqlSource - from ._models import AzureMySqlTableDataset - from ._models import AzurePostgreSqlLinkedService - from ._models import AzurePostgreSqlSink - from ._models import AzurePostgreSqlSource - from ._models import AzurePostgreSqlTableDataset - from ._models import AzureQueueSink - from ._models import AzureSearchIndexDataset - from ._models import AzureSearchIndexSink - from ._models import AzureSearchLinkedService - from ._models import AzureSqlDatabaseLinkedService - from ._models import AzureSqlDWLinkedService - from ._models import AzureSqlDWTableDataset - from ._models import AzureSqlMILinkedService - from ._models import AzureSqlMITableDataset - from ._models import AzureSqlSink - from ._models import AzureSqlSource - from ._models import AzureSqlTableDataset - from ._models import AzureStorageLinkedService - from ._models import AzureTableDataset - from ._models import AzureTableSink - from ._models import AzureTableSource - from ._models import AzureTableStorageLinkedService - from ._models import BinaryDataset - from ._models import BinarySink - from ._models import BinarySource - from ._models import BlobEventsTrigger - from ._models import BlobSink - from ._models import BlobSource - from ._models import BlobTrigger - from ._models import CassandraLinkedService - from ._models import CassandraSource - from ._models import CassandraTableDataset - from ._models import ChainingTrigger - from ._models import 
CommonDataServiceForAppsEntityDataset - from ._models import CommonDataServiceForAppsLinkedService - from ._models import CommonDataServiceForAppsSink - from ._models import CommonDataServiceForAppsSource - from ._models import ConcurLinkedService - from ._models import ConcurObjectDataset - from ._models import ConcurSource - from ._models import ControlActivity - from ._models import CopyActivity - from ._models import CopySink - from ._models import CopySource - from ._models import CosmosDbLinkedService - from ._models import CosmosDbMongoDbApiCollectionDataset - from ._models import CosmosDbMongoDbApiLinkedService - from ._models import CosmosDbMongoDbApiSink - from ._models import CosmosDbMongoDbApiSource - from ._models import CouchbaseLinkedService - from ._models import CouchbaseSource - from ._models import CouchbaseTableDataset - from ._models import CreateLinkedIntegrationRuntimeRequest - from ._models import CreateRunResponse - from ._models import CustomActivity - from ._models import CustomActivityReferenceObject - from ._models import CustomDataset - from ._models import CustomDataSourceLinkedService - from ._models import DatabricksNotebookActivity - from ._models import DatabricksSparkJarActivity - from ._models import DatabricksSparkPythonActivity - from ._models import DataLakeAnalyticsUSQLActivity - from ._models import Dataset - from ._models import DatasetBZip2Compression - from ._models import DatasetCompression - from ._models import DatasetDeflateCompression - from ._models import DatasetFolder - from ._models import DatasetGZipCompression - from ._models import DatasetLocation - from ._models import DatasetReference - from ._models import DatasetResource - from ._models import DatasetStorageFormat - from ._models import DatasetZipDeflateCompression - from ._models import Db2LinkedService - from ._models import Db2Source - from ._models import Db2TableDataset - from ._models import DeleteActivity - from ._models import DelimitedTextDataset - from ._models import DelimitedTextReadSettings - from ._models import DelimitedTextSink - from ._models import DelimitedTextSource - from ._models import DelimitedTextWriteSettings - from ._models import DependencyReference - from ._models import DistcpSettings - from ._models import DocumentDbCollectionDataset - from ._models import DocumentDbCollectionSink - from ._models import DocumentDbCollectionSource - from ._models import DrillLinkedService - from ._models import DrillSource - from ._models import DrillTableDataset - from ._models import DynamicsAXLinkedService - from ._models import DynamicsAXResourceDataset - from ._models import DynamicsAXSource - from ._models import DynamicsCrmEntityDataset - from ._models import DynamicsCrmLinkedService - from ._models import DynamicsCrmSink - from ._models import DynamicsCrmSource - from ._models import DynamicsEntityDataset - from ._models import DynamicsLinkedService - from ._models import DynamicsSink - from ._models import DynamicsSource - from ._models import EloquaLinkedService - from ._models import EloquaObjectDataset - from ._models import EloquaSource - from ._models import EntityReference - from ._models import ExecutePipelineActivity - from ._models import ExecuteSSISPackageActivity - from ._models import ExecutionActivity - from ._models import ExposureControlRequest - from ._models import ExposureControlResponse - from ._models import Expression - from ._models import Factory - from ._models import FactoryGitHubConfiguration - from ._models import FactoryIdentity - 
from ._models import FactoryRepoConfiguration - from ._models import FactoryRepoUpdate - from ._models import FactoryUpdateParameters - from ._models import FactoryVSTSConfiguration - from ._models import FileServerLinkedService - from ._models import FileServerLocation - from ._models import FileServerReadSettings - from ._models import FileServerWriteSettings - from ._models import FileShareDataset - from ._models import FileSystemSink - from ._models import FileSystemSource - from ._models import FilterActivity - from ._models import ForEachActivity - from ._models import FormatReadSettings - from ._models import FormatWriteSettings - from ._models import FtpReadSettings - from ._models import FtpServerLinkedService - from ._models import FtpServerLocation - from ._models import GetMetadataActivity - from ._models import GetSsisObjectMetadataRequest - from ._models import GitHubAccessTokenRequest - from ._models import GitHubAccessTokenResponse - from ._models import GoogleAdWordsLinkedService - from ._models import GoogleAdWordsObjectDataset - from ._models import GoogleAdWordsSource - from ._models import GoogleBigQueryLinkedService - from ._models import GoogleBigQueryObjectDataset - from ._models import GoogleBigQuerySource - from ._models import GreenplumLinkedService - from ._models import GreenplumSource - from ._models import GreenplumTableDataset - from ._models import HBaseLinkedService - from ._models import HBaseObjectDataset - from ._models import HBaseSource - from ._models import HdfsLinkedService - from ._models import HdfsLocation - from ._models import HdfsReadSettings - from ._models import HdfsSource - from ._models import HDInsightHiveActivity - from ._models import HDInsightLinkedService - from ._models import HDInsightMapReduceActivity - from ._models import HDInsightOnDemandLinkedService - from ._models import HDInsightPigActivity - from ._models import HDInsightSparkActivity - from ._models import HDInsightStreamingActivity - from ._models import HiveLinkedService - from ._models import HiveObjectDataset - from ._models import HiveSource - from ._models import HttpDataset - from ._models import HttpLinkedService - from ._models import HttpReadSettings - from ._models import HttpServerLocation - from ._models import HttpSource - from ._models import HubspotLinkedService - from ._models import HubspotObjectDataset - from ._models import HubspotSource - from ._models import IfConditionActivity - from ._models import ImpalaLinkedService - from ._models import ImpalaObjectDataset - from ._models import ImpalaSource - from ._models import InformixLinkedService - from ._models import InformixSink - from ._models import InformixSource - from ._models import InformixTableDataset - from ._models import IntegrationRuntime - from ._models import IntegrationRuntimeAuthKeys - from ._models import IntegrationRuntimeComputeProperties - from ._models import IntegrationRuntimeConnectionInfo - from ._models import IntegrationRuntimeCustomSetupScriptProperties - from ._models import IntegrationRuntimeDataProxyProperties - from ._models import IntegrationRuntimeMonitoringData - from ._models import IntegrationRuntimeNodeIpAddress - from ._models import IntegrationRuntimeNodeMonitoringData - from ._models import IntegrationRuntimeReference - from ._models import IntegrationRuntimeRegenerateKeyParameters - from ._models import IntegrationRuntimeResource - from ._models import IntegrationRuntimeSsisCatalogInfo - from ._models import IntegrationRuntimeSsisProperties - from ._models 
import IntegrationRuntimeStatus - from ._models import IntegrationRuntimeStatusListResponse - from ._models import IntegrationRuntimeStatusResponse - from ._models import IntegrationRuntimeVNetProperties - from ._models import JiraLinkedService - from ._models import JiraObjectDataset - from ._models import JiraSource - from ._models import JsonDataset - from ._models import JsonFormat - from ._models import JsonSink - from ._models import JsonSource - from ._models import JsonWriteSettings - from ._models import LinkedIntegrationRuntime - from ._models import LinkedIntegrationRuntimeKeyAuthorization - from ._models import LinkedIntegrationRuntimeRbacAuthorization - from ._models import LinkedIntegrationRuntimeRequest - from ._models import LinkedIntegrationRuntimeType - from ._models import LinkedService - from ._models import LinkedServiceReference - from ._models import LinkedServiceResource - from ._models import LogStorageSettings - from ._models import LookupActivity - from ._models import MagentoLinkedService - from ._models import MagentoObjectDataset - from ._models import MagentoSource - from ._models import ManagedIntegrationRuntime - from ._models import ManagedIntegrationRuntimeError - from ._models import ManagedIntegrationRuntimeNode - from ._models import ManagedIntegrationRuntimeOperationResult - from ._models import ManagedIntegrationRuntimeStatus - from ._models import MariaDBLinkedService - from ._models import MariaDBSource - from ._models import MariaDBTableDataset - from ._models import MarketoLinkedService - from ._models import MarketoObjectDataset - from ._models import MarketoSource - from ._models import MicrosoftAccessLinkedService - from ._models import MicrosoftAccessSink - from ._models import MicrosoftAccessSource - from ._models import MicrosoftAccessTableDataset - from ._models import MongoDbCollectionDataset - from ._models import MongoDbCursorMethodsProperties - from ._models import MongoDbLinkedService - from ._models import MongoDbSource - from ._models import MongoDbV2CollectionDataset - from ._models import MongoDbV2LinkedService - from ._models import MongoDbV2Source - from ._models import MultiplePipelineTrigger - from ._models import MySqlLinkedService - from ._models import MySqlSource - from ._models import MySqlTableDataset - from ._models import NetezzaLinkedService - from ._models import NetezzaPartitionSettings - from ._models import NetezzaSource - from ._models import NetezzaTableDataset - from ._models import ODataLinkedService - from ._models import ODataResourceDataset - from ._models import ODataSource - from ._models import OdbcLinkedService - from ._models import OdbcSink - from ._models import OdbcSource - from ._models import OdbcTableDataset - from ._models import Office365Dataset - from ._models import Office365LinkedService - from ._models import Office365Source - from ._models import Operation - from ._models import OperationDisplay - from ._models import OperationLogSpecification - from ._models import OperationMetricAvailability - from ._models import OperationMetricDimension - from ._models import OperationMetricSpecification - from ._models import OperationServiceSpecification - from ._models import OracleLinkedService - from ._models import OraclePartitionSettings - from ._models import OracleServiceCloudLinkedService - from ._models import OracleServiceCloudObjectDataset - from ._models import OracleServiceCloudSource - from ._models import OracleSink - from ._models import OracleSource - from ._models import 
OracleTableDataset - from ._models import OrcFormat - from ._models import ParameterSpecification - from ._models import ParquetDataset - from ._models import ParquetFormat - from ._models import ParquetSink - from ._models import ParquetSource - from ._models import PaypalLinkedService - from ._models import PaypalObjectDataset - from ._models import PaypalSource - from ._models import PhoenixLinkedService - from ._models import PhoenixObjectDataset - from ._models import PhoenixSource - from ._models import PipelineFolder - from ._models import PipelineReference - from ._models import PipelineResource - from ._models import PipelineRun - from ._models import PipelineRunInvokedBy - from ._models import PipelineRunsQueryResponse - from ._models import PolybaseSettings - from ._models import PostgreSqlLinkedService - from ._models import PostgreSqlSource - from ._models import PostgreSqlTableDataset - from ._models import PrestoLinkedService - from ._models import PrestoObjectDataset - from ._models import PrestoSource - from ._models import QuickBooksLinkedService - from ._models import QuickBooksObjectDataset - from ._models import QuickBooksSource - from ._models import RecurrenceSchedule - from ._models import RecurrenceScheduleOccurrence - from ._models import RedirectIncompatibleRowSettings - from ._models import RedshiftUnloadSettings - from ._models import RelationalSource - from ._models import RelationalTableDataset - from ._models import RerunTriggerResource - from ._models import RerunTumblingWindowTrigger - from ._models import RerunTumblingWindowTriggerActionParameters - from ._models import Resource - from ._models import ResponsysLinkedService - from ._models import ResponsysObjectDataset - from ._models import ResponsysSource - from ._models import RestResourceDataset - from ._models import RestServiceLinkedService - from ._models import RestSource - from ._models import RetryPolicy - from ._models import RunFilterParameters - from ._models import RunQueryFilter - from ._models import RunQueryOrderBy - from ._models import SalesforceLinkedService - from ._models import SalesforceMarketingCloudLinkedService - from ._models import SalesforceMarketingCloudObjectDataset - from ._models import SalesforceMarketingCloudSource - from ._models import SalesforceObjectDataset - from ._models import SalesforceServiceCloudLinkedService - from ._models import SalesforceServiceCloudObjectDataset - from ._models import SalesforceServiceCloudSink - from ._models import SalesforceServiceCloudSource - from ._models import SalesforceSink - from ._models import SalesforceSource - from ._models import SapBwCubeDataset - from ._models import SapBWLinkedService - from ._models import SapBwSource - from ._models import SapCloudForCustomerLinkedService - from ._models import SapCloudForCustomerResourceDataset - from ._models import SapCloudForCustomerSink - from ._models import SapCloudForCustomerSource - from ._models import SapEccLinkedService - from ._models import SapEccResourceDataset - from ._models import SapEccSource - from ._models import SapHanaLinkedService - from ._models import SapHanaSource - from ._models import SapHanaTableDataset - from ._models import SapOpenHubLinkedService - from ._models import SapOpenHubSource - from ._models import SapOpenHubTableDataset - from ._models import SapTableLinkedService - from ._models import SapTablePartitionSettings - from ._models import SapTableResourceDataset - from ._models import SapTableSource - from ._models import ScheduleTrigger - from 
._models import ScheduleTriggerRecurrence - from ._models import ScriptAction - from ._models import SecretBase - from ._models import SecureString - from ._models import SelfDependencyTumblingWindowTriggerReference - from ._models import SelfHostedIntegrationRuntime - from ._models import SelfHostedIntegrationRuntimeNode - from ._models import SelfHostedIntegrationRuntimeStatus - from ._models import ServiceNowLinkedService - from ._models import ServiceNowObjectDataset - from ._models import ServiceNowSource - from ._models import SetVariableActivity - from ._models import SftpLocation - from ._models import SftpReadSettings - from ._models import SftpServerLinkedService - from ._models import ShopifyLinkedService - from ._models import ShopifyObjectDataset - from ._models import ShopifySource - from ._models import SparkLinkedService - from ._models import SparkObjectDataset - from ._models import SparkSource - from ._models import SqlDWSink - from ._models import SqlDWSource - from ._models import SqlMISink - from ._models import SqlMISource - from ._models import SqlServerLinkedService - from ._models import SqlServerSink - from ._models import SqlServerSource - from ._models import SqlServerStoredProcedureActivity - from ._models import SqlServerTableDataset - from ._models import SqlSink - from ._models import SqlSource - from ._models import SquareLinkedService - from ._models import SquareObjectDataset - from ._models import SquareSource - from ._models import SSISAccessCredential - from ._models import SsisEnvironment - from ._models import SsisEnvironmentReference - from ._models import SSISExecutionCredential - from ._models import SSISExecutionParameter - from ._models import SsisFolder - from ._models import SSISLogLocation - from ._models import SsisObjectMetadata - from ._models import SsisObjectMetadataListResponse - from ._models import SsisObjectMetadataStatusResponse - from ._models import SsisPackage - from ._models import SSISPackageLocation - from ._models import SsisParameter - from ._models import SsisProject - from ._models import SSISPropertyOverride - from ._models import SsisVariable - from ._models import StagingSettings - from ._models import StoredProcedureParameter - from ._models import StoreReadSettings - from ._models import StoreWriteSettings - from ._models import SubResource - from ._models import SybaseLinkedService - from ._models import SybaseSource - from ._models import SybaseTableDataset - from ._models import TeradataLinkedService - from ._models import TeradataPartitionSettings - from ._models import TeradataSource - from ._models import TeradataTableDataset - from ._models import TextFormat - from ._models import Trigger - from ._models import TriggerDependencyReference - from ._models import TriggerPipelineReference - from ._models import TriggerReference - from ._models import TriggerResource - from ._models import TriggerRun - from ._models import TriggerRunsQueryResponse - from ._models import TriggerSubscriptionOperationStatus - from ._models import TumblingWindowTrigger - from ._models import TumblingWindowTriggerDependencyReference - from ._models import UntilActivity - from ._models import UpdateIntegrationRuntimeNodeRequest - from ._models import UpdateIntegrationRuntimeRequest - from ._models import UserAccessPolicy - from ._models import UserProperty - from ._models import ValidationActivity - from ._models import VariableSpecification - from ._models import VerticaLinkedService - from ._models import VerticaSource - from ._models 
import VerticaTableDataset - from ._models import WaitActivity - from ._models import WebActivity - from ._models import WebActivityAuthentication - from ._models import WebAnonymousAuthentication - from ._models import WebBasicAuthentication - from ._models import WebClientCertificateAuthentication - from ._models import WebHookActivity - from ._models import WebLinkedService - from ._models import WebLinkedServiceTypeProperties - from ._models import WebSource - from ._models import WebTableDataset - from ._models import XeroLinkedService - from ._models import XeroObjectDataset - from ._models import XeroSource - from ._models import ZohoLinkedService - from ._models import ZohoObjectDataset - from ._models import ZohoSource -from ._paged_models import DatasetResourcePaged -from ._paged_models import FactoryPaged -from ._paged_models import IntegrationRuntimeResourcePaged -from ._paged_models import LinkedServiceResourcePaged -from ._paged_models import OperationPaged -from ._paged_models import PipelineResourcePaged -from ._paged_models import RerunTriggerResourcePaged -from ._paged_models import TriggerResourcePaged -from ._data_factory_management_client_enums import ( + from .resource import Resource + from .sub_resource import SubResource + from .expression import Expression + from .secure_string import SecureString + from .linked_service_reference import LinkedServiceReference + from .azure_key_vault_secret_reference import AzureKeyVaultSecretReference + from .secret_base import SecretBase + from .factory_identity import FactoryIdentity + from .factory_repo_configuration import FactoryRepoConfiguration + from .factory import Factory + from .integration_runtime import IntegrationRuntime + from .integration_runtime_resource import IntegrationRuntimeResource + from .integration_runtime_reference import IntegrationRuntimeReference + from .integration_runtime_status import IntegrationRuntimeStatus + from .integration_runtime_status_response import IntegrationRuntimeStatusResponse + from .integration_runtime_status_list_response import IntegrationRuntimeStatusListResponse + from .update_integration_runtime_request import UpdateIntegrationRuntimeRequest + from .update_integration_runtime_node_request import UpdateIntegrationRuntimeNodeRequest + from .linked_integration_runtime_request import LinkedIntegrationRuntimeRequest + from .create_linked_integration_runtime_request import CreateLinkedIntegrationRuntimeRequest + from .parameter_specification import ParameterSpecification + from .linked_service import LinkedService + from .linked_service_resource import LinkedServiceResource + from .dataset_folder import DatasetFolder + from .dataset import Dataset + from .dataset_resource import DatasetResource + from .activity_dependency import ActivityDependency + from .user_property import UserProperty + from .activity import Activity + from .variable_specification import VariableSpecification + from .pipeline_folder import PipelineFolder + from .pipeline_resource import PipelineResource + from .trigger import Trigger + from .trigger_resource import TriggerResource + from .create_run_response import CreateRunResponse + from .trigger_subscription_operation_status import TriggerSubscriptionOperationStatus + from .factory_vsts_configuration import FactoryVSTSConfiguration + from .factory_git_hub_configuration import FactoryGitHubConfiguration + from .factory_repo_update import FactoryRepoUpdate + from .git_hub_access_token_request import GitHubAccessTokenRequest + from .git_hub_access_token_response 
import GitHubAccessTokenResponse + from .user_access_policy import UserAccessPolicy + from .access_policy_response import AccessPolicyResponse + from .pipeline_reference import PipelineReference + from .trigger_pipeline_reference import TriggerPipelineReference + from .factory_update_parameters import FactoryUpdateParameters + from .dataset_reference import DatasetReference + from .run_query_filter import RunQueryFilter + from .run_query_order_by import RunQueryOrderBy + from .run_filter_parameters import RunFilterParameters + from .pipeline_run_invoked_by import PipelineRunInvokedBy + from .pipeline_run import PipelineRun + from .pipeline_runs_query_response import PipelineRunsQueryResponse + from .activity_run import ActivityRun + from .activity_runs_query_response import ActivityRunsQueryResponse + from .trigger_run import TriggerRun + from .trigger_runs_query_response import TriggerRunsQueryResponse + from .rerun_tumbling_window_trigger_action_parameters import RerunTumblingWindowTriggerActionParameters + from .rerun_tumbling_window_trigger import RerunTumblingWindowTrigger + from .rerun_trigger_resource import RerunTriggerResource + from .operation_display import OperationDisplay + from .operation_log_specification import OperationLogSpecification + from .operation_metric_availability import OperationMetricAvailability + from .operation_metric_dimension import OperationMetricDimension + from .operation_metric_specification import OperationMetricSpecification + from .operation_service_specification import OperationServiceSpecification + from .operation import Operation + from .get_ssis_object_metadata_request import GetSsisObjectMetadataRequest + from .ssis_object_metadata_status_response import SsisObjectMetadataStatusResponse + from .exposure_control_request import ExposureControlRequest + from .exposure_control_response import ExposureControlResponse + from .self_dependency_tumbling_window_trigger_reference import SelfDependencyTumblingWindowTriggerReference + from .trigger_reference import TriggerReference + from .tumbling_window_trigger_dependency_reference import TumblingWindowTriggerDependencyReference + from .trigger_dependency_reference import TriggerDependencyReference + from .dependency_reference import DependencyReference + from .retry_policy import RetryPolicy + from .tumbling_window_trigger import TumblingWindowTrigger + from .blob_events_trigger import BlobEventsTrigger + from .blob_trigger import BlobTrigger + from .recurrence_schedule_occurrence import RecurrenceScheduleOccurrence + from .recurrence_schedule import RecurrenceSchedule + from .schedule_trigger_recurrence import ScheduleTriggerRecurrence + from .schedule_trigger import ScheduleTrigger + from .multiple_pipeline_trigger import MultiplePipelineTrigger + from .azure_function_linked_service import AzureFunctionLinkedService + from .azure_data_explorer_linked_service import AzureDataExplorerLinkedService + from .sap_table_linked_service import SapTableLinkedService + from .google_ad_words_linked_service import GoogleAdWordsLinkedService + from .oracle_service_cloud_linked_service import OracleServiceCloudLinkedService + from .dynamics_ax_linked_service import DynamicsAXLinkedService + from .responsys_linked_service import ResponsysLinkedService + from .azure_databricks_linked_service import AzureDatabricksLinkedService + from .azure_data_lake_analytics_linked_service import AzureDataLakeAnalyticsLinkedService + from .script_action import ScriptAction + from .hd_insight_on_demand_linked_service import 
HDInsightOnDemandLinkedService + from .salesforce_marketing_cloud_linked_service import SalesforceMarketingCloudLinkedService + from .netezza_linked_service import NetezzaLinkedService + from .vertica_linked_service import VerticaLinkedService + from .zoho_linked_service import ZohoLinkedService + from .xero_linked_service import XeroLinkedService + from .square_linked_service import SquareLinkedService + from .spark_linked_service import SparkLinkedService + from .shopify_linked_service import ShopifyLinkedService + from .service_now_linked_service import ServiceNowLinkedService + from .quick_books_linked_service import QuickBooksLinkedService + from .presto_linked_service import PrestoLinkedService + from .phoenix_linked_service import PhoenixLinkedService + from .paypal_linked_service import PaypalLinkedService + from .marketo_linked_service import MarketoLinkedService + from .azure_maria_db_linked_service import AzureMariaDBLinkedService + from .maria_db_linked_service import MariaDBLinkedService + from .magento_linked_service import MagentoLinkedService + from .jira_linked_service import JiraLinkedService + from .impala_linked_service import ImpalaLinkedService + from .hubspot_linked_service import HubspotLinkedService + from .hive_linked_service import HiveLinkedService + from .hbase_linked_service import HBaseLinkedService + from .greenplum_linked_service import GreenplumLinkedService + from .google_big_query_linked_service import GoogleBigQueryLinkedService + from .eloqua_linked_service import EloquaLinkedService + from .drill_linked_service import DrillLinkedService + from .couchbase_linked_service import CouchbaseLinkedService + from .concur_linked_service import ConcurLinkedService + from .azure_postgre_sql_linked_service import AzurePostgreSqlLinkedService + from .amazon_mws_linked_service import AmazonMWSLinkedService + from .sap_hana_linked_service import SapHanaLinkedService + from .sap_bw_linked_service import SapBWLinkedService + from .sftp_server_linked_service import SftpServerLinkedService + from .ftp_server_linked_service import FtpServerLinkedService + from .http_linked_service import HttpLinkedService + from .azure_search_linked_service import AzureSearchLinkedService + from .custom_data_source_linked_service import CustomDataSourceLinkedService + from .amazon_redshift_linked_service import AmazonRedshiftLinkedService + from .amazon_s3_linked_service import AmazonS3LinkedService + from .rest_service_linked_service import RestServiceLinkedService + from .sap_open_hub_linked_service import SapOpenHubLinkedService + from .sap_ecc_linked_service import SapEccLinkedService + from .sap_cloud_for_customer_linked_service import SapCloudForCustomerLinkedService + from .salesforce_service_cloud_linked_service import SalesforceServiceCloudLinkedService + from .salesforce_linked_service import SalesforceLinkedService + from .office365_linked_service import Office365LinkedService + from .azure_blob_fs_linked_service import AzureBlobFSLinkedService + from .azure_data_lake_store_linked_service import AzureDataLakeStoreLinkedService + from .cosmos_db_mongo_db_api_linked_service import CosmosDbMongoDbApiLinkedService + from .mongo_db_v2_linked_service import MongoDbV2LinkedService + from .mongo_db_linked_service import MongoDbLinkedService + from .cassandra_linked_service import CassandraLinkedService + from .web_client_certificate_authentication import WebClientCertificateAuthentication + from .web_basic_authentication import WebBasicAuthentication + from 
.web_anonymous_authentication import WebAnonymousAuthentication + from .web_linked_service_type_properties import WebLinkedServiceTypeProperties + from .web_linked_service import WebLinkedService + from .odata_linked_service import ODataLinkedService + from .hdfs_linked_service import HdfsLinkedService + from .microsoft_access_linked_service import MicrosoftAccessLinkedService + from .informix_linked_service import InformixLinkedService + from .odbc_linked_service import OdbcLinkedService + from .azure_ml_linked_service import AzureMLLinkedService + from .teradata_linked_service import TeradataLinkedService + from .db2_linked_service import Db2LinkedService + from .sybase_linked_service import SybaseLinkedService + from .postgre_sql_linked_service import PostgreSqlLinkedService + from .my_sql_linked_service import MySqlLinkedService + from .azure_my_sql_linked_service import AzureMySqlLinkedService + from .oracle_linked_service import OracleLinkedService + from .file_server_linked_service import FileServerLinkedService + from .hd_insight_linked_service import HDInsightLinkedService + from .common_data_service_for_apps_linked_service import CommonDataServiceForAppsLinkedService + from .dynamics_crm_linked_service import DynamicsCrmLinkedService + from .dynamics_linked_service import DynamicsLinkedService + from .cosmos_db_linked_service import CosmosDbLinkedService + from .azure_key_vault_linked_service import AzureKeyVaultLinkedService + from .azure_batch_linked_service import AzureBatchLinkedService + from .azure_sql_mi_linked_service import AzureSqlMILinkedService + from .azure_sql_database_linked_service import AzureSqlDatabaseLinkedService + from .sql_server_linked_service import SqlServerLinkedService + from .azure_sql_dw_linked_service import AzureSqlDWLinkedService + from .azure_table_storage_linked_service import AzureTableStorageLinkedService + from .azure_blob_storage_linked_service import AzureBlobStorageLinkedService + from .azure_storage_linked_service import AzureStorageLinkedService + from .google_ad_words_object_dataset import GoogleAdWordsObjectDataset + from .azure_data_explorer_table_dataset import AzureDataExplorerTableDataset + from .oracle_service_cloud_object_dataset import OracleServiceCloudObjectDataset + from .dynamics_ax_resource_dataset import DynamicsAXResourceDataset + from .responsys_object_dataset import ResponsysObjectDataset + from .salesforce_marketing_cloud_object_dataset import SalesforceMarketingCloudObjectDataset + from .vertica_table_dataset import VerticaTableDataset + from .netezza_table_dataset import NetezzaTableDataset + from .zoho_object_dataset import ZohoObjectDataset + from .xero_object_dataset import XeroObjectDataset + from .square_object_dataset import SquareObjectDataset + from .spark_object_dataset import SparkObjectDataset + from .shopify_object_dataset import ShopifyObjectDataset + from .service_now_object_dataset import ServiceNowObjectDataset + from .quick_books_object_dataset import QuickBooksObjectDataset + from .presto_object_dataset import PrestoObjectDataset + from .phoenix_object_dataset import PhoenixObjectDataset + from .paypal_object_dataset import PaypalObjectDataset + from .marketo_object_dataset import MarketoObjectDataset + from .azure_maria_db_table_dataset import AzureMariaDBTableDataset + from .maria_db_table_dataset import MariaDBTableDataset + from .magento_object_dataset import MagentoObjectDataset + from .jira_object_dataset import JiraObjectDataset + from .impala_object_dataset import ImpalaObjectDataset + from 
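The linked-service models imported above all accept credentials either inline or by Key Vault reference. A minimal sketch under that assumption (the vault linked-service name, secret name, and connection string are hypothetical), using only models re-exported from this same package:

from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference, AzureStorageLinkedService,
    LinkedServiceReference, SecureString)

# Inline secret: carried as a SecureString, which the service masks in responses.
inline = AzureStorageLinkedService(
    connection_string=SecureString(
        value="DefaultEndpointsProtocol=https;AccountName=example;..."))

# Key Vault-backed secret: resolved at runtime via a vault linked service.
vaulted = AzureStorageLinkedService(
    connection_string=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name="MyAKVLinkedService"),
        secret_name="storage-conn-string"))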
.hubspot_object_dataset import HubspotObjectDataset + from .hive_object_dataset import HiveObjectDataset + from .hbase_object_dataset import HBaseObjectDataset + from .greenplum_table_dataset import GreenplumTableDataset + from .google_big_query_object_dataset import GoogleBigQueryObjectDataset + from .eloqua_object_dataset import EloquaObjectDataset + from .drill_table_dataset import DrillTableDataset + from .couchbase_table_dataset import CouchbaseTableDataset + from .concur_object_dataset import ConcurObjectDataset + from .azure_postgre_sql_table_dataset import AzurePostgreSqlTableDataset + from .amazon_mws_object_dataset import AmazonMWSObjectDataset + from .dataset_zip_deflate_compression import DatasetZipDeflateCompression + from .dataset_deflate_compression import DatasetDeflateCompression + from .dataset_gzip_compression import DatasetGZipCompression + from .dataset_bzip2_compression import DatasetBZip2Compression + from .dataset_compression import DatasetCompression + from .parquet_format import ParquetFormat + from .orc_format import OrcFormat + from .avro_format import AvroFormat + from .json_format import JsonFormat + from .text_format import TextFormat + from .dataset_storage_format import DatasetStorageFormat + from .http_dataset import HttpDataset + from .azure_search_index_dataset import AzureSearchIndexDataset + from .web_table_dataset import WebTableDataset + from .sap_table_resource_dataset import SapTableResourceDataset + from .rest_resource_dataset import RestResourceDataset + from .sql_server_table_dataset import SqlServerTableDataset + from .sap_open_hub_table_dataset import SapOpenHubTableDataset + from .sap_hana_table_dataset import SapHanaTableDataset + from .sap_ecc_resource_dataset import SapEccResourceDataset + from .sap_cloud_for_customer_resource_dataset import SapCloudForCustomerResourceDataset + from .sap_bw_cube_dataset import SapBwCubeDataset + from .sybase_table_dataset import SybaseTableDataset + from .salesforce_service_cloud_object_dataset import SalesforceServiceCloudObjectDataset + from .salesforce_object_dataset import SalesforceObjectDataset + from .microsoft_access_table_dataset import MicrosoftAccessTableDataset + from .postgre_sql_table_dataset import PostgreSqlTableDataset + from .my_sql_table_dataset import MySqlTableDataset + from .odbc_table_dataset import OdbcTableDataset + from .informix_table_dataset import InformixTableDataset + from .relational_table_dataset import RelationalTableDataset + from .db2_table_dataset import Db2TableDataset + from .amazon_redshift_table_dataset import AmazonRedshiftTableDataset + from .azure_my_sql_table_dataset import AzureMySqlTableDataset + from .teradata_table_dataset import TeradataTableDataset + from .oracle_table_dataset import OracleTableDataset + from .odata_resource_dataset import ODataResourceDataset + from .cosmos_db_mongo_db_api_collection_dataset import CosmosDbMongoDbApiCollectionDataset + from .mongo_db_v2_collection_dataset import MongoDbV2CollectionDataset + from .mongo_db_collection_dataset import MongoDbCollectionDataset + from .file_share_dataset import FileShareDataset + from .office365_dataset import Office365Dataset + from .azure_blob_fs_dataset import AzureBlobFSDataset + from .azure_data_lake_store_dataset import AzureDataLakeStoreDataset + from .common_data_service_for_apps_entity_dataset import CommonDataServiceForAppsEntityDataset + from .dynamics_crm_entity_dataset import DynamicsCrmEntityDataset + from .dynamics_entity_dataset import DynamicsEntityDataset + from 
.document_db_collection_dataset import DocumentDbCollectionDataset + from .custom_dataset import CustomDataset + from .cassandra_table_dataset import CassandraTableDataset + from .azure_sql_dw_table_dataset import AzureSqlDWTableDataset + from .azure_sql_mi_table_dataset import AzureSqlMITableDataset + from .azure_sql_table_dataset import AzureSqlTableDataset + from .azure_table_dataset import AzureTableDataset + from .azure_blob_dataset import AzureBlobDataset + from .hdfs_location import HdfsLocation + from .http_server_location import HttpServerLocation + from .sftp_location import SftpLocation + from .ftp_server_location import FtpServerLocation + from .file_server_location import FileServerLocation + from .amazon_s3_location import AmazonS3Location + from .azure_data_lake_store_location import AzureDataLakeStoreLocation + from .azure_blob_fs_location import AzureBlobFSLocation + from .azure_blob_storage_location import AzureBlobStorageLocation + from .dataset_location import DatasetLocation + from .binary_dataset import BinaryDataset + from .json_dataset import JsonDataset + from .delimited_text_dataset import DelimitedTextDataset + from .parquet_dataset import ParquetDataset + from .avro_dataset import AvroDataset + from .amazon_s3_dataset import AmazonS3Dataset + from .activity_policy import ActivityPolicy + from .azure_function_activity import AzureFunctionActivity + from .databricks_spark_python_activity import DatabricksSparkPythonActivity + from .databricks_spark_jar_activity import DatabricksSparkJarActivity + from .databricks_notebook_activity import DatabricksNotebookActivity + from .data_lake_analytics_usql_activity import DataLakeAnalyticsUSQLActivity + from .azure_ml_update_resource_activity import AzureMLUpdateResourceActivity + from .azure_ml_web_service_file import AzureMLWebServiceFile + from .azure_ml_batch_execution_activity import AzureMLBatchExecutionActivity + from .get_metadata_activity import GetMetadataActivity + from .web_activity_authentication import WebActivityAuthentication + from .web_activity import WebActivity + from .redshift_unload_settings import RedshiftUnloadSettings + from .amazon_redshift_source import AmazonRedshiftSource + from .google_ad_words_source import GoogleAdWordsSource + from .oracle_service_cloud_source import OracleServiceCloudSource + from .dynamics_ax_source import DynamicsAXSource + from .responsys_source import ResponsysSource + from .salesforce_marketing_cloud_source import SalesforceMarketingCloudSource + from .vertica_source import VerticaSource + from .netezza_partition_settings import NetezzaPartitionSettings + from .netezza_source import NetezzaSource + from .zoho_source import ZohoSource + from .xero_source import XeroSource + from .square_source import SquareSource + from .spark_source import SparkSource + from .shopify_source import ShopifySource + from .service_now_source import ServiceNowSource + from .quick_books_source import QuickBooksSource + from .presto_source import PrestoSource + from .phoenix_source import PhoenixSource + from .paypal_source import PaypalSource + from .marketo_source import MarketoSource + from .azure_maria_db_source import AzureMariaDBSource + from .maria_db_source import MariaDBSource + from .magento_source import MagentoSource + from .jira_source import JiraSource + from .impala_source import ImpalaSource + from .hubspot_source import HubspotSource + from .hive_source import HiveSource + from .hbase_source import HBaseSource + from .greenplum_source import GreenplumSource + from 
.google_big_query_source import GoogleBigQuerySource + from .eloqua_source import EloquaSource + from .drill_source import DrillSource + from .couchbase_source import CouchbaseSource + from .concur_source import ConcurSource + from .azure_postgre_sql_source import AzurePostgreSqlSource + from .amazon_mws_source import AmazonMWSSource + from .http_source import HttpSource + from .azure_blob_fs_source import AzureBlobFSSource + from .azure_data_lake_store_source import AzureDataLakeStoreSource + from .office365_source import Office365Source + from .mongo_db_cursor_methods_properties import MongoDbCursorMethodsProperties + from .cosmos_db_mongo_db_api_source import CosmosDbMongoDbApiSource + from .mongo_db_v2_source import MongoDbV2Source + from .mongo_db_source import MongoDbSource + from .cassandra_source import CassandraSource + from .web_source import WebSource + from .teradata_partition_settings import TeradataPartitionSettings + from .teradata_source import TeradataSource + from .oracle_partition_settings import OraclePartitionSettings + from .oracle_source import OracleSource + from .azure_data_explorer_source import AzureDataExplorerSource + from .azure_my_sql_source import AzureMySqlSource + from .distcp_settings import DistcpSettings + from .hdfs_source import HdfsSource + from .file_system_source import FileSystemSource + from .sql_dw_source import SqlDWSource + from .stored_procedure_parameter import StoredProcedureParameter + from .sql_mi_source import SqlMISource + from .azure_sql_source import AzureSqlSource + from .sql_server_source import SqlServerSource + from .sql_source import SqlSource + from .rest_source import RestSource + from .sap_table_partition_settings import SapTablePartitionSettings + from .sap_table_source import SapTableSource + from .sap_open_hub_source import SapOpenHubSource + from .sap_hana_source import SapHanaSource + from .sap_ecc_source import SapEccSource + from .sap_cloud_for_customer_source import SapCloudForCustomerSource + from .salesforce_service_cloud_source import SalesforceServiceCloudSource + from .salesforce_source import SalesforceSource + from .odata_source import ODataSource + from .sap_bw_source import SapBwSource + from .sybase_source import SybaseSource + from .postgre_sql_source import PostgreSqlSource + from .my_sql_source import MySqlSource + from .odbc_source import OdbcSource + from .db2_source import Db2Source + from .microsoft_access_source import MicrosoftAccessSource + from .informix_source import InformixSource + from .relational_source import RelationalSource + from .common_data_service_for_apps_source import CommonDataServiceForAppsSource + from .dynamics_crm_source import DynamicsCrmSource + from .dynamics_source import DynamicsSource + from .document_db_collection_source import DocumentDbCollectionSource + from .blob_source import BlobSource + from .azure_table_source import AzureTableSource + from .hdfs_read_settings import HdfsReadSettings + from .http_read_settings import HttpReadSettings + from .sftp_read_settings import SftpReadSettings + from .ftp_read_settings import FtpReadSettings + from .file_server_read_settings import FileServerReadSettings + from .amazon_s3_read_settings import AmazonS3ReadSettings + from .azure_data_lake_store_read_settings import AzureDataLakeStoreReadSettings + from .azure_blob_fs_read_settings import AzureBlobFSReadSettings + from .azure_blob_storage_read_settings import AzureBlobStorageReadSettings + from .store_read_settings import StoreReadSettings + from .binary_source import 
BinarySource + from .json_source import JsonSource + from .format_read_settings import FormatReadSettings + from .delimited_text_read_settings import DelimitedTextReadSettings + from .delimited_text_source import DelimitedTextSource + from .parquet_source import ParquetSource + from .avro_source import AvroSource + from .copy_source import CopySource + from .lookup_activity import LookupActivity + from .azure_data_explorer_command_activity import AzureDataExplorerCommandActivity + from .log_storage_settings import LogStorageSettings + from .delete_activity import DeleteActivity + from .sql_server_stored_procedure_activity import SqlServerStoredProcedureActivity + from .custom_activity_reference_object import CustomActivityReferenceObject + from .custom_activity import CustomActivity + from .ssis_access_credential import SSISAccessCredential + from .ssis_log_location import SSISLogLocation + from .ssis_property_override import SSISPropertyOverride + from .ssis_execution_parameter import SSISExecutionParameter + from .ssis_execution_credential import SSISExecutionCredential + from .ssis_package_location import SSISPackageLocation + from .execute_ssis_package_activity import ExecuteSSISPackageActivity + from .hd_insight_spark_activity import HDInsightSparkActivity + from .hd_insight_streaming_activity import HDInsightStreamingActivity + from .hd_insight_map_reduce_activity import HDInsightMapReduceActivity + from .hd_insight_pig_activity import HDInsightPigActivity + from .hd_insight_hive_activity import HDInsightHiveActivity + from .redirect_incompatible_row_settings import RedirectIncompatibleRowSettings + from .staging_settings import StagingSettings + from .cosmos_db_mongo_db_api_sink import CosmosDbMongoDbApiSink + from .salesforce_service_cloud_sink import SalesforceServiceCloudSink + from .salesforce_sink import SalesforceSink + from .azure_data_explorer_sink import AzureDataExplorerSink + from .common_data_service_for_apps_sink import CommonDataServiceForAppsSink + from .dynamics_crm_sink import DynamicsCrmSink + from .dynamics_sink import DynamicsSink + from .microsoft_access_sink import MicrosoftAccessSink + from .informix_sink import InformixSink + from .odbc_sink import OdbcSink + from .azure_search_index_sink import AzureSearchIndexSink + from .azure_blob_fs_sink import AzureBlobFSSink + from .azure_data_lake_store_sink import AzureDataLakeStoreSink + from .oracle_sink import OracleSink + from .polybase_settings import PolybaseSettings + from .sql_dw_sink import SqlDWSink + from .sql_mi_sink import SqlMISink + from .azure_sql_sink import AzureSqlSink + from .sql_server_sink import SqlServerSink + from .sql_sink import SqlSink + from .document_db_collection_sink import DocumentDbCollectionSink + from .file_system_sink import FileSystemSink + from .blob_sink import BlobSink + from .file_server_write_settings import FileServerWriteSettings + from .azure_data_lake_store_write_settings import AzureDataLakeStoreWriteSettings + from .azure_blob_fs_write_settings import AzureBlobFSWriteSettings + from .azure_blob_storage_write_settings import AzureBlobStorageWriteSettings + from .store_write_settings import StoreWriteSettings + from .binary_sink import BinarySink + from .parquet_sink import ParquetSink + from .json_write_settings import JsonWriteSettings + from .delimited_text_write_settings import DelimitedTextWriteSettings + from .format_write_settings import FormatWriteSettings + from .avro_write_settings import AvroWriteSettings + from .avro_sink import AvroSink + from 
.azure_table_sink import AzureTableSink + from .azure_queue_sink import AzureQueueSink + from .sap_cloud_for_customer_sink import SapCloudForCustomerSink + from .azure_my_sql_sink import AzureMySqlSink + from .azure_postgre_sql_sink import AzurePostgreSqlSink + from .json_sink import JsonSink + from .delimited_text_sink import DelimitedTextSink + from .copy_sink import CopySink + from .copy_activity import CopyActivity + from .execution_activity import ExecutionActivity + from .web_hook_activity import WebHookActivity + from .append_variable_activity import AppendVariableActivity + from .set_variable_activity import SetVariableActivity + from .filter_activity import FilterActivity + from .validation_activity import ValidationActivity + from .until_activity import UntilActivity + from .wait_activity import WaitActivity + from .for_each_activity import ForEachActivity + from .if_condition_activity import IfConditionActivity + from .execute_pipeline_activity import ExecutePipelineActivity + from .control_activity import ControlActivity + from .linked_integration_runtime import LinkedIntegrationRuntime + from .self_hosted_integration_runtime_node import SelfHostedIntegrationRuntimeNode + from .self_hosted_integration_runtime_status import SelfHostedIntegrationRuntimeStatus + from .managed_integration_runtime_operation_result import ManagedIntegrationRuntimeOperationResult + from .managed_integration_runtime_error import ManagedIntegrationRuntimeError + from .managed_integration_runtime_node import ManagedIntegrationRuntimeNode + from .managed_integration_runtime_status import ManagedIntegrationRuntimeStatus + from .linked_integration_runtime_rbac_authorization import LinkedIntegrationRuntimeRbacAuthorization + from .linked_integration_runtime_key_authorization import LinkedIntegrationRuntimeKeyAuthorization + from .linked_integration_runtime_type import LinkedIntegrationRuntimeType + from .self_hosted_integration_runtime import SelfHostedIntegrationRuntime + from .entity_reference import EntityReference + from .integration_runtime_data_proxy_properties import IntegrationRuntimeDataProxyProperties + from .integration_runtime_custom_setup_script_properties import IntegrationRuntimeCustomSetupScriptProperties + from .integration_runtime_ssis_catalog_info import IntegrationRuntimeSsisCatalogInfo + from .integration_runtime_ssis_properties import IntegrationRuntimeSsisProperties + from .integration_runtime_vnet_properties import IntegrationRuntimeVNetProperties + from .integration_runtime_compute_properties import IntegrationRuntimeComputeProperties + from .managed_integration_runtime import ManagedIntegrationRuntime + from .integration_runtime_node_ip_address import IntegrationRuntimeNodeIpAddress + from .ssis_variable import SsisVariable + from .ssis_environment import SsisEnvironment + from .ssis_parameter import SsisParameter + from .ssis_package import SsisPackage + from .ssis_environment_reference import SsisEnvironmentReference + from .ssis_project import SsisProject + from .ssis_folder import SsisFolder + from .ssis_object_metadata import SsisObjectMetadata + from .ssis_object_metadata_list_response import SsisObjectMetadataListResponse + from .integration_runtime_node_monitoring_data import IntegrationRuntimeNodeMonitoringData + from .integration_runtime_monitoring_data import IntegrationRuntimeMonitoringData + from .integration_runtime_auth_keys import IntegrationRuntimeAuthKeys + from .integration_runtime_regenerate_key_parameters import IntegrationRuntimeRegenerateKeyParameters + from 
.integration_runtime_connection_info import IntegrationRuntimeConnectionInfo +from .operation_paged import OperationPaged +from .factory_paged import FactoryPaged +from .integration_runtime_resource_paged import IntegrationRuntimeResourcePaged +from .linked_service_resource_paged import LinkedServiceResourcePaged +from .dataset_resource_paged import DatasetResourcePaged +from .pipeline_resource_paged import PipelineResourcePaged +from .trigger_resource_paged import TriggerResourcePaged +from .rerun_trigger_resource_paged import RerunTriggerResourcePaged +from .data_factory_management_client_enums import ( IntegrationRuntimeState, IntegrationRuntimeAutoUpdate, ParameterType, @@ -1127,522 +1125,521 @@ ) __all__ = [ - 'AccessPolicyResponse', - 'Activity', + 'Resource', + 'SubResource', + 'Expression', + 'SecureString', + 'LinkedServiceReference', + 'AzureKeyVaultSecretReference', + 'SecretBase', + 'FactoryIdentity', + 'FactoryRepoConfiguration', + 'Factory', + 'IntegrationRuntime', + 'IntegrationRuntimeResource', + 'IntegrationRuntimeReference', + 'IntegrationRuntimeStatus', + 'IntegrationRuntimeStatusResponse', + 'IntegrationRuntimeStatusListResponse', + 'UpdateIntegrationRuntimeRequest', + 'UpdateIntegrationRuntimeNodeRequest', + 'LinkedIntegrationRuntimeRequest', + 'CreateLinkedIntegrationRuntimeRequest', + 'ParameterSpecification', + 'LinkedService', + 'LinkedServiceResource', + 'DatasetFolder', + 'Dataset', + 'DatasetResource', 'ActivityDependency', - 'ActivityPolicy', + 'UserProperty', + 'Activity', + 'VariableSpecification', + 'PipelineFolder', + 'PipelineResource', + 'Trigger', + 'TriggerResource', + 'CreateRunResponse', + 'TriggerSubscriptionOperationStatus', + 'FactoryVSTSConfiguration', + 'FactoryGitHubConfiguration', + 'FactoryRepoUpdate', + 'GitHubAccessTokenRequest', + 'GitHubAccessTokenResponse', + 'UserAccessPolicy', + 'AccessPolicyResponse', + 'PipelineReference', + 'TriggerPipelineReference', + 'FactoryUpdateParameters', + 'DatasetReference', + 'RunQueryFilter', + 'RunQueryOrderBy', + 'RunFilterParameters', + 'PipelineRunInvokedBy', + 'PipelineRun', + 'PipelineRunsQueryResponse', 'ActivityRun', 'ActivityRunsQueryResponse', + 'TriggerRun', + 'TriggerRunsQueryResponse', + 'RerunTumblingWindowTriggerActionParameters', + 'RerunTumblingWindowTrigger', + 'RerunTriggerResource', + 'OperationDisplay', + 'OperationLogSpecification', + 'OperationMetricAvailability', + 'OperationMetricDimension', + 'OperationMetricSpecification', + 'OperationServiceSpecification', + 'Operation', + 'GetSsisObjectMetadataRequest', + 'SsisObjectMetadataStatusResponse', + 'ExposureControlRequest', + 'ExposureControlResponse', + 'SelfDependencyTumblingWindowTriggerReference', + 'TriggerReference', + 'TumblingWindowTriggerDependencyReference', + 'TriggerDependencyReference', + 'DependencyReference', + 'RetryPolicy', + 'TumblingWindowTrigger', + 'BlobEventsTrigger', + 'BlobTrigger', + 'RecurrenceScheduleOccurrence', + 'RecurrenceSchedule', + 'ScheduleTriggerRecurrence', + 'ScheduleTrigger', + 'MultiplePipelineTrigger', + 'AzureFunctionLinkedService', + 'AzureDataExplorerLinkedService', + 'SapTableLinkedService', + 'GoogleAdWordsLinkedService', + 'OracleServiceCloudLinkedService', + 'DynamicsAXLinkedService', + 'ResponsysLinkedService', + 'AzureDatabricksLinkedService', + 'AzureDataLakeAnalyticsLinkedService', + 'ScriptAction', + 'HDInsightOnDemandLinkedService', + 'SalesforceMarketingCloudLinkedService', + 'NetezzaLinkedService', + 'VerticaLinkedService', + 'ZohoLinkedService', + 'XeroLinkedService', + 
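The re-layout above (one module per model instead of the aggregate _models.py, plus per-file paged models and the renamed enums module) does not change the package's public surface: every name is still re-exported from azure.mgmt.datafactory.models. A minimal consumption sketch, assuming credentials and subscription_id already exist in the caller's scope (both hypothetical here); factories.list() returns the FactoryPaged model imported above, which is directly iterable:

from azure.mgmt.datafactory import DataFactoryManagementClient

client = DataFactoryManagementClient(credentials, subscription_id)

# Iterate every factory in the subscription; paging is handled by FactoryPaged.
for factory in client.factories.list():
    print(factory.name)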
'SquareLinkedService', + 'SparkLinkedService', + 'ShopifyLinkedService', + 'ServiceNowLinkedService', + 'QuickBooksLinkedService', + 'PrestoLinkedService', + 'PhoenixLinkedService', + 'PaypalLinkedService', + 'MarketoLinkedService', + 'AzureMariaDBLinkedService', + 'MariaDBLinkedService', + 'MagentoLinkedService', + 'JiraLinkedService', + 'ImpalaLinkedService', + 'HubspotLinkedService', + 'HiveLinkedService', + 'HBaseLinkedService', + 'GreenplumLinkedService', + 'GoogleBigQueryLinkedService', + 'EloquaLinkedService', + 'DrillLinkedService', + 'CouchbaseLinkedService', + 'ConcurLinkedService', + 'AzurePostgreSqlLinkedService', 'AmazonMWSLinkedService', - 'AmazonMWSObjectDataset', - 'AmazonMWSSource', + 'SapHanaLinkedService', + 'SapBWLinkedService', + 'SftpServerLinkedService', + 'FtpServerLinkedService', + 'HttpLinkedService', + 'AzureSearchLinkedService', + 'CustomDataSourceLinkedService', 'AmazonRedshiftLinkedService', - 'AmazonRedshiftSource', - 'AmazonRedshiftTableDataset', - 'AmazonS3Dataset', 'AmazonS3LinkedService', - 'AmazonS3Location', - 'AmazonS3ReadSettings', - 'AppendVariableActivity', - 'AvroDataset', - 'AvroFormat', - 'AvroSink', - 'AvroSource', - 'AvroWriteSettings', - 'AzureBatchLinkedService', - 'AzureBlobDataset', - 'AzureBlobFSDataset', + 'RestServiceLinkedService', + 'SapOpenHubLinkedService', + 'SapEccLinkedService', + 'SapCloudForCustomerLinkedService', + 'SalesforceServiceCloudLinkedService', + 'SalesforceLinkedService', + 'Office365LinkedService', 'AzureBlobFSLinkedService', - 'AzureBlobFSLocation', - 'AzureBlobFSReadSettings', - 'AzureBlobFSSink', - 'AzureBlobFSSource', - 'AzureBlobFSWriteSettings', - 'AzureBlobStorageLinkedService', - 'AzureBlobStorageLocation', - 'AzureBlobStorageReadSettings', - 'AzureBlobStorageWriteSettings', - 'AzureDatabricksLinkedService', - 'AzureDataExplorerCommandActivity', - 'AzureDataExplorerLinkedService', - 'AzureDataExplorerSink', - 'AzureDataExplorerSource', - 'AzureDataExplorerTableDataset', - 'AzureDataLakeAnalyticsLinkedService', - 'AzureDataLakeStoreDataset', 'AzureDataLakeStoreLinkedService', - 'AzureDataLakeStoreLocation', - 'AzureDataLakeStoreReadSettings', - 'AzureDataLakeStoreSink', - 'AzureDataLakeStoreSource', - 'AzureDataLakeStoreWriteSettings', - 'AzureFunctionActivity', - 'AzureFunctionLinkedService', - 'AzureKeyVaultLinkedService', - 'AzureKeyVaultSecretReference', - 'AzureMariaDBLinkedService', - 'AzureMariaDBSource', - 'AzureMariaDBTableDataset', - 'AzureMLBatchExecutionActivity', + 'CosmosDbMongoDbApiLinkedService', + 'MongoDbV2LinkedService', + 'MongoDbLinkedService', + 'CassandraLinkedService', + 'WebClientCertificateAuthentication', + 'WebBasicAuthentication', + 'WebAnonymousAuthentication', + 'WebLinkedServiceTypeProperties', + 'WebLinkedService', + 'ODataLinkedService', + 'HdfsLinkedService', + 'MicrosoftAccessLinkedService', + 'InformixLinkedService', + 'OdbcLinkedService', 'AzureMLLinkedService', - 'AzureMLUpdateResourceActivity', - 'AzureMLWebServiceFile', + 'TeradataLinkedService', + 'Db2LinkedService', + 'SybaseLinkedService', + 'PostgreSqlLinkedService', + 'MySqlLinkedService', 'AzureMySqlLinkedService', - 'AzureMySqlSink', - 'AzureMySqlSource', - 'AzureMySqlTableDataset', - 'AzurePostgreSqlLinkedService', - 'AzurePostgreSqlSink', - 'AzurePostgreSqlSource', - 'AzurePostgreSqlTableDataset', - 'AzureQueueSink', - 'AzureSearchIndexDataset', - 'AzureSearchIndexSink', - 'AzureSearchLinkedService', + 'OracleLinkedService', + 'FileServerLinkedService', + 'HDInsightLinkedService', + 
'CommonDataServiceForAppsLinkedService', + 'DynamicsCrmLinkedService', + 'DynamicsLinkedService', + 'CosmosDbLinkedService', + 'AzureKeyVaultLinkedService', + 'AzureBatchLinkedService', + 'AzureSqlMILinkedService', 'AzureSqlDatabaseLinkedService', + 'SqlServerLinkedService', 'AzureSqlDWLinkedService', - 'AzureSqlDWTableDataset', - 'AzureSqlMILinkedService', - 'AzureSqlMITableDataset', - 'AzureSqlSink', - 'AzureSqlSource', - 'AzureSqlTableDataset', - 'AzureStorageLinkedService', - 'AzureTableDataset', - 'AzureTableSink', - 'AzureTableSource', 'AzureTableStorageLinkedService', - 'BinaryDataset', - 'BinarySink', - 'BinarySource', - 'BlobEventsTrigger', - 'BlobSink', - 'BlobSource', - 'BlobTrigger', - 'CassandraLinkedService', - 'CassandraSource', - 'CassandraTableDataset', - 'ChainingTrigger', - 'CommonDataServiceForAppsEntityDataset', - 'CommonDataServiceForAppsLinkedService', - 'CommonDataServiceForAppsSink', - 'CommonDataServiceForAppsSource', - 'ConcurLinkedService', - 'ConcurObjectDataset', - 'ConcurSource', - 'ControlActivity', - 'CopyActivity', - 'CopySink', - 'CopySource', - 'CosmosDbLinkedService', - 'CosmosDbMongoDbApiCollectionDataset', - 'CosmosDbMongoDbApiLinkedService', - 'CosmosDbMongoDbApiSink', - 'CosmosDbMongoDbApiSource', - 'CouchbaseLinkedService', - 'CouchbaseSource', + 'AzureBlobStorageLinkedService', + 'AzureStorageLinkedService', + 'GoogleAdWordsObjectDataset', + 'AzureDataExplorerTableDataset', + 'OracleServiceCloudObjectDataset', + 'DynamicsAXResourceDataset', + 'ResponsysObjectDataset', + 'SalesforceMarketingCloudObjectDataset', + 'VerticaTableDataset', + 'NetezzaTableDataset', + 'ZohoObjectDataset', + 'XeroObjectDataset', + 'SquareObjectDataset', + 'SparkObjectDataset', + 'ShopifyObjectDataset', + 'ServiceNowObjectDataset', + 'QuickBooksObjectDataset', + 'PrestoObjectDataset', + 'PhoenixObjectDataset', + 'PaypalObjectDataset', + 'MarketoObjectDataset', + 'AzureMariaDBTableDataset', + 'MariaDBTableDataset', + 'MagentoObjectDataset', + 'JiraObjectDataset', + 'ImpalaObjectDataset', + 'HubspotObjectDataset', + 'HiveObjectDataset', + 'HBaseObjectDataset', + 'GreenplumTableDataset', + 'GoogleBigQueryObjectDataset', + 'EloquaObjectDataset', + 'DrillTableDataset', 'CouchbaseTableDataset', - 'CreateLinkedIntegrationRuntimeRequest', - 'CreateRunResponse', - 'CustomActivity', - 'CustomActivityReferenceObject', - 'CustomDataset', - 'CustomDataSourceLinkedService', - 'DatabricksNotebookActivity', - 'DatabricksSparkJarActivity', - 'DatabricksSparkPythonActivity', - 'DataLakeAnalyticsUSQLActivity', - 'Dataset', - 'DatasetBZip2Compression', - 'DatasetCompression', + 'ConcurObjectDataset', + 'AzurePostgreSqlTableDataset', + 'AmazonMWSObjectDataset', + 'DatasetZipDeflateCompression', 'DatasetDeflateCompression', - 'DatasetFolder', 'DatasetGZipCompression', - 'DatasetLocation', - 'DatasetReference', - 'DatasetResource', + 'DatasetBZip2Compression', + 'DatasetCompression', + 'ParquetFormat', + 'OrcFormat', + 'AvroFormat', + 'JsonFormat', + 'TextFormat', 'DatasetStorageFormat', - 'DatasetZipDeflateCompression', - 'Db2LinkedService', - 'Db2Source', + 'HttpDataset', + 'AzureSearchIndexDataset', + 'WebTableDataset', + 'SapTableResourceDataset', + 'RestResourceDataset', + 'SqlServerTableDataset', + 'SapOpenHubTableDataset', + 'SapHanaTableDataset', + 'SapEccResourceDataset', + 'SapCloudForCustomerResourceDataset', + 'SapBwCubeDataset', + 'SybaseTableDataset', + 'SalesforceServiceCloudObjectDataset', + 'SalesforceObjectDataset', + 'MicrosoftAccessTableDataset', + 'PostgreSqlTableDataset', + 
'MySqlTableDataset', + 'OdbcTableDataset', + 'InformixTableDataset', + 'RelationalTableDataset', 'Db2TableDataset', - 'DeleteActivity', - 'DelimitedTextDataset', - 'DelimitedTextReadSettings', - 'DelimitedTextSink', - 'DelimitedTextSource', - 'DelimitedTextWriteSettings', - 'DependencyReference', - 'DistcpSettings', - 'DocumentDbCollectionDataset', - 'DocumentDbCollectionSink', - 'DocumentDbCollectionSource', - 'DrillLinkedService', - 'DrillSource', - 'DrillTableDataset', - 'DynamicsAXLinkedService', - 'DynamicsAXResourceDataset', - 'DynamicsAXSource', + 'AmazonRedshiftTableDataset', + 'AzureMySqlTableDataset', + 'TeradataTableDataset', + 'OracleTableDataset', + 'ODataResourceDataset', + 'CosmosDbMongoDbApiCollectionDataset', + 'MongoDbV2CollectionDataset', + 'MongoDbCollectionDataset', + 'FileShareDataset', + 'Office365Dataset', + 'AzureBlobFSDataset', + 'AzureDataLakeStoreDataset', + 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntityDataset', - 'DynamicsCrmLinkedService', - 'DynamicsCrmSink', - 'DynamicsCrmSource', 'DynamicsEntityDataset', - 'DynamicsLinkedService', - 'DynamicsSink', - 'DynamicsSource', - 'EloquaLinkedService', - 'EloquaObjectDataset', - 'EloquaSource', - 'EntityReference', - 'ExecutePipelineActivity', - 'ExecuteSSISPackageActivity', - 'ExecutionActivity', - 'ExposureControlRequest', - 'ExposureControlResponse', - 'Expression', - 'Factory', - 'FactoryGitHubConfiguration', - 'FactoryIdentity', - 'FactoryRepoConfiguration', - 'FactoryRepoUpdate', - 'FactoryUpdateParameters', - 'FactoryVSTSConfiguration', - 'FileServerLinkedService', - 'FileServerLocation', - 'FileServerReadSettings', - 'FileServerWriteSettings', - 'FileShareDataset', - 'FileSystemSink', - 'FileSystemSource', - 'FilterActivity', - 'ForEachActivity', - 'FormatReadSettings', - 'FormatWriteSettings', - 'FtpReadSettings', - 'FtpServerLinkedService', + 'DocumentDbCollectionDataset', + 'CustomDataset', + 'CassandraTableDataset', + 'AzureSqlDWTableDataset', + 'AzureSqlMITableDataset', + 'AzureSqlTableDataset', + 'AzureTableDataset', + 'AzureBlobDataset', + 'HdfsLocation', + 'HttpServerLocation', + 'SftpLocation', 'FtpServerLocation', + 'FileServerLocation', + 'AmazonS3Location', + 'AzureDataLakeStoreLocation', + 'AzureBlobFSLocation', + 'AzureBlobStorageLocation', + 'DatasetLocation', + 'BinaryDataset', + 'JsonDataset', + 'DelimitedTextDataset', + 'ParquetDataset', + 'AvroDataset', + 'AmazonS3Dataset', + 'ActivityPolicy', + 'AzureFunctionActivity', + 'DatabricksSparkPythonActivity', + 'DatabricksSparkJarActivity', + 'DatabricksNotebookActivity', + 'DataLakeAnalyticsUSQLActivity', + 'AzureMLUpdateResourceActivity', + 'AzureMLWebServiceFile', + 'AzureMLBatchExecutionActivity', 'GetMetadataActivity', - 'GetSsisObjectMetadataRequest', - 'GitHubAccessTokenRequest', - 'GitHubAccessTokenResponse', - 'GoogleAdWordsLinkedService', - 'GoogleAdWordsObjectDataset', + 'WebActivityAuthentication', + 'WebActivity', + 'RedshiftUnloadSettings', + 'AmazonRedshiftSource', 'GoogleAdWordsSource', - 'GoogleBigQueryLinkedService', - 'GoogleBigQueryObjectDataset', - 'GoogleBigQuerySource', - 'GreenplumLinkedService', - 'GreenplumSource', - 'GreenplumTableDataset', - 'HBaseLinkedService', - 'HBaseObjectDataset', - 'HBaseSource', - 'HdfsLinkedService', - 'HdfsLocation', - 'HdfsReadSettings', - 'HdfsSource', - 'HDInsightHiveActivity', - 'HDInsightLinkedService', - 'HDInsightMapReduceActivity', - 'HDInsightOnDemandLinkedService', - 'HDInsightPigActivity', - 'HDInsightSparkActivity', - 'HDInsightStreamingActivity', - 
'HiveLinkedService', - 'HiveObjectDataset', + 'OracleServiceCloudSource', + 'DynamicsAXSource', + 'ResponsysSource', + 'SalesforceMarketingCloudSource', + 'VerticaSource', + 'NetezzaPartitionSettings', + 'NetezzaSource', + 'ZohoSource', + 'XeroSource', + 'SquareSource', + 'SparkSource', + 'ShopifySource', + 'ServiceNowSource', + 'QuickBooksSource', + 'PrestoSource', + 'PhoenixSource', + 'PaypalSource', + 'MarketoSource', + 'AzureMariaDBSource', + 'MariaDBSource', + 'MagentoSource', + 'JiraSource', + 'ImpalaSource', + 'HubspotSource', 'HiveSource', - 'HttpDataset', - 'HttpLinkedService', - 'HttpReadSettings', - 'HttpServerLocation', + 'HBaseSource', + 'GreenplumSource', + 'GoogleBigQuerySource', + 'EloquaSource', + 'DrillSource', + 'CouchbaseSource', + 'ConcurSource', + 'AzurePostgreSqlSource', + 'AmazonMWSSource', 'HttpSource', - 'HubspotLinkedService', - 'HubspotObjectDataset', - 'HubspotSource', - 'IfConditionActivity', - 'ImpalaLinkedService', - 'ImpalaObjectDataset', - 'ImpalaSource', - 'InformixLinkedService', - 'InformixSink', - 'InformixSource', - 'InformixTableDataset', - 'IntegrationRuntime', - 'IntegrationRuntimeAuthKeys', - 'IntegrationRuntimeComputeProperties', - 'IntegrationRuntimeConnectionInfo', - 'IntegrationRuntimeCustomSetupScriptProperties', - 'IntegrationRuntimeDataProxyProperties', - 'IntegrationRuntimeMonitoringData', - 'IntegrationRuntimeNodeIpAddress', - 'IntegrationRuntimeNodeMonitoringData', - 'IntegrationRuntimeReference', - 'IntegrationRuntimeRegenerateKeyParameters', - 'IntegrationRuntimeResource', - 'IntegrationRuntimeSsisCatalogInfo', - 'IntegrationRuntimeSsisProperties', - 'IntegrationRuntimeStatus', - 'IntegrationRuntimeStatusListResponse', - 'IntegrationRuntimeStatusResponse', - 'IntegrationRuntimeVNetProperties', - 'JiraLinkedService', - 'JiraObjectDataset', - 'JiraSource', - 'JsonDataset', - 'JsonFormat', - 'JsonSink', - 'JsonSource', - 'JsonWriteSettings', - 'LinkedIntegrationRuntime', - 'LinkedIntegrationRuntimeKeyAuthorization', - 'LinkedIntegrationRuntimeRbacAuthorization', - 'LinkedIntegrationRuntimeRequest', - 'LinkedIntegrationRuntimeType', - 'LinkedService', - 'LinkedServiceReference', - 'LinkedServiceResource', - 'LogStorageSettings', - 'LookupActivity', - 'MagentoLinkedService', - 'MagentoObjectDataset', - 'MagentoSource', - 'ManagedIntegrationRuntime', - 'ManagedIntegrationRuntimeError', - 'ManagedIntegrationRuntimeNode', - 'ManagedIntegrationRuntimeOperationResult', - 'ManagedIntegrationRuntimeStatus', - 'MariaDBLinkedService', - 'MariaDBSource', - 'MariaDBTableDataset', - 'MarketoLinkedService', - 'MarketoObjectDataset', - 'MarketoSource', - 'MicrosoftAccessLinkedService', - 'MicrosoftAccessSink', - 'MicrosoftAccessSource', - 'MicrosoftAccessTableDataset', - 'MongoDbCollectionDataset', + 'AzureBlobFSSource', + 'AzureDataLakeStoreSource', + 'Office365Source', 'MongoDbCursorMethodsProperties', - 'MongoDbLinkedService', - 'MongoDbSource', - 'MongoDbV2CollectionDataset', - 'MongoDbV2LinkedService', + 'CosmosDbMongoDbApiSource', 'MongoDbV2Source', - 'MultiplePipelineTrigger', - 'MySqlLinkedService', - 'MySqlSource', - 'MySqlTableDataset', - 'NetezzaLinkedService', - 'NetezzaPartitionSettings', - 'NetezzaSource', - 'NetezzaTableDataset', - 'ODataLinkedService', - 'ODataResourceDataset', - 'ODataSource', - 'OdbcLinkedService', - 'OdbcSink', - 'OdbcSource', - 'OdbcTableDataset', - 'Office365Dataset', - 'Office365LinkedService', - 'Office365Source', - 'Operation', - 'OperationDisplay', - 'OperationLogSpecification', - 'OperationMetricAvailability', - 
'OperationMetricDimension', - 'OperationMetricSpecification', - 'OperationServiceSpecification', - 'OracleLinkedService', + 'MongoDbSource', + 'CassandraSource', + 'WebSource', + 'TeradataPartitionSettings', + 'TeradataSource', 'OraclePartitionSettings', - 'OracleServiceCloudLinkedService', - 'OracleServiceCloudObjectDataset', - 'OracleServiceCloudSource', - 'OracleSink', 'OracleSource', - 'OracleTableDataset', - 'OrcFormat', - 'ParameterSpecification', - 'ParquetDataset', - 'ParquetFormat', - 'ParquetSink', - 'ParquetSource', - 'PaypalLinkedService', - 'PaypalObjectDataset', - 'PaypalSource', - 'PhoenixLinkedService', - 'PhoenixObjectDataset', - 'PhoenixSource', - 'PipelineFolder', - 'PipelineReference', - 'PipelineResource', - 'PipelineRun', - 'PipelineRunInvokedBy', - 'PipelineRunsQueryResponse', - 'PolybaseSettings', - 'PostgreSqlLinkedService', - 'PostgreSqlSource', - 'PostgreSqlTableDataset', - 'PrestoLinkedService', - 'PrestoObjectDataset', - 'PrestoSource', - 'QuickBooksLinkedService', - 'QuickBooksObjectDataset', - 'QuickBooksSource', - 'RecurrenceSchedule', - 'RecurrenceScheduleOccurrence', - 'RedirectIncompatibleRowSettings', - 'RedshiftUnloadSettings', - 'RelationalSource', - 'RelationalTableDataset', - 'RerunTriggerResource', - 'RerunTumblingWindowTrigger', - 'RerunTumblingWindowTriggerActionParameters', - 'Resource', - 'ResponsysLinkedService', - 'ResponsysObjectDataset', - 'ResponsysSource', - 'RestResourceDataset', - 'RestServiceLinkedService', - 'RestSource', - 'RetryPolicy', - 'RunFilterParameters', - 'RunQueryFilter', - 'RunQueryOrderBy', - 'SalesforceLinkedService', - 'SalesforceMarketingCloudLinkedService', - 'SalesforceMarketingCloudObjectDataset', - 'SalesforceMarketingCloudSource', - 'SalesforceObjectDataset', - 'SalesforceServiceCloudLinkedService', - 'SalesforceServiceCloudObjectDataset', - 'SalesforceServiceCloudSink', + 'AzureDataExplorerSource', + 'AzureMySqlSource', + 'DistcpSettings', + 'HdfsSource', + 'FileSystemSource', + 'SqlDWSource', + 'StoredProcedureParameter', + 'SqlMISource', + 'AzureSqlSource', + 'SqlServerSource', + 'SqlSource', + 'RestSource', + 'SapTablePartitionSettings', + 'SapTableSource', + 'SapOpenHubSource', + 'SapHanaSource', + 'SapEccSource', + 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource', - 'SalesforceSink', 'SalesforceSource', - 'SapBwCubeDataset', - 'SapBWLinkedService', + 'ODataSource', 'SapBwSource', - 'SapCloudForCustomerLinkedService', - 'SapCloudForCustomerResourceDataset', - 'SapCloudForCustomerSink', - 'SapCloudForCustomerSource', - 'SapEccLinkedService', - 'SapEccResourceDataset', - 'SapEccSource', - 'SapHanaLinkedService', - 'SapHanaSource', - 'SapHanaTableDataset', - 'SapOpenHubLinkedService', - 'SapOpenHubSource', - 'SapOpenHubTableDataset', - 'SapTableLinkedService', - 'SapTablePartitionSettings', - 'SapTableResourceDataset', - 'SapTableSource', - 'ScheduleTrigger', - 'ScheduleTriggerRecurrence', - 'ScriptAction', - 'SecretBase', - 'SecureString', - 'SelfDependencyTumblingWindowTriggerReference', - 'SelfHostedIntegrationRuntime', - 'SelfHostedIntegrationRuntimeNode', - 'SelfHostedIntegrationRuntimeStatus', - 'ServiceNowLinkedService', - 'ServiceNowObjectDataset', - 'ServiceNowSource', - 'SetVariableActivity', - 'SftpLocation', + 'SybaseSource', + 'PostgreSqlSource', + 'MySqlSource', + 'OdbcSource', + 'Db2Source', + 'MicrosoftAccessSource', + 'InformixSource', + 'RelationalSource', + 'CommonDataServiceForAppsSource', + 'DynamicsCrmSource', + 'DynamicsSource', + 'DocumentDbCollectionSource', + 'BlobSource', + 
'AzureTableSource', + 'HdfsReadSettings', + 'HttpReadSettings', 'SftpReadSettings', - 'SftpServerLinkedService', - 'ShopifyLinkedService', - 'ShopifyObjectDataset', - 'ShopifySource', - 'SparkLinkedService', - 'SparkObjectDataset', - 'SparkSource', + 'FtpReadSettings', + 'FileServerReadSettings', + 'AmazonS3ReadSettings', + 'AzureDataLakeStoreReadSettings', + 'AzureBlobFSReadSettings', + 'AzureBlobStorageReadSettings', + 'StoreReadSettings', + 'BinarySource', + 'JsonSource', + 'FormatReadSettings', + 'DelimitedTextReadSettings', + 'DelimitedTextSource', + 'ParquetSource', + 'AvroSource', + 'CopySource', + 'LookupActivity', + 'AzureDataExplorerCommandActivity', + 'LogStorageSettings', + 'DeleteActivity', + 'SqlServerStoredProcedureActivity', + 'CustomActivityReferenceObject', + 'CustomActivity', + 'SSISAccessCredential', + 'SSISLogLocation', + 'SSISPropertyOverride', + 'SSISExecutionParameter', + 'SSISExecutionCredential', + 'SSISPackageLocation', + 'ExecuteSSISPackageActivity', + 'HDInsightSparkActivity', + 'HDInsightStreamingActivity', + 'HDInsightMapReduceActivity', + 'HDInsightPigActivity', + 'HDInsightHiveActivity', + 'RedirectIncompatibleRowSettings', + 'StagingSettings', + 'CosmosDbMongoDbApiSink', + 'SalesforceServiceCloudSink', + 'SalesforceSink', + 'AzureDataExplorerSink', + 'CommonDataServiceForAppsSink', + 'DynamicsCrmSink', + 'DynamicsSink', + 'MicrosoftAccessSink', + 'InformixSink', + 'OdbcSink', + 'AzureSearchIndexSink', + 'AzureBlobFSSink', + 'AzureDataLakeStoreSink', + 'OracleSink', + 'PolybaseSettings', 'SqlDWSink', - 'SqlDWSource', 'SqlMISink', - 'SqlMISource', - 'SqlServerLinkedService', + 'AzureSqlSink', 'SqlServerSink', - 'SqlServerSource', - 'SqlServerStoredProcedureActivity', - 'SqlServerTableDataset', 'SqlSink', - 'SqlSource', - 'SquareLinkedService', - 'SquareObjectDataset', - 'SquareSource', - 'SSISAccessCredential', + 'DocumentDbCollectionSink', + 'FileSystemSink', + 'BlobSink', + 'FileServerWriteSettings', + 'AzureDataLakeStoreWriteSettings', + 'AzureBlobFSWriteSettings', + 'AzureBlobStorageWriteSettings', + 'StoreWriteSettings', + 'BinarySink', + 'ParquetSink', + 'JsonWriteSettings', + 'DelimitedTextWriteSettings', + 'FormatWriteSettings', + 'AvroWriteSettings', + 'AvroSink', + 'AzureTableSink', + 'AzureQueueSink', + 'SapCloudForCustomerSink', + 'AzureMySqlSink', + 'AzurePostgreSqlSink', + 'JsonSink', + 'DelimitedTextSink', + 'CopySink', + 'CopyActivity', + 'ExecutionActivity', + 'WebHookActivity', + 'AppendVariableActivity', + 'SetVariableActivity', + 'FilterActivity', + 'ValidationActivity', + 'UntilActivity', + 'WaitActivity', + 'ForEachActivity', + 'IfConditionActivity', + 'ExecutePipelineActivity', + 'ControlActivity', + 'LinkedIntegrationRuntime', + 'SelfHostedIntegrationRuntimeNode', + 'SelfHostedIntegrationRuntimeStatus', + 'ManagedIntegrationRuntimeOperationResult', + 'ManagedIntegrationRuntimeError', + 'ManagedIntegrationRuntimeNode', + 'ManagedIntegrationRuntimeStatus', + 'LinkedIntegrationRuntimeRbacAuthorization', + 'LinkedIntegrationRuntimeKeyAuthorization', + 'LinkedIntegrationRuntimeType', + 'SelfHostedIntegrationRuntime', + 'EntityReference', + 'IntegrationRuntimeDataProxyProperties', + 'IntegrationRuntimeCustomSetupScriptProperties', + 'IntegrationRuntimeSsisCatalogInfo', + 'IntegrationRuntimeSsisProperties', + 'IntegrationRuntimeVNetProperties', + 'IntegrationRuntimeComputeProperties', + 'ManagedIntegrationRuntime', + 'IntegrationRuntimeNodeIpAddress', + 'SsisVariable', 'SsisEnvironment', + 'SsisParameter', + 'SsisPackage', 
'SsisEnvironmentReference', - 'SSISExecutionCredential', - 'SSISExecutionParameter', + 'SsisProject', 'SsisFolder', - 'SSISLogLocation', 'SsisObjectMetadata', 'SsisObjectMetadataListResponse', - 'SsisObjectMetadataStatusResponse', - 'SsisPackage', - 'SSISPackageLocation', - 'SsisParameter', - 'SsisProject', - 'SSISPropertyOverride', - 'SsisVariable', - 'StagingSettings', - 'StoredProcedureParameter', - 'StoreReadSettings', - 'StoreWriteSettings', - 'SubResource', - 'SybaseLinkedService', - 'SybaseSource', - 'SybaseTableDataset', - 'TeradataLinkedService', - 'TeradataPartitionSettings', - 'TeradataSource', - 'TeradataTableDataset', - 'TextFormat', - 'Trigger', - 'TriggerDependencyReference', - 'TriggerPipelineReference', - 'TriggerReference', - 'TriggerResource', - 'TriggerRun', - 'TriggerRunsQueryResponse', - 'TriggerSubscriptionOperationStatus', - 'TumblingWindowTrigger', - 'TumblingWindowTriggerDependencyReference', - 'UntilActivity', - 'UpdateIntegrationRuntimeNodeRequest', - 'UpdateIntegrationRuntimeRequest', - 'UserAccessPolicy', - 'UserProperty', - 'ValidationActivity', - 'VariableSpecification', - 'VerticaLinkedService', - 'VerticaSource', - 'VerticaTableDataset', - 'WaitActivity', - 'WebActivity', - 'WebActivityAuthentication', - 'WebAnonymousAuthentication', - 'WebBasicAuthentication', - 'WebClientCertificateAuthentication', - 'WebHookActivity', - 'WebLinkedService', - 'WebLinkedServiceTypeProperties', - 'WebSource', - 'WebTableDataset', - 'XeroLinkedService', - 'XeroObjectDataset', - 'XeroSource', - 'ZohoLinkedService', - 'ZohoObjectDataset', - 'ZohoSource', + 'IntegrationRuntimeNodeMonitoringData', + 'IntegrationRuntimeMonitoringData', + 'IntegrationRuntimeAuthKeys', + 'IntegrationRuntimeRegenerateKeyParameters', + 'IntegrationRuntimeConnectionInfo', 'OperationPaged', 'FactoryPaged', 'IntegrationRuntimeResourcePaged', diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py deleted file mode 100644 index aeb32319f20a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py +++ /dev/null @@ -1,29103 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model -from msrest.exceptions import HttpOperationError - - -class AccessPolicyResponse(Model): - """Get Data Plane read only token response definition. - - :param policy: The user access policy. - :type policy: ~azure.mgmt.datafactory.models.UserAccessPolicy - :param access_token: Data Plane read only access token. - :type access_token: str - :param data_plane_url: Data Plane service base URL. 
- :type data_plane_url: str - """ - - _attribute_map = { - 'policy': {'key': 'policy', 'type': 'UserAccessPolicy'}, - 'access_token': {'key': 'accessToken', 'type': 'str'}, - 'data_plane_url': {'key': 'dataPlaneUrl', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(AccessPolicyResponse, self).__init__(**kwargs) - self.policy = kwargs.get('policy', None) - self.access_token = kwargs.get('access_token', None) - self.data_plane_url = kwargs.get('data_plane_url', None) - - -class Activity(Model): - """A pipeline activity. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ExecutionActivity, ControlActivity - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'Execution': 'ExecutionActivity', 'Container': 'ControlActivity'} - } - - def __init__(self, **kwargs): - super(Activity, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.name = kwargs.get('name', None) - self.description = kwargs.get('description', None) - self.depends_on = kwargs.get('depends_on', None) - self.user_properties = kwargs.get('user_properties', None) - self.type = None - - -class ActivityDependency(Model): - """Activity dependency information. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param activity: Required. Activity name. - :type activity: str - :param dependency_conditions: Required. Match-Condition for the - dependency. - :type dependency_conditions: list[str or - ~azure.mgmt.datafactory.models.DependencyCondition] - """ - - _validation = { - 'activity': {'required': True}, - 'dependency_conditions': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'activity': {'key': 'activity', 'type': 'str'}, - 'dependency_conditions': {'key': 'dependencyConditions', 'type': '[str]'}, - } - - def __init__(self, **kwargs): - super(ActivityDependency, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.activity = kwargs.get('activity', None) - self.dependency_conditions = kwargs.get('dependency_conditions', None) - - -class ActivityPolicy(Model): - """Execution policy for an activity. 
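The `_subtype_map` on Activity above is the discriminator table msrest uses to pick a subclass, and `depends_on` plus ActivityDependency is how activities are sequenced inside a pipeline. A minimal sketch of chaining two activities (the activity names are placeholders):

    from azure.mgmt.datafactory.models import ActivityDependency

    # 'copy2' runs only when 'copy1' ends with the Succeeded condition;
    # other DependencyCondition values are Failed, Skipped and Completed.
    dep = ActivityDependency(
        activity='copy1',
        dependency_conditions=['Succeeded'],
    )
    # dep then goes into the downstream activity's depends_on list, e.g.
    # some_activity.depends_on = [dep]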
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param timeout: Specifies the timeout for the activity to run. The default - timeout is 7 days. Type: string (or Expression with resultType string), - pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type timeout: object - :param retry: Maximum ordinary retry attempts. Default is 0. Type: integer - (or Expression with resultType integer), minimum: 0. - :type retry: object - :param retry_interval_in_seconds: Interval between each retry attempt (in - seconds). The default is 30 sec. - :type retry_interval_in_seconds: int - :param secure_input: When set to true, Input from activity is considered - as secure and will not be logged to monitoring. - :type secure_input: bool - :param secure_output: When set to true, Output from activity is considered - as secure and will not be logged to monitoring. - :type secure_output: bool - """ - - _validation = { - 'retry_interval_in_seconds': {'maximum': 86400, 'minimum': 30}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'timeout': {'key': 'timeout', 'type': 'object'}, - 'retry': {'key': 'retry', 'type': 'object'}, - 'retry_interval_in_seconds': {'key': 'retryIntervalInSeconds', 'type': 'int'}, - 'secure_input': {'key': 'secureInput', 'type': 'bool'}, - 'secure_output': {'key': 'secureOutput', 'type': 'bool'}, - } - - def __init__(self, **kwargs): - super(ActivityPolicy, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.timeout = kwargs.get('timeout', None) - self.retry = kwargs.get('retry', None) - self.retry_interval_in_seconds = kwargs.get('retry_interval_in_seconds', None) - self.secure_input = kwargs.get('secure_input', None) - self.secure_output = kwargs.get('secure_output', None) - - -class ActivityRun(Model): - """Information about an activity run in a pipeline. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar pipeline_name: The name of the pipeline. - :vartype pipeline_name: str - :ivar pipeline_run_id: The id of the pipeline run. - :vartype pipeline_run_id: str - :ivar activity_name: The name of the activity. - :vartype activity_name: str - :ivar activity_type: The type of the activity. - :vartype activity_type: str - :ivar activity_run_id: The id of the activity run. - :vartype activity_run_id: str - :ivar linked_service_name: The name of the compute linked service. - :vartype linked_service_name: str - :ivar status: The status of the activity run. - :vartype status: str - :ivar activity_run_start: The start time of the activity run in 'ISO 8601' - format. - :vartype activity_run_start: datetime - :ivar activity_run_end: The end time of the activity run in 'ISO 8601' - format. - :vartype activity_run_end: datetime - :ivar duration_in_ms: The duration of the activity run. - :vartype duration_in_ms: int - :ivar input: The input for the activity. - :vartype input: object - :ivar output: The output for the activity. - :vartype output: object - :ivar error: The error if any from the activity run. 
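A minimal sketch of the ActivityPolicy above: the timeout follows the D.HH:MM:SS timespan pattern from the docstring, and retry_interval_in_seconds must stay inside the validated 30-86400 range (all values are placeholders):

    from azure.mgmt.datafactory.models import ActivityPolicy

    policy = ActivityPolicy(
        timeout='0.02:00:00',          # 0 days, 2 hours
        retry=3,                       # up to three ordinary retries
        retry_interval_in_seconds=60,  # within the 30-86400 bound
        secure_output=True,            # keep output out of run monitoring
    )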
- :vartype error: object - """ - - _validation = { - 'pipeline_name': {'readonly': True}, - 'pipeline_run_id': {'readonly': True}, - 'activity_name': {'readonly': True}, - 'activity_type': {'readonly': True}, - 'activity_run_id': {'readonly': True}, - 'linked_service_name': {'readonly': True}, - 'status': {'readonly': True}, - 'activity_run_start': {'readonly': True}, - 'activity_run_end': {'readonly': True}, - 'duration_in_ms': {'readonly': True}, - 'input': {'readonly': True}, - 'output': {'readonly': True}, - 'error': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, - 'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'}, - 'activity_name': {'key': 'activityName', 'type': 'str'}, - 'activity_type': {'key': 'activityType', 'type': 'str'}, - 'activity_run_id': {'key': 'activityRunId', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'activity_run_start': {'key': 'activityRunStart', 'type': 'iso-8601'}, - 'activity_run_end': {'key': 'activityRunEnd', 'type': 'iso-8601'}, - 'duration_in_ms': {'key': 'durationInMs', 'type': 'int'}, - 'input': {'key': 'input', 'type': 'object'}, - 'output': {'key': 'output', 'type': 'object'}, - 'error': {'key': 'error', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ActivityRun, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.pipeline_name = None - self.pipeline_run_id = None - self.activity_name = None - self.activity_type = None - self.activity_run_id = None - self.linked_service_name = None - self.status = None - self.activity_run_start = None - self.activity_run_end = None - self.duration_in_ms = None - self.input = None - self.output = None - self.error = None - - -class ActivityRunsQueryResponse(Model): - """A list activity runs. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of activity runs. - :type value: list[~azure.mgmt.datafactory.models.ActivityRun] - :param continuation_token: The continuation token for getting the next - page of results, if any remaining results exist, null otherwise. - :type continuation_token: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[ActivityRun]'}, - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(ActivityRunsQueryResponse, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.continuation_token = kwargs.get('continuation_token', None) - - -class LinkedService(Model): - """The Azure Data Factory nested object which contains the information and - credential which can be used to connect with related store or compute - resource. - - You probably want to use the sub-classes and not this class directly. 
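ActivityRunsQueryResponse above pages via continuation_token. A sketch of draining every page, assuming an authenticated DataFactoryManagementClient and that this SDK version exposes activity_runs.query_by_pipeline_run; the client wiring, resource names and run_id are placeholders:

    from datetime import datetime, timedelta
    from azure.mgmt.datafactory.models import RunFilterParameters

    filters = RunFilterParameters(
        last_updated_after=datetime.utcnow() - timedelta(days=1),
        last_updated_before=datetime.utcnow(),
    )
    while True:
        # 'client' and 'run_id' are assumed to exist in the caller's scope.
        page = client.activity_runs.query_by_pipeline_run(
            'my-rg', 'my-factory', run_id, filters)
        for run in page.value:
            print(run.activity_name, run.status, run.duration_in_ms)
        if not page.continuation_token:
            break
        filters.continuation_token = page.continuation_token  # next page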
Known - sub-classes are: AzureFunctionLinkedService, - AzureDataExplorerLinkedService, SapTableLinkedService, - GoogleAdWordsLinkedService, OracleServiceCloudLinkedService, - DynamicsAXLinkedService, ResponsysLinkedService, - AzureDatabricksLinkedService, AzureDataLakeAnalyticsLinkedService, - HDInsightOnDemandLinkedService, SalesforceMarketingCloudLinkedService, - NetezzaLinkedService, VerticaLinkedService, ZohoLinkedService, - XeroLinkedService, SquareLinkedService, SparkLinkedService, - ShopifyLinkedService, ServiceNowLinkedService, QuickBooksLinkedService, - PrestoLinkedService, PhoenixLinkedService, PaypalLinkedService, - MarketoLinkedService, AzureMariaDBLinkedService, MariaDBLinkedService, - MagentoLinkedService, JiraLinkedService, ImpalaLinkedService, - HubspotLinkedService, HiveLinkedService, HBaseLinkedService, - GreenplumLinkedService, GoogleBigQueryLinkedService, EloquaLinkedService, - DrillLinkedService, CouchbaseLinkedService, ConcurLinkedService, - AzurePostgreSqlLinkedService, AmazonMWSLinkedService, SapHanaLinkedService, - SapBWLinkedService, SftpServerLinkedService, FtpServerLinkedService, - HttpLinkedService, AzureSearchLinkedService, CustomDataSourceLinkedService, - AmazonRedshiftLinkedService, AmazonS3LinkedService, - RestServiceLinkedService, SapOpenHubLinkedService, SapEccLinkedService, - SapCloudForCustomerLinkedService, SalesforceServiceCloudLinkedService, - SalesforceLinkedService, Office365LinkedService, AzureBlobFSLinkedService, - AzureDataLakeStoreLinkedService, CosmosDbMongoDbApiLinkedService, - MongoDbV2LinkedService, MongoDbLinkedService, CassandraLinkedService, - WebLinkedService, ODataLinkedService, HdfsLinkedService, - MicrosoftAccessLinkedService, InformixLinkedService, OdbcLinkedService, - AzureMLLinkedService, TeradataLinkedService, Db2LinkedService, - SybaseLinkedService, PostgreSqlLinkedService, MySqlLinkedService, - AzureMySqlLinkedService, OracleLinkedService, FileServerLinkedService, - HDInsightLinkedService, CommonDataServiceForAppsLinkedService, - DynamicsCrmLinkedService, DynamicsLinkedService, CosmosDbLinkedService, - AzureKeyVaultLinkedService, AzureBatchLinkedService, - AzureSqlMILinkedService, AzureSqlDatabaseLinkedService, - SqlServerLinkedService, AzureSqlDWLinkedService, - AzureTableStorageLinkedService, AzureBlobStorageLinkedService, - AzureStorageLinkedService - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 
'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} - } - - def __init__(self, **kwargs): - super(LinkedService, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.connect_via = kwargs.get('connect_via', None) - self.description = kwargs.get('description', None) - self.parameters = kwargs.get('parameters', None) - self.annotations = kwargs.get('annotations', None) - self.type = None - - -class AmazonMWSLinkedService(LinkedService): - """Amazon Marketplace Web Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param endpoint: Required. The endpoint of the Amazon MWS server, (i.e. - mws.amazonservices.com) - :type endpoint: object - :param marketplace_id: Required. The Amazon Marketplace ID you want to - retrieve data from. To retrieve data from multiple Marketplace IDs, - separate them with a comma (,). (i.e. A2EUQ1WTGCTBG2) - :type marketplace_id: object - :param seller_id: Required. The Amazon seller ID. - :type seller_id: object - :param mws_auth_token: The Amazon MWS authentication token. - :type mws_auth_token: ~azure.mgmt.datafactory.models.SecretBase - :param access_key_id: Required. The access key id used to access data. - :type access_key_id: object - :param secret_key: The secret key used to access data. - :type secret_key: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
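The `_subtype_map` above is what resolves the `type` discriminator into a concrete connector class, so a generic LinkedService payload deserializes to the right subtype. A minimal sketch using msrest's Model.deserialize (the payload content is a placeholder):

    from azure.mgmt.datafactory.models import LinkedService

    payload = {
        'type': 'AzureBlobStorage',   # key into the _subtype_map above
        'typeProperties': {'connectionString': '<redacted>'},
    }
    ls = LinkedService.deserialize(payload)
    print(type(ls).__name__)          # expected: AzureBlobStorageLinkedService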
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'marketplace_id': {'required': True}, - 'seller_id': {'required': True}, - 'access_key_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'marketplace_id': {'key': 'typeProperties.marketplaceID', 'type': 'object'}, - 'seller_id': {'key': 'typeProperties.sellerID', 'type': 'object'}, - 'mws_auth_token': {'key': 'typeProperties.mwsAuthToken', 'type': 'SecretBase'}, - 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, - 'secret_key': {'key': 'typeProperties.secretKey', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AmazonMWSLinkedService, self).__init__(**kwargs) - self.endpoint = kwargs.get('endpoint', None) - self.marketplace_id = kwargs.get('marketplace_id', None) - self.seller_id = kwargs.get('seller_id', None) - self.mws_auth_token = kwargs.get('mws_auth_token', None) - self.access_key_id = kwargs.get('access_key_id', None) - self.secret_key = kwargs.get('secret_key', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AmazonMWS' - - -class Dataset(Model): - """The Azure Data Factory nested object which identifies data within different - data stores, such as tables, files, folders, and documents. - - You probably want to use the sub-classes and not this class directly. 
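A minimal sketch of the AmazonMWSLinkedService above, populating the four typeProperties its validation block marks required; the endpoint and marketplace values are the docstring's own examples, the rest are placeholders:

    from azure.mgmt.datafactory.models import (
        AmazonMWSLinkedService, SecureString)

    mws = AmazonMWSLinkedService(
        endpoint='mws.amazonservices.com',
        marketplace_id='A2EUQ1WTGCTBG2',
        seller_id='<seller-id>',
        access_key_id='<access-key-id>',
        mws_auth_token=SecureString(value='<auth-token>'),  # SecretBase subtype
    )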
Known - sub-classes are: GoogleAdWordsObjectDataset, AzureDataExplorerTableDataset, - OracleServiceCloudObjectDataset, DynamicsAXResourceDataset, - ResponsysObjectDataset, SalesforceMarketingCloudObjectDataset, - VerticaTableDataset, NetezzaTableDataset, ZohoObjectDataset, - XeroObjectDataset, SquareObjectDataset, SparkObjectDataset, - ShopifyObjectDataset, ServiceNowObjectDataset, QuickBooksObjectDataset, - PrestoObjectDataset, PhoenixObjectDataset, PaypalObjectDataset, - MarketoObjectDataset, AzureMariaDBTableDataset, MariaDBTableDataset, - MagentoObjectDataset, JiraObjectDataset, ImpalaObjectDataset, - HubspotObjectDataset, HiveObjectDataset, HBaseObjectDataset, - GreenplumTableDataset, GoogleBigQueryObjectDataset, EloquaObjectDataset, - DrillTableDataset, CouchbaseTableDataset, ConcurObjectDataset, - AzurePostgreSqlTableDataset, AmazonMWSObjectDataset, HttpDataset, - AzureSearchIndexDataset, WebTableDataset, SapTableResourceDataset, - RestResourceDataset, SqlServerTableDataset, SapOpenHubTableDataset, - SapHanaTableDataset, SapEccResourceDataset, - SapCloudForCustomerResourceDataset, SapBwCubeDataset, SybaseTableDataset, - SalesforceServiceCloudObjectDataset, SalesforceObjectDataset, - MicrosoftAccessTableDataset, PostgreSqlTableDataset, MySqlTableDataset, - OdbcTableDataset, InformixTableDataset, RelationalTableDataset, - Db2TableDataset, AmazonRedshiftTableDataset, AzureMySqlTableDataset, - TeradataTableDataset, OracleTableDataset, ODataResourceDataset, - CosmosDbMongoDbApiCollectionDataset, MongoDbV2CollectionDataset, - MongoDbCollectionDataset, FileShareDataset, Office365Dataset, - AzureBlobFSDataset, AzureDataLakeStoreDataset, - CommonDataServiceForAppsEntityDataset, DynamicsCrmEntityDataset, - DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, - CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, - AzureSqlTableDataset, AzureTableDataset, AzureBlobDataset, BinaryDataset, - JsonDataset, DelimitedTextDataset, ParquetDataset, AvroDataset, - AmazonS3Dataset - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 
'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'Json': 'JsonDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} - } - - def __init__(self, **kwargs): - super(Dataset, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.description = kwargs.get('description', None) - self.structure = kwargs.get('structure', None) - self.schema = kwargs.get('schema', None) - self.linked_service_name = kwargs.get('linked_service_name', None) - self.parameters = kwargs.get('parameters', None) - self.annotations = kwargs.get('annotations', None) - self.folder = kwargs.get('folder', None) - self.type = None - - -class AmazonMWSObjectDataset(Dataset): - """Amazon Marketplace Web Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). 
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AmazonMWSObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'AmazonMWSObject' - - -class CopySource(Model): - """A copy activity source. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonRedshiftSource, GoogleAdWordsSource, - OracleServiceCloudSource, DynamicsAXSource, ResponsysSource, - SalesforceMarketingCloudSource, VerticaSource, NetezzaSource, ZohoSource, - XeroSource, SquareSource, SparkSource, ShopifySource, ServiceNowSource, - QuickBooksSource, PrestoSource, PhoenixSource, PaypalSource, MarketoSource, - AzureMariaDBSource, MariaDBSource, MagentoSource, JiraSource, ImpalaSource, - HubspotSource, HiveSource, HBaseSource, GreenplumSource, - GoogleBigQuerySource, EloquaSource, DrillSource, CouchbaseSource, - ConcurSource, AzurePostgreSqlSource, AmazonMWSSource, HttpSource, - AzureBlobFSSource, AzureDataLakeStoreSource, Office365Source, - CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, CassandraSource, - WebSource, TeradataSource, OracleSource, AzureDataExplorerSource, - AzureMySqlSource, HdfsSource, FileSystemSource, SqlDWSource, SqlMISource, - AzureSqlSource, SqlServerSource, SqlSource, RestSource, SapTableSource, - SapOpenHubSource, SapHanaSource, SapEccSource, SapCloudForCustomerSource, - SalesforceServiceCloudSource, SalesforceSource, ODataSource, SapBwSource, - SybaseSource, PostgreSqlSource, MySqlSource, OdbcSource, Db2Source, - MicrosoftAccessSource, InformixSource, RelationalSource, - CommonDataServiceForAppsSource, DynamicsCrmSource, DynamicsSource, - DocumentDbCollectionSource, BlobSource, AzureTableSource, BinarySource, - JsonSource, DelimitedTextSource, ParquetSource, AvroSource - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. 
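Datasets follow the same construction pattern: only linked_service_name is required, and the type constant is filled in by the class itself. A sketch for the AmazonMWSObjectDataset above (reference and table names are placeholders):

    from azure.mgmt.datafactory.models import (
        AmazonMWSObjectDataset, LinkedServiceReference)

    ds = AmazonMWSObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name='mws_ls'),
        table_name='Orders',   # serialized as typeProperties.tableName
    )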
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'ODataSource': 'ODataSource', 'SapBwSource': 'SapBwSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'BinarySource': 'BinarySource', 'JsonSource': 'JsonSource', 'DelimitedTextSource': 
'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'AvroSource': 'AvroSource'} - } - - def __init__(self, **kwargs): - super(CopySource, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.source_retry_count = kwargs.get('source_retry_count', None) - self.source_retry_wait = kwargs.get('source_retry_wait', None) - self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) - self.type = None - - -class AmazonMWSSource(CopySource): - """A copy activity Amazon Marketplace Web Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AmazonMWSSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'AmazonMWSSource' - - -class AmazonRedshiftLinkedService(LinkedService): - """Linked service for Amazon Redshift. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param server: Required. The name of the Amazon Redshift server. Type: - string (or Expression with resultType string). - :type server: object - :param username: The username of the Amazon Redshift source. Type: string - (or Expression with resultType string). - :type username: object - :param password: The password of the Amazon Redshift source. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param database: Required. The database name of the Amazon Redshift - source. 
Type: string (or Expression with resultType string). - :type database: object - :param port: The TCP port number that the Amazon Redshift server uses to - listen for client connections. The default value is 5439. Type: integer - (or Expression with resultType integer). - :type port: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'database': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AmazonRedshiftLinkedService, self).__init__(**kwargs) - self.server = kwargs.get('server', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.database = kwargs.get('database', None) - self.port = kwargs.get('port', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AmazonRedshift' - - -class AmazonRedshiftSource(CopySource): - """A copy activity source for Amazon Redshift Source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - :param redshift_unload_settings: The Amazon S3 settings needed for the - interim Amazon S3 when copying from Amazon Redshift with unload. With - this, data from Amazon Redshift source will be unloaded into S3 first and - then copied into the targeted sink from the interim S3. 
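A sketch of the staged-unload source described above: with redshift_unload_settings set, data is first UNLOADed from Redshift into the interim S3 bucket and then copied on to the sink. The RedshiftUnloadSettings field names are taken from this SDK; linked-service and bucket names are placeholders:

    from azure.mgmt.datafactory.models import (
        AmazonRedshiftSource, RedshiftUnloadSettings, LinkedServiceReference)

    source = AmazonRedshiftSource(
        query='select * from public.orders',
        redshift_unload_settings=RedshiftUnloadSettings(
            s3_linked_service_name=LinkedServiceReference(reference_name='s3_ls'),
            bucket_name='interim-unload-bucket',
        ),
    )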
- :type redshift_unload_settings: - ~azure.mgmt.datafactory.models.RedshiftUnloadSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, - } - - def __init__(self, **kwargs): - super(AmazonRedshiftSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.redshift_unload_settings = kwargs.get('redshift_unload_settings', None) - self.type = 'AmazonRedshiftSource' - - -class AmazonRedshiftTableDataset(Dataset): - """The Amazon Redshift table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The Amazon Redshift table name. Type: string (or Expression - with resultType string). - :type table: object - :param amazon_redshift_table_dataset_schema: The Amazon Redshift schema - name. Type: string (or Expression with resultType string). 
- :type amazon_redshift_table_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'amazon_redshift_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AmazonRedshiftTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.amazon_redshift_table_dataset_schema = kwargs.get('amazon_redshift_table_dataset_schema', None) - self.type = 'AmazonRedshiftTable' - - -class AmazonS3Dataset(Dataset): - """A single Amazon Simple Storage Service (S3) object or a set of S3 objects. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param bucket_name: Required. The name of the Amazon S3 bucket. Type: - string (or Expression with resultType string). - :type bucket_name: object - :param key: The key of the Amazon S3 object. Type: string (or Expression - with resultType string). - :type key: object - :param prefix: The prefix filter for the S3 object name. Type: string (or - Expression with resultType string). - :type prefix: object - :param version: The version for the S3 object. Type: string (or Expression - with resultType string). - :type version: object - :param modified_datetime_start: The start of S3 object's modified - datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of S3 object's modified datetime. - Type: string (or Expression with resultType string). 
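As the docstring notes, table_name is being retired in favour of schema + table; the schema property surfaces in Python as amazon_redshift_table_dataset_schema so it does not clash with the base Dataset's schema attribute. A sketch (names are placeholders):

    from azure.mgmt.datafactory.models import (
        AmazonRedshiftTableDataset, LinkedServiceReference)

    ds = AmazonRedshiftTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='redshift_ls'),
        table='orders',
        amazon_redshift_table_dataset_schema='public',  # typeProperties.schema
    )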
- :type modified_datetime_end: object - :param format: The format of files. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used for the Amazon S3 - object. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'bucket_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'bucket_name': {'key': 'typeProperties.bucketName', 'type': 'object'}, - 'key': {'key': 'typeProperties.key', 'type': 'object'}, - 'prefix': {'key': 'typeProperties.prefix', 'type': 'object'}, - 'version': {'key': 'typeProperties.version', 'type': 'object'}, - 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, **kwargs): - super(AmazonS3Dataset, self).__init__(**kwargs) - self.bucket_name = kwargs.get('bucket_name', None) - self.key = kwargs.get('key', None) - self.prefix = kwargs.get('prefix', None) - self.version = kwargs.get('version', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) - self.format = kwargs.get('format', None) - self.compression = kwargs.get('compression', None) - self.type = 'AmazonS3Object' - - -class AmazonS3LinkedService(LinkedService): - """Linked service for Amazon S3. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param access_key_id: The access key identifier of the Amazon S3 Identity - and Access Management (IAM) user. Type: string (or Expression with - resultType string). - :type access_key_id: object - :param secret_access_key: The secret access key of the Amazon S3 Identity - and Access Management (IAM) user. - :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase - :param service_url: This value specifies the endpoint to access with the - S3 Connector. 
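A sketch of the AmazonS3Dataset above: bucket_name is the only required typeProperty, while prefix and the modified-datetime window narrow which objects are matched (all values are placeholders):

    from azure.mgmt.datafactory.models import (
        AmazonS3Dataset, LinkedServiceReference)

    ds = AmazonS3Dataset(
        linked_service_name=LinkedServiceReference(reference_name='s3_ls'),
        bucket_name='my-bucket',
        prefix='raw/2019/06/',
        modified_datetime_start='2019-06-01T00:00:00Z',
        modified_datetime_end='2019-06-07T00:00:00Z',
    )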
This is an optional property; change it only if you want to - try a different service endpoint or want to switch between https and http. - Type: string (or Expression with resultType string). - :type service_url: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, - 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, - 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AmazonS3LinkedService, self).__init__(**kwargs) - self.access_key_id = kwargs.get('access_key_id', None) - self.secret_access_key = kwargs.get('secret_access_key', None) - self.service_url = kwargs.get('service_url', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AmazonS3' - - -class DatasetLocation(Model): - """Dataset location. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). - :type file_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DatasetLocation, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = kwargs.get('type', None) - self.folder_path = kwargs.get('folder_path', None) - self.file_name = kwargs.get('file_name', None) - - -class AmazonS3Location(DatasetLocation): - """The location of amazon S3 dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). 
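A sketch of the AmazonS3LinkedService above, with an IAM key pair and the optional service_url override from the docstring (endpoint and credentials are placeholders):

    from azure.mgmt.datafactory.models import (
        AmazonS3LinkedService, SecureString)

    s3 = AmazonS3LinkedService(
        access_key_id='<iam-access-key>',
        secret_access_key=SecureString(value='<iam-secret>'),
        service_url='https://s3.eu-west-1.amazonaws.com',  # optional override
    )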
-    :type file_name: object
-    :param bucket_name: Specify the bucketName of Amazon S3. Type: string
-     (or Expression with resultType string).
-    :type bucket_name: object
-    :param version: Specify the version of Amazon S3. Type: string (or
-     Expression with resultType string).
-    :type version: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'type': {'key': 'type', 'type': 'str'},
-        'folder_path': {'key': 'folderPath', 'type': 'object'},
-        'file_name': {'key': 'fileName', 'type': 'object'},
-        'bucket_name': {'key': 'bucketName', 'type': 'object'},
-        'version': {'key': 'version', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(AmazonS3Location, self).__init__(**kwargs)
-        self.bucket_name = kwargs.get('bucket_name', None)
-        self.version = kwargs.get('version', None)
-
-
-class StoreReadSettings(Model):
-    """Connector read settings.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param type: Required. The read setting type.
-    :type type: str
-    :param max_concurrent_connections: The maximum concurrent connection
-     count for the source data store. Type: integer (or Expression with
-     resultType integer).
-    :type max_concurrent_connections: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'type': {'key': 'type', 'type': 'str'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(StoreReadSettings, self).__init__(**kwargs)
-        self.additional_properties = kwargs.get('additional_properties', None)
-        self.type = kwargs.get('type', None)
-        self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None)
-
-
-class AmazonS3ReadSettings(StoreReadSettings):
-    """Amazon S3 read settings.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param type: Required. The read setting type.
-    :type type: str
-    :param max_concurrent_connections: The maximum concurrent connection
-     count for the source data store. Type: integer (or Expression with
-     resultType integer).
-    :type max_concurrent_connections: object
-    :param recursive: If true, files under the folder path will be read
-     recursively. Default is true. Type: boolean (or Expression with
-     resultType boolean).
-    :type recursive: object
-    :param wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string
-     (or Expression with resultType string).
-    :type wildcard_folder_path: object
-    :param wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or
-     Expression with resultType string).
-    :type wildcard_file_name: object
-    :param prefix: The prefix filter for the S3 object name. Type: string
-     (or Expression with resultType string).
-    :type prefix: object
-    :param enable_partition_discovery: Indicates whether to enable partition
-     discovery.
-    :type enable_partition_discovery: bool
-    :param modified_datetime_start: The start of file's modified datetime.
-     Type: string (or Expression with resultType string).
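A minimal usage sketch of the kwargs-based constructor pattern these generated models share; the bucket and path values below are hypothetical:

from azure.mgmt.datafactory.models import AmazonS3Location

# Fields arrive as keyword arguments; _attribute_map translates each one
# to its wire name on serialization (folder_path -> 'folderPath', etc.).
location = AmazonS3Location(
    type='AmazonS3Location',   # required by _validation; this subclass does
                               # not auto-fill it in __init__
    bucket_name='example-bucket',
    folder_path='raw/2019/06',
    file_name='events.avro',
)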
-    :type modified_datetime_start: object
-    :param modified_datetime_end: The end of file's modified datetime. Type:
-     string (or Expression with resultType string).
-    :type modified_datetime_end: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'type': {'key': 'type', 'type': 'str'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'recursive': {'key': 'recursive', 'type': 'object'},
-        'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
-        'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
-        'prefix': {'key': 'prefix', 'type': 'object'},
-        'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'},
-        'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'},
-        'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(AmazonS3ReadSettings, self).__init__(**kwargs)
-        self.recursive = kwargs.get('recursive', None)
-        self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None)
-        self.wildcard_file_name = kwargs.get('wildcard_file_name', None)
-        self.prefix = kwargs.get('prefix', None)
-        self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None)
-        self.modified_datetime_start = kwargs.get('modified_datetime_start', None)
-        self.modified_datetime_end = kwargs.get('modified_datetime_end', None)
-
-
-class ControlActivity(Activity):
-    """Base class for all control activities like IfCondition, ForEach, and
-    Until.
-
-    You probably want to use the sub-classes and not this class directly.
-    Known sub-classes are: WebHookActivity, AppendVariableActivity,
-    SetVariableActivity, FilterActivity, ValidationActivity, UntilActivity,
-    WaitActivity, ForEachActivity, IfConditionActivity,
-    ExecutePipelineActivity.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param name: Required. Activity name.
-    :type name: str
-    :param description: Activity description.
-    :type description: str
-    :param depends_on: Activity depends on condition.
-    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
-    :param user_properties: Activity user properties.
-    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
-    :param type: Required. Constant filled by server.
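A sketch of the read settings above, combining a wildcard filter with a modified-datetime window (all values illustrative):

from azure.mgmt.datafactory.models import AmazonS3ReadSettings

read_settings = AmazonS3ReadSettings(
    type='AmazonS3ReadSettings',    # 'type' is required by _validation
    recursive=True,
    wildcard_folder_path='raw/2019/*',
    wildcard_file_name='*.csv',
    modified_datetime_start='2019-06-01T00:00:00Z',
    modified_datetime_end='2019-06-07T00:00:00Z',
)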
- :type type: str - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'WebHook': 'WebHookActivity', 'AppendVariable': 'AppendVariableActivity', 'SetVariable': 'SetVariableActivity', 'Filter': 'FilterActivity', 'Validation': 'ValidationActivity', 'Until': 'UntilActivity', 'Wait': 'WaitActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'ExecutePipeline': 'ExecutePipelineActivity'} - } - - def __init__(self, **kwargs): - super(ControlActivity, self).__init__(**kwargs) - self.type = 'Container' - - -class AppendVariableActivity(ControlActivity): - """Append value for a Variable of type Array. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param variable_name: Name of the variable whose value needs to be - appended to. - :type variable_name: str - :param value: Value to be appended. Could be a static value or Expression - :type value: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, - 'value': {'key': 'typeProperties.value', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AppendVariableActivity, self).__init__(**kwargs) - self.variable_name = kwargs.get('variable_name', None) - self.value = kwargs.get('value', None) - self.type = 'AppendVariable' - - -class AvroDataset(Dataset): - """Avro dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. 
Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param location: Required. The location of the avro storage. - :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param avro_compression_codec: Possible values include: 'none', 'deflate', - 'snappy', 'xz', 'bzip2' - :type avro_compression_codec: str or - ~azure.mgmt.datafactory.models.AvroCompressionCodec - :param avro_compression_level: - :type avro_compression_level: int - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'location': {'required': True}, - 'avro_compression_level': {'maximum': 9, 'minimum': 1}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'}, - 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, - } - - def __init__(self, **kwargs): - super(AvroDataset, self).__init__(**kwargs) - self.location = kwargs.get('location', None) - self.avro_compression_codec = kwargs.get('avro_compression_codec', None) - self.avro_compression_level = kwargs.get('avro_compression_level', None) - self.type = 'Avro' - - -class DatasetStorageFormat(Model): - """The format definition of a storage. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ParquetFormat, OrcFormat, AvroFormat, JsonFormat, - TextFormat - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param serializer: Serializer. Type: string (or Expression with resultType - string). - :type serializer: object - :param deserializer: Deserializer. Type: string (or Expression with - resultType string). - :type deserializer: object - :param type: Required. Constant filled by server. 
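A sketch wiring AvroDataset to a storage location; LinkedServiceReference is assumed from elsewhere in this package, and the names are placeholders:

from azure.mgmt.datafactory.models import (
    AvroDataset, AzureBlobStorageLocation, LinkedServiceReference)

dataset = AvroDataset(
    linked_service_name=LinkedServiceReference(reference_name='BlobLS'),
    location=AzureBlobStorageLocation(
        type='AzureBlobStorageLocation', container='data', folder_path='avro'),
    avro_compression_codec='deflate',
    avro_compression_level=5,   # _validation constrains this to 1..9
)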
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'ParquetFormat': 'ParquetFormat', 'OrcFormat': 'OrcFormat', 'AvroFormat': 'AvroFormat', 'JsonFormat': 'JsonFormat', 'TextFormat': 'TextFormat'} - } - - def __init__(self, **kwargs): - super(DatasetStorageFormat, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.serializer = kwargs.get('serializer', None) - self.deserializer = kwargs.get('deserializer', None) - self.type = None - - -class AvroFormat(DatasetStorageFormat): - """The data stored in Avro format. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param serializer: Serializer. Type: string (or Expression with resultType - string). - :type serializer: object - :param deserializer: Deserializer. Type: string (or Expression with - resultType string). - :type deserializer: object - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(AvroFormat, self).__init__(**kwargs) - self.type = 'AvroFormat' - - -class CopySink(Model): - """A copy activity sink. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: CosmosDbMongoDbApiSink, SalesforceServiceCloudSink, - SalesforceSink, AzureDataExplorerSink, CommonDataServiceForAppsSink, - DynamicsCrmSink, DynamicsSink, MicrosoftAccessSink, InformixSink, OdbcSink, - AzureSearchIndexSink, AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, - SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, - DocumentDbCollectionSink, FileSystemSink, BlobSink, BinarySink, - ParquetSink, AvroSink, AzureTableSink, AzureQueueSink, - SapCloudForCustomerSink, AzureMySqlSink, AzurePostgreSqlSink, JsonSink, - DelimitedTextSink - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
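The _subtype_map above is what routes a polymorphic payload to a concrete sink class during deserialization; conceptually (a sketch against the mapping shown, not the full msrest machinery):

from azure.mgmt.datafactory.models import CopySink

payload = {'type': 'AvroSink', 'writeBatchSize': 10000}
# The 'type' discriminator selects the subclass name, which msrest then
# resolves to the actual model class.
subclass_name = CopySink._subtype_map['type'][payload['type']]
assert subclass_name == 'AvroSink'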
- :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} - } - - def __init__(self, **kwargs): - super(CopySink, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.write_batch_size = kwargs.get('write_batch_size', None) - self.write_batch_timeout = kwargs.get('write_batch_timeout', None) - self.sink_retry_count = kwargs.get('sink_retry_count', None) - self.sink_retry_wait = kwargs.get('sink_retry_wait', None) - self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) - self.type = None - - -class AvroSink(CopySink): - """A copy activity Avro sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. 
Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: Avro store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - :param format_settings: Avro format settings. - :type format_settings: ~azure.mgmt.datafactory.models.AvroWriteSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'}, - } - - def __init__(self, **kwargs): - super(AvroSink, self).__init__(**kwargs) - self.store_settings = kwargs.get('store_settings', None) - self.format_settings = kwargs.get('format_settings', None) - self.type = 'AvroSink' - - -class AvroSource(CopySource): - """A copy activity Avro source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: Avro store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - } - - def __init__(self, **kwargs): - super(AvroSource, self).__init__(**kwargs) - self.store_settings = kwargs.get('store_settings', None) - self.type = 'AvroSource' - - -class FormatWriteSettings(Model): - """Format write settings. - - All required parameters must be populated in order to send to Azure. 
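A sketch pairing the Avro source with S3 read settings inside a hypothetical copy-activity fragment:

from azure.mgmt.datafactory.models import AmazonS3ReadSettings, AvroSource

source = AvroSource(
    store_settings=AmazonS3ReadSettings(
        type='AmazonS3ReadSettings', recursive=True),
    source_retry_count=3,
    source_retry_wait='00:00:30',   # matches the documented timespan pattern
)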
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(FormatWriteSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = kwargs.get('type', None) - - -class AvroWriteSettings(FormatWriteSettings): - """Avro write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str - :param record_name: Top level record name in write result, which is - required in AVRO spec. - :type record_name: str - :param record_namespace: Record namespace in the write result. - :type record_namespace: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'record_name': {'key': 'recordName', 'type': 'str'}, - 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(AvroWriteSettings, self).__init__(**kwargs) - self.record_name = kwargs.get('record_name', None) - self.record_namespace = kwargs.get('record_namespace', None) - - -class AzureBatchLinkedService(LinkedService): - """Azure Batch linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param account_name: Required. The Azure Batch account name. Type: string - (or Expression with resultType string). - :type account_name: object - :param access_key: The Azure Batch account access key. - :type access_key: ~azure.mgmt.datafactory.models.SecretBase - :param batch_uri: Required. The Azure Batch URI. Type: string (or - Expression with resultType string). - :type batch_uri: object - :param pool_name: Required. The Azure Batch pool name. Type: string (or - Expression with resultType string). - :type pool_name: object - :param linked_service_name: Required. The Azure Storage linked service - reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
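A construction sketch for the Azure Batch linked service; SecureString and LinkedServiceReference are assumed from elsewhere in this package, and every value is a placeholder:

from azure.mgmt.datafactory.models import (
    AzureBatchLinkedService, LinkedServiceReference, SecureString)

batch_ls = AzureBatchLinkedService(
    account_name='mybatchaccount',
    batch_uri='https://mybatchaccount.westus.batch.azure.com',
    pool_name='mypool',
    access_key=SecureString(value='<access-key>'),
    linked_service_name=LinkedServiceReference(reference_name='StorageLS'),
)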
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'account_name': {'required': True}, - 'batch_uri': {'required': True}, - 'pool_name': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, - 'access_key': {'key': 'typeProperties.accessKey', 'type': 'SecretBase'}, - 'batch_uri': {'key': 'typeProperties.batchUri', 'type': 'object'}, - 'pool_name': {'key': 'typeProperties.poolName', 'type': 'object'}, - 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureBatchLinkedService, self).__init__(**kwargs) - self.account_name = kwargs.get('account_name', None) - self.access_key = kwargs.get('access_key', None) - self.batch_uri = kwargs.get('batch_uri', None) - self.pool_name = kwargs.get('pool_name', None) - self.linked_service_name = kwargs.get('linked_service_name', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AzureBatch' - - -class AzureBlobDataset(Dataset): - """The Azure Blob storage. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param folder_path: The path of the Azure Blob storage. Type: string (or - Expression with resultType string). - :type folder_path: object - :param table_root_location: The root of blob path. Type: string (or - Expression with resultType string). - :type table_root_location: object - :param file_name: The name of the Azure Blob. Type: string (or Expression - with resultType string). - :type file_name: object - :param modified_datetime_start: The start of Azure Blob's modified - datetime. Type: string (or Expression with resultType string). 
- :type modified_datetime_start: object - :param modified_datetime_end: The end of Azure Blob's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_end: object - :param format: The format of the Azure Blob storage. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used for the blob storage. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'table_root_location': {'key': 'typeProperties.tableRootLocation', 'type': 'object'}, - 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, - 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, **kwargs): - super(AzureBlobDataset, self).__init__(**kwargs) - self.folder_path = kwargs.get('folder_path', None) - self.table_root_location = kwargs.get('table_root_location', None) - self.file_name = kwargs.get('file_name', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) - self.format = kwargs.get('format', None) - self.compression = kwargs.get('compression', None) - self.type = 'AzureBlob' - - -class AzureBlobFSDataset(Dataset): - """The Azure Data Lake Storage Gen2 storage. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. 
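A sketch of the blob dataset above with a modified-datetime window (placeholder names; LinkedServiceReference assumed as before):

from azure.mgmt.datafactory.models import (
    AzureBlobDataset, LinkedServiceReference)

blob_ds = AzureBlobDataset(
    linked_service_name=LinkedServiceReference(reference_name='BlobLS'),
    folder_path='container/raw',
    file_name='events.json',
    modified_datetime_start='2019-06-01T00:00:00Z',
    modified_datetime_end='2019-06-07T00:00:00Z',
)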
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param folder_path: The path of the Azure Data Lake Storage Gen2 storage. - Type: string (or Expression with resultType string). - :type folder_path: object - :param file_name: The name of the Azure Data Lake Storage Gen2. Type: - string (or Expression with resultType string). - :type file_name: object - :param format: The format of the Azure Data Lake Storage Gen2 storage. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used for the blob storage. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, **kwargs): - super(AzureBlobFSDataset, self).__init__(**kwargs) - self.folder_path = kwargs.get('folder_path', None) - self.file_name = kwargs.get('file_name', None) - self.format = kwargs.get('format', None) - self.compression = kwargs.get('compression', None) - self.type = 'AzureBlobFSFile' - - -class AzureBlobFSLinkedService(LinkedService): - """Azure Data Lake Storage Gen2 linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param url: Required. Endpoint for the Azure Data Lake Storage Gen2 - service. Type: string (or Expression with resultType string). - :type url: object - :param account_key: Account key for the Azure Data Lake Storage Gen2 - service. Type: string (or Expression with resultType string). - :type account_key: object - :param service_principal_id: The ID of the application used to - authenticate against the Azure Data Lake Storage Gen2 account. Type: - string (or Expression with resultType string). 
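An analogous sketch for the ADLS Gen2 dataset this change introduces (placeholders again):

from azure.mgmt.datafactory.models import (
    AzureBlobFSDataset, LinkedServiceReference)

adls_ds = AzureBlobFSDataset(
    linked_service_name=LinkedServiceReference(reference_name='AdlsGen2LS'),
    folder_path='filesystem/curated',
    file_name='part-0000.avro',
)
# __init__ pins the discriminator, so adls_ds.type == 'AzureBlobFSFile'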
- :type service_principal_id: object - :param service_principal_key: The Key of the application used to - authenticate against the Azure Data Lake Storage Gen2 account. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal - belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureBlobFSLinkedService, self).__init__(**kwargs) - self.url = kwargs.get('url', None) - self.account_key = kwargs.get('account_key', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AzureBlobFS' - - -class AzureBlobFSLocation(DatasetLocation): - """The location of azure blobFS dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). - :type file_name: object - :param file_system: Specify the fileSystem of azure blobFS. Type: string - (or Expression with resultType string). - :type file_system: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - 'file_system': {'key': 'fileSystem', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureBlobFSLocation, self).__init__(**kwargs) - self.file_system = kwargs.get('file_system', None) - - -class AzureBlobFSReadSettings(StoreReadSettings): - """Azure blobFS read settings. 
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - :param wildcard_folder_path: Azure blobFS wildcardFolderPath. Type: string - (or Expression with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or - Expression with resultType string). - :type wildcard_file_name: object - :param enable_partition_discovery: Indicates whether to enable partition - discovery. - :type enable_partition_discovery: bool - :param modified_datetime_start: The start of file's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: - string (or Expression with resultType string). - :type modified_datetime_end: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureBlobFSReadSettings, self).__init__(**kwargs) - self.recursive = kwargs.get('recursive', None) - self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) - self.wildcard_file_name = kwargs.get('wildcard_file_name', None) - self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) - - -class AzureBlobFSSink(CopySink): - """A copy activity Azure Data Lake Storage Gen2 sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. 
Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureBlobFSSink, self).__init__(**kwargs) - self.copy_behavior = kwargs.get('copy_behavior', None) - self.type = 'AzureBlobFSSink' - - -class AzureBlobFSSource(CopySource): - """A copy activity Azure BlobFS source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param treat_empty_as_null: Treat empty as null. Type: boolean (or - Expression with resultType boolean). - :type treat_empty_as_null: object - :param skip_header_line_count: Number of header lines to skip from each - blob. Type: integer (or Expression with resultType integer). - :type skip_header_line_count: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). 
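A sketch of the Gen2 sink above; 'PreserveHierarchy' is shown as an illustrative copy-behavior value:

from azure.mgmt.datafactory.models import AzureBlobFSSink

sink = AzureBlobFSSink(
    copy_behavior='PreserveHierarchy',
    write_batch_timeout='00:05:00',   # matches the documented timespan pattern
    max_concurrent_connections=4,
)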
- :type recursive: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, - 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureBlobFSSource, self).__init__(**kwargs) - self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None) - self.skip_header_line_count = kwargs.get('skip_header_line_count', None) - self.recursive = kwargs.get('recursive', None) - self.type = 'AzureBlobFSSource' - - -class StoreWriteSettings(Model): - """Connector write settings. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: FileServerWriteSettings, AzureDataLakeStoreWriteSettings, - AzureBlobFSWriteSettings, AzureBlobStorageWriteSettings - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'FileServerWriteSettings': 'FileServerWriteSettings', 'AzureDataLakeStoreWriteSettings': 'AzureDataLakeStoreWriteSettings', 'AzureBlobFSWriteSettings': 'AzureBlobFSWriteSettings', 'AzureBlobStorageWriteSettings': 'AzureBlobStorageWriteSettings'} - } - - def __init__(self, **kwargs): - super(StoreWriteSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) - self.copy_behavior = kwargs.get('copy_behavior', None) - self.type = None - - -class AzureBlobFSWriteSettings(StoreWriteSettings): - """Azure blobFS write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - :param type: Required. Constant filled by server. 
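As with CopySink, StoreWriteSettings dispatches on the 'type' discriminator; a conceptual sketch against the mapping above:

from azure.mgmt.datafactory.models import StoreWriteSettings

payload = {'type': 'AzureBlobFSWriteSettings',
           'copyBehavior': 'FlattenHierarchy'}
subclass_name = StoreWriteSettings._subtype_map['type'][payload['type']]
assert subclass_name == 'AzureBlobFSWriteSettings'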
-    :type type: str
-    :param block_size_in_mb: Indicates the block size (MB) when writing data
-     to blob. Type: integer (or Expression with resultType integer).
-    :type block_size_in_mb: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(AzureBlobFSWriteSettings, self).__init__(**kwargs)
-        self.block_size_in_mb = kwargs.get('block_size_in_mb', None)
-        self.type = 'AzureBlobFSWriteSettings'
-
-
-class AzureBlobStorageLinkedService(LinkedService):
-    """The Azure Blob Storage linked service.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param connection_string: The connection string. It is mutually
-     exclusive with the sasUri and serviceEndpoint properties. Type: string,
-     SecureString or AzureKeyVaultSecretReference.
-    :type connection_string: object
-    :param account_key: The Azure key vault secret reference of accountKey
-     in the connection string.
-    :type account_key:
-     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
-    :param sas_uri: SAS URI of the Azure Blob Storage resource. It is
-     mutually exclusive with the connectionString and serviceEndpoint
-     properties. Type: string, SecureString or AzureKeyVaultSecretReference.
-    :type sas_uri: object
-    :param sas_token: The Azure key vault secret reference of sasToken in
-     the SAS URI.
-    :type sas_token:
-     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
-    :param service_endpoint: Blob service endpoint of the Azure Blob Storage
-     resource. It is mutually exclusive with the connectionString and sasUri
-     properties.
-    :type service_endpoint: str
-    :param service_principal_id: The ID of the service principal used to
-     authenticate against Azure Blob Storage. Type: string (or Expression
-     with resultType string).
-    :type service_principal_id: object
-    :param service_principal_key: The key of the service principal used to
-     authenticate against Azure Blob Storage.
-    :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
-    :param tenant: The name or ID of the tenant to which the service
-     principal belongs. Type: string (or Expression with resultType string).
-    :type tenant: object
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
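Because connectionString, sasUri and serviceEndpoint are mutually exclusive, exactly one should be set; a sketch of the service-endpoint plus service-principal route, with placeholder identifiers and SecureString assumed as before:

from azure.mgmt.datafactory.models import (
    AzureBlobStorageLinkedService, SecureString)

blob_ls = AzureBlobStorageLinkedService(
    service_endpoint='https://myaccount.blob.core.windows.net',
    service_principal_id='<application-id>',
    service_principal_key=SecureString(value='<application-secret>'),
    tenant='<tenant-id>',
)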
- :type encrypted_credential: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, - 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, - 'service_endpoint': {'key': 'typeProperties.serviceEndpoint', 'type': 'str'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(AzureBlobStorageLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.account_key = kwargs.get('account_key', None) - self.sas_uri = kwargs.get('sas_uri', None) - self.sas_token = kwargs.get('sas_token', None) - self.service_endpoint = kwargs.get('service_endpoint', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AzureBlobStorage' - - -class AzureBlobStorageLocation(DatasetLocation): - """The location of azure blob dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). - :type file_name: object - :param container: Specify the container of azure blob. Type: string (or - Expression with resultType string). - :type container: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - 'container': {'key': 'container', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureBlobStorageLocation, self).__init__(**kwargs) - self.container = kwargs.get('container', None) - - -class AzureBlobStorageReadSettings(StoreReadSettings): - """Azure blob read settings. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - :param wildcard_folder_path: Azure blob wildcardFolderPath. Type: string - (or Expression with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: Azure blob wildcardFileName. Type: string (or - Expression with resultType string). - :type wildcard_file_name: object - :param enable_partition_discovery: Indicates whether to enable partition - discovery. - :type enable_partition_discovery: bool - :param modified_datetime_start: The start of file's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: - string (or Expression with resultType string). - :type modified_datetime_end: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureBlobStorageReadSettings, self).__init__(**kwargs) - self.recursive = kwargs.get('recursive', None) - self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) - self.wildcard_file_name = kwargs.get('wildcard_file_name', None) - self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) - - -class AzureBlobStorageWriteSettings(StoreWriteSettings): - """Azure blob write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - :param type: Required. Constant filled by server. - :type type: str - :param block_size_in_mb: Indicates the block size(MB) when writing data to - blob. Type: integer (or Expression with resultType integer). 
-    :type block_size_in_mb: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(AzureBlobStorageWriteSettings, self).__init__(**kwargs)
-        self.block_size_in_mb = kwargs.get('block_size_in_mb', None)
-        self.type = 'AzureBlobStorageWriteSettings'
-
-
-class AzureDatabricksLinkedService(LinkedService):
-    """Azure Databricks linked service.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param domain: Required. <REGION>.azuredatabricks.net, domain name of your
-     Databricks deployment. Type: string (or Expression with resultType
-     string).
-    :type domain: object
-    :param access_token: Required. Access token for databricks REST API. Refer
-     to https://docs.azuredatabricks.net/api/latest/authentication.html. Type:
-     string (or Expression with resultType string).
-    :type access_token: ~azure.mgmt.datafactory.models.SecretBase
-    :param existing_cluster_id: The id of an existing cluster that will be
-     used for all runs of this job. Type: string (or Expression with resultType
-     string).
-    :type existing_cluster_id: object
-    :param new_cluster_version: The Spark version of new cluster. Type: string
-     (or Expression with resultType string).
-    :type new_cluster_version: object
-    :param new_cluster_num_of_worker: Number of worker nodes that new cluster
-     should have. A string formatted Int32, like '1' means numOfWorker is 1 or
-     '1:10' means auto-scale from 1 as min and 10 as max. Type: string (or
-     Expression with resultType string).
-    :type new_cluster_num_of_worker: object
-    :param new_cluster_node_type: The node types of new cluster. Type: string
-     (or Expression with resultType string).
-    :type new_cluster_node_type: object
-    :param new_cluster_spark_conf: A set of optional, user-specified Spark
-     configuration key-value pairs.
-    :type new_cluster_spark_conf: dict[str, object]
-    :param new_cluster_spark_env_vars: A set of optional, user-specified Spark
-     environment variables key-value pairs.
-    :type new_cluster_spark_env_vars: dict[str, object]
-    :param new_cluster_custom_tags: Additional tags for cluster resources.
-    :type new_cluster_custom_tags: dict[str, object]
-    :param new_cluster_driver_node_type: The driver node type for the new
-     cluster. Type: string (or Expression with resultType string).
-    :type new_cluster_driver_node_type: object
-    :param new_cluster_init_scripts: User-defined initialization scripts for
-     the new cluster.
Type: array of strings (or Expression with resultType - array of strings). - :type new_cluster_init_scripts: object - :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new - cluster. Type: boolean (or Expression with resultType boolean). - :type new_cluster_enable_elastic_disk: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'domain': {'required': True}, - 'access_token': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, - 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, - 'new_cluster_num_of_worker': {'key': 'typeProperties.newClusterNumOfWorker', 'type': 'object'}, - 'new_cluster_node_type': {'key': 'typeProperties.newClusterNodeType', 'type': 'object'}, - 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, - 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, - 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, - 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, - 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, - 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureDatabricksLinkedService, self).__init__(**kwargs) - self.domain = kwargs.get('domain', None) - self.access_token = kwargs.get('access_token', None) - self.existing_cluster_id = kwargs.get('existing_cluster_id', None) - self.new_cluster_version = kwargs.get('new_cluster_version', None) - self.new_cluster_num_of_worker = kwargs.get('new_cluster_num_of_worker', None) - self.new_cluster_node_type = kwargs.get('new_cluster_node_type', None) - self.new_cluster_spark_conf = kwargs.get('new_cluster_spark_conf', None) - self.new_cluster_spark_env_vars = kwargs.get('new_cluster_spark_env_vars', None) - self.new_cluster_custom_tags = kwargs.get('new_cluster_custom_tags', None) - self.new_cluster_driver_node_type = kwargs.get('new_cluster_driver_node_type', None) - self.new_cluster_init_scripts = kwargs.get('new_cluster_init_scripts', None) - self.new_cluster_enable_elastic_disk = kwargs.get('new_cluster_enable_elastic_disk', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AzureDatabricks' - - -class ExecutionActivity(Activity): - """Base class for all execution activities. - - You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AzureFunctionActivity, DatabricksSparkPythonActivity, - DatabricksSparkJarActivity, DatabricksNotebookActivity, - DataLakeAnalyticsUSQLActivity, AzureMLUpdateResourceActivity, - AzureMLBatchExecutionActivity, GetMetadataActivity, WebActivity, - LookupActivity, AzureDataExplorerCommandActivity, DeleteActivity, - SqlServerStoredProcedureActivity, CustomActivity, - ExecuteSSISPackageActivity, HDInsightSparkActivity, - HDInsightStreamingActivity, HDInsightMapReduceActivity, - HDInsightPigActivity, HDInsightHiveActivity, CopyActivity - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - } - - _subtype_map = { - 'type': {'AzureFunctionActivity': 'AzureFunctionActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'GetMetadata': 'GetMetadataActivity', 'WebActivity': 'WebActivity', 'Lookup': 'LookupActivity', 'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'Delete': 'DeleteActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'Custom': 'CustomActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'Copy': 'CopyActivity'} - } - - def __init__(self, **kwargs): - super(ExecutionActivity, self).__init__(**kwargs) - self.linked_service_name = kwargs.get('linked_service_name', None) - self.policy = kwargs.get('policy', None) - self.type = 'Execution' - - -class AzureDataExplorerCommandActivity(ExecutionActivity): - """Azure Data Explorer command activity. - - All required parameters must be populated in order to send to Azure. 
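The _subtype_map above is what msrest uses to pick a concrete activity class: the 'type' discriminator in the payload is looked up through the flattened subtype maps of Activity and ExecutionActivity. A minimal sketch of that resolution, assuming msrest's Deserializer is handed a plain dict (the payload values here are illustrative):

    from msrest import Deserializer

    from azure.mgmt.datafactory import models

    # Build the class registry the same way the generated client does.
    client_models = {k: v for k, v in vars(models).items() if isinstance(v, type)}
    deserialize = Deserializer(client_models)

    payload = {'name': 'LookupConfig', 'type': 'Lookup'}  # discriminator value
    activity = deserialize('Activity', payload)
    # 'Lookup' resolves to LookupActivity via the flattened _subtype_map.
    assert isinstance(activity, models.LookupActivity)

Note that required-field validation runs on serialization, not deserialization, which is why the sparse payload above still classifies cleanly.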
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param name: Required. Activity name.
-    :type name: str
-    :param description: Activity description.
-    :type description: str
-    :param depends_on: Activity depends on condition.
-    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
-    :param user_properties: Activity user properties.
-    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param linked_service_name: Linked service reference.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param policy: Activity policy.
-    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
-    :param command: Required. A control command, according to the Azure Data
-     Explorer command syntax. Type: string (or Expression with resultType
-     string).
-    :type command: object
-    :param command_timeout: Control command timeout. Type: string (or
-     Expression with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type command_timeout: object
-    """
-
-    _validation = {
-        'name': {'required': True},
-        'type': {'required': True},
-        'command': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'name': {'key': 'name', 'type': 'str'},
-        'description': {'key': 'description', 'type': 'str'},
-        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
-        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
-        'command': {'key': 'typeProperties.command', 'type': 'object'},
-        'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(AzureDataExplorerCommandActivity, self).__init__(**kwargs)
-        self.command = kwargs.get('command', None)
-        self.command_timeout = kwargs.get('command_timeout', None)
-        self.type = 'AzureDataExplorerCommand'
-
-
-class AzureDataExplorerLinkedService(LinkedService):
-    """Azure Data Explorer (Kusto) linked service.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param endpoint: Required. The endpoint of Azure Data Explorer (the
-     engine's endpoint). URL will be in the format
-     https://<clusterName>.<regionName>.kusto.windows.net. Type: string (or
-     Expression with resultType string).
-    :type endpoint: object
-    :param service_principal_id: Required. The ID of the service principal
-     used to authenticate against Azure Data Explorer.
Type: string (or - Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: Required. The key of the service principal - used to authenticate against Kusto. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param database: Required. Database name for connection. Type: string (or - Expression with resultType string). - :type database: object - :param tenant: Required. The name or ID of the tenant to which the service - principal belongs. Type: string (or Expression with resultType string). - :type tenant: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, - 'database': {'required': True}, - 'tenant': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureDataExplorerLinkedService, self).__init__(**kwargs) - self.endpoint = kwargs.get('endpoint', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.database = kwargs.get('database', None) - self.tenant = kwargs.get('tenant', None) - self.type = 'AzureDataExplorer' - - -class AzureDataExplorerSink(CopySink): - """A copy activity Azure Data Explorer sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param ingestion_mapping_name: A name of a pre-created csv mapping that - was defined on the target Kusto table. Type: string. 
- :type ingestion_mapping_name: object - :param ingestion_mapping_as_json: An explicit column mapping description - provided in a json format. Type: string. - :type ingestion_mapping_as_json: object - :param flush_immediately: If set to true, any aggregation will be skipped. - Default is false. Type: boolean. - :type flush_immediately: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'}, - 'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'}, - 'flush_immediately': {'key': 'flushImmediately', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureDataExplorerSink, self).__init__(**kwargs) - self.ingestion_mapping_name = kwargs.get('ingestion_mapping_name', None) - self.ingestion_mapping_as_json = kwargs.get('ingestion_mapping_as_json', None) - self.flush_immediately = kwargs.get('flush_immediately', None) - self.type = 'AzureDataExplorerSink' - - -class AzureDataExplorerSource(CopySource): - """A copy activity Azure Data Explorer (Kusto) source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Required. Database query. Should be a Kusto Query Language - (KQL) query. Type: string (or Expression with resultType string). - :type query: object - :param no_truncation: The name of the Boolean option that controls whether - truncation is applied to result-sets that go beyond a certain row-count - limit. - :type no_truncation: object - :param query_timeout: Query timeout. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. 
- :type query_timeout: object - """ - - _validation = { - 'type': {'required': True}, - 'query': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'no_truncation': {'key': 'noTruncation', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureDataExplorerSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.no_truncation = kwargs.get('no_truncation', None) - self.query_timeout = kwargs.get('query_timeout', None) - self.type = 'AzureDataExplorerSource' - - -class AzureDataExplorerTableDataset(Dataset): - """The Azure Data Explorer (Kusto) dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table: The table name of the Azure Data Explorer database. Type: - string (or Expression with resultType string). - :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureDataExplorerTableDataset, self).__init__(**kwargs) - self.table = kwargs.get('table', None) - self.type = 'AzureDataExplorerTable' - - -class AzureDataLakeAnalyticsLinkedService(LinkedService): - """Azure Data Lake Analytics linked service. 
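Taken together, the Kusto pieces just defined compose like any other connector: the linked service carries the credentials, the table dataset binds to it by reference, and the source holds the KQL query. A hedged sketch with placeholder cluster, database and query values:

    from azure.mgmt.datafactory.models import (
        AzureDataExplorerLinkedService,
        AzureDataExplorerSource,
        AzureDataExplorerTableDataset,
        LinkedServiceReference,
        SecureString,
    )

    kusto_ls = AzureDataExplorerLinkedService(
        endpoint='https://mycluster.westeurope.kusto.windows.net',    # placeholder
        service_principal_id='00000000-0000-0000-0000-000000000000',  # placeholder
        service_principal_key=SecureString(value='REDACTED'),
        database='TelemetryDb',            # placeholder
        tenant='contoso.onmicrosoft.com',  # placeholder
    )
    events = AzureDataExplorerTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='KustoLinkedService'),
        table='StormEvents',  # placeholder
    )
    source = AzureDataExplorerSource(
        query='StormEvents | take 100',  # KQL, per the required query property
        query_timeout='00:10:00',        # matches the timespan timeout pattern
    )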
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param account_name: Required. The Azure Data Lake Analytics account name. - Type: string (or Expression with resultType string). - :type account_name: object - :param service_principal_id: The ID of the application used to - authenticate against the Azure Data Lake Analytics account. Type: string - (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The Key of the application used to - authenticate against the Azure Data Lake Analytics account. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: Required. The name or ID of the tenant to which the service - principal belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param subscription_id: Data Lake Analytics account subscription ID (if - different from Data Factory account). Type: string (or Expression with - resultType string). - :type subscription_id: object - :param resource_group_name: Data Lake Analytics account resource group - name (if different from Data Factory account). Type: string (or Expression - with resultType string). - :type resource_group_name: object - :param data_lake_analytics_uri: Azure Data Lake Analytics URI Type: string - (or Expression with resultType string). - :type data_lake_analytics_uri: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'account_name': {'required': True}, - 'tenant': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, - 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, - 'data_lake_analytics_uri': {'key': 'typeProperties.dataLakeAnalyticsUri', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureDataLakeAnalyticsLinkedService, self).__init__(**kwargs) - self.account_name = kwargs.get('account_name', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.subscription_id = kwargs.get('subscription_id', None) - self.resource_group_name = kwargs.get('resource_group_name', None) - self.data_lake_analytics_uri = kwargs.get('data_lake_analytics_uri', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AzureDataLakeAnalytics' - - -class AzureDataLakeStoreDataset(Dataset): - """Azure Data Lake Store dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param folder_path: Path to the folder in the Azure Data Lake Store. Type: - string (or Expression with resultType string). - :type folder_path: object - :param file_name: The name of the file in the Azure Data Lake Store. 
Type: - string (or Expression with resultType string). - :type file_name: object - :param format: The format of the Data Lake Store. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used for the item(s) in - the Azure Data Lake Store. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, **kwargs): - super(AzureDataLakeStoreDataset, self).__init__(**kwargs) - self.folder_path = kwargs.get('folder_path', None) - self.file_name = kwargs.get('file_name', None) - self.format = kwargs.get('format', None) - self.compression = kwargs.get('compression', None) - self.type = 'AzureDataLakeStoreFile' - - -class AzureDataLakeStoreLinkedService(LinkedService): - """Azure Data Lake Store linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param data_lake_store_uri: Required. Data Lake Store service URI. Type: - string (or Expression with resultType string). - :type data_lake_store_uri: object - :param service_principal_id: The ID of the application used to - authenticate against the Azure Data Lake Store account. Type: string (or - Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The Key of the application used to - authenticate against the Azure Data Lake Store account. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal - belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param account_name: Data Lake Store account name. Type: string (or - Expression with resultType string). - :type account_name: object - :param subscription_id: Data Lake Store account subscription ID (if - different from Data Factory account). 
Type: string (or Expression with - resultType string). - :type subscription_id: object - :param resource_group_name: Data Lake Store account resource group name - (if different from Data Factory account). Type: string (or Expression with - resultType string). - :type resource_group_name: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'data_lake_store_uri': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'data_lake_store_uri': {'key': 'typeProperties.dataLakeStoreUri', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, - 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, - 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureDataLakeStoreLinkedService, self).__init__(**kwargs) - self.data_lake_store_uri = kwargs.get('data_lake_store_uri', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.account_name = kwargs.get('account_name', None) - self.subscription_id = kwargs.get('subscription_id', None) - self.resource_group_name = kwargs.get('resource_group_name', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AzureDataLakeStore' - - -class AzureDataLakeStoreLocation(DatasetLocation): - """The location of azure data lake store dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). 
- :type file_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureDataLakeStoreLocation, self).__init__(**kwargs) - - -class AzureDataLakeStoreReadSettings(StoreReadSettings): - """Azure data lake store read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - :param wildcard_folder_path: ADLS wildcardFolderPath. Type: string (or - Expression with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: ADLS wildcardFileName. Type: string (or - Expression with resultType string). - :type wildcard_file_name: object - :param enable_partition_discovery: Indicates whether to enable partition - discovery. - :type enable_partition_discovery: bool - :param modified_datetime_start: The start of file's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: - string (or Expression with resultType string). - :type modified_datetime_end: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureDataLakeStoreReadSettings, self).__init__(**kwargs) - self.recursive = kwargs.get('recursive', None) - self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) - self.wildcard_file_name = kwargs.get('wildcard_file_name', None) - self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) - - -class AzureDataLakeStoreSink(CopySink): - """A copy activity Azure Data Lake Store sink. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - :param enable_adls_single_file_parallel: Single File Parallel. - :type enable_adls_single_file_parallel: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureDataLakeStoreSink, self).__init__(**kwargs) - self.copy_behavior = kwargs.get('copy_behavior', None) - self.enable_adls_single_file_parallel = kwargs.get('enable_adls_single_file_parallel', None) - self.type = 'AzureDataLakeStoreSink' - - -class AzureDataLakeStoreSource(CopySource): - """A copy activity Azure Data Lake source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). 
- :type recursive: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureDataLakeStoreSource, self).__init__(**kwargs) - self.recursive = kwargs.get('recursive', None) - self.type = 'AzureDataLakeStoreSource' - - -class AzureDataLakeStoreWriteSettings(StoreWriteSettings): - """Azure data lake store write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(AzureDataLakeStoreWriteSettings, self).__init__(**kwargs) - self.type = 'AzureDataLakeStoreWriteSettings' - - -class AzureFunctionActivity(ExecutionActivity): - """Azure Function activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param method: Required. Rest API method for target endpoint. Possible - values include: 'GET', 'POST', 'PUT', 'DELETE', 'OPTIONS', 'HEAD', 'TRACE' - :type method: str or - ~azure.mgmt.datafactory.models.AzureFunctionActivityMethod - :param function_name: Required. Name of the Function that the Azure - Function Activity will call. Type: string (or Expression with resultType - string) - :type function_name: object - :param headers: Represents the headers that will be sent to the request. - For example, to set the language and type on a request: "headers" : { - "Accept-Language": "en-us", "Content-Type": "application/json" }. 
Type:
-     string (or Expression with resultType string).
-    :type headers: object
-    :param body: Represents the payload that will be sent to the endpoint.
-     Required for POST/PUT method, not allowed for GET method. Type: string (or
-     Expression with resultType string).
-    :type body: object
-    """
-
-    _validation = {
-        'name': {'required': True},
-        'type': {'required': True},
-        'method': {'required': True},
-        'function_name': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'name': {'key': 'name', 'type': 'str'},
-        'description': {'key': 'description', 'type': 'str'},
-        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
-        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
-        'method': {'key': 'typeProperties.method', 'type': 'str'},
-        'function_name': {'key': 'typeProperties.functionName', 'type': 'object'},
-        'headers': {'key': 'typeProperties.headers', 'type': 'object'},
-        'body': {'key': 'typeProperties.body', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(AzureFunctionActivity, self).__init__(**kwargs)
-        self.method = kwargs.get('method', None)
-        self.function_name = kwargs.get('function_name', None)
-        self.headers = kwargs.get('headers', None)
-        self.body = kwargs.get('body', None)
-        self.type = 'AzureFunctionActivity'
-
-
-class AzureFunctionLinkedService(LinkedService):
-    """Azure Function linked service.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param function_app_url: Required. The endpoint of the Azure Function App.
-     URL will be in the format https://<accountName>.azurewebsites.net.
-    :type function_app_url: object
-    :param function_key: Function or Host key for Azure Function App.
-    :type function_key: ~azure.mgmt.datafactory.models.SecretBase
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'function_app_url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'function_app_url': {'key': 'typeProperties.functionAppUrl', 'type': 'object'}, - 'function_key': {'key': 'typeProperties.functionKey', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureFunctionLinkedService, self).__init__(**kwargs) - self.function_app_url = kwargs.get('function_app_url', None) - self.function_key = kwargs.get('function_key', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AzureFunction' - - -class AzureKeyVaultLinkedService(LinkedService): - """Azure Key Vault linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param base_url: Required. The base URL of the Azure Key Vault. e.g. - https://myakv.vault.azure.net Type: string (or Expression with resultType - string). - :type base_url: object - """ - - _validation = { - 'type': {'required': True}, - 'base_url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'base_url': {'key': 'typeProperties.baseUrl', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureKeyVaultLinkedService, self).__init__(**kwargs) - self.base_url = kwargs.get('base_url', None) - self.type = 'AzureKeyVault' - - -class SecretBase(Model): - """The base definition of a secret type. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SecureString, AzureKeyVaultSecretReference - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. 
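AzureKeyVaultSecretReference, defined next, is the SecretBase subtype behind the many "Azure key vault secret reference" properties in this patch; its 'AzureKeyVaultSecret' discriminator is resolved through the _subtype_map shown below. A brief sketch wiring a Key Vault secret into the AzureMariaDB linked service that follows (vault, server and secret names are placeholders):

    from azure.mgmt.datafactory.models import (
        AzureKeyVaultLinkedService,
        AzureKeyVaultSecretReference,
        AzureMariaDBLinkedService,
        LinkedServiceReference,
    )

    akv_ls = AzureKeyVaultLinkedService(base_url='https://myakv.vault.azure.net')
    db_password = AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='AkvLinkedService'),
        secret_name='mariadb-password',  # placeholder secret name
    )
    mariadb_ls = AzureMariaDBLinkedService(
        connection_string='Server=myserver;Database=mydb;Uid=admin',  # placeholder, no password
        pwd=db_password,  # resolved from Key Vault at runtime
    )

Keeping the password out of connection_string and in pwd means only a reference to the vault secret is ever stored in the factory definition.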
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SecureString': 'SecureString', 'AzureKeyVaultSecret': 'AzureKeyVaultSecretReference'} - } - - def __init__(self, **kwargs): - super(SecretBase, self).__init__(**kwargs) - self.type = None - - -class AzureKeyVaultSecretReference(SecretBase): - """Azure Key Vault secret reference. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. - :type type: str - :param store: Required. The Azure Key Vault linked service reference. - :type store: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param secret_name: Required. The name of the secret in Azure Key Vault. - Type: string (or Expression with resultType string). - :type secret_name: object - :param secret_version: The version of the secret in Azure Key Vault. The - default value is the latest version of the secret. Type: string (or - Expression with resultType string). - :type secret_version: object - """ - - _validation = { - 'type': {'required': True}, - 'store': {'required': True}, - 'secret_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'store': {'key': 'store', 'type': 'LinkedServiceReference'}, - 'secret_name': {'key': 'secretName', 'type': 'object'}, - 'secret_version': {'key': 'secretVersion', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureKeyVaultSecretReference, self).__init__(**kwargs) - self.store = kwargs.get('store', None) - self.secret_name = kwargs.get('secret_name', None) - self.secret_version = kwargs.get('secret_version', None) - self.type = 'AzureKeyVaultSecret' - - -class AzureMariaDBLinkedService(LinkedService): - """Azure Database for MariaDB linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection - string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureMariaDBLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.pwd = kwargs.get('pwd', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AzureMariaDB' - - -class AzureMariaDBSource(CopySource): - """A copy activity Azure MariaDB source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureMariaDBSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'AzureMariaDBSource' - - -class AzureMariaDBTableDataset(Dataset): - """Azure Database for MariaDB dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. 
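A sketch (query and values are hypothetical) of the AzureMariaDBSource defined above, including the retry knobs it inherits from CopySource; note that source_retry_wait must match the documented timespan pattern, e.g. '00:00:30':

from azure.mgmt.datafactory.models import AzureMariaDBSource

source = AzureMariaDBSource(
    query='SELECT * FROM sales LIMIT 100',  # hypothetical query
    source_retry_count=3,
    source_retry_wait='00:00:30',  # matches ((\d+)\.)?(\d\d):mm:ss pattern
    max_concurrent_connections=4,
)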
Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureMariaDBTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'AzureMariaDBTable' - - -class AzureMLBatchExecutionActivity(ExecutionActivity): - """Azure ML Batch Execution activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param global_parameters: Key,Value pairs to be passed to the Azure ML - Batch Execution Service endpoint. Keys must match the names of web service - parameters defined in the published Azure ML web service. Values will be - passed in the GlobalParameters property of the Azure ML batch execution - request. - :type global_parameters: dict[str, object] - :param web_service_outputs: Key,Value pairs, mapping the names of Azure ML - endpoint's Web Service Outputs to AzureMLWebServiceFile objects specifying - the output Blob locations. This information will be passed in the - WebServiceOutputs property of the Azure ML batch execution request. 
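Worth noting for readers of the `_attribute_map`s above: dotted keys such as 'typeProperties.tableName' instruct msrest to nest that field under typeProperties in the REST payload. A sketch, assuming the Model.serialize() helper from msrest is available in the installed version; names are hypothetical:

from azure.mgmt.datafactory.models import (
    AzureMariaDBTableDataset,
    LinkedServiceReference,
)

ds = AzureMariaDBTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='AzureMariaDB1'),
    table_name='inventory',  # hypothetical table
)
body = ds.serialize()
# If serialize() behaves as expected, body['typeProperties']['tableName'] is
# 'inventory': the dotted key in _attribute_map produced the nested object.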
- :type web_service_outputs: dict[str,
- ~azure.mgmt.datafactory.models.AzureMLWebServiceFile]
- :param web_service_inputs: Key,Value pairs, mapping the names of Azure ML
- endpoint's Web Service Inputs to AzureMLWebServiceFile objects specifying
- the input Blob locations. This information will be passed in the
- WebServiceInputs property of the Azure ML batch execution request.
- :type web_service_inputs: dict[str,
- ~azure.mgmt.datafactory.models.AzureMLWebServiceFile]
- """
-
- _validation = {
- 'name': {'required': True},
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'name': {'key': 'name', 'type': 'str'},
- 'description': {'key': 'description', 'type': 'str'},
- 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
- 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
- 'global_parameters': {'key': 'typeProperties.globalParameters', 'type': '{object}'},
- 'web_service_outputs': {'key': 'typeProperties.webServiceOutputs', 'type': '{AzureMLWebServiceFile}'},
- 'web_service_inputs': {'key': 'typeProperties.webServiceInputs', 'type': '{AzureMLWebServiceFile}'},
- }
-
- def __init__(self, **kwargs):
- super(AzureMLBatchExecutionActivity, self).__init__(**kwargs)
- self.global_parameters = kwargs.get('global_parameters', None)
- self.web_service_outputs = kwargs.get('web_service_outputs', None)
- self.web_service_inputs = kwargs.get('web_service_inputs', None)
- self.type = 'AzureMLBatchExecution'
-
-
-class AzureMLLinkedService(LinkedService):
- """Azure ML Web Service linked service.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param connect_via: The integration runtime reference.
- :type connect_via:
- ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
- :param description: Linked service description.
- :type description: str
- :param parameters: Parameters for linked service.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- linked service.
- :type annotations: list[object]
- :param type: Required. Constant filled by server.
- :type type: str
- :param ml_endpoint: Required. The Batch Execution REST URL for an Azure ML
- Web Service endpoint. Type: string (or Expression with resultType string).
- :type ml_endpoint: object
- :param api_key: Required. The API key for accessing the Azure ML model
- endpoint.
- :type api_key: ~azure.mgmt.datafactory.models.SecretBase
- :param update_resource_endpoint: The Update Resource REST URL for an Azure
- ML Web Service endpoint. Type: string (or Expression with resultType
- string).
- :type update_resource_endpoint: object
- :param service_principal_id: The ID of the service principal used to
- authenticate against the ARM-based updateResourceEndpoint of an Azure ML
- web service. Type: string (or Expression with resultType string).
- :type service_principal_id: object
- :param service_principal_key: The key of the service principal used to
- authenticate against the ARM-based updateResourceEndpoint of an Azure ML
- web service.
- :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal - belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'ml_endpoint': {'required': True}, - 'api_key': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'ml_endpoint': {'key': 'typeProperties.mlEndpoint', 'type': 'object'}, - 'api_key': {'key': 'typeProperties.apiKey', 'type': 'SecretBase'}, - 'update_resource_endpoint': {'key': 'typeProperties.updateResourceEndpoint', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureMLLinkedService, self).__init__(**kwargs) - self.ml_endpoint = kwargs.get('ml_endpoint', None) - self.api_key = kwargs.get('api_key', None) - self.update_resource_endpoint = kwargs.get('update_resource_endpoint', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AzureML' - - -class AzureMLUpdateResourceActivity(ExecutionActivity): - """Azure ML Update Resource management activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param trained_model_name: Required. Name of the Trained Model module in - the Web Service experiment to be updated. Type: string (or Expression with - resultType string). - :type trained_model_name: object - :param trained_model_linked_service_name: Required. Name of Azure Storage - linked service holding the .ilearner file that will be uploaded by the - update operation. 
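A sketch of the AzureMLLinkedService above, pairing the required api_key (any SecretBase; here a SecureString) with the optional service-principal fields that only matter when the ARM-based updateResourceEndpoint is used. Every endpoint and identifier below is a placeholder:

from azure.mgmt.datafactory.models import AzureMLLinkedService, SecureString

aml_ls = AzureMLLinkedService(
    ml_endpoint='https://<region>.services.azureml.net/workspaces/<id>/jobs',
    api_key=SecureString(value='<api-key>'),
    # Only needed when calling the ARM-based updateResourceEndpoint:
    update_resource_endpoint='https://management.azure.com/<resource-path>',
    service_principal_id='<sp-app-id>',
    service_principal_key=SecureString(value='<sp-key>'),
    tenant='<tenant-id>',
)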
- :type trained_model_linked_service_name:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param trained_model_file_path: Required. The relative file path in
- trainedModelLinkedService to represent the .ilearner file that will be
- uploaded by the update operation. Type: string (or Expression with
- resultType string).
- :type trained_model_file_path: object
- """
-
- _validation = {
- 'name': {'required': True},
- 'type': {'required': True},
- 'trained_model_name': {'required': True},
- 'trained_model_linked_service_name': {'required': True},
- 'trained_model_file_path': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'name': {'key': 'name', 'type': 'str'},
- 'description': {'key': 'description', 'type': 'str'},
- 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
- 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
- 'trained_model_name': {'key': 'typeProperties.trainedModelName', 'type': 'object'},
- 'trained_model_linked_service_name': {'key': 'typeProperties.trainedModelLinkedServiceName', 'type': 'LinkedServiceReference'},
- 'trained_model_file_path': {'key': 'typeProperties.trainedModelFilePath', 'type': 'object'},
- }
-
- def __init__(self, **kwargs):
- super(AzureMLUpdateResourceActivity, self).__init__(**kwargs)
- self.trained_model_name = kwargs.get('trained_model_name', None)
- self.trained_model_linked_service_name = kwargs.get('trained_model_linked_service_name', None)
- self.trained_model_file_path = kwargs.get('trained_model_file_path', None)
- self.type = 'AzureMLUpdateResource'
-
-
-class AzureMLWebServiceFile(Model):
- """Azure ML WebService Input/Output file.
-
- All required parameters must be populated in order to send to Azure.
-
- :param file_path: Required. The relative file path, including container
- name, in the Azure Blob Storage specified by the LinkedService. Type:
- string (or Expression with resultType string).
- :type file_path: object
- :param linked_service_name: Required. Reference to an Azure Storage
- LinkedService, where the Azure ML WebService Input/Output file is
- located.
- :type linked_service_name:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- """
-
- _validation = {
- 'file_path': {'required': True},
- 'linked_service_name': {'required': True},
- }
-
- _attribute_map = {
- 'file_path': {'key': 'filePath', 'type': 'object'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- }
-
- def __init__(self, **kwargs):
- super(AzureMLWebServiceFile, self).__init__(**kwargs)
- self.file_path = kwargs.get('file_path', None)
- self.linked_service_name = kwargs.get('linked_service_name', None)
-
-
-class AzureMySqlLinkedService(LinkedService):
- """Azure MySQL database linked service.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param connect_via: The integration runtime reference.
- :type connect_via:
- ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
- :param description: Linked service description.
- :type description: str
- :param parameters: Parameters for linked service.
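Putting the three Azure ML models above together, a sketch: the keys of web_service_inputs/web_service_outputs must match the published web service's input and output names, and each value is an AzureMLWebServiceFile pointing at a blob path. All names here are hypothetical:

from azure.mgmt.datafactory.models import (
    AzureMLBatchExecutionActivity,
    AzureMLWebServiceFile,
    LinkedServiceReference,
)

blob_ls = LinkedServiceReference(reference_name='AzureBlobStorage1')

activity = AzureMLBatchExecutionActivity(
    name='ScoreBatch',
    linked_service_name=LinkedServiceReference(reference_name='AzureML1'),
    global_parameters={'Database server name': 'myserver'},
    web_service_inputs={
        'input1': AzureMLWebServiceFile(
            file_path='mycontainer/input.csv', linked_service_name=blob_ls),
    },
    web_service_outputs={
        'output1': AzureMLWebServiceFile(
            file_path='mycontainer/scored.csv', linked_service_name=blob_ls),
    },
)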
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in - connection string. - :type password: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureMySqlLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AzureMySql' - - -class AzureMySqlSink(CopySink): - """A copy activity Azure MySql sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param pre_copy_script: A query to execute before starting the copy. Type: - string (or Expression with resultType string). 
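A sketch of the pattern the password field above enables: keep the bulk of the connection string inline, but pull the password from Key Vault at runtime. Server, database, and linked-service names are hypothetical:

from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference,
    AzureMySqlLinkedService,
    LinkedServiceReference,
)

mysql_ls = AzureMySqlLinkedService(
    connection_string='Server=myserver.mysql.database.azure.com;Database=mydb;UID=admin',
    password=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='AzureKeyVault1'),
        secret_name='mysql-password',
    ),
)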
- :type pre_copy_script: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureMySqlSink, self).__init__(**kwargs) - self.pre_copy_script = kwargs.get('pre_copy_script', None) - self.type = 'AzureMySqlSink' - - -class AzureMySqlSource(CopySource): - """A copy activity Azure MySQL source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureMySqlSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'AzureMySqlSource' - - -class AzureMySqlTableDataset(Dataset): - """The Azure MySQL database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
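The sink/source pair above is what a copy activity consumes; a sketch with a pre-copy truncate on the sink and a retry wait in the documented timespan format (table names hypothetical):

from azure.mgmt.datafactory.models import AzureMySqlSink, AzureMySqlSource

source = AzureMySqlSource(query='SELECT id, name FROM src_table')
sink = AzureMySqlSink(
    pre_copy_script='TRUNCATE TABLE dst_table',  # runs once before the copy
    write_batch_size=1000,
    sink_retry_count=2,
    sink_retry_wait='00:01:00',
)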
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The Azure MySQL database table name. Type: string (or - Expression with resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureMySqlTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'AzureMySqlTable' - - -class AzurePostgreSqlLinkedService(LinkedService): - """Azure PostgreSQL linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in - connection string. - :type password: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzurePostgreSqlLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AzurePostgreSql' - - -class AzurePostgreSqlSink(CopySink): - """A copy activity Azure PostgreSQL sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param pre_copy_script: A query to execute before starting the copy. Type: - string (or Expression with resultType string). - :type pre_copy_script: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzurePostgreSqlSink, self).__init__(**kwargs) - self.pre_copy_script = kwargs.get('pre_copy_script', None) - self.type = 'AzurePostgreSqlSink' - - -class AzurePostgreSqlSource(CopySource): - """A copy activity Azure PostgreSQL source. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzurePostgreSqlSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'AzurePostgreSqlSource' - - -class AzurePostgreSqlTableDataset(Dataset): - """Azure PostgreSQL dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name of the Azure PostgreSQL database which - includes both schema and table. Type: string (or Expression with - resultType string). - :type table_name: object - :param table: The table name of the Azure PostgreSQL database. Type: - string (or Expression with resultType string). - :type table: object - :param azure_postgre_sql_table_dataset_schema: The schema name of the - Azure PostgreSQL database. Type: string (or Expression with resultType - string). 
- :type azure_postgre_sql_table_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'azure_postgre_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzurePostgreSqlTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.azure_postgre_sql_table_dataset_schema = kwargs.get('azure_postgre_sql_table_dataset_schema', None) - self.type = 'AzurePostgreSqlTable' - - -class AzureQueueSink(CopySink): - """A copy activity Azure Queue sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(AzureQueueSink, self).__init__(**kwargs) - self.type = 'AzureQueueSink' - - -class AzureSearchIndexDataset(Dataset): - """The Azure Search Index. - - All required parameters must be populated in order to send to Azure. 
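The oddly named azure_postgre_sql_table_dataset_schema above exists because the base Dataset already claims `schema` for column metadata, so the PostgreSQL schema name is remapped on the Python side while still serializing to typeProperties.schema. A sketch (names hypothetical):

from azure.mgmt.datafactory.models import (
    AzurePostgreSqlTableDataset,
    LinkedServiceReference,
)

ds = AzurePostgreSqlTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='AzurePostgreSql1'),
    table='orders',
    azure_postgre_sql_table_dataset_schema='public',  # -> typeProperties.schema
)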
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param index_name: Required. The name of the Azure Search Index. Type: - string (or Expression with resultType string). - :type index_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'index_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'index_name': {'key': 'typeProperties.indexName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureSearchIndexDataset, self).__init__(**kwargs) - self.index_name = kwargs.get('index_name', None) - self.type = 'AzureSearchIndex' - - -class AzureSearchIndexSink(CopySink): - """A copy activity Azure Search Index sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). 
- :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param write_behavior: Specify the write behavior when upserting documents - into Azure Search Index. Possible values include: 'Merge', 'Upload' - :type write_behavior: str or - ~azure.mgmt.datafactory.models.AzureSearchIndexWriteBehaviorType - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(AzureSearchIndexSink, self).__init__(**kwargs) - self.write_behavior = kwargs.get('write_behavior', None) - self.type = 'AzureSearchIndexSink' - - -class AzureSearchLinkedService(LinkedService): - """Linked service for Windows Azure Search Service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param url: Required. URL for Azure Search service. Type: string (or - Expression with resultType string). - :type url: object - :param key: Admin Key for Azure Search service - :type key: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
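write_behavior above is a string-backed enum: 'Merge' updates existing documents in the index, while 'Upload' replaces them. A sketch, assuming the AzureSearchIndexWriteBehaviorType enum exported by this package version; passing the plain string also works:

from azure.mgmt.datafactory.models import (
    AzureSearchIndexSink,
    AzureSearchIndexWriteBehaviorType,
)

sink = AzureSearchIndexSink(
    write_behavior=AzureSearchIndexWriteBehaviorType.merge,  # or 'Merge'
    write_batch_size=500,
)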
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'key': {'key': 'typeProperties.key', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureSearchLinkedService, self).__init__(**kwargs) - self.url = kwargs.get('url', None) - self.key = kwargs.get('key', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AzureSearch' - - -class AzureSqlDatabaseLinkedService(LinkedService): - """Microsoft Azure SQL Database linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in - connection string. - :type password: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param service_principal_id: The ID of the service principal used to - authenticate against Azure SQL Database. Type: string (or Expression with - resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to - authenticate against Azure SQL Database. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal - belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object
- """
-
- _validation = {
- 'type': {'required': True},
- 'connection_string': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
- 'description': {'key': 'description', 'type': 'str'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
- 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'},
- 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
- 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
- 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
- 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
- }
-
- def __init__(self, **kwargs):
- super(AzureSqlDatabaseLinkedService, self).__init__(**kwargs)
- self.connection_string = kwargs.get('connection_string', None)
- self.password = kwargs.get('password', None)
- self.service_principal_id = kwargs.get('service_principal_id', None)
- self.service_principal_key = kwargs.get('service_principal_key', None)
- self.tenant = kwargs.get('tenant', None)
- self.encrypted_credential = kwargs.get('encrypted_credential', None)
- self.type = 'AzureSqlDatabase'
-
-
-class AzureSqlDWLinkedService(LinkedService):
- """Azure SQL Data Warehouse linked service.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param connect_via: The integration runtime reference.
- :type connect_via:
- ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
- :param description: Linked service description.
- :type description: str
- :param parameters: Parameters for linked service.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- linked service.
- :type annotations: list[object]
- :param type: Required. Constant filled by server.
- :type type: str
- :param connection_string: Required. The connection string. Type: string,
- SecureString or AzureKeyVaultSecretReference.
- :type connection_string: object
- :param password: The Azure key vault secret reference of password in
- connection string.
- :type password:
- ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
- :param service_principal_id: The ID of the service principal used to
- authenticate against Azure SQL Data Warehouse. Type: string (or Expression
- with resultType string).
- :type service_principal_id: object
- :param service_principal_key: The key of the service principal used to
- authenticate against Azure SQL Data Warehouse.
- :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
- :param tenant: The name or ID of the tenant to which the service principal
- belongs. Type: string (or Expression with resultType string).
- :type tenant: object
- :param encrypted_credential: The encrypted credential used for
- authentication. Credentials are encrypted using the integration runtime
- credential manager.
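Two authentication styles fall out of the AzureSqlDatabaseLinkedService shape above: SQL credentials embedded in the connection string, or service-principal (AAD) fields supplied alongside it. A sketch of the service-principal form; all identifiers are placeholders:

from azure.mgmt.datafactory.models import AzureSqlDatabaseLinkedService, SecureString

sql_ls = AzureSqlDatabaseLinkedService(
    connection_string='Server=tcp:myserver.database.windows.net;Database=mydb;',
    service_principal_id='<sp-app-id>',
    service_principal_key=SecureString(value='<sp-key>'),
    tenant='<tenant-id>',
)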
Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureSqlDWLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.password = kwargs.get('password', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AzureSqlDW' - - -class AzureSqlDWTableDataset(Dataset): - """The Azure SQL Data Warehouse dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param azure_sql_dw_table_dataset_schema: The schema name of the Azure SQL - Data Warehouse. Type: string (or Expression with resultType string). - :type azure_sql_dw_table_dataset_schema: object - :param table: The table name of the Azure SQL Data Warehouse. Type: string - (or Expression with resultType string). 
- :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'azure_sql_dw_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureSqlDWTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.azure_sql_dw_table_dataset_schema = kwargs.get('azure_sql_dw_table_dataset_schema', None) - self.table = kwargs.get('table', None) - self.type = 'AzureSqlDWTable' - - -class AzureSqlMILinkedService(LinkedService): - """Azure SQL Managed Instance linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in - connection string. - :type password: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param service_principal_id: The ID of the service principal used to - authenticate against Azure SQL Managed Instance. Type: string (or - Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to - authenticate against Azure SQL Managed Instance. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal - belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureSqlMILinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.password = kwargs.get('password', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AzureSqlMI' - - -class AzureSqlMITableDataset(Dataset): - """The Azure SQL Managed Instance dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param azure_sql_mi_table_dataset_schema: The schema name of the Azure SQL - Managed Instance. Type: string (or Expression with resultType string). - :type azure_sql_mi_table_dataset_schema: object - :param table: The table name of the Azure SQL Managed Instance dataset. - Type: string (or Expression with resultType string). 
- :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'azure_sql_mi_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureSqlMITableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.azure_sql_mi_table_dataset_schema = kwargs.get('azure_sql_mi_table_dataset_schema', None) - self.table = kwargs.get('table', None) - self.type = 'AzureSqlMITable' - - -class AzureSqlSink(CopySink): - """A copy activity Azure SQL sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_writer_stored_procedure_name: SQL writer stored procedure name. - Type: string (or Expression with resultType string). - :type sql_writer_stored_procedure_name: object - :param sql_writer_table_type: SQL writer table type. Type: string (or - Expression with resultType string). - :type sql_writer_table_type: object - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression - with resultType string). - :type pre_copy_script: object - :param stored_procedure_parameters: SQL stored procedure parameters. - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param stored_procedure_table_type_parameter_name: The stored procedure - parameter name of the table type. Type: string (or Expression with - resultType string). - :type stored_procedure_table_type_parameter_name: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). 
- :type table_option: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, - 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureSqlSink, self).__init__(**kwargs) - self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) - self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) - self.pre_copy_script = kwargs.get('pre_copy_script', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) - self.table_option = kwargs.get('table_option', None) - self.type = 'AzureSqlSink' - - -class AzureSqlSource(CopySource): - """A copy activity Azure SQL source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_reader_query: SQL reader query. Type: string (or Expression - with resultType string). - :type sql_reader_query: object - :param sql_reader_stored_procedure_name: Name of the stored procedure for - a SQL Database source. This cannot be used at the same time as - SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: object - :param stored_procedure_parameters: Value and type setting for stored - procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param produce_additional_types: Which additional types to produce. 
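A sketch of an AzureSqlSink wired for the table-type stored procedure path documented above; the procedure, table type, and parameter names are illustrative only:

    from azure.mgmt.datafactory.models import AzureSqlSink, StoredProcedureParameter

    sql_sink = AzureSqlSink(
        sql_writer_stored_procedure_name='spOverwriteTable',
        sql_writer_table_type='MarketingType',
        stored_procedure_table_type_parameter_name='Marketing',
        stored_procedure_parameters={
            'category': StoredProcedureParameter(value='ProductA', type='String'),
        },
        table_option='autoCreate',  # per the docstring, the only supported value today
    )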
- :type produce_additional_types: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, - 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureSqlSource, self).__init__(**kwargs) - self.sql_reader_query = kwargs.get('sql_reader_query', None) - self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - self.produce_additional_types = kwargs.get('produce_additional_types', None) - self.type = 'AzureSqlSource' - - -class AzureSqlTableDataset(Dataset): - """The Azure SQL Server database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param azure_sql_table_dataset_schema: The schema name of the Azure SQL - database. Type: string (or Expression with resultType string). - :type azure_sql_table_dataset_schema: object - :param table: The table name of the Azure SQL database. Type: string (or - Expression with resultType string). 
- :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'azure_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureSqlTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.azure_sql_table_dataset_schema = kwargs.get('azure_sql_table_dataset_schema', None) - self.table = kwargs.get('table', None) - self.type = 'AzureSqlTable' - - -class AzureStorageLinkedService(LinkedService): - """The storage account linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: The connection string. It is mutually exclusive - with sasUri property. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param account_key: The Azure key vault secret reference of accountKey in - connection string. - :type account_key: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure Storage resource. It is mutually - exclusive with connectionString property. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type sas_uri: object - :param sas_token: The Azure key vault secret reference of sasToken in sas - uri. - :type sas_token: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
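Because table_name is marked for retirement in favor of the split schema/table properties, a sketch of the preferred shape (the reference name is a placeholder):

    from azure.mgmt.datafactory.models import (
        AzureSqlTableDataset,
        LinkedServiceReference,
    )

    sql_dataset = AzureSqlTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='AzureSqlLS'),
        azure_sql_table_dataset_schema='dbo',  # maps to typeProperties.schema
        table='SalesOrders',                   # maps to typeProperties.table
    )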
- :type encrypted_credential: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, - 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(AzureStorageLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.account_key = kwargs.get('account_key', None) - self.sas_uri = kwargs.get('sas_uri', None) - self.sas_token = kwargs.get('sas_token', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AzureStorage' - - -class AzureTableDataset(Dataset): - """The Azure Table storage dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: Required. The table name of the Azure Table storage. - Type: string (or Expression with resultType string). 
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'table_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'AzureTable' - - -class AzureTableSink(CopySink): - """A copy activity Azure Table sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param azure_table_default_partition_key_value: Azure Table default - partition key value. Type: string (or Expression with resultType string). - :type azure_table_default_partition_key_value: object - :param azure_table_partition_key_name: Azure Table partition key name. - Type: string (or Expression with resultType string). - :type azure_table_partition_key_name: object - :param azure_table_row_key_name: Azure Table row key name. Type: string - (or Expression with resultType string). - :type azure_table_row_key_name: object - :param azure_table_insert_type: Azure Table insert type. Type: string (or - Expression with resultType string). 
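Unlike the SQL datasets above, AzureTableDataset keeps table_name as a required property; a minimal sketch with placeholder names:

    from azure.mgmt.datafactory.models import AzureTableDataset, LinkedServiceReference

    table_dataset = AzureTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='AzureTableStorageLS'),
        table_name='DeviceReadings',  # required by _validation for this dataset type
    )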
- :type azure_table_insert_type: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'}, - 'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'}, - 'azure_table_row_key_name': {'key': 'azureTableRowKeyName', 'type': 'object'}, - 'azure_table_insert_type': {'key': 'azureTableInsertType', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureTableSink, self).__init__(**kwargs) - self.azure_table_default_partition_key_value = kwargs.get('azure_table_default_partition_key_value', None) - self.azure_table_partition_key_name = kwargs.get('azure_table_partition_key_name', None) - self.azure_table_row_key_name = kwargs.get('azure_table_row_key_name', None) - self.azure_table_insert_type = kwargs.get('azure_table_insert_type', None) - self.type = 'AzureTableSink' - - -class AzureTableSource(CopySource): - """A copy activity Azure Table source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param azure_table_source_query: Azure Table source query. Type: string - (or Expression with resultType string). - :type azure_table_source_query: object - :param azure_table_source_ignore_table_not_found: Azure Table source - ignore table not found. Type: boolean (or Expression with resultType - boolean). 
- :type azure_table_source_ignore_table_not_found: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'}, - 'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureTableSource, self).__init__(**kwargs) - self.azure_table_source_query = kwargs.get('azure_table_source_query', None) - self.azure_table_source_ignore_table_not_found = kwargs.get('azure_table_source_ignore_table_not_found', None) - self.type = 'AzureTableSource' - - -class AzureTableStorageLinkedService(LinkedService): - """The azure table storage linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: The connection string. It is mutually exclusive - with sasUri property. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param account_key: The Azure key vault secret reference of accountKey in - connection string. - :type account_key: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure Storage resource. It is mutually - exclusive with connectionString property. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type sas_uri: object - :param sas_token: The Azure key vault secret reference of sasToken in sas - uri. - :type sas_token: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, - 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(AzureTableStorageLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.account_key = kwargs.get('account_key', None) - self.sas_uri = kwargs.get('sas_uri', None) - self.sas_token = kwargs.get('sas_token', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'AzureTableStorage' - - -class BinaryDataset(Dataset): - """Binary dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param location: Required. The location of the Binary storage. - :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param compression: The data compression method used for the binary - dataset. 
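Since connectionString and sasUri are mutually exclusive on the storage linked services, a caller provides exactly one of them; a sketch of the SAS variant with the token kept in Key Vault (all names and the URI are placeholders):

    from azure.mgmt.datafactory.models import (
        AzureKeyVaultSecretReference,
        AzureTableStorageLinkedService,
        LinkedServiceReference,
    )

    key_vault = LinkedServiceReference(reference_name='AzureKeyVaultLS')
    table_storage_ls = AzureTableStorageLinkedService(
        sas_uri='https://examplestorage.table.core.windows.net/?sv=...',
        sas_token=AzureKeyVaultSecretReference(store=key_vault, secret_name='sasToken'),
    )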
- :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'location': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, **kwargs): - super(BinaryDataset, self).__init__(**kwargs) - self.location = kwargs.get('location', None) - self.compression = kwargs.get('compression', None) - self.type = 'Binary' - - -class BinarySink(CopySink): - """A copy activity Binary sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: Binary store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, - } - - def __init__(self, **kwargs): - super(BinarySink, self).__init__(**kwargs) - self.store_settings = kwargs.get('store_settings', None) - self.type = 'BinarySink' - - -class BinarySource(CopySource): - """A copy activity Binary source. - - All required parameters must be populated in order to send to Azure. 
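A sketch of the BinarySink above with the inherited CopySink knobs filled in; the retry values are arbitrary but follow the documented timespan pattern:

    from azure.mgmt.datafactory.models import BinarySink

    binary_sink = BinarySink(
        sink_retry_count=3,
        sink_retry_wait='00:00:30',    # matches the ((\d+)\.)?(\d\d):... pattern
        max_concurrent_connections=4,  # cap on parallel connections to the store
    )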
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: Binary store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - } - - def __init__(self, **kwargs): - super(BinarySource, self).__init__(**kwargs) - self.store_settings = kwargs.get('store_settings', None) - self.type = 'BinarySource' - - -class Trigger(Model): - """Azure data factory nested object which contains information about creating - pipeline run. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: RerunTumblingWindowTrigger, ChainingTrigger, - TumblingWindowTrigger, MultiplePipelineTrigger - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when - Start/Stop APIs are called on the Trigger. Possible values include: - 'Started', 'Stopped', 'Disabled' - :vartype runtime_state: str or - ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the - trigger. - :type annotations: list[object] - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'runtime_state': {'readonly': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'ChainingTrigger': 'ChainingTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} - } - - def __init__(self, **kwargs): - super(Trigger, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.description = kwargs.get('description', None) - self.runtime_state = None - self.annotations = kwargs.get('annotations', None) - self.type = None - - -class MultiplePipelineTrigger(Trigger): - """Base class for all triggers that support one to many model for trigger to - pipeline. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: BlobEventsTrigger, BlobTrigger, ScheduleTrigger - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when - Start/Stop APIs are called on the Trigger. Possible values include: - 'Started', 'Stopped', 'Disabled' - :vartype runtime_state: str or - ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the - trigger. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param pipelines: Pipelines that need to be started. - :type pipelines: - list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - """ - - _validation = { - 'runtime_state': {'readonly': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, - } - - _subtype_map = { - 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} - } - - def __init__(self, **kwargs): - super(MultiplePipelineTrigger, self).__init__(**kwargs) - self.pipelines = kwargs.get('pipelines', None) - self.type = 'MultiplePipelineTrigger' - - -class BlobEventsTrigger(MultiplePipelineTrigger): - """Trigger that runs every time a Blob event occurs. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Trigger description. 
- :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when - Start/Stop APIs are called on the Trigger. Possible values include: - 'Started', 'Stopped', 'Disabled' - :vartype runtime_state: str or - ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the - trigger. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param pipelines: Pipelines that need to be started. - :type pipelines: - list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - :param blob_path_begins_with: The blob path must begin with the pattern - provided for trigger to fire. For example, '/records/blobs/december/' will - only fire the trigger for blobs in the december folder under the records - container. At least one of these must be provided: blobPathBeginsWith, - blobPathEndsWith. - :type blob_path_begins_with: str - :param blob_path_ends_with: The blob path must end with the pattern - provided for trigger to fire. For example, 'december/boxes.csv' will only - fire the trigger for blobs named boxes in a december folder. At least one - of these must be provided: blobPathBeginsWith, blobPathEndsWith. - :type blob_path_ends_with: str - :param events: Required. The type of events that cause this trigger to - fire. - :type events: list[str or ~azure.mgmt.datafactory.models.BlobEventTypes] - :param scope: Required. The ARM resource ID of the Storage Account. - :type scope: str - """ - - _validation = { - 'runtime_state': {'readonly': True}, - 'type': {'required': True}, - 'events': {'required': True}, - 'scope': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, - 'blob_path_begins_with': {'key': 'typeProperties.blobPathBeginsWith', 'type': 'str'}, - 'blob_path_ends_with': {'key': 'typeProperties.blobPathEndsWith', 'type': 'str'}, - 'events': {'key': 'typeProperties.events', 'type': '[str]'}, - 'scope': {'key': 'typeProperties.scope', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(BlobEventsTrigger, self).__init__(**kwargs) - self.blob_path_begins_with = kwargs.get('blob_path_begins_with', None) - self.blob_path_ends_with = kwargs.get('blob_path_ends_with', None) - self.events = kwargs.get('events', None) - self.scope = kwargs.get('scope', None) - self.type = 'BlobEventsTrigger' - - -class BlobSink(CopySink): - """A copy activity Azure Blob sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. 
Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param blob_writer_overwrite_files: Blob writer overwrite files. Type: - boolean (or Expression with resultType boolean). - :type blob_writer_overwrite_files: object - :param blob_writer_date_time_format: Blob writer date time format. Type: - string (or Expression with resultType string). - :type blob_writer_date_time_format: object - :param blob_writer_add_header: Blob writer add header. Type: boolean (or - Expression with resultType boolean). - :type blob_writer_add_header: object - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, - 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, - 'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(BlobSink, self).__init__(**kwargs) - self.blob_writer_overwrite_files = kwargs.get('blob_writer_overwrite_files', None) - self.blob_writer_date_time_format = kwargs.get('blob_writer_date_time_format', None) - self.blob_writer_add_header = kwargs.get('blob_writer_add_header', None) - self.copy_behavior = kwargs.get('copy_behavior', None) - self.type = 'BlobSink' - - -class BlobSource(CopySource): - """A copy activity Azure Blob source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param treat_empty_as_null: Treat empty as null. Type: boolean (or - Expression with resultType boolean). - :type treat_empty_as_null: object - :param skip_header_line_count: Number of header lines to skip from each - blob. Type: integer (or Expression with resultType integer). 
- :type skip_header_line_count: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, - 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(BlobSource, self).__init__(**kwargs) - self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None) - self.skip_header_line_count = kwargs.get('skip_header_line_count', None) - self.recursive = kwargs.get('recursive', None) - self.type = 'BlobSource' - - -class BlobTrigger(MultiplePipelineTrigger): - """Trigger that runs every time the selected Blob container changes. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when - Start/Stop APIs are called on the Trigger. Possible values include: - 'Started', 'Stopped', 'Disabled' - :vartype runtime_state: str or - ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the - trigger. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param pipelines: Pipelines that need to be started. - :type pipelines: - list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - :param folder_path: Required. The path of the container/folder that will - trigger the pipeline. - :type folder_path: str - :param max_concurrency: Required. The max number of parallel files to - handle when it is triggered. - :type max_concurrency: int - :param linked_service: Required. The Azure Storage linked service - reference. 
- :type linked_service: - ~azure.mgmt.datafactory.models.LinkedServiceReference - """ - - _validation = { - 'runtime_state': {'readonly': True}, - 'type': {'required': True}, - 'folder_path': {'required': True}, - 'max_concurrency': {'required': True}, - 'linked_service': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'str'}, - 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, - 'linked_service': {'key': 'typeProperties.linkedService', 'type': 'LinkedServiceReference'}, - } - - def __init__(self, **kwargs): - super(BlobTrigger, self).__init__(**kwargs) - self.folder_path = kwargs.get('folder_path', None) - self.max_concurrency = kwargs.get('max_concurrency', None) - self.linked_service = kwargs.get('linked_service', None) - self.type = 'BlobTrigger' - - -class CassandraLinkedService(LinkedService): - """Linked service for Cassandra data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. Host name for connection. Type: string (or - Expression with resultType string). - :type host: object - :param authentication_type: AuthenticationType to be used for connection. - Type: string (or Expression with resultType string). - :type authentication_type: object - :param port: The port for the connection. Type: integer (or Expression - with resultType integer). - :type port: object - :param username: Username for authentication. Type: string (or Expression - with resultType string). - :type username: object - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
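A sketch of the three required BlobTrigger properties plus the pipeline wiring inherited from MultiplePipelineTrigger (all reference names are placeholders):

    from azure.mgmt.datafactory.models import (
        BlobTrigger,
        LinkedServiceReference,
        PipelineReference,
        TriggerPipelineReference,
    )

    blob_trigger = BlobTrigger(
        folder_path='inputcontainer/landing',  # container/folder being watched
        max_concurrency=10,                    # parallel files handled per firing
        linked_service=LinkedServiceReference(reference_name='AzureStorageLS'),
        pipelines=[TriggerPipelineReference(
            pipeline_reference=PipelineReference(reference_name='CopyFromBlob'),
        )],
    )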
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(CassandraLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.port = kwargs.get('port', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Cassandra' - - -class CassandraSource(CopySource): - """A copy activity source for a Cassandra database. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Should be a SQL-92 query expression or - Cassandra Query Language (CQL) command. Type: string (or Expression with - resultType string). - :type query: object - :param consistency_level: The consistency level specifies how many - Cassandra servers must respond to a read request before returning data to - the client application. Cassandra checks the specified number of Cassandra - servers for data to satisfy the read request. Must be one of - cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is - case-insensitive. 
Possible values include: 'ALL', 'EACH_QUORUM', 'QUORUM', - 'LOCAL_QUORUM', 'ONE', 'TWO', 'THREE', 'LOCAL_ONE', 'SERIAL', - 'LOCAL_SERIAL' - :type consistency_level: str or - ~azure.mgmt.datafactory.models.CassandraSourceReadConsistencyLevels - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(CassandraSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.consistency_level = kwargs.get('consistency_level', None) - self.type = 'CassandraSource' - - -class CassandraTableDataset(Dataset): - """The Cassandra database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name of the Cassandra database. Type: string - (or Expression with resultType string). - :type table_name: object - :param keyspace: The keyspace of the Cassandra database. Type: string (or - Expression with resultType string). 
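A sketch of a CassandraSource issuing a CQL query at QUORUM consistency; the keyspace and table names are placeholders:

    from azure.mgmt.datafactory.models import CassandraSource

    cassandra_source = CassandraSource(
        query='SELECT * FROM sensors.readings',  # SQL-92 or CQL, per the docstring
        consistency_level='QUORUM',              # case-insensitive, defaults to 'ONE'
    )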
- :type keyspace: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'keyspace': {'key': 'typeProperties.keyspace', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(CassandraTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.keyspace = kwargs.get('keyspace', None) - self.type = 'CassandraTable' - - -class ChainingTrigger(Trigger): - """Trigger that allows the referenced pipeline to depend on other pipeline - runs based on runDimension Name/Value pairs. Upstream pipelines should - declare the same runDimension Name and their runs should have the values - for those runDimensions. The referenced pipeline run would be triggered if - the values for the runDimension match for all upstream pipeline runs. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when - Start/Stop APIs are called on the Trigger. Possible values include: - 'Started', 'Stopped', 'Disabled' - :vartype runtime_state: str or - ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the - trigger. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param pipeline: Required. Pipeline for which runs are created when all - upstream pipelines complete successfully. - :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference - :param depends_on: Required. Upstream Pipelines. - :type depends_on: list[~azure.mgmt.datafactory.models.PipelineReference] - :param run_dimension: Required. Run Dimension property that needs to be - emitted by upstream pipelines. 
- :type run_dimension: str - """ - - _validation = { - 'runtime_state': {'readonly': True}, - 'type': {'required': True}, - 'pipeline': {'required': True}, - 'depends_on': {'required': True}, - 'run_dimension': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, - 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[PipelineReference]'}, - 'run_dimension': {'key': 'typeProperties.runDimension', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(ChainingTrigger, self).__init__(**kwargs) - self.pipeline = kwargs.get('pipeline', None) - self.depends_on = kwargs.get('depends_on', None) - self.run_dimension = kwargs.get('run_dimension', None) - self.type = 'ChainingTrigger' - - -class CloudError(Model): - """The object that defines the structure of an Azure Data Factory error - response. - - All required parameters must be populated in order to send to Azure. - - :param code: Required. Error code. - :type code: str - :param message: Required. Error message. - :type message: str - :param target: Property name/path in request associated with error. - :type target: str - :param details: Array with additional error details. - :type details: list[~azure.mgmt.datafactory.models.CloudError] - """ - - _validation = { - 'code': {'required': True}, - 'message': {'required': True}, - } - - _attribute_map = { - 'code': {'key': 'error.code', 'type': 'str'}, - 'message': {'key': 'error.message', 'type': 'str'}, - 'target': {'key': 'error.target', 'type': 'str'}, - 'details': {'key': 'error.details', 'type': '[CloudError]'}, - } - - def __init__(self, **kwargs): - super(CloudError, self).__init__(**kwargs) - self.code = kwargs.get('code', None) - self.message = kwargs.get('message', None) - self.target = kwargs.get('target', None) - self.details = kwargs.get('details', None) - - -class CloudErrorException(HttpOperationError): - """Server responsed with exception of type: 'CloudError'. - - :param deserialize: A deserializer - :param response: Server response to be deserialized. - """ - - def __init__(self, deserialize, response, *args): - - super(CloudErrorException, self).__init__(deserialize, response, 'CloudError', *args) - - -class CommonDataServiceForAppsEntityDataset(Dataset): - """The Common Data Service for Apps entity dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
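[Editor's note: a sketch of constructing the ChainingTrigger defined above, assuming the reference models exported by azure.mgmt.datafactory.models; pipeline names are illustrative. Note that dependsOn and runDimension flatten under typeProperties per the _attribute_map.]

from azure.mgmt.datafactory.models import (
    ChainingTrigger, TriggerPipelineReference, PipelineReference)

trigger = ChainingTrigger(
    pipeline=TriggerPipelineReference(
        pipeline_reference=PipelineReference(reference_name='Downstream')),
    depends_on=[PipelineReference(reference_name='UpstreamA'),
                PipelineReference(reference_name='UpstreamB')],
    run_dimension='Date',  # every upstream run must emit this runDimension
)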
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param entity_name: The logical name of the entity. Type: string (or - Expression with resultType string). - :type entity_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(CommonDataServiceForAppsEntityDataset, self).__init__(**kwargs) - self.entity_name = kwargs.get('entity_name', None) - self.type = 'CommonDataServiceForAppsEntity' - - -class CommonDataServiceForAppsLinkedService(LinkedService): - """Common Data Service for Apps linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param deployment_type: Required. The deployment type of the Common Data - Service for Apps instance. 'Online' for Common Data Service for Apps - Online and 'OnPremisesWithIfd' for Common Data Service for Apps - on-premises with Ifd. Type: string (or Expression with resultType string). - Possible values include: 'Online', 'OnPremisesWithIfd' - :type deployment_type: str or - ~azure.mgmt.datafactory.models.DynamicsDeploymentType - :param host_name: The host name of the on-premises Common Data Service for - Apps server. The property is required for on-prem and not allowed for - online. Type: string (or Expression with resultType string). - :type host_name: object - :param port: The port of on-premises Common Data Service for Apps server. - The property is required for on-prem and not allowed for online. Default - is 443. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: object - :param service_uri: The URL to the Microsoft Common Data Service for Apps - server. The property is required for on-line and not allowed for on-prem. - Type: string (or Expression with resultType string). 
- :type service_uri: object
- :param organization_name: The organization name of the Common Data Service
- for Apps instance. The property is required for on-prem and required for
- online when there is more than one Common Data Service for Apps instance
- associated with the user. Type: string (or Expression with resultType
- string).
- :type organization_name: object
- :param authentication_type: Required. The authentication type to connect
- to Common Data Service for Apps server. 'Office365' for online scenario,
- 'Ifd' for on-premises with Ifd scenario. Type: string (or Expression with
- resultType string). Possible values include: 'Office365', 'Ifd'
- :type authentication_type: str or
- ~azure.mgmt.datafactory.models.DynamicsAuthenticationType
- :param username: Required. User name to access the Common Data Service for
- Apps instance. Type: string (or Expression with resultType string).
- :type username: object
- :param password: Password to access the Common Data Service for Apps
- instance.
- :type password: ~azure.mgmt.datafactory.models.SecretBase
- :param encrypted_credential: The encrypted credential used for
- authentication. Credentials are encrypted using the integration runtime
- credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object
- """
-
- _validation = {
- 'type': {'required': True},
- 'deployment_type': {'required': True},
- 'authentication_type': {'required': True},
- 'username': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
- 'description': {'key': 'description', 'type': 'str'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'},
- 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'},
- 'port': {'key': 'typeProperties.port', 'type': 'object'},
- 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'},
- 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'},
- 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
- 'username': {'key': 'typeProperties.username', 'type': 'object'},
- 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
- 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
- }
-
- def __init__(self, **kwargs):
- super(CommonDataServiceForAppsLinkedService, self).__init__(**kwargs)
- self.deployment_type = kwargs.get('deployment_type', None)
- self.host_name = kwargs.get('host_name', None)
- self.port = kwargs.get('port', None)
- self.service_uri = kwargs.get('service_uri', None)
- self.organization_name = kwargs.get('organization_name', None)
- self.authentication_type = kwargs.get('authentication_type', None)
- self.username = kwargs.get('username', None)
- self.password = kwargs.get('password', None)
- self.encrypted_credential = kwargs.get('encrypted_credential', None)
- self.type = 'CommonDataServiceForApps'
-
-
- class CommonDataServiceForAppsSink(CopySink):
- """A copy activity Common Data Service for Apps sink.
-
- Variables are only populated by the server, and will be ignored when
- sending a request.
-
- All required parameters must be populated in order to send to Azure.
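[Editor's note: a sketch of the online flavor of the CommonDataServiceForAppsLinkedService completed above; the URL and credentials are illustrative. For 'Online', service_uri applies and host_name/port do not, per the docstring.]

from azure.mgmt.datafactory.models import (
    CommonDataServiceForAppsLinkedService, SecureString)

cds_ls = CommonDataServiceForAppsLinkedService(
    deployment_type='Online',
    service_uri='https://contoso.crm.dynamics.com',  # illustrative
    authentication_type='Office365',
    username='user@contoso.com',
    password=SecureString(value='<password>'),
)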
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :ivar write_behavior: Required. The write behavior for the operation. - Default value: "Upsert" . - :vartype write_behavior: str - :param ignore_null_values: The flag indicating whether to ignore null - values from input dataset (except key fields) during write operation. - Default is false. Type: boolean (or Expression with resultType boolean). - :type ignore_null_values: object - """ - - _validation = { - 'type': {'required': True}, - 'write_behavior': {'required': True, 'constant': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, - } - - write_behavior = "Upsert" - - def __init__(self, **kwargs): - super(CommonDataServiceForAppsSink, self).__init__(**kwargs) - self.ignore_null_values = kwargs.get('ignore_null_values', None) - self.type = 'CommonDataServiceForAppsSink' - - -class CommonDataServiceForAppsSource(CopySource): - """A copy activity Common Data Service for Apps source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. 
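[Editor's note: a sketch for the CommonDataServiceForAppsSink completed above. Its writeBehavior is a class-level constant ("Upsert"), not a constructor argument, so only the remaining properties are passed in.]

from azure.mgmt.datafactory.models import CommonDataServiceForAppsSink

sink = CommonDataServiceForAppsSink(
    ignore_null_values=True,  # skip nulls (except key fields) on write
    write_batch_size=10,
)
assert CommonDataServiceForAppsSink.write_behavior == 'Upsert'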
- :type type: str - :param query: FetchXML is a proprietary query language that is used in - Microsoft Common Data Service for Apps (online & on-premises). Type: - string (or Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(CommonDataServiceForAppsSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'CommonDataServiceForAppsSource' - - -class ConcurLinkedService(LinkedService): - """Concur Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param client_id: Required. Application client_id supplied by Concur App - Management. - :type client_id: object - :param username: Required. The user name that you use to access Concur - Service. - :type username: object - :param password: The password corresponding to the user name that you - provided in the username field. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'client_id': {'required': True}, - 'username': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ConcurLinkedService, self).__init__(**kwargs) - self.client_id = kwargs.get('client_id', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Concur' - - -class ConcurObjectDataset(Dataset): - """Concur Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). 
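[Editor's note: a sketch for the ConcurLinkedService completed above; client_id and username are illustrative. The three verification flags default to true server-side when omitted, per the docstring.]

from azure.mgmt.datafactory.models import ConcurLinkedService, SecureString

concur_ls = ConcurLinkedService(
    client_id='<client-id>',   # issued by Concur App Management
    username='jsmith',
    password=SecureString(value='<password>'),
)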
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ConcurObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'ConcurObject' - - -class ConcurSource(CopySource): - """A copy activity Concur Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ConcurSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'ConcurSource' - - -class CopyActivity(ExecutionActivity): - """Copy activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. 
- :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param source: Required. Copy activity source. - :type source: ~azure.mgmt.datafactory.models.CopySource - :param sink: Required. Copy activity sink. - :type sink: ~azure.mgmt.datafactory.models.CopySink - :param translator: Copy activity translator. If not specified, tabular - translator is used. - :type translator: object - :param enable_staging: Specifies whether to copy data via an interim - staging. Default value is false. Type: boolean (or Expression with - resultType boolean). - :type enable_staging: object - :param staging_settings: Specifies interim staging settings when - EnableStaging is true. - :type staging_settings: ~azure.mgmt.datafactory.models.StagingSettings - :param parallel_copies: Maximum number of concurrent sessions opened on - the source or sink to avoid overloading the data store. Type: integer (or - Expression with resultType integer), minimum: 0. - :type parallel_copies: object - :param data_integration_units: Maximum number of data integration units - that can be used to perform this data movement. Type: integer (or - Expression with resultType integer), minimum: 0. - :type data_integration_units: object - :param enable_skip_incompatible_row: Whether to skip incompatible row. - Default value is false. Type: boolean (or Expression with resultType - boolean). - :type enable_skip_incompatible_row: object - :param redirect_incompatible_row_settings: Redirect incompatible row - settings when EnableSkipIncompatibleRow is true. - :type redirect_incompatible_row_settings: - ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings - :param preserve_rules: Preserve Rules. - :type preserve_rules: list[object] - :param preserve: Preserve rules. - :type preserve: list[object] - :param inputs: List of inputs for the activity. - :type inputs: list[~azure.mgmt.datafactory.models.DatasetReference] - :param outputs: List of outputs for the activity. 
- :type outputs: list[~azure.mgmt.datafactory.models.DatasetReference] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'source': {'required': True}, - 'sink': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, - 'sink': {'key': 'typeProperties.sink', 'type': 'CopySink'}, - 'translator': {'key': 'typeProperties.translator', 'type': 'object'}, - 'enable_staging': {'key': 'typeProperties.enableStaging', 'type': 'object'}, - 'staging_settings': {'key': 'typeProperties.stagingSettings', 'type': 'StagingSettings'}, - 'parallel_copies': {'key': 'typeProperties.parallelCopies', 'type': 'object'}, - 'data_integration_units': {'key': 'typeProperties.dataIntegrationUnits', 'type': 'object'}, - 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, - 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, - 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, - 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, - 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, - 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, - } - - def __init__(self, **kwargs): - super(CopyActivity, self).__init__(**kwargs) - self.source = kwargs.get('source', None) - self.sink = kwargs.get('sink', None) - self.translator = kwargs.get('translator', None) - self.enable_staging = kwargs.get('enable_staging', None) - self.staging_settings = kwargs.get('staging_settings', None) - self.parallel_copies = kwargs.get('parallel_copies', None) - self.data_integration_units = kwargs.get('data_integration_units', None) - self.enable_skip_incompatible_row = kwargs.get('enable_skip_incompatible_row', None) - self.redirect_incompatible_row_settings = kwargs.get('redirect_incompatible_row_settings', None) - self.preserve_rules = kwargs.get('preserve_rules', None) - self.preserve = kwargs.get('preserve', None) - self.inputs = kwargs.get('inputs', None) - self.outputs = kwargs.get('outputs', None) - self.type = 'Copy' - - -class CosmosDbLinkedService(LinkedService): - """Microsoft Azure Cosmos Database (CosmosDB) linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. 
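[Editor's note: a sketch wiring the CopyActivity completed above to a source, sink, and dataset references; all names are illustrative. source and sink flatten under typeProperties on the wire, while inputs/outputs stay top-level, per the _attribute_map.]

from azure.mgmt.datafactory.models import (
    CopyActivity, ConcurSource, BlobSink, DatasetReference)

copy = CopyActivity(
    name='CopyConcurToBlob',
    source=ConcurSource(query='SELECT * FROM Trips'),  # illustrative query
    sink=BlobSink(),
    inputs=[DatasetReference(reference_name='ConcurDataset')],
    outputs=[DatasetReference(reference_name='BlobDataset')],
    data_integration_units=4,
)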
- :type type: str - :param connection_string: Required. The connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param account_key: The Azure key vault secret reference of accountKey in - connection string. - :type account_key: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(CosmosDbLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.account_key = kwargs.get('account_key', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'CosmosDb' - - -class CosmosDbMongoDbApiCollectionDataset(Dataset): - """The CosmosDB (MongoDB API) database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param collection: Required. The collection name of the CosmosDB (MongoDB - API) database. Type: string (or Expression with resultType string). 
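[Editor's note: a sketch for the CosmosDbLinkedService completed above, assuming the AzureKeyVaultSecretReference model takes a store (linked service reference) and secret_name; the connection string and names are illustrative.]

from azure.mgmt.datafactory.models import (
    CosmosDbLinkedService, AzureKeyVaultSecretReference, LinkedServiceReference)

cosmos_ls = CosmosDbLinkedService(
    connection_string='AccountEndpoint=https://<account>.documents.azure.com:443/;Database=mydb',
    account_key=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='MyKeyVaultLS'),
        secret_name='cosmos-account-key'),
)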
- :type collection: object
- """
-
- _validation = {
- 'linked_service_name': {'required': True},
- 'type': {'required': True},
- 'collection': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'description': {'key': 'description', 'type': 'str'},
- 'structure': {'key': 'structure', 'type': 'object'},
- 'schema': {'key': 'schema', 'type': 'object'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
- 'type': {'key': 'type', 'type': 'str'},
- 'collection': {'key': 'typeProperties.collection', 'type': 'object'},
- }
-
- def __init__(self, **kwargs):
- super(CosmosDbMongoDbApiCollectionDataset, self).__init__(**kwargs)
- self.collection = kwargs.get('collection', None)
- self.type = 'CosmosDbMongoDbApiCollection'
-
-
- class CosmosDbMongoDbApiLinkedService(LinkedService):
- """Linked service for CosmosDB (MongoDB API) data source.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param connect_via: The integration runtime reference.
- :type connect_via:
- ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
- :param description: Linked service description.
- :type description: str
- :param parameters: Parameters for linked service.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- linked service.
- :type annotations: list[object]
- :param type: Required. Constant filled by server.
- :type type: str
- :param connection_string: Required. The CosmosDB (MongoDB API) connection
- string. Type: string, SecureString or AzureKeyVaultSecretReference.
- :type connection_string: object
- :param database: Required. The name of the CosmosDB (MongoDB API) database
- that you want to access. Type: string (or Expression with resultType
- string).
- :type database: object
- """
-
- _validation = {
- 'type': {'required': True},
- 'connection_string': {'required': True},
- 'database': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
- 'description': {'key': 'description', 'type': 'str'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
- 'database': {'key': 'typeProperties.database', 'type': 'object'},
- }
-
- def __init__(self, **kwargs):
- super(CosmosDbMongoDbApiLinkedService, self).__init__(**kwargs)
- self.connection_string = kwargs.get('connection_string', None)
- self.database = kwargs.get('database', None)
- self.type = 'CosmosDbMongoDbApi'
-
-
- class CosmosDbMongoDbApiSink(CopySink):
- """A copy activity sink for a CosmosDB (MongoDB API) database.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param write_batch_size: Write batch size. Type: integer (or Expression
- with resultType integer), minimum: 0.
- :type write_batch_size: object
- :param write_batch_timeout: Write batch timeout. Type: string (or
- Expression with resultType string), pattern:
- ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type write_batch_timeout: object
- :param sink_retry_count: Sink retry count. Type: integer (or Expression
- with resultType integer).
- :type sink_retry_count: object
- :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
- resultType string), pattern:
- ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type sink_retry_wait: object
- :param max_concurrent_connections: The maximum concurrent connection count
- for the sink data store. Type: integer (or Expression with resultType
- integer).
- :type max_concurrent_connections: object
- :param type: Required. Constant filled by server.
- :type type: str
- :param write_behavior: Specifies whether the document with the same key
- should be overwritten (upsert) rather than throwing an exception (insert).
- The default value is "insert". Type: string (or Expression with resultType
- string).
- :type write_behavior: object
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
- 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
- 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
- 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
- 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
- 'type': {'key': 'type', 'type': 'str'},
- 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
- }
-
- def __init__(self, **kwargs):
- super(CosmosDbMongoDbApiSink, self).__init__(**kwargs)
- self.write_behavior = kwargs.get('write_behavior', None)
- self.type = 'CosmosDbMongoDbApiSink'
-
-
- class CosmosDbMongoDbApiSource(CopySource):
- """A copy activity source for a CosmosDB (MongoDB API) database.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param source_retry_count: Source retry count. Type: integer (or
- Expression with resultType integer).
- :type source_retry_count: object
- :param source_retry_wait: Source retry wait. Type: string (or Expression
- with resultType string), pattern:
- ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type source_retry_wait: object
- :param max_concurrent_connections: The maximum concurrent connection count
- for the source data store. Type: integer (or Expression with resultType
- integer).
- :type max_concurrent_connections: object
- :param type: Required. Constant filled by server.
- :type type: str
- :param filter: Specifies selection filter using query operators. To return
- all documents in a collection, omit this parameter or pass an empty
- document ({}). Type: string (or Expression with resultType string).
- :type filter: object
- :param cursor_methods: Cursor methods for MongoDB query.
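[Editor's note: a sketch for the CosmosDbMongoDbApiSink completed above; values are illustrative.]

from azure.mgmt.datafactory.models import CosmosDbMongoDbApiSink

mongo_sink = CosmosDbMongoDbApiSink(
    write_behavior='upsert',  # overwrite documents with the same key; default is 'insert'
    write_batch_size=1000,
)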
- :type cursor_methods:
- ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties
- :param batch_size: Specifies the number of documents to return in each
- batch of the response from the MongoDB instance. In most cases, modifying
- the batch size will not affect the user or the application. This
- property's main purpose is to avoid hitting the limit on response size.
- Type: integer (or Expression with resultType integer).
- :type batch_size: object
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
- 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
- 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
- 'type': {'key': 'type', 'type': 'str'},
- 'filter': {'key': 'filter', 'type': 'object'},
- 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'},
- 'batch_size': {'key': 'batchSize', 'type': 'object'},
- }
-
- def __init__(self, **kwargs):
- super(CosmosDbMongoDbApiSource, self).__init__(**kwargs)
- self.filter = kwargs.get('filter', None)
- self.cursor_methods = kwargs.get('cursor_methods', None)
- self.batch_size = kwargs.get('batch_size', None)
- self.type = 'CosmosDbMongoDbApiSource'
-
-
- class CouchbaseLinkedService(LinkedService):
- """Couchbase server linked service.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param connect_via: The integration runtime reference.
- :type connect_via:
- ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
- :param description: Linked service description.
- :type description: str
- :param parameters: Parameters for linked service.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- linked service.
- :type annotations: list[object]
- :param type: Required. Constant filled by server.
- :type type: str
- :param connection_string: An ODBC connection string. Type: string,
- SecureString or AzureKeyVaultSecretReference.
- :type connection_string: object
- :param cred_string: The Azure key vault secret reference of credString in
- connection string.
- :type cred_string:
- ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
- :param encrypted_credential: The encrypted credential used for
- authentication. Credentials are encrypted using the integration runtime
- credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'cred_string': {'key': 'typeProperties.credString', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(CouchbaseLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.cred_string = kwargs.get('cred_string', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Couchbase' - - -class CouchbaseSource(CopySource): - """A copy activity Couchbase server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(CouchbaseSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'CouchbaseSource' - - -class CouchbaseTableDataset(Dataset): - """Couchbase server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. 
Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(CouchbaseTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'CouchbaseTable' - - -class CreateLinkedIntegrationRuntimeRequest(Model): - """The linked integration runtime information. - - :param name: The name of the linked integration runtime. - :type name: str - :param subscription_id: The ID of the subscription that the linked - integration runtime belongs to. - :type subscription_id: str - :param data_factory_name: The name of the data factory that the linked - integration runtime belongs to. - :type data_factory_name: str - :param data_factory_location: The location of the data factory that the - linked integration runtime belongs to. - :type data_factory_location: str - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, - 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, - 'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(CreateLinkedIntegrationRuntimeRequest, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.subscription_id = kwargs.get('subscription_id', None) - self.data_factory_name = kwargs.get('data_factory_name', None) - self.data_factory_location = kwargs.get('data_factory_location', None) - - -class CreateRunResponse(Model): - """Response body with a run identifier. - - All required parameters must be populated in order to send to Azure. - - :param run_id: Required. Identifier of a run. - :type run_id: str - """ - - _validation = { - 'run_id': {'required': True}, - } - - _attribute_map = { - 'run_id': {'key': 'runId', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(CreateRunResponse, self).__init__(**kwargs) - self.run_id = kwargs.get('run_id', None) - - -class CustomActivity(ExecutionActivity): - """Custom activity type. - - All required parameters must be populated in order to send to Azure. 
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param name: Required. Activity name.
- :type name: str
- :param description: Activity description.
- :type description: str
- :param depends_on: Activity depends on condition.
- :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
- :param user_properties: Activity user properties.
- :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
- :param type: Required. Constant filled by server.
- :type type: str
- :param linked_service_name: Linked service reference.
- :type linked_service_name:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param policy: Activity policy.
- :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
- :param command: Required. Command for custom activity. Type: string (or
- Expression with resultType string).
- :type command: object
- :param resource_linked_service: Resource linked service reference.
- :type resource_linked_service:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param folder_path: Folder path for resource files. Type: string (or
- Expression with resultType string).
- :type folder_path: object
- :param reference_objects: Reference objects.
- :type reference_objects:
- ~azure.mgmt.datafactory.models.CustomActivityReferenceObject
- :param extended_properties: User defined property bag. There is no
- restriction on the keys or values that can be used. The user-specified
- custom activity has the full responsibility to consume and interpret the
- content defined.
- :type extended_properties: dict[str, object]
- :param retention_time_in_days: The retention time for the files submitted
- for custom activity. Type: double (or Expression with resultType double).
- :type retention_time_in_days: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'command': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'command': {'key': 'typeProperties.command', 'type': 'object'}, - 'resource_linked_service': {'key': 'typeProperties.resourceLinkedService', 'type': 'LinkedServiceReference'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'}, - 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'}, - 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(CustomActivity, self).__init__(**kwargs) - self.command = kwargs.get('command', None) - self.resource_linked_service = kwargs.get('resource_linked_service', None) - self.folder_path = kwargs.get('folder_path', None) - self.reference_objects = kwargs.get('reference_objects', None) - self.extended_properties = kwargs.get('extended_properties', None) - self.retention_time_in_days = kwargs.get('retention_time_in_days', None) - self.type = 'Custom' - - -class CustomActivityReferenceObject(Model): - """Reference objects for custom activity. - - :param linked_services: Linked service references. - :type linked_services: - list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param datasets: Dataset references. - :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference] - """ - - _attribute_map = { - 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceReference]'}, - 'datasets': {'key': 'datasets', 'type': '[DatasetReference]'}, - } - - def __init__(self, **kwargs): - super(CustomActivityReferenceObject, self).__init__(**kwargs) - self.linked_services = kwargs.get('linked_services', None) - self.datasets = kwargs.get('datasets', None) - - -class CustomDataset(Dataset): - """The custom dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. 
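[Editor's note: a sketch combining the CustomActivity and CustomActivityReferenceObject models completed above; every name here is illustrative.]

from azure.mgmt.datafactory.models import (
    CustomActivity, CustomActivityReferenceObject,
    LinkedServiceReference, DatasetReference)

custom = CustomActivity(
    name='RunScorer',
    command='python scorer.py',
    linked_service_name=LinkedServiceReference(reference_name='AzureBatchLS'),
    resource_linked_service=LinkedServiceReference(reference_name='BlobStorageLS'),
    folder_path='scripts/scorer',
    reference_objects=CustomActivityReferenceObject(
        datasets=[DatasetReference(reference_name='InputDataset')]),
    extended_properties={'threshold': 0.8},  # free-form property bag
)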
- :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param type_properties: Custom dataset properties. - :type type_properties: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'type_properties': {'key': 'typeProperties', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(CustomDataset, self).__init__(**kwargs) - self.type_properties = kwargs.get('type_properties', None) - self.type = 'CustomDataset' - - -class CustomDataSourceLinkedService(LinkedService): - """Custom linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param type_properties: Required. Custom linked service properties. - :type type_properties: object - """ - - _validation = { - 'type': {'required': True}, - 'type_properties': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'type_properties': {'key': 'typeProperties', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(CustomDataSourceLinkedService, self).__init__(**kwargs) - self.type_properties = kwargs.get('type_properties', None) - self.type = 'CustomDataSource' - - -class DatabricksNotebookActivity(ExecutionActivity): - """DatabricksNotebook activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. 
- :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
- :param user_properties: Activity user properties.
- :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
- :param type: Required. Constant filled by server.
- :type type: str
- :param linked_service_name: Linked service reference.
- :type linked_service_name:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param policy: Activity policy.
- :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
- :param notebook_path: Required. The absolute path of the notebook to be
- run in the Databricks Workspace. This path must begin with a slash. Type:
- string (or Expression with resultType string).
- :type notebook_path: object
- :param base_parameters: Base parameters to be used for each run of this
- job. If the notebook takes a parameter that is not specified, the default
- value from the notebook will be used.
- :type base_parameters: dict[str, object]
- :param libraries: A list of libraries to be installed on the cluster that
- will execute the job.
- :type libraries: list[dict[str, object]]
- """
-
- _validation = {
- 'name': {'required': True},
- 'type': {'required': True},
- 'notebook_path': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'name': {'key': 'name', 'type': 'str'},
- 'description': {'key': 'description', 'type': 'str'},
- 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
- 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
- 'notebook_path': {'key': 'typeProperties.notebookPath', 'type': 'object'},
- 'base_parameters': {'key': 'typeProperties.baseParameters', 'type': '{object}'},
- 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'},
- }
-
- def __init__(self, **kwargs):
- super(DatabricksNotebookActivity, self).__init__(**kwargs)
- self.notebook_path = kwargs.get('notebook_path', None)
- self.base_parameters = kwargs.get('base_parameters', None)
- self.libraries = kwargs.get('libraries', None)
- self.type = 'DatabricksNotebook'
-
-
-class DatabricksSparkJarActivity(ExecutionActivity):
- """DatabricksSparkJar activity.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param name: Required. Activity name.
- :type name: str
- :param description: Activity description.
- :type description: str
- :param depends_on: Activity depends on condition.
- :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
- :param user_properties: Activity user properties.
- :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
- :param type: Required. Constant filled by server.
- :type type: str
- :param linked_service_name: Linked service reference.
- :type linked_service_name:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param policy: Activity policy.
- :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
- :param main_class_name: Required. The full name of the class containing
- the main method to be executed. This class must be contained in a JAR
- provided as a library. Type: string (or Expression with resultType
- string).
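-
- # A minimal usage sketch (illustrative, not generated code; the linked
- # service name and notebook path are hypothetical placeholders):
- #
- #     activity = DatabricksNotebookActivity(
- #         name='RunNotebook',
- #         linked_service_name=LinkedServiceReference(
- #             reference_name='AzureDatabricks1'),
- #         notebook_path='/Users/user@example.com/my-notebook',
- #         base_parameters={'runDate': '2019-06-07'})
-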
- :type main_class_name: object - :param parameters: Parameters that will be passed to the main method. - :type parameters: list[object] - :param libraries: A list of libraries to be installed on the cluster that - will execute the job. - :type libraries: list[dict[str, object]] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'main_class_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'main_class_name': {'key': 'typeProperties.mainClassName', 'type': 'object'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, - 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, - } - - def __init__(self, **kwargs): - super(DatabricksSparkJarActivity, self).__init__(**kwargs) - self.main_class_name = kwargs.get('main_class_name', None) - self.parameters = kwargs.get('parameters', None) - self.libraries = kwargs.get('libraries', None) - self.type = 'DatabricksSparkJar' - - -class DatabricksSparkPythonActivity(ExecutionActivity): - """DatabricksSparkPython activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param python_file: Required. The URI of the Python file to be executed. - DBFS paths are supported. Type: string (or Expression with resultType - string). - :type python_file: object - :param parameters: Command line parameters that will be passed to the - Python file. - :type parameters: list[object] - :param libraries: A list of libraries to be installed on the cluster that - will execute the job. 
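-
- # A minimal usage sketch (illustrative, not generated code; the class name
- # and DBFS path are hypothetical, and each 'libraries' entry follows the
- # Databricks library specification format):
- #
- #     activity = DatabricksSparkJarActivity(
- #         name='RunSparkJar',
- #         main_class_name='com.example.MainClass',
- #         parameters=['--input', 'wasb://data'],
- #         libraries=[{'jar': 'dbfs:/libs/app.jar'}])
-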
- :type libraries: list[dict[str, object]]
- """
-
- _validation = {
- 'name': {'required': True},
- 'type': {'required': True},
- 'python_file': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'name': {'key': 'name', 'type': 'str'},
- 'description': {'key': 'description', 'type': 'str'},
- 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
- 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
- 'python_file': {'key': 'typeProperties.pythonFile', 'type': 'object'},
- 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'},
- 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'},
- }
-
- def __init__(self, **kwargs):
- super(DatabricksSparkPythonActivity, self).__init__(**kwargs)
- self.python_file = kwargs.get('python_file', None)
- self.parameters = kwargs.get('parameters', None)
- self.libraries = kwargs.get('libraries', None)
- self.type = 'DatabricksSparkPython'
-
-
-class DataLakeAnalyticsUSQLActivity(ExecutionActivity):
- """Data Lake Analytics U-SQL activity.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param name: Required. Activity name.
- :type name: str
- :param description: Activity description.
- :type description: str
- :param depends_on: Activity depends on condition.
- :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
- :param user_properties: Activity user properties.
- :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
- :param type: Required. Constant filled by server.
- :type type: str
- :param linked_service_name: Linked service reference.
- :type linked_service_name:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param policy: Activity policy.
- :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
- :param script_path: Required. Case-sensitive path to folder that contains
- the U-SQL script. Type: string (or Expression with resultType string).
- :type script_path: object
- :param script_linked_service: Required. Script linked service reference.
- :type script_linked_service:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param degree_of_parallelism: The maximum number of nodes simultaneously
- used to run the job. Default value is 1. Type: integer (or Expression with
- resultType integer), minimum: 1.
- :type degree_of_parallelism: object
- :param priority: Determines which jobs out of all that are queued should
- be selected to run first. The lower the number, the higher the priority.
- Default value is 1000. Type: integer (or Expression with resultType
- integer), minimum: 1.
- :type priority: object
- :param parameters: Parameters for U-SQL job request.
- :type parameters: dict[str, object]
- :param runtime_version: Runtime version of the U-SQL engine to use. Type:
- string (or Expression with resultType string).
- :type runtime_version: object
- :param compilation_mode: Compilation mode of U-SQL. Must be one of these
- values: Semantic, Full, or SingleBox. Type: string (or Expression with
- resultType string).
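-
- # A minimal usage sketch (illustrative, not generated code; the DBFS path
- # and PyPI package are hypothetical placeholders):
- #
- #     activity = DatabricksSparkPythonActivity(
- #         name='RunPySpark',
- #         python_file='dbfs:/scripts/job.py',
- #         parameters=['--date', '2019-06-07'],
- #         libraries=[{'pypi': {'package': 'simplejson'}}])
-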
- :type compilation_mode: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'script_path': {'required': True}, - 'script_linked_service': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, - 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, - 'degree_of_parallelism': {'key': 'typeProperties.degreeOfParallelism', 'type': 'object'}, - 'priority': {'key': 'typeProperties.priority', 'type': 'object'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, - 'runtime_version': {'key': 'typeProperties.runtimeVersion', 'type': 'object'}, - 'compilation_mode': {'key': 'typeProperties.compilationMode', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DataLakeAnalyticsUSQLActivity, self).__init__(**kwargs) - self.script_path = kwargs.get('script_path', None) - self.script_linked_service = kwargs.get('script_linked_service', None) - self.degree_of_parallelism = kwargs.get('degree_of_parallelism', None) - self.priority = kwargs.get('priority', None) - self.parameters = kwargs.get('parameters', None) - self.runtime_version = kwargs.get('runtime_version', None) - self.compilation_mode = kwargs.get('compilation_mode', None) - self.type = 'DataLakeAnalyticsU-SQL' - - -class DatasetCompression(Model): - """The compression method used on a dataset. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DatasetZipDeflateCompression, DatasetDeflateCompression, - DatasetGZipCompression, DatasetBZip2Compression - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'ZipDeflate': 'DatasetZipDeflateCompression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'BZip2': 'DatasetBZip2Compression'} - } - - def __init__(self, **kwargs): - super(DatasetCompression, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = None - - -class DatasetBZip2Compression(DatasetCompression): - """The BZip2 compression method used on a dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(DatasetBZip2Compression, self).__init__(**kwargs) - self.type = 'BZip2' - - -class DatasetDeflateCompression(DatasetCompression): - """The Deflate compression method used on a dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. - :type type: str - :param level: The Deflate compression level. - :type level: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DatasetDeflateCompression, self).__init__(**kwargs) - self.level = kwargs.get('level', None) - self.type = 'Deflate' - - -class DatasetFolder(Model): - """The folder that this Dataset is in. If not specified, Dataset will appear - at the root level. - - :param name: The name of the folder that this Dataset is in. - :type name: str - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(DatasetFolder, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - - -class DatasetGZipCompression(DatasetCompression): - """The GZip compression method used on a dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. - :type type: str - :param level: The GZip compression level. - :type level: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DatasetGZipCompression, self).__init__(**kwargs) - self.level = kwargs.get('level', None) - self.type = 'GZip' - - -class DatasetReference(Model): - """Dataset reference type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Dataset reference type. Default value: - "DatasetReference" . - :vartype type: str - :param reference_name: Required. Reference dataset name. - :type reference_name: str - :param parameters: Arguments for dataset. 
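-
- # Deserialization note (illustrative, not generated code): the 'type'
- # discriminator declared in DatasetCompression._subtype_map is what
- # rehydrates a plain payload such as {'type': 'GZip', 'level': 'Optimal'}
- # into the matching subclass. The same object can be built directly:
- #
- #     compression = DatasetGZipCompression(level='Optimal')
-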
- :type parameters: dict[str, object] - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, - } - - type = "DatasetReference" - - def __init__(self, **kwargs): - super(DatasetReference, self).__init__(**kwargs) - self.reference_name = kwargs.get('reference_name', None) - self.parameters = kwargs.get('parameters', None) - - -class SubResource(Model): - """Azure Data Factory nested resource, which belongs to a factory. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(SubResource, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.etag = None - - -class DatasetResource(SubResource): - """Dataset resource type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Dataset properties. - :type properties: ~azure.mgmt.datafactory.models.Dataset - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'Dataset'}, - } - - def __init__(self, **kwargs): - super(DatasetResource, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - - -class DatasetZipDeflateCompression(DatasetCompression): - """The ZipDeflate compression method used on a dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. - :type type: str - :param level: The ZipDeflate compression level. 
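-
- # A minimal usage sketch (illustrative, not generated code; the dataset
- # name and parameter values are hypothetical) of pointing an activity at an
- # existing dataset:
- #
- #     ref = DatasetReference(
- #         reference_name='MyDataset',
- #         parameters={'year': '2019'})
-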
- :type level: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DatasetZipDeflateCompression, self).__init__(**kwargs) - self.level = kwargs.get('level', None) - self.type = 'ZipDeflate' - - -class Db2LinkedService(LinkedService): - """Linked service for DB2 data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param server: Required. Server name for connection. Type: string (or - Expression with resultType string). - :type server: object - :param database: Required. Database name for connection. Type: string (or - Expression with resultType string). - :type database: object - :param authentication_type: AuthenticationType to be used for connection. - Possible values include: 'Basic' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.Db2AuthenticationType - :param username: Username for authentication. Type: string (or Expression - with resultType string). - :type username: object - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'database': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(Db2LinkedService, self).__init__(**kwargs) - self.server = kwargs.get('server', None) - self.database = kwargs.get('database', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Db2' - - -class Db2Source(CopySource): - """A copy activity source for Db2 databases. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(Db2Source, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'Db2Source' - - -class Db2TableDataset(Dataset): - """The Db2 table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. 
Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param db2_table_dataset_schema: The Db2 schema name. Type: string (or - Expression with resultType string). - :type db2_table_dataset_schema: object - :param table: The Db2 table name. Type: string (or Expression with - resultType string). - :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'db2_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(Db2TableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.db2_table_dataset_schema = kwargs.get('db2_table_dataset_schema', None) - self.table = kwargs.get('table', None) - self.type = 'Db2Table' - - -class DeleteActivity(ExecutionActivity): - """Delete activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. 
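-
- # A minimal usage sketch (illustrative, not generated code; the linked
- # service and table names are hypothetical) using the newer schema + table
- # pair rather than the retired tableName property:
- #
- #     dataset = Db2TableDataset(
- #         linked_service_name=LinkedServiceReference(reference_name='Db2Ls'),
- #         db2_table_dataset_schema='DB2ADMIN',
- #         table='ORDERS')
-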
- :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
- :param recursive: If true, files or sub-folders under current folder path
- will be deleted recursively. Default is false. Type: boolean (or
- Expression with resultType boolean).
- :type recursive: object
- :param max_concurrent_connections: The maximum number of concurrent
- connections to the data source.
- :type max_concurrent_connections: int
- :param enable_logging: Whether to record detailed logs of delete-activity
- execution. Default value is false. Type: boolean (or Expression with
- resultType boolean).
- :type enable_logging: object
- :param log_storage_settings: Log storage settings the customer needs to
- provide when enableLogging is true.
- :type log_storage_settings:
- ~azure.mgmt.datafactory.models.LogStorageSettings
- :param dataset: Required. Delete activity dataset reference.
- :type dataset: ~azure.mgmt.datafactory.models.DatasetReference
- """
-
- _validation = {
- 'name': {'required': True},
- 'type': {'required': True},
- 'max_concurrent_connections': {'minimum': 1},
- 'dataset': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'name': {'key': 'name', 'type': 'str'},
- 'description': {'key': 'description', 'type': 'str'},
- 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
- 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
- 'recursive': {'key': 'typeProperties.recursive', 'type': 'object'},
- 'max_concurrent_connections': {'key': 'typeProperties.maxConcurrentConnections', 'type': 'int'},
- 'enable_logging': {'key': 'typeProperties.enableLogging', 'type': 'object'},
- 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'},
- 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'},
- }
-
- def __init__(self, **kwargs):
- super(DeleteActivity, self).__init__(**kwargs)
- self.recursive = kwargs.get('recursive', None)
- self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None)
- self.enable_logging = kwargs.get('enable_logging', None)
- self.log_storage_settings = kwargs.get('log_storage_settings', None)
- self.dataset = kwargs.get('dataset', None)
- self.type = 'Delete'
-
-
-class DelimitedTextDataset(Dataset):
- """Delimited text dataset.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param description: Dataset description.
- :type description: str
- :param structure: Columns that define the structure of the dataset. Type:
- array (or Expression with resultType array), itemType: DatasetDataElement.
- :type structure: object
- :param schema: Columns that define the physical type schema of the
- dataset. Type: array (or Expression with resultType array), itemType:
- DatasetSchemaDataElement.
- :type schema: object
- :param linked_service_name: Required. Linked service reference.
- :type linked_service_name:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param parameters: Parameters for dataset.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- Dataset.
- :type annotations: list[object]
- :param folder: The folder that this Dataset is in. If not specified,
- Dataset will appear at the root level.
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
- :param type: Required. Constant filled by server.
- :type type: str
- :param location: Required. The location of the delimited text storage.
- :type location: ~azure.mgmt.datafactory.models.DatasetLocation
- :param column_delimiter: The column delimiter. Type: string (or Expression
- with resultType string).
- :type column_delimiter: object
- :param row_delimiter: The row delimiter. Type: string (or Expression with
- resultType string).
- :type row_delimiter: object
- :param encoding_name: The code page name of the preferred encoding. If
- not specified, the default value is UTF-8, unless the BOM denotes another
- Unicode encoding. Refer to the name column of the table in the following
- link to set supported values:
- https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string
- (or Expression with resultType string).
- :type encoding_name: object
- :param compression_codec:
- :type compression_codec: object
- :param compression_level: The data compression method used for
- DelimitedText.
- :type compression_level: object
- :param quote_char: The quote character. Type: string (or Expression with
- resultType string).
- :type quote_char: object
- :param escape_char: The escape character. Type: string (or Expression with
- resultType string).
- :type escape_char: object
- :param first_row_as_header: When used as input, treat the first row of
- data as headers. When used as output, write the headers into the output as
- the first row of data. The default value is false. Type: boolean (or
- Expression with resultType boolean).
- :type first_row_as_header: object
- :param null_value: The null value string. Type: string (or Expression with
- resultType string).
- :type null_value: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'location': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'}, - 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, - 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, - 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'}, - 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, - 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, - 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, - 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DelimitedTextDataset, self).__init__(**kwargs) - self.location = kwargs.get('location', None) - self.column_delimiter = kwargs.get('column_delimiter', None) - self.row_delimiter = kwargs.get('row_delimiter', None) - self.encoding_name = kwargs.get('encoding_name', None) - self.compression_codec = kwargs.get('compression_codec', None) - self.compression_level = kwargs.get('compression_level', None) - self.quote_char = kwargs.get('quote_char', None) - self.escape_char = kwargs.get('escape_char', None) - self.first_row_as_header = kwargs.get('first_row_as_header', None) - self.null_value = kwargs.get('null_value', None) - self.type = 'DelimitedText' - - -class FormatReadSettings(Model): - """Format read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(FormatReadSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = kwargs.get('type', None) - - -class DelimitedTextReadSettings(FormatReadSettings): - """Delimited text read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param skip_line_count: Indicates the number of non-empty rows to skip - when reading data from input files. Type: integer (or Expression with - resultType integer). 
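-
- # A minimal usage sketch (illustrative, not generated code; the linked
- # service name and path are hypothetical, and the generic DatasetLocation
- # carrier is assumed here in place of a store-specific location subclass):
- #
- #     dataset = DelimitedTextDataset(
- #         linked_service_name=LinkedServiceReference(reference_name='BlobLs'),
- #         location=DatasetLocation(
- #             type='AzureBlobStorageLocation',
- #             folder_path='container/inbound', file_name='rows.csv'),
- #         column_delimiter=',',
- #         first_row_as_header=True)
-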
- :type skip_line_count: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DelimitedTextReadSettings, self).__init__(**kwargs) - self.skip_line_count = kwargs.get('skip_line_count', None) - - -class DelimitedTextSink(CopySink): - """A copy activity DelimitedText sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: DelimitedText store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - :param format_settings: DelimitedText format settings. - :type format_settings: - ~azure.mgmt.datafactory.models.DelimitedTextWriteSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'}, - } - - def __init__(self, **kwargs): - super(DelimitedTextSink, self).__init__(**kwargs) - self.store_settings = kwargs.get('store_settings', None) - self.format_settings = kwargs.get('format_settings', None) - self.type = 'DelimitedTextSink' - - -class DelimitedTextSource(CopySource): - """A copy activity DelimitedText source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. 
Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: DelimitedText store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - :param format_settings: DelimitedText format settings. - :type format_settings: - ~azure.mgmt.datafactory.models.DelimitedTextReadSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'}, - } - - def __init__(self, **kwargs): - super(DelimitedTextSource, self).__init__(**kwargs) - self.store_settings = kwargs.get('store_settings', None) - self.format_settings = kwargs.get('format_settings', None) - self.type = 'DelimitedTextSource' - - -class DelimitedTextWriteSettings(FormatWriteSettings): - """Delimited text write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str - :param quote_all_text: Indicates whether string values should always be - enclosed with quotes. Type: boolean (or Expression with resultType - boolean). - :type quote_all_text: object - :param file_extension: Required. The file extension used to create the - files. Type: string (or Expression with resultType string). - :type file_extension: object - """ - - _validation = { - 'type': {'required': True}, - 'file_extension': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, - 'file_extension': {'key': 'fileExtension', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DelimitedTextWriteSettings, self).__init__(**kwargs) - self.quote_all_text = kwargs.get('quote_all_text', None) - self.file_extension = kwargs.get('file_extension', None) - - -class DependencyReference(Model): - """Referenced dependency. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SelfDependencyTumblingWindowTriggerReference, - TriggerDependencyReference - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. 
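-
- # A minimal usage sketch (illustrative, not generated code; note that the
- # read/write settings types are passed explicitly because these settings
- # classes take 'type' as a plain constructor argument):
- #
- #     source = DelimitedTextSource(
- #         format_settings=DelimitedTextReadSettings(
- #             type='DelimitedTextReadSettings', skip_line_count=1))
- #     sink = DelimitedTextSink(
- #         format_settings=DelimitedTextWriteSettings(
- #             type='DelimitedTextWriteSettings',
- #             file_extension='.csv', quote_all_text=True))
-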
- :type type: str
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'type': {'key': 'type', 'type': 'str'},
- }
-
- _subtype_map = {
- 'type': {'SelfDependencyTumblingWindowTriggerReference': 'SelfDependencyTumblingWindowTriggerReference', 'TriggerDependencyReference': 'TriggerDependencyReference'}
- }
-
- def __init__(self, **kwargs):
- super(DependencyReference, self).__init__(**kwargs)
- self.type = None
-
-
-class DistcpSettings(Model):
- """Distcp settings.
-
- All required parameters must be populated in order to send to Azure.
-
- :param resource_manager_endpoint: Required. Specifies the Yarn
- ResourceManager endpoint. Type: string (or Expression with resultType
- string).
- :type resource_manager_endpoint: object
- :param temp_script_path: Required. Specifies an existing folder path which
- will be used to store the temporary Distcp command script. The script file
- is generated by ADF and will be removed after the copy job finishes. Type:
- string (or Expression with resultType string).
- :type temp_script_path: object
- :param distcp_options: Specifies the Distcp options. Type: string (or
- Expression with resultType string).
- :type distcp_options: object
- """
-
- _validation = {
- 'resource_manager_endpoint': {'required': True},
- 'temp_script_path': {'required': True},
- }
-
- _attribute_map = {
- 'resource_manager_endpoint': {'key': 'resourceManagerEndpoint', 'type': 'object'},
- 'temp_script_path': {'key': 'tempScriptPath', 'type': 'object'},
- 'distcp_options': {'key': 'distcpOptions', 'type': 'object'},
- }
-
- def __init__(self, **kwargs):
- super(DistcpSettings, self).__init__(**kwargs)
- self.resource_manager_endpoint = kwargs.get('resource_manager_endpoint', None)
- self.temp_script_path = kwargs.get('temp_script_path', None)
- self.distcp_options = kwargs.get('distcp_options', None)
-
-
-class DocumentDbCollectionDataset(Dataset):
- """Microsoft Azure Document Database Collection dataset.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param description: Dataset description.
- :type description: str
- :param structure: Columns that define the structure of the dataset. Type:
- array (or Expression with resultType array), itemType: DatasetDataElement.
- :type structure: object
- :param schema: Columns that define the physical type schema of the
- dataset. Type: array (or Expression with resultType array), itemType:
- DatasetSchemaDataElement.
- :type schema: object
- :param linked_service_name: Required. Linked service reference.
- :type linked_service_name:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param parameters: Parameters for dataset.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- Dataset.
- :type annotations: list[object]
- :param folder: The folder that this Dataset is in. If not specified,
- Dataset will appear at the root level.
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
- :param type: Required. Constant filled by server.
- :type type: str
- :param collection_name: Required. Document Database collection name. Type:
- string (or Expression with resultType string).
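-
- # A minimal usage sketch (illustrative, not generated code; the
- # ResourceManager endpoint and script folder are hypothetical placeholders):
- #
- #     distcp = DistcpSettings(
- #         resource_manager_endpoint='http://namenode:8088',
- #         temp_script_path='/tmp/adf-distcp',
- #         distcp_options='-strategy dynamic')
-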
- :type collection_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'collection_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DocumentDbCollectionDataset, self).__init__(**kwargs) - self.collection_name = kwargs.get('collection_name', None) - self.type = 'DocumentDbCollection' - - -class DocumentDbCollectionSink(CopySink): - """A copy activity Document Database Collection sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param nesting_separator: Nested properties separator. Default is . (dot). - Type: string (or Expression with resultType string). - :type nesting_separator: object - :param write_behavior: Describes how to write data to Azure Cosmos DB. - Allowed values: insert and upsert. 
- :type write_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DocumentDbCollectionSink, self).__init__(**kwargs) - self.nesting_separator = kwargs.get('nesting_separator', None) - self.write_behavior = kwargs.get('write_behavior', None) - self.type = 'DocumentDbCollectionSink' - - -class DocumentDbCollectionSource(CopySource): - """A copy activity Document Database Collection source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Documents query. Type: string (or Expression with resultType - string). - :type query: object - :param nesting_separator: Nested properties separator. Type: string (or - Expression with resultType string). - :type nesting_separator: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DocumentDbCollectionSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.nesting_separator = kwargs.get('nesting_separator', None) - self.type = 'DocumentDbCollectionSource' - - -class DrillLinkedService(LinkedService): - """Drill server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
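-
- # A minimal usage sketch (illustrative, not generated code; the query is a
- # hypothetical example) pairing a Document Database collection source with
- # an upsert sink:
- #
- #     source = DocumentDbCollectionSource(
- #         query='SELECT * FROM c WHERE c.active = true',
- #         nesting_separator='.')
- #     sink = DocumentDbCollectionSink(write_behavior='upsert')
-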
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection - string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DrillLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.pwd = kwargs.get('pwd', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Drill' - - -class DrillSource(CopySource): - """A copy activity Drill server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
- 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
- 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
- 'type': {'key': 'type', 'type': 'str'},
- 'query': {'key': 'query', 'type': 'object'},
- }
-
- def __init__(self, **kwargs):
- super(DrillSource, self).__init__(**kwargs)
- self.query = kwargs.get('query', None)
- self.type = 'DrillSource'
-
-
-class DrillTableDataset(Dataset):
- """Drill server dataset.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param description: Dataset description.
- :type description: str
- :param structure: Columns that define the structure of the dataset. Type:
- array (or Expression with resultType array), itemType: DatasetDataElement.
- :type structure: object
- :param schema: Columns that define the physical type schema of the
- dataset. Type: array (or Expression with resultType array), itemType:
- DatasetSchemaDataElement.
- :type schema: object
- :param linked_service_name: Required. Linked service reference.
- :type linked_service_name:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param parameters: Parameters for dataset.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- Dataset.
- :type annotations: list[object]
- :param folder: The folder that this Dataset is in. If not specified,
- Dataset will appear at the root level.
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
- :param type: Required. Constant filled by server.
- :type type: str
- :param table_name: This property will be retired. Please consider using
- schema + table properties instead.
- :type table_name: object
- :param table: The table name of the Drill database. Type: string (or
- Expression with resultType string).
- :type table: object
- :param drill_table_dataset_schema: The schema name of the Drill
- database. Type: string (or Expression with resultType string).
- :type drill_table_dataset_schema: object
- """
-
- _validation = {
- 'linked_service_name': {'required': True},
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'description': {'key': 'description', 'type': 'str'},
- 'structure': {'key': 'structure', 'type': 'object'},
- 'schema': {'key': 'schema', 'type': 'object'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
- 'type': {'key': 'type', 'type': 'str'},
- 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
- 'table': {'key': 'typeProperties.table', 'type': 'object'},
- 'drill_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
- }
-
- def __init__(self, **kwargs):
- super(DrillTableDataset, self).__init__(**kwargs)
- self.table_name = kwargs.get('table_name', None)
- self.table = kwargs.get('table', None)
- self.drill_table_dataset_schema = kwargs.get('drill_table_dataset_schema', None)
- self.type = 'DrillTable'
-
-
-class DynamicsAXLinkedService(LinkedService):
- """Dynamics AX linked service.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param connect_via: The integration runtime reference.
- :type connect_via:
- ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
- :param description: Linked service description.
- :type description: str
- :param parameters: Parameters for linked service.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- linked service.
- :type annotations: list[object]
- :param type: Required. Constant filled by server.
- :type type: str
- :param url: Required. The Dynamics AX (or Dynamics 365 Finance and
- Operations) instance OData endpoint.
- :type url: object
- :param service_principal_id: Required. Specify the application's client
- ID. Type: string (or Expression with resultType string).
- :type service_principal_id: object
- :param service_principal_key: Required. Specify the application's key.
- Mark this field as a SecureString to store it securely in Data Factory, or
- reference a secret stored in Azure Key Vault. Type: string (or Expression
- with resultType string).
- :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
- :param tenant: Required. Specify the tenant information (domain name or
- tenant ID) under which your application resides. Retrieve it by hovering
- the mouse over the top-right corner of the Azure portal. Type: string (or
- Expression with resultType string).
- :type tenant: object
- :param aad_resource_id: Required. Specify the resource you are requesting
- authorization for. Type: string (or Expression with resultType string).
- :type aad_resource_id: object
- :param encrypted_credential: The encrypted credential used for
- authentication. Credentials are encrypted using the integration runtime
- credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, - 'tenant': {'required': True}, - 'aad_resource_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DynamicsAXLinkedService, self).__init__(**kwargs) - self.url = kwargs.get('url', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.aad_resource_id = kwargs.get('aad_resource_id', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'DynamicsAX' - - -class DynamicsAXResourceDataset(Dataset): - """The path of the Dynamics AX OData entity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param path: Required. The path of the Dynamics AX OData entity. Type: - string (or Expression with resultType string). 
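# Usage sketch: a minimal DynamicsAXLinkedService with the five required
# type properties listed in _validation above. The endpoint, client id,
# key and tenant values are placeholders.
from azure.mgmt.datafactory.models import DynamicsAXLinkedService, SecureString

ax_ls = DynamicsAXLinkedService(
    url='https://contoso.operations.dynamics.com/data',
    service_principal_id='<application-client-id>',
    service_principal_key=SecureString(value='<application-key>'),
    tenant='contoso.onmicrosoft.com',
    aad_resource_id='https://contoso.operations.dynamics.com')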
- :type path: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DynamicsAXResourceDataset, self).__init__(**kwargs) - self.path = kwargs.get('path', None) - self.type = 'DynamicsAXResource' - - -class DynamicsAXSource(CopySource): - """A copy activity Dynamics AX source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DynamicsAXSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'DynamicsAXSource' - - -class DynamicsCrmEntityDataset(Dataset): - """The Dynamics CRM entity dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
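# Usage sketch pairing the dataset and source above: the dataset names the
# OData entity, the source optionally narrows it with a query. The linked
# service name, entity path and OData option are illustrative assumptions.
from azure.mgmt.datafactory.models import (
    DynamicsAXResourceDataset, DynamicsAXSource, LinkedServiceReference)

ax_dataset = DynamicsAXResourceDataset(
    linked_service_name=LinkedServiceReference(reference_name='DynamicsAXLS'),
    path='CustomersV3')
ax_source = DynamicsAXSource(query='$top=100')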
- :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param entity_name: The logical name of the entity. Type: string (or - Expression with resultType string). - :type entity_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DynamicsCrmEntityDataset, self).__init__(**kwargs) - self.entity_name = kwargs.get('entity_name', None) - self.type = 'DynamicsCrmEntity' - - -class DynamicsCrmLinkedService(LinkedService): - """Dynamics CRM linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param deployment_type: Required. The deployment type of the Dynamics CRM - instance. 'Online' for Dynamics CRM Online and 'OnPremisesWithIfd' for - Dynamics CRM on-premises with Ifd. Type: string (or Expression with - resultType string). Possible values include: 'Online', 'OnPremisesWithIfd' - :type deployment_type: str or - ~azure.mgmt.datafactory.models.DynamicsDeploymentType - :param host_name: The host name of the on-premises Dynamics CRM server. - The property is required for on-prem and not allowed for online. Type: - string (or Expression with resultType string). - :type host_name: object - :param port: The port of on-premises Dynamics CRM server. The property is - required for on-prem and not allowed for online. Default is 443. Type: - integer (or Expression with resultType integer), minimum: 0. - :type port: object - :param service_uri: The URL to the Microsoft Dynamics CRM server. The - property is required for on-line and not allowed for on-prem. Type: string - (or Expression with resultType string). 
- :type service_uri: object
- :param organization_name: The organization name of the Dynamics CRM
- instance. The property is required for on-prem and required for online
- when more than one Dynamics CRM instance is associated with the user.
- Type: string (or Expression with resultType string).
- :type organization_name: object
- :param authentication_type: Required. The authentication type to connect
- to the Dynamics CRM server. 'Office365' for online scenario, 'Ifd' for
- on-premises with Ifd scenario. Type: string (or Expression with resultType
- string). Possible values include: 'Office365', 'Ifd'
- :type authentication_type: str or
- ~azure.mgmt.datafactory.models.DynamicsAuthenticationType
- :param username: Required. User name to access the Dynamics CRM instance.
- Type: string (or Expression with resultType string).
- :type username: object
- :param password: Password to access the Dynamics CRM instance.
- :type password: ~azure.mgmt.datafactory.models.SecretBase
- :param encrypted_credential: The encrypted credential used for
- authentication. Credentials are encrypted using the integration runtime
- credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object
- """
-
- _validation = {
- 'type': {'required': True},
- 'deployment_type': {'required': True},
- 'authentication_type': {'required': True},
- 'username': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
- 'description': {'key': 'description', 'type': 'str'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'},
- 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'},
- 'port': {'key': 'typeProperties.port', 'type': 'object'},
- 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'},
- 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'},
- 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
- 'username': {'key': 'typeProperties.username', 'type': 'object'},
- 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
- 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
- }
-
- def __init__(self, **kwargs):
- super(DynamicsCrmLinkedService, self).__init__(**kwargs)
- self.deployment_type = kwargs.get('deployment_type', None)
- self.host_name = kwargs.get('host_name', None)
- self.port = kwargs.get('port', None)
- self.service_uri = kwargs.get('service_uri', None)
- self.organization_name = kwargs.get('organization_name', None)
- self.authentication_type = kwargs.get('authentication_type', None)
- self.username = kwargs.get('username', None)
- self.password = kwargs.get('password', None)
- self.encrypted_credential = kwargs.get('encrypted_credential', None)
- self.type = 'DynamicsCrm'
-
-
-class DynamicsCrmSink(CopySink):
- """A copy activity Dynamics CRM sink.
-
- Variables are only populated by the server, and will be ignored when
- sending a request.
-
- All required parameters must be populated in order to send to Azure.
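# Usage sketch: an online Dynamics CRM linked service. Per the
# DynamicsCrmLinkedService docstring above, 'Online' deployments take
# service_uri and must omit host_name/port. The URI and credentials are
# placeholders.
from azure.mgmt.datafactory.models import DynamicsCrmLinkedService, SecureString

crm_ls = DynamicsCrmLinkedService(
    deployment_type='Online',
    service_uri='https://contoso.crm.dynamics.com',
    authentication_type='Office365',
    username='alice@contoso.com',
    password=SecureString(value='<password>'))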
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :ivar write_behavior: Required. The write behavior for the operation. - Default value: "Upsert" . - :vartype write_behavior: str - :param ignore_null_values: The flag indicating whether to ignore null - values from input dataset (except key fields) during write operation. - Default is false. Type: boolean (or Expression with resultType boolean). - :type ignore_null_values: object - """ - - _validation = { - 'type': {'required': True}, - 'write_behavior': {'required': True, 'constant': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, - } - - write_behavior = "Upsert" - - def __init__(self, **kwargs): - super(DynamicsCrmSink, self).__init__(**kwargs) - self.ignore_null_values = kwargs.get('ignore_null_values', None) - self.type = 'DynamicsCrmSink' - - -class DynamicsCrmSource(CopySource): - """A copy activity Dynamics CRM source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: FetchXML is a proprietary query language that is used in - Microsoft Dynamics CRM (online & on-premises). 
Type: string (or Expression - with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DynamicsCrmSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'DynamicsCrmSource' - - -class DynamicsEntityDataset(Dataset): - """The Dynamics entity dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param entity_name: The logical name of the entity. Type: string (or - Expression with resultType string). - :type entity_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DynamicsEntityDataset, self).__init__(**kwargs) - self.entity_name = kwargs.get('entity_name', None) - self.type = 'DynamicsEntity' - - -class DynamicsLinkedService(LinkedService): - """Dynamics linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. 
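# Usage sketch: a DynamicsCrmSource carrying a FetchXML query, as the
# docstring above describes. The entity and attribute names are illustrative.
from azure.mgmt.datafactory.models import DynamicsCrmSource

crm_source = DynamicsCrmSource(query=(
    '<fetch top="10">'
    '<entity name="account">'
    '<attribute name="name" />'
    '</entity>'
    '</fetch>'))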
- :type connect_via:
- ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
- :param description: Linked service description.
- :type description: str
- :param parameters: Parameters for linked service.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- linked service.
- :type annotations: list[object]
- :param type: Required. Constant filled by server.
- :type type: str
- :param deployment_type: Required. The deployment type of the Dynamics
- instance. 'Online' for Dynamics Online and 'OnPremisesWithIfd' for
- Dynamics on-premises with Ifd. Type: string (or Expression with resultType
- string).
- :type deployment_type: object
- :param host_name: The host name of the on-premises Dynamics server. The
- property is required for on-prem and not allowed for online. Type: string
- (or Expression with resultType string).
- :type host_name: object
- :param port: The port of the on-premises Dynamics server. The property is
- required for on-prem and not allowed for online. Default is 443. Type:
- integer (or Expression with resultType integer), minimum: 0.
- :type port: object
- :param service_uri: The URL to the Microsoft Dynamics server. The property
- is required for online and not allowed for on-prem. Type: string (or
- Expression with resultType string).
- :type service_uri: object
- :param organization_name: The organization name of the Dynamics instance.
- The property is required for on-prem and required for online when more
- than one Dynamics instance is associated with the user. Type: string (or
- Expression with resultType string).
- :type organization_name: object
- :param authentication_type: Required. The authentication type to connect
- to the Dynamics server. 'Office365' for online scenario, 'Ifd' for
- on-premises with Ifd scenario. Type: string (or Expression with resultType
- string).
- :type authentication_type: object
- :param username: Required. User name to access the Dynamics instance.
- Type: string (or Expression with resultType string).
- :type username: object
- :param password: Password to access the Dynamics instance.
- :type password: ~azure.mgmt.datafactory.models.SecretBase
- :param encrypted_credential: The encrypted credential used for
- authentication. Credentials are encrypted using the integration runtime
- credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object
- """
-
- _validation = {
- 'type': {'required': True},
- 'deployment_type': {'required': True},
- 'authentication_type': {'required': True},
- 'username': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
- 'description': {'key': 'description', 'type': 'str'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'},
- 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'},
- 'port': {'key': 'typeProperties.port', 'type': 'object'},
- 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'},
- 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'},
- 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'},
- 'username': {'key': 'typeProperties.username', 'type': 'object'},
- 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
- 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
- }
-
- def __init__(self, **kwargs):
- super(DynamicsLinkedService, self).__init__(**kwargs)
- self.deployment_type = kwargs.get('deployment_type', None)
- self.host_name = kwargs.get('host_name', None)
- self.port = kwargs.get('port', None)
- self.service_uri = kwargs.get('service_uri', None)
- self.organization_name = kwargs.get('organization_name', None)
- self.authentication_type = kwargs.get('authentication_type', None)
- self.username = kwargs.get('username', None)
- self.password = kwargs.get('password', None)
- self.encrypted_credential = kwargs.get('encrypted_credential', None)
- self.type = 'Dynamics'
-
-
-class DynamicsSink(CopySink):
- """A copy activity Dynamics sink.
-
- Variables are only populated by the server, and will be ignored when
- sending a request.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param write_batch_size: Write batch size. Type: integer (or Expression
- with resultType integer), minimum: 0.
- :type write_batch_size: object
- :param write_batch_timeout: Write batch timeout. Type: string (or
- Expression with resultType string), pattern:
- ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type write_batch_timeout: object
- :param sink_retry_count: Sink retry count. Type: integer (or Expression
- with resultType integer).
- :type sink_retry_count: object
- :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
- resultType string), pattern:
- ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type sink_retry_wait: object
- :param max_concurrent_connections: The maximum concurrent connection count
- for the sink data store. Type: integer (or Expression with resultType
- integer).
- :type max_concurrent_connections: object
- :param type: Required. Constant filled by server.
- :type type: str
- :ivar write_behavior: Required. The write behavior for the operation.
- Default value: "Upsert".
- :vartype write_behavior: str
- :param ignore_null_values: The flag indicating whether to ignore null
- values from the input dataset (except key fields) during write operation.
Default is - false. Type: boolean (or Expression with resultType boolean). - :type ignore_null_values: object - """ - - _validation = { - 'type': {'required': True}, - 'write_behavior': {'required': True, 'constant': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, - } - - write_behavior = "Upsert" - - def __init__(self, **kwargs): - super(DynamicsSink, self).__init__(**kwargs) - self.ignore_null_values = kwargs.get('ignore_null_values', None) - self.type = 'DynamicsSink' - - -class DynamicsSource(CopySource): - """A copy activity Dynamics source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: FetchXML is a proprietary query language that is used in - Microsoft Dynamics (online & on-premises). Type: string (or Expression - with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(DynamicsSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'DynamicsSource' - - -class EloquaLinkedService(LinkedService): - """Eloqua server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. 
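# Usage sketch wiring the Dynamics models above into a copy activity. Note
# that write_behavior is a class-level constant ("Upsert") and is not passed
# to the sink constructor. The linked service shows the on-premises-with-Ifd
# shape (host_name/port/organization_name instead of service_uri); every
# name and value below is a placeholder.
from azure.mgmt.datafactory.models import (
    CopyActivity, DatasetReference, DynamicsLinkedService, DynamicsSink,
    DynamicsSource, SecureString)

dynamics_ls = DynamicsLinkedService(
    deployment_type='OnPremisesWithIfd',
    host_name='crm.contoso.local',
    port=443,
    organization_name='contoso',
    authentication_type='Ifd',
    username='CONTOSO\\alice',
    password=SecureString(value='<password>'))

copy_accounts = CopyActivity(
    name='CopyAccounts',
    inputs=[DatasetReference(reference_name='DynamicsInputDataset')],
    outputs=[DatasetReference(reference_name='SinkDataset')],
    source=DynamicsSource(query='<fetch><entity name="account" /></fetch>'),
    sink=DynamicsSink(write_batch_size=10, ignore_null_values=True))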
- :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param endpoint: Required. The endpoint of the Eloqua server. (i.e. - eloqua.example.com) - :type endpoint: object - :param username: Required. The site name and user name of your Eloqua - account in the form: sitename/username. (i.e. Eloqua/Alice) - :type username: object - :param password: The password corresponding to the user name. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'username': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(EloquaLinkedService, self).__init__(**kwargs) - self.endpoint = kwargs.get('endpoint', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Eloqua' - - -class EloquaObjectDataset(Dataset): - """Eloqua server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. 
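# Usage sketch following the Eloqua docstring's own examples (endpoint
# 'eloqua.example.com', username in the 'sitename/username' form); the
# password is a placeholder.
from azure.mgmt.datafactory.models import EloquaLinkedService, SecureString

eloqua_ls = EloquaLinkedService(
    endpoint='eloqua.example.com',
    username='Eloqua/Alice',
    password=SecureString(value='<password>'))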
- :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(EloquaObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'EloquaObject' - - -class EloquaSource(CopySource): - """A copy activity Eloqua server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(EloquaSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'EloquaSource' - - -class EntityReference(Model): - """The entity reference. - - :param type: The type of this referenced entity. Possible values include: - 'IntegrationRuntimeReference', 'LinkedServiceReference' - :type type: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType - :param reference_name: The name of this referenced entity. - :type reference_name: str - """ - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(EntityReference, self).__init__(**kwargs) - self.type = kwargs.get('type', None) - self.reference_name = kwargs.get('reference_name', None) - - -class ExecutePipelineActivity(ControlActivity): - """Execute pipeline activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param pipeline: Required. Pipeline reference. - :type pipeline: ~azure.mgmt.datafactory.models.PipelineReference - :param parameters: Pipeline parameters. - :type parameters: dict[str, object] - :param wait_on_completion: Defines whether activity execution will wait - for the dependent pipeline execution to finish. Default is false. 
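# Usage sketch for the EntityReference model above: it simply names another
# entity by type. The integration runtime name is an assumption.
from azure.mgmt.datafactory.models import EntityReference

ir_ref = EntityReference(
    type='IntegrationRuntimeReference',
    reference_name='MySharedIntegrationRuntime')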
- :type wait_on_completion: bool - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'pipeline': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pipeline': {'key': 'typeProperties.pipeline', 'type': 'PipelineReference'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, - 'wait_on_completion': {'key': 'typeProperties.waitOnCompletion', 'type': 'bool'}, - } - - def __init__(self, **kwargs): - super(ExecutePipelineActivity, self).__init__(**kwargs) - self.pipeline = kwargs.get('pipeline', None) - self.parameters = kwargs.get('parameters', None) - self.wait_on_completion = kwargs.get('wait_on_completion', None) - self.type = 'ExecutePipeline' - - -class ExecuteSSISPackageActivity(ExecutionActivity): - """Execute SSIS package activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param package_location: Required. SSIS package location. - :type package_location: ~azure.mgmt.datafactory.models.SSISPackageLocation - :param runtime: Specifies the runtime to execute SSIS package. The value - should be "x86" or "x64". Type: string (or Expression with resultType - string). - :type runtime: object - :param logging_level: The logging level of SSIS package execution. Type: - string (or Expression with resultType string). - :type logging_level: object - :param environment_path: The environment path to execute the SSIS package. - Type: string (or Expression with resultType string). - :type environment_path: object - :param execution_credential: The package execution credential. - :type execution_credential: - ~azure.mgmt.datafactory.models.SSISExecutionCredential - :param connect_via: Required. The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param project_parameters: The project level parameters to execute the - SSIS package. - :type project_parameters: dict[str, - ~azure.mgmt.datafactory.models.SSISExecutionParameter] - :param package_parameters: The package level parameters to execute the - SSIS package. - :type package_parameters: dict[str, - ~azure.mgmt.datafactory.models.SSISExecutionParameter] - :param project_connection_managers: The project level connection managers - to execute the SSIS package. 
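# Usage sketch: invoking a child pipeline and blocking until it finishes via
# wait_on_completion, per the docstring above. The pipeline name and
# parameter value are assumptions.
from azure.mgmt.datafactory.models import (
    ExecutePipelineActivity, PipelineReference)

run_child = ExecutePipelineActivity(
    name='RunChildPipeline',
    pipeline=PipelineReference(reference_name='ChildPipeline'),
    parameters={'windowStart': '2019-06-01'},
    wait_on_completion=True)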
- :type project_connection_managers: dict[str, dict[str, - ~azure.mgmt.datafactory.models.SSISExecutionParameter]] - :param package_connection_managers: The package level connection managers - to execute the SSIS package. - :type package_connection_managers: dict[str, dict[str, - ~azure.mgmt.datafactory.models.SSISExecutionParameter]] - :param property_overrides: The property overrides to execute the SSIS - package. - :type property_overrides: dict[str, - ~azure.mgmt.datafactory.models.SSISPropertyOverride] - :param log_location: SSIS package execution log location. - :type log_location: ~azure.mgmt.datafactory.models.SSISLogLocation - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'package_location': {'required': True}, - 'connect_via': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'package_location': {'key': 'typeProperties.packageLocation', 'type': 'SSISPackageLocation'}, - 'runtime': {'key': 'typeProperties.runtime', 'type': 'object'}, - 'logging_level': {'key': 'typeProperties.loggingLevel', 'type': 'object'}, - 'environment_path': {'key': 'typeProperties.environmentPath', 'type': 'object'}, - 'execution_credential': {'key': 'typeProperties.executionCredential', 'type': 'SSISExecutionCredential'}, - 'connect_via': {'key': 'typeProperties.connectVia', 'type': 'IntegrationRuntimeReference'}, - 'project_parameters': {'key': 'typeProperties.projectParameters', 'type': '{SSISExecutionParameter}'}, - 'package_parameters': {'key': 'typeProperties.packageParameters', 'type': '{SSISExecutionParameter}'}, - 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, - 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, - 'property_overrides': {'key': 'typeProperties.propertyOverrides', 'type': '{SSISPropertyOverride}'}, - 'log_location': {'key': 'typeProperties.logLocation', 'type': 'SSISLogLocation'}, - } - - def __init__(self, **kwargs): - super(ExecuteSSISPackageActivity, self).__init__(**kwargs) - self.package_location = kwargs.get('package_location', None) - self.runtime = kwargs.get('runtime', None) - self.logging_level = kwargs.get('logging_level', None) - self.environment_path = kwargs.get('environment_path', None) - self.execution_credential = kwargs.get('execution_credential', None) - self.connect_via = kwargs.get('connect_via', None) - self.project_parameters = kwargs.get('project_parameters', None) - self.package_parameters = kwargs.get('package_parameters', None) - self.project_connection_managers = kwargs.get('project_connection_managers', None) - self.package_connection_managers = kwargs.get('package_connection_managers', None) - self.property_overrides = kwargs.get('property_overrides', None) - self.log_location = kwargs.get('log_location', None) - self.type = 'ExecuteSSISPackage' - - -class ExposureControlRequest(Model): - """The exposure control request. - - :param feature_name: The feature name. 
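# Usage sketch: a minimal ExecuteSSISPackageActivity. package_location and
# connect_via are the two required type properties per _validation above;
# the package path and integration runtime name are placeholders.
from azure.mgmt.datafactory.models import (
    ExecuteSSISPackageActivity, IntegrationRuntimeReference,
    SSISPackageLocation)

run_pkg = ExecuteSSISPackageActivity(
    name='RunSsisPackage',
    package_location=SSISPackageLocation(
        package_path='MyFolder/MyProject/MyPackage.dtsx'),
    connect_via=IntegrationRuntimeReference(reference_name='AzureSsisIR'),
    runtime='x64',
    logging_level='Basic')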
- :type feature_name: str - :param feature_type: The feature type. - :type feature_type: str - """ - - _attribute_map = { - 'feature_name': {'key': 'featureName', 'type': 'str'}, - 'feature_type': {'key': 'featureType', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(ExposureControlRequest, self).__init__(**kwargs) - self.feature_name = kwargs.get('feature_name', None) - self.feature_type = kwargs.get('feature_type', None) - - -class ExposureControlResponse(Model): - """The exposure control response. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar feature_name: The feature name. - :vartype feature_name: str - :ivar value: The feature value. - :vartype value: str - """ - - _validation = { - 'feature_name': {'readonly': True}, - 'value': {'readonly': True}, - } - - _attribute_map = { - 'feature_name': {'key': 'featureName', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(ExposureControlResponse, self).__init__(**kwargs) - self.feature_name = None - self.value = None - - -class Expression(Model): - """Azure Data Factory expression definition. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Expression type. Default value: "Expression" . - :vartype type: str - :param value: Required. Expression value. - :type value: str - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'value': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, - } - - type = "Expression" - - def __init__(self, **kwargs): - super(Expression, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - - -class Resource(Model): - """Azure Data Factory top-level resource. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :param location: The resource location. - :type location: str - :param tags: The resource tags. - :type tags: dict[str, str] - :ivar e_tag: Etag identifies change in the resource. - :vartype e_tag: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'e_tag': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'e_tag': {'key': 'eTag', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(Resource, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.location = kwargs.get('location', None) - self.tags = kwargs.get('tags', None) - self.e_tag = None - - -class Factory(Resource): - """Factory resource type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :param location: The resource location. - :type location: str - :param tags: The resource tags. 
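# Usage sketch: Expression.type is a class constant ("Expression"), so only
# the value is supplied; the expression text is illustrative. The exposure
# control request below uses a made-up feature name.
from azure.mgmt.datafactory.models import Expression, ExposureControlRequest

expr = Expression(value='@pipeline().parameters.windowStart')
probe = ExposureControlRequest(
    feature_name='SampleFeature', feature_type='Feature')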
- :type tags: dict[str, str] - :ivar e_tag: Etag identifies change in the resource. - :vartype e_tag: str - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param identity: Managed service identity of the factory. - :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity - :ivar provisioning_state: Factory provisioning state, example Succeeded. - :vartype provisioning_state: str - :ivar create_time: Time the factory was created in ISO8601 format. - :vartype create_time: datetime - :ivar version: Version of the factory. - :vartype version: str - :param repo_configuration: Git repo information of the factory. - :type repo_configuration: - ~azure.mgmt.datafactory.models.FactoryRepoConfiguration - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'e_tag': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - 'create_time': {'readonly': True}, - 'version': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'e_tag': {'key': 'eTag', 'type': 'str'}, - 'additional_properties': {'key': '', 'type': '{object}'}, - 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, - 'create_time': {'key': 'properties.createTime', 'type': 'iso-8601'}, - 'version': {'key': 'properties.version', 'type': 'str'}, - 'repo_configuration': {'key': 'properties.repoConfiguration', 'type': 'FactoryRepoConfiguration'}, - } - - def __init__(self, **kwargs): - super(Factory, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.identity = kwargs.get('identity', None) - self.provisioning_state = None - self.create_time = None - self.version = None - self.repo_configuration = kwargs.get('repo_configuration', None) - - -class FactoryRepoConfiguration(Model): - """Factory's git repo information. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: FactoryVSTSConfiguration, FactoryGitHubConfiguration - - All required parameters must be populated in order to send to Azure. - - :param account_name: Required. Account name. - :type account_name: str - :param repository_name: Required. Repository name. - :type repository_name: str - :param collaboration_branch: Required. Collaboration branch. - :type collaboration_branch: str - :param root_folder: Required. Root folder. - :type root_folder: str - :param last_commit_id: Last commit id. - :type last_commit_id: str - :param type: Required. Constant filled by server. 
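# Usage sketch: a Factory payload. The id/name/type/e_tag and provisioning
# fields are read-only and stay None until the service fills them; the
# location and tags are placeholders.
from azure.mgmt.datafactory.models import Factory, FactoryIdentity

factory = Factory(
    location='eastus',
    tags={'environment': 'dev'},
    identity=FactoryIdentity())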
- :type type: str - """ - - _validation = { - 'account_name': {'required': True}, - 'repository_name': {'required': True}, - 'collaboration_branch': {'required': True}, - 'root_folder': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'repository_name': {'key': 'repositoryName', 'type': 'str'}, - 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, - 'root_folder': {'key': 'rootFolder', 'type': 'str'}, - 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'FactoryVSTSConfiguration': 'FactoryVSTSConfiguration', 'FactoryGitHubConfiguration': 'FactoryGitHubConfiguration'} - } - - def __init__(self, **kwargs): - super(FactoryRepoConfiguration, self).__init__(**kwargs) - self.account_name = kwargs.get('account_name', None) - self.repository_name = kwargs.get('repository_name', None) - self.collaboration_branch = kwargs.get('collaboration_branch', None) - self.root_folder = kwargs.get('root_folder', None) - self.last_commit_id = kwargs.get('last_commit_id', None) - self.type = None - - -class FactoryGitHubConfiguration(FactoryRepoConfiguration): - """Factory's GitHub repo information. - - All required parameters must be populated in order to send to Azure. - - :param account_name: Required. Account name. - :type account_name: str - :param repository_name: Required. Repository name. - :type repository_name: str - :param collaboration_branch: Required. Collaboration branch. - :type collaboration_branch: str - :param root_folder: Required. Root folder. - :type root_folder: str - :param last_commit_id: Last commit id. - :type last_commit_id: str - :param type: Required. Constant filled by server. - :type type: str - :param host_name: GitHub Enterprise host name. For example: - https://github.mydomain.com - :type host_name: str - """ - - _validation = { - 'account_name': {'required': True}, - 'repository_name': {'required': True}, - 'collaboration_branch': {'required': True}, - 'root_folder': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'repository_name': {'key': 'repositoryName', 'type': 'str'}, - 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, - 'root_folder': {'key': 'rootFolder', 'type': 'str'}, - 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host_name': {'key': 'hostName', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(FactoryGitHubConfiguration, self).__init__(**kwargs) - self.host_name = kwargs.get('host_name', None) - self.type = 'FactoryGitHubConfiguration' - - -class FactoryIdentity(Model): - """Identity properties of the factory resource. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. The identity type. Currently the only supported type - is 'SystemAssigned'. Default value: "SystemAssigned" . - :vartype type: str - :ivar principal_id: The principal id of the identity. - :vartype principal_id: str - :ivar tenant_id: The client tenant id of the identity. 
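# Usage sketch: a GitHub repo configuration. host_name is only needed for
# GitHub Enterprise, per the docstring above, so it is omitted here; the
# account and repository names are assumptions.
from azure.mgmt.datafactory.models import FactoryGitHubConfiguration

github_repo = FactoryGitHubConfiguration(
    account_name='contoso',
    repository_name='adf-pipelines',
    collaboration_branch='master',
    root_folder='/')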
- :vartype tenant_id: str - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'principal_id': {'readonly': True}, - 'tenant_id': {'readonly': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'principal_id': {'key': 'principalId', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - } - - type = "SystemAssigned" - - def __init__(self, **kwargs): - super(FactoryIdentity, self).__init__(**kwargs) - self.principal_id = None - self.tenant_id = None - - -class FactoryRepoUpdate(Model): - """Factory's git repo information. - - :param factory_resource_id: The factory resource id. - :type factory_resource_id: str - :param repo_configuration: Git repo information of the factory. - :type repo_configuration: - ~azure.mgmt.datafactory.models.FactoryRepoConfiguration - """ - - _attribute_map = { - 'factory_resource_id': {'key': 'factoryResourceId', 'type': 'str'}, - 'repo_configuration': {'key': 'repoConfiguration', 'type': 'FactoryRepoConfiguration'}, - } - - def __init__(self, **kwargs): - super(FactoryRepoUpdate, self).__init__(**kwargs) - self.factory_resource_id = kwargs.get('factory_resource_id', None) - self.repo_configuration = kwargs.get('repo_configuration', None) - - -class FactoryUpdateParameters(Model): - """Parameters for updating a factory resource. - - :param tags: The resource tags. - :type tags: dict[str, str] - :param identity: Managed service identity of the factory. - :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity - """ - - _attribute_map = { - 'tags': {'key': 'tags', 'type': '{str}'}, - 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, - } - - def __init__(self, **kwargs): - super(FactoryUpdateParameters, self).__init__(**kwargs) - self.tags = kwargs.get('tags', None) - self.identity = kwargs.get('identity', None) - - -class FactoryVSTSConfiguration(FactoryRepoConfiguration): - """Factory's VSTS repo information. - - All required parameters must be populated in order to send to Azure. - - :param account_name: Required. Account name. - :type account_name: str - :param repository_name: Required. Repository name. - :type repository_name: str - :param collaboration_branch: Required. Collaboration branch. - :type collaboration_branch: str - :param root_folder: Required. Root folder. - :type root_folder: str - :param last_commit_id: Last commit id. - :type last_commit_id: str - :param type: Required. Constant filled by server. - :type type: str - :param project_name: Required. VSTS project name. - :type project_name: str - :param tenant_id: VSTS tenant id. 
-    :type tenant_id: str
-    """
-
-    _validation = {
-        'account_name': {'required': True},
-        'repository_name': {'required': True},
-        'collaboration_branch': {'required': True},
-        'root_folder': {'required': True},
-        'type': {'required': True},
-        'project_name': {'required': True},
-    }
-
-    _attribute_map = {
-        'account_name': {'key': 'accountName', 'type': 'str'},
-        'repository_name': {'key': 'repositoryName', 'type': 'str'},
-        'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'},
-        'root_folder': {'key': 'rootFolder', 'type': 'str'},
-        'last_commit_id': {'key': 'lastCommitId', 'type': 'str'},
-        'type': {'key': 'type', 'type': 'str'},
-        'project_name': {'key': 'projectName', 'type': 'str'},
-        'tenant_id': {'key': 'tenantId', 'type': 'str'},
-    }
-
-    def __init__(self, **kwargs):
-        super(FactoryVSTSConfiguration, self).__init__(**kwargs)
-        self.project_name = kwargs.get('project_name', None)
-        self.tenant_id = kwargs.get('tenant_id', None)
-        self.type = 'FactoryVSTSConfiguration'
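For orientation while reading these removed model definitions: a minimal sketch of pointing a factory at an Azure DevOps (VSTS) repository using the two models defined above, assuming the kwargs-based constructors shown here; every name and the resource ID are placeholders.

from azure.mgmt.datafactory.models import (
    FactoryRepoUpdate, FactoryVSTSConfiguration)

# 'type' is pinned to 'FactoryVSTSConfiguration' by the constructor above.
repo_update = FactoryRepoUpdate(
    factory_resource_id='/subscriptions/<sub-id>/resourceGroups/<rg>/'
                        'providers/Microsoft.DataFactory/factories/<factory>',
    repo_configuration=FactoryVSTSConfiguration(
        account_name='myorg',
        project_name='MyProject',
        repository_name='adf-pipelines',
        collaboration_branch='master',
        root_folder='/',
    ),
)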
-
-
-class FileServerLinkedService(LinkedService):
-    """File system linked service.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param host: Required. Host name of the server. Type: string (or
-     Expression with resultType string).
-    :type host: object
-    :param user_id: User ID used to log on to the server. Type: string (or
-     Expression with resultType string).
-    :type user_id: object
-    :param password: Password used to log on to the server.
-    :type password: ~azure.mgmt.datafactory.models.SecretBase
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
-    :type encrypted_credential: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-        'host': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
-        'description': {'key': 'description', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'host': {'key': 'typeProperties.host', 'type': 'object'},
-        'user_id': {'key': 'typeProperties.userId', 'type': 'object'},
-        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
-        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(FileServerLinkedService, self).__init__(**kwargs)
-        self.host = kwargs.get('host', None)
-        self.user_id = kwargs.get('user_id', None)
-        self.password = kwargs.get('password', None)
-        self.encrypted_credential = kwargs.get('encrypted_credential', None)
-        self.type = 'FileServer'
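A minimal usage sketch for the linked service above, assuming the SecureString secret model from the same package; the UNC host and account are placeholders.

from azure.mgmt.datafactory.models import FileServerLinkedService, SecureString

file_server_ls = FileServerLinkedService(
    host='\\\\fileserver01\\landing',          # placeholder UNC path
    user_id='CONTOSO\\svc-datafactory',        # placeholder account
    password=SecureString(value='<secret>'),   # or a Key Vault reference
)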
-
-
-class FileServerLocation(DatasetLocation):
-    """The location of file server dataset.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param type: Required. Type of dataset storage location.
-    :type type: str
-    :param folder_path: Specify the folder path of dataset. Type: string (or
-     Expression with resultType string).
-    :type folder_path: object
-    :param file_name: Specify the file name of dataset. Type: string (or
-     Expression with resultType string).
-    :type file_name: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'type': {'key': 'type', 'type': 'str'},
-        'folder_path': {'key': 'folderPath', 'type': 'object'},
-        'file_name': {'key': 'fileName', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(FileServerLocation, self).__init__(**kwargs)
-
-
-class FileServerReadSettings(StoreReadSettings):
-    """File server read settings.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param type: Required. The read setting type.
-    :type type: str
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the source data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param recursive: If true, files under the folder path will be read
-     recursively. Default is true. Type: boolean (or Expression with resultType
-     boolean).
-    :type recursive: object
-    :param wildcard_folder_path: FileServer wildcardFolderPath. Type: string
-     (or Expression with resultType string).
-    :type wildcard_folder_path: object
-    :param wildcard_file_name: FileServer wildcardFileName. Type: string (or
-     Expression with resultType string).
-    :type wildcard_file_name: object
-    :param enable_partition_discovery: Indicates whether to enable partition
-     discovery.
-    :type enable_partition_discovery: bool
-    :param modified_datetime_start: The start of file's modified datetime.
-     Type: string (or Expression with resultType string).
-    :type modified_datetime_start: object
-    :param modified_datetime_end: The end of file's modified datetime. Type:
-     string (or Expression with resultType string).
-    :type modified_datetime_end: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'type': {'key': 'type', 'type': 'str'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'recursive': {'key': 'recursive', 'type': 'object'},
-        'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
-        'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
-        'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'},
-        'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'},
-        'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(FileServerReadSettings, self).__init__(**kwargs)
-        self.recursive = kwargs.get('recursive', None)
-        self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None)
-        self.wildcard_file_name = kwargs.get('wildcard_file_name', None)
-        self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None)
-        self.modified_datetime_start = kwargs.get('modified_datetime_start', None)
-        self.modified_datetime_end = kwargs.get('modified_datetime_end', None)
-
-
-class FileServerWriteSettings(StoreWriteSettings):
-    """File server write settings.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the sink data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param copy_behavior: The type of copy behavior for copy sink.
-    :type copy_behavior: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-    }
-
-    def __init__(self, **kwargs):
-        super(FileServerWriteSettings, self).__init__(**kwargs)
-        self.type = 'FileServerWriteSettings'
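A hedged sketch of the read settings above; note that this subclass, unlike FileServerWriteSettings, does not pin the type discriminator in __init__, so it is passed explicitly. Paths and dates are placeholders.

from azure.mgmt.datafactory.models import FileServerReadSettings

read_settings = FileServerReadSettings(
    type='FileServerReadSettings',       # discriminator not set by __init__
    recursive=True,
    wildcard_folder_path='incoming/2019/*',
    wildcard_file_name='*.csv',
    modified_datetime_start='2019-06-01T00:00:00Z',
    modified_datetime_end='2019-06-07T00:00:00Z',
)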
-
-
-class FileShareDataset(Dataset):
-    """An on-premises file system dataset.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param description: Dataset description.
-    :type description: str
-    :param structure: Columns that define the structure of the dataset. Type:
-     array (or Expression with resultType array), itemType: DatasetDataElement.
-    :type structure: object
-    :param schema: Columns that define the physical type schema of the
-     dataset. Type: array (or Expression with resultType array), itemType:
-     DatasetSchemaDataElement.
-    :type schema: object
-    :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param parameters: Parameters for dataset.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     Dataset.
-    :type annotations: list[object]
-    :param folder: The folder that this Dataset is in. If not specified,
-     Dataset will appear at the root level.
-    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param folder_path: The path of the on-premises file system. Type: string
-     (or Expression with resultType string).
-    :type folder_path: object
-    :param file_name: The name of the file in the on-premises file system.
-     Type: string (or Expression with resultType string).
-    :type file_name: object
-    :param modified_datetime_start: The start of file's modified datetime.
-     Type: string (or Expression with resultType string).
-    :type modified_datetime_start: object
-    :param modified_datetime_end: The end of file's modified datetime. Type:
-     string (or Expression with resultType string).
-    :type modified_datetime_end: object
-    :param format: The format of the files.
-    :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat
-    :param file_filter: Specify a filter to be used to select a subset of
-     files in the folderPath rather than all files. Type: string (or Expression
-     with resultType string).
-    :type file_filter: object
-    :param compression: The data compression method used for the file system.
-    :type compression: ~azure.mgmt.datafactory.models.DatasetCompression
-    """
-
-    _validation = {
-        'linked_service_name': {'required': True},
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'description': {'key': 'description', 'type': 'str'},
-        'structure': {'key': 'structure', 'type': 'object'},
-        'schema': {'key': 'schema', 'type': 'object'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
-        'type': {'key': 'type', 'type': 'str'},
-        'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'},
-        'file_name': {'key': 'typeProperties.fileName', 'type': 'object'},
-        'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'},
-        'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'},
-        'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'},
-        'file_filter': {'key': 'typeProperties.fileFilter', 'type': 'object'},
-        'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'},
-    }
-
-    def __init__(self, **kwargs):
-        super(FileShareDataset, self).__init__(**kwargs)
-        self.folder_path = kwargs.get('folder_path', None)
-        self.file_name = kwargs.get('file_name', None)
-        self.modified_datetime_start = kwargs.get('modified_datetime_start', None)
-        self.modified_datetime_end = kwargs.get('modified_datetime_end', None)
-        self.format = kwargs.get('format', None)
-        self.file_filter = kwargs.get('file_filter', None)
-        self.compression = kwargs.get('compression', None)
-        self.type = 'FileShare'
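A minimal sketch of the dataset above, assuming the LinkedServiceReference model from the same package; the reference name points at a hypothetical registration of the FileServerLinkedService shown earlier.

from azure.mgmt.datafactory.models import FileShareDataset, LinkedServiceReference

file_share_ds = FileShareDataset(
    linked_service_name=LinkedServiceReference(reference_name='FileServerLS'),
    folder_path='landing/incoming',   # placeholder share-relative path
    file_filter='*.csv',              # only pick up CSV files
)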
-
-
-class FileSystemSink(CopySink):
-    """A copy activity file system sink.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param write_batch_size: Write batch size. Type: integer (or Expression
-     with resultType integer), minimum: 0.
-    :type write_batch_size: object
-    :param write_batch_timeout: Write batch timeout. Type: string (or
-     Expression with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type write_batch_timeout: object
-    :param sink_retry_count: Sink retry count. Type: integer (or Expression
-     with resultType integer).
-    :type sink_retry_count: object
-    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
-     resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type sink_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the sink data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param copy_behavior: The type of copy behavior for copy sink.
-    :type copy_behavior: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
-        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
-        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
-        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(FileSystemSink, self).__init__(**kwargs)
-        self.copy_behavior = kwargs.get('copy_behavior', None)
-        self.type = 'FileSystemSink'
-
-
-class FileSystemSource(CopySource):
-    """A copy activity file system source.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param source_retry_count: Source retry count. Type: integer (or
-     Expression with resultType integer).
-    :type source_retry_count: object
-    :param source_retry_wait: Source retry wait. Type: string (or Expression
-     with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type source_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the source data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param recursive: If true, files under the folder path will be read
-     recursively. Default is true. Type: boolean (or Expression with resultType
-     boolean).
-    :type recursive: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
-        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'recursive': {'key': 'recursive', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(FileSystemSource, self).__init__(**kwargs)
-        self.recursive = kwargs.get('recursive', None)
-        self.type = 'FileSystemSource'
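A sketch of how this sink/source pair is typically combined, assuming the CopyActivity and DatasetReference models from the same package; dataset names are placeholders.

from azure.mgmt.datafactory.models import (
    CopyActivity, DatasetReference, FileSystemSink, FileSystemSource)

copy_files = CopyActivity(
    name='CopyOnPremFiles',
    inputs=[DatasetReference(reference_name='SourceFileShare')],
    outputs=[DatasetReference(reference_name='SinkFileShare')],
    source=FileSystemSource(recursive=True),
    sink=FileSystemSink(copy_behavior='PreserveHierarchy'),
)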
-
-
-class FilterActivity(ControlActivity):
-    """Filter and return results from input array based on the conditions.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param name: Required. Activity name.
-    :type name: str
-    :param description: Activity description.
-    :type description: str
-    :param depends_on: Activity depends on condition.
-    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
-    :param user_properties: Activity user properties.
-    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param items: Required. Input array on which filter should be applied.
-    :type items: ~azure.mgmt.datafactory.models.Expression
-    :param condition: Required. Condition to be used for filtering the input.
-    :type condition: ~azure.mgmt.datafactory.models.Expression
-    """
-
-    _validation = {
-        'name': {'required': True},
-        'type': {'required': True},
-        'items': {'required': True},
-        'condition': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'name': {'key': 'name', 'type': 'str'},
-        'description': {'key': 'description', 'type': 'str'},
-        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
-        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'items': {'key': 'typeProperties.items', 'type': 'Expression'},
-        'condition': {'key': 'typeProperties.condition', 'type': 'Expression'},
-    }
-
-    def __init__(self, **kwargs):
-        super(FilterActivity, self).__init__(**kwargs)
-        self.items = kwargs.get('items', None)
-        self.condition = kwargs.get('condition', None)
-        self.type = 'Filter'
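A small sketch of the activity above, assuming the Expression model from the same package; the upstream activity name and the filtering condition are illustrative only.

from azure.mgmt.datafactory.models import Expression, FilterActivity

filter_csv = FilterActivity(
    name='FilterCsvFiles',
    items=Expression(value="@activity('GetFileList').output.childItems"),
    condition=Expression(value="@endswith(item().name, '.csv')"),
)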
-
-
-class ForEachActivity(ControlActivity):
-    """This activity is used for iterating over a collection and executing
-    the given activities.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param name: Required. Activity name.
-    :type name: str
-    :param description: Activity description.
-    :type description: str
-    :param depends_on: Activity depends on condition.
-    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
-    :param user_properties: Activity user properties.
-    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param is_sequential: Should the loop be executed in sequence or in
-     parallel (max 50).
-    :type is_sequential: bool
-    :param batch_count: Batch count to be used for controlling the number of
-     parallel executions (when isSequential is set to false).
-    :type batch_count: int
-    :param items: Required. Collection to iterate.
-    :type items: ~azure.mgmt.datafactory.models.Expression
-    :param activities: Required. List of activities to execute.
-    :type activities: list[~azure.mgmt.datafactory.models.Activity]
-    """
-
-    _validation = {
-        'name': {'required': True},
-        'type': {'required': True},
-        'batch_count': {'maximum': 50},
-        'items': {'required': True},
-        'activities': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'name': {'key': 'name', 'type': 'str'},
-        'description': {'key': 'description', 'type': 'str'},
-        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
-        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'is_sequential': {'key': 'typeProperties.isSequential', 'type': 'bool'},
-        'batch_count': {'key': 'typeProperties.batchCount', 'type': 'int'},
-        'items': {'key': 'typeProperties.items', 'type': 'Expression'},
-        'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'},
-    }
-
-    def __init__(self, **kwargs):
-        super(ForEachActivity, self).__init__(**kwargs)
-        self.is_sequential = kwargs.get('is_sequential', None)
-        self.batch_count = kwargs.get('batch_count', None)
-        self.items = kwargs.get('items', None)
-        self.activities = kwargs.get('activities', None)
-        self.type = 'ForEach'
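A companion sketch for the loop above, reusing the hypothetical filter_csv and copy_files objects from the previous examples; batch_count is honoured only when is_sequential is false and is capped at 50 by the validation shown.

from azure.mgmt.datafactory.models import Expression, ForEachActivity

for_each = ForEachActivity(
    name='ProcessEachFile',
    items=Expression(value="@activity('FilterCsvFiles').output.value"),
    is_sequential=False,
    batch_count=10,              # parallel fan-out, max 50
    activities=[copy_files],     # e.g. the copy activity sketched earlier
)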
-
-
-class FtpReadSettings(StoreReadSettings):
-    """FTP read settings.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param type: Required. The read setting type.
-    :type type: str
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the source data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param recursive: If true, files under the folder path will be read
-     recursively. Default is true. Type: boolean (or Expression with resultType
-     boolean).
-    :type recursive: object
-    :param wildcard_folder_path: FTP wildcardFolderPath. Type: string (or
-     Expression with resultType string).
-    :type wildcard_folder_path: object
-    :param wildcard_file_name: FTP wildcardFileName. Type: string (or
-     Expression with resultType string).
-    :type wildcard_file_name: object
-    :param use_binary_transfer: Specify whether to use binary transfer mode
-     for FTP stores.
-    :type use_binary_transfer: bool
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'type': {'key': 'type', 'type': 'str'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'recursive': {'key': 'recursive', 'type': 'object'},
-        'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
-        'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
-        'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'},
-    }
-
-    def __init__(self, **kwargs):
-        super(FtpReadSettings, self).__init__(**kwargs)
-        self.recursive = kwargs.get('recursive', None)
-        self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None)
-        self.wildcard_file_name = kwargs.get('wildcard_file_name', None)
-        self.use_binary_transfer = kwargs.get('use_binary_transfer', None)
-
-
-class FtpServerLinkedService(LinkedService):
-    """An FTP server linked service.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param host: Required. Host name of the FTP server. Type: string (or
-     Expression with resultType string).
-    :type host: object
-    :param port: The TCP port number that the FTP server uses to listen for
-     client connections. Default value is 21. Type: integer (or Expression with
-     resultType integer), minimum: 0.
-    :type port: object
-    :param authentication_type: The authentication type to be used to connect
-     to the FTP server. Possible values include: 'Basic', 'Anonymous'
-    :type authentication_type: str or
-     ~azure.mgmt.datafactory.models.FtpAuthenticationType
-    :param user_name: User name used to log on to the FTP server. Type: string
-     (or Expression with resultType string).
-    :type user_name: object
-    :param password: Password used to log on to the FTP server.
-    :type password: ~azure.mgmt.datafactory.models.SecretBase
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
-    :type encrypted_credential: object
-    :param enable_ssl: If true, connect to the FTP server over an SSL/TLS
-     channel. Default value is true. Type: boolean (or Expression with
-     resultType boolean).
-    :type enable_ssl: object
-    :param enable_server_certificate_validation: If true, validate the FTP
-     server SSL certificate when connecting over an SSL/TLS channel. Default
-     value is true. Type: boolean (or Expression with resultType boolean).
- :type enable_server_certificate_validation: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(FtpServerLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.port = kwargs.get('port', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None) - self.type = 'FtpServer' - - -class FtpServerLocation(DatasetLocation): - """The location of ftp server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). - :type file_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(FtpServerLocation, self).__init__(**kwargs) - - -class GetMetadataActivity(ExecutionActivity): - """Activity to get metadata of dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. 
Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param dataset: Required. GetMetadata activity dataset reference. - :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - :param field_list: Fields of metadata to get from dataset. - :type field_list: list[object] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'dataset': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, - 'field_list': {'key': 'typeProperties.fieldList', 'type': '[object]'}, - } - - def __init__(self, **kwargs): - super(GetMetadataActivity, self).__init__(**kwargs) - self.dataset = kwargs.get('dataset', None) - self.field_list = kwargs.get('field_list', None) - self.type = 'GetMetadata' - - -class GetSsisObjectMetadataRequest(Model): - """The request payload of get SSIS object metadata. - - :param metadata_path: Metadata path. - :type metadata_path: str - """ - - _attribute_map = { - 'metadata_path': {'key': 'metadataPath', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(GetSsisObjectMetadataRequest, self).__init__(**kwargs) - self.metadata_path = kwargs.get('metadata_path', None) - - -class GitHubAccessTokenRequest(Model): - """Get GitHub access token request definition. - - All required parameters must be populated in order to send to Azure. - - :param git_hub_access_code: Required. GitHub access code. - :type git_hub_access_code: str - :param git_hub_client_id: GitHub application client ID. - :type git_hub_client_id: str - :param git_hub_access_token_base_url: Required. GitHub access token base - URL. - :type git_hub_access_token_base_url: str - """ - - _validation = { - 'git_hub_access_code': {'required': True}, - 'git_hub_access_token_base_url': {'required': True}, - } - - _attribute_map = { - 'git_hub_access_code': {'key': 'gitHubAccessCode', 'type': 'str'}, - 'git_hub_client_id': {'key': 'gitHubClientId', 'type': 'str'}, - 'git_hub_access_token_base_url': {'key': 'gitHubAccessTokenBaseUrl', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(GitHubAccessTokenRequest, self).__init__(**kwargs) - self.git_hub_access_code = kwargs.get('git_hub_access_code', None) - self.git_hub_client_id = kwargs.get('git_hub_client_id', None) - self.git_hub_access_token_base_url = kwargs.get('git_hub_access_token_base_url', None) - - -class GitHubAccessTokenResponse(Model): - """Get GitHub access token response definition. - - :param git_hub_access_token: GitHub access token. 
-    :type git_hub_access_token: str
-    """
-
-    _attribute_map = {
-        'git_hub_access_token': {'key': 'gitHubAccessToken', 'type': 'str'},
-    }
-
-    def __init__(self, **kwargs):
-        super(GitHubAccessTokenResponse, self).__init__(**kwargs)
-        self.git_hub_access_token = kwargs.get('git_hub_access_token', None)
-
-
-class GoogleAdWordsLinkedService(LinkedService):
-    """Google AdWords service linked service.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param client_customer_id: Required. The client customer ID of the AdWords
-     account that you want to fetch report data for.
-    :type client_customer_id: object
-    :param developer_token: Required. The developer token associated with the
-     manager account that you use to grant access to the AdWords API.
-    :type developer_token: ~azure.mgmt.datafactory.models.SecretBase
-    :param authentication_type: Required. The OAuth 2.0 authentication
-     mechanism used for authentication. ServiceAuthentication can only be used
-     on self-hosted IR. Possible values include: 'ServiceAuthentication',
-     'UserAuthentication'
-    :type authentication_type: str or
-     ~azure.mgmt.datafactory.models.GoogleAdWordsAuthenticationType
-    :param refresh_token: The refresh token obtained from Google for
-     authorizing access to AdWords for UserAuthentication.
-    :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase
-    :param client_id: The client ID of the Google application used to acquire
-     the refresh token.
-    :type client_id: ~azure.mgmt.datafactory.models.SecretBase
-    :param client_secret: The client secret of the Google application used to
-     acquire the refresh token.
-    :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
-    :param email: The service account email ID that is used for
-     ServiceAuthentication and can only be used on self-hosted IR.
-    :type email: object
-    :param key_file_path: The full path to the .p12 key file that is used to
-     authenticate the service account email address and can only be used on
-     self-hosted IR.
-    :type key_file_path: object
-    :param trusted_cert_path: The full path of the .pem file containing
-     trusted CA certificates for verifying the server when connecting over SSL.
-     This property can only be set when using SSL on self-hosted IR. The
-     default value is the cacerts.pem file installed with the IR.
-    :type trusted_cert_path: object
-    :param use_system_trust_store: Specifies whether to use a CA certificate
-     from the system trust store or from a specified PEM file. The default
-     value is false.
-    :type use_system_trust_store: object
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'client_customer_id': {'required': True}, - 'developer_token': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'client_customer_id': {'key': 'typeProperties.clientCustomerID', 'type': 'object'}, - 'developer_token': {'key': 'typeProperties.developerToken', 'type': 'SecretBase'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'SecretBase'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'email': {'key': 'typeProperties.email', 'type': 'object'}, - 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(GoogleAdWordsLinkedService, self).__init__(**kwargs) - self.client_customer_id = kwargs.get('client_customer_id', None) - self.developer_token = kwargs.get('developer_token', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.refresh_token = kwargs.get('refresh_token', None) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.email = kwargs.get('email', None) - self.key_file_path = kwargs.get('key_file_path', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'GoogleAdWords' - - -class GoogleAdWordsObjectDataset(Dataset): - """Google AdWords service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. 
If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(GoogleAdWordsObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'GoogleAdWordsObject' - - -class GoogleAdWordsSource(CopySource): - """A copy activity Google AdWords service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(GoogleAdWordsSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'GoogleAdWordsSource' - - -class GoogleBigQueryLinkedService(LinkedService): - """Google BigQuery service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param project: Required. The default BigQuery project to query against.
-    :type project: object
-    :param additional_projects: A comma-separated list of public BigQuery
-     projects to access.
-    :type additional_projects: object
-    :param request_google_drive_scope: Whether to request access to Google
-     Drive. Allowing Google Drive access enables support for federated tables
-     that combine BigQuery data with data from Google Drive. The default value
-     is false.
-    :type request_google_drive_scope: object
-    :param authentication_type: Required. The OAuth 2.0 authentication
-     mechanism used for authentication. ServiceAuthentication can only be used
-     on self-hosted IR. Possible values include: 'ServiceAuthentication',
-     'UserAuthentication'
-    :type authentication_type: str or
-     ~azure.mgmt.datafactory.models.GoogleBigQueryAuthenticationType
-    :param refresh_token: The refresh token obtained from Google for
-     authorizing access to BigQuery for UserAuthentication.
-    :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase
-    :param client_id: The client ID of the Google application used to acquire
-     the refresh token.
-    :type client_id: ~azure.mgmt.datafactory.models.SecretBase
-    :param client_secret: The client secret of the Google application used to
-     acquire the refresh token.
-    :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
-    :param email: The service account email ID that is used for
-     ServiceAuthentication and can only be used on self-hosted IR.
-    :type email: object
-    :param key_file_path: The full path to the .p12 key file that is used to
-     authenticate the service account email address and can only be used on
-     self-hosted IR.
-    :type key_file_path: object
-    :param trusted_cert_path: The full path of the .pem file containing
-     trusted CA certificates for verifying the server when connecting over SSL.
-     This property can only be set when using SSL on self-hosted IR. The
-     default value is the cacerts.pem file installed with the IR.
-    :type trusted_cert_path: object
-    :param use_system_trust_store: Specifies whether to use a CA certificate
-     from the system trust store or from a specified PEM file. The default
-     value is false.
-    :type use_system_trust_store: object
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'project': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'project': {'key': 'typeProperties.project', 'type': 'object'}, - 'additional_projects': {'key': 'typeProperties.additionalProjects', 'type': 'object'}, - 'request_google_drive_scope': {'key': 'typeProperties.requestGoogleDriveScope', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'SecretBase'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'email': {'key': 'typeProperties.email', 'type': 'object'}, - 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(GoogleBigQueryLinkedService, self).__init__(**kwargs) - self.project = kwargs.get('project', None) - self.additional_projects = kwargs.get('additional_projects', None) - self.request_google_drive_scope = kwargs.get('request_google_drive_scope', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.refresh_token = kwargs.get('refresh_token', None) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.email = kwargs.get('email', None) - self.key_file_path = kwargs.get('key_file_path', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'GoogleBigQuery' - - -class GoogleBigQueryObjectDataset(Dataset): - """Google BigQuery service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. 
- :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - database + table properties instead. - :type table_name: object - :param table: The table name of the Google BigQuery. Type: string (or - Expression with resultType string). - :type table: object - :param dataset: The database name of the Google BigQuery. Type: string (or - Expression with resultType string). - :type dataset: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(GoogleBigQueryObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.dataset = kwargs.get('dataset', None) - self.type = 'GoogleBigQueryObject' - - -class GoogleBigQuerySource(CopySource): - """A copy activity Google BigQuery service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(GoogleBigQuerySource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'GoogleBigQuerySource' - - -class GreenplumLinkedService(LinkedService): - """Greenplum Database linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection - string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(GreenplumLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.pwd = kwargs.get('pwd', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Greenplum' - - -class GreenplumSource(CopySource): - """A copy activity Greenplum Database source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). 
- :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(GreenplumSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'GreenplumSource' - - -class GreenplumTableDataset(Dataset): - """Greenplum Database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of Greenplum. Type: string (or Expression - with resultType string). - :type table: object - :param greenplum_table_dataset_schema: The schema name of Greenplum. Type: - string (or Expression with resultType string). 
- :type greenplum_table_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'greenplum_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(GreenplumTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.greenplum_table_dataset_schema = kwargs.get('greenplum_table_dataset_schema', None) - self.type = 'GreenplumTable' - - -class HBaseLinkedService(LinkedService): - """HBase server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The IP address or host name of the HBase server. - (i.e. 192.168.222.160) - :type host: object - :param port: The TCP port that the HBase instance uses to listen for - client connections. The default value is 9090. - :type port: object - :param http_path: The partial URL corresponding to the HBase server. (i.e. - /gateway/sandbox/hbase/version) - :type http_path: object - :param authentication_type: Required. The authentication mechanism to use - to connect to the HBase server. Possible values include: 'Anonymous', - 'Basic' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.HBaseAuthenticationType - :param username: The user name used to connect to the HBase instance. - :type username: object - :param password: The password corresponding to the user name. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are - encrypted using SSL. The default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing - trusted CA certificates for verifying the server when connecting over SSL. - This property can only be set when using SSL on self-hosted IR. The - default value is the cacerts.pem file installed with the IR. 
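Since tableName is being retired on GreenplumTableDataset in favor of the split properties, a hedged sketch of the replacement shape (dataset values and the linked-service name are placeholders, not values from this patch):

from azure.mgmt.datafactory.models import (
    GreenplumTableDataset,
    LinkedServiceReference,
)

# 'table' plus 'greenplum_table_dataset_schema' (serialized as
# typeProperties.schema) replace the retiring 'table_name' property.
gp_dataset = GreenplumTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='GreenplumLS'),
    table='daily_clicks',
    greenplum_table_dataset_schema='analytics',
)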
-    :type trusted_cert_path: object
-    :param allow_host_name_cn_mismatch: Specifies whether to require a
-     CA-issued SSL certificate name to match the host name of the server when
-     connecting over SSL. The default value is false.
-    :type allow_host_name_cn_mismatch: object
-    :param allow_self_signed_server_cert: Specifies whether to allow
-     self-signed certificates from the server. The default value is false.
-    :type allow_self_signed_server_cert: object
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
-    :type encrypted_credential: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-        'host': {'required': True},
-        'authentication_type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
-        'description': {'key': 'description', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'host': {'key': 'typeProperties.host', 'type': 'object'},
-        'port': {'key': 'typeProperties.port', 'type': 'object'},
-        'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'},
-        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
-        'username': {'key': 'typeProperties.username', 'type': 'object'},
-        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
-        'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'},
-        'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'},
-        'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'},
-        'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'},
-        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(HBaseLinkedService, self).__init__(**kwargs)
-        self.host = kwargs.get('host', None)
-        self.port = kwargs.get('port', None)
-        self.http_path = kwargs.get('http_path', None)
-        self.authentication_type = kwargs.get('authentication_type', None)
-        self.username = kwargs.get('username', None)
-        self.password = kwargs.get('password', None)
-        self.enable_ssl = kwargs.get('enable_ssl', None)
-        self.trusted_cert_path = kwargs.get('trusted_cert_path', None)
-        self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None)
-        self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None)
-        self.encrypted_credential = kwargs.get('encrypted_credential', None)
-        self.type = 'HBase'
-
-
-class HBaseObjectDataset(Dataset):
-    """HBase server dataset.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param description: Dataset description.
-    :type description: str
-    :param structure: Columns that define the structure of the dataset. Type:
-     array (or Expression with resultType array), itemType: DatasetDataElement.
-    :type structure: object
-    :param schema: Columns that define the physical type schema of the
-     dataset. Type: array (or Expression with resultType array), itemType:
-     DatasetSchemaDataElement.
-    :type schema: object
-    :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param parameters: Parameters for dataset.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     Dataset.
-    :type annotations: list[object]
-    :param folder: The folder that this Dataset is in. If not specified,
-     Dataset will appear at the root level.
-    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param table_name: The table name. Type: string (or Expression with
-     resultType string).
-    :type table_name: object
-    """
-
-    _validation = {
-        'linked_service_name': {'required': True},
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'description': {'key': 'description', 'type': 'str'},
-        'structure': {'key': 'structure', 'type': 'object'},
-        'schema': {'key': 'schema', 'type': 'object'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
-        'type': {'key': 'type', 'type': 'str'},
-        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(HBaseObjectDataset, self).__init__(**kwargs)
-        self.table_name = kwargs.get('table_name', None)
-        self.type = 'HBaseObject'
-
-
-class HBaseSource(CopySource):
-    """A copy activity HBase server source.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param source_retry_count: Source retry count. Type: integer (or
-     Expression with resultType integer).
-    :type source_retry_count: object
-    :param source_retry_wait: Source retry wait. Type: string (or Expression
-     with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type source_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the source data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param query: A query to retrieve data from source. Type: string (or
-     Expression with resultType string).
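A quick smoke-test sketch for the HBaseLinkedService defined above, using Basic authentication over SSL against the REST gateway. Not part of the generated patch; the host, port, credentials, and SecureString value are placeholders.

from azure.mgmt.datafactory.models import HBaseLinkedService, SecureString

# All connection values below are placeholders for illustration only.
hbase_ls = HBaseLinkedService(
    host='192.168.222.160',
    port=443,
    http_path='/gateway/sandbox/hbase/version',
    authentication_type='Basic',
    username='hbase-reader',
    password=SecureString(value='<placeholder>'),
    enable_ssl=True,
)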
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(HBaseSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'HBaseSource' - - -class HdfsLinkedService(LinkedService): - """Hadoop Distributed File System (HDFS) linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param url: Required. The URL of the HDFS service endpoint, e.g. - http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with - resultType string). - :type url: object - :param authentication_type: Type of authentication used to connect to the - HDFS. Possible values are: Anonymous and Windows. Type: string (or - Expression with resultType string). - :type authentication_type: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - :param user_name: User name for Windows authentication. Type: string (or - Expression with resultType string). - :type user_name: object - :param password: Password for Windows authentication. 
- :type password: ~azure.mgmt.datafactory.models.SecretBase - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - } - - def __init__(self, **kwargs): - super(HdfsLinkedService, self).__init__(**kwargs) - self.url = kwargs.get('url', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.type = 'Hdfs' - - -class HdfsLocation(DatasetLocation): - """The location of HDFS. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). - :type file_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(HdfsLocation, self).__init__(**kwargs) - - -class HdfsReadSettings(StoreReadSettings): - """HDFS read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - :param wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or - Expression with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: HDFS wildcardFileName. Type: string (or - Expression with resultType string). - :type wildcard_file_name: object - :param enable_partition_discovery: Indicates whether to enable partition - discovery. 
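To try the HdfsLinkedService above with Windows authentication, a minimal sketch; the WebHDFS endpoint and the domain account are placeholders, not values from this patch.

from azure.mgmt.datafactory.models import HdfsLinkedService, SecureString

hdfs_ls = HdfsLinkedService(
    url='http://myhostname:50070/webhdfs/v1',
    authentication_type='Windows',
    user_name='CONTOSO\\svc-adf',                  # placeholder domain account
    password=SecureString(value='<placeholder>'),
)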
- :type enable_partition_discovery: bool - :param modified_datetime_start: The start of file's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: - string (or Expression with resultType string). - :type modified_datetime_end: object - :param distcp_settings: Specifies Distcp-related settings. - :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, - } - - def __init__(self, **kwargs): - super(HdfsReadSettings, self).__init__(**kwargs) - self.recursive = kwargs.get('recursive', None) - self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) - self.wildcard_file_name = kwargs.get('wildcard_file_name', None) - self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) - self.distcp_settings = kwargs.get('distcp_settings', None) - - -class HdfsSource(CopySource): - """A copy activity HDFS source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - :param distcp_settings: Specifies Distcp-related settings. 
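The read settings compose with DistcpSettings; a hedged sketch under placeholder endpoint and path values. Note that on this model 'type' is a caller-supplied required property rather than a server-filled constant, per the docstring above.

from azure.mgmt.datafactory.models import DistcpSettings, HdfsReadSettings

read_settings = HdfsReadSettings(
    type='HdfsReadSettings',   # required; not constant-filled on this model
    recursive=True,
    wildcard_folder_path='landing/2019/06/*',   # placeholder layout
    wildcard_file_name='*.csv',
    distcp_settings=DistcpSettings(
        resource_manager_endpoint='http://headnode:8088',  # placeholder
        temp_script_path='/tmp/distcp',                    # placeholder
    ),
)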
-    :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
-        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'recursive': {'key': 'recursive', 'type': 'object'},
-        'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'},
-    }
-
-    def __init__(self, **kwargs):
-        super(HdfsSource, self).__init__(**kwargs)
-        self.recursive = kwargs.get('recursive', None)
-        self.distcp_settings = kwargs.get('distcp_settings', None)
-        self.type = 'HdfsSource'
-
-
-class HDInsightHiveActivity(ExecutionActivity):
-    """HDInsight Hive activity type.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param name: Required. Activity name.
-    :type name: str
-    :param description: Activity description.
-    :type description: str
-    :param depends_on: Activity depends on condition.
-    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
-    :param user_properties: Activity user properties.
-    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param linked_service_name: Linked service reference.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param policy: Activity policy.
-    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
-    :param storage_linked_services: Storage linked service references.
-    :type storage_linked_services:
-     list[~azure.mgmt.datafactory.models.LinkedServiceReference]
-    :param arguments: User specified arguments to HDInsightActivity.
-    :type arguments: list[object]
-    :param get_debug_info: Debug info option. Possible values include: 'None',
-     'Always', 'Failure'
-    :type get_debug_info: str or
-     ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption
-    :param script_path: Script path. Type: string (or Expression with
-     resultType string).
-    :type script_path: object
-    :param script_linked_service: Script linked service reference.
-    :type script_linked_service:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param defines: Allows user to specify defines for Hive job request.
-    :type defines: dict[str, object]
-    :param variables: User specified arguments under hivevar namespace.
-    :type variables: list[object]
-    :param query_timeout: Query timeout value (in minutes). Effective when
-     the HDInsight cluster is with ESP (Enterprise Security Package)
-    :type query_timeout: int
-    """
-
-    _validation = {
-        'name': {'required': True},
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'name': {'key': 'name', 'type': 'str'},
-        'description': {'key': 'description', 'type': 'str'},
-        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
-        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
-        'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'},
-        'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'},
-        'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'},
-        'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'},
-        'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'},
-        'defines': {'key': 'typeProperties.defines', 'type': '{object}'},
-        'variables': {'key': 'typeProperties.variables', 'type': '[object]'},
-        'query_timeout': {'key': 'typeProperties.queryTimeout', 'type': 'int'},
-    }
-
-    def __init__(self, **kwargs):
-        super(HDInsightHiveActivity, self).__init__(**kwargs)
-        self.storage_linked_services = kwargs.get('storage_linked_services', None)
-        self.arguments = kwargs.get('arguments', None)
-        self.get_debug_info = kwargs.get('get_debug_info', None)
-        self.script_path = kwargs.get('script_path', None)
-        self.script_linked_service = kwargs.get('script_linked_service', None)
-        self.defines = kwargs.get('defines', None)
-        self.variables = kwargs.get('variables', None)
-        self.query_timeout = kwargs.get('query_timeout', None)
-        self.type = 'HDInsightHive'
-
-
-class HDInsightLinkedService(LinkedService):
-    """HDInsight linked service.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param cluster_uri: Required. HDInsight cluster URI. Type: string (or
-     Expression with resultType string).
-    :type cluster_uri: object
-    :param user_name: HDInsight cluster user name. Type: string (or Expression
-     with resultType string).
-    :type user_name: object
-    :param password: HDInsight cluster password.
-    :type password: ~azure.mgmt.datafactory.models.SecretBase
-    :param linked_service_name: The Azure Storage linked service reference.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param hcatalog_linked_service_name: A reference to the Azure SQL linked
-     service that points to the HCatalog database.
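A hedged sketch of the HDInsightHiveActivity defined above, including the new query_timeout knob. The linked-service names and script path are placeholders.

from azure.mgmt.datafactory.models import (
    HDInsightHiveActivity,
    LinkedServiceReference,
)

hive_activity = HDInsightHiveActivity(
    name='RunDailyHiveScript',
    linked_service_name=LinkedServiceReference(reference_name='HDInsightLS'),
    script_path='scripts/daily.hql',                 # placeholder path
    script_linked_service=LinkedServiceReference(reference_name='BlobStorageLS'),
    defines={'run_date': '2019-06-07'},
    query_timeout=120,  # minutes; per the docstring, effective only on ESP clusters
)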
- :type hcatalog_linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - :param is_esp_enabled: Specify if the HDInsight is created with ESP - (Enterprise Security Package). Type: Boolean. - :type is_esp_enabled: object - :param file_system: Specify the FileSystem if the main storage for the - HDInsight is ADLS Gen2. Type: string (or Expression with resultType - string). - :type file_system: object - """ - - _validation = { - 'type': {'required': True}, - 'cluster_uri': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'cluster_uri': {'key': 'typeProperties.clusterUri', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, - 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'is_esp_enabled': {'key': 'typeProperties.isEspEnabled', 'type': 'object'}, - 'file_system': {'key': 'typeProperties.fileSystem', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(HDInsightLinkedService, self).__init__(**kwargs) - self.cluster_uri = kwargs.get('cluster_uri', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.linked_service_name = kwargs.get('linked_service_name', None) - self.hcatalog_linked_service_name = kwargs.get('hcatalog_linked_service_name', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.is_esp_enabled = kwargs.get('is_esp_enabled', None) - self.file_system = kwargs.get('file_system', None) - self.type = 'HDInsight' - - -class HDInsightMapReduceActivity(ExecutionActivity): - """HDInsight MapReduce activity type. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. 
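The new fileSystem property is how a BYOC cluster declares an ADLS Gen2 filesystem as its primary storage; a sketch under placeholder names, not part of the generated patch.

from azure.mgmt.datafactory.models import (
    HDInsightLinkedService,
    LinkedServiceReference,
    SecureString,
)

hdi_ls = HDInsightLinkedService(
    cluster_uri='https://mycluster.azurehdinsight.net',  # placeholder
    user_name='admin',
    password=SecureString(value='<placeholder>'),
    linked_service_name=LinkedServiceReference(reference_name='AdlsGen2LS'),
    file_system='clusterfs',  # ADLS Gen2 filesystem; new in this change
)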
- :type storage_linked_services: - list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: 'None', - 'Always', 'Failure' - :type get_debug_info: str or - ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param class_name: Required. Class name. Type: string (or Expression with - resultType string). - :type class_name: object - :param jar_file_path: Required. Jar path. Type: string (or Expression with - resultType string). - :type jar_file_path: object - :param jar_linked_service: Jar linked service reference. - :type jar_linked_service: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param jar_libs: Jar libs. - :type jar_libs: list[object] - :param defines: Allows user to specify defines for the MapReduce job - request. - :type defines: dict[str, object] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'class_name': {'required': True}, - 'jar_file_path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'class_name': {'key': 'typeProperties.className', 'type': 'object'}, - 'jar_file_path': {'key': 'typeProperties.jarFilePath', 'type': 'object'}, - 'jar_linked_service': {'key': 'typeProperties.jarLinkedService', 'type': 'LinkedServiceReference'}, - 'jar_libs': {'key': 'typeProperties.jarLibs', 'type': '[object]'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, - } - - def __init__(self, **kwargs): - super(HDInsightMapReduceActivity, self).__init__(**kwargs) - self.storage_linked_services = kwargs.get('storage_linked_services', None) - self.arguments = kwargs.get('arguments', None) - self.get_debug_info = kwargs.get('get_debug_info', None) - self.class_name = kwargs.get('class_name', None) - self.jar_file_path = kwargs.get('jar_file_path', None) - self.jar_linked_service = kwargs.get('jar_linked_service', None) - self.jar_libs = kwargs.get('jar_libs', None) - self.defines = kwargs.get('defines', None) - self.type = 'HDInsightMapReduce' - - -class HDInsightOnDemandLinkedService(LinkedService): - """HDInsight ondemand linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
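Likewise for the HDInsightMapReduceActivity above, a minimal placeholder-named sketch covering its two required type properties:

from azure.mgmt.datafactory.models import (
    HDInsightMapReduceActivity,
    LinkedServiceReference,
)

mr_activity = HDInsightMapReduceActivity(
    name='WordCount',
    linked_service_name=LinkedServiceReference(reference_name='HDInsightLS'),
    class_name='org.example.WordCount',   # placeholder class
    jar_file_path='jars/wordcount.jar',   # placeholder path
    jar_linked_service=LinkedServiceReference(reference_name='BlobStorageLS'),
    arguments=['/input', '/output'],
)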
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param cluster_size: Required. Number of worker/data nodes in the cluster.
-     Suggestion value: 4. Type: string (or Expression with resultType string).
-    :type cluster_size: object
-    :param time_to_live: Required. The allowed idle time for the on-demand
-     HDInsight cluster. Specifies how long the on-demand HDInsight cluster
-     stays alive after completion of an activity run if there are no other
-     active jobs in the cluster. The minimum value is 5 mins. Type: string (or
-     Expression with resultType string).
-    :type time_to_live: object
-    :param version: Required. Version of the HDInsight cluster.  Type: string
-     (or Expression with resultType string).
-    :type version: object
-    :param linked_service_name: Required. Azure Storage linked service to be
-     used by the on-demand cluster for storing and processing data.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param host_subscription_id: Required. The customer’s subscription to host
-     the cluster. Type: string (or Expression with resultType string).
-    :type host_subscription_id: object
-    :param service_principal_id: The service principal id for the
-     hostSubscriptionId. Type: string (or Expression with resultType string).
-    :type service_principal_id: object
-    :param service_principal_key: The key for the service principal id.
-    :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
-    :param tenant: Required. The Tenant id/name to which the service principal
-     belongs. Type: string (or Expression with resultType string).
-    :type tenant: object
-    :param cluster_resource_group: Required. The resource group where the
-     cluster belongs. Type: string (or Expression with resultType string).
-    :type cluster_resource_group: object
-    :param cluster_name_prefix: The prefix of cluster name, postfix will be
-     distinct with timestamp. Type: string (or Expression with resultType
-     string).
-    :type cluster_name_prefix: object
-    :param cluster_user_name: The username to access the cluster. Type: string
-     (or Expression with resultType string).
-    :type cluster_user_name: object
-    :param cluster_password: The password to access the cluster.
-    :type cluster_password: ~azure.mgmt.datafactory.models.SecretBase
-    :param cluster_ssh_user_name: The username to SSH remotely connect to
-     cluster’s node (for Linux). Type: string (or Expression with resultType
-     string).
-    :type cluster_ssh_user_name: object
-    :param cluster_ssh_password: The password to SSH remotely connect
-     cluster’s node (for Linux).
-    :type cluster_ssh_password: ~azure.mgmt.datafactory.models.SecretBase
-    :param additional_linked_service_names: Specifies additional storage
-     accounts for the HDInsight linked service so that the Data Factory service
-     can register them on your behalf.
-    :type additional_linked_service_names:
-     list[~azure.mgmt.datafactory.models.LinkedServiceReference]
-    :param hcatalog_linked_service_name: The name of Azure SQL linked service
-     that point to the HCatalog database. The on-demand HDInsight cluster is
-     created by using the Azure SQL database as the metastore.
-    :type hcatalog_linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param cluster_type: The cluster type. Type: string (or Expression with
-     resultType string).
-    :type cluster_type: object
-    :param spark_version: The version of spark if the cluster type is 'spark'.
-     Type: string (or Expression with resultType string).
-    :type spark_version: object
-    :param core_configuration: Specifies the core configuration parameters (as
-     in core-site.xml) for the HDInsight cluster to be created.
-    :type core_configuration: object
-    :param h_base_configuration: Specifies the HBase configuration parameters
-     (hbase-site.xml) for the HDInsight cluster.
-    :type h_base_configuration: object
-    :param hdfs_configuration: Specifies the HDFS configuration parameters
-     (hdfs-site.xml) for the HDInsight cluster.
-    :type hdfs_configuration: object
-    :param hive_configuration: Specifies the hive configuration parameters
-     (hive-site.xml) for the HDInsight cluster.
-    :type hive_configuration: object
-    :param map_reduce_configuration: Specifies the MapReduce configuration
-     parameters (mapred-site.xml) for the HDInsight cluster.
-    :type map_reduce_configuration: object
-    :param oozie_configuration: Specifies the Oozie configuration parameters
-     (oozie-site.xml) for the HDInsight cluster.
-    :type oozie_configuration: object
-    :param storm_configuration: Specifies the Storm configuration parameters
-     (storm-site.xml) for the HDInsight cluster.
-    :type storm_configuration: object
-    :param yarn_configuration: Specifies the Yarn configuration parameters
-     (yarn-site.xml) for the HDInsight cluster.
-    :type yarn_configuration: object
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
-    :type encrypted_credential: object
-    :param head_node_size: Specifies the size of the head node for the
-     HDInsight cluster.
-    :type head_node_size: object
-    :param data_node_size: Specifies the size of the data node for the
-     HDInsight cluster.
-    :type data_node_size: object
-    :param zookeeper_node_size: Specifies the size of the Zoo Keeper node for
-     the HDInsight cluster.
-    :type zookeeper_node_size: object
-    :param script_actions: Custom script actions to run on HDI ondemand
-     cluster once it's up. Please refer to
-     https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions.
-    :type script_actions: list[~azure.mgmt.datafactory.models.ScriptAction]
-    :param virtual_network_id: The ARM resource ID for the vNet to which the
-     cluster should be joined after creation. Type: string (or Expression with
-     resultType string).
-    :type virtual_network_id: object
-    :param subnet_name: The ARM resource ID for the subnet in the vNet. If
-     virtualNetworkId was specified, then this property is required. Type:
-     string (or Expression with resultType string).
-    :type subnet_name: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-        'cluster_size': {'required': True},
-        'time_to_live': {'required': True},
-        'version': {'required': True},
-        'linked_service_name': {'required': True},
-        'host_subscription_id': {'required': True},
-        'tenant': {'required': True},
-        'cluster_resource_group': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
-        'description': {'key': 'description', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'cluster_size': {'key': 'typeProperties.clusterSize', 'type': 'object'},
-        'time_to_live': {'key': 'typeProperties.timeToLive', 'type': 'object'},
-        'version': {'key': 'typeProperties.version', 'type': 'object'},
-        'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'},
-        'host_subscription_id': {'key': 'typeProperties.hostSubscriptionId', 'type': 'object'},
-        'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
-        'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
-        'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
-        'cluster_resource_group': {'key': 'typeProperties.clusterResourceGroup', 'type': 'object'},
-        'cluster_name_prefix': {'key': 'typeProperties.clusterNamePrefix', 'type': 'object'},
-        'cluster_user_name': {'key': 'typeProperties.clusterUserName', 'type': 'object'},
-        'cluster_password': {'key': 'typeProperties.clusterPassword', 'type': 'SecretBase'},
-        'cluster_ssh_user_name': {'key': 'typeProperties.clusterSshUserName', 'type': 'object'},
-        'cluster_ssh_password': {'key': 'typeProperties.clusterSshPassword', 'type': 'SecretBase'},
-        'additional_linked_service_names': {'key': 'typeProperties.additionalLinkedServiceNames', 'type': '[LinkedServiceReference]'},
-        'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'},
-        'cluster_type': {'key': 'typeProperties.clusterType', 'type': 'object'},
-        'spark_version': {'key': 'typeProperties.sparkVersion', 'type': 'object'},
-        'core_configuration': {'key': 'typeProperties.coreConfiguration', 'type': 'object'},
-        'h_base_configuration': {'key': 'typeProperties.hBaseConfiguration', 'type': 'object'},
-        'hdfs_configuration': {'key': 'typeProperties.hdfsConfiguration', 'type': 'object'},
-        'hive_configuration': {'key': 'typeProperties.hiveConfiguration', 'type': 'object'},
-        'map_reduce_configuration': {'key': 'typeProperties.mapReduceConfiguration', 'type': 'object'},
-        'oozie_configuration': {'key': 'typeProperties.oozieConfiguration', 'type': 'object'},
-        'storm_configuration': {'key': 'typeProperties.stormConfiguration', 'type': 'object'},
-        'yarn_configuration': {'key': 'typeProperties.yarnConfiguration', 'type': 'object'},
-        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
-        'head_node_size': {'key': 'typeProperties.headNodeSize', 'type': 'object'},
-        'data_node_size': {'key': 'typeProperties.dataNodeSize', 'type': 'object'},
-        'zookeeper_node_size': {'key': 'typeProperties.zookeeperNodeSize', 'type': 'object'},
-        'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'},
-        'virtual_network_id': {'key': 'typeProperties.virtualNetworkId', 'type': 'object'},
-        'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(HDInsightOnDemandLinkedService, self).__init__(**kwargs)
-        self.cluster_size = kwargs.get('cluster_size', None)
-        self.time_to_live = kwargs.get('time_to_live', None)
-        self.version = kwargs.get('version', None)
-        self.linked_service_name = kwargs.get('linked_service_name', None)
-        self.host_subscription_id = kwargs.get('host_subscription_id', None)
-        self.service_principal_id = kwargs.get('service_principal_id', None)
-        self.service_principal_key = kwargs.get('service_principal_key', None)
-        self.tenant = kwargs.get('tenant', None)
-        self.cluster_resource_group = kwargs.get('cluster_resource_group', None)
-        self.cluster_name_prefix = kwargs.get('cluster_name_prefix', None)
-        self.cluster_user_name = kwargs.get('cluster_user_name', None)
-        self.cluster_password = kwargs.get('cluster_password', None)
-        self.cluster_ssh_user_name = kwargs.get('cluster_ssh_user_name', None)
-        self.cluster_ssh_password = kwargs.get('cluster_ssh_password', None)
-        self.additional_linked_service_names = kwargs.get('additional_linked_service_names', None)
-        self.hcatalog_linked_service_name = kwargs.get('hcatalog_linked_service_name', None)
-        self.cluster_type = kwargs.get('cluster_type', None)
-        self.spark_version = kwargs.get('spark_version', None)
-        self.core_configuration = kwargs.get('core_configuration', None)
-        self.h_base_configuration = kwargs.get('h_base_configuration', None)
-        self.hdfs_configuration = kwargs.get('hdfs_configuration', None)
-        self.hive_configuration = kwargs.get('hive_configuration', None)
-        self.map_reduce_configuration = kwargs.get('map_reduce_configuration', None)
-        self.oozie_configuration = kwargs.get('oozie_configuration', None)
-        self.storm_configuration = kwargs.get('storm_configuration', None)
-        self.yarn_configuration = kwargs.get('yarn_configuration', None)
-        self.encrypted_credential = kwargs.get('encrypted_credential', None)
-        self.head_node_size = kwargs.get('head_node_size', None)
-        self.data_node_size = kwargs.get('data_node_size', None)
-        self.zookeeper_node_size = kwargs.get('zookeeper_node_size', None)
-        self.script_actions = kwargs.get('script_actions', None)
-        self.virtual_network_id = kwargs.get('virtual_network_id', None)
-        self.subnet_name = kwargs.get('subnet_name', None)
-        self.type = 'HDInsightOnDemand'
-
-
-class HDInsightPigActivity(ExecutionActivity):
-    """HDInsight Pig activity type.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param name: Required. Activity name.
-    :type name: str
-    :param description: Activity description.
-    :type description: str
-    :param depends_on: Activity depends on condition.
-    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
-    :param user_properties: Activity user properties.
-    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param linked_service_name: Linked service reference.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param policy: Activity policy.
-    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
-    :param storage_linked_services: Storage linked service references.
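The vNet support added here is the virtualNetworkId/subnetName pair on the on-demand linked service above. A sketch of joining the transient cluster to an existing vNet; every identifier below, including the angle-bracketed IDs, is a placeholder.

from azure.mgmt.datafactory.models import (
    HDInsightOnDemandLinkedService,
    LinkedServiceReference,
    SecureString,
)

on_demand_ls = HDInsightOnDemandLinkedService(
    cluster_size=4,
    time_to_live='00:15:00',
    version='3.6',
    linked_service_name=LinkedServiceReference(reference_name='StorageLS'),
    host_subscription_id='<subscription-id>',
    service_principal_id='<app-id>',
    service_principal_key=SecureString(value='<key>'),
    tenant='<tenant-id>',
    cluster_resource_group='rg-hdi-ondemand',
    # New in this change: if virtual_network_id is set, subnet_name is
    # required as well, per the docstring above.
    virtual_network_id='/subscriptions/<sub>/resourceGroups/rg-net/providers/Microsoft.Network/virtualNetworks/vnet-hdi',
    subnet_name='/subscriptions/<sub>/resourceGroups/rg-net/providers/Microsoft.Network/virtualNetworks/vnet-hdi/subnets/default',
)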
- :type storage_linked_services: - list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: 'None', - 'Always', 'Failure' - :type get_debug_info: str or - ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param script_path: Script path. Type: string (or Expression with - resultType string). - :type script_path: object - :param script_linked_service: Script linked service reference. - :type script_linked_service: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param defines: Allows user to specify defines for Pig job request. - :type defines: dict[str, object] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, - 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, - } - - def __init__(self, **kwargs): - super(HDInsightPigActivity, self).__init__(**kwargs) - self.storage_linked_services = kwargs.get('storage_linked_services', None) - self.arguments = kwargs.get('arguments', None) - self.get_debug_info = kwargs.get('get_debug_info', None) - self.script_path = kwargs.get('script_path', None) - self.script_linked_service = kwargs.get('script_linked_service', None) - self.defines = kwargs.get('defines', None) - self.type = 'HDInsightPig' - - -class HDInsightSparkActivity(ExecutionActivity): - """HDInsight Spark activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param root_path: Required. The root path in 'sparkJobLinkedService' for - all the job’s files. Type: string (or Expression with resultType string). 
- :type root_path: object - :param entry_file_path: Required. The relative path to the root folder of - the code/package to be executed. Type: string (or Expression with - resultType string). - :type entry_file_path: object - :param arguments: The user-specified arguments to HDInsightSparkActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: 'None', - 'Always', 'Failure' - :type get_debug_info: str or - ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param spark_job_linked_service: The storage linked service for uploading - the entry file and dependencies, and for receiving logs. - :type spark_job_linked_service: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param class_name: The application's Java/Spark main class. - :type class_name: str - :param proxy_user: The user to impersonate that will execute the job. - Type: string (or Expression with resultType string). - :type proxy_user: object - :param spark_config: Spark configuration property. - :type spark_config: dict[str, object] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'root_path': {'required': True}, - 'entry_file_path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'root_path': {'key': 'typeProperties.rootPath', 'type': 'object'}, - 'entry_file_path': {'key': 'typeProperties.entryFilePath', 'type': 'object'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'spark_job_linked_service': {'key': 'typeProperties.sparkJobLinkedService', 'type': 'LinkedServiceReference'}, - 'class_name': {'key': 'typeProperties.className', 'type': 'str'}, - 'proxy_user': {'key': 'typeProperties.proxyUser', 'type': 'object'}, - 'spark_config': {'key': 'typeProperties.sparkConfig', 'type': '{object}'}, - } - - def __init__(self, **kwargs): - super(HDInsightSparkActivity, self).__init__(**kwargs) - self.root_path = kwargs.get('root_path', None) - self.entry_file_path = kwargs.get('entry_file_path', None) - self.arguments = kwargs.get('arguments', None) - self.get_debug_info = kwargs.get('get_debug_info', None) - self.spark_job_linked_service = kwargs.get('spark_job_linked_service', None) - self.class_name = kwargs.get('class_name', None) - self.proxy_user = kwargs.get('proxy_user', None) - self.spark_config = kwargs.get('spark_config', None) - self.type = 'HDInsightSpark' - - -class HDInsightStreamingActivity(ExecutionActivity): - """HDInsight streaming activity type. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. 
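For the HDInsightSparkActivity above, a minimal sketch with a placeholder storage layout; note that entry_file_path is relative to root_path, which is resolved inside spark_job_linked_service.

from azure.mgmt.datafactory.models import (
    HDInsightSparkActivity,
    LinkedServiceReference,
)

spark_activity = HDInsightSparkActivity(
    name='ScoreModel',
    linked_service_name=LinkedServiceReference(reference_name='HDInsightLS'),
    root_path='adfspark',               # placeholder container/folder
    entry_file_path='pyFiles/main.py',  # relative to root_path
    spark_job_linked_service=LinkedServiceReference(reference_name='BlobStorageLS'),
    arguments=['--date', '2019-06-07'],
    spark_config={'spark.executor.memory': '4g'},
)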
- :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: - list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: 'None', - 'Always', 'Failure' - :type get_debug_info: str or - ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param mapper: Required. Mapper executable name. Type: string (or - Expression with resultType string). - :type mapper: object - :param reducer: Required. Reducer executable name. Type: string (or - Expression with resultType string). - :type reducer: object - :param input: Required. Input blob path. Type: string (or Expression with - resultType string). - :type input: object - :param output: Required. Output blob path. Type: string (or Expression - with resultType string). - :type output: object - :param file_paths: Required. Paths to streaming job files. Can be - directories. - :type file_paths: list[object] - :param file_linked_service: Linked service reference where the files are - located. - :type file_linked_service: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param combiner: Combiner executable name. Type: string (or Expression - with resultType string). - :type combiner: object - :param command_environment: Command line environment values. - :type command_environment: list[object] - :param defines: Allows user to specify defines for streaming job request. 
- :type defines: dict[str, object] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'mapper': {'required': True}, - 'reducer': {'required': True}, - 'input': {'required': True}, - 'output': {'required': True}, - 'file_paths': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'mapper': {'key': 'typeProperties.mapper', 'type': 'object'}, - 'reducer': {'key': 'typeProperties.reducer', 'type': 'object'}, - 'input': {'key': 'typeProperties.input', 'type': 'object'}, - 'output': {'key': 'typeProperties.output', 'type': 'object'}, - 'file_paths': {'key': 'typeProperties.filePaths', 'type': '[object]'}, - 'file_linked_service': {'key': 'typeProperties.fileLinkedService', 'type': 'LinkedServiceReference'}, - 'combiner': {'key': 'typeProperties.combiner', 'type': 'object'}, - 'command_environment': {'key': 'typeProperties.commandEnvironment', 'type': '[object]'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, - } - - def __init__(self, **kwargs): - super(HDInsightStreamingActivity, self).__init__(**kwargs) - self.storage_linked_services = kwargs.get('storage_linked_services', None) - self.arguments = kwargs.get('arguments', None) - self.get_debug_info = kwargs.get('get_debug_info', None) - self.mapper = kwargs.get('mapper', None) - self.reducer = kwargs.get('reducer', None) - self.input = kwargs.get('input', None) - self.output = kwargs.get('output', None) - self.file_paths = kwargs.get('file_paths', None) - self.file_linked_service = kwargs.get('file_linked_service', None) - self.combiner = kwargs.get('combiner', None) - self.command_environment = kwargs.get('command_environment', None) - self.defines = kwargs.get('defines', None) - self.type = 'HDInsightStreaming' - - -class HiveLinkedService(LinkedService): - """Hive Server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. IP address or host name of the Hive server, - separated by ';' for multiple hosts (only when serviceDiscoveryMode is - enabled).
- :type host: object - :param port: The TCP port that the Hive server uses to listen for client - connections. - :type port: object - :param server_type: The type of Hive server. Possible values include: - 'HiveServer1', 'HiveServer2', 'HiveThriftServer' - :type server_type: str or ~azure.mgmt.datafactory.models.HiveServerType - :param thrift_transport_protocol: The transport protocol to use in the - Thrift layer. Possible values include: 'Binary', 'SASL', 'HTTP ' - :type thrift_transport_protocol: str or - ~azure.mgmt.datafactory.models.HiveThriftTransportProtocol - :param authentication_type: Required. The authentication method used to - access the Hive server. Possible values include: 'Anonymous', 'Username', - 'UsernameAndPassword', 'WindowsAzureHDInsightService' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.HiveAuthenticationType - :param service_discovery_mode: true to indicate using the ZooKeeper - service, false otherwise. - :type service_discovery_mode: object - :param zoo_keeper_name_space: The namespace on ZooKeeper under which Hive - Server 2 nodes are added. - :type zoo_keeper_name_space: object - :param use_native_query: Specifies whether the driver uses native HiveQL - queries, or converts them into an equivalent form in HiveQL. - :type use_native_query: object - :param username: The user name that you use to access Hive Server. - :type username: object - :param password: The password corresponding to the user name that you - provided in the Username field. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param http_path: The partial URL corresponding to the Hive server. - :type http_path: object - :param enable_ssl: Specifies whether the connections to the server are - encrypted using SSL. The default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing - trusted CA certificates for verifying the server when connecting over SSL. - This property can only be set when using SSL on self-hosted IR. The - default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate - from the system trust store or from a specified PEM file. The default - value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a - CA-issued SSL certificate name to match the host name of the server when - connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow - self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, - 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'service_discovery_mode': {'key': 'typeProperties.serviceDiscoveryMode', 'type': 'object'}, - 'zoo_keeper_name_space': {'key': 'typeProperties.zooKeeperNameSpace', 'type': 'object'}, - 'use_native_query': {'key': 'typeProperties.useNativeQuery', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(HiveLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.port = kwargs.get('port', None) - self.server_type = kwargs.get('server_type', None) - self.thrift_transport_protocol = kwargs.get('thrift_transport_protocol', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.service_discovery_mode = kwargs.get('service_discovery_mode', None) - self.zoo_keeper_name_space = kwargs.get('zoo_keeper_name_space', None) - self.use_native_query = kwargs.get('use_native_query', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.http_path = kwargs.get('http_path', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Hive' - - -class HiveObjectDataset(Dataset): - """Hive Server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. 
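A minimal construction sketch for the HiveLinkedService defined above; the host and credentials are placeholders, and SecureString is the plain-text SecretBase subtype from this same models package:

from azure.mgmt.datafactory.models import HiveLinkedService, SecureString

hive_ls = HiveLinkedService(
    host='hive.contoso.com',            # placeholder HiveServer2 host
    port=10000,
    server_type='HiveServer2',
    authentication_type='UsernameAndPassword',
    username='hiveuser',
    password=SecureString(value='<password>'),
    enable_ssl=True)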
- :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of the Hive. Type: string (or Expression with - resultType string). - :type table: object - :param hive_object_dataset_schema: The schema name of the Hive. Type: - string (or Expression with resultType string). - :type hive_object_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'hive_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(HiveObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.hive_object_dataset_schema = kwargs.get('hive_object_dataset_schema', None) - self.type = 'HiveObject' - - -class HiveSource(CopySource): - """A copy activity Hive Server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. 
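Since tableName is being retired in favor of the split schema and table properties, a sketch of the preferred shape (the linked-service reference name is illustrative):

from azure.mgmt.datafactory.models import (
    HiveObjectDataset, LinkedServiceReference)

hive_ds = HiveObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='HiveLS'),
    hive_object_dataset_schema='warehouse',  # serialized as typeProperties.schema
    table='trips')                           # serialized as typeProperties.table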
- :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(HiveSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'HiveSource' - - -class HttpDataset(Dataset): - """A file in an HTTP web server. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param relative_url: The relative URL, based on the URL in the - HttpLinkedService, that refers to an HTTP file. Type: string (or Expression with - resultType string). - :type relative_url: object - :param request_method: The HTTP method for the HTTP request. Type: string - (or Expression with resultType string). - :type request_method: object - :param request_body: The body for the HTTP request. Type: string (or - Expression with resultType string). - :type request_body: object - :param additional_headers: The headers for the HTTP Request. e.g. - request-header-name-1:request-header-value-1 - ... - request-header-name-n:request-header-value-n. Type: string (or Expression - with resultType string). - :type additional_headers: object - :param format: The format of files. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used on files.
- :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, - 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, - 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, - 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, **kwargs): - super(HttpDataset, self).__init__(**kwargs) - self.relative_url = kwargs.get('relative_url', None) - self.request_method = kwargs.get('request_method', None) - self.request_body = kwargs.get('request_body', None) - self.additional_headers = kwargs.get('additional_headers', None) - self.format = kwargs.get('format', None) - self.compression = kwargs.get('compression', None) - self.type = 'HttpFile' - - -class HttpLinkedService(LinkedService): - """Linked service for an HTTP source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param url: Required. The base URL of the HTTP endpoint, e.g. - http://www.microsoft.com. Type: string (or Expression with resultType - string). - :type url: object - :param authentication_type: The authentication type to be used to connect - to the HTTP server. Possible values include: 'Basic', 'Anonymous', - 'Digest', 'Windows', 'ClientCertificate' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.HttpAuthenticationType - :param user_name: User name for Basic, Digest, or Windows authentication. - Type: string (or Expression with resultType string). - :type user_name: object - :param password: Password for Basic, Digest, Windows, or ClientCertificate - with EmbeddedCertData authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param embedded_cert_data: Base64 encoded certificate data for - ClientCertificate authentication. For on-premises copy with - ClientCertificate authentication, either CertThumbprint or - EmbeddedCertData/Password should be specified. 
Type: string (or Expression - with resultType string). - :type embedded_cert_data: object - :param cert_thumbprint: Thumbprint of certificate for ClientCertificate - authentication. Only valid for on-premises copy. For on-premises copy with - ClientCertificate authentication, either CertThumbprint or - EmbeddedCertData/Password should be specified. Type: string (or Expression - with resultType string). - :type cert_thumbprint: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - :param enable_server_certificate_validation: If true, validate the HTTPS - server SSL certificate. Default value is true. Type: boolean (or - Expression with resultType boolean). - :type enable_server_certificate_validation: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'embedded_cert_data': {'key': 'typeProperties.embeddedCertData', 'type': 'object'}, - 'cert_thumbprint': {'key': 'typeProperties.certThumbprint', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(HttpLinkedService, self).__init__(**kwargs) - self.url = kwargs.get('url', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.embedded_cert_data = kwargs.get('embedded_cert_data', None) - self.cert_thumbprint = kwargs.get('cert_thumbprint', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None) - self.type = 'HttpServer' - - -class HttpReadSettings(StoreReadSettings): - """Http read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param request_method: The HTTP method used to call the RESTful API. The - default is GET. Type: string (or Expression with resultType string). - :type request_method: object - :param request_body: The HTTP request body to the RESTful API if - requestMethod is POST.
Type: string (or Expression with resultType - string). - :type request_body: object - :param additional_headers: The additional HTTP headers in the request to - the RESTful API. Type: string (or Expression with resultType string). - :type additional_headers: object - :param request_timeout: Specifies the timeout for an HTTP client to get an - HTTP response from the HTTP server. - :type request_timeout: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'request_method': {'key': 'requestMethod', 'type': 'object'}, - 'request_body': {'key': 'requestBody', 'type': 'object'}, - 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, - 'request_timeout': {'key': 'requestTimeout', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(HttpReadSettings, self).__init__(**kwargs) - self.request_method = kwargs.get('request_method', None) - self.request_body = kwargs.get('request_body', None) - self.additional_headers = kwargs.get('additional_headers', None) - self.request_timeout = kwargs.get('request_timeout', None) - - -class HttpServerLocation(DatasetLocation): - """The location of an HTTP server. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string). - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). - :type file_name: object - :param relative_url: Specify the relativeUrl of the HTTP server. Type: string - (or Expression with resultType string). - :type relative_url: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - 'relative_url': {'key': 'relativeUrl', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(HttpServerLocation, self).__init__(**kwargs) - self.relative_url = kwargs.get('relative_url', None) - - -class HttpSource(CopySource): - """A copy activity source for an HTTP file. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server.
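The HTTP pieces compose as follows: HttpLinkedService carries the base URL, HttpServerLocation the relative path, and HttpReadSettings the request options. A sketch with placeholder values; note that this version expects the type discriminator to be passed explicitly for location and settings objects (the discriminator strings below are assumed):

from azure.mgmt.datafactory.models import (
    HttpLinkedService, HttpReadSettings, HttpServerLocation)

http_ls = HttpLinkedService(
    url='https://example.com',              # placeholder base URL
    authentication_type='Anonymous',
    enable_server_certificate_validation=True)
location = HttpServerLocation(
    type='HttpServerLocation',              # assumed discriminator value
    relative_url='data/2019/06/07.csv')
read_settings = HttpReadSettings(
    type='HttpReadSettings',                # assumed discriminator value
    request_method='GET',
    request_timeout='00:01:40')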
- :type type: str - :param http_request_timeout: Specifies the timeout for an HTTP client to - get an HTTP response from the HTTP server. The default value is equivalent to - System.Net.HttpWebRequest.Timeout. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(HttpSource, self).__init__(**kwargs) - self.http_request_timeout = kwargs.get('http_request_timeout', None) - self.type = 'HttpSource' - - -class HubspotLinkedService(LinkedService): - """Hubspot Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param client_id: Required. The client ID associated with your Hubspot - application. - :type client_id: object - :param client_secret: The client secret associated with your Hubspot - application. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param access_token: The access token obtained when initially - authenticating your OAuth integration. - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param refresh_token: The refresh token obtained when initially - authenticating your OAuth integration. - :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(HubspotLinkedService, self).__init__(**kwargs) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.access_token = kwargs.get('access_token', None) - self.refresh_token = kwargs.get('refresh_token', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Hubspot' - - -class HubspotObjectDataset(Dataset): - """Hubspot Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). 
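A sketch of wiring the OAuth pieces of HubspotLinkedService; the tokens are placeholders wrapped in SecureString (a SecretBase subtype from this package):

from azure.mgmt.datafactory.models import HubspotLinkedService, SecureString

hubspot_ls = HubspotLinkedService(
    client_id='<app-client-id>',
    client_secret=SecureString(value='<client-secret>'),
    access_token=SecureString(value='<access-token>'),
    refresh_token=SecureString(value='<refresh-token>'),
    use_encrypted_endpoints=True)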
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(HubspotObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'HubspotObject' - - -class HubspotSource(CopySource): - """A copy activity Hubspot Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(HubspotSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'HubspotSource' - - -class IfConditionActivity(ControlActivity): - """This activity evaluates a boolean expression and executes either the - activities under the ifTrueActivities property or the ifFalseActivities - property depending on the result of the expression. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. 
- :type type: str - :param expression: Required. An expression that would evaluate to Boolean. - This is used to determine the block of activities (ifTrueActivities or - ifFalseActivities) that will be executed. - :type expression: ~azure.mgmt.datafactory.models.Expression - :param if_true_activities: List of activities to execute if expression is - evaluated to true. This is an optional property and if not provided, the - activity will exit without any action. - :type if_true_activities: list[~azure.mgmt.datafactory.models.Activity] - :param if_false_activities: List of activities to execute if expression is - evaluated to false. This is an optional property and if not provided, the - activity will exit without any action. - :type if_false_activities: list[~azure.mgmt.datafactory.models.Activity] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'expression': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'expression': {'key': 'typeProperties.expression', 'type': 'Expression'}, - 'if_true_activities': {'key': 'typeProperties.ifTrueActivities', 'type': '[Activity]'}, - 'if_false_activities': {'key': 'typeProperties.ifFalseActivities', 'type': '[Activity]'}, - } - - def __init__(self, **kwargs): - super(IfConditionActivity, self).__init__(**kwargs) - self.expression = kwargs.get('expression', None) - self.if_true_activities = kwargs.get('if_true_activities', None) - self.if_false_activities = kwargs.get('if_false_activities', None) - self.type = 'IfCondition' - - -class ImpalaLinkedService(LinkedService): - """Impala server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The IP address or host name of the Impala server. - (e.g. 192.168.222.160) - :type host: object - :param port: The TCP port that the Impala server uses to listen for client - connections. The default value is 21050. - :type port: object - :param authentication_type: Required. The authentication type to use. - Possible values include: 'Anonymous', 'SASLUsername', - 'UsernameAndPassword' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.ImpalaAuthenticationType - :param username: The user name used to access the Impala server. The - default value is anonymous when using SASLUsername. - :type username: object - :param password: The password corresponding to the user name when using - UsernameAndPassword.
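The IfConditionActivity defined above branches on an Expression; a minimal sketch, using WaitActivity (another activity model in this package) as a stand-in branch body and an assumed lookup activity name inside the expression:

from azure.mgmt.datafactory.models import (
    Expression, IfConditionActivity, WaitActivity)

branch = IfConditionActivity(
    name='CheckRowCount',
    expression=Expression(value="@greater(activity('LookupRows').output.count, 0)"),
    if_true_activities=[WaitActivity(name='Proceed', wait_time_in_seconds=1)],
    if_false_activities=[])  # empty branch: the activity exits without action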
- :type password: ~azure.mgmt.datafactory.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are - encrypted using SSL. The default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing - trusted CA certificates for verifying the server when connecting over SSL. - This property can only be set when using SSL on self-hosted IR. The - default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate - from the system trust store or from a specified PEM file. The default - value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a - CA-issued SSL certificate name to match the host name of the server when - connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow - self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ImpalaLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.port = kwargs.get('port', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) - self.encrypted_credential 
= kwargs.get('encrypted_credential', None) - self.type = 'Impala' - - -class ImpalaObjectDataset(Dataset): - """Impala server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of the Impala. Type: string (or Expression - with resultType string). - :type table: object - :param impala_object_dataset_schema: The schema name of the Impala. Type: - string (or Expression with resultType string). - :type impala_object_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'impala_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ImpalaObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.impala_object_dataset_schema = kwargs.get('impala_object_dataset_schema', None) - self.type = 'ImpalaObject' - - -class ImpalaSource(CopySource): - """A copy activity Impala server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). 
- :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ImpalaSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'ImpalaSource' - - -class InformixLinkedService(LinkedService): - """Informix linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The non-access credential portion of - the connection string as well as an optional encrypted credential. Type: - string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param authentication_type: Type of authentication used to connect to the - Informix as ODBC data store. Possible values are: Anonymous and Basic. - Type: string (or Expression with resultType string). - :type authentication_type: object - :param credential: The access credential portion of the connection string - specified in driver-specific property-value format. - :type credential: ~azure.mgmt.datafactory.models.SecretBase - :param user_name: User name for Basic authentication. Type: string (or - Expression with resultType string). - :type user_name: object - :param password: Password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(InformixLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.credential = kwargs.get('credential', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Informix' - - -class InformixSink(CopySink): - """A copy activity Informix sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param pre_copy_script: A query to execute before starting the copy. Type: - string (or Expression with resultType string). 
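A sketch for the InformixLinkedService above, keeping the secret credential portion out of the plain connection string as the docstring describes (driver, host, and database values are placeholders):

from azure.mgmt.datafactory.models import InformixLinkedService, SecureString

informix_ls = InformixLinkedService(
    connection_string='Driver={IBM INFORMIX ODBC DRIVER};Host=ifx.contoso.com;Database=stores',
    authentication_type='Basic',
    user_name='informix',
    password=SecureString(value='<password>'))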
- :type pre_copy_script: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(InformixSink, self).__init__(**kwargs) - self.pre_copy_script = kwargs.get('pre_copy_script', None) - self.type = 'InformixSink' - - -class InformixSource(CopySource): - """A copy activity source for Informix. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(InformixSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'InformixSource' - - -class InformixTableDataset(Dataset): - """The Informix table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
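The Informix source and sink above plug into a copy activity; a sketch assuming CopyActivity and DatasetReference from this same models package, with illustrative dataset names:

from azure.mgmt.datafactory.models import (
    CopyActivity, DatasetReference, InformixSink, InformixSource)

copy_step = CopyActivity(
    name='CopyToInformix',
    inputs=[DatasetReference(reference_name='SourceDataset')],
    outputs=[DatasetReference(reference_name='InformixDataset')],
    source=InformixSource(query='SELECT * FROM customer'),
    sink=InformixSink(pre_copy_script='DELETE FROM customer'))  # runs before the copy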
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The Informix table name. Type: string (or Expression - with resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(InformixTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'InformixTable' - - -class IntegrationRuntime(Model): - """Azure Data Factory nested object which serves as a compute resource for - activities. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SelfHostedIntegrationRuntime, ManagedIntegrationRuntime - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Integration runtime description. - :type description: str - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SelfHosted': 'SelfHostedIntegrationRuntime', 'Managed': 'ManagedIntegrationRuntime'} - } - - def __init__(self, **kwargs): - super(IntegrationRuntime, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.description = kwargs.get('description', None) - self.type = None - - -class IntegrationRuntimeAuthKeys(Model): - """The integration runtime authentication keys. - - :param auth_key1: The primary integration runtime authentication key. - :type auth_key1: str - :param auth_key2: The secondary integration runtime authentication key. - :type auth_key2: str - """ - - _attribute_map = { - 'auth_key1': {'key': 'authKey1', 'type': 'str'}, - 'auth_key2': {'key': 'authKey2', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(IntegrationRuntimeAuthKeys, self).__init__(**kwargs) - self.auth_key1 = kwargs.get('auth_key1', None) - self.auth_key2 = kwargs.get('auth_key2', None) - - -class IntegrationRuntimeComputeProperties(Model): - """The compute resource properties for managed integration runtime. 
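# A hedged sketch of how the _subtype_map above is used: msrest's
# Model.deserialize dispatches on the 'type' discriminator, so a payload
# marked 'SelfHosted' comes back as a SelfHostedIntegrationRuntime instance.
from azure.mgmt.datafactory.models import IntegrationRuntime

ir = IntegrationRuntime.deserialize(
    {'type': 'SelfHosted', 'description': 'on-premises runtime'})
print(type(ir).__name__)  # expected: SelfHostedIntegrationRuntime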
- 
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param location: The location for managed integration runtime. The
- supported regions could be found on
- https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement-activities
- :type location: str
- :param node_size: The node size requirement for the managed integration
- runtime.
- :type node_size: str
- :param number_of_nodes: The required number of nodes for the managed
- integration runtime.
- :type number_of_nodes: int
- :param max_parallel_executions_per_node: The maximum number of parallel
- executions per node for the managed integration runtime.
- :type max_parallel_executions_per_node: int
- :param v_net_properties: VNet properties for the managed integration
- runtime.
- :type v_net_properties:
- ~azure.mgmt.datafactory.models.IntegrationRuntimeVNetProperties
- """
- 
- _validation = {
- 'number_of_nodes': {'minimum': 1},
- 'max_parallel_executions_per_node': {'minimum': 1},
- }
- 
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'location': {'key': 'location', 'type': 'str'},
- 'node_size': {'key': 'nodeSize', 'type': 'str'},
- 'number_of_nodes': {'key': 'numberOfNodes', 'type': 'int'},
- 'max_parallel_executions_per_node': {'key': 'maxParallelExecutionsPerNode', 'type': 'int'},
- 'v_net_properties': {'key': 'vNetProperties', 'type': 'IntegrationRuntimeVNetProperties'},
- }
- 
- def __init__(self, **kwargs):
- super(IntegrationRuntimeComputeProperties, self).__init__(**kwargs)
- self.additional_properties = kwargs.get('additional_properties', None)
- self.location = kwargs.get('location', None)
- self.node_size = kwargs.get('node_size', None)
- self.number_of_nodes = kwargs.get('number_of_nodes', None)
- self.max_parallel_executions_per_node = kwargs.get('max_parallel_executions_per_node', None)
- self.v_net_properties = kwargs.get('v_net_properties', None)
- 
- 
- class IntegrationRuntimeConnectionInfo(Model):
- """Connection information for encrypting the on-premises data source
- credentials.
- 
- Variables are only populated by the server, and will be ignored when
- sending a request.
- 
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :ivar service_token: The token generated in service. Callers use this
- token to authenticate to the integration runtime.
- :vartype service_token: str
- :ivar identity_cert_thumbprint: The integration runtime SSL certificate
- thumbprint. The Click-Once application uses it for server validation.
- :vartype identity_cert_thumbprint: str
- :ivar host_service_uri: The on-premises integration runtime host URL.
- :vartype host_service_uri: str
- :ivar version: The integration runtime version.
- :vartype version: str
- :ivar public_key: The public key for encrypting a credential when
- transferring the credential to the integration runtime.
- :vartype public_key: str
- :ivar is_identity_cert_exprired: Whether the identity certificate is
- expired.
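# A sketch of joining a managed integration runtime to a VNet through the
# vNetProperties field above (IntegrationRuntimeVNetProperties is defined
# later in this file); the resource IDs are placeholders.
from azure.mgmt.datafactory.models import (
    IntegrationRuntimeComputeProperties, IntegrationRuntimeVNetProperties)

compute = IntegrationRuntimeComputeProperties(
    location='West US',
    node_size='Standard_D2_v3',
    number_of_nodes=2,                    # validated: minimum 1
    max_parallel_executions_per_node=4,   # validated: minimum 1
    v_net_properties=IntegrationRuntimeVNetProperties(
        v_net_id='<vnet-resource-id>', subnet='<subnet-name>'),
)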
- :vartype is_identity_cert_exprired: bool - """ - - _validation = { - 'service_token': {'readonly': True}, - 'identity_cert_thumbprint': {'readonly': True}, - 'host_service_uri': {'readonly': True}, - 'version': {'readonly': True}, - 'public_key': {'readonly': True}, - 'is_identity_cert_exprired': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'service_token': {'key': 'serviceToken', 'type': 'str'}, - 'identity_cert_thumbprint': {'key': 'identityCertThumbprint', 'type': 'str'}, - 'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'}, - 'version': {'key': 'version', 'type': 'str'}, - 'public_key': {'key': 'publicKey', 'type': 'str'}, - 'is_identity_cert_exprired': {'key': 'isIdentityCertExprired', 'type': 'bool'}, - } - - def __init__(self, **kwargs): - super(IntegrationRuntimeConnectionInfo, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.service_token = None - self.identity_cert_thumbprint = None - self.host_service_uri = None - self.version = None - self.public_key = None - self.is_identity_cert_exprired = None - - -class IntegrationRuntimeCustomSetupScriptProperties(Model): - """Custom setup script properties for a managed dedicated integration runtime. - - :param blob_container_uri: The URI of the Azure blob container that - contains the custom setup script. - :type blob_container_uri: str - :param sas_token: The SAS token of the Azure blob container. - :type sas_token: ~azure.mgmt.datafactory.models.SecureString - """ - - _attribute_map = { - 'blob_container_uri': {'key': 'blobContainerUri', 'type': 'str'}, - 'sas_token': {'key': 'sasToken', 'type': 'SecureString'}, - } - - def __init__(self, **kwargs): - super(IntegrationRuntimeCustomSetupScriptProperties, self).__init__(**kwargs) - self.blob_container_uri = kwargs.get('blob_container_uri', None) - self.sas_token = kwargs.get('sas_token', None) - - -class IntegrationRuntimeDataProxyProperties(Model): - """Data proxy properties for a managed dedicated integration runtime. - - :param connect_via: The self-hosted integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.EntityReference - :param staging_linked_service: The staging linked service reference. - :type staging_linked_service: - ~azure.mgmt.datafactory.models.EntityReference - :param path: The path to contain the staged data in the Blob storage. - :type path: str - """ - - _attribute_map = { - 'connect_via': {'key': 'connectVia', 'type': 'EntityReference'}, - 'staging_linked_service': {'key': 'stagingLinkedService', 'type': 'EntityReference'}, - 'path': {'key': 'path', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(IntegrationRuntimeDataProxyProperties, self).__init__(**kwargs) - self.connect_via = kwargs.get('connect_via', None) - self.staging_linked_service = kwargs.get('staging_linked_service', None) - self.path = kwargs.get('path', None) - - -class IntegrationRuntimeMonitoringData(Model): - """Get monitoring data response. - - :param name: Integration runtime name. - :type name: str - :param nodes: Integration runtime node monitoring data. 
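# A sketch pairing the two helper models above: a custom setup script pulled
# from a blob container, and a data proxy routed through a self-hosted IR.
# The EntityReference construction follows its docstring here and is an
# assumption; URIs and names are placeholders.
from azure.mgmt.datafactory.models import (
    EntityReference, IntegrationRuntimeCustomSetupScriptProperties,
    IntegrationRuntimeDataProxyProperties, SecureString)

setup = IntegrationRuntimeCustomSetupScriptProperties(
    blob_container_uri='https://<account>.blob.core.windows.net/setup',
    sas_token=SecureString(value='<sas-token>'),
)
proxy = IntegrationRuntimeDataProxyProperties(
    connect_via=EntityReference(type='IntegrationRuntimeReference',
                                reference_name='MySelfHostedIR'),
    path='staging',
)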
- :type nodes: - list[~azure.mgmt.datafactory.models.IntegrationRuntimeNodeMonitoringData] - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'nodes': {'key': 'nodes', 'type': '[IntegrationRuntimeNodeMonitoringData]'}, - } - - def __init__(self, **kwargs): - super(IntegrationRuntimeMonitoringData, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.nodes = kwargs.get('nodes', None) - - -class IntegrationRuntimeNodeIpAddress(Model): - """The IP address of self-hosted integration runtime node. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar ip_address: The IP address of self-hosted integration runtime node. - :vartype ip_address: str - """ - - _validation = { - 'ip_address': {'readonly': True}, - } - - _attribute_map = { - 'ip_address': {'key': 'ipAddress', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(IntegrationRuntimeNodeIpAddress, self).__init__(**kwargs) - self.ip_address = None - - -class IntegrationRuntimeNodeMonitoringData(Model): - """Monitoring data for integration runtime node. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar node_name: Name of the integration runtime node. - :vartype node_name: str - :ivar available_memory_in_mb: Available memory (MB) on the integration - runtime node. - :vartype available_memory_in_mb: int - :ivar cpu_utilization: CPU percentage on the integration runtime node. - :vartype cpu_utilization: int - :ivar concurrent_jobs_limit: Maximum concurrent jobs on the integration - runtime node. - :vartype concurrent_jobs_limit: int - :ivar concurrent_jobs_running: The number of jobs currently running on the - integration runtime node. - :vartype concurrent_jobs_running: int - :ivar max_concurrent_jobs: The maximum concurrent jobs in this integration - runtime. - :vartype max_concurrent_jobs: int - :ivar sent_bytes: Sent bytes on the integration runtime node. - :vartype sent_bytes: float - :ivar received_bytes: Received bytes on the integration runtime node. 
- :vartype received_bytes: float - """ - - _validation = { - 'node_name': {'readonly': True}, - 'available_memory_in_mb': {'readonly': True}, - 'cpu_utilization': {'readonly': True}, - 'concurrent_jobs_limit': {'readonly': True}, - 'concurrent_jobs_running': {'readonly': True}, - 'max_concurrent_jobs': {'readonly': True}, - 'sent_bytes': {'readonly': True}, - 'received_bytes': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'node_name': {'key': 'nodeName', 'type': 'str'}, - 'available_memory_in_mb': {'key': 'availableMemoryInMB', 'type': 'int'}, - 'cpu_utilization': {'key': 'cpuUtilization', 'type': 'int'}, - 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, - 'concurrent_jobs_running': {'key': 'concurrentJobsRunning', 'type': 'int'}, - 'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'}, - 'sent_bytes': {'key': 'sentBytes', 'type': 'float'}, - 'received_bytes': {'key': 'receivedBytes', 'type': 'float'}, - } - - def __init__(self, **kwargs): - super(IntegrationRuntimeNodeMonitoringData, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.node_name = None - self.available_memory_in_mb = None - self.cpu_utilization = None - self.concurrent_jobs_limit = None - self.concurrent_jobs_running = None - self.max_concurrent_jobs = None - self.sent_bytes = None - self.received_bytes = None - - -class IntegrationRuntimeReference(Model): - """Integration runtime reference type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Type of integration runtime. Default value: - "IntegrationRuntimeReference" . - :vartype type: str - :param reference_name: Required. Reference integration runtime name. - :type reference_name: str - :param parameters: Arguments for integration runtime. - :type parameters: dict[str, object] - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, - } - - type = "IntegrationRuntimeReference" - - def __init__(self, **kwargs): - super(IntegrationRuntimeReference, self).__init__(**kwargs) - self.reference_name = kwargs.get('reference_name', None) - self.parameters = kwargs.get('parameters', None) - - -class IntegrationRuntimeRegenerateKeyParameters(Model): - """Parameters to regenerate the authentication key. - - :param key_name: The name of the authentication key to regenerate. - Possible values include: 'authKey1', 'authKey2' - :type key_name: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeyName - """ - - _attribute_map = { - 'key_name': {'key': 'keyName', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(IntegrationRuntimeRegenerateKeyParameters, self).__init__(**kwargs) - self.key_name = kwargs.get('key_name', None) - - -class IntegrationRuntimeResource(SubResource): - """Integration runtime resource type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. 
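# A sketch of the two small models above: the reference type's 'type' is a
# class-level constant, so only the runtime name is needed, and key
# regeneration takes one of the documented key names.
from azure.mgmt.datafactory.models import (
    IntegrationRuntimeReference, IntegrationRuntimeRegenerateKeyParameters)

ir_ref = IntegrationRuntimeReference(reference_name='MyManagedIR')
regen = IntegrationRuntimeRegenerateKeyParameters(key_name='authKey2')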
- :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Integration runtime properties. - :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntime - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'IntegrationRuntime'}, - } - - def __init__(self, **kwargs): - super(IntegrationRuntimeResource, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - - -class IntegrationRuntimeSsisCatalogInfo(Model): - """Catalog information for managed dedicated integration runtime. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param catalog_server_endpoint: The catalog database server URL. - :type catalog_server_endpoint: str - :param catalog_admin_user_name: The administrator user name of catalog - database. - :type catalog_admin_user_name: str - :param catalog_admin_password: The password of the administrator user - account of the catalog database. - :type catalog_admin_password: ~azure.mgmt.datafactory.models.SecureString - :param catalog_pricing_tier: The pricing tier for the catalog database. - The valid values could be found in - https://azure.microsoft.com/en-us/pricing/details/sql-database/. Possible - values include: 'Basic', 'Standard', 'Premium', 'PremiumRS' - :type catalog_pricing_tier: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogPricingTier - """ - - _validation = { - 'catalog_admin_user_name': {'max_length': 128, 'min_length': 1}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'catalog_server_endpoint': {'key': 'catalogServerEndpoint', 'type': 'str'}, - 'catalog_admin_user_name': {'key': 'catalogAdminUserName', 'type': 'str'}, - 'catalog_admin_password': {'key': 'catalogAdminPassword', 'type': 'SecureString'}, - 'catalog_pricing_tier': {'key': 'catalogPricingTier', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(IntegrationRuntimeSsisCatalogInfo, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.catalog_server_endpoint = kwargs.get('catalog_server_endpoint', None) - self.catalog_admin_user_name = kwargs.get('catalog_admin_user_name', None) - self.catalog_admin_password = kwargs.get('catalog_admin_password', None) - self.catalog_pricing_tier = kwargs.get('catalog_pricing_tier', None) - - -class IntegrationRuntimeSsisProperties(Model): - """SSIS properties for managed integration runtime. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param catalog_info: Catalog information for managed dedicated integration - runtime. - :type catalog_info: - ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogInfo - :param license_type: License type for bringing your own license scenario. 
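# A sketch of the SSIS catalog settings above; the endpoint and credentials
# are placeholders. Note catalog_admin_user_name is validated to 1-128
# characters.
from azure.mgmt.datafactory.models import (
    IntegrationRuntimeSsisCatalogInfo, SecureString)

catalog = IntegrationRuntimeSsisCatalogInfo(
    catalog_server_endpoint='<server>.database.windows.net',
    catalog_admin_user_name='ssisadmin',
    catalog_admin_password=SecureString(value='<password>'),
    catalog_pricing_tier='Basic',
)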
- Possible values include: 'BasePrice', 'LicenseIncluded'
- :type license_type: str or
- ~azure.mgmt.datafactory.models.IntegrationRuntimeLicenseType
- :param custom_setup_script_properties: Custom setup script properties for
- a managed dedicated integration runtime.
- :type custom_setup_script_properties:
- ~azure.mgmt.datafactory.models.IntegrationRuntimeCustomSetupScriptProperties
- :param data_proxy_properties: Data proxy properties for a managed
- dedicated integration runtime.
- :type data_proxy_properties:
- ~azure.mgmt.datafactory.models.IntegrationRuntimeDataProxyProperties
- :param edition: The edition for the SSIS Integration Runtime. Possible
- values include: 'Standard', 'Enterprise'
- :type edition: str or
- ~azure.mgmt.datafactory.models.IntegrationRuntimeEdition
- """
- 
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'catalog_info': {'key': 'catalogInfo', 'type': 'IntegrationRuntimeSsisCatalogInfo'},
- 'license_type': {'key': 'licenseType', 'type': 'str'},
- 'custom_setup_script_properties': {'key': 'customSetupScriptProperties', 'type': 'IntegrationRuntimeCustomSetupScriptProperties'},
- 'data_proxy_properties': {'key': 'dataProxyProperties', 'type': 'IntegrationRuntimeDataProxyProperties'},
- 'edition': {'key': 'edition', 'type': 'str'},
- }
- 
- def __init__(self, **kwargs):
- super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs)
- self.additional_properties = kwargs.get('additional_properties', None)
- self.catalog_info = kwargs.get('catalog_info', None)
- self.license_type = kwargs.get('license_type', None)
- self.custom_setup_script_properties = kwargs.get('custom_setup_script_properties', None)
- self.data_proxy_properties = kwargs.get('data_proxy_properties', None)
- self.edition = kwargs.get('edition', None)
- 
- 
- class IntegrationRuntimeStatus(Model):
- """Integration runtime status.
- 
- You probably want to use the sub-classes and not this class directly. Known
- sub-classes are: SelfHostedIntegrationRuntimeStatus,
- ManagedIntegrationRuntimeStatus
- 
- Variables are only populated by the server, and will be ignored when
- sending a request.
- 
- All required parameters must be populated in order to send to Azure.
- 
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :ivar data_factory_name: The name of the data factory that the integration
- runtime belongs to.
- :vartype data_factory_name: str
- :ivar state: The state of the integration runtime. Possible values
- include: 'Initial', 'Stopped', 'Started', 'Starting', 'Stopping',
- 'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied'
- :vartype state: str or
- ~azure.mgmt.datafactory.models.IntegrationRuntimeState
- :param type: Required. Constant filled by server.
- :type type: str - """ - - _validation = { - 'data_factory_name': {'readonly': True}, - 'state': {'readonly': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, - 'state': {'key': 'state', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SelfHosted': 'SelfHostedIntegrationRuntimeStatus', 'Managed': 'ManagedIntegrationRuntimeStatus'} - } - - def __init__(self, **kwargs): - super(IntegrationRuntimeStatus, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.data_factory_name = None - self.state = None - self.type = None - - -class IntegrationRuntimeStatusListResponse(Model): - """A list of integration runtime status. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of integration runtime status. - :type value: - list[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse] - :param next_link: The link to the next page of results, if any remaining - results exist. - :type next_link: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[IntegrationRuntimeStatusResponse]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(IntegrationRuntimeStatusListResponse, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.next_link = kwargs.get('next_link', None) - - -class IntegrationRuntimeStatusResponse(Model): - """Integration runtime status response. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar name: The integration runtime name. - :vartype name: str - :param properties: Required. Integration runtime properties. - :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatus - """ - - _validation = { - 'name': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'IntegrationRuntimeStatus'}, - } - - def __init__(self, **kwargs): - super(IntegrationRuntimeStatusResponse, self).__init__(**kwargs) - self.name = None - self.properties = kwargs.get('properties', None) - - -class IntegrationRuntimeVNetProperties(Model): - """VNet properties for managed integration runtime. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param v_net_id: The ID of the VNet that this integration runtime will - join. - :type v_net_id: str - :param subnet: The name of the subnet this integration runtime will join. - :type subnet: str - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'v_net_id': {'key': 'vNetId', 'type': 'str'}, - 'subnet': {'key': 'subnet', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(IntegrationRuntimeVNetProperties, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.v_net_id = kwargs.get('v_net_id', None) - self.subnet = kwargs.get('subnet', None) - - -class JiraLinkedService(LinkedService): - """Jira Service linked service. - - All required parameters must be populated in order to send to Azure. 
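# A hedged sketch: the status models above are produced by the management
# client rather than built by hand. The client construction below is an
# assumption for illustration; all identifiers are placeholders.
from azure.common.credentials import ServicePrincipalCredentials
from azure.mgmt.datafactory import DataFactoryManagementClient

creds = ServicePrincipalCredentials(
    client_id='<client-id>', secret='<secret>', tenant='<tenant-id>')
client = DataFactoryManagementClient(creds, '<subscription-id>')
status = client.integration_runtimes.get_status(
    '<resource-group>', '<factory-name>', 'MyManagedIR')
print(status.name, status.properties.state)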
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The IP address or host name of the Jira service. - (e.g. jira.example.com) - :type host: object - :param port: The TCP port that the Jira server uses to listen for client - connections. The default value is 443 if connecting through HTTPS, or 8080 - if connecting through HTTP. - :type port: object - :param username: Required. The user name that you use to access Jira - Service. - :type username: object - :param password: The password corresponding to the user name that you - provided in the username field. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'username': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(JiraLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.port = kwargs.get('port', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Jira' - - -class JiraObjectDataset(Dataset): - """Jira Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). 
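# A sketch of the Jira linked service above; host and user are placeholders.
# Port is omitted, so the documented default applies (443 over HTTPS).
from azure.mgmt.datafactory.models import JiraLinkedService, SecureString

jira_ls = JiraLinkedService(
    host='jira.example.com',
    username='jira-reader',
    password=SecureString(value='<api-token-or-password>'),
)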
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(JiraObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'JiraObject' - - -class JiraSource(CopySource): - """A copy activity Jira Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(JiraSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'JiraSource' - - -class JsonDataset(Dataset): - """Json dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param location: Required. The location of the json data storage. - :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param encoding_name: The code page name of the preferred encoding. If not - specified, the default value is UTF-8, unless BOM denotes another Unicode - encoding. Refer to the name column of the table in the following link to - set supported values: - https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string - (or Expression with resultType string). - :type encoding_name: object - :param compression: The data compression method used for the json dataset. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'location': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, **kwargs): - super(JsonDataset, self).__init__(**kwargs) - self.location = kwargs.get('location', None) - self.encoding_name = kwargs.get('encoding_name', None) - self.compression = kwargs.get('compression', None) - self.type = 'Json' - - -class JsonFormat(DatasetStorageFormat): - """The data stored in JSON format. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param serializer: Serializer. Type: string (or Expression with resultType - string). - :type serializer: object - :param deserializer: Deserializer. Type: string (or Expression with - resultType string). - :type deserializer: object - :param type: Required. Constant filled by server. - :type type: str - :param file_pattern: File pattern of JSON. To be more specific, the way of - separating a collection of JSON objects. The default value is - 'setOfObjects'. It is case-sensitive. - :type file_pattern: object - :param nesting_separator: The character used to separate nesting levels. - Default value is '.' (dot). Type: string (or Expression with resultType - string). - :type nesting_separator: object - :param encoding_name: The code page name of the preferred encoding. If not - provided, the default value is 'utf-8', unless the byte order mark (BOM) - denotes another Unicode encoding. 
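# A sketch of a JSON dataset pointing at a blob location. Using the base
# DatasetLocation with an 'AzureBlobStorageLocation' type string is an
# assumption for illustration; names and paths are placeholders.
from azure.mgmt.datafactory.models import (
    DatasetLocation, JsonDataset, LinkedServiceReference)

ds = JsonDataset(
    linked_service_name=LinkedServiceReference(reference_name='MyBlobStorage'),
    location=DatasetLocation(type='AzureBlobStorageLocation',
                             folder_path='input/events',
                             file_name='events.json'),
    encoding_name='UTF-8',
)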
The full list of supported values can be - found in the 'Name' column of the table of encodings in the following - reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string - (or Expression with resultType string). - :type encoding_name: object - :param json_node_reference: The JSONPath of the JSON array element to be - flattened. Example: "$.ArrayPath". Type: string (or Expression with - resultType string). - :type json_node_reference: object - :param json_path_definition: The JSONPath definition for each column - mapping with a customized column name to extract data from JSON file. For - fields under root object, start with "$"; for fields inside the array - chosen by jsonNodeReference property, start from the array element. - Example: {"Column1": "$.Column1Path", "Column2": "Column2PathInArray"}. - Type: object (or Expression with resultType object). - :type json_path_definition: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'file_pattern': {'key': 'filePattern', 'type': 'object'}, - 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, - 'encoding_name': {'key': 'encodingName', 'type': 'object'}, - 'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'}, - 'json_path_definition': {'key': 'jsonPathDefinition', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(JsonFormat, self).__init__(**kwargs) - self.file_pattern = kwargs.get('file_pattern', None) - self.nesting_separator = kwargs.get('nesting_separator', None) - self.encoding_name = kwargs.get('encoding_name', None) - self.json_node_reference = kwargs.get('json_node_reference', None) - self.json_path_definition = kwargs.get('json_path_definition', None) - self.type = 'JsonFormat' - - -class JsonSink(CopySink): - """A copy activity Json sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: Json store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - :param format_settings: Json format settings. 
- :type format_settings: ~azure.mgmt.datafactory.models.JsonWriteSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'JsonWriteSettings'}, - } - - def __init__(self, **kwargs): - super(JsonSink, self).__init__(**kwargs) - self.store_settings = kwargs.get('store_settings', None) - self.format_settings = kwargs.get('format_settings', None) - self.type = 'JsonSink' - - -class JsonSource(CopySource): - """A copy activity Json source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: Json store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - } - - def __init__(self, **kwargs): - super(JsonSource, self).__init__(**kwargs) - self.store_settings = kwargs.get('store_settings', None) - self.type = 'JsonSource' - - -class JsonWriteSettings(FormatWriteSettings): - """Json write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str - :param file_pattern: File pattern of JSON. This setting controls the way a - collection of JSON objects will be treated. The default value is - 'setOfObjects'. It is case-sensitive. 
Possible values include:
- 'setOfObjects', 'arrayOfObjects'
- :type file_pattern: str or
- ~azure.mgmt.datafactory.models.JsonWriteFilePattern
- """
- 
- _validation = {
- 'type': {'required': True},
- }
- 
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'type': {'key': 'type', 'type': 'str'},
- 'file_pattern': {'key': 'filePattern', 'type': 'str'},
- }
- 
- def __init__(self, **kwargs):
- super(JsonWriteSettings, self).__init__(**kwargs)
- self.file_pattern = kwargs.get('file_pattern', None)
- 
- 
- class LinkedIntegrationRuntime(Model):
- """The linked integration runtime information.
- 
- Variables are only populated by the server, and will be ignored when
- sending a request.
- 
- :ivar name: The name of the linked integration runtime.
- :vartype name: str
- :ivar subscription_id: The subscription ID that the linked integration
- runtime belongs to.
- :vartype subscription_id: str
- :ivar data_factory_name: The name of the data factory that the linked
- integration runtime belongs to.
- :vartype data_factory_name: str
- :ivar data_factory_location: The location of the data factory that the
- linked integration runtime belongs to.
- :vartype data_factory_location: str
- :ivar create_time: The time at which the linked integration runtime was
- created.
- :vartype create_time: datetime
- """
- 
- _validation = {
- 'name': {'readonly': True},
- 'subscription_id': {'readonly': True},
- 'data_factory_name': {'readonly': True},
- 'data_factory_location': {'readonly': True},
- 'create_time': {'readonly': True},
- }
- 
- _attribute_map = {
- 'name': {'key': 'name', 'type': 'str'},
- 'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
- 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'},
- 'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'},
- 'create_time': {'key': 'createTime', 'type': 'iso-8601'},
- }
- 
- def __init__(self, **kwargs):
- super(LinkedIntegrationRuntime, self).__init__(**kwargs)
- self.name = None
- self.subscription_id = None
- self.data_factory_name = None
- self.data_factory_location = None
- self.create_time = None
- 
- 
- class LinkedIntegrationRuntimeType(Model):
- """The base definition of a linked integration runtime.
- 
- You probably want to use the sub-classes and not this class directly. Known
- sub-classes are: LinkedIntegrationRuntimeRbacAuthorization,
- LinkedIntegrationRuntimeKeyAuthorization
- 
- All required parameters must be populated in order to send to Azure.
- 
- :param authorization_type: Required. Constant filled by server.
- :type authorization_type: str
- """
- 
- _validation = {
- 'authorization_type': {'required': True},
- }
- 
- _attribute_map = {
- 'authorization_type': {'key': 'authorizationType', 'type': 'str'},
- }
- 
- _subtype_map = {
- 'authorization_type': {'RBAC': 'LinkedIntegrationRuntimeRbacAuthorization', 'Key': 'LinkedIntegrationRuntimeKeyAuthorization'}
- }
- 
- def __init__(self, **kwargs):
- super(LinkedIntegrationRuntimeType, self).__init__(**kwargs)
- self.authorization_type = None
- 
- 
- class LinkedIntegrationRuntimeKeyAuthorization(LinkedIntegrationRuntimeType):
- """The key authorization type for a linked integration runtime.
- 
- All required parameters must be populated in order to send to Azure.
- 
- :param authorization_type: Required. Constant filled by server.
- :type authorization_type: str
- :param key: Required. The key used for authorization.
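# A sketch wiring JsonSink with the write settings above; per the docstring,
# 'type' is a required free-form string on JsonWriteSettings.
from azure.mgmt.datafactory.models import JsonSink, JsonWriteSettings

sink = JsonSink(
    format_settings=JsonWriteSettings(type='JsonWriteSettings',
                                      file_pattern='arrayOfObjects'),
)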
- :type key: ~azure.mgmt.datafactory.models.SecureString - """ - - _validation = { - 'authorization_type': {'required': True}, - 'key': {'required': True}, - } - - _attribute_map = { - 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, - 'key': {'key': 'key', 'type': 'SecureString'}, - } - - def __init__(self, **kwargs): - super(LinkedIntegrationRuntimeKeyAuthorization, self).__init__(**kwargs) - self.key = kwargs.get('key', None) - self.authorization_type = 'Key' - - -class LinkedIntegrationRuntimeRbacAuthorization(LinkedIntegrationRuntimeType): - """The role based access control (RBAC) authorization type integration - runtime. - - All required parameters must be populated in order to send to Azure. - - :param authorization_type: Required. Constant filled by server. - :type authorization_type: str - :param resource_id: Required. The resource identifier of the integration - runtime to be shared. - :type resource_id: str - """ - - _validation = { - 'authorization_type': {'required': True}, - 'resource_id': {'required': True}, - } - - _attribute_map = { - 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(LinkedIntegrationRuntimeRbacAuthorization, self).__init__(**kwargs) - self.resource_id = kwargs.get('resource_id', None) - self.authorization_type = 'RBAC' - - -class LinkedIntegrationRuntimeRequest(Model): - """Data factory name for linked integration runtime request. - - All required parameters must be populated in order to send to Azure. - - :param linked_factory_name: Required. The data factory name for linked - integration runtime. - :type linked_factory_name: str - """ - - _validation = { - 'linked_factory_name': {'required': True}, - } - - _attribute_map = { - 'linked_factory_name': {'key': 'factoryName', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(LinkedIntegrationRuntimeRequest, self).__init__(**kwargs) - self.linked_factory_name = kwargs.get('linked_factory_name', None) - - -class LinkedServiceReference(Model): - """Linked service reference type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Linked service reference type. Default value: - "LinkedServiceReference" . - :vartype type: str - :param reference_name: Required. Reference LinkedService name. - :type reference_name: str - :param parameters: Arguments for LinkedService. - :type parameters: dict[str, object] - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, - } - - type = "LinkedServiceReference" - - def __init__(self, **kwargs): - super(LinkedServiceReference, self).__init__(**kwargs) - self.reference_name = kwargs.get('reference_name', None) - self.parameters = kwargs.get('parameters', None) - - -class LinkedServiceResource(SubResource): - """Linked service resource type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. 
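# A sketch of the two authorization flavors above for sharing a self-hosted
# integration runtime; the key and resource ID are placeholders.
from azure.mgmt.datafactory.models import (
    LinkedIntegrationRuntimeKeyAuthorization,
    LinkedIntegrationRuntimeRbacAuthorization, SecureString)

key_auth = LinkedIntegrationRuntimeKeyAuthorization(
    key=SecureString(value='<auth-key>'))
rbac_auth = LinkedIntegrationRuntimeRbacAuthorization(
    resource_id='/subscriptions/<sub>/resourceGroups/<rg>/providers'
                '/Microsoft.DataFactory/factories/<factory>'
                '/integrationRuntimes/<ir>')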
- :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Properties of linked service. - :type properties: ~azure.mgmt.datafactory.models.LinkedService - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'LinkedService'}, - } - - def __init__(self, **kwargs): - super(LinkedServiceResource, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - - -class LogStorageSettings(Model): - """Log storage settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param linked_service_name: Required. Log storage linked service - reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param path: The path to storage for storing detailed logs of activity - execution. Type: string (or Expression with resultType string). - :type path: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'path': {'key': 'path', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(LogStorageSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.linked_service_name = kwargs.get('linked_service_name', None) - self.path = kwargs.get('path', None) - - -class LookupActivity(ExecutionActivity): - """Lookup activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param source: Required. Dataset-specific source properties, same as copy - activity source. - :type source: ~azure.mgmt.datafactory.models.CopySource - :param dataset: Required. Lookup activity dataset reference. - :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - :param first_row_only: Whether to return first row or all rows. Default - value is true. Type: boolean (or Expression with resultType boolean). 
- :type first_row_only: object
- """
- 
- _validation = {
- 'name': {'required': True},
- 'type': {'required': True},
- 'source': {'required': True},
- 'dataset': {'required': True},
- }
- 
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'name': {'key': 'name', 'type': 'str'},
- 'description': {'key': 'description', 'type': 'str'},
- 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
- 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
- 'source': {'key': 'typeProperties.source', 'type': 'CopySource'},
- 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'},
- 'first_row_only': {'key': 'typeProperties.firstRowOnly', 'type': 'object'},
- }
- 
- def __init__(self, **kwargs):
- super(LookupActivity, self).__init__(**kwargs)
- self.source = kwargs.get('source', None)
- self.dataset = kwargs.get('dataset', None)
- self.first_row_only = kwargs.get('first_row_only', None)
- self.type = 'Lookup'
- 
- 
- class MagentoLinkedService(LinkedService):
- """Magento server linked service.
- 
- All required parameters must be populated in order to send to Azure.
- 
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param connect_via: The integration runtime reference.
- :type connect_via:
- ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
- :param description: Linked service description.
- :type description: str
- :param parameters: Parameters for linked service.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- linked service.
- :type annotations: list[object]
- :param type: Required. Constant filled by server.
- :type type: str
- :param host: Required. The URL of the Magento instance. (e.g.
- 192.168.222.110/magento3)
- :type host: object
- :param access_token: The access token from Magento.
- :type access_token: ~azure.mgmt.datafactory.models.SecretBase
- :param use_encrypted_endpoints: Specifies whether the data source
- endpoints are encrypted using HTTPS. The default value is true.
- :type use_encrypted_endpoints: object
- :param use_host_verification: Specifies whether to require the host name
- in the server's certificate to match the host name of the server when
- connecting over SSL. The default value is true.
- :type use_host_verification: object
- :param use_peer_verification: Specifies whether to verify the identity of
- the server when connecting over SSL. The default value is true.
- :type use_peer_verification: object
- :param encrypted_credential: The encrypted credential used for
- authentication. Credentials are encrypted using the integration runtime
- credential manager. Type: string (or Expression with resultType string).
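# A sketch of a Lookup activity reusing the InformixSource defined earlier
# in this file; the dataset name is a placeholder.
from azure.mgmt.datafactory.models import (
    DatasetReference, InformixSource, LookupActivity)

lookup = LookupActivity(
    name='LookupCustomer',
    source=InformixSource(query='SELECT FIRST 1 * FROM customer'),
    dataset=DatasetReference(reference_name='InformixCustomers'),
    first_row_only=True,
)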
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MagentoLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.access_token = kwargs.get('access_token', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Magento' - - -class MagentoObjectDataset(Dataset): - """Magento server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). 
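# Sketch of the MagentoLinkedService above with the access token supplied as
# a SecureString; the host mirrors the docstring example, the token is a
# placeholder.
from azure.mgmt.datafactory.models import MagentoLinkedService, SecureString

magento_ls = MagentoLinkedService(
    host='192.168.222.110/magento3',
    access_token=SecureString(value='<access-token>'),
    use_encrypted_endpoints=True)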
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MagentoObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'MagentoObject' - - -class MagentoSource(CopySource): - """A copy activity Magento server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MagentoSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'MagentoSource' - - -class ManagedIntegrationRuntime(IntegrationRuntime): - """Managed integration runtime, including managed elastic and managed - dedicated integration runtimes. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Integration runtime description. - :type description: str - :param type: Required. Constant filled by server. - :type type: str - :ivar state: Integration runtime state, only valid for managed dedicated - integration runtime. 
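# Sketch wiring the MagentoSource above into a copy activity; BlobSink and
# the dataset reference names are assumed placeholders.
from azure.mgmt.datafactory.models import (
    BlobSink, CopyActivity, DatasetReference, MagentoSource)

copy_orders = CopyActivity(
    name='CopyMagentoOrders',
    source=MagentoSource(query='SELECT * FROM sales_order'),
    sink=BlobSink(),
    inputs=[DatasetReference(reference_name='MagentoOrders')],
    outputs=[DatasetReference(reference_name='OrdersBlob')])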
Possible values include: 'Initial', 'Stopped', - 'Started', 'Starting', 'Stopping', 'NeedRegistration', 'Online', - 'Limited', 'Offline', 'AccessDenied' - :vartype state: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeState - :param compute_properties: The compute resource for managed integration - runtime. - :type compute_properties: - ~azure.mgmt.datafactory.models.IntegrationRuntimeComputeProperties - :param ssis_properties: SSIS properties for managed integration runtime. - :type ssis_properties: - ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisProperties - """ - - _validation = { - 'type': {'required': True}, - 'state': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'state': {'key': 'state', 'type': 'str'}, - 'compute_properties': {'key': 'typeProperties.computeProperties', 'type': 'IntegrationRuntimeComputeProperties'}, - 'ssis_properties': {'key': 'typeProperties.ssisProperties', 'type': 'IntegrationRuntimeSsisProperties'}, - } - - def __init__(self, **kwargs): - super(ManagedIntegrationRuntime, self).__init__(**kwargs) - self.state = None - self.compute_properties = kwargs.get('compute_properties', None) - self.ssis_properties = kwargs.get('ssis_properties', None) - self.type = 'Managed' - - -class ManagedIntegrationRuntimeError(Model): - """Error definition for managed integration runtime. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar time: The time when the error occurred. - :vartype time: datetime - :ivar code: Error code. - :vartype code: str - :ivar parameters: Managed integration runtime error parameters. - :vartype parameters: list[str] - :ivar message: Error message. - :vartype message: str - """ - - _validation = { - 'time': {'readonly': True}, - 'code': {'readonly': True}, - 'parameters': {'readonly': True}, - 'message': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'time': {'key': 'time', 'type': 'iso-8601'}, - 'code': {'key': 'code', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '[str]'}, - 'message': {'key': 'message', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(ManagedIntegrationRuntimeError, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.time = None - self.code = None - self.parameters = None - self.message = None - - -class ManagedIntegrationRuntimeNode(Model): - """Properties of integration runtime node. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar node_id: The managed integration runtime node id. - :vartype node_id: str - :ivar status: The managed integration runtime node status. Possible values - include: 'Starting', 'Available', 'Recycling', 'Unavailable' - :vartype status: str or - ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNodeStatus - :param errors: The errors that occurred on this integration runtime node. 
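# Sketch declaring the ManagedIntegrationRuntime above; the compute location
# is illustrative and IntegrationRuntimeComputeProperties comes from the same
# models package.
from azure.mgmt.datafactory.models import (
    IntegrationRuntimeComputeProperties, ManagedIntegrationRuntime)

managed_ir = ManagedIntegrationRuntime(
    description='Managed IR for copy workloads',
    compute_properties=IntegrationRuntimeComputeProperties(location='WestUS'))
# managed_ir.state is read-only; it stays None until the service returns it.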
- :type errors: - list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError] - """ - - _validation = { - 'node_id': {'readonly': True}, - 'status': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'node_id': {'key': 'nodeId', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'errors': {'key': 'errors', 'type': '[ManagedIntegrationRuntimeError]'}, - } - - def __init__(self, **kwargs): - super(ManagedIntegrationRuntimeNode, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.node_id = None - self.status = None - self.errors = kwargs.get('errors', None) - - -class ManagedIntegrationRuntimeOperationResult(Model): - """Properties of managed integration runtime operation result. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar type: The operation type. Could be start or stop. - :vartype type: str - :ivar start_time: The start time of the operation. - :vartype start_time: datetime - :ivar result: The operation result. - :vartype result: str - :ivar error_code: The error code. - :vartype error_code: str - :ivar parameters: Managed integration runtime error parameters. - :vartype parameters: list[str] - :ivar activity_id: The activity id for the operation request. - :vartype activity_id: str - """ - - _validation = { - 'type': {'readonly': True}, - 'start_time': {'readonly': True}, - 'result': {'readonly': True}, - 'error_code': {'readonly': True}, - 'parameters': {'readonly': True}, - 'activity_id': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, - 'result': {'key': 'result', 'type': 'str'}, - 'error_code': {'key': 'errorCode', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '[str]'}, - 'activity_id': {'key': 'activityId', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(ManagedIntegrationRuntimeOperationResult, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = None - self.start_time = None - self.result = None - self.error_code = None - self.parameters = None - self.activity_id = None - - -class ManagedIntegrationRuntimeStatus(IntegrationRuntimeStatus): - """Managed integration runtime status. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar data_factory_name: The data factory name which the integration - runtime belongs to. - :vartype data_factory_name: str - :ivar state: The state of integration runtime. Possible values include: - 'Initial', 'Stopped', 'Started', 'Starting', 'Stopping', - 'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied' - :vartype state: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeState - :param type: Required. Constant filled by server. - :type type: str - :ivar create_time: The time at which the integration runtime was created, - in ISO8601 format.
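# Sketch: node details are server-populated. Per the _validation maps above,
# a locally constructed instance exposes the read-only fields as None until a
# service response fills them in.
from azure.mgmt.datafactory.models import ManagedIntegrationRuntimeNode

node = ManagedIntegrationRuntimeNode()
assert node.node_id is None and node.status is None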
- :vartype create_time: datetime - :ivar nodes: The list of nodes for managed integration runtime. - :vartype nodes: - list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNode] - :ivar other_errors: The errors that occurred on this integration runtime. - :vartype other_errors: - list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError] - :ivar last_operation: The last operation result that occurred on this - integration runtime. - :vartype last_operation: - ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeOperationResult - """ - - _validation = { - 'data_factory_name': {'readonly': True}, - 'state': {'readonly': True}, - 'type': {'required': True}, - 'create_time': {'readonly': True}, - 'nodes': {'readonly': True}, - 'other_errors': {'readonly': True}, - 'last_operation': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, - 'state': {'key': 'state', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'create_time': {'key': 'typeProperties.createTime', 'type': 'iso-8601'}, - 'nodes': {'key': 'typeProperties.nodes', 'type': '[ManagedIntegrationRuntimeNode]'}, - 'other_errors': {'key': 'typeProperties.otherErrors', 'type': '[ManagedIntegrationRuntimeError]'}, - 'last_operation': {'key': 'typeProperties.lastOperation', 'type': 'ManagedIntegrationRuntimeOperationResult'}, - } - - def __init__(self, **kwargs): - super(ManagedIntegrationRuntimeStatus, self).__init__(**kwargs) - self.create_time = None - self.nodes = None - self.other_errors = None - self.last_operation = None - self.type = 'Managed' - - -class MariaDBLinkedService(LinkedService): - """MariaDB server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection - string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MariaDBLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.pwd = kwargs.get('pwd', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'MariaDB' - - -class MariaDBSource(CopySource): - """A copy activity MariaDB server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MariaDBSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'MariaDBSource' - - -class MariaDBTableDataset(Dataset): - """MariaDB server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
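# Sketch of the MariaDBLinkedService above pulling the password from Key
# Vault; 'AKVLinkedService' and the secret name are placeholders.
from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference, LinkedServiceReference, MariaDBLinkedService)

mariadb_ls = MariaDBLinkedService(
    connection_string='Server=db01;Port=3306;Database=sales;UID=reader;',
    pwd=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='AKVLinkedService'),
        secret_name='mariadb-password'))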
- :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MariaDBTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'MariaDBTable' - - -class MarketoLinkedService(LinkedService): - """Marketo server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param endpoint: Required. The endpoint of the Marketo server. (i.e. - 123-ABC-321.mktorest.com) - :type endpoint: object - :param client_id: Required. The client Id of your Marketo service. - :type client_id: object - :param client_secret: The client secret of your Marketo service. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. 
Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MarketoLinkedService, self).__init__(**kwargs) - self.endpoint = kwargs.get('endpoint', None) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Marketo' - - -class MarketoObjectDataset(Dataset): - """Marketo server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). 
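# Sketch of the MarketoLinkedService above; the endpoint follows the
# docstring example and the client credentials are placeholders.
from azure.mgmt.datafactory.models import MarketoLinkedService, SecureString

marketo_ls = MarketoLinkedService(
    endpoint='123-ABC-321.mktorest.com',
    client_id='<client-id>',
    client_secret=SecureString(value='<client-secret>'))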
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MarketoObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'MarketoObject' - - -class MarketoSource(CopySource): - """A copy activity Marketo server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MarketoSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'MarketoSource' - - -class MicrosoftAccessLinkedService(LinkedService): - """Microsoft Access linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. 
The non-access credential portion of - the connection string as well as an optional encrypted credential. Type: - string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param authentication_type: Type of authentication used to connect to the - Microsoft Access as ODBC data store. Possible values are: Anonymous and - Basic. Type: string (or Expression with resultType string). - :type authentication_type: object - :param credential: The access credential portion of the connection string - specified in driver-specific property-value format. - :type credential: ~azure.mgmt.datafactory.models.SecretBase - :param user_name: User name for Basic authentication. Type: string (or - Expression with resultType string). - :type user_name: object - :param password: Password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MicrosoftAccessLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.credential = kwargs.get('credential', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'MicrosoftAccess' - - -class MicrosoftAccessSink(CopySink): - """A copy activity Microsoft Access sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. 
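# Sketch of the MicrosoftAccessLinkedService above using Basic
# authentication; the DSN-style connection string and credentials are
# illustrative placeholders.
from azure.mgmt.datafactory.models import MicrosoftAccessLinkedService, SecureString

access_ls = MicrosoftAccessLinkedService(
    connection_string='DSN=AccessNorthwind',
    authentication_type='Basic',
    user_name='reader',
    password=SecureString(value='<password>'))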
Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param pre_copy_script: A query to execute before starting the copy. Type: - string (or Expression with resultType string). - :type pre_copy_script: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MicrosoftAccessSink, self).__init__(**kwargs) - self.pre_copy_script = kwargs.get('pre_copy_script', None) - self.type = 'MicrosoftAccessSink' - - -class MicrosoftAccessSource(CopySource): - """A copy activity source for Microsoft Access. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MicrosoftAccessSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'MicrosoftAccessSource' - - -class MicrosoftAccessTableDataset(Dataset): - """The Microsoft Access table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. 
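# Sketch of the MicrosoftAccessSink above with a pre-copy cleanup statement;
# the batch timeout string follows the documented d.hh:mm:ss pattern.
from azure.mgmt.datafactory.models import MicrosoftAccessSink

access_sink = MicrosoftAccessSink(
    pre_copy_script='DELETE FROM staging_orders',
    write_batch_size=500,
    write_batch_timeout='00:05:00')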
Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The Microsoft Access table name. Type: string (or - Expression with resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MicrosoftAccessTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'MicrosoftAccessTable' - - -class MongoDbCollectionDataset(Dataset): - """The MongoDB database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param collection_name: Required. The table name of the MongoDB database. - Type: string (or Expression with resultType string). 
- :type collection_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'collection_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MongoDbCollectionDataset, self).__init__(**kwargs) - self.collection_name = kwargs.get('collection_name', None) - self.type = 'MongoDbCollection' - - -class MongoDbCursorMethodsProperties(Model): - """Cursor methods for MongoDB query. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param project: Specifies the fields to return in the documents that match - the query filter. To return all fields in the matching documents, omit - this parameter. Type: string (or Expression with resultType string). - :type project: object - :param sort: Specifies the order in which the query returns matching - documents. Type: string (or Expression with resultType string). - :type sort: object - :param skip: Specifies how many documents are skipped and where MongoDB - begins returning results. This approach may be useful in implementing - paginated results. Type: integer (or Expression with resultType integer). - :type skip: object - :param limit: Specifies the maximum number of documents the server - returns. limit() is analogous to the LIMIT statement in a SQL database. - Type: integer (or Expression with resultType integer). - :type limit: object - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'project': {'key': 'project', 'type': 'object'}, - 'sort': {'key': 'sort', 'type': 'object'}, - 'skip': {'key': 'skip', 'type': 'object'}, - 'limit': {'key': 'limit', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MongoDbCursorMethodsProperties, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.project = kwargs.get('project', None) - self.sort = kwargs.get('sort', None) - self.skip = kwargs.get('skip', None) - self.limit = kwargs.get('limit', None) - - -class MongoDbLinkedService(LinkedService): - """Linked service for MongoDb data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service.
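# Sketch of the cursor properties above, mirroring a shell query of the form
# find(...).project(...).sort(...).skip(20).limit(10); the field names are
# illustrative.
from azure.mgmt.datafactory.models import MongoDbCursorMethodsProperties

cursor = MongoDbCursorMethodsProperties(
    project='{ "name": 1, "_id": 0 }',
    sort='{ "name": 1 }',
    skip=20,
    limit=10)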
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param server: Required. The IP address or server name of the MongoDB - server. Type: string (or Expression with resultType string). - :type server: object - :param authentication_type: The authentication type to be used to connect - to the MongoDB database. Possible values include: 'Basic', 'Anonymous' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.MongoDbAuthenticationType - :param database_name: Required. The name of the MongoDB database that you - want to access. Type: string (or Expression with resultType string). - :type database_name: object - :param username: Username for authentication. Type: string (or Expression - with resultType string). - :type username: object - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param auth_source: Database to verify the username and password. Type: - string (or Expression with resultType string). - :type auth_source: object - :param port: The TCP port number that the MongoDB server uses to listen - for client connections. The default value is 27017. Type: integer (or - Expression with resultType integer), minimum: 0. - :type port: object - :param enable_ssl: Specifies whether the connections to the server are - encrypted using SSL. The default value is false. Type: boolean (or - Expression with resultType boolean). - :type enable_ssl: object - :param allow_self_signed_server_cert: Specifies whether to allow - self-signed certificates from the server. The default value is false. - Type: boolean (or Expression with resultType boolean). - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'database_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'database_name': {'key': 'typeProperties.databaseName', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'auth_source': {'key': 'typeProperties.authSource', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MongoDbLinkedService, self).__init__(**kwargs) - self.server = kwargs.get('server', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.database_name = kwargs.get('database_name', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.auth_source = kwargs.get('auth_source', None) - self.port = kwargs.get('port', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'MongoDb' - - -class MongoDbSource(CopySource): - """A copy activity source for a MongoDB database. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Should be a SQL-92 query expression. Type: - string (or Expression with resultType string). 
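# Sketch of the legacy MongoDbLinkedService above with Basic authentication
# over SSL; server, database and credentials are placeholders.
from azure.mgmt.datafactory.models import MongoDbLinkedService, SecureString

mongo_ls = MongoDbLinkedService(
    server='mongo01.contoso.local',
    database_name='telemetry',
    authentication_type='Basic',
    username='reader',
    password=SecureString(value='<password>'),
    port=27017,
    enable_ssl=True)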
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MongoDbSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'MongoDbSource' - - -class MongoDbV2CollectionDataset(Dataset): - """The MongoDB database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param collection: Required. The collection name of the MongoDB database. - Type: string (or Expression with resultType string). - :type collection: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'collection': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MongoDbV2CollectionDataset, self).__init__(**kwargs) - self.collection = kwargs.get('collection', None) - self.type = 'MongoDbV2Collection' - - -class MongoDbV2LinkedService(LinkedService): - """Linked service for MongoDB data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. 
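# Sketch of the v2 collection dataset above; note that 'collection' is
# required, unlike the optional tableName on the relational datasets. The
# reference names are placeholders.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, MongoDbV2CollectionDataset)

events_ds = MongoDbV2CollectionDataset(
    linked_service_name=LinkedServiceReference(reference_name='MongoV2LS'),
    collection='events')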
- :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The MongoDB connection string. Type: - string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param database: Required. The name of the MongoDB database that you want - to access. Type: string (or Expression with resultType string). - :type database: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - 'database': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MongoDbV2LinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.database = kwargs.get('database', None) - self.type = 'MongoDbV2' - - -class MongoDbV2Source(CopySource): - """A copy activity source for a MongoDB database. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param filter: Specifies selection filter using query operators. To return - all documents in a collection, omit this parameter or pass an empty - document ({}). Type: string (or Expression with resultType string). - :type filter: object - :param cursor_methods: Cursor methods for MongoDB query - :type cursor_methods: - ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties - :param batch_size: Specifies the number of documents to return in each - batch of the response from the MongoDB instance. In most cases, modifying - the batch size will not affect the user or the application. This - property's main purpose is to avoid hitting the limitation of response - size. Type: integer (or Expression with resultType integer).
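# Sketch of the MongoDbV2LinkedService above: it is driven entirely by a
# connection string plus a database name. The URI is a placeholder.
from azure.mgmt.datafactory.models import MongoDbV2LinkedService

mongo_v2_ls = MongoDbV2LinkedService(
    connection_string='mongodb://mongo01.contoso.local:27017',
    database='telemetry')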
- :type batch_size: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'filter': {'key': 'filter', 'type': 'object'}, - 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, - 'batch_size': {'key': 'batchSize', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MongoDbV2Source, self).__init__(**kwargs) - self.filter = kwargs.get('filter', None) - self.cursor_methods = kwargs.get('cursor_methods', None) - self.batch_size = kwargs.get('batch_size', None) - self.type = 'MongoDbV2Source' - - -class MySqlLinkedService(LinkedService): - """Linked service for MySQL data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The connection string. - :type connection_string: ~azure.mgmt.datafactory.models.SecretBase - :param password: The Azure key vault secret reference of password in - connection string. - :type password: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'SecretBase'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MySqlLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'MySql' - - -class MySqlSource(CopySource): - """A copy activity source for MySQL databases. - - All required parameters must be populated in order to send to Azure. 
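# Sketch of the MongoDbV2Source above, combining a filter document with the
# cursor methods model; an empty filter ({}) would return every document.
from azure.mgmt.datafactory.models import (
    MongoDbCursorMethodsProperties, MongoDbV2Source)

mongo_src = MongoDbV2Source(
    filter='{ "status": "active" }',
    cursor_methods=MongoDbCursorMethodsProperties(limit=1000),
    batch_size=100)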
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MySqlSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'MySqlSource' - - -class MySqlTableDataset(Dataset): - """The MySQL table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The MySQL table name. Type: string (or Expression with - resultType string). 
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(MySqlTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'MySqlTable' - - -class NetezzaLinkedService(LinkedService): - """Netezza linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection - string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(NetezzaLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.pwd = kwargs.get('pwd', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Netezza' - - -class NetezzaPartitionSettings(Model): - """The settings that will be leveraged for Netezza source partitioning. 
- - :param partition_column_name: The name of the column in integer type that - will be used for proceeding range partitioning. Type: string (or - Expression with resultType string). - :type partition_column_name: object - :param partition_upper_bound: The maximum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_upper_bound: object - :param partition_lower_bound: The minimum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_lower_bound: object - """ - - _attribute_map = { - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, - 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, - 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(NetezzaPartitionSettings, self).__init__(**kwargs) - self.partition_column_name = kwargs.get('partition_column_name', None) - self.partition_upper_bound = kwargs.get('partition_upper_bound', None) - self.partition_lower_bound = kwargs.get('partition_lower_bound', None) - - -class NetezzaSource(CopySource): - """A copy activity Netezza source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - :param partition_option: The partition mechanism that will be used for - Netezza read in parallel. Possible values include: 'None', 'DataSlice', - 'DynamicRange' - :type partition_option: str or - ~azure.mgmt.datafactory.models.NetezzaPartitionOption - :param partition_settings: The settings that will be leveraged for Netezza - source partitioning. 
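
These partition settings pair with the partitionOption on the Netezza source. A rough sketch of a dynamic-range parallel read, with an illustrative query, column, and bounds:

    from azure.mgmt.datafactory.models import NetezzaPartitionSettings, NetezzaSource

    netezza_src = NetezzaSource(
        query='SELECT * FROM SALES.ORDERS',
        partition_option='DynamicRange',  # possible values: 'None', 'DataSlice', 'DynamicRange'
        partition_settings=NetezzaPartitionSettings(
            partition_column_name='ORDER_ID',  # integer column used for range partitioning
            partition_lower_bound='1',
            partition_upper_bound='10000000'))
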
- :type partition_settings: - ~azure.mgmt.datafactory.models.NetezzaPartitionSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'}, - } - - def __init__(self, **kwargs): - super(NetezzaSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.partition_option = kwargs.get('partition_option', None) - self.partition_settings = kwargs.get('partition_settings', None) - self.type = 'NetezzaSource' - - -class NetezzaTableDataset(Dataset): - """Netezza dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of the Netezza. Type: string (or Expression - with resultType string). - :type table: object - :param netezza_table_dataset_schema: The schema name of the Netezza. Type: - string (or Expression with resultType string). 
- :type netezza_table_dataset_schema: object
- """
-
- _validation = {
- 'linked_service_name': {'required': True},
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'description': {'key': 'description', 'type': 'str'},
- 'structure': {'key': 'structure', 'type': 'object'},
- 'schema': {'key': 'schema', 'type': 'object'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
- 'type': {'key': 'type', 'type': 'str'},
- 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
- 'table': {'key': 'typeProperties.table', 'type': 'object'},
- 'netezza_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
- }
-
- def __init__(self, **kwargs):
- super(NetezzaTableDataset, self).__init__(**kwargs)
- self.table_name = kwargs.get('table_name', None)
- self.table = kwargs.get('table', None)
- self.netezza_table_dataset_schema = kwargs.get('netezza_table_dataset_schema', None)
- self.type = 'NetezzaTable'
-
-
- class ODataLinkedService(LinkedService):
- """Open Data Protocol (OData) linked service.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection.
- :type additional_properties: dict[str, object]
- :param connect_via: The integration runtime reference.
- :type connect_via:
- ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
- :param description: Linked service description.
- :type description: str
- :param parameters: Parameters for linked service.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- linked service.
- :type annotations: list[object]
- :param type: Required. Constant filled by server.
- :type type: str
- :param url: Required. The URL of the OData service endpoint. Type: string
- (or Expression with resultType string).
- :type url: object
- :param authentication_type: Type of authentication used to connect to the
- OData service. Possible values include: 'Basic', 'Anonymous', 'Windows',
- 'AadServicePrincipal', 'ManagedServiceIdentity'
- :type authentication_type: str or
- ~azure.mgmt.datafactory.models.ODataAuthenticationType
- :param user_name: User name of the OData service. Type: string (or
- Expression with resultType string).
- :type user_name: object
- :param password: Password of the OData service.
- :type password: ~azure.mgmt.datafactory.models.SecretBase
- :param tenant: Specify the tenant information (domain name or tenant ID)
- under which your application resides. Type: string (or Expression with
- resultType string).
- :type tenant: object
- :param service_principal_id: Specify the application ID of your
- application registered in Azure Active Directory. Type: string (or
- Expression with resultType string).
- :type service_principal_id: object
- :param aad_resource_id: Specify the resource you are requesting
- authorization to use. Type: string (or Expression with resultType
- string).
- :type aad_resource_id: object
- :param aad_service_principal_credential_type: Specify the credential type
- (key or cert) used for the service principal. Possible values include:
- 'ServicePrincipalKey', 'ServicePrincipalCert'
- :type aad_service_principal_credential_type: str or
- ~azure.mgmt.datafactory.models.ODataAadServicePrincipalCredentialType
- :param service_principal_key: Specify the secret of your application
- registered in Azure Active Directory. Type: string (or Expression with
- resultType string).
- :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
- :param service_principal_embedded_cert: Specify the base64 encoded
- certificate of your application registered in Azure Active Directory.
- Type: string (or Expression with resultType string).
- :type service_principal_embedded_cert:
- ~azure.mgmt.datafactory.models.SecretBase
- :param service_principal_embedded_cert_password: Specify the password of
- your certificate if your certificate has a password and you are using
- AadServicePrincipal authentication. Type: string (or Expression with
- resultType string).
- :type service_principal_embedded_cert_password:
- ~azure.mgmt.datafactory.models.SecretBase
- :param encrypted_credential: The encrypted credential used for
- authentication. Credentials are encrypted using the integration runtime
- credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object
- """
-
- _validation = {
- 'type': {'required': True},
- 'url': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
- 'description': {'key': 'description', 'type': 'str'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'url': {'key': 'typeProperties.url', 'type': 'object'},
- 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
- 'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
- 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
- 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
- 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
- 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'},
- 'aad_service_principal_credential_type': {'key': 'typeProperties.aadServicePrincipalCredentialType', 'type': 'str'},
- 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
- 'service_principal_embedded_cert': {'key': 'typeProperties.servicePrincipalEmbeddedCert', 'type': 'SecretBase'},
- 'service_principal_embedded_cert_password': {'key': 'typeProperties.servicePrincipalEmbeddedCertPassword', 'type': 'SecretBase'},
- 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
- }
-
- def __init__(self, **kwargs):
- super(ODataLinkedService, self).__init__(**kwargs)
- self.url = kwargs.get('url', None)
- self.authentication_type = kwargs.get('authentication_type', None)
- self.user_name = kwargs.get('user_name', None)
- self.password = kwargs.get('password', None)
- self.tenant = kwargs.get('tenant', None)
- self.service_principal_id = kwargs.get('service_principal_id', None)
- self.aad_resource_id = kwargs.get('aad_resource_id', None)
- self.aad_service_principal_credential_type = kwargs.get('aad_service_principal_credential_type', None)
- self.service_principal_key = kwargs.get('service_principal_key', None)
- self.service_principal_embedded_cert = kwargs.get('service_principal_embedded_cert', None)
- self.service_principal_embedded_cert_password = kwargs.get('service_principal_embedded_cert_password', None)
- self.encrypted_credential = kwargs.get('encrypted_credential', None)
- self.type = 'OData'
-
-
- class ODataResourceDataset(Dataset):
- """The Open Data Protocol (OData) resource dataset.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection.
- :type additional_properties: dict[str, object]
- :param description: Dataset description.
- :type description: str
- :param structure: Columns that define the structure of the dataset. Type:
- array (or Expression with resultType array), itemType: DatasetDataElement.
- :type structure: object
- :param schema: Columns that define the physical type schema of the
- dataset. Type: array (or Expression with resultType array), itemType:
- DatasetSchemaDataElement.
- :type schema: object
- :param linked_service_name: Required. Linked service reference.
- :type linked_service_name:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param parameters: Parameters for dataset.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- Dataset.
- :type annotations: list[object]
- :param folder: The folder that this Dataset is in. If not specified,
- Dataset will appear at the root level.
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
- :param type: Required. Constant filled by server.
- :type type: str
- :param path: The OData resource path. Type: string (or Expression with
- resultType string).
- :type path: object
- """
-
- _validation = {
- 'linked_service_name': {'required': True},
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'description': {'key': 'description', 'type': 'str'},
- 'structure': {'key': 'structure', 'type': 'object'},
- 'schema': {'key': 'schema', 'type': 'object'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
- 'type': {'key': 'type', 'type': 'str'},
- 'path': {'key': 'typeProperties.path', 'type': 'object'},
- }
-
- def __init__(self, **kwargs):
- super(ODataResourceDataset, self).__init__(**kwargs)
- self.path = kwargs.get('path', None)
- self.type = 'ODataResource'
-
-
- class ODataSource(CopySource):
- """A copy activity OData source.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection.
- :type additional_properties: dict[str, object]
- :param source_retry_count: Source retry count. Type: integer (or
- Expression with resultType integer).
- :type source_retry_count: object
- :param source_retry_wait: Source retry wait. Type: string (or Expression
- with resultType string), pattern:
- ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type source_retry_wait: object
- :param max_concurrent_connections: The maximum concurrent connection count
- for the source data store. Type: integer (or Expression with resultType
- integer).
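
A sketch of the AadServicePrincipal path through ODataLinkedService, using the key (rather than embedded-cert) credential type; the feed URL, tenant, and app ID are placeholders:

    from azure.mgmt.datafactory.models import ODataLinkedService, SecureString

    odata_ls = ODataLinkedService(
        url='https://services.odata.org/V4/Northwind/Northwind.svc',  # illustrative feed
        authentication_type='AadServicePrincipal',
        tenant='contoso.onmicrosoft.com',
        service_principal_id='00000000-0000-0000-0000-000000000000',
        aad_service_principal_credential_type='ServicePrincipalKey',
        service_principal_key=SecureString(value='<app-secret>'))
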
- :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: OData query. For example, "$top=1". Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ODataSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'ODataSource' - - -class OdbcLinkedService(LinkedService): - """Open Database Connectivity (ODBC) linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The non-access credential portion of - the connection string as well as an optional encrypted credential. Type: - string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param authentication_type: Type of authentication used to connect to the - ODBC data store. Possible values are: Anonymous and Basic. Type: string - (or Expression with resultType string). - :type authentication_type: object - :param credential: The access credential portion of the connection string - specified in driver-specific property-value format. - :type credential: ~azure.mgmt.datafactory.models.SecretBase - :param user_name: User name for Basic authentication. Type: string (or - Expression with resultType string). - :type user_name: object - :param password: Password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(OdbcLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.credential = kwargs.get('credential', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Odbc' - - -class OdbcSink(CopySink): - """A copy activity ODBC sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param pre_copy_script: A query to execute before starting the copy. Type: - string (or Expression with resultType string). 
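
OdbcLinkedService keeps credentials out of the connection string. A Basic-auth sketch paired with a sink pre-copy script; the DSN, user, and script are illustrative:

    from azure.mgmt.datafactory.models import OdbcLinkedService, OdbcSink, SecureString

    odbc_ls = OdbcLinkedService(
        connection_string='DSN=legacydw',  # non-credential portion only
        authentication_type='Basic',
        user_name='loader',
        password=SecureString(value='<password>'))

    odbc_sink = OdbcSink(
        pre_copy_script='TRUNCATE TABLE staging_orders',  # runs once before the copy
        write_batch_size=5000)
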
- :type pre_copy_script: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(OdbcSink, self).__init__(**kwargs) - self.pre_copy_script = kwargs.get('pre_copy_script', None) - self.type = 'OdbcSink' - - -class OdbcSource(CopySource): - """A copy activity source for ODBC databases. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(OdbcSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'OdbcSource' - - -class OdbcTableDataset(Dataset): - """The ODBC table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The ODBC table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(OdbcTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'OdbcTable' - - -class Office365Dataset(Dataset): - """The Office365 account. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: Required. Name of the dataset to extract from Office - 365. Type: string (or Expression with resultType string). - :type table_name: object - :param predicate: A predicate expression that can be used to filter the - specific rows to extract from Office 365. Type: string (or Expression with - resultType string). 
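
For orientation, an Office 365 dataset referencing a hypothetical Office365 linked service; the table name follows Microsoft Graph data connect naming but should be treated as illustrative:

    from azure.mgmt.datafactory.models import LinkedServiceReference, Office365Dataset

    o365_messages = Office365Dataset(
        linked_service_name=LinkedServiceReference(reference_name='Office365LS'),
        table_name='BasicDataSet_v0.Message_v0',  # illustrative dataset name
        predicate='CreatedDateTime >= 2019-01-01T00:00:00Z')  # optional row filter
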
- :type predicate: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'table_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'predicate': {'key': 'typeProperties.predicate', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(Office365Dataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.predicate = kwargs.get('predicate', None) - self.type = 'Office365Table' - - -class Office365LinkedService(LinkedService): - """Office365 linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param office365_tenant_id: Required. Azure tenant ID to which the Office - 365 account belongs. Type: string (or Expression with resultType string). - :type office365_tenant_id: object - :param service_principal_tenant_id: Required. Specify the tenant - information under which your Azure AD web application resides. Type: - string (or Expression with resultType string). - :type service_principal_tenant_id: object - :param service_principal_id: Required. Specify the application's client - ID. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: Required. Specify the application's key. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object
- """
-
- _validation = {
- 'type': {'required': True},
- 'office365_tenant_id': {'required': True},
- 'service_principal_tenant_id': {'required': True},
- 'service_principal_id': {'required': True},
- 'service_principal_key': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
- 'description': {'key': 'description', 'type': 'str'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'office365_tenant_id': {'key': 'typeProperties.office365TenantId', 'type': 'object'},
- 'service_principal_tenant_id': {'key': 'typeProperties.servicePrincipalTenantId', 'type': 'object'},
- 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
- 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
- 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
- }
-
- def __init__(self, **kwargs):
- super(Office365LinkedService, self).__init__(**kwargs)
- self.office365_tenant_id = kwargs.get('office365_tenant_id', None)
- self.service_principal_tenant_id = kwargs.get('service_principal_tenant_id', None)
- self.service_principal_id = kwargs.get('service_principal_id', None)
- self.service_principal_key = kwargs.get('service_principal_key', None)
- self.encrypted_credential = kwargs.get('encrypted_credential', None)
- self.type = 'Office365'
-
-
- class Office365Source(CopySource):
- """A copy activity source for an Office365 service.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection.
- :type additional_properties: dict[str, object]
- :param source_retry_count: Source retry count. Type: integer (or
- Expression with resultType integer).
- :type source_retry_count: object
- :param source_retry_wait: Source retry wait. Type: string (or Expression
- with resultType string), pattern:
- ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type source_retry_wait: object
- :param max_concurrent_connections: The maximum concurrent connection count
- for the source data store. Type: integer (or Expression with resultType
- integer).
- :type max_concurrent_connections: object
- :param type: Required. Constant filled by server.
- :type type: str
- :param allowed_groups: The groups containing all the users. Type: array of
- strings (or Expression with resultType array of strings).
- :type allowed_groups: object
- :param user_scope_filter_uri: The user scope URI. Type: string (or
- Expression with resultType string).
- :type user_scope_filter_uri: object
- :param date_filter_column: The column on which to apply the start and end
- time filters. Type: string (or Expression with resultType string).
- :type date_filter_column: object
- :param start_time: Start time of the requested range for this dataset.
- Type: string (or Expression with resultType string).
- :type start_time: object
- :param end_time: End time of the requested range for this dataset. Type:
- string (or Expression with resultType string).
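
And a matching source, windowed on the date filter column; the column name, time range, and group ID are illustrative:

    from azure.mgmt.datafactory.models import Office365Source

    o365_src = Office365Source(
        date_filter_column='CreatedDateTime',
        start_time='2019-01-01T00:00:00Z',
        end_time='2019-02-01T00:00:00Z',
        allowed_groups=['<group-object-id>'])  # restrict extraction to these groups
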
- :type end_time: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'allowed_groups': {'key': 'allowedGroups', 'type': 'object'}, - 'user_scope_filter_uri': {'key': 'userScopeFilterUri', 'type': 'object'}, - 'date_filter_column': {'key': 'dateFilterColumn', 'type': 'object'}, - 'start_time': {'key': 'startTime', 'type': 'object'}, - 'end_time': {'key': 'endTime', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(Office365Source, self).__init__(**kwargs) - self.allowed_groups = kwargs.get('allowed_groups', None) - self.user_scope_filter_uri = kwargs.get('user_scope_filter_uri', None) - self.date_filter_column = kwargs.get('date_filter_column', None) - self.start_time = kwargs.get('start_time', None) - self.end_time = kwargs.get('end_time', None) - self.type = 'Office365Source' - - -class Operation(Model): - """Azure Data Factory API operation definition. - - :param name: Operation name: {provider}/{resource}/{operation} - :type name: str - :param origin: The intended executor of the operation. - :type origin: str - :param display: Metadata associated with the operation. - :type display: ~azure.mgmt.datafactory.models.OperationDisplay - :param service_specification: Details about a service operation. - :type service_specification: - ~azure.mgmt.datafactory.models.OperationServiceSpecification - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'origin': {'key': 'origin', 'type': 'str'}, - 'display': {'key': 'display', 'type': 'OperationDisplay'}, - 'service_specification': {'key': 'properties.serviceSpecification', 'type': 'OperationServiceSpecification'}, - } - - def __init__(self, **kwargs): - super(Operation, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.origin = kwargs.get('origin', None) - self.display = kwargs.get('display', None) - self.service_specification = kwargs.get('service_specification', None) - - -class OperationDisplay(Model): - """Metadata associated with the operation. - - :param description: The description of the operation. - :type description: str - :param provider: The name of the provider. - :type provider: str - :param resource: The name of the resource type on which the operation is - performed. - :type resource: str - :param operation: The type of operation: get, read, delete, etc. - :type operation: str - """ - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'provider': {'key': 'provider', 'type': 'str'}, - 'resource': {'key': 'resource', 'type': 'str'}, - 'operation': {'key': 'operation', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(OperationDisplay, self).__init__(**kwargs) - self.description = kwargs.get('description', None) - self.provider = kwargs.get('provider', None) - self.resource = kwargs.get('resource', None) - self.operation = kwargs.get('operation', None) - - -class OperationLogSpecification(Model): - """Details about an operation related to logs. - - :param name: The name of the log category. - :type name: str - :param display_name: Localized display name. - :type display_name: str - :param blob_duration: Blobs created in the customer storage account, per - hour. 
- :type blob_duration: str - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'blob_duration': {'key': 'blobDuration', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(OperationLogSpecification, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.display_name = kwargs.get('display_name', None) - self.blob_duration = kwargs.get('blob_duration', None) - - -class OperationMetricAvailability(Model): - """Defines how often data for a metric becomes available. - - :param time_grain: The granularity for the metric. - :type time_grain: str - :param blob_duration: Blob created in the customer storage account, per - hour. - :type blob_duration: str - """ - - _attribute_map = { - 'time_grain': {'key': 'timeGrain', 'type': 'str'}, - 'blob_duration': {'key': 'blobDuration', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(OperationMetricAvailability, self).__init__(**kwargs) - self.time_grain = kwargs.get('time_grain', None) - self.blob_duration = kwargs.get('blob_duration', None) - - -class OperationMetricDimension(Model): - """Defines the metric dimension. - - :param name: The name of the dimension for the metric. - :type name: str - :param display_name: The display name of the metric dimension. - :type display_name: str - :param to_be_exported_for_shoebox: Whether the dimension should be - exported to Azure Monitor. - :type to_be_exported_for_shoebox: bool - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'to_be_exported_for_shoebox': {'key': 'toBeExportedForShoebox', 'type': 'bool'}, - } - - def __init__(self, **kwargs): - super(OperationMetricDimension, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.display_name = kwargs.get('display_name', None) - self.to_be_exported_for_shoebox = kwargs.get('to_be_exported_for_shoebox', None) - - -class OperationMetricSpecification(Model): - """Details about an operation related to metrics. - - :param name: The name of the metric. - :type name: str - :param display_name: Localized display name of the metric. - :type display_name: str - :param display_description: The description of the metric. - :type display_description: str - :param unit: The unit that the metric is measured in. - :type unit: str - :param aggregation_type: The type of metric aggregation. - :type aggregation_type: str - :param enable_regional_mdm_account: Whether or not the service is using - regional MDM accounts. - :type enable_regional_mdm_account: str - :param source_mdm_account: The name of the MDM account. - :type source_mdm_account: str - :param source_mdm_namespace: The name of the MDM namespace. - :type source_mdm_namespace: str - :param availabilities: Defines how often data for metrics becomes - available. - :type availabilities: - list[~azure.mgmt.datafactory.models.OperationMetricAvailability] - :param dimensions: Defines the metric dimension. 
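
These Operation* models are normally consumed read-only from the operations listing rather than constructed by hand. Roughly, assuming an authenticated DataFactoryManagementClient named client:

    # client: azure.mgmt.datafactory.DataFactoryManagementClient (assumed, already authenticated)
    for op in client.operations.list():
        # op.name looks like '{provider}/{resource}/{operation}'
        print(op.name, op.display.operation if op.display else '')
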
- :type dimensions: - list[~azure.mgmt.datafactory.models.OperationMetricDimension] - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'display_description': {'key': 'displayDescription', 'type': 'str'}, - 'unit': {'key': 'unit', 'type': 'str'}, - 'aggregation_type': {'key': 'aggregationType', 'type': 'str'}, - 'enable_regional_mdm_account': {'key': 'enableRegionalMdmAccount', 'type': 'str'}, - 'source_mdm_account': {'key': 'sourceMdmAccount', 'type': 'str'}, - 'source_mdm_namespace': {'key': 'sourceMdmNamespace', 'type': 'str'}, - 'availabilities': {'key': 'availabilities', 'type': '[OperationMetricAvailability]'}, - 'dimensions': {'key': 'dimensions', 'type': '[OperationMetricDimension]'}, - } - - def __init__(self, **kwargs): - super(OperationMetricSpecification, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.display_name = kwargs.get('display_name', None) - self.display_description = kwargs.get('display_description', None) - self.unit = kwargs.get('unit', None) - self.aggregation_type = kwargs.get('aggregation_type', None) - self.enable_regional_mdm_account = kwargs.get('enable_regional_mdm_account', None) - self.source_mdm_account = kwargs.get('source_mdm_account', None) - self.source_mdm_namespace = kwargs.get('source_mdm_namespace', None) - self.availabilities = kwargs.get('availabilities', None) - self.dimensions = kwargs.get('dimensions', None) - - -class OperationServiceSpecification(Model): - """Details about a service operation. - - :param log_specifications: Details about operations related to logs. - :type log_specifications: - list[~azure.mgmt.datafactory.models.OperationLogSpecification] - :param metric_specifications: Details about operations related to metrics. - :type metric_specifications: - list[~azure.mgmt.datafactory.models.OperationMetricSpecification] - """ - - _attribute_map = { - 'log_specifications': {'key': 'logSpecifications', 'type': '[OperationLogSpecification]'}, - 'metric_specifications': {'key': 'metricSpecifications', 'type': '[OperationMetricSpecification]'}, - } - - def __init__(self, **kwargs): - super(OperationServiceSpecification, self).__init__(**kwargs) - self.log_specifications = kwargs.get('log_specifications', None) - self.metric_specifications = kwargs.get('metric_specifications', None) - - -class OracleLinkedService(LinkedService): - """Oracle database. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in - connection string. 
- :type password: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(OracleLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Oracle' - - -class OraclePartitionSettings(Model): - """The settings that will be leveraged for Oracle source partitioning. - - :param partition_names: Names of the physical partitions of Oracle table. - :type partition_names: object - :param partition_column_name: The name of the column in integer type that - will be used for proceeding range partitioning. Type: string (or - Expression with resultType string). - :type partition_column_name: object - :param partition_upper_bound: The maximum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_upper_bound: object - :param partition_lower_bound: The minimum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_lower_bound: object - """ - - _attribute_map = { - 'partition_names': {'key': 'partitionNames', 'type': 'object'}, - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, - 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, - 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(OraclePartitionSettings, self).__init__(**kwargs) - self.partition_names = kwargs.get('partition_names', None) - self.partition_column_name = kwargs.get('partition_column_name', None) - self.partition_upper_bound = kwargs.get('partition_upper_bound', None) - self.partition_lower_bound = kwargs.get('partition_lower_bound', None) - - -class OracleServiceCloudLinkedService(LinkedService): - """Oracle Service Cloud linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. 
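
A construction sketch for the Oracle linked service plus physical-partition settings; the host, credentials placeholder, and partition names are illustrative:

    from azure.mgmt.datafactory.models import OracleLinkedService, OraclePartitionSettings

    oracle_ls = OracleLinkedService(
        connection_string='Host=ora01;Port=1521;ServiceName=orcl;User Id=reader;Password=<pwd>')

    oracle_parts = OraclePartitionSettings(
        partition_names=['P2019Q1', 'P2019Q2'])  # physical partitions of the source table
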
- :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The URL of the Oracle Service Cloud instance. - :type host: object - :param username: Required. The user name that you use to access Oracle - Service Cloud server. - :type username: object - :param password: Required. The password corresponding to the user name - that you provided in the username key. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. Type: - boolean (or Expression with resultType boolean). - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. Type: boolean (or - Expression with resultType boolean). - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. Type: - boolean (or Expression with resultType boolean). - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'username': {'required': True}, - 'password': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(OracleServiceCloudLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'OracleServiceCloud' - - -class OracleServiceCloudObjectDataset(Dataset): - """Oracle Service Cloud dataset. 
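
Oracle Service Cloud authenticates with host, username, and password; a sketch with an illustrative instance URL and user:

    from azure.mgmt.datafactory.models import (
        OracleServiceCloudLinkedService, SecureString)

    osc_ls = OracleServiceCloudLinkedService(
        host='https://contoso.rightnowdemo.example',  # illustrative instance URL
        username='integration.user',
        password=SecureString(value='<password>'),
        use_encrypted_endpoints=True)  # HTTPS endpoints; defaults to true
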
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(OracleServiceCloudObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'OracleServiceCloudObject' - - -class OracleServiceCloudSource(CopySource): - """A copy activity Oracle Service Cloud source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(OracleServiceCloudSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'OracleServiceCloudSource' - - -class OracleSink(CopySink): - """A copy activity Oracle sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression - with resultType string). - :type pre_copy_script: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(OracleSink, self).__init__(**kwargs) - self.pre_copy_script = kwargs.get('pre_copy_script', None) - self.type = 'OracleSink' - - -class OracleSource(CopySource): - """A copy activity Oracle source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param oracle_reader_query: Oracle reader query. Type: string (or - Expression with resultType string). - :type oracle_reader_query: object - :param query_timeout: Query timeout. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param partition_option: The partition mechanism that will be used for - Oracle read in parallel. Possible values include: 'None', - 'PhysicalPartitionsOfTable', 'DynamicRange' - :type partition_option: str or - ~azure.mgmt.datafactory.models.OraclePartitionOption - :param partition_settings: The settings that will be leveraged for Oracle - source partitioning. - :type partition_settings: - ~azure.mgmt.datafactory.models.OraclePartitionSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, - } - - def __init__(self, **kwargs): - super(OracleSource, self).__init__(**kwargs) - self.oracle_reader_query = kwargs.get('oracle_reader_query', None) - self.query_timeout = kwargs.get('query_timeout', None) - self.partition_option = kwargs.get('partition_option', None) - self.partition_settings = kwargs.get('partition_settings', None) - self.type = 'OracleSource' - - -class OracleTableDataset(Dataset): - """The on-premises Oracle database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. 
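# --- Editor's note: illustrative usage, not part of this patch --------------
# A sketch of an OracleSource reading in parallel with dynamic-range
# partitioning, combining the OracleSource and OraclePartitionSettings models
# above. The query, column name, and bounds are hypothetical.
from azure.mgmt.datafactory.models import OraclePartitionSettings, OracleSource

oracle_source = OracleSource(
    oracle_reader_query='SELECT * FROM SALES',
    partition_option='DynamicRange',
    partition_settings=OraclePartitionSettings(
        partition_column_name='SALE_ID',
        partition_lower_bound='1',
        partition_upper_bound='1000000',
    ),
)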
- :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param oracle_table_dataset_schema: The schema name of the on-premises - Oracle database. Type: string (or Expression with resultType string). - :type oracle_table_dataset_schema: object - :param table: The table name of the on-premises Oracle database. Type: - string (or Expression with resultType string). - :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'oracle_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(OracleTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.oracle_table_dataset_schema = kwargs.get('oracle_table_dataset_schema', None) - self.table = kwargs.get('table', None) - self.type = 'OracleTable' - - -class OrcFormat(DatasetStorageFormat): - """The data stored in Optimized Row Columnar (ORC) format. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param serializer: Serializer. Type: string (or Expression with resultType - string). - :type serializer: object - :param deserializer: Deserializer. Type: string (or Expression with - resultType string). - :type deserializer: object - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(OrcFormat, self).__init__(**kwargs) - self.type = 'OrcFormat' - - -class ParameterSpecification(Model): - """Definition of a single parameter for an entity. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Parameter type. Possible values include: 'Object', - 'String', 'Int', 'Float', 'Bool', 'Array', 'SecureString' - :type type: str or ~azure.mgmt.datafactory.models.ParameterType - :param default_value: Default value of parameter. 
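# --- Editor's note: illustrative usage, not part of this patch --------------
# Because tableName is being retired, a sketch of an OracleTableDataset that
# uses the separate schema + table properties instead. The linked service
# name and identifiers are hypothetical.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, OracleTableDataset)

oracle_table = OracleTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='OracleLS1'),
    oracle_table_dataset_schema='SCOTT',
    table='EMP',
)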
- :type default_value: object
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'type': {'key': 'type', 'type': 'str'},
- 'default_value': {'key': 'defaultValue', 'type': 'object'},
- }
-
- def __init__(self, **kwargs):
- super(ParameterSpecification, self).__init__(**kwargs)
- self.type = kwargs.get('type', None)
- self.default_value = kwargs.get('default_value', None)
-
-
- class ParquetDataset(Dataset):
- """Parquet dataset.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param description: Dataset description.
- :type description: str
- :param structure: Columns that define the structure of the dataset. Type:
- array (or Expression with resultType array), itemType: DatasetDataElement.
- :type structure: object
- :param schema: Columns that define the physical type schema of the
- dataset. Type: array (or Expression with resultType array), itemType:
- DatasetSchemaDataElement.
- :type schema: object
- :param linked_service_name: Required. Linked service reference.
- :type linked_service_name:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param parameters: Parameters for dataset.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- Dataset.
- :type annotations: list[object]
- :param folder: The folder that this Dataset is in. If not specified,
- Dataset will appear at the root level.
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
- :param type: Required. Constant filled by server.
- :type type: str
- :param location: Required. The location of the Parquet storage.
- :type location: ~azure.mgmt.datafactory.models.DatasetLocation
- :param compression_codec: The data compression codec to use. Type: string
- (or Expression with resultType string).
- :type compression_codec: object
- """
-
- _validation = {
- 'linked_service_name': {'required': True},
- 'type': {'required': True},
- 'location': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'description': {'key': 'description', 'type': 'str'},
- 'structure': {'key': 'structure', 'type': 'object'},
- 'schema': {'key': 'schema', 'type': 'object'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
- 'type': {'key': 'type', 'type': 'str'},
- 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'},
- 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'},
- }
-
- def __init__(self, **kwargs):
- super(ParquetDataset, self).__init__(**kwargs)
- self.location = kwargs.get('location', None)
- self.compression_codec = kwargs.get('compression_codec', None)
- self.type = 'Parquet'
-
-
- class ParquetFormat(DatasetStorageFormat):
- """The data stored in Parquet format.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param serializer: Serializer. Type: string (or Expression with resultType
- string).
- :type serializer: object
- :param deserializer: Deserializer.
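# --- Editor's note: illustrative usage, not part of this patch --------------
# A sketch of the ParquetDataset above. In this API version the storage
# location is carried by a DatasetLocation (type + folderPath + fileName);
# the 'AzureBlobFSLocation' type string, linked service name, and paths are
# assumptions for illustration only.
from azure.mgmt.datafactory.models import (
    DatasetLocation, LinkedServiceReference, ParquetDataset)

parquet_ds = ParquetDataset(
    linked_service_name=LinkedServiceReference(reference_name='AdlsGen2LS1'),
    location=DatasetLocation(
        type='AzureBlobFSLocation',
        folder_path='curated/sales',
        file_name='sales.parquet',
    ),
    compression_codec='snappy',
)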
Type: string (or Expression with - resultType string). - :type deserializer: object - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(ParquetFormat, self).__init__(**kwargs) - self.type = 'ParquetFormat' - - -class ParquetSink(CopySink): - """A copy activity Parquet sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: Parquet store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, - } - - def __init__(self, **kwargs): - super(ParquetSink, self).__init__(**kwargs) - self.store_settings = kwargs.get('store_settings', None) - self.type = 'ParquetSink' - - -class ParquetSource(CopySource): - """A copy activity Parquet source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. 
Type: integer (or Expression with resultType
- integer).
- :type max_concurrent_connections: object
- :param type: Required. Constant filled by server.
- :type type: str
- :param store_settings: Parquet store settings.
- :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
- 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
- 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
- 'type': {'key': 'type', 'type': 'str'},
- 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
- }
-
- def __init__(self, **kwargs):
- super(ParquetSource, self).__init__(**kwargs)
- self.store_settings = kwargs.get('store_settings', None)
- self.type = 'ParquetSource'
-
-
- class PaypalLinkedService(LinkedService):
- """Paypal Service linked service.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param connect_via: The integration runtime reference.
- :type connect_via:
- ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
- :param description: Linked service description.
- :type description: str
- :param parameters: Parameters for linked service.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- linked service.
- :type annotations: list[object]
- :param type: Required. Constant filled by server.
- :type type: str
- :param host: Required. The URL of the PayPal instance. (e.g.
- api.sandbox.paypal.com)
- :type host: object
- :param client_id: Required. The client ID associated with your PayPal
- application.
- :type client_id: object
- :param client_secret: The client secret associated with your PayPal
- application.
- :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
- :param use_encrypted_endpoints: Specifies whether the data source
- endpoints are encrypted using HTTPS. The default value is true.
- :type use_encrypted_endpoints: object
- :param use_host_verification: Specifies whether to require the host name
- in the server's certificate to match the host name of the server when
- connecting over SSL. The default value is true.
- :type use_host_verification: object
- :param use_peer_verification: Specifies whether to verify the identity of
- the server when connecting over SSL. The default value is true.
- :type use_peer_verification: object
- :param encrypted_credential: The encrypted credential used for
- authentication. Credentials are encrypted using the integration runtime
- credential manager. Type: string (or Expression with resultType string).
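# --- Editor's note: illustrative usage, not part of this patch --------------
# A sketch of a copy activity wiring the ParquetSource/ParquetSink pair above
# between two datasets. The activity and dataset names are hypothetical.
from azure.mgmt.datafactory.models import (
    CopyActivity, DatasetReference, ParquetSink, ParquetSource)

copy_parquet = CopyActivity(
    name='CopyParquet',
    inputs=[DatasetReference(reference_name='ParquetInput')],
    outputs=[DatasetReference(reference_name='ParquetOutput')],
    source=ParquetSource(),
    sink=ParquetSink(write_batch_size=10000),
)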
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(PaypalLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Paypal' - - -class PaypalObjectDataset(Dataset): - """Paypal Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). 
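# --- Editor's note: illustrative usage, not part of this patch --------------
# A sketch of the PaypalLinkedService above against the PayPal sandbox, with
# the client secret passed as an inline SecureString. Credentials shown are
# placeholders.
from azure.mgmt.datafactory.models import PaypalLinkedService, SecureString

paypal_ls = PaypalLinkedService(
    host='api.sandbox.paypal.com',
    client_id='my-client-id',
    client_secret=SecureString(value='<client-secret>'),
    use_encrypted_endpoints=True,
)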
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(PaypalObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'PaypalObject' - - -class PaypalSource(CopySource): - """A copy activity Paypal Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(PaypalSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'PaypalSource' - - -class PhoenixLinkedService(LinkedService): - """Phoenix server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The IP address or host name of the Phoenix server. 
- (e.g. 192.168.222.160)
- :type host: object
- :param port: The TCP port that the Phoenix server uses to listen for
- client connections. The default value is 8765.
- :type port: object
- :param http_path: The partial URL corresponding to the Phoenix server.
- (e.g. /gateway/sandbox/phoenix/version). The default value is hbasephoenix
- if using WindowsAzureHDInsightService.
- :type http_path: object
- :param authentication_type: Required. The authentication mechanism used to
- connect to the Phoenix server. Possible values include: 'Anonymous',
- 'UsernameAndPassword', 'WindowsAzureHDInsightService'
- :type authentication_type: str or
- ~azure.mgmt.datafactory.models.PhoenixAuthenticationType
- :param username: The user name used to connect to the Phoenix server.
- :type username: object
- :param password: The password corresponding to the user name.
- :type password: ~azure.mgmt.datafactory.models.SecretBase
- :param enable_ssl: Specifies whether the connections to the server are
- encrypted using SSL. The default value is false.
- :type enable_ssl: object
- :param trusted_cert_path: The full path of the .pem file containing
- trusted CA certificates for verifying the server when connecting over SSL.
- This property can only be set when using SSL on self-hosted IR. The
- default value is the cacerts.pem file installed with the IR.
- :type trusted_cert_path: object
- :param use_system_trust_store: Specifies whether to use a CA certificate
- from the system trust store or from a specified PEM file. The default
- value is false.
- :type use_system_trust_store: object
- :param allow_host_name_cn_mismatch: Specifies whether to require a
- CA-issued SSL certificate name to match the host name of the server when
- connecting over SSL. The default value is false.
- :type allow_host_name_cn_mismatch: object
- :param allow_self_signed_server_cert: Specifies whether to allow
- self-signed certificates from the server. The default value is false.
- :type allow_self_signed_server_cert: object
- :param encrypted_credential: The encrypted credential used for
- authentication. Credentials are encrypted using the integration runtime
- credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(PhoenixLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.port = kwargs.get('port', None) - self.http_path = kwargs.get('http_path', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Phoenix' - - -class PhoenixObjectDataset(Dataset): - """Phoenix server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. 
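# --- Editor's note: illustrative usage, not part of this patch --------------
# A sketch of the PhoenixLinkedService above targeting Phoenix on an
# HDInsight cluster, where the docstring notes the default HTTP path is
# 'hbasephoenix'. The host and credentials are placeholders.
from azure.mgmt.datafactory.models import PhoenixLinkedService, SecureString

phoenix_ls = PhoenixLinkedService(
    host='mycluster.azurehdinsight.net',
    port=443,
    authentication_type='WindowsAzureHDInsightService',
    username='admin',
    password=SecureString(value='<password>'),
    enable_ssl=True,
)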
- :type annotations: list[object]
- :param folder: The folder that this Dataset is in. If not specified,
- Dataset will appear at the root level.
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
- :param type: Required. Constant filled by server.
- :type type: str
- :param table_name: This property will be retired. Please consider using
- schema + table properties instead.
- :type table_name: object
- :param table: The table name of the Phoenix server. Type: string (or
- Expression with resultType string).
- :type table: object
- :param phoenix_object_dataset_schema: The schema name of the Phoenix
- server. Type: string (or Expression with resultType string).
- :type phoenix_object_dataset_schema: object
- """
-
- _validation = {
- 'linked_service_name': {'required': True},
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'description': {'key': 'description', 'type': 'str'},
- 'structure': {'key': 'structure', 'type': 'object'},
- 'schema': {'key': 'schema', 'type': 'object'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
- 'type': {'key': 'type', 'type': 'str'},
- 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
- 'table': {'key': 'typeProperties.table', 'type': 'object'},
- 'phoenix_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
- }
-
- def __init__(self, **kwargs):
- super(PhoenixObjectDataset, self).__init__(**kwargs)
- self.table_name = kwargs.get('table_name', None)
- self.table = kwargs.get('table', None)
- self.phoenix_object_dataset_schema = kwargs.get('phoenix_object_dataset_schema', None)
- self.type = 'PhoenixObject'
-
-
- class PhoenixSource(CopySource):
- """A copy activity Phoenix server source.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param source_retry_count: Source retry count. Type: integer (or
- Expression with resultType integer).
- :type source_retry_count: object
- :param source_retry_wait: Source retry wait. Type: string (or Expression
- with resultType string), pattern:
- ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type source_retry_wait: object
- :param max_concurrent_connections: The maximum concurrent connection count
- for the source data store. Type: integer (or Expression with resultType
- integer).
- :type max_concurrent_connections: object
- :param type: Required. Constant filled by server.
- :type type: str
- :param query: A query to retrieve data from source. Type: string (or
- Expression with resultType string).
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(PhoenixSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'PhoenixSource' - - -class PipelineFolder(Model): - """The folder that this Pipeline is in. If not specified, Pipeline will appear - at the root level. - - :param name: The name of the folder that this Pipeline is in. - :type name: str - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(PipelineFolder, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - - -class PipelineReference(Model): - """Pipeline reference type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Pipeline reference type. Default value: - "PipelineReference" . - :vartype type: str - :param reference_name: Required. Reference pipeline name. - :type reference_name: str - :param name: Reference name. - :type name: str - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - } - - type = "PipelineReference" - - def __init__(self, **kwargs): - super(PipelineReference, self).__init__(**kwargs) - self.reference_name = kwargs.get('reference_name', None) - self.name = kwargs.get('name', None) - - -class PipelineResource(SubResource): - """Pipeline resource type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: The description of the pipeline. - :type description: str - :param activities: List of activities in pipeline. - :type activities: list[~azure.mgmt.datafactory.models.Activity] - :param parameters: List of parameters for pipeline. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param variables: List of variables for pipeline. - :type variables: dict[str, - ~azure.mgmt.datafactory.models.VariableSpecification] - :param concurrency: The max number of concurrent runs for the pipeline. - :type concurrency: int - :param annotations: List of tags that can be used for describing the - Pipeline. - :type annotations: list[object] - :param run_dimensions: Dimensions emitted by Pipeline. - :type run_dimensions: dict[str, object] - :param folder: The folder that this Pipeline is in. If not specified, - Pipeline will appear at the root level. 
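# --- Editor's note: illustrative usage, not part of this patch --------------
# A sketch of PipelineReference in use: pointing a trigger at a pipeline by
# name, assuming the TriggerPipelineReference model from this package. The
# pipeline name and parameter are hypothetical.
from azure.mgmt.datafactory.models import (
    PipelineReference, TriggerPipelineReference)

trigger_target = TriggerPipelineReference(
    pipeline_reference=PipelineReference(reference_name='DailyLoad'),
    parameters={'window': '2019-06-07'},
)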
- :type folder: ~azure.mgmt.datafactory.models.PipelineFolder
- """
-
- _validation = {
- 'id': {'readonly': True},
- 'name': {'readonly': True},
- 'type': {'readonly': True},
- 'etag': {'readonly': True},
- 'concurrency': {'minimum': 1},
- }
-
- _attribute_map = {
- 'id': {'key': 'id', 'type': 'str'},
- 'name': {'key': 'name', 'type': 'str'},
- 'type': {'key': 'type', 'type': 'str'},
- 'etag': {'key': 'etag', 'type': 'str'},
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'description': {'key': 'properties.description', 'type': 'str'},
- 'activities': {'key': 'properties.activities', 'type': '[Activity]'},
- 'parameters': {'key': 'properties.parameters', 'type': '{ParameterSpecification}'},
- 'variables': {'key': 'properties.variables', 'type': '{VariableSpecification}'},
- 'concurrency': {'key': 'properties.concurrency', 'type': 'int'},
- 'annotations': {'key': 'properties.annotations', 'type': '[object]'},
- 'run_dimensions': {'key': 'properties.runDimensions', 'type': '{object}'},
- 'folder': {'key': 'properties.folder', 'type': 'PipelineFolder'},
- }
-
- def __init__(self, **kwargs):
- super(PipelineResource, self).__init__(**kwargs)
- self.additional_properties = kwargs.get('additional_properties', None)
- self.description = kwargs.get('description', None)
- self.activities = kwargs.get('activities', None)
- self.parameters = kwargs.get('parameters', None)
- self.variables = kwargs.get('variables', None)
- self.concurrency = kwargs.get('concurrency', None)
- self.annotations = kwargs.get('annotations', None)
- self.run_dimensions = kwargs.get('run_dimensions', None)
- self.folder = kwargs.get('folder', None)
-
-
- class PipelineRun(Model):
- """Information about a pipeline run.
-
- Variables are only populated by the server, and will be ignored when
- sending a request.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :ivar run_id: Identifier of a run.
- :vartype run_id: str
- :ivar run_group_id: Identifier that correlates all the recovery runs of a
- pipeline run.
- :vartype run_group_id: str
- :ivar is_latest: Indicates if the recovered pipeline run is the latest in
- its group.
- :vartype is_latest: bool
- :ivar pipeline_name: The pipeline name.
- :vartype pipeline_name: str
- :ivar parameters: The full or partial list of parameter name-value pairs
- used in the pipeline run.
- :vartype parameters: dict[str, str]
- :ivar run_dimensions: Run dimensions emitted by Pipeline run.
- :vartype run_dimensions: dict[str, str]
- :ivar invoked_by: Entity that started the pipeline run.
- :vartype invoked_by: ~azure.mgmt.datafactory.models.PipelineRunInvokedBy
- :ivar last_updated: The last updated timestamp for the pipeline run event
- in ISO8601 format.
- :vartype last_updated: datetime
- :ivar run_start: The start time of a pipeline run in ISO8601 format.
- :vartype run_start: datetime
- :ivar run_end: The end time of a pipeline run in ISO8601 format.
- :vartype run_end: datetime
- :ivar duration_in_ms: The duration of a pipeline run.
- :vartype duration_in_ms: int
- :ivar status: The status of a pipeline run.
- :vartype status: str
- :ivar message: The message from a pipeline run.
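# --- Editor's note: illustrative usage, not part of this patch --------------
# A sketch of assembling a PipelineResource and publishing it, assuming
# 'client' is a DataFactoryManagementClient and the resource names are
# hypothetical. A WaitActivity stands in for real activities.
from azure.mgmt.datafactory.models import (
    ParameterSpecification, PipelineFolder, PipelineResource, WaitActivity)

pipeline = PipelineResource(
    description='Example pipeline',
    activities=[WaitActivity(name='Wait1', wait_time_in_seconds=5)],
    parameters={'window': ParameterSpecification(type='String')},
    concurrency=1,
    folder=PipelineFolder(name='ingestion'),
)
# client.pipelines.create_or_update('my-rg', 'my-factory', 'DailyLoad', pipeline)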
- :vartype message: str
- """
-
- _validation = {
- 'run_id': {'readonly': True},
- 'run_group_id': {'readonly': True},
- 'is_latest': {'readonly': True},
- 'pipeline_name': {'readonly': True},
- 'parameters': {'readonly': True},
- 'run_dimensions': {'readonly': True},
- 'invoked_by': {'readonly': True},
- 'last_updated': {'readonly': True},
- 'run_start': {'readonly': True},
- 'run_end': {'readonly': True},
- 'duration_in_ms': {'readonly': True},
- 'status': {'readonly': True},
- 'message': {'readonly': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'run_id': {'key': 'runId', 'type': 'str'},
- 'run_group_id': {'key': 'runGroupId', 'type': 'str'},
- 'is_latest': {'key': 'isLatest', 'type': 'bool'},
- 'pipeline_name': {'key': 'pipelineName', 'type': 'str'},
- 'parameters': {'key': 'parameters', 'type': '{str}'},
- 'run_dimensions': {'key': 'runDimensions', 'type': '{str}'},
- 'invoked_by': {'key': 'invokedBy', 'type': 'PipelineRunInvokedBy'},
- 'last_updated': {'key': 'lastUpdated', 'type': 'iso-8601'},
- 'run_start': {'key': 'runStart', 'type': 'iso-8601'},
- 'run_end': {'key': 'runEnd', 'type': 'iso-8601'},
- 'duration_in_ms': {'key': 'durationInMs', 'type': 'int'},
- 'status': {'key': 'status', 'type': 'str'},
- 'message': {'key': 'message', 'type': 'str'},
- }
-
- def __init__(self, **kwargs):
- super(PipelineRun, self).__init__(**kwargs)
- self.additional_properties = kwargs.get('additional_properties', None)
- self.run_id = None
- self.run_group_id = None
- self.is_latest = None
- self.pipeline_name = None
- self.parameters = None
- self.run_dimensions = None
- self.invoked_by = None
- self.last_updated = None
- self.run_start = None
- self.run_end = None
- self.duration_in_ms = None
- self.status = None
- self.message = None
-
-
- class PipelineRunInvokedBy(Model):
- """Provides entity name and id that started the pipeline run.
-
- Variables are only populated by the server, and will be ignored when
- sending a request.
-
- :ivar name: Name of the entity that started the pipeline run.
- :vartype name: str
- :ivar id: The ID of the entity that started the run.
- :vartype id: str
- :ivar invoked_by_type: The type of the entity that started the run.
- :vartype invoked_by_type: str
- """
-
- _validation = {
- 'name': {'readonly': True},
- 'id': {'readonly': True},
- 'invoked_by_type': {'readonly': True},
- }
-
- _attribute_map = {
- 'name': {'key': 'name', 'type': 'str'},
- 'id': {'key': 'id', 'type': 'str'},
- 'invoked_by_type': {'key': 'invokedByType', 'type': 'str'},
- }
-
- def __init__(self, **kwargs):
- super(PipelineRunInvokedBy, self).__init__(**kwargs)
- self.name = None
- self.id = None
- self.invoked_by_type = None
-
-
- class PipelineRunsQueryResponse(Model):
- """A list of pipeline runs.
-
- All required parameters must be populated in order to send to Azure.
-
- :param value: Required. List of pipeline runs.
- :type value: list[~azure.mgmt.datafactory.models.PipelineRun]
- :param continuation_token: The continuation token for getting the next
- page of results, if any remaining results exist, null otherwise.
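# --- Editor's note: illustrative usage, not part of this patch --------------
# A sketch of paging through pipeline runs with the continuation token,
# assuming 'client' is a DataFactoryManagementClient and RunFilterParameters
# from this package. Resource names are hypothetical.
from datetime import datetime, timedelta
from azure.mgmt.datafactory.models import RunFilterParameters

filters = RunFilterParameters(
    last_updated_after=datetime.utcnow() - timedelta(days=1),
    last_updated_before=datetime.utcnow(),
)
runs = client.pipeline_runs.query_by_factory('my-rg', 'my-factory', filters)
for run in runs.value:
    print(run.pipeline_name, run.status, run.duration_in_ms)
if runs.continuation_token:  # request the next page, if any remain
    filters.continuation_token = runs.continuation_token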
- :type continuation_token: str
- """
-
- _validation = {
- 'value': {'required': True},
- }
-
- _attribute_map = {
- 'value': {'key': 'value', 'type': '[PipelineRun]'},
- 'continuation_token': {'key': 'continuationToken', 'type': 'str'},
- }
-
- def __init__(self, **kwargs):
- super(PipelineRunsQueryResponse, self).__init__(**kwargs)
- self.value = kwargs.get('value', None)
- self.continuation_token = kwargs.get('continuation_token', None)
-
-
- class PolybaseSettings(Model):
- """PolyBase settings.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param reject_type: Reject type. Possible values include: 'value',
- 'percentage'
- :type reject_type: str or
- ~azure.mgmt.datafactory.models.PolybaseSettingsRejectType
- :param reject_value: Specifies the value or the percentage of rows that
- can be rejected before the query fails. Type: number (or Expression with
- resultType number), minimum: 0.
- :type reject_value: object
- :param reject_sample_value: Determines the number of rows to attempt to
- retrieve before PolyBase recalculates the percentage of rejected rows.
- Type: integer (or Expression with resultType integer), minimum: 0.
- :type reject_sample_value: object
- :param use_type_default: Specifies how to handle missing values in
- delimited text files when PolyBase retrieves data from the text file.
- Type: boolean (or Expression with resultType boolean).
- :type use_type_default: object
- """
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'reject_type': {'key': 'rejectType', 'type': 'str'},
- 'reject_value': {'key': 'rejectValue', 'type': 'object'},
- 'reject_sample_value': {'key': 'rejectSampleValue', 'type': 'object'},
- 'use_type_default': {'key': 'useTypeDefault', 'type': 'object'},
- }
-
- def __init__(self, **kwargs):
- super(PolybaseSettings, self).__init__(**kwargs)
- self.additional_properties = kwargs.get('additional_properties', None)
- self.reject_type = kwargs.get('reject_type', None)
- self.reject_value = kwargs.get('reject_value', None)
- self.reject_sample_value = kwargs.get('reject_sample_value', None)
- self.use_type_default = kwargs.get('use_type_default', None)
-
-
- class PostgreSqlLinkedService(LinkedService):
- """Linked service for PostgreSQL data source.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param connect_via: The integration runtime reference.
- :type connect_via:
- ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
- :param description: Linked service description.
- :type description: str
- :param parameters: Parameters for linked service.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- linked service.
- :type annotations: list[object]
- :param type: Required. Constant filled by server.
- :type type: str
- :param connection_string: Required. The connection string.
- :type connection_string: ~azure.mgmt.datafactory.models.SecretBase
- :param password: The Azure key vault secret reference of password in
- connection string.
- :type password:
- ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
- :param encrypted_credential: The encrypted credential used for
- authentication.
Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'SecretBase'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(PostgreSqlLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'PostgreSql' - - -class PostgreSqlSource(CopySource): - """A copy activity source for PostgreSQL databases. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(PostgreSqlSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'PostgreSqlSource' - - -class PostgreSqlTableDataset(Dataset): - """The PostgreSQL table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. 
Type: array (or Expression with resultType array), itemType:
- DatasetSchemaDataElement.
- :type schema: object
- :param linked_service_name: Required. Linked service reference.
- :type linked_service_name:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param parameters: Parameters for dataset.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- Dataset.
- :type annotations: list[object]
- :param folder: The folder that this Dataset is in. If not specified,
- Dataset will appear at the root level.
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
- :param type: Required. Constant filled by server.
- :type type: str
- :param table_name: This property will be retired. Please consider using
- schema + table properties instead.
- :type table_name: object
- :param table: The PostgreSQL table name. Type: string (or Expression with
- resultType string).
- :type table: object
- :param postgre_sql_table_dataset_schema: The PostgreSQL schema name. Type:
- string (or Expression with resultType string).
- :type postgre_sql_table_dataset_schema: object
- """
-
- _validation = {
- 'linked_service_name': {'required': True},
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'description': {'key': 'description', 'type': 'str'},
- 'structure': {'key': 'structure', 'type': 'object'},
- 'schema': {'key': 'schema', 'type': 'object'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
- 'type': {'key': 'type', 'type': 'str'},
- 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
- 'table': {'key': 'typeProperties.table', 'type': 'object'},
- 'postgre_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
- }
-
- def __init__(self, **kwargs):
- super(PostgreSqlTableDataset, self).__init__(**kwargs)
- self.table_name = kwargs.get('table_name', None)
- self.table = kwargs.get('table', None)
- self.postgre_sql_table_dataset_schema = kwargs.get('postgre_sql_table_dataset_schema', None)
- self.type = 'PostgreSqlTable'
-
-
- class PrestoLinkedService(LinkedService):
- """Presto server linked service.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param connect_via: The integration runtime reference.
- :type connect_via:
- ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
- :param description: Linked service description.
- :type description: str
- :param parameters: Parameters for linked service.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- linked service.
- :type annotations: list[object]
- :param type: Required. Constant filled by server.
- :type type: str
- :param host: Required. The IP address or host name of the Presto server.
- (e.g. 192.168.222.160)
- :type host: object
- :param server_version: Required. The version of the Presto server. (e.g.
- 0.148-t)
- :type server_version: object
- :param catalog: Required.
The catalog context for all requests against the - server. - :type catalog: object - :param port: The TCP port that the Presto server uses to listen for client - connections. The default value is 8080. - :type port: object - :param authentication_type: Required. The authentication mechanism used to - connect to the Presto server. Possible values include: 'Anonymous', 'LDAP' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.PrestoAuthenticationType - :param username: The user name used to connect to the Presto server. - :type username: object - :param password: The password corresponding to the user name. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are - encrypted using SSL. The default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing - trusted CA certificates for verifying the server when connecting over SSL. - This property can only be set when using SSL on self-hosted IR. The - default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate - from the system trust store or from a specified PEM file. The default - value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a - CA-issued SSL certificate name to match the host name of the server when - connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow - self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object - :param time_zone_id: The local time zone used by the connection. Valid - values for this option are specified in the IANA Time Zone Database. The - default value is the system time zone. - :type time_zone_id: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'server_version': {'required': True}, - 'catalog': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'server_version': {'key': 'typeProperties.serverVersion', 'type': 'object'}, - 'catalog': {'key': 'typeProperties.catalog', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'time_zone_id': {'key': 'typeProperties.timeZoneID', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(PrestoLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.server_version = kwargs.get('server_version', None) - self.catalog = kwargs.get('catalog', None) - self.port = kwargs.get('port', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) - self.time_zone_id = kwargs.get('time_zone_id', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Presto' - - -class PrestoObjectDataset(Dataset): - """Presto server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
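For the PrestoLinkedService just defined, a minimal construction sketch (SecureString is assumed from this package; the host, catalog, and credentials are placeholders):

# Hedged sketch; all values are placeholders.
from azure.mgmt.datafactory.models import PrestoLinkedService, SecureString

presto_linked_service = PrestoLinkedService(
    host='192.168.222.160',      # IP address or host name, per the docstring
    server_version='0.148-t',
    catalog='hive',
    authentication_type='LDAP',  # or 'Anonymous'
    username='etl_user',
    password=SecureString(value='<password>'),
    enable_ssl=True,             # defaults to false when omitted
)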
- :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of the Presto. Type: string (or Expression - with resultType string). - :type table: object - :param presto_object_dataset_schema: The schema name of the Presto. Type: - string (or Expression with resultType string). - :type presto_object_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'presto_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(PrestoObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.presto_object_dataset_schema = kwargs.get('presto_object_dataset_schema', None) - self.type = 'PrestoObject' - - -class PrestoSource(CopySource): - """A copy activity Presto server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(PrestoSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'PrestoSource' - - -class QuickBooksLinkedService(LinkedService): - """QuickBooks server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param endpoint: Required. The endpoint of the QuickBooks server. (i.e. - quickbooks.api.intuit.com) - :type endpoint: object - :param company_id: Required. The company ID of the QuickBooks company to - authorize. - :type company_id: object - :param consumer_key: Required. The consumer key for OAuth 1.0 - authentication. - :type consumer_key: object - :param consumer_secret: Required. The consumer secret for OAuth 1.0 - authentication. - :type consumer_secret: ~azure.mgmt.datafactory.models.SecretBase - :param access_token: Required. The access token for OAuth 1.0 - authentication. - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param access_token_secret: Required. The access token secret for OAuth - 1.0 authentication. - :type access_token_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'company_id': {'required': True}, - 'consumer_key': {'required': True}, - 'consumer_secret': {'required': True}, - 'access_token': {'required': True}, - 'access_token_secret': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'company_id': {'key': 'typeProperties.companyId', 'type': 'object'}, - 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'object'}, - 'consumer_secret': {'key': 'typeProperties.consumerSecret', 'type': 'SecretBase'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'access_token_secret': {'key': 'typeProperties.accessTokenSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(QuickBooksLinkedService, self).__init__(**kwargs) - self.endpoint = kwargs.get('endpoint', None) - self.company_id = kwargs.get('company_id', None) - self.consumer_key = kwargs.get('consumer_key', None) - self.consumer_secret = kwargs.get('consumer_secret', None) - self.access_token = kwargs.get('access_token', None) - self.access_token_secret = kwargs.get('access_token_secret', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'QuickBooks' - - -class QuickBooksObjectDataset(Dataset): - """QuickBooks server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). 
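A construction sketch for the QuickBooksLinkedService above, showing its six required type properties (SecureString is assumed from this package; all values are placeholders):

# Hedged sketch; all values are placeholders.
from azure.mgmt.datafactory.models import QuickBooksLinkedService, SecureString

quickbooks_linked_service = QuickBooksLinkedService(
    endpoint='quickbooks.api.intuit.com',
    company_id='<company-id>',
    consumer_key='<consumer-key>',
    consumer_secret=SecureString(value='<consumer-secret>'),
    access_token=SecureString(value='<access-token>'),
    access_token_secret=SecureString(value='<access-token-secret>'),
)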
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(QuickBooksObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'QuickBooksObject' - - -class QuickBooksSource(CopySource): - """A copy activity QuickBooks server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(QuickBooksSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'QuickBooksSource' - - -class RecurrenceSchedule(Model): - """The recurrence schedule. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param minutes: The minutes. - :type minutes: list[int] - :param hours: The hours. - :type hours: list[int] - :param week_days: The days of the week. - :type week_days: list[str or ~azure.mgmt.datafactory.models.DaysOfWeek] - :param month_days: The month days. - :type month_days: list[int] - :param monthly_occurrences: The monthly occurrences. 
- :type monthly_occurrences: - list[~azure.mgmt.datafactory.models.RecurrenceScheduleOccurrence] - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'minutes': {'key': 'minutes', 'type': '[int]'}, - 'hours': {'key': 'hours', 'type': '[int]'}, - 'week_days': {'key': 'weekDays', 'type': '[DaysOfWeek]'}, - 'month_days': {'key': 'monthDays', 'type': '[int]'}, - 'monthly_occurrences': {'key': 'monthlyOccurrences', 'type': '[RecurrenceScheduleOccurrence]'}, - } - - def __init__(self, **kwargs): - super(RecurrenceSchedule, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.minutes = kwargs.get('minutes', None) - self.hours = kwargs.get('hours', None) - self.week_days = kwargs.get('week_days', None) - self.month_days = kwargs.get('month_days', None) - self.monthly_occurrences = kwargs.get('monthly_occurrences', None) - - - class RecurrenceScheduleOccurrence(Model): - """The recurrence schedule occurrence. - - :param additional_properties: Unmatched properties from the message are - deserialized to this collection - :type additional_properties: dict[str, object] - :param day: The day of the week. Possible values include: 'Sunday', - 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday' - :type day: str or ~azure.mgmt.datafactory.models.DayOfWeek - :param occurrence: The occurrence. - :type occurrence: int - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'day': {'key': 'day', 'type': 'DayOfWeek'}, - 'occurrence': {'key': 'occurrence', 'type': 'int'}, - } - - def __init__(self, **kwargs): - super(RecurrenceScheduleOccurrence, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.day = kwargs.get('day', None) - self.occurrence = kwargs.get('occurrence', None) - - - class RedirectIncompatibleRowSettings(Model): - """Redirect incompatible row settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized to this collection - :type additional_properties: dict[str, object] - :param linked_service_name: Required. Name of the Azure Storage, Storage - SAS, or Azure Data Lake Store linked service used for redirecting - incompatible rows. Must be specified if redirectIncompatibleRowSettings is - specified. Type: string (or Expression with resultType string). - :type linked_service_name: object - :param path: The path for storing the redirected incompatible row data. - Type: string (or Expression with resultType string). - :type path: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'object'}, - 'path': {'key': 'path', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(RedirectIncompatibleRowSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.linked_service_name = kwargs.get('linked_service_name', None) - self.path = kwargs.get('path', None) - - - class RedshiftUnloadSettings(Model): - """The Amazon S3 settings needed for the interim Amazon S3 when copying from - Amazon Redshift with unload. With this, data from the Amazon Redshift source - will be unloaded into S3 first and then copied into the targeted sink from - the interim S3. 
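The two recurrence models completed above compose as follows; a minimal sketch of a schedule firing at 06:00 on the first Monday of each month (the schedule values are illustrative):

# Hedged sketch; the schedule values are illustrative.
from azure.mgmt.datafactory.models import (
    RecurrenceSchedule, RecurrenceScheduleOccurrence)

schedule = RecurrenceSchedule(
    hours=[6],
    minutes=[0],
    monthly_occurrences=[
        RecurrenceScheduleOccurrence(day='Monday', occurrence=1),
    ],
)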
- - All required parameters must be populated in order to send to Azure. - - :param s3_linked_service_name: Required. The name of the Amazon S3 linked - service which will be used for the unload operation when copying from the - Amazon Redshift source. - :type s3_linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param bucket_name: Required. The bucket of the interim Amazon S3 which - will be used to store the unloaded data from Amazon Redshift source. The - bucket must be in the same region as the Amazon Redshift source. Type: - string (or Expression with resultType string). - :type bucket_name: object - """ - - _validation = { - 's3_linked_service_name': {'required': True}, - 'bucket_name': {'required': True}, - } - - _attribute_map = { - 's3_linked_service_name': {'key': 's3LinkedServiceName', 'type': 'LinkedServiceReference'}, - 'bucket_name': {'key': 'bucketName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(RedshiftUnloadSettings, self).__init__(**kwargs) - self.s3_linked_service_name = kwargs.get('s3_linked_service_name', None) - self.bucket_name = kwargs.get('bucket_name', None) - - -class RelationalSource(CopySource): - """A copy activity source for various relational databases. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(RelationalSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'RelationalSource' - - -class RelationalTableDataset(Dataset): - """The relational table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. 
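Both RedshiftUnloadSettings fields are required; a minimal sketch wiring it to an interim S3 linked service (LinkedServiceReference is assumed from this same package; the linked service and bucket names are placeholders):

# Hedged sketch; the linked service and bucket names are placeholders.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, RedshiftUnloadSettings)

unload_settings = RedshiftUnloadSettings(
    s3_linked_service_name=LinkedServiceReference(
        reference_name='InterimS3LinkedService'),
    bucket_name='redshift-unload-staging',  # must be in the Redshift region
)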
Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The relational table name. Type: string (or Expression - with resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(RelationalTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'RelationalTable' - - -class RerunTriggerResource(SubResource): - """RerunTrigger resource type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Properties of the rerun trigger. - :type properties: - ~azure.mgmt.datafactory.models.RerunTumblingWindowTrigger - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'RerunTumblingWindowTrigger'}, - } - - def __init__(self, **kwargs): - super(RerunTriggerResource, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - - -class RerunTumblingWindowTrigger(Trigger): - """Trigger that schedules pipeline reruns for all fixed time interval windows - from a requested start time to requested end time. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when - Start/Stop APIs are called on the Trigger. Possible values include: - 'Started', 'Stopped', 'Disabled' - :vartype runtime_state: str or - ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the - trigger. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param parent_trigger: The parent trigger reference. - :type parent_trigger: object - :param requested_start_time: Required. The start time for the time period - for which restatement is initiated. Only UTC time is currently supported. - :type requested_start_time: datetime - :param requested_end_time: Required. The end time for the time period for - which restatement is initiated. Only UTC time is currently supported. - :type requested_end_time: datetime - :param max_concurrency: Required. The max number of parallel time windows - (ready for execution) for which a rerun is triggered. - :type max_concurrency: int - """ - - _validation = { - 'runtime_state': {'readonly': True}, - 'type': {'required': True}, - 'requested_start_time': {'required': True}, - 'requested_end_time': {'required': True}, - 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, - 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, - 'requested_end_time': {'key': 'typeProperties.requestedEndTime', 'type': 'iso-8601'}, - 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, - } - - def __init__(self, **kwargs): - super(RerunTumblingWindowTrigger, self).__init__(**kwargs) - self.parent_trigger = kwargs.get('parent_trigger', None) - self.requested_start_time = kwargs.get('requested_start_time', None) - self.requested_end_time = kwargs.get('requested_end_time', None) - self.max_concurrency = kwargs.get('max_concurrency', None) - self.type = 'RerunTumblingWindowTrigger' - - -class RerunTumblingWindowTriggerActionParameters(Model): - """Rerun tumbling window trigger Parameters. - - All required parameters must be populated in order to send to Azure. - - :param start_time: Required. The start time for the time period for which - restatement is initiated. Only UTC time is currently supported. - :type start_time: datetime - :param end_time: Required. The end time for the time period for which - restatement is initiated. Only UTC time is currently supported. - :type end_time: datetime - :param max_concurrency: Required. The max number of parallel time windows - (ready for execution) for which a rerun is triggered. 
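A minimal sketch of the RerunTumblingWindowTrigger defined above; note that only UTC times are supported and max_concurrency is validated to the 1-50 range (the restatement window is illustrative):

# Hedged sketch; the restatement window is illustrative.
from datetime import datetime
from azure.mgmt.datafactory.models import RerunTumblingWindowTrigger

rerun_trigger = RerunTumblingWindowTrigger(
    requested_start_time=datetime(2019, 6, 1),  # UTC
    requested_end_time=datetime(2019, 6, 7),    # UTC
    max_concurrency=10,  # must fall in the 1-50 range enforced above
)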
- :type max_concurrency: int - """ - - _validation = { - 'start_time': {'required': True}, - 'end_time': {'required': True}, - 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, - } - - _attribute_map = { - 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, - 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, - 'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'}, - } - - def __init__(self, **kwargs): - super(RerunTumblingWindowTriggerActionParameters, self).__init__(**kwargs) - self.start_time = kwargs.get('start_time', None) - self.end_time = kwargs.get('end_time', None) - self.max_concurrency = kwargs.get('max_concurrency', None) - - -class ResponsysLinkedService(LinkedService): - """Responsys linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param endpoint: Required. The endpoint of the Responsys server. - :type endpoint: object - :param client_id: Required. The client ID associated with the Responsys - application. Type: string (or Expression with resultType string). - :type client_id: object - :param client_secret: The client secret associated with the Responsys - application. Type: string (or Expression with resultType string). - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. Type: - boolean (or Expression with resultType boolean). - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. Type: boolean (or - Expression with resultType boolean). - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. Type: - boolean (or Expression with resultType boolean). - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ResponsysLinkedService, self).__init__(**kwargs) - self.endpoint = kwargs.get('endpoint', None) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Responsys' - - -class ResponsysObjectDataset(Dataset): - """Responsys dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). 
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ResponsysObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'ResponsysObject' - - -class ResponsysSource(CopySource): - """A copy activity Responsys source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ResponsysSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'ResponsysSource' - - -class RestResourceDataset(Dataset): - """A Rest service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param relative_url: The relative URL to the resource that the RESTful API - provides. Type: string (or Expression with resultType string). - :type relative_url: object - :param request_method: The HTTP method used to call the RESTful API. The - default is GET. Type: string (or Expression with resultType string). - :type request_method: object - :param request_body: The HTTP request body to the RESTful API if - requestMethod is POST. Type: string (or Expression with resultType - string). - :type request_body: object - :param additional_headers: The additional HTTP headers in the request to - the RESTful API. Type: string (or Expression with resultType string). - :type additional_headers: object - :param pagination_rules: The pagination rules to compose next page - requests. Type: string (or Expression with resultType string). - :type pagination_rules: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, - 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, - 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, - 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, - 'pagination_rules': {'key': 'typeProperties.paginationRules', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(RestResourceDataset, self).__init__(**kwargs) - self.relative_url = kwargs.get('relative_url', None) - self.request_method = kwargs.get('request_method', None) - self.request_body = kwargs.get('request_body', None) - self.additional_headers = kwargs.get('additional_headers', None) - self.pagination_rules = kwargs.get('pagination_rules', None) - self.type = 'RestResource' - - -class RestServiceLinkedService(LinkedService): - """Rest Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
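A minimal sketch of the RestResourceDataset defined above (LinkedServiceReference is assumed from this same package; the linked service name and relative URL are placeholders):

# Hedged sketch; the names and URL are placeholders.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, RestResourceDataset)

rest_dataset = RestResourceDataset(
    linked_service_name=LinkedServiceReference(
        reference_name='MyRestServiceLinkedService'),
    relative_url='api/v1/items',
    request_method='GET',  # the documented default, shown for clarity
)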
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param url: Required. The base URL of the REST service. - :type url: object - :param enable_server_certificate_validation: Whether to validate the - server-side SSL certificate when connecting to the endpoint. The default - value is true. Type: boolean (or Expression with resultType boolean). - :type enable_server_certificate_validation: object - :param authentication_type: Required. Type of authentication used to - connect to the REST service. Possible values include: 'Anonymous', - 'Basic', 'AadServicePrincipal', 'ManagedServiceIdentity' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.RestServiceAuthenticationType - :param user_name: The user name used in Basic authentication type. - :type user_name: object - :param password: The password used in Basic authentication type. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param service_principal_id: The application's client ID used in - AadServicePrincipal authentication type. - :type service_principal_id: object - :param service_principal_key: The application's key used in - AadServicePrincipal authentication type. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The tenant information (domain name or tenant ID) used in - AadServicePrincipal authentication type under which your application - resides. - :type tenant: object - :param aad_resource_id: The resource you are requesting authorization to - use. - :type aad_resource_id: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(RestServiceLinkedService, self).__init__(**kwargs) - self.url = kwargs.get('url', None) - self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.aad_resource_id = kwargs.get('aad_resource_id', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'RestService' - - -class RestSource(CopySource): - """A copy activity Rest service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param request_method: The HTTP method used to call the RESTful API. The - default is GET. Type: string (or Expression with resultType string). - :type request_method: object - :param request_body: The HTTP request body to the RESTful API if - requestMethod is POST. Type: string (or Expression with resultType - string). - :type request_body: object - :param additional_headers: The additional HTTP headers in the request to - the RESTful API. 
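For the RestServiceLinkedService completed above, a sketch of the AadServicePrincipal variant (SecureString is assumed from this package; all identifiers are placeholders):

# Hedged sketch; all identifiers are placeholders.
from azure.mgmt.datafactory.models import (
    RestServiceLinkedService, SecureString)

rest_linked_service = RestServiceLinkedService(
    url='https://api.example.com/',
    authentication_type='AadServicePrincipal',
    service_principal_id='<client-id>',
    service_principal_key=SecureString(value='<client-secret>'),
    tenant='<tenant-id>',
    aad_resource_id='<resource-uri>',
)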
Type: string (or Expression with resultType string). - :type additional_headers: object - :param pagination_rules: The pagination rules to compose next page - requests. Type: string (or Expression with resultType string). - :type pagination_rules: object - :param http_request_timeout: The timeout (TimeSpan) to get an HTTP - response. It is the timeout to get a response, not the timeout to read - response data. Default value: 00:01:40. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: object - :param request_interval: The time to wait before sending the next page - request. - :type request_interval: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'request_method': {'key': 'requestMethod', 'type': 'object'}, - 'request_body': {'key': 'requestBody', 'type': 'object'}, - 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, - 'pagination_rules': {'key': 'paginationRules', 'type': 'object'}, - 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, - 'request_interval': {'key': 'requestInterval', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(RestSource, self).__init__(**kwargs) - self.request_method = kwargs.get('request_method', None) - self.request_body = kwargs.get('request_body', None) - self.additional_headers = kwargs.get('additional_headers', None) - self.pagination_rules = kwargs.get('pagination_rules', None) - self.http_request_timeout = kwargs.get('http_request_timeout', None) - self.request_interval = kwargs.get('request_interval', None) - self.type = 'RestSource' - - - class RetryPolicy(Model): - """Execution policy for an activity. - - :param count: Maximum ordinary retry attempts. Default is 0. Type: integer - (or Expression with resultType integer), minimum: 0. - :type count: object - :param interval_in_seconds: Interval between retries in seconds. Default - is 30. - :type interval_in_seconds: int - """ - - _validation = { - 'interval_in_seconds': {'maximum': 86400, 'minimum': 30}, - } - - _attribute_map = { - 'count': {'key': 'count', 'type': 'object'}, - 'interval_in_seconds': {'key': 'intervalInSeconds', 'type': 'int'}, - } - - def __init__(self, **kwargs): - super(RetryPolicy, self).__init__(**kwargs) - self.count = kwargs.get('count', None) - self.interval_in_seconds = kwargs.get('interval_in_seconds', None) - - - class RunFilterParameters(Model): - """Query parameters for listing runs. - - All required parameters must be populated in order to send to Azure. - - :param continuation_token: The continuation token for getting the next - page of results. Null for first page. - :type continuation_token: str - :param last_updated_after: Required. The time at or after which the run - event was updated in 'ISO 8601' format. - :type last_updated_after: datetime - :param last_updated_before: Required. The time at or before which the run - event was updated in 'ISO 8601' format. - :type last_updated_before: datetime - :param filters: List of filters. - :type filters: list[~azure.mgmt.datafactory.models.RunQueryFilter] - :param order_by: List of OrderBy options. 
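A minimal sketch of the RestSource completed above, using the documented default timeout pattern (the header value is illustrative):

# Hedged sketch; the header value is illustrative.
from azure.mgmt.datafactory.models import RestSource

rest_source = RestSource(
    request_method='GET',
    additional_headers='Accept: application/json',
    http_request_timeout='00:01:40',  # the documented default
)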
- :type order_by: list[~azure.mgmt.datafactory.models.RunQueryOrderBy] - """ - - _validation = { - 'last_updated_after': {'required': True}, - 'last_updated_before': {'required': True}, - } - - _attribute_map = { - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, - 'last_updated_after': {'key': 'lastUpdatedAfter', 'type': 'iso-8601'}, - 'last_updated_before': {'key': 'lastUpdatedBefore', 'type': 'iso-8601'}, - 'filters': {'key': 'filters', 'type': '[RunQueryFilter]'}, - 'order_by': {'key': 'orderBy', 'type': '[RunQueryOrderBy]'}, - } - - def __init__(self, **kwargs): - super(RunFilterParameters, self).__init__(**kwargs) - self.continuation_token = kwargs.get('continuation_token', None) - self.last_updated_after = kwargs.get('last_updated_after', None) - self.last_updated_before = kwargs.get('last_updated_before', None) - self.filters = kwargs.get('filters', None) - self.order_by = kwargs.get('order_by', None) - - -class RunQueryFilter(Model): - """Query filter option for listing runs. - - All required parameters must be populated in order to send to Azure. - - :param operand: Required. Parameter name to be used for filter. The - allowed operands to query pipeline runs are PipelineName, RunStart, RunEnd - and Status; to query activity runs are ActivityName, ActivityRunStart, - ActivityRunEnd, ActivityType and Status, and to query trigger runs are - TriggerName, TriggerRunTimestamp and Status. Possible values include: - 'PipelineName', 'Status', 'RunStart', 'RunEnd', 'ActivityName', - 'ActivityRunStart', 'ActivityRunEnd', 'ActivityType', 'TriggerName', - 'TriggerRunTimestamp', 'RunGroupId', 'LatestOnly' - :type operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand - :param operator: Required. Operator to be used for filter. Possible values - include: 'Equals', 'NotEquals', 'In', 'NotIn' - :type operator: str or - ~azure.mgmt.datafactory.models.RunQueryFilterOperator - :param values: Required. List of filter values. - :type values: list[str] - """ - - _validation = { - 'operand': {'required': True}, - 'operator': {'required': True}, - 'values': {'required': True}, - } - - _attribute_map = { - 'operand': {'key': 'operand', 'type': 'str'}, - 'operator': {'key': 'operator', 'type': 'str'}, - 'values': {'key': 'values', 'type': '[str]'}, - } - - def __init__(self, **kwargs): - super(RunQueryFilter, self).__init__(**kwargs) - self.operand = kwargs.get('operand', None) - self.operator = kwargs.get('operator', None) - self.values = kwargs.get('values', None) - - -class RunQueryOrderBy(Model): - """An object to provide order by options for listing runs. - - All required parameters must be populated in order to send to Azure. - - :param order_by: Required. Parameter name to be used for order by. The - allowed parameters to order by for pipeline runs are PipelineName, - RunStart, RunEnd and Status; for activity runs are ActivityName, - ActivityRunStart, ActivityRunEnd and Status; for trigger runs are - TriggerName, TriggerRunTimestamp and Status. Possible values include: - 'RunStart', 'RunEnd', 'PipelineName', 'Status', 'ActivityName', - 'ActivityRunStart', 'ActivityRunEnd', 'TriggerName', 'TriggerRunTimestamp' - :type order_by: str or ~azure.mgmt.datafactory.models.RunQueryOrderByField - :param order: Required. Sorting order of the parameter. 
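The run-query models above combine into a single filter object; a sketch listing failed runs from the last day, of the kind passed to the client's run query operations (the one-day window and status filter are illustrative):

# Hedged sketch; the one-day window and status filter are illustrative.
from datetime import datetime, timedelta
from azure.mgmt.datafactory.models import (
    RunFilterParameters, RunQueryFilter, RunQueryOrderBy)

now = datetime.utcnow()
run_filter = RunFilterParameters(
    last_updated_after=now - timedelta(days=1),
    last_updated_before=now,
    filters=[RunQueryFilter(
        operand='Status', operator='Equals', values=['Failed'])],
    order_by=[RunQueryOrderBy(order_by='RunEnd', order='DESC')],
)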
Possible values - include: 'ASC', 'DESC' - :type order: str or ~azure.mgmt.datafactory.models.RunQueryOrder - """ - - _validation = { - 'order_by': {'required': True}, - 'order': {'required': True}, - } - - _attribute_map = { - 'order_by': {'key': 'orderBy', 'type': 'str'}, - 'order': {'key': 'order', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(RunQueryOrderBy, self).__init__(**kwargs) - self.order_by = kwargs.get('order_by', None) - self.order = kwargs.get('order', None) - - -class SalesforceLinkedService(LinkedService): - """Linked service for Salesforce. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized to this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param environment_url: The URL of the Salesforce instance. Default is - 'https://login.salesforce.com'. To copy data from a sandbox, specify - 'https://test.salesforce.com'. To copy data from a custom domain, specify, - for example, 'https://[domain].my.salesforce.com'. Type: string (or - Expression with resultType string). - :type environment_url: object - :param username: The username for Basic authentication of the Salesforce - instance. Type: string (or Expression with resultType string). - :type username: object - :param password: The password for Basic authentication of the Salesforce - instance. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param security_token: The security token required to remotely access the - Salesforce instance. - :type security_token: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string).
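As a usage sketch only (editorial aside; `client`, `rg`, and `factory` are assumed to be an existing DataFactoryManagementClient, resource group name, and factory name): the three run-query models above combine into a single query_by_factory call.

    # Sketch: pipeline runs updated in the last 24 hours, newest first.
    from datetime import datetime, timedelta
    from azure.mgmt.datafactory.models import (
        RunFilterParameters, RunQueryFilter, RunQueryOrderBy)

    now = datetime.utcnow()
    params = RunFilterParameters(
        last_updated_after=now - timedelta(days=1),
        last_updated_before=now,
        filters=[RunQueryFilter(operand='PipelineName',
                                operator='Equals',
                                values=['copyPipeline'])],
        order_by=[RunQueryOrderBy(order_by='RunStart', order='DESC')])
    runs = client.pipeline_runs.query_by_factory(rg, factory, params)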
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SalesforceLinkedService, self).__init__(**kwargs) - self.environment_url = kwargs.get('environment_url', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.security_token = kwargs.get('security_token', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Salesforce' - - -class SalesforceMarketingCloudLinkedService(LinkedService): - """Salesforce Marketing Cloud linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param client_id: Required. The client ID associated with the Salesforce - Marketing Cloud application. Type: string (or Expression with resultType - string). - :type client_id: object - :param client_secret: The client secret associated with the Salesforce - Marketing Cloud application. Type: string (or Expression with resultType - string). - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. Type: - boolean (or Expression with resultType boolean). - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. Type: boolean (or - Expression with resultType boolean). - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. Type: - boolean (or Expression with resultType boolean). - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. 
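As a usage sketch only (editorial aside; credentials are placeholders): SecretBase-typed properties such as password and security_token accept a SecureString (or an AzureKeyVaultSecretReference).

    # Sketch: a Salesforce linked service pointing at a sandbox org.
    from azure.mgmt.datafactory.models import (
        SalesforceLinkedService, SecureString)

    sf_ls = SalesforceLinkedService(
        environment_url='https://test.salesforce.com',
        username='user@example.com',
        password=SecureString(value='<password>'),
        security_token=SecureString(value='<security token>'))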
Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SalesforceMarketingCloudLinkedService, self).__init__(**kwargs) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'SalesforceMarketingCloud' - - -class SalesforceMarketingCloudObjectDataset(Dataset): - """Salesforce Marketing Cloud dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). 
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SalesforceMarketingCloudObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'SalesforceMarketingCloudObject' - - -class SalesforceMarketingCloudSource(CopySource): - """A copy activity Salesforce Marketing Cloud source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SalesforceMarketingCloudSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'SalesforceMarketingCloudSource' - - -class SalesforceObjectDataset(Dataset): - """The Salesforce object dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
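As a usage sketch only (editorial aside; the reference name, table name, and query are placeholders): the Marketing Cloud linked service, dataset, and source above fit together as follows.

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, SalesforceMarketingCloudLinkedService,
        SalesforceMarketingCloudObjectDataset,
        SalesforceMarketingCloudSource, SecureString)

    smc_ls = SalesforceMarketingCloudLinkedService(
        client_id='<client id>',
        client_secret=SecureString(value='<client secret>'))
    smc_ds = SalesforceMarketingCloudObjectDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='SmcLinkedService'),
        table_name='Email')
    smc_source = SalesforceMarketingCloudSource(query='<query>')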
- :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param object_api_name: The Salesforce object API name. Type: string (or - Expression with resultType string). - :type object_api_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SalesforceObjectDataset, self).__init__(**kwargs) - self.object_api_name = kwargs.get('object_api_name', None) - self.type = 'SalesforceObject' - - -class SalesforceServiceCloudLinkedService(LinkedService): - """Linked service for Salesforce Service Cloud. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized to this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param environment_url: The URL of the Salesforce Service Cloud instance. - Default is 'https://login.salesforce.com'. To copy data from a sandbox, - specify 'https://test.salesforce.com'. To copy data from a custom domain, - specify, for example, 'https://[domain].my.salesforce.com'. Type: string - (or Expression with resultType string). - :type environment_url: object - :param username: The username for Basic authentication of the Salesforce - instance. Type: string (or Expression with resultType string). - :type username: object - :param password: The password for Basic authentication of the Salesforce - instance. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param security_token: The security token required to remotely access the - Salesforce instance. - :type security_token: ~azure.mgmt.datafactory.models.SecretBase - :param extended_properties: Extended properties appended to the connection - string. Type: string (or Expression with resultType string).
- :type extended_properties: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, - 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SalesforceServiceCloudLinkedService, self).__init__(**kwargs) - self.environment_url = kwargs.get('environment_url', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.security_token = kwargs.get('security_token', None) - self.extended_properties = kwargs.get('extended_properties', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'SalesforceServiceCloud' - - -class SalesforceServiceCloudObjectDataset(Dataset): - """The Salesforce Service Cloud object dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param object_api_name: The Salesforce Service Cloud object API name. - Type: string (or Expression with resultType string). 
- :type object_api_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SalesforceServiceCloudObjectDataset, self).__init__(**kwargs) - self.object_api_name = kwargs.get('object_api_name', None) - self.type = 'SalesforceServiceCloudObject' - - -class SalesforceServiceCloudSink(CopySink): - """A copy activity Salesforce Service Cloud sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized to this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param write_behavior: The write behavior for the operation. Default is - Insert. Possible values include: 'Insert', 'Upsert' - :type write_behavior: str or - ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior - :param external_id_field_name: The name of the external ID field for the - upsert operation. Default value is the 'Id' column. Type: string (or - Expression with resultType string). - :type external_id_field_name: object - :param ignore_null_values: The flag indicating whether or not to ignore - null values from the input dataset (except key fields) during a write - operation. Default value is false. If set to true, ADF leaves the data in - the destination object unchanged when doing an upsert/update operation and - inserts the defined default value when doing an insert operation; if set - to false, ADF updates the data in the destination object to NULL when - doing an upsert/update operation and inserts NULL when doing an insert - operation. Type: boolean (or Expression with resultType boolean).
- :type ignore_null_values: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, - 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SalesforceServiceCloudSink, self).__init__(**kwargs) - self.write_behavior = kwargs.get('write_behavior', None) - self.external_id_field_name = kwargs.get('external_id_field_name', None) - self.ignore_null_values = kwargs.get('ignore_null_values', None) - self.type = 'SalesforceServiceCloudSink' - - -class SalesforceServiceCloudSource(CopySource): - """A copy activity Salesforce Service Cloud source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - :param read_behavior: The read behavior for the operation. Default is - Query. Possible values include: 'Query', 'QueryAll' - :type read_behavior: str or - ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(SalesforceServiceCloudSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.read_behavior = kwargs.get('read_behavior', None) - self.type = 'SalesforceServiceCloudSource' - - -class SalesforceSink(CopySink): - """A copy activity Salesforce sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. 
Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param write_behavior: The write behavior for the operation. Default is - Insert. Possible values include: 'Insert', 'Upsert' - :type write_behavior: str or - ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior - :param external_id_field_name: The name of the external ID field for the - upsert operation. Default value is the 'Id' column. Type: string (or - Expression with resultType string). - :type external_id_field_name: object - :param ignore_null_values: The flag indicating whether or not to ignore - null values from the input dataset (except key fields) during a write - operation. Default value is false. If set to true, ADF leaves the data in - the destination object unchanged when doing an upsert/update operation and - inserts the defined default value when doing an insert operation; if set - to false, ADF updates the data in the destination object to NULL when - doing an upsert/update operation and inserts NULL when doing an insert - operation. Type: boolean (or Expression with resultType boolean). - :type ignore_null_values: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, - 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SalesforceSink, self).__init__(**kwargs) - self.write_behavior = kwargs.get('write_behavior', None) - self.external_id_field_name = kwargs.get('external_id_field_name', None) - self.ignore_null_values = kwargs.get('ignore_null_values', None) - self.type = 'SalesforceSink' - - -class SalesforceSource(CopySource): - """A copy activity Salesforce source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized to this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait.
Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - :param read_behavior: The read behavior for the operation. Default is - Query. Possible values include: 'Query', 'QueryAll' - :type read_behavior: str or - ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(SalesforceSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.read_behavior = kwargs.get('read_behavior', None) - self.type = 'SalesforceSource' - - -class SapBwCubeDataset(Dataset): - """The SAP BW cube dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. 
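As a usage sketch only (editorial aside; dataset reference names, the query, and the external ID field are placeholders): a copy activity wiring SalesforceSource to SalesforceSink with the upsert behavior described above.

    from azure.mgmt.datafactory.models import (
        CopyActivity, DatasetReference, SalesforceSink, SalesforceSource)

    copy_accounts = CopyActivity(
        name='CopyAccounts',
        inputs=[DatasetReference(reference_name='SourceAccounts')],
        outputs=[DatasetReference(reference_name='SinkAccounts')],
        source=SalesforceSource(
            query='SELECT Id, Name FROM Account',
            read_behavior='Query'),
        sink=SalesforceSink(
            write_behavior='Upsert',
            external_id_field_name='External_Id__c',
            ignore_null_values=True))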
- :type type: str - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(SapBwCubeDataset, self).__init__(**kwargs) - self.type = 'SapBwCube' - - -class SapBWLinkedService(LinkedService): - """SAP Business Warehouse Linked Service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param server: Required. Host name of the SAP BW instance. Type: string - (or Expression with resultType string). - :type server: object - :param system_number: Required. System number of the BW system. (Usually a - two-digit decimal number represented as a string.) Type: string (or - Expression with resultType string). - :type system_number: object - :param client_id: Required. Client ID of the client on the BW system. - (Usually a three-digit decimal number represented as a string) Type: - string (or Expression with resultType string). - :type client_id: object - :param user_name: Username to access the SAP BW server. Type: string (or - Expression with resultType string). - :type user_name: object - :param password: Password to access the SAP BW server. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'system_number': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SapBWLinkedService, self).__init__(**kwargs) - self.server = kwargs.get('server', None) - self.system_number = kwargs.get('system_number', None) - self.client_id = kwargs.get('client_id', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'SapBW' - - -class SapBwSource(CopySource): - """A copy activity source for SapBW server via MDX. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: MDX query. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SapBwSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'SapBwSource' - - -class SapCloudForCustomerLinkedService(LinkedService): - """Linked service for SAP Cloud for Customer. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. 
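As a usage sketch only (editorial aside; host, system number, client, and credentials are placeholders): SapBWLinkedService plus an MDX SapBwSource.

    from azure.mgmt.datafactory.models import (
        SapBWLinkedService, SapBwSource, SecureString)

    bw_ls = SapBWLinkedService(
        server='bwhost.example.com',
        system_number='00',
        client_id='800',
        user_name='bwuser',
        password=SecureString(value='<password>'))
    bw_source = SapBwSource(query='<MDX statement>')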
- :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param url: Required. The URL of SAP Cloud for Customer OData API. For - example, '[https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1]'. Type: - string (or Expression with resultType string). - :type url: object - :param username: The username for Basic authentication. Type: string (or - Expression with resultType string). - :type username: object - :param password: The password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Either encryptedCredential or username/password must - be provided. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SapCloudForCustomerLinkedService, self).__init__(**kwargs) - self.url = kwargs.get('url', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'SapCloudForCustomer' - - -class SapCloudForCustomerResourceDataset(Dataset): - """The path of the SAP Cloud for Customer OData entity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. 
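As a usage sketch only (editorial aside; the tenant and credentials are placeholders):

    from azure.mgmt.datafactory.models import (
        SapCloudForCustomerLinkedService, SecureString)

    c4c_ls = SapCloudForCustomerLinkedService(
        url='https://mytenant.crm.ondemand.com/sap/c4c/odata/v1',
        username='c4cuser',
        password=SecureString(value='<password>'))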
- :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param path: Required. The path of the SAP Cloud for Customer OData - entity. Type: string (or Expression with resultType string). - :type path: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SapCloudForCustomerResourceDataset, self).__init__(**kwargs) - self.path = kwargs.get('path', None) - self.type = 'SapCloudForCustomerResource' - - -class SapCloudForCustomerSink(CopySink): - """A copy activity SAP Cloud for Customer sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param write_behavior: The write behavior for the operation. Default is - 'Insert'. 
Possible values include: 'Insert', 'Update' - :type write_behavior: str or - ~azure.mgmt.datafactory.models.SapCloudForCustomerSinkWriteBehavior - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(SapCloudForCustomerSink, self).__init__(**kwargs) - self.write_behavior = kwargs.get('write_behavior', None) - self.type = 'SapCloudForCustomerSink' - - -class SapCloudForCustomerSource(CopySource): - """A copy activity source for SAP Cloud for Customer. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized to this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: SAP Cloud for Customer OData query. For example, "$top=1". - Type: string (or Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SapCloudForCustomerSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'SapCloudForCustomerSource' - - -class SapEccLinkedService(LinkedService): - """Linked service for SAP ERP Central Component (SAP ECC). - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized to this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required.
Constant filled by server. - :type type: str - :param url: Required. The URL of SAP ECC OData API. For example, - '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: string (or - Expression with resultType string). - :type url: str - :param username: The username for Basic authentication. Type: string (or - Expression with resultType string). - :type username: str - :param password: The password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Either encryptedCredential or username/password must - be provided. Type: string (or Expression with resultType string). - :type encrypted_credential: str - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'str'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(SapEccLinkedService, self).__init__(**kwargs) - self.url = kwargs.get('url', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'SapEcc' - - -class SapEccResourceDataset(Dataset): - """The path of the SAP ECC OData entity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param path: Required. The path of the SAP ECC OData entity. Type: string - (or Expression with resultType string). 
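As a usage sketch only (editorial aside; host and credentials are placeholders): note that, unlike most linked services in this module, url, username, and encrypted_credential are serialized as plain str here rather than object.

    from azure.mgmt.datafactory.models import (
        SapEccLinkedService, SecureString)

    ecc_ls = SapEccLinkedService(
        url='https://ecchost:443/sap/opu/odata/sap/zservice/',
        username='eccuser',
        password=SecureString(value='<password>'))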
- :type path: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SapEccResourceDataset, self).__init__(**kwargs) - self.path = kwargs.get('path', None) - self.type = 'SapEccResource' - - -class SapEccSource(CopySource): - """A copy activity source for SAP ECC source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: SAP ECC OData query. For example, "$top=1". Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SapEccSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'SapEccSource' - - -class SapHanaLinkedService(LinkedService): - """SAP HANA Linked Service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: SAP HANA ODBC connection string. 
Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param server: Required. Host name of the SAP HANA server. Type: string - (or Expression with resultType string). - :type server: object - :param authentication_type: The authentication type to be used to connect - to the SAP HANA server. Possible values include: 'Basic', 'Windows' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.SapHanaAuthenticationType - :param user_name: Username to access the SAP HANA server. Type: string (or - Expression with resultType string). - :type user_name: object - :param password: Password to access the SAP HANA server. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SapHanaLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.server = kwargs.get('server', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'SapHana' - - -class SapHanaSource(CopySource): - """A copy activity source for SAP HANA source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: SAP HANA Sql query. Type: string (or Expression with - resultType string). - :type query: object - :param packet_size: The packet size of data read from SAP HANA. 
Type: - integer(or Expression with resultType integer). - :type packet_size: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'packet_size': {'key': 'packetSize', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SapHanaSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.packet_size = kwargs.get('packet_size', None) - self.type = 'SapHanaSource' - - -class SapHanaTableDataset(Dataset): - """SAP HANA Table properties. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param sap_hana_table_dataset_schema: The schema name of SAP HANA. Type: - string (or Expression with resultType string). - :type sap_hana_table_dataset_schema: object - :param table: The table name of SAP HANA. Type: string (or Expression with - resultType string). 
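A sketch of the SapHanaSource above; the query text and packet size are illustrative values, not defaults:

from azure.mgmt.datafactory.models import SapHanaSource

source = SapHanaSource(
    query='SELECT * FROM "SYS"."TABLES"',  # hypothetical SQL
    packet_size=1048576,                   # packet size for reads, per the docstring
)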
- :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sap_hana_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SapHanaTableDataset, self).__init__(**kwargs) - self.sap_hana_table_dataset_schema = kwargs.get('sap_hana_table_dataset_schema', None) - self.table = kwargs.get('table', None) - self.type = 'SapHanaTable' - - -class SapOpenHubLinkedService(LinkedService): - """SAP Business Warehouse Open Hub Destination Linked Service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param server: Required. Host name of the SAP BW instance where the open - hub destination is located. Type: string (or Expression with resultType - string). - :type server: object - :param system_number: Required. System number of the BW system where the - open hub destination is located. (Usually a two-digit decimal number - represented as a string.) Type: string (or Expression with resultType - string). - :type system_number: object - :param client_id: Required. Client ID of the client on the BW system where - the open hub destination is located. (Usually a three-digit decimal number - represented as a string) Type: string (or Expression with resultType - string). - :type client_id: object - :param language: Language of the BW system where the open hub destination - is located. The default value is EN. Type: string (or Expression with - resultType string). - :type language: object - :param user_name: Username to access the SAP BW server where the open hub - destination is located. Type: string (or Expression with resultType - string). - :type user_name: object - :param password: Password to access the SAP BW server where the open hub - destination is located. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
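Note that the dataset's schema name is exposed as sap_hana_table_dataset_schema so it does not collide with the base Dataset.schema property, while still serializing under typeProperties.schema. A brief sketch (the linked-service and table names are hypothetical):

from azure.mgmt.datafactory.models import (
    LinkedServiceReference, SapHanaTableDataset)

ds = SapHanaTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='HanaLS'),
    sap_hana_table_dataset_schema='SAPABAP1',  # serialized as typeProperties.schema
    table='MARA',
)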
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'system_number': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'language': {'key': 'typeProperties.language', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SapOpenHubLinkedService, self).__init__(**kwargs) - self.server = kwargs.get('server', None) - self.system_number = kwargs.get('system_number', None) - self.client_id = kwargs.get('client_id', None) - self.language = kwargs.get('language', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'SapOpenHub' - - -class SapOpenHubSource(CopySource): - """A copy activity source for SAP Business Warehouse Open Hub Destination - source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param exclude_last_request: Whether to exclude the records of the last - request. The default value is true. Type: boolean (or Expression with - resultType boolean). - :type exclude_last_request: object - :param base_request_id: The ID of request for delta loading. Once it is - set, only data with requestId larger than the value of this property will - be retrieved. The default value is 0. Type: integer (or Expression with - resultType integer ). 
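A sketch of the open hub linked service above; note that system_number and client_id are passed as strings even though they look numeric, exactly as the docstrings call out (all values hypothetical):

from azure.mgmt.datafactory.models import SapOpenHubLinkedService, SecureString

bw_ls = SapOpenHubLinkedService(
    server='mybw01',            # hypothetical BW host
    system_number='00',         # two-digit decimal as a string
    client_id='100',            # three-digit decimal as a string
    user_name='loader',
    password=SecureString(value='<password>'),
)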
- :type base_request_id: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, - 'base_request_id': {'key': 'baseRequestId', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SapOpenHubSource, self).__init__(**kwargs) - self.exclude_last_request = kwargs.get('exclude_last_request', None) - self.base_request_id = kwargs.get('base_request_id', None) - self.type = 'SapOpenHubSource' - - -class SapOpenHubTableDataset(Dataset): - """Sap Business Warehouse Open Hub Destination Table properties. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param open_hub_destination_name: Required. The name of the Open Hub - Destination with destination type as Database Table. Type: string (or - Expression with resultType string). - :type open_hub_destination_name: object - :param exclude_last_request: Whether to exclude the records of the last - request. The default value is true. Type: boolean (or Expression with - resultType boolean). - :type exclude_last_request: object - :param base_request_id: The ID of request for delta loading. Once it is - set, only data with requestId larger than the value of this property will - be retrieved. The default value is 0. Type: integer (or Expression with - resultType integer ). 
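For delta loading with the SapOpenHubSource above, base_request_id acts as a watermark: only requests with a larger ID are read. A sketch (the request ID is illustrative):

from azure.mgmt.datafactory.models import SapOpenHubSource

source = SapOpenHubSource(
    exclude_last_request=True,  # skip the records of the last request
    base_request_id=42,         # read only data with requestId > 42
)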
- :type base_request_id: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'open_hub_destination_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'open_hub_destination_name': {'key': 'typeProperties.openHubDestinationName', 'type': 'object'}, - 'exclude_last_request': {'key': 'typeProperties.excludeLastRequest', 'type': 'object'}, - 'base_request_id': {'key': 'typeProperties.baseRequestId', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SapOpenHubTableDataset, self).__init__(**kwargs) - self.open_hub_destination_name = kwargs.get('open_hub_destination_name', None) - self.exclude_last_request = kwargs.get('exclude_last_request', None) - self.base_request_id = kwargs.get('base_request_id', None) - self.type = 'SapOpenHubTable' - - -class SapTableLinkedService(LinkedService): - """SAP Table Linked Service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param server: Host name of the SAP instance where the table is located. - Type: string (or Expression with resultType string). - :type server: object - :param system_number: System number of the SAP system where the table is - located. (Usually a two-digit decimal number represented as a string.) - Type: string (or Expression with resultType string). - :type system_number: object - :param client_id: Client ID of the client on the SAP system where the - table is located. (Usually a three-digit decimal number represented as a - string) Type: string (or Expression with resultType string). - :type client_id: object - :param language: Language of the SAP system where the table is located. - The default value is EN. Type: string (or Expression with resultType - string). - :type language: object - :param system_id: SystemID of the SAP system where the table is located. - Type: string (or Expression with resultType string). - :type system_id: object - :param user_name: Username to access the SAP server where the table is - located. Type: string (or Expression with resultType string). - :type user_name: object - :param password: Password to access the SAP server where the table is - located. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param message_server: The hostname of the SAP Message Server. 
Type:
- string (or Expression with resultType string).
- :type message_server: object
- :param message_server_service: The service name or port number of the
- Message Server. Type: string (or Expression with resultType string).
- :type message_server_service: object
- :param snc_mode: SNC activation indicator to access the SAP server where
- the table is located. Must be either 0 (off) or 1 (on). Type: string (or
- Expression with resultType string).
- :type snc_mode: object
- :param snc_my_name: Initiator's SNC name to access the SAP server where
- the table is located. Type: string (or Expression with resultType string).
- :type snc_my_name: object
- :param snc_partner_name: Communication partner's SNC name to access the
- SAP server where the table is located. Type: string (or Expression with
- resultType string).
- :type snc_partner_name: object
- :param snc_library_path: External security product's library to access the
- SAP server where the table is located. Type: string (or Expression with
- resultType string).
- :type snc_library_path: object
- :param snc_qop: SNC Quality of Protection. Allowed values include: 1, 2, 3,
- 8, 9. Type: string (or Expression with resultType string).
- :type snc_qop: object
- :param logon_group: The Logon Group for the SAP System. Type: string (or
- Expression with resultType string).
- :type logon_group: object
- :param encrypted_credential: The encrypted credential used for
- authentication. Credentials are encrypted using the integration runtime
- credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
- 'description': {'key': 'description', 'type': 'str'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'server': {'key': 'typeProperties.server', 'type': 'object'},
- 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'},
- 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'},
- 'language': {'key': 'typeProperties.language', 'type': 'object'},
- 'system_id': {'key': 'typeProperties.systemId', 'type': 'object'},
- 'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
- 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
- 'message_server': {'key': 'typeProperties.messageServer', 'type': 'object'},
- 'message_server_service': {'key': 'typeProperties.messageServerService', 'type': 'object'},
- 'snc_mode': {'key': 'typeProperties.sncMode', 'type': 'object'},
- 'snc_my_name': {'key': 'typeProperties.sncMyName', 'type': 'object'},
- 'snc_partner_name': {'key': 'typeProperties.sncPartnerName', 'type': 'object'},
- 'snc_library_path': {'key': 'typeProperties.sncLibraryPath', 'type': 'object'},
- 'snc_qop': {'key': 'typeProperties.sncQop', 'type': 'object'},
- 'logon_group': {'key': 'typeProperties.logonGroup', 'type': 'object'},
- 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
- }
-
- def __init__(self, **kwargs):
- super(SapTableLinkedService, self).__init__(**kwargs)
- self.server = kwargs.get('server', None)
- self.system_number = kwargs.get('system_number', None)
- self.client_id = kwargs.get('client_id', None)
- self.language =
kwargs.get('language', None) - self.system_id = kwargs.get('system_id', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.message_server = kwargs.get('message_server', None) - self.message_server_service = kwargs.get('message_server_service', None) - self.snc_mode = kwargs.get('snc_mode', None) - self.snc_my_name = kwargs.get('snc_my_name', None) - self.snc_partner_name = kwargs.get('snc_partner_name', None) - self.snc_library_path = kwargs.get('snc_library_path', None) - self.snc_qop = kwargs.get('snc_qop', None) - self.logon_group = kwargs.get('logon_group', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'SapTable' - - -class SapTablePartitionSettings(Model): - """The settings that will be leveraged for SAP table source partitioning. - - :param partition_column_name: The name of the column that will be used for - proceeding range partitioning. Type: string (or Expression with resultType - string). - :type partition_column_name: object - :param partition_upper_bound: The maximum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_upper_bound: object - :param partition_lower_bound: The minimum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_lower_bound: object - :param max_partitions_number: The maximum value of partitions the table - will be split into. Type: integer (or Expression with resultType string). - :type max_partitions_number: object - """ - - _attribute_map = { - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, - 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, - 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, - 'max_partitions_number': {'key': 'maxPartitionsNumber', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SapTablePartitionSettings, self).__init__(**kwargs) - self.partition_column_name = kwargs.get('partition_column_name', None) - self.partition_upper_bound = kwargs.get('partition_upper_bound', None) - self.partition_lower_bound = kwargs.get('partition_lower_bound', None) - self.max_partitions_number = kwargs.get('max_partitions_number', None) - - -class SapTableResourceDataset(Dataset): - """SAP Table Resource properties. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
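The partition settings above drive range partitioning of a SAP table read; a sketch with a hypothetical column and bounds:

from azure.mgmt.datafactory.models import SapTablePartitionSettings

parts = SapTablePartitionSettings(
    partition_column_name='GJAHR',   # hypothetical fiscal-year column
    partition_lower_bound='2015',
    partition_upper_bound='2019',
    max_partitions_number=4,
)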
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: Required. The name of the SAP Table. Type: string (or - Expression with resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'table_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SapTableResourceDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'SapTableResource' - - -class SapTableSource(CopySource): - """A copy activity source for SAP Table source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param row_count: The number of rows to be retrieved. Type: integer(or - Expression with resultType integer). - :type row_count: object - :param row_skips: The number of rows that will be skipped. Type: integer - (or Expression with resultType integer). - :type row_skips: object - :param rfc_table_fields: The fields of the SAP table that will be - retrieved. For example, column0, column1. Type: string (or Expression with - resultType string). - :type rfc_table_fields: object - :param rfc_table_options: The options for the filtering of the SAP Table. - For example, COLUMN0 EQ SOME VALUE. Type: string (or Expression with - resultType string). - :type rfc_table_options: object - :param batch_size: Specifies the maximum number of rows that will be - retrieved at a time when retrieving data from SAP Table. Type: integer (or - Expression with resultType integer). - :type batch_size: object - :param custom_rfc_read_table_function_module: Specifies the custom RFC - function module that will be used to read data from SAP Table. 
Type: - string (or Expression with resultType string). - :type custom_rfc_read_table_function_module: object - :param partition_option: The partition mechanism that will be used for SAP - table read in parallel. Possible values include: 'None', 'PartitionOnInt', - 'PartitionOnCalendarYear', 'PartitionOnCalendarMonth', - 'PartitionOnCalendarDate', 'PartitionOnTime' - :type partition_option: str or - ~azure.mgmt.datafactory.models.SapTablePartitionOption - :param partition_settings: The settings that will be leveraged for SAP - table source partitioning. - :type partition_settings: - ~azure.mgmt.datafactory.models.SapTablePartitionSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'row_count': {'key': 'rowCount', 'type': 'object'}, - 'row_skips': {'key': 'rowSkips', 'type': 'object'}, - 'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'}, - 'rfc_table_options': {'key': 'rfcTableOptions', 'type': 'object'}, - 'batch_size': {'key': 'batchSize', 'type': 'object'}, - 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, - } - - def __init__(self, **kwargs): - super(SapTableSource, self).__init__(**kwargs) - self.row_count = kwargs.get('row_count', None) - self.row_skips = kwargs.get('row_skips', None) - self.rfc_table_fields = kwargs.get('rfc_table_fields', None) - self.rfc_table_options = kwargs.get('rfc_table_options', None) - self.batch_size = kwargs.get('batch_size', None) - self.custom_rfc_read_table_function_module = kwargs.get('custom_rfc_read_table_function_module', None) - self.partition_option = kwargs.get('partition_option', None) - self.partition_settings = kwargs.get('partition_settings', None) - self.type = 'SapTableSource' - - -class ScheduleTrigger(MultiplePipelineTrigger): - """Trigger that creates pipeline runs periodically, on schedule. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when - Start/Stop APIs are called on the Trigger. Possible values include: - 'Started', 'Stopped', 'Disabled' - :vartype runtime_state: str or - ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the - trigger. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param pipelines: Pipelines that need to be started. - :type pipelines: - list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - :param recurrence: Required. Recurrence schedule configuration. 
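Combining the two models, a sketch of a partitioned SapTableSource; 'PartitionOnInt' is one of the enum values listed above and the filter string is hypothetical:

from azure.mgmt.datafactory.models import (
    SapTablePartitionSettings, SapTableSource)

source = SapTableSource(
    rfc_table_options="GJAHR GE '2015'",   # hypothetical row filter
    partition_option='PartitionOnInt',
    partition_settings=SapTablePartitionSettings(
        partition_column_name='GJAHR',
        partition_lower_bound='2015',
        partition_upper_bound='2019',
        max_partitions_number=4,
    ),
)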
- :type recurrence: ~azure.mgmt.datafactory.models.ScheduleTriggerRecurrence - """ - - _validation = { - 'runtime_state': {'readonly': True}, - 'type': {'required': True}, - 'recurrence': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, - 'recurrence': {'key': 'typeProperties.recurrence', 'type': 'ScheduleTriggerRecurrence'}, - } - - def __init__(self, **kwargs): - super(ScheduleTrigger, self).__init__(**kwargs) - self.recurrence = kwargs.get('recurrence', None) - self.type = 'ScheduleTrigger' - - -class ScheduleTriggerRecurrence(Model): - """The workflow trigger recurrence. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param frequency: The frequency. Possible values include: 'NotSpecified', - 'Minute', 'Hour', 'Day', 'Week', 'Month', 'Year' - :type frequency: str or ~azure.mgmt.datafactory.models.RecurrenceFrequency - :param interval: The interval. - :type interval: int - :param start_time: The start time. - :type start_time: datetime - :param end_time: The end time. - :type end_time: datetime - :param time_zone: The time zone. - :type time_zone: str - :param schedule: The recurrence schedule. - :type schedule: ~azure.mgmt.datafactory.models.RecurrenceSchedule - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'frequency': {'key': 'frequency', 'type': 'str'}, - 'interval': {'key': 'interval', 'type': 'int'}, - 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, - 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, - 'time_zone': {'key': 'timeZone', 'type': 'str'}, - 'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'}, - } - - def __init__(self, **kwargs): - super(ScheduleTriggerRecurrence, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.frequency = kwargs.get('frequency', None) - self.interval = kwargs.get('interval', None) - self.start_time = kwargs.get('start_time', None) - self.end_time = kwargs.get('end_time', None) - self.time_zone = kwargs.get('time_zone', None) - self.schedule = kwargs.get('schedule', None) - - -class ScriptAction(Model): - """Custom script action to run on HDI ondemand cluster once it's up. - - All required parameters must be populated in order to send to Azure. - - :param name: Required. The user provided name of the script action. - :type name: str - :param uri: Required. The URI for the script action. - :type uri: str - :param roles: Required. The node types on which the script action should - be executed. - :type roles: object - :param parameters: The parameters for the script action. 
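A sketch wiring the recurrence model into the trigger; start_time is serialized as iso-8601 per the attribute map, and the values here are illustrative:

from datetime import datetime
from azure.mgmt.datafactory.models import (
    ScheduleTrigger, ScheduleTriggerRecurrence)

recurrence = ScheduleTriggerRecurrence(
    frequency='Minute',
    interval=15,                               # every 15 minutes
    start_time=datetime(2019, 6, 10, 0, 0, 0),
    time_zone='UTC',
)
trigger = ScheduleTrigger(recurrence=recurrence)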
- :type parameters: str - """ - - _validation = { - 'name': {'required': True}, - 'uri': {'required': True}, - 'roles': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'roles': {'key': 'roles', 'type': 'object'}, - 'parameters': {'key': 'parameters', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(ScriptAction, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.uri = kwargs.get('uri', None) - self.roles = kwargs.get('roles', None) - self.parameters = kwargs.get('parameters', None) - - -class SecureString(SecretBase): - """Azure Data Factory secure string definition. The string value will be - masked with asterisks '*' during Get or List API calls. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. - :type type: str - :param value: Required. Value of secure string. - :type value: str - """ - - _validation = { - 'type': {'required': True}, - 'value': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(SecureString, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.type = 'SecureString' - - -class SelfDependencyTumblingWindowTriggerReference(DependencyReference): - """Self referenced tumbling window trigger dependency. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. - :type type: str - :param offset: Required. Timespan applied to the start time of a tumbling - window when evaluating dependency. - :type offset: str - :param size: The size of the window when evaluating the dependency. If - undefined the frequency of the tumbling window will be used. - :type size: str - """ - - _validation = { - 'type': {'required': True}, - 'offset': {'required': True, 'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, - 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'offset': {'key': 'offset', 'type': 'str'}, - 'size': {'key': 'size', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(SelfDependencyTumblingWindowTriggerReference, self).__init__(**kwargs) - self.offset = kwargs.get('offset', None) - self.size = kwargs.get('size', None) - self.type = 'SelfDependencyTumblingWindowTriggerReference' - - -class SelfHostedIntegrationRuntime(IntegrationRuntime): - """Self-hosted integration runtime. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Integration runtime description. - :type description: str - :param type: Required. Constant filled by server. 
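A sketch of the ScriptAction model above; the URI and role are hypothetical, and roles is typed as object so a plain string passes through unchanged:

from azure.mgmt.datafactory.models import ScriptAction

action = ScriptAction(
    name='install-deps',
    uri='https://example.blob.core.windows.net/scripts/install.sh',
    roles='workernode',      # node types the script should run on
    parameters='--quiet',
)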
- :type type: str - :param linked_info: - :type linked_info: - ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeType - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_info': {'key': 'typeProperties.linkedInfo', 'type': 'LinkedIntegrationRuntimeType'}, - } - - def __init__(self, **kwargs): - super(SelfHostedIntegrationRuntime, self).__init__(**kwargs) - self.linked_info = kwargs.get('linked_info', None) - self.type = 'SelfHosted' - - -class SelfHostedIntegrationRuntimeNode(Model): - """Properties of Self-hosted integration runtime node. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar node_name: Name of the integration runtime node. - :vartype node_name: str - :ivar machine_name: Machine name of the integration runtime node. - :vartype machine_name: str - :ivar host_service_uri: URI for the host machine of the integration - runtime. - :vartype host_service_uri: str - :ivar status: Status of the integration runtime node. Possible values - include: 'NeedRegistration', 'Online', 'Limited', 'Offline', 'Upgrading', - 'Initializing', 'InitializeFailed' - :vartype status: str or - ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNodeStatus - :ivar capabilities: The integration runtime capabilities dictionary - :vartype capabilities: dict[str, str] - :ivar version_status: Status of the integration runtime node version. - :vartype version_status: str - :ivar version: Version of the integration runtime node. - :vartype version: str - :ivar register_time: The time at which the integration runtime node was - registered in ISO8601 format. - :vartype register_time: datetime - :ivar last_connect_time: The most recent time at which the integration - runtime was connected in ISO8601 format. - :vartype last_connect_time: datetime - :ivar expiry_time: The time at which the integration runtime will expire - in ISO8601 format. - :vartype expiry_time: datetime - :ivar last_start_time: The time the node last started up. - :vartype last_start_time: datetime - :ivar last_stop_time: The integration runtime node last stop time. - :vartype last_stop_time: datetime - :ivar last_update_result: The result of the last integration runtime node - update. Possible values include: 'None', 'Succeed', 'Fail' - :vartype last_update_result: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeUpdateResult - :ivar last_start_update_time: The last time for the integration runtime - node update start. - :vartype last_start_update_time: datetime - :ivar last_end_update_time: The last time for the integration runtime node - update end. - :vartype last_end_update_time: datetime - :ivar is_active_dispatcher: Indicates whether this node is the active - dispatcher for integration runtime requests. - :vartype is_active_dispatcher: bool - :ivar concurrent_jobs_limit: Maximum concurrent jobs on the integration - runtime node. - :vartype concurrent_jobs_limit: int - :ivar max_concurrent_jobs: The maximum concurrent jobs in this integration - runtime. 
- :vartype max_concurrent_jobs: int - """ - - _validation = { - 'node_name': {'readonly': True}, - 'machine_name': {'readonly': True}, - 'host_service_uri': {'readonly': True}, - 'status': {'readonly': True}, - 'capabilities': {'readonly': True}, - 'version_status': {'readonly': True}, - 'version': {'readonly': True}, - 'register_time': {'readonly': True}, - 'last_connect_time': {'readonly': True}, - 'expiry_time': {'readonly': True}, - 'last_start_time': {'readonly': True}, - 'last_stop_time': {'readonly': True}, - 'last_update_result': {'readonly': True}, - 'last_start_update_time': {'readonly': True}, - 'last_end_update_time': {'readonly': True}, - 'is_active_dispatcher': {'readonly': True}, - 'concurrent_jobs_limit': {'readonly': True}, - 'max_concurrent_jobs': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'node_name': {'key': 'nodeName', 'type': 'str'}, - 'machine_name': {'key': 'machineName', 'type': 'str'}, - 'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'capabilities': {'key': 'capabilities', 'type': '{str}'}, - 'version_status': {'key': 'versionStatus', 'type': 'str'}, - 'version': {'key': 'version', 'type': 'str'}, - 'register_time': {'key': 'registerTime', 'type': 'iso-8601'}, - 'last_connect_time': {'key': 'lastConnectTime', 'type': 'iso-8601'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'last_start_time': {'key': 'lastStartTime', 'type': 'iso-8601'}, - 'last_stop_time': {'key': 'lastStopTime', 'type': 'iso-8601'}, - 'last_update_result': {'key': 'lastUpdateResult', 'type': 'str'}, - 'last_start_update_time': {'key': 'lastStartUpdateTime', 'type': 'iso-8601'}, - 'last_end_update_time': {'key': 'lastEndUpdateTime', 'type': 'iso-8601'}, - 'is_active_dispatcher': {'key': 'isActiveDispatcher', 'type': 'bool'}, - 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, - 'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'}, - } - - def __init__(self, **kwargs): - super(SelfHostedIntegrationRuntimeNode, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.node_name = None - self.machine_name = None - self.host_service_uri = None - self.status = None - self.capabilities = None - self.version_status = None - self.version = None - self.register_time = None - self.last_connect_time = None - self.expiry_time = None - self.last_start_time = None - self.last_stop_time = None - self.last_update_result = None - self.last_start_update_time = None - self.last_end_update_time = None - self.is_active_dispatcher = None - self.concurrent_jobs_limit = None - self.max_concurrent_jobs = None - - -class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): - """Self-hosted integration runtime status. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar data_factory_name: The data factory name which the integration - runtime belong to. - :vartype data_factory_name: str - :ivar state: The state of integration runtime. 
Possible values include: - 'Initial', 'Stopped', 'Started', 'Starting', 'Stopping', - 'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied' - :vartype state: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeState - :param type: Required. Constant filled by server. - :type type: str - :ivar create_time: The time at which the integration runtime was created, - in ISO8601 format. - :vartype create_time: datetime - :ivar task_queue_id: The task queue id of the integration runtime. - :vartype task_queue_id: str - :ivar internal_channel_encryption: It is used to set the encryption mode - for node-node communication channel (when more than 2 self-hosted - integration runtime nodes exist). Possible values include: 'NotSet', - 'SslEncrypted', 'NotEncrypted' - :vartype internal_channel_encryption: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeInternalChannelEncryptionMode - :ivar version: Version of the integration runtime. - :vartype version: str - :param nodes: The list of nodes for this integration runtime. - :type nodes: - list[~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode] - :ivar scheduled_update_date: The date at which the integration runtime - will be scheduled to update, in ISO8601 format. - :vartype scheduled_update_date: datetime - :ivar update_delay_offset: The time in the date scheduled by service to - update the integration runtime, e.g., PT03H is 3 hours - :vartype update_delay_offset: str - :ivar local_time_zone_offset: The local time zone offset in hours. - :vartype local_time_zone_offset: str - :ivar capabilities: Object with additional information about integration - runtime capabilities. - :vartype capabilities: dict[str, str] - :ivar service_urls: The URLs for the services used in integration runtime - backend service. - :vartype service_urls: list[str] - :ivar auto_update: Whether Self-hosted integration runtime auto update has - been turned on. Possible values include: 'On', 'Off' - :vartype auto_update: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate - :ivar version_status: Status of the integration runtime version. - :vartype version_status: str - :param links: The list of linked integration runtimes that are created to - share with this integration runtime. - :type links: list[~azure.mgmt.datafactory.models.LinkedIntegrationRuntime] - :ivar pushed_version: The version that the integration runtime is going to - update to. - :vartype pushed_version: str - :ivar latest_version: The latest version on download center. - :vartype latest_version: str - :ivar auto_update_eta: The estimated time when the self-hosted integration - runtime will be updated. 
- :vartype auto_update_eta: datetime - """ - - _validation = { - 'data_factory_name': {'readonly': True}, - 'state': {'readonly': True}, - 'type': {'required': True}, - 'create_time': {'readonly': True}, - 'task_queue_id': {'readonly': True}, - 'internal_channel_encryption': {'readonly': True}, - 'version': {'readonly': True}, - 'scheduled_update_date': {'readonly': True}, - 'update_delay_offset': {'readonly': True}, - 'local_time_zone_offset': {'readonly': True}, - 'capabilities': {'readonly': True}, - 'service_urls': {'readonly': True}, - 'auto_update': {'readonly': True}, - 'version_status': {'readonly': True}, - 'pushed_version': {'readonly': True}, - 'latest_version': {'readonly': True}, - 'auto_update_eta': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, - 'state': {'key': 'state', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'create_time': {'key': 'typeProperties.createTime', 'type': 'iso-8601'}, - 'task_queue_id': {'key': 'typeProperties.taskQueueId', 'type': 'str'}, - 'internal_channel_encryption': {'key': 'typeProperties.internalChannelEncryption', 'type': 'str'}, - 'version': {'key': 'typeProperties.version', 'type': 'str'}, - 'nodes': {'key': 'typeProperties.nodes', 'type': '[SelfHostedIntegrationRuntimeNode]'}, - 'scheduled_update_date': {'key': 'typeProperties.scheduledUpdateDate', 'type': 'iso-8601'}, - 'update_delay_offset': {'key': 'typeProperties.updateDelayOffset', 'type': 'str'}, - 'local_time_zone_offset': {'key': 'typeProperties.localTimeZoneOffset', 'type': 'str'}, - 'capabilities': {'key': 'typeProperties.capabilities', 'type': '{str}'}, - 'service_urls': {'key': 'typeProperties.serviceUrls', 'type': '[str]'}, - 'auto_update': {'key': 'typeProperties.autoUpdate', 'type': 'str'}, - 'version_status': {'key': 'typeProperties.versionStatus', 'type': 'str'}, - 'links': {'key': 'typeProperties.links', 'type': '[LinkedIntegrationRuntime]'}, - 'pushed_version': {'key': 'typeProperties.pushedVersion', 'type': 'str'}, - 'latest_version': {'key': 'typeProperties.latestVersion', 'type': 'str'}, - 'auto_update_eta': {'key': 'typeProperties.autoUpdateETA', 'type': 'iso-8601'}, - } - - def __init__(self, **kwargs): - super(SelfHostedIntegrationRuntimeStatus, self).__init__(**kwargs) - self.create_time = None - self.task_queue_id = None - self.internal_channel_encryption = None - self.version = None - self.nodes = kwargs.get('nodes', None) - self.scheduled_update_date = None - self.update_delay_offset = None - self.local_time_zone_offset = None - self.capabilities = None - self.service_urls = None - self.auto_update = None - self.version_status = None - self.links = kwargs.get('links', None) - self.pushed_version = None - self.latest_version = None - self.auto_update_eta = None - self.type = 'SelfHosted' - - -class ServiceNowLinkedService(LinkedService): - """ServiceNow server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
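Because nearly every field on the status models above is read-only, they are populated by the service rather than constructed locally. A sketch, assuming an already-authenticated DataFactoryManagementClient named client and hypothetical resource names:

status = client.integration_runtimes.get_status(
    'my-rg', 'my-factory', 'my-selfhosted-ir')
props = status.properties            # a SelfHostedIntegrationRuntimeStatus
for node in (props.nodes or []):
    print(node.node_name, node.status)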
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- linked service.
- :type annotations: list[object]
- :param type: Required. Constant filled by server.
- :type type: str
- :param endpoint: Required. The endpoint of the ServiceNow server. (i.e.
- <instance>.service-now.com)
- :type endpoint: object
- :param authentication_type: Required. The authentication type to use.
- Possible values include: 'Basic', 'OAuth2'
- :type authentication_type: str or
- ~azure.mgmt.datafactory.models.ServiceNowAuthenticationType
- :param username: The user name used to connect to the ServiceNow server
- for Basic and OAuth2 authentication.
- :type username: object
- :param password: The password corresponding to the user name for Basic and
- OAuth2 authentication.
- :type password: ~azure.mgmt.datafactory.models.SecretBase
- :param client_id: The client id for OAuth2 authentication.
- :type client_id: object
- :param client_secret: The client secret for OAuth2 authentication.
- :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
- :param use_encrypted_endpoints: Specifies whether the data source
- endpoints are encrypted using HTTPS. The default value is true.
- :type use_encrypted_endpoints: object
- :param use_host_verification: Specifies whether to require the host name
- in the server's certificate to match the host name of the server when
- connecting over SSL. The default value is true.
- :type use_host_verification: object
- :param use_peer_verification: Specifies whether to verify the identity of
- the server when connecting over SSL. The default value is true.
- :type use_peer_verification: object
- :param encrypted_credential: The encrypted credential used for
- authentication. Credentials are encrypted using the integration runtime
- credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ServiceNowLinkedService, self).__init__(**kwargs) - self.endpoint = kwargs.get('endpoint', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'ServiceNow' - - -class ServiceNowObjectDataset(Dataset): - """ServiceNow server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. 
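A sketch of the ServiceNow linked service above using OAuth2; the endpoint and credentials are hypothetical, with secrets carried as SecureString per the SecretBase-typed parameters:

from azure.mgmt.datafactory.models import (
    SecureString, ServiceNowLinkedService)

snow_ls = ServiceNowLinkedService(
    endpoint='myinstance.service-now.com',
    authentication_type='OAuth2',
    username='integration.user',
    password=SecureString(value='<password>'),
    client_id='<oauth-client-id>',
    client_secret=SecureString(value='<oauth-client-secret>'),
)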
Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ServiceNowObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'ServiceNowObject' - - -class ServiceNowSource(CopySource): - """A copy activity ServiceNow server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ServiceNowSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'ServiceNowSource' - - -class SetVariableActivity(ControlActivity): - """Set value for a Variable. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param variable_name: Name of the variable whose value needs to be set. 
- :type variable_name: str - :param value: Value to be set. Could be a static value or Expression - :type value: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, - 'value': {'key': 'typeProperties.value', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SetVariableActivity, self).__init__(**kwargs) - self.variable_name = kwargs.get('variable_name', None) - self.value = kwargs.get('value', None) - self.type = 'SetVariable' - - -class SftpLocation(DatasetLocation): - """The location of SFTP dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). - :type file_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SftpLocation, self).__init__(**kwargs) - - -class SftpReadSettings(StoreReadSettings): - """Sftp read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - :param wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or - Expression with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: Sftp wildcardFileName. Type: string (or - Expression with resultType string). - :type wildcard_file_name: object - :param modified_datetime_start: The start of file's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: - string (or Expression with resultType string). 
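A minimal sketch of the SetVariableActivity removed above; the activity and variable names are illustrative, and the variable itself must already be declared on the pipeline:

    from azure.mgmt.datafactory.models import SetVariableActivity

    # 'value' may be a literal or an expression string evaluated at run time.
    capture_run_id = SetVariableActivity(
        name='CaptureRunId',
        variable_name='runId',
        value='@pipeline().RunId',
    )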
-    :type modified_datetime_end: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'type': {'key': 'type', 'type': 'str'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'recursive': {'key': 'recursive', 'type': 'object'},
-        'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
-        'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
-        'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'},
-        'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(SftpReadSettings, self).__init__(**kwargs)
-        self.recursive = kwargs.get('recursive', None)
-        self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None)
-        self.wildcard_file_name = kwargs.get('wildcard_file_name', None)
-        self.modified_datetime_start = kwargs.get('modified_datetime_start', None)
-        self.modified_datetime_end = kwargs.get('modified_datetime_end', None)
-
-
-class SftpServerLinkedService(LinkedService):
-    """A linked service for an SSH File Transfer Protocol (SFTP) server.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param host: Required. The SFTP server host name. Type: string (or
-     Expression with resultType string).
-    :type host: object
-    :param port: The TCP port number that the SFTP server uses to listen for
-     client connections. Default value is 22. Type: integer (or Expression
-     with resultType integer), minimum: 0.
-    :type port: object
-    :param authentication_type: The authentication type to be used to connect
-     to the SFTP server. Possible values include: 'Basic', 'SshPublicKey'
-    :type authentication_type: str or
-     ~azure.mgmt.datafactory.models.SftpAuthenticationType
-    :param user_name: The username used to log on to the SFTP server. Type:
-     string (or Expression with resultType string).
-    :type user_name: object
-    :param password: Password to log on to the SFTP server for Basic
-     authentication.
-    :type password: ~azure.mgmt.datafactory.models.SecretBase
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
-    :type encrypted_credential: object
-    :param private_key_path: The SSH private key file path for SshPublicKey
-     authentication. Only valid for on-premises copy. For on-premises copy
-     with SshPublicKey authentication, either PrivateKeyPath or
-     PrivateKeyContent should be specified. The SSH private key should be in
-     OpenSSH format. Type: string (or Expression with resultType string).
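A sketch of the SftpReadSettings just removed, with assumed filter values; unlike most models here, the read-settings classes do not pin their own discriminator, so 'type' is passed explicitly (the docstring marks it required):

    from azure.mgmt.datafactory.models import SftpReadSettings

    # Wildcard and datetime-window values are placeholders.
    sftp_read = SftpReadSettings(
        type='SftpReadSettings',
        recursive=True,
        wildcard_file_name='*.csv',
        modified_datetime_start='2019-06-01T00:00:00Z',
        modified_datetime_end='2019-06-07T00:00:00Z',
    )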
- :type private_key_path: object - :param private_key_content: Base64 encoded SSH private key content for - SshPublicKey authentication. For on-premises copy with SshPublicKey - authentication, either PrivateKeyPath or PrivateKeyContent should be - specified. SSH private key should be OpenSSH format. - :type private_key_content: ~azure.mgmt.datafactory.models.SecretBase - :param pass_phrase: The password to decrypt the SSH private key if the SSH - private key is encrypted. - :type pass_phrase: ~azure.mgmt.datafactory.models.SecretBase - :param skip_host_key_validation: If true, skip the SSH host key - validation. Default value is false. Type: boolean (or Expression with - resultType boolean). - :type skip_host_key_validation: object - :param host_key_fingerprint: The host key finger-print of the SFTP server. - When SkipHostKeyValidation is false, HostKeyFingerprint should be - specified. Type: string (or Expression with resultType string). - :type host_key_fingerprint: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'private_key_path': {'key': 'typeProperties.privateKeyPath', 'type': 'object'}, - 'private_key_content': {'key': 'typeProperties.privateKeyContent', 'type': 'SecretBase'}, - 'pass_phrase': {'key': 'typeProperties.passPhrase', 'type': 'SecretBase'}, - 'skip_host_key_validation': {'key': 'typeProperties.skipHostKeyValidation', 'type': 'object'}, - 'host_key_fingerprint': {'key': 'typeProperties.hostKeyFingerprint', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SftpServerLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.port = kwargs.get('port', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.private_key_path = kwargs.get('private_key_path', None) - self.private_key_content = kwargs.get('private_key_content', None) - self.pass_phrase = kwargs.get('pass_phrase', None) - self.skip_host_key_validation = kwargs.get('skip_host_key_validation', None) - self.host_key_fingerprint = kwargs.get('host_key_fingerprint', None) - self.type = 'Sftp' - - -class ShopifyLinkedService(LinkedService): - """Shopify Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. 
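A minimal sketch of the SftpServerLinkedService with SshPublicKey authentication; host, account and key material are placeholders, and per the docstring exactly one of private_key_path / private_key_content should be supplied:

    from azure.mgmt.datafactory.models import SecureString, SftpServerLinkedService

    sftp_ls = SftpServerLinkedService(
        host='sftp.example.com',
        port=22,
        authentication_type='SshPublicKey',
        user_name='loader',
        private_key_content=SecureString(value='<base64 OpenSSH private key>'),
        skip_host_key_validation=False,
        host_key_fingerprint='<fingerprint>',  # expected when validation is on
    )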
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param host: Required. The endpoint of the Shopify server. (e.g.
-     mystore.myshopify.com)
-    :type host: object
-    :param access_token: The API access token that can be used to access
-     Shopify’s data. The token won't expire if it is in offline mode.
-    :type access_token: ~azure.mgmt.datafactory.models.SecretBase
-    :param use_encrypted_endpoints: Specifies whether the data source
-     endpoints are encrypted using HTTPS. The default value is true.
-    :type use_encrypted_endpoints: object
-    :param use_host_verification: Specifies whether to require the host name
-     in the server's certificate to match the host name of the server when
-     connecting over SSL. The default value is true.
-    :type use_host_verification: object
-    :param use_peer_verification: Specifies whether to verify the identity of
-     the server when connecting over SSL. The default value is true.
-    :type use_peer_verification: object
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
-    :type encrypted_credential: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-        'host': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
-        'description': {'key': 'description', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'host': {'key': 'typeProperties.host', 'type': 'object'},
-        'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'},
-        'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
-        'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
-        'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
-        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(ShopifyLinkedService, self).__init__(**kwargs)
-        self.host = kwargs.get('host', None)
-        self.access_token = kwargs.get('access_token', None)
-        self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None)
-        self.use_host_verification = kwargs.get('use_host_verification', None)
-        self.use_peer_verification = kwargs.get('use_peer_verification', None)
-        self.encrypted_credential = kwargs.get('encrypted_credential', None)
-        self.type = 'Shopify'
-
-
-class ShopifyObjectDataset(Dataset):
-    """Shopify Service dataset.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param description: Dataset description.
- :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ShopifyObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'ShopifyObject' - - -class ShopifySource(CopySource): - """A copy activity Shopify Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ShopifySource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'ShopifySource' - - -class SparkLinkedService(LinkedService): - """Spark Server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. IP address or host name of the Spark server - :type host: object - :param port: Required. The TCP port that the Spark server uses to listen - for client connections. - :type port: object - :param server_type: The type of Spark server. Possible values include: - 'SharkServer', 'SharkServer2', 'SparkThriftServer' - :type server_type: str or ~azure.mgmt.datafactory.models.SparkServerType - :param thrift_transport_protocol: The transport protocol to use in the - Thrift layer. Possible values include: 'Binary', 'SASL', 'HTTP ' - :type thrift_transport_protocol: str or - ~azure.mgmt.datafactory.models.SparkThriftTransportProtocol - :param authentication_type: Required. The authentication method used to - access the Spark server. Possible values include: 'Anonymous', 'Username', - 'UsernameAndPassword', 'WindowsAzureHDInsightService' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.SparkAuthenticationType - :param username: The user name that you use to access Spark Server. - :type username: object - :param password: The password corresponding to the user name that you - provided in the Username field - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param http_path: The partial URL corresponding to the Spark server. - :type http_path: object - :param enable_ssl: Specifies whether the connections to the server are - encrypted using SSL. The default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing - trusted CA certificates for verifying the server when connecting over SSL. - This property can only be set when using SSL on self-hosted IR. The - default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate - from the system trust store or from a specified PEM file. The default - value is false. 
- :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a - CA-issued SSL certificate name to match the host name of the server when - connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow - self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'port': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, - 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SparkLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.port = kwargs.get('port', None) - self.server_type = kwargs.get('server_type', None) - self.thrift_transport_protocol = kwargs.get('thrift_transport_protocol', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.http_path = kwargs.get('http_path', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Spark' - - -class SparkObjectDataset(Dataset): - """Spark Server dataset. - - All required parameters must be populated in order to send to Azure. 
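A sketch of the SparkLinkedService removed above, with placeholder connection details; host, port and authentication_type are the required fields, and note the trailing space in the 'HTTP ' enum value quoted in the docstring:

    from azure.mgmt.datafactory.models import SecureString, SparkLinkedService

    spark_ls = SparkLinkedService(
        host='spark.example.com',
        port=443,
        server_type='SparkThriftServer',
        thrift_transport_protocol='HTTP ',
        authentication_type='UsernameAndPassword',
        username='admin',
        password=SecureString(value='<password>'),
        enable_ssl=True,
    )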
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of the Spark. Type: string (or Expression - with resultType string). - :type table: object - :param spark_object_dataset_schema: The schema name of the Spark. Type: - string (or Expression with resultType string). - :type spark_object_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'spark_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SparkObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.spark_object_dataset_schema = kwargs.get('spark_object_dataset_schema', None) - self.type = 'SparkObject' - - -class SparkSource(CopySource): - """A copy activity Spark Server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
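A sketch of the SparkObjectDataset using the schema + table pair rather than the retiring table_name property; 'SparkLS' and the object names are hypothetical:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, SparkObjectDataset)

    spark_ds = SparkObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name='SparkLS'),
        spark_object_dataset_schema='default',  # serialized as typeProperties.schema
        table='trips',                          # serialized as typeProperties.table
    )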
- :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SparkSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'SparkSource' - - -class SqlDWSink(CopySink): - """A copy activity SQL Data Warehouse sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression - with resultType string). - :type pre_copy_script: object - :param allow_poly_base: Indicates to use PolyBase to copy data into SQL - Data Warehouse when applicable. Type: boolean (or Expression with - resultType boolean). - :type allow_poly_base: object - :param poly_base_settings: Specifies PolyBase-related settings when - allowPolyBase is true. - :type poly_base_settings: ~azure.mgmt.datafactory.models.PolybaseSettings - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). 
- :type table_option: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, - 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SqlDWSink, self).__init__(**kwargs) - self.pre_copy_script = kwargs.get('pre_copy_script', None) - self.allow_poly_base = kwargs.get('allow_poly_base', None) - self.poly_base_settings = kwargs.get('poly_base_settings', None) - self.table_option = kwargs.get('table_option', None) - self.type = 'SqlDWSink' - - -class SqlDWSource(CopySource): - """A copy activity SQL Data Warehouse source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or - Expression with resultType string). - :type sql_reader_query: object - :param sql_reader_stored_procedure_name: Name of the stored procedure for - a SQL Data Warehouse source. This cannot be used at the same time as - SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: object - :param stored_procedure_parameters: Value and type setting for stored - procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". - Type: object (or Expression with resultType object), itemType: - StoredProcedureParameter. 
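A sketch of a PolyBase-enabled SqlDWSink as removed above; the script, reject thresholds and table option are illustrative only:

    from azure.mgmt.datafactory.models import PolybaseSettings, SqlDWSink

    dw_sink = SqlDWSink(
        pre_copy_script='TRUNCATE TABLE stg.Trips',
        allow_poly_base=True,
        poly_base_settings=PolybaseSettings(
            reject_type='percentage',
            reject_value=1.0,
            reject_sample_value=1000,
        ),
        table_option='autoCreate',  # the only supported value per the docstring
    )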
- :type stored_procedure_parameters: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, - 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SqlDWSource, self).__init__(**kwargs) - self.sql_reader_query = kwargs.get('sql_reader_query', None) - self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - self.type = 'SqlDWSource' - - -class SqlMISink(CopySink): - """A copy activity Azure SQL Managed Instance sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_writer_stored_procedure_name: SQL writer stored procedure name. - Type: string (or Expression with resultType string). - :type sql_writer_stored_procedure_name: object - :param sql_writer_table_type: SQL writer table type. Type: string (or - Expression with resultType string). - :type sql_writer_table_type: object - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression - with resultType string). - :type pre_copy_script: object - :param stored_procedure_parameters: SQL stored procedure parameters. - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param stored_procedure_table_type_parameter_name: The stored procedure - parameter name of the table type. Type: string (or Expression with - resultType string). - :type stored_procedure_table_type_parameter_name: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). 
-    :type table_option: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
-        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
-        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
-        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
-        'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
-        'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
-        'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
-        'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'},
-        'table_option': {'key': 'tableOption', 'type': 'object'},
-    }
-
-    def __init__(self, **kwargs):
-        super(SqlMISink, self).__init__(**kwargs)
-        self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None)
-        self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None)
-        self.pre_copy_script = kwargs.get('pre_copy_script', None)
-        self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None)
-        self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None)
-        self.table_option = kwargs.get('table_option', None)
-        self.type = 'SqlMISink'
-
-
-class SqlMISource(CopySource):
-    """A copy activity Azure SQL Managed Instance source.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param source_retry_count: Source retry count. Type: integer (or
-     Expression with resultType integer).
-    :type source_retry_count: object
-    :param source_retry_wait: Source retry wait. Type: string (or Expression
-     with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type source_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the source data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param sql_reader_query: SQL reader query. Type: string (or Expression
-     with resultType string).
-    :type sql_reader_query: object
-    :param sql_reader_stored_procedure_name: Name of the stored procedure for
-     an Azure SQL Managed Instance source. This cannot be used at the same
-     time as SqlReaderQuery. Type: string (or Expression with resultType
-     string).
-    :type sql_reader_stored_procedure_name: object
-    :param stored_procedure_parameters: Value and type setting for stored
-     procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}".
-    :type stored_procedure_parameters: dict[str,
-     ~azure.mgmt.datafactory.models.StoredProcedureParameter]
-    :param produce_additional_types: Which additional types to produce.
- :type produce_additional_types: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, - 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SqlMISource, self).__init__(**kwargs) - self.sql_reader_query = kwargs.get('sql_reader_query', None) - self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - self.produce_additional_types = kwargs.get('produce_additional_types', None) - self.type = 'SqlMISource' - - -class SqlServerLinkedService(LinkedService): - """SQL Server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param user_name: The on-premises Windows authentication user name. Type: - string (or Expression with resultType string). - :type user_name: object - :param password: The on-premises Windows authentication password. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SqlServerLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'SqlServer' - - -class SqlServerSink(CopySink): - """A copy activity SQL server sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_writer_stored_procedure_name: SQL writer stored procedure name. - Type: string (or Expression with resultType string). - :type sql_writer_stored_procedure_name: object - :param sql_writer_table_type: SQL writer table type. Type: string (or - Expression with resultType string). - :type sql_writer_table_type: object - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression - with resultType string). - :type pre_copy_script: object - :param stored_procedure_parameters: SQL stored procedure parameters. - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param stored_procedure_table_type_parameter_name: The stored procedure - parameter name of the table type. Type: string (or Expression with - resultType string). - :type stored_procedure_table_type_parameter_name: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). 
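A minimal sketch of the SqlServerLinkedService removed above; the connection string and account are placeholders, and user_name/password apply only for Windows authentication against an on-premises server:

    from azure.mgmt.datafactory.models import SecureString, SqlServerLinkedService

    sql_server_ls = SqlServerLinkedService(
        connection_string='Server=myserver;Database=mydb;Integrated Security=True;',
        user_name='CORP\\svc_adf',
        password=SecureString(value='<password>'),
    )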
- :type table_option: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, - 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SqlServerSink, self).__init__(**kwargs) - self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) - self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) - self.pre_copy_script = kwargs.get('pre_copy_script', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) - self.table_option = kwargs.get('table_option', None) - self.type = 'SqlServerSink' - - -class SqlServerSource(CopySource): - """A copy activity SQL server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_reader_query: SQL reader query. Type: string (or Expression - with resultType string). - :type sql_reader_query: object - :param sql_reader_stored_procedure_name: Name of the stored procedure for - a SQL Database source. This cannot be used at the same time as - SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: object - :param stored_procedure_parameters: Value and type setting for stored - procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param produce_additional_types: Which additional types to produce. 
- :type produce_additional_types: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, - 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SqlServerSource, self).__init__(**kwargs) - self.sql_reader_query = kwargs.get('sql_reader_query', None) - self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - self.produce_additional_types = kwargs.get('produce_additional_types', None) - self.type = 'SqlServerSource' - - -class SqlServerStoredProcedureActivity(ExecutionActivity): - """SQL stored procedure activity type. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param stored_procedure_name: Required. Stored procedure name. Type: - string (or Expression with resultType string). - :type stored_procedure_name: object - :param stored_procedure_parameters: Value and type setting for stored - procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". 
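A sketch of a stored-procedure read through the SqlServerSource just removed, mirroring the documented parameter example; the procedure name is hypothetical, and this mode cannot be combined with sql_reader_query:

    from azure.mgmt.datafactory.models import (
        SqlServerSource, StoredProcedureParameter)

    sql_source = SqlServerSource(
        sql_reader_stored_procedure_name='dbo.UspGetChanges',
        stored_procedure_parameters={
            'Parameter1': StoredProcedureParameter(value='1', type='Int'),
        },
    )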
- :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'stored_procedure_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'stored_procedure_name': {'key': 'typeProperties.storedProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'typeProperties.storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - } - - def __init__(self, **kwargs): - super(SqlServerStoredProcedureActivity, self).__init__(**kwargs) - self.stored_procedure_name = kwargs.get('stored_procedure_name', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - self.type = 'SqlServerStoredProcedure' - - -class SqlServerTableDataset(Dataset): - """The on-premises SQL Server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param sql_server_table_dataset_schema: The schema name of the SQL Server - dataset. Type: string (or Expression with resultType string). - :type sql_server_table_dataset_schema: object - :param table: The table name of the SQL Server dataset. Type: string (or - Expression with resultType string). 
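A sketch of the SqlServerStoredProcedureActivity removed above; the activity, procedure and linked service names are illustrative:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, SqlServerStoredProcedureActivity,
        StoredProcedureParameter)

    refresh = SqlServerStoredProcedureActivity(
        name='RefreshAggregates',
        linked_service_name=LinkedServiceReference(reference_name='SqlServerLS'),
        stored_procedure_name='dbo.RefreshAggregates',
        stored_procedure_parameters={
            'BatchId': StoredProcedureParameter(value='1', type='Int'),
        },
    )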
- :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'sql_server_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SqlServerTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.sql_server_table_dataset_schema = kwargs.get('sql_server_table_dataset_schema', None) - self.table = kwargs.get('table', None) - self.type = 'SqlServerTable' - - -class SqlSink(CopySink): - """A copy activity SQL sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_writer_stored_procedure_name: SQL writer stored procedure name. - Type: string (or Expression with resultType string). - :type sql_writer_stored_procedure_name: object - :param sql_writer_table_type: SQL writer table type. Type: string (or - Expression with resultType string). - :type sql_writer_table_type: object - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression - with resultType string). - :type pre_copy_script: object - :param stored_procedure_parameters: SQL stored procedure parameters. - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param stored_procedure_table_type_parameter_name: The stored procedure - parameter name of the table type. Type: string (or Expression with - resultType string). - :type stored_procedure_table_type_parameter_name: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). 
- :type table_option: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, - 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SqlSink, self).__init__(**kwargs) - self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) - self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) - self.pre_copy_script = kwargs.get('pre_copy_script', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) - self.table_option = kwargs.get('table_option', None) - self.type = 'SqlSink' - - -class SqlSource(CopySource): - """A copy activity SQL source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_reader_query: SQL reader query. Type: string (or Expression - with resultType string). - :type sql_reader_query: object - :param sql_reader_stored_procedure_name: Name of the stored procedure for - a SQL Database source. This cannot be used at the same time as - SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: object - :param stored_procedure_parameters: Value and type setting for stored - procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". 
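To make the SqlSink options above concrete, here is a minimal construction sketch; the pre-copy script text and table name are hypothetical, and 'autoCreate' is the one table_option value the docstring says is currently supported.

    from azure.mgmt.datafactory.models import SqlSink

    # Sketch: a sink that runs a pre-copy script and auto-creates the
    # target table. The table name 'staging.orders' is a placeholder.
    sink = SqlSink(
        pre_copy_script='TRUNCATE TABLE staging.orders',
        table_option='autoCreate',
    )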
- :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, - 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - } - - def __init__(self, **kwargs): - super(SqlSource, self).__init__(**kwargs) - self.sql_reader_query = kwargs.get('sql_reader_query', None) - self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - self.type = 'SqlSource' - - -class SquareLinkedService(LinkedService): - """Square Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The URL of the Square instance. (i.e. - mystore.mysquare.com) - :type host: object - :param client_id: Required. The client ID associated with your Square - application. - :type client_id: object - :param client_secret: The client secret associated with your Square - application. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param redirect_uri: Required. The redirect URL assigned in the Square - application dashboard. (i.e. http://localhost:2500) - :type redirect_uri: object - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
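As a quick sketch of how these kwargs-based models compose, this is a SqlSource that reads via a stored procedure instead of a reader query; the procedure and parameter names are hypothetical, and 'Int' is one of the StoredProcedureParameterType values documented later in this file.

    from azure.mgmt.datafactory.models import SqlSource, StoredProcedureParameter

    # sql_reader_query must be left unset when
    # sql_reader_stored_procedure_name is used.
    source = SqlSource(
        sql_reader_stored_procedure_name='usp_GetOrders',  # hypothetical name
        stored_procedure_parameters={
            'OrderCount': StoredProcedureParameter(value='100', type='Int'),
        },
    )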
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'client_id': {'required': True}, - 'redirect_uri': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'redirect_uri': {'key': 'typeProperties.redirectUri', 'type': 'object'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SquareLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.redirect_uri = kwargs.get('redirect_uri', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Square' - - -class SquareObjectDataset(Dataset): - """Square Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). 
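A minimal construction sketch for the Square linked service defined above; all values are placeholders, and the client secret is wrapped in this package's SecureString model as the SecretBase-typed field expects.

    from azure.mgmt.datafactory.models import SecureString, SquareLinkedService

    square_ls = SquareLinkedService(
        host='mystore.mysquare.com',           # required
        client_id='<client-id>',               # required
        client_secret=SecureString(value='<client-secret>'),
        redirect_uri='http://localhost:2500',  # required
    )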
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SquareObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'SquareObject' - - -class SquareSource(CopySource): - """A copy activity Square Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SquareSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'SquareSource' - - -class SSISAccessCredential(Model): - """SSIS access credential. - - All required parameters must be populated in order to send to Azure. - - :param domain: Required. Domain for windows authentication. - :type domain: object - :param user_name: Required. UserName for windows authentication. - :type user_name: object - :param password: Required. Password for windows authentication.
- :type password: ~azure.mgmt.datafactory.models.SecureString - """ - - _validation = { - 'domain': {'required': True}, - 'user_name': {'required': True}, - 'password': {'required': True}, - } - - _attribute_map = { - 'domain': {'key': 'domain', 'type': 'object'}, - 'user_name': {'key': 'userName', 'type': 'object'}, - 'password': {'key': 'password', 'type': 'SecureString'}, - } - - def __init__(self, **kwargs): - super(SSISAccessCredential, self).__init__(**kwargs) - self.domain = kwargs.get('domain', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - - -class SsisObjectMetadata(Model): - """SSIS object metadata. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SsisEnvironment, SsisPackage, SsisProject, SsisFolder - - All required parameters must be populated in order to send to Azure. - - :param id: Metadata id. - :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. - :type description: str - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'Environment': 'SsisEnvironment', 'Package': 'SsisPackage', 'Project': 'SsisProject', 'Folder': 'SsisFolder'} - } - - def __init__(self, **kwargs): - super(SsisObjectMetadata, self).__init__(**kwargs) - self.id = kwargs.get('id', None) - self.name = kwargs.get('name', None) - self.description = kwargs.get('description', None) - self.type = None - - -class SsisEnvironment(SsisObjectMetadata): - """Ssis environment. - - All required parameters must be populated in order to send to Azure. - - :param id: Metadata id. - :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. - :type description: str - :param type: Required. Constant filled by server. - :type type: str - :param folder_id: Folder id which contains environment. - :type folder_id: long - :param variables: Variable in environment - :type variables: list[~azure.mgmt.datafactory.models.SsisVariable] - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_id': {'key': 'folderId', 'type': 'long'}, - 'variables': {'key': 'variables', 'type': '[SsisVariable]'}, - } - - def __init__(self, **kwargs): - super(SsisEnvironment, self).__init__(**kwargs) - self.folder_id = kwargs.get('folder_id', None) - self.variables = kwargs.get('variables', None) - self.type = 'Environment' - - -class SsisEnvironmentReference(Model): - """Ssis environment reference. - - :param id: Environment reference id. - :type id: long - :param environment_folder_name: Environment folder name. - :type environment_folder_name: str - :param environment_name: Environment name. 
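The _subtype_map on SsisObjectMetadata above is what lets msrest resolve the concrete subclass from the 'type' discriminator when deserializing a response. A minimal sketch, assuming msrest's Deserializer accepts an already-unpacked dict payload (the class map is built the same way the generated client builds its own):

    from msrest import Deserializer
    from azure.mgmt.datafactory import models

    # Collect every model class in the package, keyed by class name.
    client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
    deserialize = Deserializer(client_models)

    payload = {'id': 1, 'name': 'env1', 'type': 'Environment'}
    obj = deserialize('SsisObjectMetadata', payload)
    print(type(obj).__name__)  # SsisEnvironment, picked via the 'Environment' key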
- :type environment_name: str - :param reference_type: Reference type - :type reference_type: str - """ - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'environment_folder_name': {'key': 'environmentFolderName', 'type': 'str'}, - 'environment_name': {'key': 'environmentName', 'type': 'str'}, - 'reference_type': {'key': 'referenceType', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(SsisEnvironmentReference, self).__init__(**kwargs) - self.id = kwargs.get('id', None) - self.environment_folder_name = kwargs.get('environment_folder_name', None) - self.environment_name = kwargs.get('environment_name', None) - self.reference_type = kwargs.get('reference_type', None) - - -class SSISExecutionCredential(Model): - """SSIS package execution credential. - - All required parameters must be populated in order to send to Azure. - - :param domain: Required. Domain for windows authentication. - :type domain: object - :param user_name: Required. UserName for windows authentication. - :type user_name: object - :param password: Required. Password for windows authentication. - :type password: ~azure.mgmt.datafactory.models.SecureString - """ - - _validation = { - 'domain': {'required': True}, - 'user_name': {'required': True}, - 'password': {'required': True}, - } - - _attribute_map = { - 'domain': {'key': 'domain', 'type': 'object'}, - 'user_name': {'key': 'userName', 'type': 'object'}, - 'password': {'key': 'password', 'type': 'SecureString'}, - } - - def __init__(self, **kwargs): - super(SSISExecutionCredential, self).__init__(**kwargs) - self.domain = kwargs.get('domain', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - - -class SSISExecutionParameter(Model): - """SSIS execution parameter. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. SSIS package execution parameter value. Type: - string (or Expression with resultType string). - :type value: object - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SSISExecutionParameter, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - - -class SsisFolder(SsisObjectMetadata): - """Ssis folder. - - All required parameters must be populated in order to send to Azure. - - :param id: Metadata id. - :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. - :type description: str - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(SsisFolder, self).__init__(**kwargs) - self.type = 'Folder' - - -class SSISLogLocation(Model): - """SSIS package execution log location. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param log_path: Required. The SSIS package execution log path. Type: - string (or Expression with resultType string). - :type log_path: object - :ivar type: Required. The type of SSIS log location. Default value: "File" - .
- :vartype type: str - :param access_credential: The package execution log access credential. - :type access_credential: - ~azure.mgmt.datafactory.models.SSISAccessCredential - :param log_refresh_interval: Specifies the interval to refresh log. The - default interval is 5 minutes. Type: string (or Expression with resultType - string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type log_refresh_interval: object - """ - - _validation = { - 'log_path': {'required': True}, - 'type': {'required': True, 'constant': True}, - } - - _attribute_map = { - 'log_path': {'key': 'logPath', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, - 'log_refresh_interval': {'key': 'typeProperties.logRefreshInterval', 'type': 'object'}, - } - - type = "File" - - def __init__(self, **kwargs): - super(SSISLogLocation, self).__init__(**kwargs) - self.log_path = kwargs.get('log_path', None) - self.access_credential = kwargs.get('access_credential', None) - self.log_refresh_interval = kwargs.get('log_refresh_interval', None) - - -class SsisObjectMetadataListResponse(Model): - """A list of SSIS object metadata. - - :param value: List of SSIS object metadata. - :type value: list[~azure.mgmt.datafactory.models.SsisObjectMetadata] - :param next_link: The link to the next page of results, if any remaining - results exist. - :type next_link: str - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[SsisObjectMetadata]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(SsisObjectMetadataListResponse, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.next_link = kwargs.get('next_link', None) - - -class SsisObjectMetadataStatusResponse(Model): - """The status of the operation. - - :param status: The status of the operation. - :type status: str - :param name: The operation name. - :type name: str - :param properties: The operation properties. - :type properties: str - :param error: The operation error message. - :type error: str - """ - - _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(SsisObjectMetadataStatusResponse, self).__init__(**kwargs) - self.status = kwargs.get('status', None) - self.name = kwargs.get('name', None) - self.properties = kwargs.get('properties', None) - self.error = kwargs.get('error', None) - - -class SsisPackage(SsisObjectMetadata): - """Ssis Package. - - All required parameters must be populated in order to send to Azure. - - :param id: Metadata id. - :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. - :type description: str - :param type: Required. Constant filled by server. - :type type: str - :param folder_id: Folder id which contains package. - :type folder_id: long - :param project_version: Project version which contains package. - :type project_version: long - :param project_id: Project id which contains package. 
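Putting the SSIS log-location pieces above together; the file share path and credentials below are hypothetical, the constant type "File" is filled in by the model itself, and the refresh interval follows the d.hh:mm:ss pattern noted in the docstring.

    from azure.mgmt.datafactory.models import (
        SSISAccessCredential, SSISLogLocation, SecureString,
    )

    log_location = SSISLogLocation(
        log_path='\\\\fileshare\\ssis\\logs',  # hypothetical UNC path
        access_credential=SSISAccessCredential(
            domain='CONTOSO',
            user_name='ssis-runner',
            password=SecureString(value='<password>'),
        ),
        log_refresh_interval='00:10:00',  # refresh the log every 10 minutes
    )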
- :type project_id: long - :param parameters: Parameters in package - :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_id': {'key': 'folderId', 'type': 'long'}, - 'project_version': {'key': 'projectVersion', 'type': 'long'}, - 'project_id': {'key': 'projectId', 'type': 'long'}, - 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, - } - - def __init__(self, **kwargs): - super(SsisPackage, self).__init__(**kwargs) - self.folder_id = kwargs.get('folder_id', None) - self.project_version = kwargs.get('project_version', None) - self.project_id = kwargs.get('project_id', None) - self.parameters = kwargs.get('parameters', None) - self.type = 'Package' - - -class SSISPackageLocation(Model): - """SSIS package location. - - All required parameters must be populated in order to send to Azure. - - :param package_path: Required. The SSIS package path. Type: string (or - Expression with resultType string). - :type package_path: object - :param type: The type of SSIS package location. Possible values include: - 'SSISDB', 'File' - :type type: str or ~azure.mgmt.datafactory.models.SsisPackageLocationType - :param package_password: Password of the package. - :type package_password: ~azure.mgmt.datafactory.models.SecureString - :param access_credential: The package access credential. - :type access_credential: - ~azure.mgmt.datafactory.models.SSISAccessCredential - :param configuration_path: The configuration file of the package - execution. Type: string (or Expression with resultType string). - :type configuration_path: object - """ - - _validation = { - 'package_path': {'required': True}, - } - - _attribute_map = { - 'package_path': {'key': 'packagePath', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'package_password': {'key': 'typeProperties.packagePassword', 'type': 'SecureString'}, - 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, - 'configuration_path': {'key': 'typeProperties.configurationPath', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SSISPackageLocation, self).__init__(**kwargs) - self.package_path = kwargs.get('package_path', None) - self.type = kwargs.get('type', None) - self.package_password = kwargs.get('package_password', None) - self.access_credential = kwargs.get('access_credential', None) - self.configuration_path = kwargs.get('configuration_path', None) - - -class SsisParameter(Model): - """Ssis parameter. - - :param id: Parameter id. - :type id: long - :param name: Parameter name. - :type name: str - :param description: Parameter description. - :type description: str - :param data_type: Parameter type. - :type data_type: str - :param required: Whether parameter is required. - :type required: bool - :param sensitive: Whether parameter is sensitive. - :type sensitive: bool - :param design_default_value: Design default value of parameter. - :type design_default_value: str - :param default_value: Default value of parameter. - :type default_value: str - :param sensitive_default_value: Default sensitive value of parameter. - :type sensitive_default_value: str - :param value_type: Parameter value type. - :type value_type: str - :param value_set: Parameter value set. 
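A minimal sketch of the SSISPackageLocation defined above, for a package stored in SSISDB; the package path and password are placeholders, and 'SSISDB' is one of the two documented SsisPackageLocationType values.

    from azure.mgmt.datafactory.models import SSISPackageLocation, SecureString

    package_location = SSISPackageLocation(
        package_path='MyFolder/MyProject/MyPackage.dtsx',  # hypothetical path
        type='SSISDB',
        package_password=SecureString(value='<package-password>'),
    )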
- :type value_set: bool - :param variable: Parameter reference variable. - :type variable: str - """ - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'required': {'key': 'required', 'type': 'bool'}, - 'sensitive': {'key': 'sensitive', 'type': 'bool'}, - 'design_default_value': {'key': 'designDefaultValue', 'type': 'str'}, - 'default_value': {'key': 'defaultValue', 'type': 'str'}, - 'sensitive_default_value': {'key': 'sensitiveDefaultValue', 'type': 'str'}, - 'value_type': {'key': 'valueType', 'type': 'str'}, - 'value_set': {'key': 'valueSet', 'type': 'bool'}, - 'variable': {'key': 'variable', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(SsisParameter, self).__init__(**kwargs) - self.id = kwargs.get('id', None) - self.name = kwargs.get('name', None) - self.description = kwargs.get('description', None) - self.data_type = kwargs.get('data_type', None) - self.required = kwargs.get('required', None) - self.sensitive = kwargs.get('sensitive', None) - self.design_default_value = kwargs.get('design_default_value', None) - self.default_value = kwargs.get('default_value', None) - self.sensitive_default_value = kwargs.get('sensitive_default_value', None) - self.value_type = kwargs.get('value_type', None) - self.value_set = kwargs.get('value_set', None) - self.variable = kwargs.get('variable', None) - - -class SsisProject(SsisObjectMetadata): - """Ssis project. - - All required parameters must be populated in order to send to Azure. - - :param id: Metadata id. - :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. - :type description: str - :param type: Required. Constant filled by server. - :type type: str - :param folder_id: Folder id which contains project. - :type folder_id: long - :param version: Project version. - :type version: long - :param environment_refs: Environment reference in project - :type environment_refs: - list[~azure.mgmt.datafactory.models.SsisEnvironmentReference] - :param parameters: Parameters in project - :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_id': {'key': 'folderId', 'type': 'long'}, - 'version': {'key': 'version', 'type': 'long'}, - 'environment_refs': {'key': 'environmentRefs', 'type': '[SsisEnvironmentReference]'}, - 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, - } - - def __init__(self, **kwargs): - super(SsisProject, self).__init__(**kwargs) - self.folder_id = kwargs.get('folder_id', None) - self.version = kwargs.get('version', None) - self.environment_refs = kwargs.get('environment_refs', None) - self.parameters = kwargs.get('parameters', None) - self.type = 'Project' - - -class SSISPropertyOverride(Model): - """SSIS property override. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. SSIS package property override value. Type: string - (or Expression with resultType string). - :type value: object - :param is_sensitive: Whether SSIS package property override value is - sensitive data. 
Value will be encrypted in SSISDB if it is true - :type is_sensitive: bool - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': 'object'}, - 'is_sensitive': {'key': 'isSensitive', 'type': 'bool'}, - } - - def __init__(self, **kwargs): - super(SSISPropertyOverride, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.is_sensitive = kwargs.get('is_sensitive', None) - - -class SsisVariable(Model): - """Ssis variable. - - :param id: Variable id. - :type id: long - :param name: Variable name. - :type name: str - :param description: Variable description. - :type description: str - :param data_type: Variable type. - :type data_type: str - :param sensitive: Whether variable is sensitive. - :type sensitive: bool - :param value: Variable value. - :type value: str - :param sensitive_value: Variable sensitive value. - :type sensitive_value: str - """ - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'sensitive': {'key': 'sensitive', 'type': 'bool'}, - 'value': {'key': 'value', 'type': 'str'}, - 'sensitive_value': {'key': 'sensitiveValue', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(SsisVariable, self).__init__(**kwargs) - self.id = kwargs.get('id', None) - self.name = kwargs.get('name', None) - self.description = kwargs.get('description', None) - self.data_type = kwargs.get('data_type', None) - self.sensitive = kwargs.get('sensitive', None) - self.value = kwargs.get('value', None) - self.sensitive_value = kwargs.get('sensitive_value', None) - - -class StagingSettings(Model): - """Staging settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param linked_service_name: Required. Staging linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param path: The path to storage for storing the interim data. Type: - string (or Expression with resultType string). - :type path: object - :param enable_compression: Specifies whether to use compression when - copying data via an interim staging. Default value is false. Type: boolean - (or Expression with resultType boolean). - :type enable_compression: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'path': {'key': 'path', 'type': 'object'}, - 'enable_compression': {'key': 'enableCompression', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(StagingSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.linked_service_name = kwargs.get('linked_service_name', None) - self.path = kwargs.get('path', None) - self.enable_compression = kwargs.get('enable_compression', None) - - -class StoredProcedureParameter(Model): - """SQL stored procedure parameter. - - :param value: Stored procedure parameter value. Type: string (or - Expression with resultType string). - :type value: object - :param type: Stored procedure parameter type. 
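The StagingSettings model above is what a copy activity points at when staged copy is enabled; a minimal sketch, assuming a blob linked service named 'AzureBlobStaging' already exists in the factory (name and path are placeholders).

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, StagingSettings,
    )

    staging = StagingSettings(
        linked_service_name=LinkedServiceReference(reference_name='AzureBlobStaging'),
        path='staging-container/interim',  # hypothetical container/folder
        enable_compression=True,
    )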
Possible values include: - 'String', 'Int', 'Int64', 'Decimal', 'Guid', 'Boolean', 'Date' - :type type: str or - ~azure.mgmt.datafactory.models.StoredProcedureParameterType - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(StoredProcedureParameter, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.type = kwargs.get('type', None) - - -class SybaseLinkedService(LinkedService): - """Linked service for Sybase data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param server: Required. Server name for connection. Type: string (or - Expression with resultType string). - :type server: object - :param database: Required. Database name for connection. Type: string (or - Expression with resultType string). - :type database: object - :param schema: Schema name for connection. Type: string (or Expression - with resultType string). - :type schema: object - :param authentication_type: AuthenticationType to be used for connection. - Possible values include: 'Basic', 'Windows' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.SybaseAuthenticationType - :param username: Username for authentication. Type: string (or Expression - with resultType string). - :type username: object - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'database': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SybaseLinkedService, self).__init__(**kwargs) - self.server = kwargs.get('server', None) - self.database = kwargs.get('database', None) - self.schema = kwargs.get('schema', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Sybase' - - -class SybaseSource(CopySource): - """A copy activity source for Sybase databases. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SybaseSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'SybaseSource' - - -class SybaseTableDataset(Dataset): - """The Sybase table dataset. - - All required parameters must be populated in order to send to Azure. 
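A construction sketch for the Sybase linked service defined above; the host, database, and credentials are placeholders, and 'Basic' is one of the two documented SybaseAuthenticationType values.

    from azure.mgmt.datafactory.models import SecureString, SybaseLinkedService

    sybase_ls = SybaseLinkedService(
        server='sybase01.contoso.local',  # required; hypothetical host
        database='sales',                 # required
        authentication_type='Basic',
        username='report_reader',
        password=SecureString(value='<password>'),
    )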
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The Sybase table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SybaseTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'SybaseTable' - - -class TeradataLinkedService(LinkedService): - """Linked service for Teradata data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Teradata ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param server: Server name for connection. Type: string (or Expression - with resultType string). - :type server: object - :param authentication_type: AuthenticationType to be used for connection. 
- Possible values include: 'Basic', 'Windows' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.TeradataAuthenticationType - :param username: Username for authentication. Type: string (or Expression - with resultType string). - :type username: object - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(TeradataLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.server = kwargs.get('server', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Teradata' - - -class TeradataPartitionSettings(Model): - """The settings that will be leveraged for teradata source partitioning. - - :param partition_column_name: The name of the column that will be used for - proceeding range or hash partitioning. Type: string (or Expression with - resultType string). - :type partition_column_name: object - :param partition_upper_bound: The maximum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_upper_bound: object - :param partition_lower_bound: The minimum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_lower_bound: object - """ - - _attribute_map = { - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, - 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, - 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(TeradataPartitionSettings, self).__init__(**kwargs) - self.partition_column_name = kwargs.get('partition_column_name', None) - self.partition_upper_bound = kwargs.get('partition_upper_bound', None) - self.partition_lower_bound = kwargs.get('partition_lower_bound', None) - - -class TeradataSource(CopySource): - """A copy activity Teradata source. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Teradata query. Type: string (or Expression with resultType - string). - :type query: object - :param partition_option: The partition mechanism that will be used for - teradata read in parallel. Possible values include: 'None', 'Hash', - 'DynamicRange' - :type partition_option: str or - ~azure.mgmt.datafactory.models.TeradataPartitionOption - :param partition_settings: The settings that will be leveraged for - teradata source partitioning. - :type partition_settings: - ~azure.mgmt.datafactory.models.TeradataPartitionSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, - } - - def __init__(self, **kwargs): - super(TeradataSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.partition_option = kwargs.get('partition_option', None) - self.partition_settings = kwargs.get('partition_settings', None) - self.type = 'TeradataSource' - - -class TeradataTableDataset(Dataset): - """The Teradata database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. 
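The partition settings pair with TeradataSource as sketched below; the query, column name, and bounds are hypothetical, and the range bounds only matter when partition_option is 'DynamicRange'.

    from azure.mgmt.datafactory.models import (
        TeradataPartitionSettings, TeradataSource,
    )

    source = TeradataSource(
        query='SELECT * FROM sales.orders',  # hypothetical query
        partition_option='DynamicRange',
        partition_settings=TeradataPartitionSettings(
            partition_column_name='order_id',
            partition_lower_bound='1',
            partition_upper_bound='1000000',
        ),
    )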
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param database: The database name of Teradata. Type: string (or - Expression with resultType string). - :type database: object - :param table: The table name of Teradata. Type: string (or Expression with - resultType string). - :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(TeradataTableDataset, self).__init__(**kwargs) - self.database = kwargs.get('database', None) - self.table = kwargs.get('table', None) - self.type = 'TeradataTable' - - -class TextFormat(DatasetStorageFormat): - """The data stored in text format. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param serializer: Serializer. Type: string (or Expression with resultType - string). - :type serializer: object - :param deserializer: Deserializer. Type: string (or Expression with - resultType string). - :type deserializer: object - :param type: Required. Constant filled by server. - :type type: str - :param column_delimiter: The column delimiter. Type: string (or Expression - with resultType string). - :type column_delimiter: object - :param row_delimiter: The row delimiter. Type: string (or Expression with - resultType string). - :type row_delimiter: object - :param escape_char: The escape character. Type: string (or Expression with - resultType string). - :type escape_char: object - :param quote_char: The quote character. Type: string (or Expression with - resultType string). - :type quote_char: object - :param null_value: The null value string. Type: string (or Expression with - resultType string). - :type null_value: object - :param encoding_name: The code page name of the preferred encoding. If - missing, the default value is "utf-8", unless BOM denotes another Unicode - encoding. Refer to the "Name" column of the table in the following - link to set supported values: - https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string - (or Expression with resultType string). - :type encoding_name: object - :param treat_empty_as_null: Treat empty column values in the text file as - null. The default value is true. Type: boolean (or Expression with - resultType boolean). - :type treat_empty_as_null: object - :param skip_line_count: The number of lines/rows to be skipped when - parsing text files. The default value is 0. Type: integer (or Expression - with resultType integer).
- :type skip_line_count: object - :param first_row_as_header: When used as input, treat the first row of - data as headers. When used as output, write the headers into the output as - the first row of data. The default value is false. Type: boolean (or - Expression with resultType boolean). - :type first_row_as_header: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'column_delimiter': {'key': 'columnDelimiter', 'type': 'object'}, - 'row_delimiter': {'key': 'rowDelimiter', 'type': 'object'}, - 'escape_char': {'key': 'escapeChar', 'type': 'object'}, - 'quote_char': {'key': 'quoteChar', 'type': 'object'}, - 'null_value': {'key': 'nullValue', 'type': 'object'}, - 'encoding_name': {'key': 'encodingName', 'type': 'object'}, - 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, - 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, - 'first_row_as_header': {'key': 'firstRowAsHeader', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(TextFormat, self).__init__(**kwargs) - self.column_delimiter = kwargs.get('column_delimiter', None) - self.row_delimiter = kwargs.get('row_delimiter', None) - self.escape_char = kwargs.get('escape_char', None) - self.quote_char = kwargs.get('quote_char', None) - self.null_value = kwargs.get('null_value', None) - self.encoding_name = kwargs.get('encoding_name', None) - self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None) - self.skip_line_count = kwargs.get('skip_line_count', None) - self.first_row_as_header = kwargs.get('first_row_as_header', None) - self.type = 'TextFormat' - - -class TriggerDependencyReference(DependencyReference): - """Trigger referenced dependency. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: TumblingWindowTriggerDependencyReference - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. - :type type: str - :param reference_trigger: Required. Referenced trigger. - :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference - """ - - _validation = { - 'type': {'required': True}, - 'reference_trigger': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'}, - } - - _subtype_map = { - 'type': {'TumblingWindowTriggerDependencyReference': 'TumblingWindowTriggerDependencyReference'} - } - - def __init__(self, **kwargs): - super(TriggerDependencyReference, self).__init__(**kwargs) - self.reference_trigger = kwargs.get('reference_trigger', None) - self.type = 'TriggerDependencyReference' - - -class TriggerPipelineReference(Model): - """Pipeline that needs to be triggered with the given parameters. - - :param pipeline_reference: Pipeline reference. - :type pipeline_reference: ~azure.mgmt.datafactory.models.PipelineReference - :param parameters: Pipeline parameters.
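A small sketch of the TextFormat defined above, as it would hang off a dataset's format property; the delimiter and header choices are illustrative only.

    from azure.mgmt.datafactory.models import TextFormat

    text_format = TextFormat(
        column_delimiter=',',
        row_delimiter='\n',
        encoding_name='utf-8',
        first_row_as_header=True,
        skip_line_count=0,
    )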
- :type parameters: dict[str, object] - """ - - _attribute_map = { - 'pipeline_reference': {'key': 'pipelineReference', 'type': 'PipelineReference'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, - } - - def __init__(self, **kwargs): - super(TriggerPipelineReference, self).__init__(**kwargs) - self.pipeline_reference = kwargs.get('pipeline_reference', None) - self.parameters = kwargs.get('parameters', None) - - -class TriggerReference(Model): - """Trigger reference type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Trigger reference type. Default value: - "TriggerReference" . - :vartype type: str - :param reference_name: Required. Reference trigger name. - :type reference_name: str - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - } - - type = "TriggerReference" - - def __init__(self, **kwargs): - super(TriggerReference, self).__init__(**kwargs) - self.reference_name = kwargs.get('reference_name', None) - - -class TriggerResource(SubResource): - """Trigger resource type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Properties of the trigger. - :type properties: ~azure.mgmt.datafactory.models.Trigger - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'Trigger'}, - } - - def __init__(self, **kwargs): - super(TriggerResource, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - - -class TriggerRun(Model): - """Trigger runs. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar trigger_run_id: Trigger run id. - :vartype trigger_run_id: str - :ivar trigger_name: Trigger name. - :vartype trigger_name: str - :ivar trigger_type: Trigger type. - :vartype trigger_type: str - :ivar trigger_run_timestamp: Trigger run start time. - :vartype trigger_run_timestamp: datetime - :ivar status: Trigger run status. Possible values include: 'Succeeded', - 'Failed', 'Inprogress' - :vartype status: str or ~azure.mgmt.datafactory.models.TriggerRunStatus - :ivar message: Trigger error message. - :vartype message: str - :ivar properties: List of property name and value related to trigger run. - Name, value pair depends on type of trigger. - :vartype properties: dict[str, str] - :ivar triggered_pipelines: List of pipeline name and run Id triggered by - the trigger run. 
- :vartype triggered_pipelines: dict[str, str] - :ivar run_dimension: Run dimension for which trigger was fired. - :vartype run_dimension: dict[str, str] - :ivar dependency_status: Status of the upstream pipelines. - :vartype dependency_status: dict[str, object] - """ - - _validation = { - 'trigger_run_id': {'readonly': True}, - 'trigger_name': {'readonly': True}, - 'trigger_type': {'readonly': True}, - 'trigger_run_timestamp': {'readonly': True}, - 'status': {'readonly': True}, - 'message': {'readonly': True}, - 'properties': {'readonly': True}, - 'triggered_pipelines': {'readonly': True}, - 'run_dimension': {'readonly': True}, - 'dependency_status': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'trigger_run_id': {'key': 'triggerRunId', 'type': 'str'}, - 'trigger_name': {'key': 'triggerName', 'type': 'str'}, - 'trigger_type': {'key': 'triggerType', 'type': 'str'}, - 'trigger_run_timestamp': {'key': 'triggerRunTimestamp', 'type': 'iso-8601'}, - 'status': {'key': 'status', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'triggered_pipelines': {'key': 'triggeredPipelines', 'type': '{str}'}, - 'run_dimension': {'key': 'runDimension', 'type': '{str}'}, - 'dependency_status': {'key': 'dependencyStatus', 'type': '{object}'}, - } - - def __init__(self, **kwargs): - super(TriggerRun, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.trigger_run_id = None - self.trigger_name = None - self.trigger_type = None - self.trigger_run_timestamp = None - self.status = None - self.message = None - self.properties = None - self.triggered_pipelines = None - self.run_dimension = None - self.dependency_status = None - - -class TriggerRunsQueryResponse(Model): - """A list of trigger runs. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of trigger runs. - :type value: list[~azure.mgmt.datafactory.models.TriggerRun] - :param continuation_token: The continuation token for getting the next - page of results, if any remaining results exist, null otherwise. - :type continuation_token: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[TriggerRun]'}, - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(TriggerRunsQueryResponse, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.continuation_token = kwargs.get('continuation_token', None) - - -class TriggerSubscriptionOperationStatus(Model): - """Defines the response of a trigger subscription operation. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar trigger_name: Trigger name. - :vartype trigger_name: str - :ivar status: Event Subscription Status. 
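A minimal paging sketch over TriggerRunsQueryResponse, assuming the trigger_runs.query_by_factory operation and the RunFilterParameters model from elsewhere in this SDK (neither appears in this patch); resource names and dates are placeholders:

import datetime

from azure.mgmt.datafactory.models import RunFilterParameters


def iter_trigger_runs(client, resource_group, factory_name):
    # Follow the continuation token until the service reports no more pages.
    continuation = None
    while True:
        page = client.trigger_runs.query_by_factory(
            resource_group, factory_name,
            RunFilterParameters(
                last_updated_after=datetime.datetime(2019, 6, 1),
                last_updated_before=datetime.datetime(2019, 6, 7),
                continuation_token=continuation))
        for run in page.value:  # TriggerRun instances; all fields read-only
            yield run
        continuation = page.continuation_token
        if not continuation:  # null once the last page has been returned
            return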
Possible values include: - 'Enabled', 'Provisioning', 'Deprovisioning', 'Disabled', 'Unknown' - :vartype status: str or - ~azure.mgmt.datafactory.models.EventSubscriptionStatus - """ - - _validation = { - 'trigger_name': {'readonly': True}, - 'status': {'readonly': True}, - } - - _attribute_map = { - 'trigger_name': {'key': 'triggerName', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(TriggerSubscriptionOperationStatus, self).__init__(**kwargs) - self.trigger_name = None - self.status = None - - -class TumblingWindowTrigger(Trigger): - """Trigger that schedules pipeline runs for all fixed time interval windows - from a start time without gaps and also supports backfill scenarios (when - start time is in the past). - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when - Start/Stop APIs are called on the Trigger. Possible values include: - 'Started', 'Stopped', 'Disabled' - :vartype runtime_state: str or - ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the - trigger. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param pipeline: Required. Pipeline for which runs are created when an - event is fired for trigger window that is ready. - :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference - :param frequency: Required. The frequency of the time windows. Possible - values include: 'Minute', 'Hour' - :type frequency: str or - ~azure.mgmt.datafactory.models.TumblingWindowFrequency - :param interval: Required. The interval of the time windows. The minimum - interval allowed is 15 Minutes. - :type interval: int - :param start_time: Required. The start time for the time period for the - trigger during which events are fired for windows that are ready. Only UTC - time is currently supported. - :type start_time: datetime - :param end_time: The end time for the time period for the trigger during - which events are fired for windows that are ready. Only UTC time is - currently supported. - :type end_time: datetime - :param delay: Specifies how long the trigger waits past due time before - triggering new run. It doesn't alter window start and end time. The - default is 0. Type: string (or Expression with resultType string), - pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type delay: object - :param max_concurrency: Required. The max number of parallel time windows - (ready for execution) for which a new run is triggered. - :type max_concurrency: int - :param retry_policy: Retry policy that will be applied for failed pipeline - runs. - :type retry_policy: ~azure.mgmt.datafactory.models.RetryPolicy - :param depends_on: Triggers that this trigger depends on. Only tumbling - window triggers are supported. 
- :type depends_on: list[~azure.mgmt.datafactory.models.DependencyReference] - """ - - _validation = { - 'runtime_state': {'readonly': True}, - 'type': {'required': True}, - 'pipeline': {'required': True}, - 'frequency': {'required': True}, - 'interval': {'required': True}, - 'start_time': {'required': True}, - 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, - 'frequency': {'key': 'typeProperties.frequency', 'type': 'str'}, - 'interval': {'key': 'typeProperties.interval', 'type': 'int'}, - 'start_time': {'key': 'typeProperties.startTime', 'type': 'iso-8601'}, - 'end_time': {'key': 'typeProperties.endTime', 'type': 'iso-8601'}, - 'delay': {'key': 'typeProperties.delay', 'type': 'object'}, - 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, - 'retry_policy': {'key': 'typeProperties.retryPolicy', 'type': 'RetryPolicy'}, - 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[DependencyReference]'}, - } - - def __init__(self, **kwargs): - super(TumblingWindowTrigger, self).__init__(**kwargs) - self.pipeline = kwargs.get('pipeline', None) - self.frequency = kwargs.get('frequency', None) - self.interval = kwargs.get('interval', None) - self.start_time = kwargs.get('start_time', None) - self.end_time = kwargs.get('end_time', None) - self.delay = kwargs.get('delay', None) - self.max_concurrency = kwargs.get('max_concurrency', None) - self.retry_policy = kwargs.get('retry_policy', None) - self.depends_on = kwargs.get('depends_on', None) - self.type = 'TumblingWindowTrigger' - - -class TumblingWindowTriggerDependencyReference(TriggerDependencyReference): - """Referenced tumbling window trigger dependency. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. - :type type: str - :param reference_trigger: Required. Referenced trigger. - :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference - :param offset: Timespan applied to the start time of a tumbling window - when evaluating dependency. - :type offset: str - :param size: The size of the window when evaluating the dependency. If - undefined the frequency of the tumbling window will be used. 
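A minimal sketch of the trigger defined above: a backfilling tumbling window trigger that fires a pipeline run for every 15-minute window since a past start time (the pipeline name and window parameter are placeholders):

from datetime import datetime

from azure.mgmt.datafactory.models import (
    PipelineReference, RetryPolicy, TriggerPipelineReference,
    TumblingWindowTrigger)

trigger = TumblingWindowTrigger(
    pipeline=TriggerPipelineReference(
        pipeline_reference=PipelineReference(reference_name='copyPipeline'),
        parameters={'windowStart': '@trigger().outputs.windowStartTime'}),
    frequency='Minute',
    interval=15,                      # 15 minutes is the minimum interval
    start_time=datetime(2019, 1, 1),  # in the past, so windows are backfilled
    max_concurrency=10,               # must lie between 1 and 50
    delay='00:05:00',                 # wait 5 minutes past each window's due time
    retry_policy=RetryPolicy(count=3, interval_in_seconds=60))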
- :type size: str
- """
-
- _validation = {
- 'type': {'required': True},
- 'reference_trigger': {'required': True},
- 'offset': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'},
- 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'},
- }
-
- _attribute_map = {
- 'type': {'key': 'type', 'type': 'str'},
- 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'},
- 'offset': {'key': 'offset', 'type': 'str'},
- 'size': {'key': 'size', 'type': 'str'},
- }
-
- def __init__(self, **kwargs):
- super(TumblingWindowTriggerDependencyReference, self).__init__(**kwargs)
- self.offset = kwargs.get('offset', None)
- self.size = kwargs.get('size', None)
- self.type = 'TumblingWindowTriggerDependencyReference'
-
-
- class UntilActivity(ControlActivity):
- """This activity executes inner activities until the specified boolean
- expression evaluates to true or timeout is reached, whichever is earlier.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param name: Required. Activity name.
- :type name: str
- :param description: Activity description.
- :type description: str
- :param depends_on: Activity depends on condition.
- :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
- :param user_properties: Activity user properties.
- :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
- :param type: Required. Constant filled by server.
- :type type: str
- :param expression: Required. An expression that would evaluate to Boolean.
- The loop will continue until this expression evaluates to true.
- :type expression: ~azure.mgmt.datafactory.models.Expression
- :param timeout: Specifies the timeout for the activity to run. If there is
- no value specified, it takes the value of TimeSpan.FromDays(7) which is 1
- week as default. Type: string (or Expression with resultType string),
- pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type timeout: object
- :param activities: Required. List of activities to execute.
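A short sketch of the dependency reference above, assuming an upstream tumbling window trigger named 'upstreamTrigger'; offset and size must match the timespan pattern in _validation:

from azure.mgmt.datafactory.models import (
    TriggerReference, TumblingWindowTriggerDependencyReference)

dependency = TumblingWindowTriggerDependencyReference(
    reference_trigger=TriggerReference(reference_name='upstreamTrigger'),
    offset='01:00:00',  # shift the evaluated upstream window by one hour
    size='02:00:00')    # evaluate a two-hour window; defaults to the
                        # upstream trigger's frequency when omitted
# Pass it to a trigger via TumblingWindowTrigger(depends_on=[dependency], ...).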
- :type activities: list[~azure.mgmt.datafactory.models.Activity] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'expression': {'required': True}, - 'activities': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'expression': {'key': 'typeProperties.expression', 'type': 'Expression'}, - 'timeout': {'key': 'typeProperties.timeout', 'type': 'object'}, - 'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'}, - } - - def __init__(self, **kwargs): - super(UntilActivity, self).__init__(**kwargs) - self.expression = kwargs.get('expression', None) - self.timeout = kwargs.get('timeout', None) - self.activities = kwargs.get('activities', None) - self.type = 'Until' - - -class UpdateIntegrationRuntimeNodeRequest(Model): - """Update integration runtime node request. - - :param concurrent_jobs_limit: The number of concurrent jobs permitted to - run on the integration runtime node. Values between 1 and - maxConcurrentJobs(inclusive) are allowed. - :type concurrent_jobs_limit: int - """ - - _validation = { - 'concurrent_jobs_limit': {'minimum': 1}, - } - - _attribute_map = { - 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, - } - - def __init__(self, **kwargs): - super(UpdateIntegrationRuntimeNodeRequest, self).__init__(**kwargs) - self.concurrent_jobs_limit = kwargs.get('concurrent_jobs_limit', None) - - -class UpdateIntegrationRuntimeRequest(Model): - """Update integration runtime request. - - :param auto_update: Enables or disables the auto-update feature of the - self-hosted integration runtime. See - https://go.microsoft.com/fwlink/?linkid=854189. Possible values include: - 'On', 'Off' - :type auto_update: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate - :param update_delay_offset: The time offset (in hours) in the day, e.g., - PT03H is 3 hours. The integration runtime auto update will happen on that - time. - :type update_delay_offset: str - """ - - _attribute_map = { - 'auto_update': {'key': 'autoUpdate', 'type': 'str'}, - 'update_delay_offset': {'key': 'updateDelayOffset', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(UpdateIntegrationRuntimeRequest, self).__init__(**kwargs) - self.auto_update = kwargs.get('auto_update', None) - self.update_delay_offset = kwargs.get('update_delay_offset', None) - - -class UserAccessPolicy(Model): - """Get Data Plane read only token request definition. - - :param permissions: The string with permissions for Data Plane access. - Currently only 'r' is supported which grants read only access. - :type permissions: str - :param access_resource_path: The resource path to get access relative to - factory. Currently only empty string is supported which corresponds to the - factory resource. - :type access_resource_path: str - :param profile_name: The name of the profile. Currently only the default - is supported. The default value is DefaultProfile. - :type profile_name: str - :param start_time: Start time for the token. If not specified the current - time will be used. - :type start_time: str - :param expire_time: Expiration time for the token. 
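A minimal sketch of an Until loop, pairing the Expression model with a WaitActivity (defined later in this module) as the loop body; the pipeline variable name is a placeholder:

from azure.mgmt.datafactory.models import (
    Expression, UntilActivity, WaitActivity)

until = UntilActivity(
    name='PollUntilReady',
    expression=Expression(
        value="@equals(variables('status'), 'Ready')"),  # exits once true
    timeout='0.02:00:00',  # give up after 2 hours instead of the 7-day default
    activities=[WaitActivity(name='Backoff', wait_time_in_seconds=30)])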
Maximum duration for - the token is eight hours and by default the token will expire in eight - hours. - :type expire_time: str - """ - - _attribute_map = { - 'permissions': {'key': 'permissions', 'type': 'str'}, - 'access_resource_path': {'key': 'accessResourcePath', 'type': 'str'}, - 'profile_name': {'key': 'profileName', 'type': 'str'}, - 'start_time': {'key': 'startTime', 'type': 'str'}, - 'expire_time': {'key': 'expireTime', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(UserAccessPolicy, self).__init__(**kwargs) - self.permissions = kwargs.get('permissions', None) - self.access_resource_path = kwargs.get('access_resource_path', None) - self.profile_name = kwargs.get('profile_name', None) - self.start_time = kwargs.get('start_time', None) - self.expire_time = kwargs.get('expire_time', None) - - -class UserProperty(Model): - """User property. - - All required parameters must be populated in order to send to Azure. - - :param name: Required. User property name. - :type name: str - :param value: Required. User property value. Type: string (or Expression - with resultType string). - :type value: object - """ - - _validation = { - 'name': {'required': True}, - 'value': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(UserProperty, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.value = kwargs.get('value', None) - - -class ValidationActivity(ControlActivity): - """This activity verifies that an external resource exists. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param timeout: Specifies the timeout for the activity to run. If there is - no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 - week as default. Type: string (or Expression with resultType string), - pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type timeout: object - :param sleep: A delay in seconds between validation attempts. If no value - is specified, 10 seconds will be used as the default. Type: integer (or - Expression with resultType integer). - :type sleep: object - :param minimum_size: Can be used if dataset points to a file. The file - must be greater than or equal in size to the value specified. Type: - integer (or Expression with resultType integer). - :type minimum_size: object - :param child_items: Can be used if dataset points to a folder. If set to - true, the folder must have at least one file. If set to false, the folder - must be empty. Type: boolean (or Expression with resultType boolean). - :type child_items: object - :param dataset: Required. Validation activity dataset reference. 
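A minimal sketch of a read-only data plane token request; the factories.get_data_plane_access call in the trailing comment is an assumption about this SDK's operations group, not something shown in this patch:

from azure.mgmt.datafactory.models import UserAccessPolicy

policy = UserAccessPolicy(
    permissions='r',          # only read access is currently supported
    access_resource_path='',  # empty string targets the factory itself
    profile_name='DefaultProfile',
    start_time='2019-06-07T00:00:00.000Z',
    expire_time='2019-06-07T08:00:00.000Z')  # eight hours is the maximum
# token = client.factories.get_data_plane_access(rg, factory_name, policy)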
- :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'dataset': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'timeout': {'key': 'typeProperties.timeout', 'type': 'object'}, - 'sleep': {'key': 'typeProperties.sleep', 'type': 'object'}, - 'minimum_size': {'key': 'typeProperties.minimumSize', 'type': 'object'}, - 'child_items': {'key': 'typeProperties.childItems', 'type': 'object'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, - } - - def __init__(self, **kwargs): - super(ValidationActivity, self).__init__(**kwargs) - self.timeout = kwargs.get('timeout', None) - self.sleep = kwargs.get('sleep', None) - self.minimum_size = kwargs.get('minimum_size', None) - self.child_items = kwargs.get('child_items', None) - self.dataset = kwargs.get('dataset', None) - self.type = 'Validation' - - -class VariableSpecification(Model): - """Definition of a single variable for a Pipeline. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Variable type. Possible values include: 'String', - 'Bool', 'Array' - :type type: str or ~azure.mgmt.datafactory.models.VariableType - :param default_value: Default value of variable. - :type default_value: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'default_value': {'key': 'defaultValue', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(VariableSpecification, self).__init__(**kwargs) - self.type = kwargs.get('type', None) - self.default_value = kwargs.get('default_value', None) - - -class VerticaLinkedService(LinkedService): - """Vertica linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection - string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
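A minimal sketch of the validation activity above, polling a hypothetical folder dataset until it contains at least one file:

from azure.mgmt.datafactory.models import DatasetReference, ValidationActivity

validation = ValidationActivity(
    name='WaitForInputFiles',
    dataset=DatasetReference(reference_name='InputFolderDataset'),
    timeout='0.00:30:00',  # stop checking after 30 minutes
    sleep=15,              # seconds between validation attempts
    child_items=True)      # require the folder to hold at least one file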
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(VerticaLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.pwd = kwargs.get('pwd', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Vertica' - - -class VerticaSource(CopySource): - """A copy activity Vertica source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(VerticaSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'VerticaSource' - - -class VerticaTableDataset(Dataset): - """Vertica dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
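A minimal sketch wiring the two Vertica models together; the connection string, Key Vault names and query are placeholders:

from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference, LinkedServiceReference,
    VerticaLinkedService, VerticaSource)

vertica_ls = VerticaLinkedService(
    connection_string='Server=vertica01;Port=5433;Database=sales;UID=loader',
    pwd=AzureKeyVaultSecretReference(  # keep the password out of the definition
        store=LinkedServiceReference(reference_name='MyKeyVault'),
        secret_name='vertica-password'))

source = VerticaSource(query='SELECT * FROM public.orders')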
- :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of the Vertica. Type: string (or Expression - with resultType string). - :type table: object - :param vertica_table_dataset_schema: The schema name of the Vertica. Type: - string (or Expression with resultType string). - :type vertica_table_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'vertica_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(VerticaTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.vertica_table_dataset_schema = kwargs.get('vertica_table_dataset_schema', None) - self.type = 'VerticaTable' - - -class WaitActivity(ControlActivity): - """This activity suspends pipeline execution for the specified interval. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param wait_time_in_seconds: Required. Duration in seconds. 
- :type wait_time_in_seconds: int - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'wait_time_in_seconds': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'wait_time_in_seconds': {'key': 'typeProperties.waitTimeInSeconds', 'type': 'int'}, - } - - def __init__(self, **kwargs): - super(WaitActivity, self).__init__(**kwargs) - self.wait_time_in_seconds = kwargs.get('wait_time_in_seconds', None) - self.type = 'Wait' - - -class WebActivity(ExecutionActivity): - """Web activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param method: Required. Rest API method for target endpoint. Possible - values include: 'GET', 'POST', 'PUT', 'DELETE' - :type method: str or ~azure.mgmt.datafactory.models.WebActivityMethod - :param url: Required. Web activity target endpoint and path. Type: string - (or Expression with resultType string). - :type url: object - :param headers: Represents the headers that will be sent to the request. - For example, to set the language and type on a request: "headers" : { - "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: - string (or Expression with resultType string). - :type headers: object - :param body: Represents the payload that will be sent to the endpoint. - Required for POST/PUT method, not allowed for GET method Type: string (or - Expression with resultType string). - :type body: object - :param authentication: Authentication method used for calling the - endpoint. - :type authentication: - ~azure.mgmt.datafactory.models.WebActivityAuthentication - :param datasets: List of datasets passed to web endpoint. - :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference] - :param linked_services: List of linked services passed to web endpoint. 
- :type linked_services: - list[~azure.mgmt.datafactory.models.LinkedServiceReference] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'method': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'method': {'key': 'typeProperties.method', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, - 'body': {'key': 'typeProperties.body', 'type': 'object'}, - 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'}, - 'datasets': {'key': 'typeProperties.datasets', 'type': '[DatasetReference]'}, - 'linked_services': {'key': 'typeProperties.linkedServices', 'type': '[LinkedServiceReference]'}, - } - - def __init__(self, **kwargs): - super(WebActivity, self).__init__(**kwargs) - self.method = kwargs.get('method', None) - self.url = kwargs.get('url', None) - self.headers = kwargs.get('headers', None) - self.body = kwargs.get('body', None) - self.authentication = kwargs.get('authentication', None) - self.datasets = kwargs.get('datasets', None) - self.linked_services = kwargs.get('linked_services', None) - self.type = 'WebActivity' - - -class WebActivityAuthentication(Model): - """Web activity authentication properties. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Web activity authentication - (Basic/ClientCertificate/MSI) - :type type: str - :param pfx: Base64-encoded contents of a PFX file. - :type pfx: ~azure.mgmt.datafactory.models.SecureString - :param username: Web activity authentication user name for basic - authentication. - :type username: str - :param password: Password for the PFX file or basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecureString - :param resource: Resource for which Azure Auth token will be requested - when using MSI Authentication. - :type resource: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'pfx': {'key': 'pfx', 'type': 'SecureString'}, - 'username': {'key': 'username', 'type': 'str'}, - 'password': {'key': 'password', 'type': 'SecureString'}, - 'resource': {'key': 'resource', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(WebActivityAuthentication, self).__init__(**kwargs) - self.type = kwargs.get('type', None) - self.pfx = kwargs.get('pfx', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.resource = kwargs.get('resource', None) - - -class WebLinkedServiceTypeProperties(Model): - """Base definition of WebLinkedServiceTypeProperties, this typeProperties is - polymorphic based on authenticationType, so not flattened in SDK models. - - You probably want to use the sub-classes and not this class directly. 
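A minimal sketch of the activity above issuing a POST with MSI authentication; the endpoint and resource are placeholders:

from azure.mgmt.datafactory.models import (
    WebActivity, WebActivityAuthentication)

web = WebActivity(
    name='NotifyService',
    method='POST',
    url='https://example.contoso.com/api/notify',
    headers={'Content-Type': 'application/json'},
    body='{"runId": "@{pipeline().RunId}"}',  # required for POST/PUT
    authentication=WebActivityAuthentication(
        type='MSI', resource='https://management.azure.com/'))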
Known - sub-classes are: WebClientCertificateAuthentication, - WebBasicAuthentication, WebAnonymousAuthentication - - All required parameters must be populated in order to send to Azure. - - :param url: Required. The URL of the web service endpoint, e.g. - http://www.microsoft.com . Type: string (or Expression with resultType - string). - :type url: object - :param authentication_type: Required. Constant filled by server. - :type authentication_type: str - """ - - _validation = { - 'url': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'url': {'key': 'url', 'type': 'object'}, - 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, - } - - _subtype_map = { - 'authentication_type': {'ClientCertificate': 'WebClientCertificateAuthentication', 'Basic': 'WebBasicAuthentication', 'Anonymous': 'WebAnonymousAuthentication'} - } - - def __init__(self, **kwargs): - super(WebLinkedServiceTypeProperties, self).__init__(**kwargs) - self.url = kwargs.get('url', None) - self.authentication_type = None - - -class WebAnonymousAuthentication(WebLinkedServiceTypeProperties): - """A WebLinkedService that uses anonymous authentication to communicate with - an HTTP endpoint. - - All required parameters must be populated in order to send to Azure. - - :param url: Required. The URL of the web service endpoint, e.g. - http://www.microsoft.com . Type: string (or Expression with resultType - string). - :type url: object - :param authentication_type: Required. Constant filled by server. - :type authentication_type: str - """ - - _validation = { - 'url': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'url': {'key': 'url', 'type': 'object'}, - 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(WebAnonymousAuthentication, self).__init__(**kwargs) - self.authentication_type = 'Anonymous' - - -class WebBasicAuthentication(WebLinkedServiceTypeProperties): - """A WebLinkedService that uses basic authentication to communicate with an - HTTP endpoint. - - All required parameters must be populated in order to send to Azure. - - :param url: Required. The URL of the web service endpoint, e.g. - http://www.microsoft.com . Type: string (or Expression with resultType - string). - :type url: object - :param authentication_type: Required. Constant filled by server. - :type authentication_type: str - :param username: Required. User name for Basic authentication. Type: - string (or Expression with resultType string). - :type username: object - :param password: Required. The password for Basic authentication. 
- :type password: ~azure.mgmt.datafactory.models.SecretBase - """ - - _validation = { - 'url': {'required': True}, - 'authentication_type': {'required': True}, - 'username': {'required': True}, - 'password': {'required': True}, - } - - _attribute_map = { - 'url': {'key': 'url', 'type': 'object'}, - 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, - 'username': {'key': 'username', 'type': 'object'}, - 'password': {'key': 'password', 'type': 'SecretBase'}, - } - - def __init__(self, **kwargs): - super(WebBasicAuthentication, self).__init__(**kwargs) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.authentication_type = 'Basic' - - -class WebClientCertificateAuthentication(WebLinkedServiceTypeProperties): - """A WebLinkedService that uses client certificate based authentication to - communicate with an HTTP endpoint. This scheme follows mutual - authentication; the server must also provide valid credentials to the - client. - - All required parameters must be populated in order to send to Azure. - - :param url: Required. The URL of the web service endpoint, e.g. - http://www.microsoft.com . Type: string (or Expression with resultType - string). - :type url: object - :param authentication_type: Required. Constant filled by server. - :type authentication_type: str - :param pfx: Required. Base64-encoded contents of a PFX file. - :type pfx: ~azure.mgmt.datafactory.models.SecretBase - :param password: Required. Password for the PFX file. - :type password: ~azure.mgmt.datafactory.models.SecretBase - """ - - _validation = { - 'url': {'required': True}, - 'authentication_type': {'required': True}, - 'pfx': {'required': True}, - 'password': {'required': True}, - } - - _attribute_map = { - 'url': {'key': 'url', 'type': 'object'}, - 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, - 'pfx': {'key': 'pfx', 'type': 'SecretBase'}, - 'password': {'key': 'password', 'type': 'SecretBase'}, - } - - def __init__(self, **kwargs): - super(WebClientCertificateAuthentication, self).__init__(**kwargs) - self.pfx = kwargs.get('pfx', None) - self.password = kwargs.get('password', None) - self.authentication_type = 'ClientCertificate' - - -class WebHookActivity(ControlActivity): - """WebHook activity. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :ivar method: Required. Rest API method for target endpoint. Default - value: "POST" . - :vartype method: str - :param url: Required. WebHook activity target endpoint and path. Type: - string (or Expression with resultType string). - :type url: object - :param timeout: The timeout within which the webhook should be called - back. If there is no value specified, it defaults to 10 minutes. Type: - string. Pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type timeout: str - :param headers: Represents the headers that will be sent to the request. - For example, to set the language and type on a request: "headers" : { - "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: - string (or Expression with resultType string). - :type headers: object - :param body: Represents the payload that will be sent to the endpoint. - Required for POST/PUT method, not allowed for GET method Type: string (or - Expression with resultType string). - :type body: object - :param authentication: Authentication method used for calling the - endpoint. - :type authentication: - ~azure.mgmt.datafactory.models.WebActivityAuthentication - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'method': {'required': True, 'constant': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'method': {'key': 'typeProperties.method', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'timeout': {'key': 'typeProperties.timeout', 'type': 'str'}, - 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, - 'body': {'key': 'typeProperties.body', 'type': 'object'}, - 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'}, - } - - method = "POST" - - def __init__(self, **kwargs): - super(WebHookActivity, self).__init__(**kwargs) - self.url = kwargs.get('url', None) - self.timeout = kwargs.get('timeout', None) - self.headers = kwargs.get('headers', None) - self.body = kwargs.get('body', None) - self.authentication = kwargs.get('authentication', None) - self.type = 'WebHook' - - -class WebLinkedService(LinkedService): - """Web linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param type_properties: Required. Web linked service properties. 
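A minimal sketch of the webhook activity above; because method is a server-filled constant ("POST"), only the endpoint, payload and callback timeout are supplied (all placeholder values):

from azure.mgmt.datafactory.models import WebHookActivity

webhook = WebHookActivity(
    name='AwaitApproval',
    url='https://example.contoso.com/api/approval',
    timeout='00:20:00',  # callback window; defaults to 10 minutes if omitted
    headers={'Content-Type': 'application/json'},
    body='{"runId": "@{pipeline().RunId}"}')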
- :type type_properties: - ~azure.mgmt.datafactory.models.WebLinkedServiceTypeProperties - """ - - _validation = { - 'type': {'required': True}, - 'type_properties': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'type_properties': {'key': 'typeProperties', 'type': 'WebLinkedServiceTypeProperties'}, - } - - def __init__(self, **kwargs): - super(WebLinkedService, self).__init__(**kwargs) - self.type_properties = kwargs.get('type_properties', None) - self.type = 'Web' - - -class WebSource(CopySource): - """A copy activity source for web page table. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(WebSource, self).__init__(**kwargs) - self.type = 'WebSource' - - -class WebTableDataset(Dataset): - """The dataset points to a HTML table in the web page. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. 
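A minimal sketch of the polymorphic typeProperties: the linked service carries one of the authentication sub-classes defined above (credentials are placeholders):

from azure.mgmt.datafactory.models import (
    SecureString, WebBasicAuthentication, WebLinkedService)

web_ls = WebLinkedService(
    type_properties=WebBasicAuthentication(
        url='http://www.microsoft.com',
        username='reader',
        password=SecureString(value='<placeholder>')))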
If not specified,
- Dataset will appear at the root level.
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
- :param type: Required. Constant filled by server.
- :type type: str
- :param index: Required. The zero-based index of the table in the web page.
- Type: integer (or Expression with resultType integer), minimum: 0.
- :type index: object
- :param path: The relative URL to the web page from the linked service URL.
- Type: string (or Expression with resultType string).
- :type path: object
- """
-
- _validation = {
- 'linked_service_name': {'required': True},
- 'type': {'required': True},
- 'index': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'description': {'key': 'description', 'type': 'str'},
- 'structure': {'key': 'structure', 'type': 'object'},
- 'schema': {'key': 'schema', 'type': 'object'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
- 'type': {'key': 'type', 'type': 'str'},
- 'index': {'key': 'typeProperties.index', 'type': 'object'},
- 'path': {'key': 'typeProperties.path', 'type': 'object'},
- }
-
- def __init__(self, **kwargs):
- super(WebTableDataset, self).__init__(**kwargs)
- self.index = kwargs.get('index', None)
- self.path = kwargs.get('path', None)
- self.type = 'WebTable'
-
-
- class XeroLinkedService(LinkedService):
- """Xero Service linked service.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param connect_via: The integration runtime reference.
- :type connect_via:
- ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
- :param description: Linked service description.
- :type description: str
- :param parameters: Parameters for linked service.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- linked service.
- :type annotations: list[object]
- :param type: Required. Constant filled by server.
- :type type: str
- :param host: Required. The endpoint of the Xero server. (i.e.
- api.xero.com)
- :type host: object
- :param consumer_key: The consumer key associated with the Xero
- application.
- :type consumer_key: ~azure.mgmt.datafactory.models.SecretBase
- :param private_key: The private key from the .pem file that was generated
- for your Xero private application. You must include all the text from the
- .pem file, including the Unix line endings (\n).
- :type private_key: ~azure.mgmt.datafactory.models.SecretBase
- :param use_encrypted_endpoints: Specifies whether the data source
- endpoints are encrypted using HTTPS. The default value is true.
- :type use_encrypted_endpoints: object
- :param use_host_verification: Specifies whether to require the host name
- in the server's certificate to match the host name of the server when
- connecting over SSL. The default value is true.
- :type use_host_verification: object
- :param use_peer_verification: Specifies whether to verify the identity of
- the server when connecting over SSL. The default value is true.
- :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'SecretBase'}, - 'private_key': {'key': 'typeProperties.privateKey', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(XeroLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.consumer_key = kwargs.get('consumer_key', None) - self.private_key = kwargs.get('private_key', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Xero' - - -class XeroObjectDataset(Dataset): - """Xero Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). 
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(XeroObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'XeroObject' - - -class XeroSource(CopySource): - """A copy activity Xero Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(XeroSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'XeroSource' - - -class ZohoLinkedService(LinkedService): - """Zoho server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param endpoint: Required. The endpoint of the Zoho server. (i.e. 
- crm.zoho.com/crm/private) - :type endpoint: object - :param access_token: The access token for Zoho authentication. - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ZohoLinkedService, self).__init__(**kwargs) - self.endpoint = kwargs.get('endpoint', None) - self.access_token = kwargs.get('access_token', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Zoho' - - -class ZohoObjectDataset(Dataset): - """Zoho server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ZohoObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.type = 'ZohoObject' - - -class ZohoSource(CopySource): - """A copy activity Zoho server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
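For orientation, a minimal sketch of how these Python 2-compatible models are constructed: everything flows through **kwargs and is read back with kwargs.get(...), as in ZohoObjectDataset.__init__ above. The reference name "ZohoLinkedService1" and the table "Accounts" are illustrative placeholders.

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, ZohoObjectDataset)

    dataset = ZohoObjectDataset(
        linked_service_name=LinkedServiceReference(
            reference_name="ZohoLinkedService1"),
        table_name="Accounts",
    )
    assert dataset.type == 'ZohoObject'  # discriminator fixed by the constructor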
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(ZohoSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.type = 'ZohoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py deleted file mode 100644 index 7d89d02a63b8..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py +++ /dev/null @@ -1,29103 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model -from msrest.exceptions import HttpOperationError - - -class AccessPolicyResponse(Model): - """Get Data Plane read only token response definition. - - :param policy: The user access policy. - :type policy: ~azure.mgmt.datafactory.models.UserAccessPolicy - :param access_token: Data Plane read only access token. - :type access_token: str - :param data_plane_url: Data Plane service base URL. - :type data_plane_url: str - """ - - _attribute_map = { - 'policy': {'key': 'policy', 'type': 'UserAccessPolicy'}, - 'access_token': {'key': 'accessToken', 'type': 'str'}, - 'data_plane_url': {'key': 'dataPlaneUrl', 'type': 'str'}, - } - - def __init__(self, *, policy=None, access_token: str=None, data_plane_url: str=None, **kwargs) -> None: - super(AccessPolicyResponse, self).__init__(**kwargs) - self.policy = policy - self.access_token = access_token - self.data_plane_url = data_plane_url - - -class Activity(Model): - """A pipeline activity. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ExecutionActivity, ControlActivity - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. 
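The file deleted below, _models_py3.py, mirrored every model in the package with keyword-only, type-annotated constructors; at import time the package loaded one variant or the other depending on the running Python version. A sketch of the py3-style construction using the AccessPolicyResponse model shown above (the token and URL values are placeholders):

    from azure.mgmt.datafactory.models import AccessPolicyResponse

    # Keyword-only, annotated parameters replace the **kwargs +
    # kwargs.get(...) pattern of the Python 2-compatible models.
    response = AccessPolicyResponse(
        access_token="<data-plane-token>",
        data_plane_url="https://adf.azure.com",  # illustrative URL
    )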
- :type type: str - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'Execution': 'ExecutionActivity', 'Container': 'ControlActivity'} - } - - def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, **kwargs) -> None: - super(Activity, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.name = name - self.description = description - self.depends_on = depends_on - self.user_properties = user_properties - self.type = None - - -class ActivityDependency(Model): - """Activity dependency information. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param activity: Required. Activity name. - :type activity: str - :param dependency_conditions: Required. Match-Condition for the - dependency. - :type dependency_conditions: list[str or - ~azure.mgmt.datafactory.models.DependencyCondition] - """ - - _validation = { - 'activity': {'required': True}, - 'dependency_conditions': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'activity': {'key': 'activity', 'type': 'str'}, - 'dependency_conditions': {'key': 'dependencyConditions', 'type': '[str]'}, - } - - def __init__(self, *, activity: str, dependency_conditions, additional_properties=None, **kwargs) -> None: - super(ActivityDependency, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.activity = activity - self.dependency_conditions = dependency_conditions - - -class ActivityPolicy(Model): - """Execution policy for an activity. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param timeout: Specifies the timeout for the activity to run. The default - timeout is 7 days. Type: string (or Expression with resultType string), - pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type timeout: object - :param retry: Maximum ordinary retry attempts. Default is 0. Type: integer - (or Expression with resultType integer), minimum: 0. - :type retry: object - :param retry_interval_in_seconds: Interval between each retry attempt (in - seconds). The default is 30 sec. - :type retry_interval_in_seconds: int - :param secure_input: When set to true, Input from activity is considered - as secure and will not be logged to monitoring. - :type secure_input: bool - :param secure_output: When set to true, Output from activity is considered - as secure and will not be logged to monitoring. 
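The _subtype_map above is what drives polymorphic deserialization: msrest reads the "type" discriminator from the payload and, walking the nested subtype maps, instantiates the matching subclass. A hedged sketch, assuming msrest's Model.deserialize resolves discriminators recursively (WaitActivity sits under ControlActivity in this model set):

    from azure.mgmt.datafactory.models import Activity, ControlActivity

    payload = {
        "name": "wait1",
        "type": "Wait",  # wire discriminator, resolved via the subtype maps
        "typeProperties": {"waitTimeInSeconds": 30},
    }
    activity = Activity.deserialize(payload)
    assert isinstance(activity, ControlActivity)  # concretely, a WaitActivity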
- :type secure_output: bool - """ - - _validation = { - 'retry_interval_in_seconds': {'maximum': 86400, 'minimum': 30}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'timeout': {'key': 'timeout', 'type': 'object'}, - 'retry': {'key': 'retry', 'type': 'object'}, - 'retry_interval_in_seconds': {'key': 'retryIntervalInSeconds', 'type': 'int'}, - 'secure_input': {'key': 'secureInput', 'type': 'bool'}, - 'secure_output': {'key': 'secureOutput', 'type': 'bool'}, - } - - def __init__(self, *, additional_properties=None, timeout=None, retry=None, retry_interval_in_seconds: int=None, secure_input: bool=None, secure_output: bool=None, **kwargs) -> None: - super(ActivityPolicy, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.timeout = timeout - self.retry = retry - self.retry_interval_in_seconds = retry_interval_in_seconds - self.secure_input = secure_input - self.secure_output = secure_output - - -class ActivityRun(Model): - """Information about an activity run in a pipeline. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar pipeline_name: The name of the pipeline. - :vartype pipeline_name: str - :ivar pipeline_run_id: The id of the pipeline run. - :vartype pipeline_run_id: str - :ivar activity_name: The name of the activity. - :vartype activity_name: str - :ivar activity_type: The type of the activity. - :vartype activity_type: str - :ivar activity_run_id: The id of the activity run. - :vartype activity_run_id: str - :ivar linked_service_name: The name of the compute linked service. - :vartype linked_service_name: str - :ivar status: The status of the activity run. - :vartype status: str - :ivar activity_run_start: The start time of the activity run in 'ISO 8601' - format. - :vartype activity_run_start: datetime - :ivar activity_run_end: The end time of the activity run in 'ISO 8601' - format. - :vartype activity_run_end: datetime - :ivar duration_in_ms: The duration of the activity run. - :vartype duration_in_ms: int - :ivar input: The input for the activity. - :vartype input: object - :ivar output: The output for the activity. - :vartype output: object - :ivar error: The error if any from the activity run. 
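The _validation table above is enforced client-side when the model is serialized, so retry_interval_in_seconds must land in [30, 86400]. A minimal example of a policy that retries three times at one-minute intervals:

    from azure.mgmt.datafactory.models import ActivityPolicy

    policy = ActivityPolicy(
        timeout="0.12:00:00",          # 12 hours, in the documented timespan pattern
        retry=3,                       # maximum ordinary retry attempts
        retry_interval_in_seconds=60,  # must lie within [30, 86400]
        secure_output=True,            # keep activity output out of monitoring logs
    )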
- :vartype error: object - """ - - _validation = { - 'pipeline_name': {'readonly': True}, - 'pipeline_run_id': {'readonly': True}, - 'activity_name': {'readonly': True}, - 'activity_type': {'readonly': True}, - 'activity_run_id': {'readonly': True}, - 'linked_service_name': {'readonly': True}, - 'status': {'readonly': True}, - 'activity_run_start': {'readonly': True}, - 'activity_run_end': {'readonly': True}, - 'duration_in_ms': {'readonly': True}, - 'input': {'readonly': True}, - 'output': {'readonly': True}, - 'error': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, - 'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'}, - 'activity_name': {'key': 'activityName', 'type': 'str'}, - 'activity_type': {'key': 'activityType', 'type': 'str'}, - 'activity_run_id': {'key': 'activityRunId', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'activity_run_start': {'key': 'activityRunStart', 'type': 'iso-8601'}, - 'activity_run_end': {'key': 'activityRunEnd', 'type': 'iso-8601'}, - 'duration_in_ms': {'key': 'durationInMs', 'type': 'int'}, - 'input': {'key': 'input', 'type': 'object'}, - 'output': {'key': 'output', 'type': 'object'}, - 'error': {'key': 'error', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(ActivityRun, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.pipeline_name = None - self.pipeline_run_id = None - self.activity_name = None - self.activity_type = None - self.activity_run_id = None - self.linked_service_name = None - self.status = None - self.activity_run_start = None - self.activity_run_end = None - self.duration_in_ms = None - self.input = None - self.output = None - self.error = None - - -class ActivityRunsQueryResponse(Model): - """A list activity runs. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of activity runs. - :type value: list[~azure.mgmt.datafactory.models.ActivityRun] - :param continuation_token: The continuation token for getting the next - page of results, if any remaining results exist, null otherwise. - :type continuation_token: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[ActivityRun]'}, - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, - } - - def __init__(self, *, value, continuation_token: str=None, **kwargs) -> None: - super(ActivityRunsQueryResponse, self).__init__(**kwargs) - self.value = value - self.continuation_token = continuation_token - - -class LinkedService(Model): - """The Azure Data Factory nested object which contains the information and - credential which can be used to connect with related store or compute - resource. - - You probably want to use the sub-classes and not this class directly. 
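The continuation_token documented just above is the paging handle for activity-run queries: feed it back into the next request until it comes back null. A sketch under stated assumptions: client is an initialized DataFactoryManagementClient, run_id/start/end already exist, and the operation name matches this SDK generation.

    from azure.mgmt.datafactory.models import RunFilterParameters

    runs, token = [], None
    while True:
        filters = RunFilterParameters(
            last_updated_after=start,
            last_updated_before=end,
            continuation_token=token,
        )
        page = client.activity_runs.query_by_pipeline_run(
            "myResourceGroup", "myFactory", run_id, filters)
        runs.extend(page.value)
        token = page.continuation_token
        if not token:  # null token means no further pages
            break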
Known - sub-classes are: AzureFunctionLinkedService, - AzureDataExplorerLinkedService, SapTableLinkedService, - GoogleAdWordsLinkedService, OracleServiceCloudLinkedService, - DynamicsAXLinkedService, ResponsysLinkedService, - AzureDatabricksLinkedService, AzureDataLakeAnalyticsLinkedService, - HDInsightOnDemandLinkedService, SalesforceMarketingCloudLinkedService, - NetezzaLinkedService, VerticaLinkedService, ZohoLinkedService, - XeroLinkedService, SquareLinkedService, SparkLinkedService, - ShopifyLinkedService, ServiceNowLinkedService, QuickBooksLinkedService, - PrestoLinkedService, PhoenixLinkedService, PaypalLinkedService, - MarketoLinkedService, AzureMariaDBLinkedService, MariaDBLinkedService, - MagentoLinkedService, JiraLinkedService, ImpalaLinkedService, - HubspotLinkedService, HiveLinkedService, HBaseLinkedService, - GreenplumLinkedService, GoogleBigQueryLinkedService, EloquaLinkedService, - DrillLinkedService, CouchbaseLinkedService, ConcurLinkedService, - AzurePostgreSqlLinkedService, AmazonMWSLinkedService, SapHanaLinkedService, - SapBWLinkedService, SftpServerLinkedService, FtpServerLinkedService, - HttpLinkedService, AzureSearchLinkedService, CustomDataSourceLinkedService, - AmazonRedshiftLinkedService, AmazonS3LinkedService, - RestServiceLinkedService, SapOpenHubLinkedService, SapEccLinkedService, - SapCloudForCustomerLinkedService, SalesforceServiceCloudLinkedService, - SalesforceLinkedService, Office365LinkedService, AzureBlobFSLinkedService, - AzureDataLakeStoreLinkedService, CosmosDbMongoDbApiLinkedService, - MongoDbV2LinkedService, MongoDbLinkedService, CassandraLinkedService, - WebLinkedService, ODataLinkedService, HdfsLinkedService, - MicrosoftAccessLinkedService, InformixLinkedService, OdbcLinkedService, - AzureMLLinkedService, TeradataLinkedService, Db2LinkedService, - SybaseLinkedService, PostgreSqlLinkedService, MySqlLinkedService, - AzureMySqlLinkedService, OracleLinkedService, FileServerLinkedService, - HDInsightLinkedService, CommonDataServiceForAppsLinkedService, - DynamicsCrmLinkedService, DynamicsLinkedService, CosmosDbLinkedService, - AzureKeyVaultLinkedService, AzureBatchLinkedService, - AzureSqlMILinkedService, AzureSqlDatabaseLinkedService, - SqlServerLinkedService, AzureSqlDWLinkedService, - AzureTableStorageLinkedService, AzureBlobStorageLinkedService, - AzureStorageLinkedService - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 
'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: - super(LinkedService, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.connect_via = connect_via - self.description = description - self.parameters = parameters - self.annotations = annotations - self.type = None - - -class AmazonMWSLinkedService(LinkedService): - """Amazon Marketplace Web Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param endpoint: Required. The endpoint of the Amazon MWS server, (i.e. - mws.amazonservices.com) - :type endpoint: object - :param marketplace_id: Required. The Amazon Marketplace ID you want to - retrieve data from. To retrieve data from multiple Marketplace IDs, - separate them with a comma (,). (i.e. A2EUQ1WTGCTBG2) - :type marketplace_id: object - :param seller_id: Required. The Amazon seller ID. - :type seller_id: object - :param mws_auth_token: The Amazon MWS authentication token. - :type mws_auth_token: ~azure.mgmt.datafactory.models.SecretBase - :param access_key_id: Required. The access key id used to access data. - :type access_key_id: object - :param secret_key: The secret key used to access data. - :type secret_key: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
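Note from the _subtype_map just above that the wire discriminator does not always match the Python class name: 'Sftp' resolves to SftpServerLinkedService and 'HttpServer' to HttpLinkedService. A hedged sketch of that resolution, again assuming msrest's Model.deserialize:

    from azure.mgmt.datafactory.models import HttpLinkedService, LinkedService

    linked = LinkedService.deserialize({
        "type": "HttpServer",  # wire name, not the class name
        "typeProperties": {"url": "https://example.com/data"},
    })
    assert isinstance(linked, HttpLinkedService)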
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'marketplace_id': {'required': True}, - 'seller_id': {'required': True}, - 'access_key_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'marketplace_id': {'key': 'typeProperties.marketplaceID', 'type': 'object'}, - 'seller_id': {'key': 'typeProperties.sellerID', 'type': 'object'}, - 'mws_auth_token': {'key': 'typeProperties.mwsAuthToken', 'type': 'SecretBase'}, - 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, - 'secret_key': {'key': 'typeProperties.secretKey', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, endpoint, marketplace_id, seller_id, access_key_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, mws_auth_token=None, secret_key=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(AmazonMWSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.endpoint = endpoint - self.marketplace_id = marketplace_id - self.seller_id = seller_id - self.mws_auth_token = mws_auth_token - self.access_key_id = access_key_id - self.secret_key = secret_key - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'AmazonMWS' - - -class Dataset(Model): - """The Azure Data Factory nested object which identifies data within different - data stores, such as tables, files, folders, and documents. - - You probably want to use the sub-classes and not this class directly. 
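Concretely, the endpoint and marketplace examples quoted in the AmazonMWSLinkedService docstring above translate into a construction like this sketch; the seller, key, and token values are placeholders, and SecureString is the in-line SecretBase implementation:

    from azure.mgmt.datafactory.models import (
        AmazonMWSLinkedService, SecureString)

    mws = AmazonMWSLinkedService(
        endpoint="mws.amazonservices.com",
        marketplace_id="A2EUQ1WTGCTBG2",  # comma-separate multiple IDs
        seller_id="<seller-id>",
        access_key_id="<access-key-id>",
        mws_auth_token=SecureString(value="<mws-auth-token>"),
    )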
Known - sub-classes are: GoogleAdWordsObjectDataset, AzureDataExplorerTableDataset, - OracleServiceCloudObjectDataset, DynamicsAXResourceDataset, - ResponsysObjectDataset, SalesforceMarketingCloudObjectDataset, - VerticaTableDataset, NetezzaTableDataset, ZohoObjectDataset, - XeroObjectDataset, SquareObjectDataset, SparkObjectDataset, - ShopifyObjectDataset, ServiceNowObjectDataset, QuickBooksObjectDataset, - PrestoObjectDataset, PhoenixObjectDataset, PaypalObjectDataset, - MarketoObjectDataset, AzureMariaDBTableDataset, MariaDBTableDataset, - MagentoObjectDataset, JiraObjectDataset, ImpalaObjectDataset, - HubspotObjectDataset, HiveObjectDataset, HBaseObjectDataset, - GreenplumTableDataset, GoogleBigQueryObjectDataset, EloquaObjectDataset, - DrillTableDataset, CouchbaseTableDataset, ConcurObjectDataset, - AzurePostgreSqlTableDataset, AmazonMWSObjectDataset, HttpDataset, - AzureSearchIndexDataset, WebTableDataset, SapTableResourceDataset, - RestResourceDataset, SqlServerTableDataset, SapOpenHubTableDataset, - SapHanaTableDataset, SapEccResourceDataset, - SapCloudForCustomerResourceDataset, SapBwCubeDataset, SybaseTableDataset, - SalesforceServiceCloudObjectDataset, SalesforceObjectDataset, - MicrosoftAccessTableDataset, PostgreSqlTableDataset, MySqlTableDataset, - OdbcTableDataset, InformixTableDataset, RelationalTableDataset, - Db2TableDataset, AmazonRedshiftTableDataset, AzureMySqlTableDataset, - TeradataTableDataset, OracleTableDataset, ODataResourceDataset, - CosmosDbMongoDbApiCollectionDataset, MongoDbV2CollectionDataset, - MongoDbCollectionDataset, FileShareDataset, Office365Dataset, - AzureBlobFSDataset, AzureDataLakeStoreDataset, - CommonDataServiceForAppsEntityDataset, DynamicsCrmEntityDataset, - DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, - CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, - AzureSqlTableDataset, AzureTableDataset, AzureBlobDataset, BinaryDataset, - JsonDataset, DelimitedTextDataset, ParquetDataset, AvroDataset, - AmazonS3Dataset - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 
'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'Json': 'JsonDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: - super(Dataset, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.description = description - self.structure = structure - self.schema = schema - self.linked_service_name = linked_service_name - self.parameters = parameters - self.annotations = annotations - self.folder = folder - self.type = None - - -class AmazonMWSObjectDataset(Dataset): - """Amazon Marketplace Web Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). 
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(AmazonMWSObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'AmazonMWSObject' - - -class CopySource(Model): - """A copy activity source. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonRedshiftSource, GoogleAdWordsSource, - OracleServiceCloudSource, DynamicsAXSource, ResponsysSource, - SalesforceMarketingCloudSource, VerticaSource, NetezzaSource, ZohoSource, - XeroSource, SquareSource, SparkSource, ShopifySource, ServiceNowSource, - QuickBooksSource, PrestoSource, PhoenixSource, PaypalSource, MarketoSource, - AzureMariaDBSource, MariaDBSource, MagentoSource, JiraSource, ImpalaSource, - HubspotSource, HiveSource, HBaseSource, GreenplumSource, - GoogleBigQuerySource, EloquaSource, DrillSource, CouchbaseSource, - ConcurSource, AzurePostgreSqlSource, AmazonMWSSource, HttpSource, - AzureBlobFSSource, AzureDataLakeStoreSource, Office365Source, - CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, CassandraSource, - WebSource, TeradataSource, OracleSource, AzureDataExplorerSource, - AzureMySqlSource, HdfsSource, FileSystemSource, SqlDWSource, SqlMISource, - AzureSqlSource, SqlServerSource, SqlSource, RestSource, SapTableSource, - SapOpenHubSource, SapHanaSource, SapEccSource, SapCloudForCustomerSource, - SalesforceServiceCloudSource, SalesforceSource, ODataSource, SapBwSource, - SybaseSource, PostgreSqlSource, MySqlSource, OdbcSource, Db2Source, - MicrosoftAccessSource, InformixSource, RelationalSource, - CommonDataServiceForAppsSource, DynamicsCrmSource, DynamicsSource, - DocumentDbCollectionSource, BlobSource, AzureTableSource, BinarySource, - JsonSource, DelimitedTextSource, ParquetSource, AvroSource - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
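The dotted keys in the attribute map just shown ('typeProperties.tableName' and friends) are how msrest flattens the payload's nested typeProperties bag onto flat Python attributes; serialization re-nests them. A sketch of the approximate wire shape, with an illustrative reference name:

    from azure.mgmt.datafactory.models import (
        AmazonMWSObjectDataset, LinkedServiceReference)

    ds = AmazonMWSObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name="AmazonMWS1"),
        table_name="Orders",
    )
    body = ds.serialize()
    # body is roughly:
    # {"type": "AmazonMWSObject",
    #  "linkedServiceName": {"referenceName": "AmazonMWS1",
    #                        "type": "LinkedServiceReference"},
    #  "typeProperties": {"tableName": "Orders"}}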
- :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'ODataSource': 'ODataSource', 'SapBwSource': 'SapBwSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 
'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'BinarySource': 'BinarySource', 'JsonSource': 'JsonSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'AvroSource': 'AvroSource'} - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: - super(CopySource, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.source_retry_count = source_retry_count - self.source_retry_wait = source_retry_wait - self.max_concurrent_connections = max_concurrent_connections - self.type = None - - -class AmazonMWSSource(CopySource): - """A copy activity Amazon Marketplace Web Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(AmazonMWSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'AmazonMWSSource' - - -class AmazonRedshiftLinkedService(LinkedService): - """Linked service for Amazon Redshift. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param server: Required. The name of the Amazon Redshift server. Type: - string (or Expression with resultType string). - :type server: object - :param username: The username of the Amazon Redshift source. Type: string - (or Expression with resultType string). - :type username: object - :param password: The password of the Amazon Redshift source. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param database: Required. The database name of the Amazon Redshift - source. Type: string (or Expression with resultType string). - :type database: object - :param port: The TCP port number that the Amazon Redshift server uses to - listen for client connections. The default value is 5439. Type: integer - (or Expression with resultType integer). - :type port: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'database': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, server, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, username=None, password=None, port=None, encrypted_credential=None, **kwargs) -> None: - super(AmazonRedshiftLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.server = server - self.username = username - self.password = password - self.database = database - self.port = port - self.encrypted_credential = encrypted_credential - self.type = 'AmazonRedshift' - - -class AmazonRedshiftSource(CopySource): - """A copy activity source for Amazon Redshift Source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
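The pattern repeated in these docstrings is the .NET-style [days.]hh:mm:ss timespan used for every retry-wait and timeout property. A quick check of what it accepts (the regex is copied verbatim from the docstrings):

    import re

    TIMESPAN = re.compile(
        r"^((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))$")

    assert TIMESPAN.match("00:00:30")      # 30 seconds
    assert TIMESPAN.match("7.00:00:00")    # 7 days, with the optional day prefix
    assert not TIMESPAN.match("00:99:00")  # minutes out of range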
- :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - :param redshift_unload_settings: The Amazon S3 settings needed for the - interim Amazon S3 when copying from Amazon Redshift with unload. With - this, data from Amazon Redshift source will be unloaded into S3 first and - then copied into the targeted sink from the interim S3. - :type redshift_unload_settings: - ~azure.mgmt.datafactory.models.RedshiftUnloadSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, redshift_unload_settings=None, **kwargs) -> None: - super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.redshift_unload_settings = redshift_unload_settings - self.type = 'AmazonRedshiftSource' - - -class AmazonRedshiftTableDataset(Dataset): - """The Amazon Redshift table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The Amazon Redshift table name. Type: string (or Expression - with resultType string). 
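The redshift_unload_settings described above turn the copy into a two-hop move: Redshift UNLOADs into an interim S3 bucket first, and the service then copies from that bucket into the sink. A sketch with illustrative reference and bucket names:

    from azure.mgmt.datafactory.models import (
        AmazonRedshiftSource, LinkedServiceReference, RedshiftUnloadSettings)

    source = AmazonRedshiftSource(
        query="SELECT * FROM public.sales",
        redshift_unload_settings=RedshiftUnloadSettings(
            s3_linked_service_name=LinkedServiceReference(
                reference_name="InterimS3"),      # linked service for the staging bucket
            bucket_name="interim-unload-bucket",  # where the UNLOAD files land
        ),
    )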
- :type table: object - :param amazon_redshift_table_dataset_schema: The Amazon Redshift schema - name. Type: string (or Expression with resultType string). - :type amazon_redshift_table_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'amazon_redshift_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, amazon_redshift_table_dataset_schema=None, **kwargs) -> None: - super(AmazonRedshiftTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.table = table - self.amazon_redshift_table_dataset_schema = amazon_redshift_table_dataset_schema - self.type = 'AmazonRedshiftTable' - - -class AmazonS3Dataset(Dataset): - """A single Amazon Simple Storage Service (S3) object or a set of S3 objects. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param bucket_name: Required. The name of the Amazon S3 bucket. Type: - string (or Expression with resultType string). - :type bucket_name: object - :param key: The key of the Amazon S3 object. Type: string (or Expression - with resultType string). 
- :type key: object - :param prefix: The prefix filter for the S3 object name. Type: string (or - Expression with resultType string). - :type prefix: object - :param version: The version for the S3 object. Type: string (or Expression - with resultType string). - :type version: object - :param modified_datetime_start: The start of S3 object's modified - datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of S3 object's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_end: object - :param format: The format of files. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used for the Amazon S3 - object. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'bucket_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'bucket_name': {'key': 'typeProperties.bucketName', 'type': 'object'}, - 'key': {'key': 'typeProperties.key', 'type': 'object'}, - 'prefix': {'key': 'typeProperties.prefix', 'type': 'object'}, - 'version': {'key': 'typeProperties.version', 'type': 'object'}, - 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, *, linked_service_name, bucket_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, key=None, prefix=None, version=None, modified_datetime_start=None, modified_datetime_end=None, format=None, compression=None, **kwargs) -> None: - super(AmazonS3Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.bucket_name = bucket_name - self.key = key - self.prefix = prefix - self.version = version - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end - self.format = format - self.compression = compression - self.type = 'AmazonS3Object' - - -class AmazonS3LinkedService(LinkedService): - """Linked service for Amazon S3. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. 
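
A usage sketch for the S3 dataset defined above; the reference name is illustrative, and `LinkedServiceReference(reference_name=...)` is assumed from elsewhere in this package:

    from azure.mgmt.datafactory.models import (
        AmazonS3Dataset, LinkedServiceReference)

    # bucket_name is the only required type property; key, prefix, version
    # and the modified-datetime window narrow down which objects are meant.
    s3_ds = AmazonS3Dataset(
        linked_service_name=LinkedServiceReference(reference_name='AmazonS3Ls'),
        bucket_name='telemetry-archive',
        key='2019/06/07/events.csv')
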
- :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param access_key_id: The access key identifier of the Amazon S3 Identity - and Access Management (IAM) user. Type: string (or Expression with - resultType string). - :type access_key_id: object - :param secret_access_key: The secret access key of the Amazon S3 Identity - and Access Management (IAM) user. - :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase - :param service_url: This value specifies the endpoint to access with the - S3 Connector. This is an optional property; change it only if you want to - try a different service endpoint or want to switch between https and http. - Type: string (or Expression with resultType string). - :type service_url: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, - 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, - 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_key_id=None, secret_access_key=None, service_url=None, encrypted_credential=None, **kwargs) -> None: - super(AmazonS3LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.access_key_id = access_key_id - self.secret_access_key = secret_access_key - self.service_url = service_url - self.encrypted_credential = encrypted_credential - self.type = 'AmazonS3' - - -class DatasetLocation(Model): - """Dataset location. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). 
- :type file_name: object
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'type': {'key': 'type', 'type': 'str'},
- 'folder_path': {'key': 'folderPath', 'type': 'object'},
- 'file_name': {'key': 'fileName', 'type': 'object'},
- }
-
- def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None:
- super(DatasetLocation, self).__init__(**kwargs)
- self.additional_properties = additional_properties
- self.type = type
- self.folder_path = folder_path
- self.file_name = file_name
-
-
-class AmazonS3Location(DatasetLocation):
- """The location of an Amazon S3 dataset.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param type: Required. Type of dataset storage location.
- :type type: str
- :param folder_path: Specify the folder path of dataset. Type: string (or
- Expression with resultType string)
- :type folder_path: object
- :param file_name: Specify the file name of dataset. Type: string (or
- Expression with resultType string).
- :type file_name: object
- :param bucket_name: Specify the bucketName of Amazon S3. Type: string (or
- Expression with resultType string)
- :type bucket_name: object
- :param version: Specify the version of the Amazon S3 object. Type: string
- (or Expression with resultType string).
- :type version: object
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'type': {'key': 'type', 'type': 'str'},
- 'folder_path': {'key': 'folderPath', 'type': 'object'},
- 'file_name': {'key': 'fileName', 'type': 'object'},
- 'bucket_name': {'key': 'bucketName', 'type': 'object'},
- 'version': {'key': 'version', 'type': 'object'},
- }
-
- def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, bucket_name=None, version=None, **kwargs) -> None:
- super(AmazonS3Location, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs)
- self.bucket_name = bucket_name
- self.version = version
-
-
-class StoreReadSettings(Model):
- """Connector read settings.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param type: Required. The read setting type.
- :type type: str
- :param max_concurrent_connections: The maximum concurrent connection count
- for the source data store. Type: integer (or Expression with resultType
- integer).
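
Note that for `DatasetLocation` and its subtypes the discriminator is caller-supplied (`type` is a required constructor argument) rather than filled in by the model, unlike the `Constant filled by server` pattern used elsewhere in this file. A sketch, assuming the wire value is 'AmazonS3Location':

    from azure.mgmt.datafactory.models import AmazonS3Location

    location = AmazonS3Location(
        type='AmazonS3Location',   # assumed discriminator value
        bucket_name='telemetry-archive',
        folder_path='2019/06',
        file_name='events.csv')
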
- :type max_concurrent_connections: object
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'type': {'key': 'type', 'type': 'str'},
- 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
- }
-
- def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, **kwargs) -> None:
- super(StoreReadSettings, self).__init__(**kwargs)
- self.additional_properties = additional_properties
- self.type = type
- self.max_concurrent_connections = max_concurrent_connections
-
-
-class AmazonS3ReadSettings(StoreReadSettings):
- """Amazon S3 read settings.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param type: Required. The read setting type.
- :type type: str
- :param max_concurrent_connections: The maximum concurrent connection count
- for the source data store. Type: integer (or Expression with resultType
- integer).
- :type max_concurrent_connections: object
- :param recursive: If true, files under the folder path will be read
- recursively. Default is true. Type: boolean (or Expression with resultType
- boolean).
- :type recursive: object
- :param wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or
- Expression with resultType string).
- :type wildcard_folder_path: object
- :param wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or
- Expression with resultType string).
- :type wildcard_file_name: object
- :param prefix: The prefix filter for the S3 object name. Type: string (or
- Expression with resultType string).
- :type prefix: object
- :param enable_partition_discovery: Indicates whether to enable partition
- discovery.
- :type enable_partition_discovery: bool
- :param modified_datetime_start: The start of the file's modified datetime.
- Type: string (or Expression with resultType string).
- :type modified_datetime_start: object
- :param modified_datetime_end: The end of the file's modified datetime.
- Type: string (or Expression with resultType string).
- :type modified_datetime_end: object
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'type': {'key': 'type', 'type': 'str'},
- 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
- 'recursive': {'key': 'recursive', 'type': 'object'},
- 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
- 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
- 'prefix': {'key': 'prefix', 'type': 'object'},
- 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'},
- 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'},
- 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'},
- }
-
- def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None:
- super(AmazonS3ReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs)
- self.recursive = recursive
- self.wildcard_folder_path = wildcard_folder_path
- self.wildcard_file_name = wildcard_file_name
- self.prefix = prefix
- self.enable_partition_discovery = enable_partition_discovery
- self.modified_datetime_start = modified_datetime_start
- self.modified_datetime_end = modified_datetime_end
-
-
-class ControlActivity(Activity):
- """Base class for all control activities like IfCondition, ForEach, Until.
-
- You probably want to use the sub-classes and not this class directly. Known
- sub-classes are: WebHookActivity, AppendVariableActivity,
- SetVariableActivity, FilterActivity, ValidationActivity, UntilActivity,
- WaitActivity, ForEachActivity, IfConditionActivity, ExecutePipelineActivity
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param name: Required. Activity name.
- :type name: str
- :param description: Activity description.
- :type description: str
- :param depends_on: Activity depends on condition.
- :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
- :param user_properties: Activity user properties.
- :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
- :param type: Required. Constant filled by server.
- :type type: str - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'WebHook': 'WebHookActivity', 'AppendVariable': 'AppendVariableActivity', 'SetVariable': 'SetVariableActivity', 'Filter': 'FilterActivity', 'Validation': 'ValidationActivity', 'Until': 'UntilActivity', 'Wait': 'WaitActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'ExecutePipeline': 'ExecutePipelineActivity'} - } - - def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, **kwargs) -> None: - super(ControlActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type = 'Container' - - -class AppendVariableActivity(ControlActivity): - """Append value for a Variable of type Array. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param variable_name: Name of the variable whose value needs to be - appended to. - :type variable_name: str - :param value: Value to be appended. Could be a static value or Expression - :type value: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, - 'value': {'key': 'typeProperties.value', 'type': 'object'}, - } - - def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, variable_name: str=None, value=None, **kwargs) -> None: - super(AppendVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.variable_name = variable_name - self.value = value - self.type = 'AppendVariable' - - -class AvroDataset(Dataset): - """Avro dataset. - - All required parameters must be populated in order to send to Azure. 
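
A sketch of the control activity defined above; the variable name and expression are illustrative. Note the model sets its own discriminator (`self.type = 'AppendVariable'`), so callers do not pass `type`:

    from azure.mgmt.datafactory.models import AppendVariableActivity

    append = AppendVariableActivity(
        name='CollectFileName',
        variable_name='processedFiles',  # an Array variable on the pipeline
        value="@activity('CopyFiles').output.fileName")  # static value or Expression
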
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param location: Required. The location of the avro storage. - :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param avro_compression_codec: Possible values include: 'none', 'deflate', - 'snappy', 'xz', 'bzip2' - :type avro_compression_codec: str or - ~azure.mgmt.datafactory.models.AvroCompressionCodec - :param avro_compression_level: - :type avro_compression_level: int - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'location': {'required': True}, - 'avro_compression_level': {'maximum': 9, 'minimum': 1}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'}, - 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, - } - - def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, avro_compression_codec=None, avro_compression_level: int=None, **kwargs) -> None: - super(AvroDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.location = location - self.avro_compression_codec = avro_compression_codec - self.avro_compression_level = avro_compression_level - self.type = 'Avro' - - -class DatasetStorageFormat(Model): - """The format definition of a storage. - - You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: ParquetFormat, OrcFormat, AvroFormat, JsonFormat, - TextFormat - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param serializer: Serializer. Type: string (or Expression with resultType - string). - :type serializer: object - :param deserializer: Deserializer. Type: string (or Expression with - resultType string). - :type deserializer: object - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'ParquetFormat': 'ParquetFormat', 'OrcFormat': 'OrcFormat', 'AvroFormat': 'AvroFormat', 'JsonFormat': 'JsonFormat', 'TextFormat': 'TextFormat'} - } - - def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, **kwargs) -> None: - super(DatasetStorageFormat, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.serializer = serializer - self.deserializer = deserializer - self.type = None - - -class AvroFormat(DatasetStorageFormat): - """The data stored in Avro format. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param serializer: Serializer. Type: string (or Expression with resultType - string). - :type serializer: object - :param deserializer: Deserializer. Type: string (or Expression with - resultType string). - :type deserializer: object - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, **kwargs) -> None: - super(AvroFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) - self.type = 'AvroFormat' - - -class CopySink(Model): - """A copy activity sink. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: CosmosDbMongoDbApiSink, SalesforceServiceCloudSink, - SalesforceSink, AzureDataExplorerSink, CommonDataServiceForAppsSink, - DynamicsCrmSink, DynamicsSink, MicrosoftAccessSink, InformixSink, OdbcSink, - AzureSearchIndexSink, AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, - SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, - DocumentDbCollectionSink, FileSystemSink, BlobSink, BinarySink, - ParquetSink, AvroSink, AzureTableSink, AzureQueueSink, - SapCloudForCustomerSink, AzureMySqlSink, AzurePostgreSqlSink, JsonSink, - DelimitedTextSink - - All required parameters must be populated in order to send to Azure. 
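
The `_subtype_map` on the base class is what lets the generated (de)serializer map a wire-side `type` value to a concrete model; a small sketch of the resulting behavior:

    from azure.mgmt.datafactory.models import AvroFormat, DatasetStorageFormat

    # The subclass pins the discriminator; the base class leaves it None.
    fmt = AvroFormat()
    assert fmt.type == 'AvroFormat'
    assert isinstance(fmt, DatasetStorageFormat)
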
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: - super(CopySink, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.write_batch_size = write_batch_size - self.write_batch_timeout = write_batch_timeout - self.sink_retry_count = sink_retry_count - self.sink_retry_wait = sink_retry_wait - self.max_concurrent_connections = max_concurrent_connections - self.type = None - - -class AvroSink(CopySink): - """A copy activity Avro sink. 
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: Avro store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - :param format_settings: Avro format settings. - :type format_settings: ~azure.mgmt.datafactory.models.AvroWriteSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: - super(AvroSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.store_settings = store_settings - self.format_settings = format_settings - self.type = 'AvroSink' - - -class AvroSource(CopySource): - """A copy activity Avro source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. 
Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: Avro store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: - super(AvroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.store_settings = store_settings - self.type = 'AvroSource' - - -class FormatWriteSettings(Model): - """Format write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, *, type: str, additional_properties=None, **kwargs) -> None: - super(FormatWriteSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type = type - - -class AvroWriteSettings(FormatWriteSettings): - """Avro write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str - :param record_name: Top level record name in write result, which is - required in AVRO spec. - :type record_name: str - :param record_namespace: Record namespace in the write result. - :type record_namespace: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'record_name': {'key': 'recordName', 'type': 'str'}, - 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, - } - - def __init__(self, *, type: str, additional_properties=None, record_name: str=None, record_namespace: str=None, **kwargs) -> None: - super(AvroWriteSettings, self).__init__(additional_properties=additional_properties, type=type, **kwargs) - self.record_name = record_name - self.record_namespace = record_namespace - - -class AzureBatchLinkedService(LinkedService): - """Azure Batch linked service. - - All required parameters must be populated in order to send to Azure. 
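
Tying the two Avro write models together, a sketch; the discriminator value for the write settings is an assumption, and `store_settings` would normally carry a `StoreWriteSettings` subtype for the destination store:

    from azure.mgmt.datafactory.models import AvroSink, AvroWriteSettings

    sink = AvroSink(
        format_settings=AvroWriteSettings(
            type='AvroWriteSettings',            # assumed discriminator value
            record_name='Event',                 # required by the Avro spec
            record_namespace='com.contoso.data'))
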
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param account_name: Required. The Azure Batch account name. Type: string - (or Expression with resultType string). - :type account_name: object - :param access_key: The Azure Batch account access key. - :type access_key: ~azure.mgmt.datafactory.models.SecretBase - :param batch_uri: Required. The Azure Batch URI. Type: string (or - Expression with resultType string). - :type batch_uri: object - :param pool_name: Required. The Azure Batch pool name. Type: string (or - Expression with resultType string). - :type pool_name: object - :param linked_service_name: Required. The Azure Storage linked service - reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'account_name': {'required': True}, - 'batch_uri': {'required': True}, - 'pool_name': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, - 'access_key': {'key': 'typeProperties.accessKey', 'type': 'SecretBase'}, - 'batch_uri': {'key': 'typeProperties.batchUri', 'type': 'object'}, - 'pool_name': {'key': 'typeProperties.poolName', 'type': 'object'}, - 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, account_name, batch_uri, pool_name, linked_service_name, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_key=None, encrypted_credential=None, **kwargs) -> None: - super(AzureBatchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.account_name = account_name - self.access_key = access_key - self.batch_uri = batch_uri - self.pool_name = pool_name - self.linked_service_name = linked_service_name - self.encrypted_credential = encrypted_credential - self.type = 'AzureBatch' - - -class AzureBlobDataset(Dataset): - """The Azure Blob storage. 
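
A construction sketch for the Azure Batch linked service above; `SecureString` is assumed to be the usual `SecretBase` subtype in this package, and all values are illustrative:

    from azure.mgmt.datafactory.models import (
        AzureBatchLinkedService, LinkedServiceReference, SecureString)

    batch_ls = AzureBatchLinkedService(
        account_name='mybatchaccount',
        batch_uri='https://mybatchaccount.westus2.batch.azure.com',
        pool_name='adf-pool',
        linked_service_name=LinkedServiceReference(reference_name='AzureStorageLs'),
        access_key=SecureString(value='<redacted>'))
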
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param folder_path: The path of the Azure Blob storage. Type: string (or - Expression with resultType string). - :type folder_path: object - :param table_root_location: The root of blob path. Type: string (or - Expression with resultType string). - :type table_root_location: object - :param file_name: The name of the Azure Blob. Type: string (or Expression - with resultType string). - :type file_name: object - :param modified_datetime_start: The start of Azure Blob's modified - datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of Azure Blob's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_end: object - :param format: The format of the Azure Blob storage. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used for the blob storage. 
- :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'table_root_location': {'key': 'typeProperties.tableRootLocation', 'type': 'object'}, - 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, - 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, table_root_location=None, file_name=None, modified_datetime_start=None, modified_datetime_end=None, format=None, compression=None, **kwargs) -> None: - super(AzureBlobDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.folder_path = folder_path - self.table_root_location = table_root_location - self.file_name = file_name - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end - self.format = format - self.compression = compression - self.type = 'AzureBlob' - - -class AzureBlobFSDataset(Dataset): - """The Azure Data Lake Storage Gen2 storage. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. 
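
A sketch of the blob dataset defined above, using only properties shown in this diff; names are illustrative and `LinkedServiceReference` is assumed from elsewhere in this package:

    from azure.mgmt.datafactory.models import (
        AzureBlobDataset, LinkedServiceReference)

    blob_ds = AzureBlobDataset(
        linked_service_name=LinkedServiceReference(reference_name='AzureBlobLs'),
        folder_path='container/raw/2019/06',
        file_name='events.json',
        modified_datetime_start='2019-06-01T00:00:00Z',
        modified_datetime_end='2019-06-07T00:00:00Z')
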
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param folder_path: The path of the Azure Data Lake Storage Gen2 storage. - Type: string (or Expression with resultType string). - :type folder_path: object - :param file_name: The name of the Azure Data Lake Storage Gen2. Type: - string (or Expression with resultType string). - :type file_name: object - :param format: The format of the Azure Data Lake Storage Gen2 storage. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used for the blob storage. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, format=None, compression=None, **kwargs) -> None: - super(AzureBlobFSDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.folder_path = folder_path - self.file_name = file_name - self.format = format - self.compression = compression - self.type = 'AzureBlobFSFile' - - -class AzureBlobFSLinkedService(LinkedService): - """Azure Data Lake Storage Gen2 linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param url: Required. Endpoint for the Azure Data Lake Storage Gen2 - service. Type: string (or Expression with resultType string). - :type url: object - :param account_key: Account key for the Azure Data Lake Storage Gen2 - service. Type: string (or Expression with resultType string). 
- :type account_key: object
- :param service_principal_id: The ID of the application used to
- authenticate against the Azure Data Lake Storage Gen2 account. Type:
- string (or Expression with resultType string).
- :type service_principal_id: object
- :param service_principal_key: The key of the application used to
- authenticate against the Azure Data Lake Storage Gen2 account.
- :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
- :param tenant: The name or ID of the tenant to which the service principal
- belongs. Type: string (or Expression with resultType string).
- :type tenant: object
- :param encrypted_credential: The encrypted credential used for
- authentication. Credentials are encrypted using the integration runtime
- credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object
- """
-
- _validation = {
- 'type': {'required': True},
- 'url': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
- 'description': {'key': 'description', 'type': 'str'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'url': {'key': 'typeProperties.url', 'type': 'object'},
- 'account_key': {'key': 'typeProperties.accountKey', 'type': 'object'},
- 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
- 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
- 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
- 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
- }
-
- def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, account_key=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None:
- super(AzureBlobFSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
- self.url = url
- self.account_key = account_key
- self.service_principal_id = service_principal_id
- self.service_principal_key = service_principal_key
- self.tenant = tenant
- self.encrypted_credential = encrypted_credential
- self.type = 'AzureBlobFS'
-
-
-class AzureBlobFSLocation(DatasetLocation):
- """The location of an Azure Blob FS (Azure Data Lake Storage Gen2) dataset.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param type: Required. Type of dataset storage location.
- :type type: str
- :param folder_path: Specify the folder path of dataset. Type: string (or
- Expression with resultType string)
- :type folder_path: object
- :param file_name: Specify the file name of dataset. Type: string (or
- Expression with resultType string).
- :type file_name: object
- :param file_system: Specify the fileSystem of Azure Blob FS. Type: string
- (or Expression with resultType string).
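
Since ADLS Gen2 support is the point of this change, a sketch of the new linked service with service principal authentication; `SecureString` is assumed from elsewhere in this package and the endpoint values are illustrative:

    from azure.mgmt.datafactory.models import (
        AzureBlobFSLinkedService, SecureString)

    adls2_ls = AzureBlobFSLinkedService(
        url='https://myaccount.dfs.core.windows.net',
        service_principal_id='00000000-0000-0000-0000-000000000000',
        service_principal_key=SecureString(value='<redacted>'),
        tenant='contoso.onmicrosoft.com')
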
- :type file_system: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - 'file_system': {'key': 'fileSystem', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, file_system=None, **kwargs) -> None: - super(AzureBlobFSLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) - self.file_system = file_system - - -class AzureBlobFSReadSettings(StoreReadSettings): - """Azure blobFS read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - :param wildcard_folder_path: Azure blobFS wildcardFolderPath. Type: string - (or Expression with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or - Expression with resultType string). - :type wildcard_file_name: object - :param enable_partition_discovery: Indicates whether to enable partition - discovery. - :type enable_partition_discovery: bool - :param modified_datetime_start: The start of file's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: - string (or Expression with resultType string). 
- :type modified_datetime_end: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: - super(AzureBlobFSReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.recursive = recursive - self.wildcard_folder_path = wildcard_folder_path - self.wildcard_file_name = wildcard_file_name - self.enable_partition_discovery = enable_partition_discovery - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end - - -class AzureBlobFSSink(CopySink): - """A copy activity Azure Data Lake Storage Gen2 sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param copy_behavior: The type of copy behavior for copy sink. 
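A usage sketch for the read settings just defined; note that type must be passed explicitly here (unlike the sinks and sources, whose __init__ fills the discriminator in), and the wildcard and datetime window values are illustrative only:

    from azure.mgmt.datafactory.models import AzureBlobFSReadSettings

    read_settings = AzureBlobFSReadSettings(
        type='AzureBlobFSReadSettings',          # required; not auto-filled on read settings
        recursive=True,
        wildcard_file_name='*.csv',              # illustrative pattern
        modified_datetime_start='2019-01-01T00:00:00Z',
        modified_datetime_end='2019-06-01T00:00:00Z',
    )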
- :type copy_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: - super(AzureBlobFSSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.copy_behavior = copy_behavior - self.type = 'AzureBlobFSSink' - - -class AzureBlobFSSource(CopySource): - """A copy activity Azure BlobFS source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param treat_empty_as_null: Treat empty as null. Type: boolean (or - Expression with resultType boolean). - :type treat_empty_as_null: object - :param skip_header_line_count: Number of header lines to skip from each - blob. Type: integer (or Expression with resultType integer). - :type skip_header_line_count: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). 
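A sketch of the AzureBlobFSSink above; 'PreserveHierarchy' is an assumed example value, since copy_behavior is an open-typed object that could also carry an Expression:

    from azure.mgmt.datafactory.models import AzureBlobFSSink

    sink = AzureBlobFSSink(
        copy_behavior='PreserveHierarchy',   # assumed copy-behavior value
        max_concurrent_connections=4,
    )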
- :type recursive: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, - 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, treat_empty_as_null=None, skip_header_line_count=None, recursive=None, **kwargs) -> None: - super(AzureBlobFSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.treat_empty_as_null = treat_empty_as_null - self.skip_header_line_count = skip_header_line_count - self.recursive = recursive - self.type = 'AzureBlobFSSource' - - -class StoreWriteSettings(Model): - """Connector write settings. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: FileServerWriteSettings, AzureDataLakeStoreWriteSettings, - AzureBlobFSWriteSettings, AzureBlobStorageWriteSettings - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'FileServerWriteSettings': 'FileServerWriteSettings', 'AzureDataLakeStoreWriteSettings': 'AzureDataLakeStoreWriteSettings', 'AzureBlobFSWriteSettings': 'AzureBlobFSWriteSettings', 'AzureBlobStorageWriteSettings': 'AzureBlobStorageWriteSettings'} - } - - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: - super(StoreWriteSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.max_concurrent_connections = max_concurrent_connections - self.copy_behavior = copy_behavior - self.type = None - - -class AzureBlobFSWriteSettings(StoreWriteSettings): - """Azure blobFS write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. 
Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - :param type: Required. Constant filled by server. - :type type: str - :param block_size_in_mb: Indicates the block size(MB) when writing data to - blob. Type: integer (or Expression with resultType integer). - :type block_size_in_mb: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, block_size_in_mb=None, **kwargs) -> None: - super(AzureBlobFSWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) - self.block_size_in_mb = block_size_in_mb - self.type = 'AzureBlobFSWriteSettings' - - -class AzureBlobStorageLinkedService(LinkedService): - """The azure blob storage linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: The connection string. It is mutually exclusive - with sasUri, serviceEndpoint property. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param account_key: The Azure key vault secret reference of accountKey in - connection string. - :type account_key: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure Blob Storage resource. It is mutually - exclusive with connectionString, serviceEndpoint property. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type sas_uri: object - :param sas_token: The Azure key vault secret reference of sasToken in sas - uri. - :type sas_token: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param service_endpoint: Blob service endpoint of the Azure Blob Storage - resource. It is mutually exclusive with connectionString, sasUri property. - :type service_endpoint: str - :param service_principal_id: The ID of the service principal used to - authenticate against Azure SQL Data Warehouse. Type: string (or Expression - with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to - authenticate against Azure SQL Data Warehouse. 
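The _subtype_map on StoreWriteSettings above is what drives polymorphic (de)serialization: each subclass stamps its own type string in __init__, and that key selects the concrete class when a payload is rehydrated. A sketch of the serialization side, assuming msrest's Serializer with the models module as its class registry; the copy-behavior value is assumed:

    from msrest import Serializer
    from azure.mgmt.datafactory import models

    serializer = Serializer({k: v for k, v in vars(models).items() if isinstance(v, type)})
    settings = models.AzureBlobFSWriteSettings(
        block_size_in_mb=8,
        copy_behavior='FlattenHierarchy',    # assumed copy-behavior value
    )
    body = serializer.body(settings, 'StoreWriteSettings')
    # body['type'] == 'AzureBlobFSWriteSettings', the key that _subtype_map
    # consults when deserializing back into the concrete subclass.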
- :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal - belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, - 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, - 'service_endpoint': {'key': 'typeProperties.serviceEndpoint', 'type': 'str'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, account_key=None, sas_uri=None, sas_token=None, service_endpoint: str=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential: str=None, **kwargs) -> None: - super(AzureBlobStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.account_key = account_key - self.sas_uri = sas_uri - self.sas_token = sas_token - self.service_endpoint = service_endpoint - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.encrypted_credential = encrypted_credential - self.type = 'AzureBlobStorage' - - -class AzureBlobStorageLocation(DatasetLocation): - """The location of azure blob dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). - :type file_name: object - :param container: Specify the container of azure blob. Type: string (or - Expression with resultType string). 
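Because connectionString, sasUri, and serviceEndpoint are mutually exclusive, exactly one of them should be set; a sketch of the connection-string route with the account key kept in Key Vault (the linked service and secret names are hypothetical):

    from azure.mgmt.datafactory.models import (
        AzureBlobStorageLinkedService,
        AzureKeyVaultSecretReference,
        LinkedServiceReference,
    )

    blob_ls = AzureBlobStorageLinkedService(
        connection_string='DefaultEndpointsProtocol=https;AccountName=myaccount;',
        account_key=AzureKeyVaultSecretReference(
            store=LinkedServiceReference(reference_name='MyKeyVaultLinkedService'),
            secret_name='blob-account-key',
        ),
    )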
- :type container: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - 'container': {'key': 'container', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, container=None, **kwargs) -> None: - super(AzureBlobStorageLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) - self.container = container - - -class AzureBlobStorageReadSettings(StoreReadSettings): - """Azure blob read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - :param wildcard_folder_path: Azure blob wildcardFolderPath. Type: string - (or Expression with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: Azure blob wildcardFileName. Type: string (or - Expression with resultType string). - :type wildcard_file_name: object - :param enable_partition_discovery: Indicates whether to enable partition - discovery. - :type enable_partition_discovery: bool - :param modified_datetime_start: The start of file's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: - string (or Expression with resultType string). 
- :type modified_datetime_end: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: - super(AzureBlobStorageReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.recursive = recursive - self.wildcard_folder_path = wildcard_folder_path - self.wildcard_file_name = wildcard_file_name - self.enable_partition_discovery = enable_partition_discovery - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end - - -class AzureBlobStorageWriteSettings(StoreWriteSettings): - """Azure blob write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - :param type: Required. Constant filled by server. - :type type: str - :param block_size_in_mb: Indicates the block size(MB) when writing data to - blob. Type: integer (or Expression with resultType integer). - :type block_size_in_mb: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, block_size_in_mb=None, **kwargs) -> None: - super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) - self.block_size_in_mb = block_size_in_mb - self.type = 'AzureBlobStorageWriteSettings' - - -class AzureDatabricksLinkedService(LinkedService): - """Azure Databricks linked service. - - All required parameters must be populated in order to send to Azure. 
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param connect_via: The integration runtime reference.
- :type connect_via:
- ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
- :param description: Linked service description.
- :type description: str
- :param parameters: Parameters for linked service.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- linked service.
- :type annotations: list[object]
- :param type: Required. Constant filled by server.
- :type type: str
- :param domain: Required. <REGION>.azuredatabricks.net, domain name of your
- Databricks deployment. Type: string (or Expression with resultType
- string).
- :type domain: object
- :param access_token: Required. Access token for databricks REST API. Refer
- to https://docs.azuredatabricks.net/api/latest/authentication.html. Type:
- string (or Expression with resultType string).
- :type access_token: ~azure.mgmt.datafactory.models.SecretBase
- :param existing_cluster_id: The id of an existing cluster that will be
- used for all runs of this job. Type: string (or Expression with resultType
- string).
- :type existing_cluster_id: object
- :param new_cluster_version: The Spark version of new cluster. Type: string
- (or Expression with resultType string).
- :type new_cluster_version: object
- :param new_cluster_num_of_worker: Number of worker nodes that new cluster
- should have. A string formatted Int32, like '1' means numOfWorker is 1 or
- '1:10' means auto-scale from 1 as min and 10 as max. Type: string (or
- Expression with resultType string).
- :type new_cluster_num_of_worker: object
- :param new_cluster_node_type: The node types of new cluster. Type: string
- (or Expression with resultType string).
- :type new_cluster_node_type: object
- :param new_cluster_spark_conf: A set of optional, user-specified Spark
- configuration key-value pairs.
- :type new_cluster_spark_conf: dict[str, object]
- :param new_cluster_spark_env_vars: A set of optional, user-specified Spark
- environment variables key-value pairs.
- :type new_cluster_spark_env_vars: dict[str, object]
- :param new_cluster_custom_tags: Additional tags for cluster resources.
- :type new_cluster_custom_tags: dict[str, object]
- :param new_cluster_driver_node_type: The driver node type for the new
- cluster. Type: string (or Expression with resultType string).
- :type new_cluster_driver_node_type: object
- :param new_cluster_init_scripts: User-defined initialization scripts for
- the new cluster. Type: array of strings (or Expression with resultType
- array of strings).
- :type new_cluster_init_scripts: object
- :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new
- cluster. Type: boolean (or Expression with resultType boolean).
- :type new_cluster_enable_elastic_disk: object
- :param encrypted_credential: The encrypted credential used for
- authentication. Credentials are encrypted using the integration runtime
- credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'domain': {'required': True}, - 'access_token': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, - 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, - 'new_cluster_num_of_worker': {'key': 'typeProperties.newClusterNumOfWorker', 'type': 'object'}, - 'new_cluster_node_type': {'key': 'typeProperties.newClusterNodeType', 'type': 'object'}, - 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, - 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, - 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, - 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, - 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, - 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, domain, access_token, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, existing_cluster_id=None, new_cluster_version=None, new_cluster_num_of_worker=None, new_cluster_node_type=None, new_cluster_spark_conf=None, new_cluster_spark_env_vars=None, new_cluster_custom_tags=None, new_cluster_driver_node_type=None, new_cluster_init_scripts=None, new_cluster_enable_elastic_disk=None, encrypted_credential=None, **kwargs) -> None: - super(AzureDatabricksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.domain = domain - self.access_token = access_token - self.existing_cluster_id = existing_cluster_id - self.new_cluster_version = new_cluster_version - self.new_cluster_num_of_worker = new_cluster_num_of_worker - self.new_cluster_node_type = new_cluster_node_type - self.new_cluster_spark_conf = new_cluster_spark_conf - self.new_cluster_spark_env_vars = new_cluster_spark_env_vars - self.new_cluster_custom_tags = new_cluster_custom_tags - self.new_cluster_driver_node_type = new_cluster_driver_node_type - self.new_cluster_init_scripts = new_cluster_init_scripts - self.new_cluster_enable_elastic_disk = new_cluster_enable_elastic_disk - self.encrypted_credential = encrypted_credential - self.type = 'AzureDatabricks' - - -class ExecutionActivity(Activity): - """Base class for all execution activities. - - You probably want to use the sub-classes and not this class directly. 
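A construction sketch tying the newCluster* properties together; the domain, token, Spark version label, and node type are hypothetical values, and '1:10' uses the autoscale convention described above (minimum 1 worker, maximum 10):

    from azure.mgmt.datafactory.models import AzureDatabricksLinkedService, SecureString

    databricks = AzureDatabricksLinkedService(
        domain='https://eastus.azuredatabricks.net',   # hypothetical region
        access_token=SecureString(value='<databricks-pat>'),
        new_cluster_version='5.3.x-scala2.11',         # hypothetical Spark version label
        new_cluster_num_of_worker='1:10',              # autoscale from 1 to 10 workers
        new_cluster_node_type='Standard_DS3_v2',       # hypothetical VM size
        new_cluster_enable_elastic_disk=True,
    )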
Known - sub-classes are: AzureFunctionActivity, DatabricksSparkPythonActivity, - DatabricksSparkJarActivity, DatabricksNotebookActivity, - DataLakeAnalyticsUSQLActivity, AzureMLUpdateResourceActivity, - AzureMLBatchExecutionActivity, GetMetadataActivity, WebActivity, - LookupActivity, AzureDataExplorerCommandActivity, DeleteActivity, - SqlServerStoredProcedureActivity, CustomActivity, - ExecuteSSISPackageActivity, HDInsightSparkActivity, - HDInsightStreamingActivity, HDInsightMapReduceActivity, - HDInsightPigActivity, HDInsightHiveActivity, CopyActivity - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - } - - _subtype_map = { - 'type': {'AzureFunctionActivity': 'AzureFunctionActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'GetMetadata': 'GetMetadataActivity', 'WebActivity': 'WebActivity', 'Lookup': 'LookupActivity', 'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'Delete': 'DeleteActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'Custom': 'CustomActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'Copy': 'CopyActivity'} - } - - def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, **kwargs) -> None: - super(ExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.linked_service_name = linked_service_name - self.policy = policy - self.type = 
'Execution'
-
-
-class AzureDataExplorerCommandActivity(ExecutionActivity):
- """Azure Data Explorer command activity.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param name: Required. Activity name.
- :type name: str
- :param description: Activity description.
- :type description: str
- :param depends_on: Activity depends on condition.
- :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
- :param user_properties: Activity user properties.
- :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
- :param type: Required. Constant filled by server.
- :type type: str
- :param linked_service_name: Linked service reference.
- :type linked_service_name:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param policy: Activity policy.
- :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
- :param command: Required. A control command, according to the Azure Data
- Explorer command syntax. Type: string (or Expression with resultType
- string).
- :type command: object
- :param command_timeout: Control command timeout. Type: string (or
- Expression with resultType string), pattern:
- ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type command_timeout: object
- """
-
- _validation = {
- 'name': {'required': True},
- 'type': {'required': True},
- 'command': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'name': {'key': 'name', 'type': 'str'},
- 'description': {'key': 'description', 'type': 'str'},
- 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
- 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
- 'command': {'key': 'typeProperties.command', 'type': 'object'},
- 'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'},
- }
-
- def __init__(self, *, name: str, command, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, command_timeout=None, **kwargs) -> None:
- super(AzureDataExplorerCommandActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
- self.command = command
- self.command_timeout = command_timeout
- self.type = 'AzureDataExplorerCommand'
-
-
-class AzureDataExplorerLinkedService(LinkedService):
- """Azure Data Explorer (Kusto) linked service.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param connect_via: The integration runtime reference.
- :type connect_via:
- ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
- :param description: Linked service description.
- :type description: str
- :param parameters: Parameters for linked service.
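A sketch of the command activity above; the activity name and control command are illustrative Kusto management syntax, and the timeout follows the timespan pattern from the docstring:

    from azure.mgmt.datafactory.models import AzureDataExplorerCommandActivity

    purge_old = AzureDataExplorerCommandActivity(
        name='DropOldExtents',                        # hypothetical activity name
        command='.drop extents <| .show table Events extents '
                '| where CreatedOn < ago(30d)',       # illustrative control command
        command_timeout='00:20:00',
    )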
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- linked service.
- :type annotations: list[object]
- :param type: Required. Constant filled by server.
- :type type: str
- :param endpoint: Required. The endpoint of Azure Data Explorer (the
- engine's endpoint). URL will be in the format
- https://<clusterName>.<regionName>.kusto.windows.net. Type: string (or
- Expression with resultType string)
- :type endpoint: object
- :param service_principal_id: Required. The ID of the service principal
- used to authenticate against Azure Data Explorer. Type: string (or
- Expression with resultType string).
- :type service_principal_id: object
- :param service_principal_key: Required. The key of the service principal
- used to authenticate against Kusto.
- :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
- :param database: Required. Database name for connection. Type: string (or
- Expression with resultType string).
- :type database: object
- :param tenant: Required. The name or ID of the tenant to which the service
- principal belongs. Type: string (or Expression with resultType string).
- :type tenant: object
- """
-
- _validation = {
- 'type': {'required': True},
- 'endpoint': {'required': True},
- 'service_principal_id': {'required': True},
- 'service_principal_key': {'required': True},
- 'database': {'required': True},
- 'tenant': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
- 'description': {'key': 'description', 'type': 'str'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'},
- 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
- 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
- 'database': {'key': 'typeProperties.database', 'type': 'object'},
- 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
- }
-
- def __init__(self, *, endpoint, service_principal_id, service_principal_key, database, tenant, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None:
- super(AzureDataExplorerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
- self.endpoint = endpoint
- self.service_principal_id = service_principal_id
- self.service_principal_key = service_principal_key
- self.database = database
- self.tenant = tenant
- self.type = 'AzureDataExplorer'
-
-
-class AzureDataExplorerSink(CopySink):
- """A copy activity Azure Data Explorer sink.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param write_batch_size: Write batch size. Type: integer (or Expression
- with resultType integer), minimum: 0.
- :type write_batch_size: object
- :param write_batch_timeout: Write batch timeout. Type: string (or
- Expression with resultType string), pattern:
- ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
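All five type properties of the AzureDataExplorerLinkedService above are required, so even a minimal sketch supplies each (values hypothetical):

    from azure.mgmt.datafactory.models import AzureDataExplorerLinkedService, SecureString

    adx_ls = AzureDataExplorerLinkedService(
        endpoint='https://mycluster.westus.kusto.windows.net',  # hypothetical cluster URL
        service_principal_id='<application-id>',
        service_principal_key=SecureString(value='<application-key>'),
        database='TelemetryDb',                                 # hypothetical database
        tenant='<tenant-id>',
    )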
- :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param ingestion_mapping_name: A name of a pre-created csv mapping that - was defined on the target Kusto table. Type: string. - :type ingestion_mapping_name: object - :param ingestion_mapping_as_json: An explicit column mapping description - provided in a json format. Type: string. - :type ingestion_mapping_as_json: object - :param flush_immediately: If set to true, any aggregation will be skipped. - Default is false. Type: boolean. - :type flush_immediately: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'}, - 'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'}, - 'flush_immediately': {'key': 'flushImmediately', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ingestion_mapping_name=None, ingestion_mapping_as_json=None, flush_immediately=None, **kwargs) -> None: - super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.ingestion_mapping_name = ingestion_mapping_name - self.ingestion_mapping_as_json = ingestion_mapping_as_json - self.flush_immediately = flush_immediately - self.type = 'AzureDataExplorerSink' - - -class AzureDataExplorerSource(CopySource): - """A copy activity Azure Data Explorer (Kusto) source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). 
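A sketch of the AzureDataExplorerSink above, pointing ingestion at a pre-created csv mapping (the mapping name is hypothetical); leaving flush_immediately false keeps server-side aggregation enabled:

    from azure.mgmt.datafactory.models import AzureDataExplorerSink

    adx_sink = AzureDataExplorerSink(
        ingestion_mapping_name='EventsCsvMapping',  # hypothetical mapping on the target table
        flush_immediately=False,                    # the documented default
    )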
- :type max_concurrent_connections: object
- :param type: Required. Constant filled by server.
- :type type: str
- :param query: Required. Database query. Should be a Kusto Query Language
- (KQL) query. Type: string (or Expression with resultType string).
- :type query: object
- :param no_truncation: The name of the Boolean option that controls whether
- truncation is applied to result-sets that go beyond a certain row-count
- limit.
- :type no_truncation: object
- :param query_timeout: Query timeout. Type: string (or Expression with
- resultType string), pattern:
- ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type query_timeout: object
- """
-
- _validation = {
- 'type': {'required': True},
- 'query': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
- 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
- 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
- 'type': {'key': 'type', 'type': 'str'},
- 'query': {'key': 'query', 'type': 'object'},
- 'no_truncation': {'key': 'noTruncation', 'type': 'object'},
- 'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
- }
-
- def __init__(self, *, query, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, no_truncation=None, query_timeout=None, **kwargs) -> None:
- super(AzureDataExplorerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
- self.query = query
- self.no_truncation = no_truncation
- self.query_timeout = query_timeout
- self.type = 'AzureDataExplorerSource'
-
-
-class AzureDataExplorerTableDataset(Dataset):
- """The Azure Data Explorer (Kusto) dataset.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param description: Dataset description.
- :type description: str
- :param structure: Columns that define the structure of the dataset. Type:
- array (or Expression with resultType array), itemType: DatasetDataElement.
- :type structure: object
- :param schema: Columns that define the physical type schema of the
- dataset. Type: array (or Expression with resultType array), itemType:
- DatasetSchemaDataElement.
- :type schema: object
- :param linked_service_name: Required. Linked service reference.
- :type linked_service_name:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param parameters: Parameters for dataset.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- Dataset.
- :type annotations: list[object]
- :param folder: The folder that this Dataset is in. If not specified,
- Dataset will appear at the root level.
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
- :param type: Required. Constant filled by server.
- :type type: str
- :param table: The table name of the Azure Data Explorer database. Type:
- string (or Expression with resultType string).
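query is the only required property of the AzureDataExplorerSource above; a sketch with an illustrative KQL query plus the optional truncation and timeout knobs:

    from azure.mgmt.datafactory.models import AzureDataExplorerSource

    adx_source = AzureDataExplorerSource(
        query='Events | where Timestamp > ago(1d) | project Timestamp, Level, Message',
        no_truncation=True,          # lift the row-count cap on large result sets
        query_timeout='00:10:00',
    )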
- :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table=None, **kwargs) -> None: - super(AzureDataExplorerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table = table - self.type = 'AzureDataExplorerTable' - - -class AzureDataLakeAnalyticsLinkedService(LinkedService): - """Azure Data Lake Analytics linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param account_name: Required. The Azure Data Lake Analytics account name. - Type: string (or Expression with resultType string). - :type account_name: object - :param service_principal_id: The ID of the application used to - authenticate against the Azure Data Lake Analytics account. Type: string - (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The Key of the application used to - authenticate against the Azure Data Lake Analytics account. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: Required. The name or ID of the tenant to which the service - principal belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param subscription_id: Data Lake Analytics account subscription ID (if - different from Data Factory account). Type: string (or Expression with - resultType string). - :type subscription_id: object - :param resource_group_name: Data Lake Analytics account resource group - name (if different from Data Factory account). Type: string (or Expression - with resultType string). - :type resource_group_name: object - :param data_lake_analytics_uri: Azure Data Lake Analytics URI Type: string - (or Expression with resultType string). 
- :type data_lake_analytics_uri: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'account_name': {'required': True}, - 'tenant': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, - 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, - 'data_lake_analytics_uri': {'key': 'typeProperties.dataLakeAnalyticsUri', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, account_name, tenant, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, subscription_id=None, resource_group_name=None, data_lake_analytics_uri=None, encrypted_credential=None, **kwargs) -> None: - super(AzureDataLakeAnalyticsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.account_name = account_name - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.subscription_id = subscription_id - self.resource_group_name = resource_group_name - self.data_lake_analytics_uri = data_lake_analytics_uri - self.encrypted_credential = encrypted_credential - self.type = 'AzureDataLakeAnalytics' - - -class AzureDataLakeStoreDataset(Dataset): - """Azure Data Lake Store dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param folder_path: Path to the folder in the Azure Data Lake Store. Type: - string (or Expression with resultType string). - :type folder_path: object - :param file_name: The name of the file in the Azure Data Lake Store. Type: - string (or Expression with resultType string). - :type file_name: object - :param format: The format of the Data Lake Store. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used for the item(s) in - the Azure Data Lake Store. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, format=None, compression=None, **kwargs) -> None: - super(AzureDataLakeStoreDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.folder_path = folder_path - self.file_name = file_name - self.format = format - self.compression = compression - self.type = 'AzureDataLakeStoreFile' - - -class AzureDataLakeStoreLinkedService(LinkedService): - """Azure Data Lake Store linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. 
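A dataset sketch combining the path, format, and compression properties above; the linked service name and paths are hypothetical, while TextFormat and DatasetGZipCompression are existing models in this package:

    from azure.mgmt.datafactory.models import (
        AzureDataLakeStoreDataset,
        DatasetGZipCompression,
        LinkedServiceReference,
        TextFormat,
    )

    adls_dataset = AzureDataLakeStoreDataset(
        linked_service_name=LinkedServiceReference(reference_name='MyAdlsLinkedService'),
        folder_path='clickstream/2019/06',       # hypothetical folder
        file_name='events.csv.gz',
        format=TextFormat(column_delimiter=','),
        compression=DatasetGZipCompression(),
    )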
Constant filled by server. - :type type: str - :param data_lake_store_uri: Required. Data Lake Store service URI. Type: - string (or Expression with resultType string). - :type data_lake_store_uri: object - :param service_principal_id: The ID of the application used to - authenticate against the Azure Data Lake Store account. Type: string (or - Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The Key of the application used to - authenticate against the Azure Data Lake Store account. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal - belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param account_name: Data Lake Store account name. Type: string (or - Expression with resultType string). - :type account_name: object - :param subscription_id: Data Lake Store account subscription ID (if - different from Data Factory account). Type: string (or Expression with - resultType string). - :type subscription_id: object - :param resource_group_name: Data Lake Store account resource group name - (if different from Data Factory account). Type: string (or Expression with - resultType string). - :type resource_group_name: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'data_lake_store_uri': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'data_lake_store_uri': {'key': 'typeProperties.dataLakeStoreUri', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, - 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, - 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, data_lake_store_uri, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, tenant=None, account_name=None, subscription_id=None, resource_group_name=None, encrypted_credential=None, **kwargs) -> None: - super(AzureDataLakeStoreLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.data_lake_store_uri = data_lake_store_uri - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.account_name = account_name - self.subscription_id = subscription_id - 
self.resource_group_name = resource_group_name -        self.encrypted_credential = encrypted_credential -        self.type = 'AzureDataLakeStore' - - -class AzureDataLakeStoreLocation(DatasetLocation): -    """The location of an Azure Data Lake Store dataset. - -    All required parameters must be populated in order to send to Azure. - -    :param additional_properties: Unmatched properties from the message are -     deserialized this collection -    :type additional_properties: dict[str, object] -    :param type: Required. Type of dataset storage location. -    :type type: str -    :param folder_path: Specify the folder path of the dataset. Type: string (or -     Expression with resultType string). -    :type folder_path: object -    :param file_name: Specify the file name of the dataset. Type: string (or -     Expression with resultType string). -    :type file_name: object -    """ - -    _validation = { -        'type': {'required': True}, -    } - -    _attribute_map = { -        'additional_properties': {'key': '', 'type': '{object}'}, -        'type': {'key': 'type', 'type': 'str'}, -        'folder_path': {'key': 'folderPath', 'type': 'object'}, -        'file_name': {'key': 'fileName', 'type': 'object'}, -    } - -    def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: -        super(AzureDataLakeStoreLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) - - -class AzureDataLakeStoreReadSettings(StoreReadSettings): -    """Azure Data Lake Store read settings. - -    All required parameters must be populated in order to send to Azure. - -    :param additional_properties: Unmatched properties from the message are -     deserialized this collection -    :type additional_properties: dict[str, object] -    :param type: Required. The read setting type. -    :type type: str -    :param max_concurrent_connections: The maximum concurrent connection count -     for the source data store. Type: integer (or Expression with resultType -     integer). -    :type max_concurrent_connections: object -    :param recursive: If true, files under the folder path will be read -     recursively. Default is true. Type: boolean (or Expression with resultType -     boolean). -    :type recursive: object -    :param wildcard_folder_path: ADLS wildcardFolderPath. Type: string (or -     Expression with resultType string). -    :type wildcard_folder_path: object -    :param wildcard_file_name: ADLS wildcardFileName. Type: string (or -     Expression with resultType string). -    :type wildcard_file_name: object -    :param enable_partition_discovery: Indicates whether to enable partition -     discovery. -    :type enable_partition_discovery: bool -    :param modified_datetime_start: The start of the file's modified datetime. -     Type: string (or Expression with resultType string). -    :type modified_datetime_start: object -    :param modified_datetime_end: The end of the file's modified datetime. Type: -     string (or Expression with resultType string).
- :type modified_datetime_end: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: - super(AzureDataLakeStoreReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.recursive = recursive - self.wildcard_folder_path = wildcard_folder_path - self.wildcard_file_name = wildcard_file_name - self.enable_partition_discovery = enable_partition_discovery - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end - - -class AzureDataLakeStoreSink(CopySink): - """A copy activity Azure Data Lake Store sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - :param enable_adls_single_file_parallel: Single File Parallel. 
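For orientation while reviewing the removed read-settings model above, a minimal sketch of how it is typically constructed. All values are hypothetical; only the keyword names come from the model, and the 'type' string is assumed to be the settings type name, since the constructor requires it.

    from azure.mgmt.datafactory.models import AzureDataLakeStoreReadSettings

    # Wildcard matching combined with a modified-datetime window; the
    # folder and file patterns here are made-up examples.
    read_settings = AzureDataLakeStoreReadSettings(
        type='AzureDataLakeStoreReadSettings',  # assumed type name
        recursive=True,
        wildcard_folder_path='raw/2019/*',
        wildcard_file_name='*.csv',
        modified_datetime_start='2019-06-01T00:00:00Z',
        modified_datetime_end='2019-06-07T00:00:00Z',
    )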
- :type enable_adls_single_file_parallel: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, enable_adls_single_file_parallel=None, **kwargs) -> None: - super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.copy_behavior = copy_behavior - self.enable_adls_single_file_parallel = enable_adls_single_file_parallel - self.type = 'AzureDataLakeStoreSink' - - -class AzureDataLakeStoreSource(CopySource): - """A copy activity Azure Data Lake source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). 
-    :type recursive: object -    """ - -    _validation = { -        'type': {'required': True}, -    } - -    _attribute_map = { -        'additional_properties': {'key': '', 'type': '{object}'}, -        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, -        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, -        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, -        'type': {'key': 'type', 'type': 'str'}, -        'recursive': {'key': 'recursive', 'type': 'object'}, -    } - -    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, **kwargs) -> None: -        super(AzureDataLakeStoreSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) -        self.recursive = recursive -        self.type = 'AzureDataLakeStoreSource' - - -class AzureDataLakeStoreWriteSettings(StoreWriteSettings): -    """Azure Data Lake Store write settings. - -    All required parameters must be populated in order to send to Azure. - -    :param additional_properties: Unmatched properties from the message are -     deserialized this collection -    :type additional_properties: dict[str, object] -    :param max_concurrent_connections: The maximum concurrent connection count -     for the sink data store. Type: integer (or Expression with resultType -     integer). -    :type max_concurrent_connections: object -    :param copy_behavior: The type of copy behavior for copy sink. -    :type copy_behavior: object -    :param type: Required. Constant filled by server. -    :type type: str -    """ - -    _validation = { -        'type': {'required': True}, -    } - -    _attribute_map = { -        'additional_properties': {'key': '', 'type': '{object}'}, -        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, -        'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, -        'type': {'key': 'type', 'type': 'str'}, -    } - -    def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: -        super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) -        self.type = 'AzureDataLakeStoreWriteSettings' - - -class AzureFunctionActivity(ExecutionActivity): -    """Azure Function activity. - -    All required parameters must be populated in order to send to Azure. - -    :param additional_properties: Unmatched properties from the message are -     deserialized this collection -    :type additional_properties: dict[str, object] -    :param name: Required. Activity name. -    :type name: str -    :param description: Activity description. -    :type description: str -    :param depends_on: Activity depends on condition. -    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] -    :param user_properties: Activity user properties. -    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] -    :param type: Required. Constant filled by server. -    :type type: str -    :param linked_service_name: Linked service reference. -    :type linked_service_name: -     ~azure.mgmt.datafactory.models.LinkedServiceReference -    :param policy: Activity policy. -    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy -    :param method: Required. REST API method for the target endpoint.
Possible -     values include: 'GET', 'POST', 'PUT', 'DELETE', 'OPTIONS', 'HEAD', 'TRACE' -    :type method: str or -     ~azure.mgmt.datafactory.models.AzureFunctionActivityMethod -    :param function_name: Required. Name of the Function that the Azure -     Function Activity will call. Type: string (or Expression with resultType -     string). -    :type function_name: object -    :param headers: Represents the headers that will be sent to the request. -     For example, to set the language and type on a request: "headers" : { -     "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: -     string (or Expression with resultType string). -    :type headers: object -    :param body: Represents the payload that will be sent to the endpoint. -     Required for POST/PUT method, not allowed for GET method. Type: string (or -     Expression with resultType string). -    :type body: object -    """ - -    _validation = { -        'name': {'required': True}, -        'type': {'required': True}, -        'method': {'required': True}, -        'function_name': {'required': True}, -    } - -    _attribute_map = { -        'additional_properties': {'key': '', 'type': '{object}'}, -        'name': {'key': 'name', 'type': 'str'}, -        'description': {'key': 'description', 'type': 'str'}, -        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, -        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, -        'type': {'key': 'type', 'type': 'str'}, -        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, -        'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, -        'method': {'key': 'typeProperties.method', 'type': 'str'}, -        'function_name': {'key': 'typeProperties.functionName', 'type': 'object'}, -        'headers': {'key': 'typeProperties.headers', 'type': 'object'}, -        'body': {'key': 'typeProperties.body', 'type': 'object'}, -    } - -    def __init__(self, *, name: str, method, function_name, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, headers=None, body=None, **kwargs) -> None: -        super(AzureFunctionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) -        self.method = method -        self.function_name = function_name -        self.headers = headers -        self.body = body -        self.type = 'AzureFunctionActivity' - - -class AzureFunctionLinkedService(LinkedService): -    """Azure Function linked service. - -    All required parameters must be populated in order to send to Azure. - -    :param additional_properties: Unmatched properties from the message are -     deserialized this collection -    :type additional_properties: dict[str, object] -    :param connect_via: The integration runtime reference. -    :type connect_via: -     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference -    :param description: Linked service description. -    :type description: str -    :param parameters: Parameters for linked service. -    :type parameters: dict[str, -     ~azure.mgmt.datafactory.models.ParameterSpecification] -    :param annotations: List of tags that can be used for describing the -     linked service. -    :type annotations: list[object] -    :param type: Required. Constant filled by server. -    :type type: str -    :param function_app_url: Required. The endpoint of the Azure Function App. -     URL will be in the format https://<accountName>.azurewebsites.net. -    :type function_app_url: object -    :param function_key: Function or Host key for Azure Function App.
-    :type function_key: ~azure.mgmt.datafactory.models.SecretBase -    :param encrypted_credential: The encrypted credential used for -     authentication. Credentials are encrypted using the integration runtime -     credential manager. Type: string (or Expression with resultType string). -    :type encrypted_credential: object -    """ - -    _validation = { -        'type': {'required': True}, -        'function_app_url': {'required': True}, -    } - -    _attribute_map = { -        'additional_properties': {'key': '', 'type': '{object}'}, -        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, -        'description': {'key': 'description', 'type': 'str'}, -        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, -        'annotations': {'key': 'annotations', 'type': '[object]'}, -        'type': {'key': 'type', 'type': 'str'}, -        'function_app_url': {'key': 'typeProperties.functionAppUrl', 'type': 'object'}, -        'function_key': {'key': 'typeProperties.functionKey', 'type': 'SecretBase'}, -        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, -    } - -    def __init__(self, *, function_app_url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, function_key=None, encrypted_credential=None, **kwargs) -> None: -        super(AzureFunctionLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) -        self.function_app_url = function_app_url -        self.function_key = function_key -        self.encrypted_credential = encrypted_credential -        self.type = 'AzureFunction' - - -class AzureKeyVaultLinkedService(LinkedService): -    """Azure Key Vault linked service. - -    All required parameters must be populated in order to send to Azure. - -    :param additional_properties: Unmatched properties from the message are -     deserialized this collection -    :type additional_properties: dict[str, object] -    :param connect_via: The integration runtime reference. -    :type connect_via: -     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference -    :param description: Linked service description. -    :type description: str -    :param parameters: Parameters for linked service. -    :type parameters: dict[str, -     ~azure.mgmt.datafactory.models.ParameterSpecification] -    :param annotations: List of tags that can be used for describing the -     linked service. -    :type annotations: list[object] -    :param type: Required. Constant filled by server. -    :type type: str -    :param base_url: Required. The base URL of the Azure Key Vault, e.g. -     https://myakv.vault.azure.net. Type: string (or Expression with resultType -     string).
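A sketch wiring the two Azure Function models above together. The app URL, activity name, and function name are made up, and the key is shown inline as a SecureString only for brevity; in practice it would come from Key Vault.

    from azure.mgmt.datafactory.models import (
        AzureFunctionActivity,
        AzureFunctionLinkedService,
        SecureString,
    )

    # Hypothetical function app registered as a linked service.
    func_ls = AzureFunctionLinkedService(
        function_app_url='https://myfuncapp.azurewebsites.net',
        function_key=SecureString(value='<function-key>'),
    )

    # POST requires a body; a GET call would omit it (see the docstring above).
    activity = AzureFunctionActivity(
        name='InvokeScoringFunction',
        method='POST',
        function_name='score',
        body={'runId': '42'},
    )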
- :type base_url: object - """ - - _validation = { - 'type': {'required': True}, - 'base_url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'base_url': {'key': 'typeProperties.baseUrl', 'type': 'object'}, - } - - def __init__(self, *, base_url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: - super(AzureKeyVaultLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.base_url = base_url - self.type = 'AzureKeyVault' - - -class SecretBase(Model): - """The base definition of a secret type. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SecureString, AzureKeyVaultSecretReference - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SecureString': 'SecureString', 'AzureKeyVaultSecret': 'AzureKeyVaultSecretReference'} - } - - def __init__(self, **kwargs) -> None: - super(SecretBase, self).__init__(**kwargs) - self.type = None - - -class AzureKeyVaultSecretReference(SecretBase): - """Azure Key Vault secret reference. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. - :type type: str - :param store: Required. The Azure Key Vault linked service reference. - :type store: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param secret_name: Required. The name of the secret in Azure Key Vault. - Type: string (or Expression with resultType string). - :type secret_name: object - :param secret_version: The version of the secret in Azure Key Vault. The - default value is the latest version of the secret. Type: string (or - Expression with resultType string). - :type secret_version: object - """ - - _validation = { - 'type': {'required': True}, - 'store': {'required': True}, - 'secret_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'store': {'key': 'store', 'type': 'LinkedServiceReference'}, - 'secret_name': {'key': 'secretName', 'type': 'object'}, - 'secret_version': {'key': 'secretVersion', 'type': 'object'}, - } - - def __init__(self, *, store, secret_name, secret_version=None, **kwargs) -> None: - super(AzureKeyVaultSecretReference, self).__init__(**kwargs) - self.store = store - self.secret_name = secret_name - self.secret_version = secret_version - self.type = 'AzureKeyVaultSecret' - - -class AzureMariaDBLinkedService(LinkedService): - """Azure Database for MariaDB linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. 
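A sketch of the Key Vault pattern these three classes enable: register the vault as a linked service, then point secret references at it. The reference and secret names are hypothetical.

    from azure.mgmt.datafactory.models import (
        AzureKeyVaultLinkedService,
        AzureKeyVaultSecretReference,
        LinkedServiceReference,
    )

    # The vault itself becomes a linked service in the factory.
    akv_ls = AzureKeyVaultLinkedService(base_url='https://myakv.vault.azure.net')

    # Other linked services can then resolve secrets from it at runtime;
    # secret_version is omitted, so the latest version is used.
    password_ref = AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='MyAzureKeyVaultLS'),
        secret_name='sql-password',
    )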
- :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection - string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: - super(AzureMariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.pwd = pwd - self.encrypted_credential = encrypted_credential - self.type = 'AzureMariaDB' - - -class AzureMariaDBSource(CopySource): - """A copy activity Azure MariaDB source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(AzureMariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'AzureMariaDBSource' - - -class AzureMariaDBTableDataset(Dataset): - """Azure Database for MariaDB dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). 
-    :type table_name: object -    """ - -    _validation = { -        'linked_service_name': {'required': True}, -        'type': {'required': True}, -    } - -    _attribute_map = { -        'additional_properties': {'key': '', 'type': '{object}'}, -        'description': {'key': 'description', 'type': 'str'}, -        'structure': {'key': 'structure', 'type': 'object'}, -        'schema': {'key': 'schema', 'type': 'object'}, -        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, -        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, -        'annotations': {'key': 'annotations', 'type': '[object]'}, -        'folder': {'key': 'folder', 'type': 'DatasetFolder'}, -        'type': {'key': 'type', 'type': 'str'}, -        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, -    } - -    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: -        super(AzureMariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) -        self.table_name = table_name -        self.type = 'AzureMariaDBTable' - - -class AzureMLBatchExecutionActivity(ExecutionActivity): -    """Azure ML Batch Execution activity. - -    All required parameters must be populated in order to send to Azure. - -    :param additional_properties: Unmatched properties from the message are -     deserialized this collection -    :type additional_properties: dict[str, object] -    :param name: Required. Activity name. -    :type name: str -    :param description: Activity description. -    :type description: str -    :param depends_on: Activity depends on condition. -    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] -    :param user_properties: Activity user properties. -    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] -    :param type: Required. Constant filled by server. -    :type type: str -    :param linked_service_name: Linked service reference. -    :type linked_service_name: -     ~azure.mgmt.datafactory.models.LinkedServiceReference -    :param policy: Activity policy. -    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy -    :param global_parameters: Key/Value pairs to be passed to the Azure ML -     Batch Execution Service endpoint. Keys must match the names of web service -     parameters defined in the published Azure ML web service. Values will be -     passed in the GlobalParameters property of the Azure ML batch execution -     request. -    :type global_parameters: dict[str, object] -    :param web_service_outputs: Key/Value pairs, mapping the names of Azure ML -     endpoint's Web Service Outputs to AzureMLWebServiceFile objects specifying -     the output Blob locations. This information will be passed in the -     WebServiceOutputs property of the Azure ML batch execution request. -    :type web_service_outputs: dict[str, -     ~azure.mgmt.datafactory.models.AzureMLWebServiceFile] -    :param web_service_inputs: Key/Value pairs, mapping the names of Azure ML -     endpoint's Web Service Inputs to AzureMLWebServiceFile objects specifying -     the input Blob locations. This information will be passed in the -     WebServiceInputs property of the Azure ML batch execution request.
- :type web_service_inputs: dict[str, - ~azure.mgmt.datafactory.models.AzureMLWebServiceFile] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'global_parameters': {'key': 'typeProperties.globalParameters', 'type': '{object}'}, - 'web_service_outputs': {'key': 'typeProperties.webServiceOutputs', 'type': '{AzureMLWebServiceFile}'}, - 'web_service_inputs': {'key': 'typeProperties.webServiceInputs', 'type': '{AzureMLWebServiceFile}'}, - } - - def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, global_parameters=None, web_service_outputs=None, web_service_inputs=None, **kwargs) -> None: - super(AzureMLBatchExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.global_parameters = global_parameters - self.web_service_outputs = web_service_outputs - self.web_service_inputs = web_service_inputs - self.type = 'AzureMLBatchExecution' - - -class AzureMLLinkedService(LinkedService): - """Azure ML Web Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param ml_endpoint: Required. The Batch Execution REST URL for an Azure ML - Web Service endpoint. Type: string (or Expression with resultType string). - :type ml_endpoint: object - :param api_key: Required. The API key for accessing the Azure ML model - endpoint. - :type api_key: ~azure.mgmt.datafactory.models.SecretBase - :param update_resource_endpoint: The Update Resource REST URL for an Azure - ML Web Service endpoint. Type: string (or Expression with resultType - string). - :type update_resource_endpoint: object - :param service_principal_id: The ID of the service principal used to - authenticate against the ARM-based updateResourceEndpoint of an Azure ML - web service. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to - authenticate against the ARM-based updateResourceEndpoint of an Azure ML - web service. 
- :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal - belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'ml_endpoint': {'required': True}, - 'api_key': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'ml_endpoint': {'key': 'typeProperties.mlEndpoint', 'type': 'object'}, - 'api_key': {'key': 'typeProperties.apiKey', 'type': 'SecretBase'}, - 'update_resource_endpoint': {'key': 'typeProperties.updateResourceEndpoint', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, ml_endpoint, api_key, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, update_resource_endpoint=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: - super(AzureMLLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.ml_endpoint = ml_endpoint - self.api_key = api_key - self.update_resource_endpoint = update_resource_endpoint - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.encrypted_credential = encrypted_credential - self.type = 'AzureML' - - -class AzureMLUpdateResourceActivity(ExecutionActivity): - """Azure ML Update Resource management activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param trained_model_name: Required. 
Name of the Trained Model module in -     the Web Service experiment to be updated. Type: string (or Expression with -     resultType string). -    :type trained_model_name: object -    :param trained_model_linked_service_name: Required. Name of the Azure Storage -     linked service holding the .ilearner file that will be uploaded by the -     update operation. -    :type trained_model_linked_service_name: -     ~azure.mgmt.datafactory.models.LinkedServiceReference -    :param trained_model_file_path: Required. The relative file path in -     trainedModelLinkedService to represent the .ilearner file that will be -     uploaded by the update operation. Type: string (or Expression with -     resultType string). -    :type trained_model_file_path: object -    """ - -    _validation = { -        'name': {'required': True}, -        'type': {'required': True}, -        'trained_model_name': {'required': True}, -        'trained_model_linked_service_name': {'required': True}, -        'trained_model_file_path': {'required': True}, -    } - -    _attribute_map = { -        'additional_properties': {'key': '', 'type': '{object}'}, -        'name': {'key': 'name', 'type': 'str'}, -        'description': {'key': 'description', 'type': 'str'}, -        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, -        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, -        'type': {'key': 'type', 'type': 'str'}, -        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, -        'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, -        'trained_model_name': {'key': 'typeProperties.trainedModelName', 'type': 'object'}, -        'trained_model_linked_service_name': {'key': 'typeProperties.trainedModelLinkedServiceName', 'type': 'LinkedServiceReference'}, -        'trained_model_file_path': {'key': 'typeProperties.trainedModelFilePath', 'type': 'object'}, -    } - -    def __init__(self, *, name: str, trained_model_name, trained_model_linked_service_name, trained_model_file_path, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, **kwargs) -> None: -        super(AzureMLUpdateResourceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) -        self.trained_model_name = trained_model_name -        self.trained_model_linked_service_name = trained_model_linked_service_name -        self.trained_model_file_path = trained_model_file_path -        self.type = 'AzureMLUpdateResource' - - -class AzureMLWebServiceFile(Model): -    """Azure ML WebService Input/Output file. - -    All required parameters must be populated in order to send to Azure. - -    :param file_path: Required. The relative file path, including container -     name, in the Azure Blob Storage specified by the LinkedService. Type: -     string (or Expression with resultType string). -    :type file_path: object -    :param linked_service_name: Required. Reference to an Azure Storage -     LinkedService, where the Azure ML WebService Input/Output file is located.
- :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - """ - - _validation = { - 'file_path': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'file_path': {'key': 'filePath', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - } - - def __init__(self, *, file_path, linked_service_name, **kwargs) -> None: - super(AzureMLWebServiceFile, self).__init__(**kwargs) - self.file_path = file_path - self.linked_service_name = linked_service_name - - -class AzureMySqlLinkedService(LinkedService): - """Azure MySQL database linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in - connection string. - :type password: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(AzureMySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'AzureMySql' - - -class AzureMySqlSink(CopySink): - """A copy activity Azure MySql sink. - - All required parameters must be populated in order to send to Azure. 
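A sketch showing how AzureMLWebServiceFile feeds the AzureMLBatchExecutionActivity defined earlier in this diff. The storage linked service name, container paths, and parameter values are all hypothetical.

    from azure.mgmt.datafactory.models import (
        AzureMLBatchExecutionActivity,
        AzureMLWebServiceFile,
        LinkedServiceReference,
    )

    blob_ls = LinkedServiceReference(reference_name='MyAzureStorageLS')

    # Map the web service's named inputs/outputs to blob locations.
    batch_activity = AzureMLBatchExecutionActivity(
        name='ScoreBatch',
        web_service_inputs={
            'input1': AzureMLWebServiceFile(
                file_path='mlcontainer/input.csv', linked_service_name=blob_ls),
        },
        web_service_outputs={
            'output1': AzureMLWebServiceFile(
                file_path='mlcontainer/scored.csv', linked_service_name=blob_ls),
        },
        global_parameters={'Threshold': '0.8'},
    )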
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param pre_copy_script: A query to execute before starting the copy. Type: - string (or Expression with resultType string). - :type pre_copy_script: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: - super(AzureMySqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.pre_copy_script = pre_copy_script - self.type = 'AzureMySqlSink' - - -class AzureMySqlSource(CopySource): - """A copy activity Azure MySQL source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'AzureMySqlSource' - - -class AzureMySqlTableDataset(Dataset): - """The Azure MySQL database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The Azure MySQL database table name. Type: string (or - Expression with resultType string). 
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(AzureMySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'AzureMySqlTable' - - -class AzurePostgreSqlLinkedService(LinkedService): - """Azure PostgreSQL linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in - connection string. - :type password: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(AzurePostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'AzurePostgreSql' - - -class AzurePostgreSqlSink(CopySink): - """A copy activity Azure PostgreSQL sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param pre_copy_script: A query to execute before starting the copy. Type: - string (or Expression with resultType string). 
- :type pre_copy_script: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: - super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.pre_copy_script = pre_copy_script - self.type = 'AzurePostgreSqlSink' - - -class AzurePostgreSqlSource(CopySource): - """A copy activity Azure PostgreSQL source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'AzurePostgreSqlSource' - - -class AzurePostgreSqlTableDataset(Dataset): - """Azure PostgreSQL dataset. - - All required parameters must be populated in order to send to Azure. 
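A sketch pairing the two PostgreSQL copy models above inside a copy activity. The dataset reference names and SQL text are hypothetical; CopyActivity and DatasetReference come from the same models package.

    from azure.mgmt.datafactory.models import (
        AzurePostgreSqlSink,
        AzurePostgreSqlSource,
        CopyActivity,
        DatasetReference,
    )

    copy = CopyActivity(
        name='CopyPostgresToPostgres',
        inputs=[DatasetReference(reference_name='SourcePostgresDS')],
        outputs=[DatasetReference(reference_name='SinkPostgresDS')],
        source=AzurePostgreSqlSource(query='SELECT * FROM public.orders'),
        # Truncate the target before loading, then batch the writes.
        sink=AzurePostgreSqlSink(
            pre_copy_script='TRUNCATE TABLE public.orders_copy',
            write_batch_size=10000,
        ),
    )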
-class AzurePostgreSqlTableDataset(Dataset):
-    """Azure PostgreSQL dataset.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param description: Dataset description.
-    :type description: str
-    :param structure: Columns that define the structure of the dataset. Type:
-     array (or Expression with resultType array), itemType: DatasetDataElement.
-    :type structure: object
-    :param schema: Columns that define the physical type schema of the
-     dataset. Type: array (or Expression with resultType array), itemType:
-     DatasetSchemaDataElement.
-    :type schema: object
-    :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param parameters: Parameters for dataset.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     Dataset.
-    :type annotations: list[object]
-    :param folder: The folder that this Dataset is in. If not specified,
-     Dataset will appear at the root level.
-    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param table_name: The table name of the Azure PostgreSQL database which
-     includes both schema and table. Type: string (or Expression with
-     resultType string).
-    :type table_name: object
-    :param table: The table name of the Azure PostgreSQL database. Type:
-     string (or Expression with resultType string).
-    :type table: object
-    :param azure_postgre_sql_table_dataset_schema: The schema name of the
-     Azure PostgreSQL database. Type: string (or Expression with resultType
-     string).
-    :type azure_postgre_sql_table_dataset_schema: object
-    """
-
-    _validation = {
-        'linked_service_name': {'required': True},
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'description': {'key': 'description', 'type': 'str'},
-        'structure': {'key': 'structure', 'type': 'object'},
-        'schema': {'key': 'schema', 'type': 'object'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
-        'type': {'key': 'type', 'type': 'str'},
-        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
-        'table': {'key': 'typeProperties.table', 'type': 'object'},
-        'azure_postgre_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
-    }
-
-    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, azure_postgre_sql_table_dataset_schema=None, **kwargs) -> None:
-        super(AzurePostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
-        self.table_name = table_name
-        self.table = table
-        self.azure_postgre_sql_table_dataset_schema = azure_postgre_sql_table_dataset_schema
-        self.type = 'AzurePostgreSqlTable'
-
-
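Note the long parameter name: the Python keyword `azure_postgre_sql_table_dataset_schema` is the dataset's *database* schema name and serializes to `typeProperties.schema`; it is renamed to avoid clashing with the base `Dataset.schema` (column metadata). A sketch with placeholder names:

```python
from azure.mgmt.datafactory.models import AzurePostgreSqlTableDataset, LinkedServiceReference

pg_ds = AzurePostgreSqlTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='MyAzurePostgreSqlLinkedService'),
    azure_postgre_sql_table_dataset_schema='public',  # -> typeProperties.schema
    table='orders',                                   # -> typeProperties.table
)
```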
-class AzureQueueSink(CopySink):
-    """A copy activity Azure Queue sink.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param write_batch_size: Write batch size. Type: integer (or Expression
-     with resultType integer), minimum: 0.
-    :type write_batch_size: object
-    :param write_batch_timeout: Write batch timeout. Type: string (or
-     Expression with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type write_batch_timeout: object
-    :param sink_retry_count: Sink retry count. Type: integer (or Expression
-     with resultType integer).
-    :type sink_retry_count: object
-    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
-     resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type sink_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the sink data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
-        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
-        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
-        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-    }
-
-    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None:
-        super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
-        self.type = 'AzureQueueSink'
-
-
-class AzureSearchIndexDataset(Dataset):
-    """The Azure Search Index.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param description: Dataset description.
-    :type description: str
-    :param structure: Columns that define the structure of the dataset. Type:
-     array (or Expression with resultType array), itemType: DatasetDataElement.
-    :type structure: object
-    :param schema: Columns that define the physical type schema of the
-     dataset. Type: array (or Expression with resultType array), itemType:
-     DatasetSchemaDataElement.
-    :type schema: object
-    :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param parameters: Parameters for dataset.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     Dataset.
-    :type annotations: list[object]
-    :param folder: The folder that this Dataset is in. If not specified,
-     Dataset will appear at the root level.
-    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param index_name: Required. The name of the Azure Search Index. Type:
-     string (or Expression with resultType string).
-    :type index_name: object
-    """
-
-    _validation = {
-        'linked_service_name': {'required': True},
-        'type': {'required': True},
-        'index_name': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'description': {'key': 'description', 'type': 'str'},
-        'structure': {'key': 'structure', 'type': 'object'},
-        'schema': {'key': 'schema', 'type': 'object'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
-        'type': {'key': 'type', 'type': 'str'},
-        'index_name': {'key': 'typeProperties.indexName', 'type': 'object'},
-    }
-
-    def __init__(self, *, linked_service_name, index_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None:
-        super(AzureSearchIndexDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
-        self.index_name = index_name
-        self.type = 'AzureSearchIndex'
-
-
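Unlike most datasets, `index_name` is listed in `_validation` as required, so it must be passed positionally-by-keyword at construction time. A sketch with placeholder names:

```python
from azure.mgmt.datafactory.models import AzureSearchIndexDataset, LinkedServiceReference

index_ds = AzureSearchIndexDataset(
    linked_service_name=LinkedServiceReference(reference_name='MyAzureSearchLinkedService'),
    index_name='products',  # required by _validation
)
```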
-class AzureSearchIndexSink(CopySink):
-    """A copy activity Azure Search Index sink.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param write_batch_size: Write batch size. Type: integer (or Expression
-     with resultType integer), minimum: 0.
-    :type write_batch_size: object
-    :param write_batch_timeout: Write batch timeout. Type: string (or
-     Expression with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type write_batch_timeout: object
-    :param sink_retry_count: Sink retry count. Type: integer (or Expression
-     with resultType integer).
-    :type sink_retry_count: object
-    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
-     resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type sink_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the sink data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param write_behavior: Specify the write behavior when upserting documents
-     into Azure Search Index. Possible values include: 'Merge', 'Upload'
-    :type write_behavior: str or
-     ~azure.mgmt.datafactory.models.AzureSearchIndexWriteBehaviorType
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
-        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
-        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
-        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
-    }
-
-    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None:
-        super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
-        self.write_behavior = write_behavior
-        self.type = 'AzureSearchIndexSink'
-
-
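Per the docstring, `write_behavior` accepts the string values `'Merge'` or `'Upload'` (or the `AzureSearchIndexWriteBehaviorType` enum). A one-line sketch:

```python
from azure.mgmt.datafactory.models import AzureSearchIndexSink

search_sink = AzureSearchIndexSink(write_behavior='Upload')  # replaces whole documents; 'Merge' patches them
```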
-class AzureSearchLinkedService(LinkedService):
-    """Linked service for Windows Azure Search Service.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param url: Required. URL for Azure Search service. Type: string (or
-     Expression with resultType string).
-    :type url: object
-    :param key: Admin Key for Azure Search service.
-    :type key: ~azure.mgmt.datafactory.models.SecretBase
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
-    :type encrypted_credential: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-        'url': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
-        'description': {'key': 'description', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'url': {'key': 'typeProperties.url', 'type': 'object'},
-        'key': {'key': 'typeProperties.key', 'type': 'SecretBase'},
-        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
-    }
-
-    def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, key=None, encrypted_credential=None, **kwargs) -> None:
-        super(AzureSearchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
-        self.url = url
-        self.key = key
-        self.encrypted_credential = encrypted_credential
-        self.type = 'AzureSearch'
-
-
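A sketch of construction; the admin key takes any `SecretBase` subclass, and `SecureString` (the plain-text secret model referenced elsewhere in these docstrings) is assumed to be available from the same package. Values are placeholders:

```python
from azure.mgmt.datafactory.models import AzureSearchLinkedService, SecureString

search_ls = AzureSearchLinkedService(
    url='https://mysearch.search.windows.net',      # required by _validation
    key=SecureString(value='<admin-key>'),          # any SecretBase works here
)
```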
-class AzureSqlDatabaseLinkedService(LinkedService):
-    """Microsoft Azure SQL Database linked service.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param connection_string: Required. The connection string. Type: string,
-     SecureString or AzureKeyVaultSecretReference.
-    :type connection_string: object
-    :param password: The Azure key vault secret reference of password in
-     connection string.
-    :type password:
-     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
-    :param service_principal_id: The ID of the service principal used to
-     authenticate against Azure SQL Database. Type: string (or Expression with
-     resultType string).
-    :type service_principal_id: object
-    :param service_principal_key: The key of the service principal used to
-     authenticate against Azure SQL Database.
-    :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
-    :param tenant: The name or ID of the tenant to which the service principal
-     belongs. Type: string (or Expression with resultType string).
-    :type tenant: object
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
-    :type encrypted_credential: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-        'connection_string': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
-        'description': {'key': 'description', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
-        'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'},
-        'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
-        'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
-        'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
-        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
-    }
-
-    def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None:
-        super(AzureSqlDatabaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
-        self.connection_string = connection_string
-        self.password = password
-        self.service_principal_id = service_principal_id
-        self.service_principal_key = service_principal_key
-        self.tenant = tenant
-        self.encrypted_credential = encrypted_credential
-        self.type = 'AzureSqlDatabase'
-
-
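A sketch of service principal authentication; all IDs and secrets are placeholders, and the AzureSqlDW and AzureSqlMI linked services below take the same arguments:

```python
from azure.mgmt.datafactory.models import AzureSqlDatabaseLinkedService, SecureString

sql_ls = AzureSqlDatabaseLinkedService(
    connection_string='Server=tcp:myserver.database.windows.net,1433;Database=mydb;',
    service_principal_id='<app-registration-id>',
    service_principal_key=SecureString(value='<app-secret>'),  # SecretBase subclass, assumed available
    tenant='<tenant-id>',
)
```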
-class AzureSqlDWLinkedService(LinkedService):
-    """Azure SQL Data Warehouse linked service.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param connection_string: Required. The connection string. Type: string,
-     SecureString or AzureKeyVaultSecretReference.
-    :type connection_string: object
-    :param password: The Azure key vault secret reference of password in
-     connection string.
-    :type password:
-     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
-    :param service_principal_id: The ID of the service principal used to
-     authenticate against Azure SQL Data Warehouse. Type: string (or Expression
-     with resultType string).
-    :type service_principal_id: object
-    :param service_principal_key: The key of the service principal used to
-     authenticate against Azure SQL Data Warehouse.
-    :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
-    :param tenant: The name or ID of the tenant to which the service principal
-     belongs. Type: string (or Expression with resultType string).
-    :type tenant: object
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
-    :type encrypted_credential: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-        'connection_string': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
-        'description': {'key': 'description', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
-        'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'},
-        'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
-        'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
-        'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
-        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
-    }
-
-    def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None:
-        super(AzureSqlDWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
-        self.connection_string = connection_string
-        self.password = password
-        self.service_principal_id = service_principal_id
-        self.service_principal_key = service_principal_key
-        self.tenant = tenant
-        self.encrypted_credential = encrypted_credential
-        self.type = 'AzureSqlDW'
-
-
-class AzureSqlDWTableDataset(Dataset):
-    """The Azure SQL Data Warehouse dataset.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param description: Dataset description.
-    :type description: str
-    :param structure: Columns that define the structure of the dataset. Type:
-     array (or Expression with resultType array), itemType: DatasetDataElement.
-    :type structure: object
-    :param schema: Columns that define the physical type schema of the
-     dataset. Type: array (or Expression with resultType array), itemType:
-     DatasetSchemaDataElement.
-    :type schema: object
-    :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param parameters: Parameters for dataset.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     Dataset.
-    :type annotations: list[object]
-    :param folder: The folder that this Dataset is in. If not specified,
-     Dataset will appear at the root level.
-    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param table_name: This property will be retired. Please consider using
-     schema + table properties instead.
-    :type table_name: object
-    :param azure_sql_dw_table_dataset_schema: The schema name of the Azure SQL
-     Data Warehouse. Type: string (or Expression with resultType string).
-    :type azure_sql_dw_table_dataset_schema: object
-    :param table: The table name of the Azure SQL Data Warehouse. Type: string
-     (or Expression with resultType string).
-    :type table: object
-    """
-
-    _validation = {
-        'linked_service_name': {'required': True},
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'description': {'key': 'description', 'type': 'str'},
-        'structure': {'key': 'structure', 'type': 'object'},
-        'schema': {'key': 'schema', 'type': 'object'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
-        'type': {'key': 'type', 'type': 'str'},
-        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
-        'azure_sql_dw_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
-        'table': {'key': 'typeProperties.table', 'type': 'object'},
-    }
-
-    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, azure_sql_dw_table_dataset_schema=None, table=None, **kwargs) -> None:
-        super(AzureSqlDWTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
-        self.table_name = table_name
-        self.azure_sql_dw_table_dataset_schema = azure_sql_dw_table_dataset_schema
-        self.table = table
-        self.type = 'AzureSqlDWTable'
-
-
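Since `table_name` is marked for retirement, new code should use the schema + table pair; the Managed Instance and Azure SQL table datasets below follow the identical pattern. A sketch with placeholder names:

```python
from azure.mgmt.datafactory.models import AzureSqlDWTableDataset, LinkedServiceReference

dw_ds = AzureSqlDWTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='MyAzureSqlDWLinkedService'),
    azure_sql_dw_table_dataset_schema='dbo',  # -> typeProperties.schema
    table='FactSales',                        # preferred over the retired table_name
)
```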
-class AzureSqlMILinkedService(LinkedService):
-    """Azure SQL Managed Instance linked service.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param connection_string: Required. The connection string. Type: string,
-     SecureString or AzureKeyVaultSecretReference.
-    :type connection_string: object
-    :param password: The Azure key vault secret reference of password in
-     connection string.
-    :type password:
-     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
-    :param service_principal_id: The ID of the service principal used to
-     authenticate against Azure SQL Managed Instance. Type: string (or
-     Expression with resultType string).
-    :type service_principal_id: object
-    :param service_principal_key: The key of the service principal used to
-     authenticate against Azure SQL Managed Instance.
-    :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
-    :param tenant: The name or ID of the tenant to which the service principal
-     belongs. Type: string (or Expression with resultType string).
-    :type tenant: object
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
-    :type encrypted_credential: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-        'connection_string': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
-        'description': {'key': 'description', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
-        'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'},
-        'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
-        'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
-        'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
-        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
-    }
-
-    def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None:
-        super(AzureSqlMILinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
-        self.connection_string = connection_string
-        self.password = password
-        self.service_principal_id = service_principal_id
-        self.service_principal_key = service_principal_key
-        self.tenant = tenant
-        self.encrypted_credential = encrypted_credential
-        self.type = 'AzureSqlMI'
-
-
-class AzureSqlMITableDataset(Dataset):
-    """The Azure SQL Managed Instance dataset.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param description: Dataset description.
-    :type description: str
-    :param structure: Columns that define the structure of the dataset. Type:
-     array (or Expression with resultType array), itemType: DatasetDataElement.
-    :type structure: object
-    :param schema: Columns that define the physical type schema of the
-     dataset. Type: array (or Expression with resultType array), itemType:
-     DatasetSchemaDataElement.
-    :type schema: object
-    :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param parameters: Parameters for dataset.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     Dataset.
-    :type annotations: list[object]
-    :param folder: The folder that this Dataset is in. If not specified,
-     Dataset will appear at the root level.
-    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param table_name: This property will be retired. Please consider using
-     schema + table properties instead.
-    :type table_name: object
-    :param azure_sql_mi_table_dataset_schema: The schema name of the Azure SQL
-     Managed Instance. Type: string (or Expression with resultType string).
-    :type azure_sql_mi_table_dataset_schema: object
-    :param table: The table name of the Azure SQL Managed Instance dataset.
-     Type: string (or Expression with resultType string).
-    :type table: object
-    """
-
-    _validation = {
-        'linked_service_name': {'required': True},
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'description': {'key': 'description', 'type': 'str'},
-        'structure': {'key': 'structure', 'type': 'object'},
-        'schema': {'key': 'schema', 'type': 'object'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
-        'type': {'key': 'type', 'type': 'str'},
-        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
-        'azure_sql_mi_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
-        'table': {'key': 'typeProperties.table', 'type': 'object'},
-    }
-
-    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, azure_sql_mi_table_dataset_schema=None, table=None, **kwargs) -> None:
-        super(AzureSqlMITableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
-        self.table_name = table_name
-        self.azure_sql_mi_table_dataset_schema = azure_sql_mi_table_dataset_schema
-        self.table = table
-        self.type = 'AzureSqlMITable'
-
-
-class AzureSqlSink(CopySink):
-    """A copy activity Azure SQL sink.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param write_batch_size: Write batch size. Type: integer (or Expression
-     with resultType integer), minimum: 0.
-    :type write_batch_size: object
-    :param write_batch_timeout: Write batch timeout. Type: string (or
-     Expression with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type write_batch_timeout: object
-    :param sink_retry_count: Sink retry count. Type: integer (or Expression
-     with resultType integer).
-    :type sink_retry_count: object
-    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
-     resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type sink_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the sink data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param sql_writer_stored_procedure_name: SQL writer stored procedure name.
-     Type: string (or Expression with resultType string).
-    :type sql_writer_stored_procedure_name: object
-    :param sql_writer_table_type: SQL writer table type. Type: string (or
-     Expression with resultType string).
-    :type sql_writer_table_type: object
-    :param pre_copy_script: SQL pre-copy script. Type: string (or Expression
-     with resultType string).
-    :type pre_copy_script: object
-    :param stored_procedure_parameters: SQL stored procedure parameters.
-    :type stored_procedure_parameters: dict[str,
-     ~azure.mgmt.datafactory.models.StoredProcedureParameter]
-    :param stored_procedure_table_type_parameter_name: The stored procedure
-     parameter name of the table type. Type: string (or Expression with
-     resultType string).
-    :type stored_procedure_table_type_parameter_name: object
-    :param table_option: The option to handle sink table, such as autoCreate.
-     For now only 'autoCreate' value is supported. Type: string (or Expression
-     with resultType string).
-    :type table_option: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
-        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
-        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
-        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
-        'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
-        'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
-        'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
-        'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'},
-        'table_option': {'key': 'tableOption', 'type': 'object'},
-    }
-
-    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None, **kwargs) -> None:
-        super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
-        self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name
-        self.sql_writer_table_type = sql_writer_table_type
-        self.pre_copy_script = pre_copy_script
-        self.stored_procedure_parameters = stored_procedure_parameters
-        self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name
-        self.table_option = table_option
-        self.type = 'AzureSqlSink'
-
-
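A sketch of the common configuration; per the docstring, `'autoCreate'` is currently the only supported `table_option` value, and the SQL text is a placeholder:

```python
from azure.mgmt.datafactory.models import AzureSqlSink

sql_sink = AzureSqlSink(
    pre_copy_script='TRUNCATE TABLE dbo.Staging',  # executed before rows are written
    table_option='autoCreate',                     # create the sink table if missing
)
```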
-class AzureSqlSource(CopySource):
-    """A copy activity Azure SQL source.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param source_retry_count: Source retry count. Type: integer (or
-     Expression with resultType integer).
-    :type source_retry_count: object
-    :param source_retry_wait: Source retry wait. Type: string (or Expression
-     with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type source_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the source data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param sql_reader_query: SQL reader query. Type: string (or Expression
-     with resultType string).
-    :type sql_reader_query: object
-    :param sql_reader_stored_procedure_name: Name of the stored procedure for
-     a SQL Database source. This cannot be used at the same time as
-     SqlReaderQuery. Type: string (or Expression with resultType string).
-    :type sql_reader_stored_procedure_name: object
-    :param stored_procedure_parameters: Value and type setting for stored
-     procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}".
-    :type stored_procedure_parameters: dict[str,
-     ~azure.mgmt.datafactory.models.StoredProcedureParameter]
-    :param produce_additional_types: Which additional types to produce.
-    :type produce_additional_types: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
-        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
-        'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'},
-        'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
-        'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'},
-    }
-
-    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None:
-        super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
-        self.sql_reader_query = sql_reader_query
-        self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name
-        self.stored_procedure_parameters = stored_procedure_parameters
-        self.produce_additional_types = produce_additional_types
-        self.type = 'AzureSqlSource'
-
-
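A sketch of the stored-procedure form, which is mutually exclusive with `sql_reader_query`; the procedure name is a placeholder, and the `'Int'` casing assumes the StoredProcedureParameter type enum rather than the lowercase `"int"` shown in the docstring example:

```python
from azure.mgmt.datafactory.models import AzureSqlSource, StoredProcedureParameter

sql_source = AzureSqlSource(
    sql_reader_stored_procedure_name='dbo.usp_GetOrders',
    stored_procedure_parameters={
        'Parameter1': StoredProcedureParameter(value='1', type='Int'),
    },
)
```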
-class AzureSqlTableDataset(Dataset):
-    """The Azure SQL Server database dataset.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param description: Dataset description.
-    :type description: str
-    :param structure: Columns that define the structure of the dataset. Type:
-     array (or Expression with resultType array), itemType: DatasetDataElement.
-    :type structure: object
-    :param schema: Columns that define the physical type schema of the
-     dataset. Type: array (or Expression with resultType array), itemType:
-     DatasetSchemaDataElement.
-    :type schema: object
-    :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param parameters: Parameters for dataset.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     Dataset.
-    :type annotations: list[object]
-    :param folder: The folder that this Dataset is in. If not specified,
-     Dataset will appear at the root level.
-    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param table_name: This property will be retired. Please consider using
-     schema + table properties instead.
-    :type table_name: object
-    :param azure_sql_table_dataset_schema: The schema name of the Azure SQL
-     database. Type: string (or Expression with resultType string).
-    :type azure_sql_table_dataset_schema: object
-    :param table: The table name of the Azure SQL database. Type: string (or
-     Expression with resultType string).
-    :type table: object
-    """
-
-    _validation = {
-        'linked_service_name': {'required': True},
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'description': {'key': 'description', 'type': 'str'},
-        'structure': {'key': 'structure', 'type': 'object'},
-        'schema': {'key': 'schema', 'type': 'object'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
-        'type': {'key': 'type', 'type': 'str'},
-        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
-        'azure_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
-        'table': {'key': 'typeProperties.table', 'type': 'object'},
-    }
-
-    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, azure_sql_table_dataset_schema=None, table=None, **kwargs) -> None:
-        super(AzureSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
-        self.table_name = table_name
-        self.azure_sql_table_dataset_schema = azure_sql_table_dataset_schema
-        self.table = table
-        self.type = 'AzureSqlTable'
-
-
-class AzureStorageLinkedService(LinkedService):
-    """The storage account linked service.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param connection_string: The connection string. It is mutually exclusive
-     with sasUri property. Type: string, SecureString or
-     AzureKeyVaultSecretReference.
-    :type connection_string: object
-    :param account_key: The Azure key vault secret reference of accountKey in
-     connection string.
-    :type account_key:
-     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
-    :param sas_uri: SAS URI of the Azure Storage resource. It is mutually
-     exclusive with connectionString property. Type: string, SecureString or
-     AzureKeyVaultSecretReference.
-    :type sas_uri: object
-    :param sas_token: The Azure key vault secret reference of sasToken in sas
-     uri.
-    :type sas_token:
-     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
-    :type encrypted_credential: str
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
-        'description': {'key': 'description', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
-        'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'},
-        'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'},
-        'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'},
-        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'},
-    }
-
-    def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, account_key=None, sas_uri=None, sas_token=None, encrypted_credential: str=None, **kwargs) -> None:
-        super(AzureStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
-        self.connection_string = connection_string
-        self.account_key = account_key
-        self.sas_uri = sas_uri
-        self.sas_token = sas_token
-        self.encrypted_credential = encrypted_credential
-        self.type = 'AzureStorage'
-
-
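Per the docstring, `connection_string` and `sas_uri` are mutually exclusive, so a caller sets exactly one; the URI below is a placeholder, and the same applies to the AzureTableStorage variant further down:

```python
from azure.mgmt.datafactory.models import AzureStorageLinkedService

# SAS-based authentication; use connection_string instead for key-based auth.
storage_ls = AzureStorageLinkedService(
    sas_uri='https://myaccount.blob.core.windows.net/?<sas-token>',
)
```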
-class AzureTableDataset(Dataset):
-    """The Azure Table storage dataset.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param description: Dataset description.
-    :type description: str
-    :param structure: Columns that define the structure of the dataset. Type:
-     array (or Expression with resultType array), itemType: DatasetDataElement.
-    :type structure: object
-    :param schema: Columns that define the physical type schema of the
-     dataset. Type: array (or Expression with resultType array), itemType:
-     DatasetSchemaDataElement.
-    :type schema: object
-    :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param parameters: Parameters for dataset.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     Dataset.
-    :type annotations: list[object]
-    :param folder: The folder that this Dataset is in. If not specified,
-     Dataset will appear at the root level.
-    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param table_name: Required. The table name of the Azure Table storage.
-     Type: string (or Expression with resultType string).
-    :type table_name: object
-    """
-
-    _validation = {
-        'linked_service_name': {'required': True},
-        'type': {'required': True},
-        'table_name': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'description': {'key': 'description', 'type': 'str'},
-        'structure': {'key': 'structure', 'type': 'object'},
-        'schema': {'key': 'schema', 'type': 'object'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
-        'type': {'key': 'type', 'type': 'str'},
-        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
-    }
-
-    def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None:
-        super(AzureTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
-        self.table_name = table_name
-        self.type = 'AzureTable'
-
-
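Here `table_name` is still required (it is not split into schema + table, since Table storage has no schema concept). A sketch with placeholder names:

```python
from azure.mgmt.datafactory.models import AzureTableDataset, LinkedServiceReference

table_ds = AzureTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='MyAzureTableStorageLinkedService'),
    table_name='DeviceTelemetry',  # required by _validation
)
```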
-class AzureTableSink(CopySink):
-    """A copy activity Azure Table sink.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param write_batch_size: Write batch size. Type: integer (or Expression
-     with resultType integer), minimum: 0.
-    :type write_batch_size: object
-    :param write_batch_timeout: Write batch timeout. Type: string (or
-     Expression with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type write_batch_timeout: object
-    :param sink_retry_count: Sink retry count. Type: integer (or Expression
-     with resultType integer).
-    :type sink_retry_count: object
-    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
-     resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type sink_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the sink data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param azure_table_default_partition_key_value: Azure Table default
-     partition key value. Type: string (or Expression with resultType string).
-    :type azure_table_default_partition_key_value: object
-    :param azure_table_partition_key_name: Azure Table partition key name.
-     Type: string (or Expression with resultType string).
-    :type azure_table_partition_key_name: object
-    :param azure_table_row_key_name: Azure Table row key name. Type: string
-     (or Expression with resultType string).
-    :type azure_table_row_key_name: object
-    :param azure_table_insert_type: Azure Table insert type. Type: string (or
-     Expression with resultType string).
-    :type azure_table_insert_type: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
-        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
-        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
-        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'},
-        'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'},
-        'azure_table_row_key_name': {'key': 'azureTableRowKeyName', 'type': 'object'},
-        'azure_table_insert_type': {'key': 'azureTableInsertType', 'type': 'object'},
-    }
-
-    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, azure_table_default_partition_key_value=None, azure_table_partition_key_name=None, azure_table_row_key_name=None, azure_table_insert_type=None, **kwargs) -> None:
-        super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
-        self.azure_table_default_partition_key_value = azure_table_default_partition_key_value
-        self.azure_table_partition_key_name = azure_table_partition_key_name
-        self.azure_table_row_key_name = azure_table_row_key_name
-        self.azure_table_insert_type = azure_table_insert_type
-        self.type = 'AzureTableSink'
-
-
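A sketch mapping source columns onto Table storage keys; the column names are placeholders, and the default partition key applies when the named column is empty:

```python
from azure.mgmt.datafactory.models import AzureTableSink

table_sink = AzureTableSink(
    azure_table_partition_key_name='DeviceId',            # column providing PartitionKey
    azure_table_row_key_name='EventTime',                 # column providing RowKey
    azure_table_default_partition_key_value='unknown-device',
)
```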
-class AzureTableSource(CopySource):
-    """A copy activity Azure Table source.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param source_retry_count: Source retry count. Type: integer (or
-     Expression with resultType integer).
-    :type source_retry_count: object
-    :param source_retry_wait: Source retry wait. Type: string (or Expression
-     with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type source_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the source data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param azure_table_source_query: Azure Table source query. Type: string
-     (or Expression with resultType string).
-    :type azure_table_source_query: object
-    :param azure_table_source_ignore_table_not_found: Azure Table source
-     ignore table not found. Type: boolean (or Expression with resultType
-     boolean).
-    :type azure_table_source_ignore_table_not_found: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
-        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'},
-        'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'},
-    }
-
-    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, azure_table_source_query=None, azure_table_source_ignore_table_not_found=None, **kwargs) -> None:
-        super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
-        self.azure_table_source_query = azure_table_source_query
-        self.azure_table_source_ignore_table_not_found = azure_table_source_ignore_table_not_found
-        self.type = 'AzureTableSource'
-
-
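A companion sketch for the source side; the filter string is a placeholder, and the boolean flag lets the copy succeed even if the table is absent:

```python
from azure.mgmt.datafactory.models import AzureTableSource

table_source = AzureTableSource(
    azure_table_source_query="PartitionKey eq 'device-42'",
    azure_table_source_ignore_table_not_found=True,
)
```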
-
-
-class AzureTableStorageLinkedService(LinkedService):
-    """The Azure Table storage linked service.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param connection_string: The connection string. It is mutually exclusive
-     with the sasUri property. Type: string, SecureString or
-     AzureKeyVaultSecretReference.
-    :type connection_string: object
-    :param account_key: The Azure key vault secret reference of accountKey in
-     connection string.
-    :type account_key:
-     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
-    :param sas_uri: SAS URI of the Azure Storage resource. It is mutually
-     exclusive with the connectionString property. Type: string, SecureString
-     or AzureKeyVaultSecretReference.
-    :type sas_uri: object
-    :param sas_token: The Azure key vault secret reference of sasToken in sas
-     uri.
-    :type sas_token:
-     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
-    :type encrypted_credential: str
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
-        'description': {'key': 'description', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
-        'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'},
-        'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'},
-        'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'},
-        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'},
-    }
-
-    def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, account_key=None, sas_uri=None, sas_token=None, encrypted_credential: str=None, **kwargs) -> None:
-        super(AzureTableStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
-        self.connection_string = connection_string
-        self.account_key = account_key
-        self.sas_uri = sas_uri
-        self.sas_token = sas_token
-        self.encrypted_credential = encrypted_credential
-        self.type = 'AzureTableStorage'
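A minimal sketch of constructing this linked service, assuming connection-string authentication (the connection string value is a placeholder); connectionString and sasUri are mutually exclusive, so exactly one should be set:

    from azure.mgmt.datafactory.models import (
        AzureTableStorageLinkedService, SecureString)

    ls = AzureTableStorageLinkedService(
        connection_string=SecureString(
            value='DefaultEndpointsProtocol=https;'
                  'AccountName=<account>;AccountKey=<key>'))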
-
-
-class BinaryDataset(Dataset):
-    """Binary dataset.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param description: Dataset description.
-    :type description: str
-    :param structure: Columns that define the structure of the dataset. Type:
-     array (or Expression with resultType array), itemType: DatasetDataElement.
-    :type structure: object
-    :param schema: Columns that define the physical type schema of the
-     dataset. Type: array (or Expression with resultType array), itemType:
-     DatasetSchemaDataElement.
-    :type schema: object
-    :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param parameters: Parameters for dataset.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     Dataset.
-    :type annotations: list[object]
-    :param folder: The folder that this Dataset is in. If not specified,
-     Dataset will appear at the root level.
-    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param location: Required. The location of the Binary storage.
-    :type location: ~azure.mgmt.datafactory.models.DatasetLocation
-    :param compression: The data compression method used for the binary
-     dataset.
-    :type compression: ~azure.mgmt.datafactory.models.DatasetCompression
-    """
-
-    _validation = {
-        'linked_service_name': {'required': True},
-        'type': {'required': True},
-        'location': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'description': {'key': 'description', 'type': 'str'},
-        'structure': {'key': 'structure', 'type': 'object'},
-        'schema': {'key': 'schema', 'type': 'object'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
-        'type': {'key': 'type', 'type': 'str'},
-        'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'},
-        'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'},
-    }
-
-    def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, compression=None, **kwargs) -> None:
-        super(BinaryDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
-        self.location = location
-        self.compression = compression
-        self.type = 'Binary'
-
-
-class BinarySink(CopySink):
-    """A copy activity Binary sink.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param write_batch_size: Write batch size. Type: integer (or Expression
-     with resultType integer), minimum: 0.
-    :type write_batch_size: object
-    :param write_batch_timeout: Write batch timeout. Type: string (or
-     Expression with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type write_batch_timeout: object
-    :param sink_retry_count: Sink retry count. Type: integer (or Expression
-     with resultType integer).
-    :type sink_retry_count: object
-    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
-     resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type sink_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the sink data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param store_settings: Binary store settings.
-    :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
-        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
-        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
-        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
-    }
-
-    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None:
-        super(BinarySink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
-        self.store_settings = store_settings
-        self.type = 'BinarySink'
-
-
-class BinarySource(CopySource):
-    """A copy activity Binary source.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param source_retry_count: Source retry count. Type: integer (or
-     Expression with resultType integer).
-    :type source_retry_count: object
-    :param source_retry_wait: Source retry wait. Type: string (or Expression
-     with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type source_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the source data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param store_settings: Binary store settings.
-    :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
-        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
-    }
-
-    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None:
-        super(BinarySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
-        self.store_settings = store_settings
-        self.type = 'BinarySource'
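A rough sketch of a binary dataset plus source/sink pair, assuming the non-polymorphic DatasetLocation model of this SDK version and a placeholder linked service name ('BlobStorageLS'); store_settings are optional and the copy falls back to the dataset's location:

    from azure.mgmt.datafactory.models import (
        BinaryDataset, BinarySource, BinarySink, LinkedServiceReference,
        DatasetLocation)

    dataset = BinaryDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='BlobStorageLS'),              # placeholder
        location=DatasetLocation(
            type='AzureBlobStorageLocation',              # service discriminator
            folder_path='container/folder',
            file_name='data.bin'))
    source = BinarySource()   # bytes are moved as-is, no format parsing
    sink = BinarySink()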
-
-
-class Trigger(Model):
-    """Azure Data Factory nested object which contains information about
-    creating a pipeline run.
-
-    You probably want to use the sub-classes and not this class directly.
-    Known sub-classes are: RerunTumblingWindowTrigger, ChainingTrigger,
-    TumblingWindowTrigger, MultiplePipelineTrigger
-
-    Variables are only populated by the server, and will be ignored when
-    sending a request.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param description: Trigger description.
-    :type description: str
-    :ivar runtime_state: Indicates if trigger is running or not. Updated when
-     Start/Stop APIs are called on the Trigger. Possible values include:
-     'Started', 'Stopped', 'Disabled'
-    :vartype runtime_state: str or
-     ~azure.mgmt.datafactory.models.TriggerRuntimeState
-    :param annotations: List of tags that can be used for describing the
-     trigger.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    """
-
-    _validation = {
-        'runtime_state': {'readonly': True},
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'description': {'key': 'description', 'type': 'str'},
-        'runtime_state': {'key': 'runtimeState', 'type': 'str'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-    }
-
-    _subtype_map = {
-        'type': {'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'ChainingTrigger': 'ChainingTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'}
-    }
-
-    def __init__(self, *, additional_properties=None, description: str=None, annotations=None, **kwargs) -> None:
-        super(Trigger, self).__init__(**kwargs)
-        self.additional_properties = additional_properties
-        self.description = description
-        self.runtime_state = None
-        self.annotations = annotations
-        self.type = None
-
-
-class MultiplePipelineTrigger(Trigger):
-    """Base class for all triggers that support a one-to-many model from
-    trigger to pipeline.
-
-    You probably want to use the sub-classes and not this class directly.
-    Known sub-classes are: BlobEventsTrigger, BlobTrigger, ScheduleTrigger
-
-    Variables are only populated by the server, and will be ignored when
-    sending a request.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param description: Trigger description.
-    :type description: str
-    :ivar runtime_state: Indicates if trigger is running or not. Updated when
-     Start/Stop APIs are called on the Trigger. Possible values include:
-     'Started', 'Stopped', 'Disabled'
-    :vartype runtime_state: str or
-     ~azure.mgmt.datafactory.models.TriggerRuntimeState
-    :param annotations: List of tags that can be used for describing the
-     trigger.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param pipelines: Pipelines that need to be started.
-    :type pipelines:
-     list[~azure.mgmt.datafactory.models.TriggerPipelineReference]
-    """
-
-    _validation = {
-        'runtime_state': {'readonly': True},
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'description': {'key': 'description', 'type': 'str'},
-        'runtime_state': {'key': 'runtimeState', 'type': 'str'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'},
-    }
-
-    _subtype_map = {
-        'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'ScheduleTrigger': 'ScheduleTrigger'}
-    }
-
-    def __init__(self, *, additional_properties=None, description: str=None, annotations=None, pipelines=None, **kwargs) -> None:
-        super(MultiplePipelineTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs)
-        self.pipelines = pipelines
-        self.type = 'MultiplePipelineTrigger'
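The _subtype_map entries on Trigger and MultiplePipelineTrigger are what let msrest resolve the concrete class from the 'type' discriminator when deserializing a REST payload. A rough sketch, with an illustrative payload only:

    from msrest import Deserializer
    import azure.mgmt.datafactory.models as models

    # Build the deserializer over all model classes in this package.
    deserializer = Deserializer(
        {k: v for k, v in vars(models).items() if isinstance(v, type)})
    payload = {
        'type': 'BlobTrigger',   # discriminator resolved via _subtype_map
        'typeProperties': {
            'folderPath': 'container/path',
            'maxConcurrency': 5,
            'linkedService': {'referenceName': 'AzureStorageLS',
                              'type': 'LinkedServiceReference'},
        },
    }
    trigger = deserializer('Trigger', payload)   # -> models.BlobTrigger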
-
-
-class BlobEventsTrigger(MultiplePipelineTrigger):
-    """Trigger that runs every time a Blob event occurs.
-
-    Variables are only populated by the server, and will be ignored when
-    sending a request.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param description: Trigger description.
-    :type description: str
-    :ivar runtime_state: Indicates if trigger is running or not. Updated when
-     Start/Stop APIs are called on the Trigger. Possible values include:
-     'Started', 'Stopped', 'Disabled'
-    :vartype runtime_state: str or
-     ~azure.mgmt.datafactory.models.TriggerRuntimeState
-    :param annotations: List of tags that can be used for describing the
-     trigger.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param pipelines: Pipelines that need to be started.
-    :type pipelines:
-     list[~azure.mgmt.datafactory.models.TriggerPipelineReference]
-    :param blob_path_begins_with: The blob path must begin with the pattern
-     provided for trigger to fire. For example, '/records/blobs/december/' will
-     only fire the trigger for blobs in the december folder under the records
-     container. At least one of these must be provided: blobPathBeginsWith,
-     blobPathEndsWith.
-    :type blob_path_begins_with: str
-    :param blob_path_ends_with: The blob path must end with the pattern
-     provided for trigger to fire. For example, 'december/boxes.csv' will only
-     fire the trigger for blobs named boxes in a december folder. At least one
-     of these must be provided: blobPathBeginsWith, blobPathEndsWith.
-    :type blob_path_ends_with: str
-    :param events: Required. The type of events that cause this trigger to
-     fire.
-    :type events: list[str or ~azure.mgmt.datafactory.models.BlobEventTypes]
-    :param scope: Required. The ARM resource ID of the Storage Account.
-    :type scope: str
-    """
-
-    _validation = {
-        'runtime_state': {'readonly': True},
-        'type': {'required': True},
-        'events': {'required': True},
-        'scope': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'description': {'key': 'description', 'type': 'str'},
-        'runtime_state': {'key': 'runtimeState', 'type': 'str'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'},
-        'blob_path_begins_with': {'key': 'typeProperties.blobPathBeginsWith', 'type': 'str'},
-        'blob_path_ends_with': {'key': 'typeProperties.blobPathEndsWith', 'type': 'str'},
-        'events': {'key': 'typeProperties.events', 'type': '[str]'},
-        'scope': {'key': 'typeProperties.scope', 'type': 'str'},
-    }
-
-    def __init__(self, *, events, scope: str, additional_properties=None, description: str=None, annotations=None, pipelines=None, blob_path_begins_with: str=None, blob_path_ends_with: str=None, **kwargs) -> None:
-        super(BlobEventsTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs)
-        self.blob_path_begins_with = blob_path_begins_with
-        self.blob_path_ends_with = blob_path_ends_with
-        self.events = events
-        self.scope = scope
-        self.type = 'BlobEventsTrigger'
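A minimal sketch of a blob-events trigger that starts a pipeline on blob creation; the ARM resource ID and pipeline name are placeholders:

    from azure.mgmt.datafactory.models import (
        BlobEventsTrigger, TriggerPipelineReference, PipelineReference)

    trigger = BlobEventsTrigger(
        events=['Microsoft.Storage.BlobCreated'],
        scope='/subscriptions/<sub>/resourceGroups/<rg>/providers/'
              'Microsoft.Storage/storageAccounts/<account>',   # placeholder
        blob_path_begins_with='/records/blobs/december/',
        pipelines=[TriggerPipelineReference(
            pipeline_reference=PipelineReference(
                reference_name='MyPipeline'))])                # placeholder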
-
-
-class BlobSink(CopySink):
-    """A copy activity Azure Blob sink.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param write_batch_size: Write batch size. Type: integer (or Expression
-     with resultType integer), minimum: 0.
-    :type write_batch_size: object
-    :param write_batch_timeout: Write batch timeout. Type: string (or
-     Expression with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type write_batch_timeout: object
-    :param sink_retry_count: Sink retry count. Type: integer (or Expression
-     with resultType integer).
-    :type sink_retry_count: object
-    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
-     resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type sink_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the sink data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param blob_writer_overwrite_files: Blob writer overwrite files. Type:
-     boolean (or Expression with resultType boolean).
-    :type blob_writer_overwrite_files: object
-    :param blob_writer_date_time_format: Blob writer date time format. Type:
-     string (or Expression with resultType string).
-    :type blob_writer_date_time_format: object
-    :param blob_writer_add_header: Blob writer add header. Type: boolean (or
-     Expression with resultType boolean).
-    :type blob_writer_add_header: object
-    :param copy_behavior: The type of copy behavior for copy sink.
-    :type copy_behavior: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
-        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
-        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
-        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'},
-        'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'},
-        'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'},
-        'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
-    }
-
-    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, blob_writer_overwrite_files=None, blob_writer_date_time_format=None, blob_writer_add_header=None, copy_behavior=None, **kwargs) -> None:
-        super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
-        self.blob_writer_overwrite_files = blob_writer_overwrite_files
-        self.blob_writer_date_time_format = blob_writer_date_time_format
-        self.blob_writer_add_header = blob_writer_add_header
-        self.copy_behavior = copy_behavior
-        self.type = 'BlobSink'
-
-
-class BlobSource(CopySource):
-    """A copy activity Azure Blob source.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param source_retry_count: Source retry count. Type: integer (or
-     Expression with resultType integer).
-    :type source_retry_count: object
-    :param source_retry_wait: Source retry wait. Type: string (or Expression
-     with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type source_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the source data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param treat_empty_as_null: Treat empty as null. Type: boolean (or
-     Expression with resultType boolean).
-    :type treat_empty_as_null: object
-    :param skip_header_line_count: Number of header lines to skip from each
-     blob. Type: integer (or Expression with resultType integer).
-    :type skip_header_line_count: object
-    :param recursive: If true, files under the folder path will be read
-     recursively. Default is true. Type: boolean (or Expression with resultType
-     boolean).
-    :type recursive: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
-        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'},
-        'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'},
-        'recursive': {'key': 'recursive', 'type': 'object'},
-    }
-
-    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, treat_empty_as_null=None, skip_header_line_count=None, recursive=None, **kwargs) -> None:
-        super(BlobSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
-        self.treat_empty_as_null = treat_empty_as_null
-        self.skip_header_line_count = skip_header_line_count
-        self.recursive = recursive
-        self.type = 'BlobSource'
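A short sketch pairing the blob source and sink above; the copy-behavior string follows the service convention (one of 'PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles'):

    from azure.mgmt.datafactory.models import BlobSource, BlobSink

    source = BlobSource(
        recursive=True,              # walk the folder path recursively
        skip_header_line_count=1,    # drop one header line per blob
        treat_empty_as_null=True)
    sink = BlobSink(
        copy_behavior='PreserveHierarchy',   # keep the source folder layout
        blob_writer_overwrite_files=True)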
-
-
-class BlobTrigger(MultiplePipelineTrigger):
-    """Trigger that runs every time the selected Blob container changes.
-
-    Variables are only populated by the server, and will be ignored when
-    sending a request.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param description: Trigger description.
-    :type description: str
-    :ivar runtime_state: Indicates if trigger is running or not. Updated when
-     Start/Stop APIs are called on the Trigger. Possible values include:
-     'Started', 'Stopped', 'Disabled'
-    :vartype runtime_state: str or
-     ~azure.mgmt.datafactory.models.TriggerRuntimeState
-    :param annotations: List of tags that can be used for describing the
-     trigger.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param pipelines: Pipelines that need to be started.
-    :type pipelines:
-     list[~azure.mgmt.datafactory.models.TriggerPipelineReference]
-    :param folder_path: Required. The path of the container/folder that will
-     trigger the pipeline.
-    :type folder_path: str
-    :param max_concurrency: Required. The max number of parallel files to
-     handle when it is triggered.
-    :type max_concurrency: int
-    :param linked_service: Required. The Azure Storage linked service
-     reference.
-    :type linked_service:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    """
-
-    _validation = {
-        'runtime_state': {'readonly': True},
-        'type': {'required': True},
-        'folder_path': {'required': True},
-        'max_concurrency': {'required': True},
-        'linked_service': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'description': {'key': 'description', 'type': 'str'},
-        'runtime_state': {'key': 'runtimeState', 'type': 'str'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'},
-        'folder_path': {'key': 'typeProperties.folderPath', 'type': 'str'},
-        'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'},
-        'linked_service': {'key': 'typeProperties.linkedService', 'type': 'LinkedServiceReference'},
-    }
-
-    def __init__(self, *, folder_path: str, max_concurrency: int, linked_service, additional_properties=None, description: str=None, annotations=None, pipelines=None, **kwargs) -> None:
-        super(BlobTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs)
-        self.folder_path = folder_path
-        self.max_concurrency = max_concurrency
-        self.linked_service = linked_service
-        self.type = 'BlobTrigger'
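A minimal sketch of a blob trigger; the linked service and pipeline names are placeholders:

    from azure.mgmt.datafactory.models import (
        BlobTrigger, LinkedServiceReference, TriggerPipelineReference,
        PipelineReference)

    trigger = BlobTrigger(
        folder_path='mycontainer/input',   # container/folder being watched
        max_concurrency=10,                # parallel files handled per firing
        linked_service=LinkedServiceReference(
            reference_name='AzureStorageLS'),            # placeholder
        pipelines=[TriggerPipelineReference(
            pipeline_reference=PipelineReference(
                reference_name='ProcessBlobs'))])        # placeholder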
-
-
-class CassandraLinkedService(LinkedService):
-    """Linked service for Cassandra data source.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param host: Required. Host name for connection. Type: string (or
-     Expression with resultType string).
-    :type host: object
-    :param authentication_type: AuthenticationType to be used for connection.
-     Type: string (or Expression with resultType string).
-    :type authentication_type: object
-    :param port: The port for the connection. Type: integer (or Expression
-     with resultType integer).
-    :type port: object
-    :param username: Username for authentication. Type: string (or Expression
-     with resultType string).
-    :type username: object
-    :param password: Password for authentication.
-    :type password: ~azure.mgmt.datafactory.models.SecretBase
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
-    :type encrypted_credential: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-        'host': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
-        'description': {'key': 'description', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'host': {'key': 'typeProperties.host', 'type': 'object'},
-        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'},
-        'port': {'key': 'typeProperties.port', 'type': 'object'},
-        'username': {'key': 'typeProperties.username', 'type': 'object'},
-        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
-        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
-    }
-
-    def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, port=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None:
-        super(CassandraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
-        self.host = host
-        self.authentication_type = authentication_type
-        self.port = port
-        self.username = username
-        self.password = password
-        self.encrypted_credential = encrypted_credential
-        self.type = 'Cassandra'
-
-
-class CassandraSource(CopySource):
-    """A copy activity source for a Cassandra database.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param source_retry_count: Source retry count. Type: integer (or
-     Expression with resultType integer).
-    :type source_retry_count: object
-    :param source_retry_wait: Source retry wait. Type: string (or Expression
-     with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type source_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the source data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param query: Database query. Should be a SQL-92 query expression or
-     Cassandra Query Language (CQL) command. Type: string (or Expression with
-     resultType string).
-    :type query: object
-    :param consistency_level: The consistency level specifies how many
-     Cassandra servers must respond to a read request before returning data to
-     the client application. Cassandra checks the specified number of Cassandra
-     servers for data to satisfy the read request. Must be one of
-     cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is
-     case-insensitive. Possible values include: 'ALL', 'EACH_QUORUM', 'QUORUM',
-     'LOCAL_QUORUM', 'ONE', 'TWO', 'THREE', 'LOCAL_ONE', 'SERIAL',
-     'LOCAL_SERIAL'
-    :type consistency_level: str or
-     ~azure.mgmt.datafactory.models.CassandraSourceReadConsistencyLevels
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
-        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'query': {'key': 'query', 'type': 'object'},
-        'consistency_level': {'key': 'consistencyLevel', 'type': 'str'},
-    }
-
-    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, consistency_level=None, **kwargs) -> None:
-        super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
-        self.query = query
-        self.consistency_level = consistency_level
-        self.type = 'CassandraSource'
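A minimal sketch of the Cassandra linked service and source together; host and credentials are placeholders, and the consistency level uses one of the enum values documented above:

    from azure.mgmt.datafactory.models import (
        CassandraLinkedService, CassandraSource, SecureString)

    ls = CassandraLinkedService(
        host='cassandra.internal.contoso.com',   # placeholder host
        port=9042,
        username='admin',
        password=SecureString(value='<password>'))
    source = CassandraSource(
        query='SELECT * FROM sales.orders',
        consistency_level='LOCAL_QUORUM')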
-
-
-class CassandraTableDataset(Dataset):
-    """The Cassandra database dataset.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param description: Dataset description.
-    :type description: str
-    :param structure: Columns that define the structure of the dataset. Type:
-     array (or Expression with resultType array), itemType: DatasetDataElement.
-    :type structure: object
-    :param schema: Columns that define the physical type schema of the
-     dataset. Type: array (or Expression with resultType array), itemType:
-     DatasetSchemaDataElement.
-    :type schema: object
-    :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param parameters: Parameters for dataset.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     Dataset.
-    :type annotations: list[object]
-    :param folder: The folder that this Dataset is in. If not specified,
-     Dataset will appear at the root level.
-    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param table_name: The table name of the Cassandra database. Type: string
-     (or Expression with resultType string).
-    :type table_name: object
-    :param keyspace: The keyspace of the Cassandra database. Type: string (or
-     Expression with resultType string).
-    :type keyspace: object
-    """
-
-    _validation = {
-        'linked_service_name': {'required': True},
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'description': {'key': 'description', 'type': 'str'},
-        'structure': {'key': 'structure', 'type': 'object'},
-        'schema': {'key': 'schema', 'type': 'object'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
-        'type': {'key': 'type', 'type': 'str'},
-        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
-        'keyspace': {'key': 'typeProperties.keyspace', 'type': 'object'},
-    }
-
-    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, keyspace=None, **kwargs) -> None:
-        super(CassandraTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
-        self.table_name = table_name
-        self.keyspace = keyspace
-        self.type = 'CassandraTable'
-
-
-class ChainingTrigger(Trigger):
-    """Trigger that allows the referenced pipeline to depend on other pipeline
-    runs based on runDimension Name/Value pairs. Upstream pipelines should
-    declare the same runDimension Name and their runs should have the values
-    for those runDimensions. The referenced pipeline run would be triggered if
-    the values for the runDimension match for all upstream pipeline runs.
-
-    Variables are only populated by the server, and will be ignored when
-    sending a request.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param description: Trigger description.
-    :type description: str
-    :ivar runtime_state: Indicates if trigger is running or not. Updated when
-     Start/Stop APIs are called on the Trigger. Possible values include:
-     'Started', 'Stopped', 'Disabled'
-    :vartype runtime_state: str or
-     ~azure.mgmt.datafactory.models.TriggerRuntimeState
-    :param annotations: List of tags that can be used for describing the
-     trigger.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param pipeline: Required. Pipeline for which runs are created when all
-     upstream pipelines complete successfully.
-    :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference
-    :param depends_on: Required. Upstream Pipelines.
-    :type depends_on: list[~azure.mgmt.datafactory.models.PipelineReference]
-    :param run_dimension: Required. Run Dimension property that needs to be
-     emitted by upstream pipelines.
-    :type run_dimension: str
-    """
-
-    _validation = {
-        'runtime_state': {'readonly': True},
-        'type': {'required': True},
-        'pipeline': {'required': True},
-        'depends_on': {'required': True},
-        'run_dimension': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'description': {'key': 'description', 'type': 'str'},
-        'runtime_state': {'key': 'runtimeState', 'type': 'str'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'},
-        'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[PipelineReference]'},
-        'run_dimension': {'key': 'typeProperties.runDimension', 'type': 'str'},
-    }
-
-    def __init__(self, *, pipeline, depends_on, run_dimension: str, additional_properties=None, description: str=None, annotations=None, **kwargs) -> None:
-        super(ChainingTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs)
-        self.pipeline = pipeline
-        self.depends_on = depends_on
-        self.run_dimension = run_dimension
-        self.type = 'ChainingTrigger'
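A minimal sketch of a chaining trigger: run 'Downstream' once both upstream pipelines finish with matching 'runDate' run-dimension values (all names here are placeholders):

    from azure.mgmt.datafactory.models import (
        ChainingTrigger, TriggerPipelineReference, PipelineReference)

    trigger = ChainingTrigger(
        pipeline=TriggerPipelineReference(
            pipeline_reference=PipelineReference(reference_name='Downstream')),
        depends_on=[PipelineReference(reference_name='UpstreamA'),
                    PipelineReference(reference_name='UpstreamB')],
        run_dimension='runDate')   # must be emitted by all upstream runs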
-
-
-class CloudError(Model):
-    """The object that defines the structure of an Azure Data Factory error
-    response.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param code: Required. Error code.
-    :type code: str
-    :param message: Required. Error message.
-    :type message: str
-    :param target: Property name/path in request associated with error.
-    :type target: str
-    :param details: Array with additional error details.
-    :type details: list[~azure.mgmt.datafactory.models.CloudError]
-    """
-
-    _validation = {
-        'code': {'required': True},
-        'message': {'required': True},
-    }
-
-    _attribute_map = {
-        'code': {'key': 'error.code', 'type': 'str'},
-        'message': {'key': 'error.message', 'type': 'str'},
-        'target': {'key': 'error.target', 'type': 'str'},
-        'details': {'key': 'error.details', 'type': '[CloudError]'},
-    }
-
-    def __init__(self, *, code: str, message: str, target: str=None, details=None, **kwargs) -> None:
-        super(CloudError, self).__init__(**kwargs)
-        self.code = code
-        self.message = message
-        self.target = target
-        self.details = details
-
-
-class CloudErrorException(HttpOperationError):
-    """Server responded with exception of type: 'CloudError'.
-
-    :param deserialize: A deserializer
-    :param response: Server response to be deserialized.
-    """
-
-    def __init__(self, deserialize, response, *args):
-
-        super(CloudErrorException, self).__init__(deserialize, response, 'CloudError', *args)
-
-
-class CommonDataServiceForAppsEntityDataset(Dataset):
-    """The Common Data Service for Apps entity dataset.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param description: Dataset description.
-    :type description: str
-    :param structure: Columns that define the structure of the dataset. Type:
-     array (or Expression with resultType array), itemType: DatasetDataElement.
-    :type structure: object
-    :param schema: Columns that define the physical type schema of the
-     dataset. Type: array (or Expression with resultType array), itemType:
-     DatasetSchemaDataElement.
-    :type schema: object
-    :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param parameters: Parameters for dataset.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     Dataset.
-    :type annotations: list[object]
-    :param folder: The folder that this Dataset is in. If not specified,
-     Dataset will appear at the root level.
-    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param entity_name: The logical name of the entity. Type: string (or
-     Expression with resultType string).
-    :type entity_name: object
-    """
-
-    _validation = {
-        'linked_service_name': {'required': True},
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'description': {'key': 'description', 'type': 'str'},
-        'structure': {'key': 'structure', 'type': 'object'},
-        'schema': {'key': 'schema', 'type': 'object'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
-        'type': {'key': 'type', 'type': 'str'},
-        'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'},
-    }
-
-    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, entity_name=None, **kwargs) -> None:
-        super(CommonDataServiceForAppsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
-        self.entity_name = entity_name
-        self.type = 'CommonDataServiceForAppsEntity'
-
-
-class CommonDataServiceForAppsLinkedService(LinkedService):
-    """Common Data Service for Apps linked service.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param deployment_type: Required. The deployment type of the Common Data
-     Service for Apps instance. 'Online' for Common Data Service for Apps
-     Online and 'OnPremisesWithIfd' for Common Data Service for Apps
-     on-premises with Ifd. Type: string (or Expression with resultType string).
-     Possible values include: 'Online', 'OnPremisesWithIfd'
-    :type deployment_type: str or
-     ~azure.mgmt.datafactory.models.DynamicsDeploymentType
-    :param host_name: The host name of the on-premises Common Data Service for
-     Apps server. The property is required for on-prem and not allowed for
-     online. Type: string (or Expression with resultType string).
-    :type host_name: object
-    :param port: The port of the on-premises Common Data Service for Apps
-     server. The property is required for on-prem and not allowed for online.
-     Default is 443. Type: integer (or Expression with resultType integer),
-     minimum: 0.
-    :type port: object
-    :param service_uri: The URL to the Microsoft Common Data Service for Apps
-     server. The property is required for online and not allowed for on-prem.
-     Type: string (or Expression with resultType string).
-    :type service_uri: object
-    :param organization_name: The organization name of the Common Data Service
-     for Apps instance. The property is required for on-prem, and required for
-     online when more than one Common Data Service for Apps instance is
-     associated with the user. Type: string (or Expression with resultType
-     string).
-    :type organization_name: object
-    :param authentication_type: Required. The authentication type to connect
-     to Common Data Service for Apps server. 'Office365' for online scenario,
-     'Ifd' for on-premises with Ifd scenario. Type: string (or Expression with
-     resultType string). Possible values include: 'Office365', 'Ifd'
-    :type authentication_type: str or
-     ~azure.mgmt.datafactory.models.DynamicsAuthenticationType
-    :param username: Required. User name to access the Common Data Service for
-     Apps instance. Type: string (or Expression with resultType string).
-    :type username: object
-    :param password: Password to access the Common Data Service for Apps
-     instance.
-    :type password: ~azure.mgmt.datafactory.models.SecretBase
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
-    :type encrypted_credential: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-        'deployment_type': {'required': True},
-        'authentication_type': {'required': True},
-        'username': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
-        'description': {'key': 'description', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'},
-        'host_name': {'key': 'typeProperties.hostName', 'type': 'object'},
-        'port': {'key': 'typeProperties.port', 'type': 'object'},
-        'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'},
-        'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'},
-        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
-        'username': {'key': 'typeProperties.username', 'type': 'object'},
-        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
-        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
-    }
-
-    def __init__(self, *, deployment_type, authentication_type, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, password=None, encrypted_credential=None, **kwargs) -> None:
-        super(CommonDataServiceForAppsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
-        self.deployment_type = deployment_type
-        self.host_name = host_name
-        self.port = port
-        self.service_uri = service_uri
-        self.organization_name = organization_name
-        self.authentication_type = authentication_type
-        self.username = username
-        self.password = password
-        self.encrypted_credential = encrypted_credential
-        self.type = 'CommonDataServiceForApps'
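A minimal sketch of the online deployment; serviceUri is set, while hostName/port/organizationName are the on-premises-only counterparts and stay unset (credentials and URL are placeholders):

    from azure.mgmt.datafactory.models import (
        CommonDataServiceForAppsLinkedService, SecureString)

    ls = CommonDataServiceForAppsLinkedService(
        deployment_type='Online',
        authentication_type='Office365',
        username='user@contoso.com',                     # placeholder
        password=SecureString(value='<password>'),       # placeholder
        service_uri='https://contoso.crm.dynamics.com')  # placeholder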
-
-
-class CommonDataServiceForAppsSink(CopySink):
-    """A copy activity Common Data Service for Apps sink.
-
-    Variables are only populated by the server, and will be ignored when
-    sending a request.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param write_batch_size: Write batch size. Type: integer (or Expression
-     with resultType integer), minimum: 0.
-    :type write_batch_size: object
-    :param write_batch_timeout: Write batch timeout. Type: string (or
-     Expression with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type write_batch_timeout: object
-    :param sink_retry_count: Sink retry count. Type: integer (or Expression
-     with resultType integer).
-    :type sink_retry_count: object
-    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
-     resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type sink_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the sink data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :ivar write_behavior: Required. The write behavior for the operation.
-     Default value: "Upsert".
-    :vartype write_behavior: str
-    :param ignore_null_values: The flag indicating whether to ignore null
-     values from input dataset (except key fields) during write operation.
-     Default is false. Type: boolean (or Expression with resultType boolean).
-    :type ignore_null_values: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-        'write_behavior': {'required': True, 'constant': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
-        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
-        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
-        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
-        'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
-    }
-
-    write_behavior = "Upsert"
-
-    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None:
-        super(CommonDataServiceForAppsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
-        self.ignore_null_values = ignore_null_values
-        self.type = 'CommonDataServiceForAppsSink'
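Note that write_behavior is declared {'required': True, 'constant': True} and pinned as the class attribute "Upsert", so the serializer always emits it and the constructor does not accept it:

    from azure.mgmt.datafactory.models import CommonDataServiceForAppsSink

    sink = CommonDataServiceForAppsSink(ignore_null_values=True)
    assert sink.write_behavior == 'Upsert'   # class-level constant, not settable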
-
-
-class CommonDataServiceForAppsSource(CopySource):
-    """A copy activity Common Data Service for Apps source.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param source_retry_count: Source retry count. Type: integer (or
-     Expression with resultType integer).
-    :type source_retry_count: object
-    :param source_retry_wait: Source retry wait. Type: string (or Expression
-     with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type source_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the source data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param query: FetchXML is a proprietary query language that is used in
-     Microsoft Common Data Service for Apps (online & on-premises). Type:
-     string (or Expression with resultType string).
-    :type query: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
-        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'query': {'key': 'query', 'type': 'object'},
-    }
-
-    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
-        super(CommonDataServiceForAppsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
-        self.query = query
-        self.type = 'CommonDataServiceForAppsSource'
-
-
-class ConcurLinkedService(LinkedService):
-    """Concur Service linked service.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param client_id: Required. Application client_id supplied by Concur App
-     Management.
-    :type client_id: object
-    :param username: Required. The user name that you use to access Concur
-     Service.
-    :type username: object
-    :param password: The password corresponding to the user name that you
-     provided in the username field.
-    :type password: ~azure.mgmt.datafactory.models.SecretBase
-    :param use_encrypted_endpoints: Specifies whether the data source
-     endpoints are encrypted using HTTPS. The default value is true.
-    :type use_encrypted_endpoints: object
-    :param use_host_verification: Specifies whether to require the host name
-     in the server's certificate to match the host name of the server when
-     connecting over SSL. The default value is true.
-    :type use_host_verification: object
-    :param use_peer_verification: Specifies whether to verify the identity of
-     the server when connecting over SSL. The default value is true.
-    :type use_peer_verification: object
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
-    :type encrypted_credential: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-        'client_id': {'required': True},
-        'username': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
-        'description': {'key': 'description', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'client_id': {'key': 'typeProperties.clientId', 'type': 'object'},
-        'username': {'key': 'typeProperties.username', 'type': 'object'},
-        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
-        'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
-        'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
-        'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
-        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
-    }
-
-    def __init__(self, *, client_id, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None:
-        super(ConcurLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
-        self.client_id = client_id
-        self.username = username
-        self.password = password
-        self.use_encrypted_endpoints = use_encrypted_endpoints
-        self.use_host_verification = use_host_verification
-        self.use_peer_verification = use_peer_verification
-        self.encrypted_credential = encrypted_credential
-        self.type = 'Concur'
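A minimal sketch of the Concur linked service with the two required properties; all values are placeholders:

    from azure.mgmt.datafactory.models import ConcurLinkedService, SecureString

    ls = ConcurLinkedService(
        client_id='<client-id-from-Concur-App-Management>',   # placeholder
        username='fred@contoso.com',                          # placeholder
        password=SecureString(value='<password>'))            # placeholder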
-
-
-class ConcurObjectDataset(Dataset):
- """Concur Service dataset.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param description: Dataset description.
- :type description: str
- :param structure: Columns that define the structure of the dataset. Type:
- array (or Expression with resultType array), itemType: DatasetDataElement.
- :type structure: object
- :param schema: Columns that define the physical type schema of the
- dataset. Type: array (or Expression with resultType array), itemType:
- DatasetSchemaDataElement.
- :type schema: object
- :param linked_service_name: Required. Linked service reference.
- :type linked_service_name:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param parameters: Parameters for dataset.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- Dataset.
- :type annotations: list[object]
- :param folder: The folder that this Dataset is in. If not specified,
- Dataset will appear at the root level.
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
- :param type: Required. Constant filled by server.
- :type type: str
- :param table_name: The table name. Type: string (or Expression with
- resultType string).
- :type table_name: object
- """
-
- _validation = {
- 'linked_service_name': {'required': True},
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'description': {'key': 'description', 'type': 'str'},
- 'structure': {'key': 'structure', 'type': 'object'},
- 'schema': {'key': 'schema', 'type': 'object'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
- 'type': {'key': 'type', 'type': 'str'},
- 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
- }
-
- def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None:
- super(ConcurObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
- self.table_name = table_name
- self.type = 'ConcurObject'
-
-
-class ConcurSource(CopySource):
- """A copy activity Concur Service source.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param source_retry_count: Source retry count. Type: integer (or
- Expression with resultType integer).
- :type source_retry_count: object
- :param source_retry_wait: Source retry wait. Type: string (or Expression
- with resultType string), pattern:
- ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type source_retry_wait: object
- :param max_concurrent_connections: The maximum concurrent connection count
- for the source data store. Type: integer (or Expression with resultType
- integer).
- :type max_concurrent_connections: object
- :param type: Required. Constant filled by server.
- :type type: str
- :param query: A query to retrieve data from source. Type: string (or
- Expression with resultType string).
- :type query: object
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
- 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
- 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
- 'type': {'key': 'type', 'type': 'str'},
- 'query': {'key': 'query', 'type': 'object'},
- }
-
- def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
- super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
- self.query = query
- self.type = 'ConcurSource'
-
-
-class CopyActivity(ExecutionActivity):
- """Copy activity.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param name: Required. Activity name.
- :type name: str
- :param description: Activity description.
- :type description: str
- :param depends_on: Activity depends on condition.
- :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
- :param user_properties: Activity user properties.
- :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
- :param type: Required. Constant filled by server.
- :type type: str
- :param linked_service_name: Linked service reference.
- :type linked_service_name:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param policy: Activity policy.
- :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
- :param source: Required. Copy activity source.
- :type source: ~azure.mgmt.datafactory.models.CopySource
- :param sink: Required. Copy activity sink.
- :type sink: ~azure.mgmt.datafactory.models.CopySink
- :param translator: Copy activity translator. If not specified, tabular
- translator is used.
- :type translator: object
- :param enable_staging: Specifies whether to copy data via an interim
- staging. Default value is false. Type: boolean (or Expression with
- resultType boolean).
- :type enable_staging: object
- :param staging_settings: Specifies interim staging settings when
- EnableStaging is true.
- :type staging_settings: ~azure.mgmt.datafactory.models.StagingSettings
- :param parallel_copies: Maximum number of concurrent sessions opened on
- the source or sink to avoid overloading the data store. Type: integer (or
- Expression with resultType integer), minimum: 0.
- :type parallel_copies: object
- :param data_integration_units: Maximum number of data integration units
- that can be used to perform this data movement. Type: integer (or
- Expression with resultType integer), minimum: 0.
- :type data_integration_units: object
- :param enable_skip_incompatible_row: Whether to skip incompatible rows.
- Default value is false. Type: boolean (or Expression with resultType
- boolean).
- :type enable_skip_incompatible_row: object
- :param redirect_incompatible_row_settings: Redirect incompatible row
- settings when EnableSkipIncompatibleRow is true.
- :type redirect_incompatible_row_settings:
- ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings
- :param preserve_rules: Preserve Rules.
- :type preserve_rules: list[object]
- :param preserve: Preserve rules.
- :type preserve: list[object]
- :param inputs: List of inputs for the activity.
- :type inputs: list[~azure.mgmt.datafactory.models.DatasetReference]
- :param outputs: List of outputs for the activity.
- :type outputs: list[~azure.mgmt.datafactory.models.DatasetReference]
- """
-
- _validation = {
- 'name': {'required': True},
- 'type': {'required': True},
- 'source': {'required': True},
- 'sink': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'name': {'key': 'name', 'type': 'str'},
- 'description': {'key': 'description', 'type': 'str'},
- 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
- 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
- 'source': {'key': 'typeProperties.source', 'type': 'CopySource'},
- 'sink': {'key': 'typeProperties.sink', 'type': 'CopySink'},
- 'translator': {'key': 'typeProperties.translator', 'type': 'object'},
- 'enable_staging': {'key': 'typeProperties.enableStaging', 'type': 'object'},
- 'staging_settings': {'key': 'typeProperties.stagingSettings', 'type': 'StagingSettings'},
- 'parallel_copies': {'key': 'typeProperties.parallelCopies', 'type': 'object'},
- 'data_integration_units': {'key': 'typeProperties.dataIntegrationUnits', 'type': 'object'},
- 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'},
- 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'},
- 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'},
- 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'},
- 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'},
- 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'},
- }
-
- def __init__(self, *, name: str, source, sink, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, translator=None, enable_staging=None, staging_settings=None, parallel_copies=None, data_integration_units=None, enable_skip_incompatible_row=None, redirect_incompatible_row_settings=None, preserve_rules=None, preserve=None, inputs=None, outputs=None, **kwargs) -> None:
- super(CopyActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
- self.source = source
- self.sink = sink
- self.translator = translator
- self.enable_staging = enable_staging
- self.staging_settings = staging_settings
- self.parallel_copies = parallel_copies
- self.data_integration_units = data_integration_units
- self.enable_skip_incompatible_row = enable_skip_incompatible_row
- self.redirect_incompatible_row_settings = redirect_incompatible_row_settings
- self.preserve_rules = preserve_rules
- self.preserve = preserve
- self.inputs = inputs
- self.outputs = outputs
- self.type = 'Copy'
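
# Illustrative sketch (editor's addition): wiring a CopyActivity from the
# models above. A minimal blob-to-blob copy, assuming the referenced datasets
# already exist in the factory; all names are placeholders.
from azure.mgmt.datafactory.models import (
    BlobSink, BlobSource, CopyActivity, DatasetReference)

copy_activity = CopyActivity(
    name='CopyBlobToBlob',
    source=BlobSource(),   # required
    sink=BlobSink(),       # required
    inputs=[DatasetReference(reference_name='InputBlobDataset')],
    outputs=[DatasetReference(reference_name='OutputBlobDataset')],
    enable_staging=False,
)
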
-
-
-class CosmosDbLinkedService(LinkedService):
- """Microsoft Azure Cosmos Database (CosmosDB) linked service.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param connect_via: The integration runtime reference.
- :type connect_via:
- ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
- :param description: Linked service description.
- :type description: str
- :param parameters: Parameters for linked service.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- linked service.
- :type annotations: list[object]
- :param type: Required. Constant filled by server.
- :type type: str
- :param connection_string: Required. The connection string. Type: string,
- SecureString or AzureKeyVaultSecretReference.
- :type connection_string: object
- :param account_key: The Azure key vault secret reference of accountKey in
- connection string.
- :type account_key:
- ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
- :param encrypted_credential: The encrypted credential used for
- authentication. Credentials are encrypted using the integration runtime
- credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object
- """
-
- _validation = {
- 'type': {'required': True},
- 'connection_string': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
- 'description': {'key': 'description', 'type': 'str'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
- 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'},
- 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
- }
-
- def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, account_key=None, encrypted_credential=None, **kwargs) -> None:
- super(CosmosDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
- self.connection_string = connection_string
- self.account_key = account_key
- self.encrypted_credential = encrypted_credential
- self.type = 'CosmosDb'
-
-
-class CosmosDbMongoDbApiCollectionDataset(Dataset):
- """The CosmosDB (MongoDB API) database dataset.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param description: Dataset description.
- :type description: str
- :param structure: Columns that define the structure of the dataset. Type:
- array (or Expression with resultType array), itemType: DatasetDataElement.
- :type structure: object
- :param schema: Columns that define the physical type schema of the
- dataset. Type: array (or Expression with resultType array), itemType:
- DatasetSchemaDataElement.
- :type schema: object
- :param linked_service_name: Required. Linked service reference.
- :type linked_service_name:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param parameters: Parameters for dataset.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- Dataset.
- :type annotations: list[object]
- :param folder: The folder that this Dataset is in. If not specified,
- Dataset will appear at the root level.
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
- :param type: Required. Constant filled by server.
- :type type: str
- :param collection: Required. The collection name of the CosmosDB (MongoDB
- API) database. Type: string (or Expression with resultType string).
- :type collection: object
- """
-
- _validation = {
- 'linked_service_name': {'required': True},
- 'type': {'required': True},
- 'collection': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'description': {'key': 'description', 'type': 'str'},
- 'structure': {'key': 'structure', 'type': 'object'},
- 'schema': {'key': 'schema', 'type': 'object'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
- 'type': {'key': 'type', 'type': 'str'},
- 'collection': {'key': 'typeProperties.collection', 'type': 'object'},
- }
-
- def __init__(self, *, linked_service_name, collection, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None:
- super(CosmosDbMongoDbApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
- self.collection = collection
- self.type = 'CosmosDbMongoDbApiCollection'
-
-
-class CosmosDbMongoDbApiLinkedService(LinkedService):
- """Linked service for CosmosDB (MongoDB API) data source.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param connect_via: The integration runtime reference.
- :type connect_via:
- ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
- :param description: Linked service description.
- :type description: str
- :param parameters: Parameters for linked service.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- linked service.
- :type annotations: list[object]
- :param type: Required. Constant filled by server.
- :type type: str
- :param connection_string: Required. The CosmosDB (MongoDB API) connection
- string. Type: string, SecureString or AzureKeyVaultSecretReference.
- :type connection_string: object
- :param database: Required. The name of the CosmosDB (MongoDB API) database
- that you want to access. Type: string (or Expression with resultType
- string).
- :type database: object
- """
-
- _validation = {
- 'type': {'required': True},
- 'connection_string': {'required': True},
- 'database': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
- 'description': {'key': 'description', 'type': 'str'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
- 'database': {'key': 'typeProperties.database', 'type': 'object'},
- }
-
- def __init__(self, *, connection_string, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None:
- super(CosmosDbMongoDbApiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
- self.connection_string = connection_string
- self.database = database
- self.type = 'CosmosDbMongoDbApi'
-
-
-class CosmosDbMongoDbApiSink(CopySink):
- """A copy activity sink for a CosmosDB (MongoDB API) database.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param write_batch_size: Write batch size. Type: integer (or Expression
- with resultType integer), minimum: 0.
- :type write_batch_size: object
- :param write_batch_timeout: Write batch timeout. Type: string (or
- Expression with resultType string), pattern:
- ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type write_batch_timeout: object
- :param sink_retry_count: Sink retry count. Type: integer (or Expression
- with resultType integer).
- :type sink_retry_count: object
- :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
- resultType string), pattern:
- ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type sink_retry_wait: object
- :param max_concurrent_connections: The maximum concurrent connection count
- for the sink data store. Type: integer (or Expression with resultType
- integer).
- :type max_concurrent_connections: object
- :param type: Required. Constant filled by server.
- :type type: str
- :param write_behavior: Specifies whether a document with the same key is
- overwritten (upsert) rather than raising an exception (insert). The
- default value is "insert". Type: string (or Expression with resultType
- string).
- :type write_behavior: object
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
- 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
- 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
- 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
- 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
- 'type': {'key': 'type', 'type': 'str'},
- 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
- }
-
- def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None:
- super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
- self.write_behavior = write_behavior
- self.type = 'CosmosDbMongoDbApiSink'
-
-
-class CosmosDbMongoDbApiSource(CopySource):
- """A copy activity source for a CosmosDB (MongoDB API) database.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param source_retry_count: Source retry count. Type: integer (or
- Expression with resultType integer).
- :type source_retry_count: object
- :param source_retry_wait: Source retry wait. Type: string (or Expression
- with resultType string), pattern:
- ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type source_retry_wait: object
- :param max_concurrent_connections: The maximum concurrent connection count
- for the source data store. Type: integer (or Expression with resultType
- integer).
- :type max_concurrent_connections: object
- :param type: Required. Constant filled by server.
- :type type: str
- :param filter: Specifies selection filter using query operators. To return
- all documents in a collection, omit this parameter or pass an empty
- document ({}). Type: string (or Expression with resultType string).
- :type filter: object
- :param cursor_methods: Cursor methods for MongoDB query.
- :type cursor_methods:
- ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties
- :param batch_size: Specifies the number of documents to return in each
- batch of the response from the MongoDB instance. In most cases, modifying
- the batch size will not affect the user or the application. This
- property's main purpose is to avoid hitting the limit on response size.
- Type: integer (or Expression with resultType integer).
- :type batch_size: object
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
- 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
- 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
- 'type': {'key': 'type', 'type': 'str'},
- 'filter': {'key': 'filter', 'type': 'object'},
- 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'},
- 'batch_size': {'key': 'batchSize', 'type': 'object'},
- }
-
- def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None, **kwargs) -> None:
- super(CosmosDbMongoDbApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
- self.filter = filter
- self.cursor_methods = cursor_methods
- self.batch_size = batch_size
- self.type = 'CosmosDbMongoDbApiSource'
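
# Illustrative sketch (editor's addition) for the CosmosDB (MongoDB API)
# models above; the connection string, database, and filter are placeholders.
from azure.mgmt.datafactory.models import (
    CosmosDbMongoDbApiLinkedService, CosmosDbMongoDbApiSource)

mongo_ls = CosmosDbMongoDbApiLinkedService(
    connection_string='mongodb://<account>:<key>@<account>.documents.azure.com:10255/',
    database='mydb',
)
mongo_source = CosmosDbMongoDbApiSource(
    filter='{"status": "active"}',  # omit (or pass {}) to return all documents
    batch_size=100,
)
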
-
-
-class CouchbaseLinkedService(LinkedService):
- """Couchbase server linked service.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param connect_via: The integration runtime reference.
- :type connect_via:
- ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
- :param description: Linked service description.
- :type description: str
- :param parameters: Parameters for linked service.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- linked service.
- :type annotations: list[object]
- :param type: Required. Constant filled by server.
- :type type: str
- :param connection_string: An ODBC connection string. Type: string,
- SecureString or AzureKeyVaultSecretReference.
- :type connection_string: object
- :param cred_string: The Azure key vault secret reference of credString in
- connection string.
- :type cred_string:
- ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
- :param encrypted_credential: The encrypted credential used for
- authentication. Credentials are encrypted using the integration runtime
- credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
- 'description': {'key': 'description', 'type': 'str'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
- 'cred_string': {'key': 'typeProperties.credString', 'type': 'AzureKeyVaultSecretReference'},
- 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
- }
-
- def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, cred_string=None, encrypted_credential=None, **kwargs) -> None:
- super(CouchbaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
- self.connection_string = connection_string
- self.cred_string = cred_string
- self.encrypted_credential = encrypted_credential
- self.type = 'Couchbase'
-
-
-class CouchbaseSource(CopySource):
- """A copy activity Couchbase server source.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param source_retry_count: Source retry count. Type: integer (or
- Expression with resultType integer).
- :type source_retry_count: object
- :param source_retry_wait: Source retry wait. Type: string (or Expression
- with resultType string), pattern:
- ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type source_retry_wait: object
- :param max_concurrent_connections: The maximum concurrent connection count
- for the source data store. Type: integer (or Expression with resultType
- integer).
- :type max_concurrent_connections: object
- :param type: Required. Constant filled by server.
- :type type: str
- :param query: A query to retrieve data from source. Type: string (or
- Expression with resultType string).
- :type query: object
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
- 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
- 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
- 'type': {'key': 'type', 'type': 'str'},
- 'query': {'key': 'query', 'type': 'object'},
- }
-
- def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
- super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
- self.query = query
- self.type = 'CouchbaseSource'
-
-
-class CouchbaseTableDataset(Dataset):
- """Couchbase server dataset.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param description: Dataset description.
- :type description: str
- :param structure: Columns that define the structure of the dataset. Type:
- array (or Expression with resultType array), itemType: DatasetDataElement.
- :type structure: object
- :param schema: Columns that define the physical type schema of the
- dataset. Type: array (or Expression with resultType array), itemType:
- DatasetSchemaDataElement.
- :type schema: object
- :param linked_service_name: Required. Linked service reference.
- :type linked_service_name:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param parameters: Parameters for dataset.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- Dataset.
- :type annotations: list[object]
- :param folder: The folder that this Dataset is in. If not specified,
- Dataset will appear at the root level.
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
- :param type: Required. Constant filled by server.
- :type type: str
- :param table_name: The table name. Type: string (or Expression with
- resultType string).
- :type table_name: object
- """
-
- _validation = {
- 'linked_service_name': {'required': True},
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'description': {'key': 'description', 'type': 'str'},
- 'structure': {'key': 'structure', 'type': 'object'},
- 'schema': {'key': 'schema', 'type': 'object'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
- 'type': {'key': 'type', 'type': 'str'},
- 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
- }
-
- def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None:
- super(CouchbaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
- self.table_name = table_name
- self.type = 'CouchbaseTable'
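
# Illustrative sketch (editor's addition) for the Couchbase models above; the
# ODBC-style connection string and the query text are placeholders.
from azure.mgmt.datafactory.models import (
    CouchbaseLinkedService, CouchbaseSource)

couchbase_ls = CouchbaseLinkedService(
    connection_string='Server=10.0.0.4;Port=8091',  # placeholder ODBC string
)
couchbase_source = CouchbaseSource(
    query='SELECT * FROM `travel-sample` LIMIT 10',
)
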
-
-
-class CreateLinkedIntegrationRuntimeRequest(Model):
- """The linked integration runtime information.
-
- :param name: The name of the linked integration runtime.
- :type name: str
- :param subscription_id: The ID of the subscription that the linked
- integration runtime belongs to.
- :type subscription_id: str
- :param data_factory_name: The name of the data factory that the linked
- integration runtime belongs to.
- :type data_factory_name: str
- :param data_factory_location: The location of the data factory that the
- linked integration runtime belongs to.
- :type data_factory_location: str
- """
-
- _attribute_map = {
- 'name': {'key': 'name', 'type': 'str'},
- 'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
- 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'},
- 'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'},
- }
-
- def __init__(self, *, name: str=None, subscription_id: str=None, data_factory_name: str=None, data_factory_location: str=None, **kwargs) -> None:
- super(CreateLinkedIntegrationRuntimeRequest, self).__init__(**kwargs)
- self.name = name
- self.subscription_id = subscription_id
- self.data_factory_name = data_factory_name
- self.data_factory_location = data_factory_location
-
-
-class CreateRunResponse(Model):
- """Response body with a run identifier.
-
- All required parameters must be populated in order to send to Azure.
-
- :param run_id: Required. Identifier of a run.
- :type run_id: str
- """
-
- _validation = {
- 'run_id': {'required': True},
- }
-
- _attribute_map = {
- 'run_id': {'key': 'runId', 'type': 'str'},
- }
-
- def __init__(self, *, run_id: str, **kwargs) -> None:
- super(CreateRunResponse, self).__init__(**kwargs)
- self.run_id = run_id
-
-
-class CustomActivity(ExecutionActivity):
- """Custom activity type.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param name: Required. Activity name.
- :type name: str
- :param description: Activity description.
- :type description: str
- :param depends_on: Activity depends on condition.
- :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
- :param user_properties: Activity user properties.
- :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
- :param type: Required. Constant filled by server.
- :type type: str
- :param linked_service_name: Linked service reference.
- :type linked_service_name:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param policy: Activity policy.
- :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
- :param command: Required. Command for custom activity. Type: string (or
- Expression with resultType string).
- :type command: object
- :param resource_linked_service: Resource linked service reference.
- :type resource_linked_service:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param folder_path: Folder path for resource files. Type: string (or
- Expression with resultType string).
- :type folder_path: object
- :param reference_objects: Reference objects.
- :type reference_objects:
- ~azure.mgmt.datafactory.models.CustomActivityReferenceObject
- :param extended_properties: User defined property bag. There is no
- restriction on the keys or values that can be used. The user-specified
- custom activity has full responsibility for consuming and interpreting
- the content defined.
- :type extended_properties: dict[str, object]
- :param retention_time_in_days: The retention time for the files submitted
- for custom activity. Type: double (or Expression with resultType double).
- :type retention_time_in_days: object
- """
-
- _validation = {
- 'name': {'required': True},
- 'type': {'required': True},
- 'command': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'name': {'key': 'name', 'type': 'str'},
- 'description': {'key': 'description', 'type': 'str'},
- 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
- 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
- 'command': {'key': 'typeProperties.command', 'type': 'object'},
- 'resource_linked_service': {'key': 'typeProperties.resourceLinkedService', 'type': 'LinkedServiceReference'},
- 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'},
- 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'},
- 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'},
- 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'},
- }
-
- def __init__(self, *, name: str, command, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, resource_linked_service=None, folder_path=None, reference_objects=None, extended_properties=None, retention_time_in_days=None, **kwargs) -> None:
- super(CustomActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
- self.command = command
- self.resource_linked_service = resource_linked_service
- self.folder_path = folder_path
- self.reference_objects = reference_objects
- self.extended_properties = extended_properties
- self.retention_time_in_days = retention_time_in_days
- self.type = 'Custom'
-
-
-class CustomActivityReferenceObject(Model):
- """Reference objects for custom activity.
-
- :param linked_services: Linked service references.
- :type linked_services:
- list[~azure.mgmt.datafactory.models.LinkedServiceReference]
- :param datasets: Dataset references.
- :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference]
- """
-
- _attribute_map = {
- 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceReference]'},
- 'datasets': {'key': 'datasets', 'type': '[DatasetReference]'},
- }
-
- def __init__(self, *, linked_services=None, datasets=None, **kwargs) -> None:
- super(CustomActivityReferenceObject, self).__init__(**kwargs)
- self.linked_services = linked_services
- self.datasets = datasets
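
# Illustrative sketch (editor's addition): a CustomActivity paired with the
# CustomActivityReferenceObject defined above. The linked service names and
# command are placeholders.
from azure.mgmt.datafactory.models import (
    CustomActivity, CustomActivityReferenceObject, LinkedServiceReference)

custom_activity = CustomActivity(
    name='RunScorer',
    command='python scorer.py',  # required
    linked_service_name=LinkedServiceReference(reference_name='AzureBatchLS'),
    reference_objects=CustomActivityReferenceObject(
        linked_services=[LinkedServiceReference(reference_name='StorageLS')]),
    extended_properties={'modelVersion': 'v2'},
)
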
-
-
-class CustomDataset(Dataset):
- """The custom dataset.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param description: Dataset description.
- :type description: str
- :param structure: Columns that define the structure of the dataset. Type:
- array (or Expression with resultType array), itemType: DatasetDataElement.
- :type structure: object
- :param schema: Columns that define the physical type schema of the
- dataset. Type: array (or Expression with resultType array), itemType:
- DatasetSchemaDataElement.
- :type schema: object
- :param linked_service_name: Required. Linked service reference.
- :type linked_service_name:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param parameters: Parameters for dataset.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- Dataset.
- :type annotations: list[object]
- :param folder: The folder that this Dataset is in. If not specified,
- Dataset will appear at the root level.
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
- :param type: Required. Constant filled by server.
- :type type: str
- :param type_properties: Custom dataset properties.
- :type type_properties: object
- """
-
- _validation = {
- 'linked_service_name': {'required': True},
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'description': {'key': 'description', 'type': 'str'},
- 'structure': {'key': 'structure', 'type': 'object'},
- 'schema': {'key': 'schema', 'type': 'object'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
- 'type': {'key': 'type', 'type': 'str'},
- 'type_properties': {'key': 'typeProperties', 'type': 'object'},
- }
-
- def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, type_properties=None, **kwargs) -> None:
- super(CustomDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
- self.type_properties = type_properties
- self.type = 'CustomDataset'
-
-
-class CustomDataSourceLinkedService(LinkedService):
- """Custom linked service.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param connect_via: The integration runtime reference.
- :type connect_via:
- ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
- :param description: Linked service description.
- :type description: str
- :param parameters: Parameters for linked service.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- linked service.
- :type annotations: list[object]
- :param type: Required. Constant filled by server.
- :type type: str
- :param type_properties: Required. Custom linked service properties.
- :type type_properties: object
- """
-
- _validation = {
- 'type': {'required': True},
- 'type_properties': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
- 'description': {'key': 'description', 'type': 'str'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'type_properties': {'key': 'typeProperties', 'type': 'object'},
- }
-
- def __init__(self, *, type_properties, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None:
- super(CustomDataSourceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
- self.type_properties = type_properties
- self.type = 'CustomDataSource'
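
# Illustrative sketch (editor's addition): for custom connectors the
# typeProperties bag is passed through verbatim; the payloads below are
# entirely hypothetical.
from azure.mgmt.datafactory.models import (
    CustomDataset, CustomDataSourceLinkedService, LinkedServiceReference)

custom_ls = CustomDataSourceLinkedService(
    type_properties={'endpoint': 'https://example.internal/api'})
custom_ds = CustomDataset(
    linked_service_name=LinkedServiceReference(reference_name='MyCustomLS'),
    type_properties={'entity': 'orders'})
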
-
-
-class DatabricksNotebookActivity(ExecutionActivity):
- """DatabricksNotebook activity.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param name: Required. Activity name.
- :type name: str
- :param description: Activity description.
- :type description: str
- :param depends_on: Activity depends on condition.
- :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
- :param user_properties: Activity user properties.
- :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
- :param type: Required. Constant filled by server.
- :type type: str
- :param linked_service_name: Linked service reference.
- :type linked_service_name:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param policy: Activity policy.
- :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
- :param notebook_path: Required. The absolute path of the notebook to be
- run in the Databricks Workspace. This path must begin with a slash. Type:
- string (or Expression with resultType string).
- :type notebook_path: object
- :param base_parameters: Base parameters to be used for each run of this
- job. If the notebook takes a parameter that is not specified, the default
- value from the notebook will be used.
- :type base_parameters: dict[str, object]
- :param libraries: A list of libraries to be installed on the cluster that
- will execute the job.
- :type libraries: list[dict[str, object]]
- """
-
- _validation = {
- 'name': {'required': True},
- 'type': {'required': True},
- 'notebook_path': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'name': {'key': 'name', 'type': 'str'},
- 'description': {'key': 'description', 'type': 'str'},
- 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
- 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
- 'notebook_path': {'key': 'typeProperties.notebookPath', 'type': 'object'},
- 'base_parameters': {'key': 'typeProperties.baseParameters', 'type': '{object}'},
- 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'},
- }
-
- def __init__(self, *, name: str, notebook_path, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, base_parameters=None, libraries=None, **kwargs) -> None:
- super(DatabricksNotebookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
- self.notebook_path = notebook_path
- self.base_parameters = base_parameters
- self.libraries = libraries
- self.type = 'DatabricksNotebook'
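
# Illustrative sketch (editor's addition): the workspace path, parameter, and
# library coordinates below are placeholders.
from azure.mgmt.datafactory.models import (
    DatabricksNotebookActivity, LinkedServiceReference)

notebook_activity = DatabricksNotebookActivity(
    name='ScoreBatch',
    notebook_path='/Shared/score',  # must begin with a slash
    linked_service_name=LinkedServiceReference(reference_name='DatabricksLS'),
    base_parameters={'run_date': '2019-06-07'},
    libraries=[{'pypi': {'package': 'simplejson'}}],
)
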
-
-
-class DatabricksSparkJarActivity(ExecutionActivity):
- """DatabricksSparkJar activity.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param name: Required. Activity name.
- :type name: str
- :param description: Activity description.
- :type description: str
- :param depends_on: Activity depends on condition.
- :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
- :param user_properties: Activity user properties.
- :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
- :param type: Required. Constant filled by server.
- :type type: str
- :param linked_service_name: Linked service reference.
- :type linked_service_name:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param policy: Activity policy.
- :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
- :param main_class_name: Required. The full name of the class containing
- the main method to be executed. This class must be contained in a JAR
- provided as a library. Type: string (or Expression with resultType
- string).
- :type main_class_name: object
- :param parameters: Parameters that will be passed to the main method.
- :type parameters: list[object]
- :param libraries: A list of libraries to be installed on the cluster that
- will execute the job.
- :type libraries: list[dict[str, object]]
- """
-
- _validation = {
- 'name': {'required': True},
- 'type': {'required': True},
- 'main_class_name': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'name': {'key': 'name', 'type': 'str'},
- 'description': {'key': 'description', 'type': 'str'},
- 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
- 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
- 'main_class_name': {'key': 'typeProperties.mainClassName', 'type': 'object'},
- 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'},
- 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'},
- }
-
- def __init__(self, *, name: str, main_class_name, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, parameters=None, libraries=None, **kwargs) -> None:
- super(DatabricksSparkJarActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
- self.main_class_name = main_class_name
- self.parameters = parameters
- self.libraries = libraries
- self.type = 'DatabricksSparkJar'
-
-
-class DatabricksSparkPythonActivity(ExecutionActivity):
- """DatabricksSparkPython activity.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param name: Required. Activity name.
- :type name: str
- :param description: Activity description.
- :type description: str
- :param depends_on: Activity depends on condition.
- :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
- :param user_properties: Activity user properties.
- :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
- :param type: Required. Constant filled by server.
- :type type: str
- :param linked_service_name: Linked service reference.
- :type linked_service_name:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param policy: Activity policy.
- :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
- :param python_file: Required. The URI of the Python file to be executed.
- DBFS paths are supported. Type: string (or Expression with resultType
- string).
- :type python_file: object
- :param parameters: Command line parameters that will be passed to the
- Python file.
- :type parameters: list[object]
- :param libraries: A list of libraries to be installed on the cluster that
- will execute the job.
- :type libraries: list[dict[str, object]]
- """
-
- _validation = {
- 'name': {'required': True},
- 'type': {'required': True},
- 'python_file': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'name': {'key': 'name', 'type': 'str'},
- 'description': {'key': 'description', 'type': 'str'},
- 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
- 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
- 'python_file': {'key': 'typeProperties.pythonFile', 'type': 'object'},
- 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'},
- 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'},
- }
-
- def __init__(self, *, name: str, python_file, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, parameters=None, libraries=None, **kwargs) -> None:
- super(DatabricksSparkPythonActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
- self.python_file = python_file
- self.parameters = parameters
- self.libraries = libraries
- self.type = 'DatabricksSparkPython'
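
# Illustrative sketch (editor's addition), same pattern as the notebook
# activity above; the DBFS path and arguments are placeholders.
from azure.mgmt.datafactory.models import (
    DatabricksSparkPythonActivity, LinkedServiceReference)

spark_python_activity = DatabricksSparkPythonActivity(
    name='SparkPyJob',
    python_file='dbfs:/jobs/etl.py',  # DBFS paths are supported
    linked_service_name=LinkedServiceReference(reference_name='DatabricksLS'),
    parameters=['--input', '/raw/2019-06-07'],
)
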
-
-
-class DataLakeAnalyticsUSQLActivity(ExecutionActivity):
- """Data Lake Analytics U-SQL activity.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param name: Required. Activity name.
- :type name: str
- :param description: Activity description.
- :type description: str
- :param depends_on: Activity depends on condition.
- :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
- :param user_properties: Activity user properties.
- :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
- :param type: Required. Constant filled by server.
- :type type: str
- :param linked_service_name: Linked service reference.
- :type linked_service_name:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param policy: Activity policy.
- :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
- :param script_path: Required. Case-sensitive path to folder that contains
- the U-SQL script. Type: string (or Expression with resultType string).
- :type script_path: object
- :param script_linked_service: Required. Script linked service reference.
- :type script_linked_service:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param degree_of_parallelism: The maximum number of nodes simultaneously
- used to run the job. Default value is 1. Type: integer (or Expression with
- resultType integer), minimum: 1.
- :type degree_of_parallelism: object
- :param priority: Determines which jobs out of all that are queued should
- be selected to run first. The lower the number, the higher the priority.
- Default value is 1000. Type: integer (or Expression with resultType
- integer), minimum: 1.
- :type priority: object
- :param parameters: Parameters for U-SQL job request.
- :type parameters: dict[str, object]
- :param runtime_version: Runtime version of the U-SQL engine to use. Type:
- string (or Expression with resultType string).
- :type runtime_version: object
- :param compilation_mode: Compilation mode of U-SQL. Must be one of these
- values: Semantic, Full and SingleBox. Type: string (or Expression with
- resultType string).
- :type compilation_mode: object
- """
-
- _validation = {
- 'name': {'required': True},
- 'type': {'required': True},
- 'script_path': {'required': True},
- 'script_linked_service': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'name': {'key': 'name', 'type': 'str'},
- 'description': {'key': 'description', 'type': 'str'},
- 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
- 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
- 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'},
- 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'},
- 'degree_of_parallelism': {'key': 'typeProperties.degreeOfParallelism', 'type': 'object'},
- 'priority': {'key': 'typeProperties.priority', 'type': 'object'},
- 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'},
- 'runtime_version': {'key': 'typeProperties.runtimeVersion', 'type': 'object'},
- 'compilation_mode': {'key': 'typeProperties.compilationMode', 'type': 'object'},
- }
-
- def __init__(self, *, name: str, script_path, script_linked_service, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, degree_of_parallelism=None, priority=None, parameters=None, runtime_version=None, compilation_mode=None, **kwargs) -> None:
- super(DataLakeAnalyticsUSQLActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
- self.script_path = script_path
- self.script_linked_service = script_linked_service
- self.degree_of_parallelism = degree_of_parallelism
- self.priority = priority
- self.parameters = parameters
- self.runtime_version = runtime_version
- self.compilation_mode = compilation_mode
- self.type = 'DataLakeAnalyticsU-SQL'
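
# Illustrative sketch (editor's addition): a U-SQL activity; the script path,
# linked service names, and parameters are placeholders.
from azure.mgmt.datafactory.models import (
    DataLakeAnalyticsUSQLActivity, LinkedServiceReference)

usql_activity = DataLakeAnalyticsUSQLActivity(
    name='TransformLogs',
    script_path='scripts/transform.usql',
    script_linked_service=LinkedServiceReference(reference_name='AdlsScriptsLS'),
    linked_service_name=LinkedServiceReference(reference_name='AdlaLS'),
    degree_of_parallelism=3,
    priority=100,
)
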
-
-
-class DatasetCompression(Model):
- """The compression method used on a dataset.
-
- You probably want to use the sub-classes and not this class directly. Known
- sub-classes are: DatasetZipDeflateCompression, DatasetDeflateCompression,
- DatasetGZipCompression, DatasetBZip2Compression
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param type: Required. Constant filled by server.
- :type type: str
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'type': {'key': 'type', 'type': 'str'},
- }
-
- _subtype_map = {
- 'type': {'ZipDeflate': 'DatasetZipDeflateCompression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'BZip2': 'DatasetBZip2Compression'}
- }
-
- def __init__(self, *, additional_properties=None, **kwargs) -> None:
- super(DatasetCompression, self).__init__(**kwargs)
- self.additional_properties = additional_properties
- self.type = None
-
-
-class DatasetBZip2Compression(DatasetCompression):
- """The BZip2 compression method used on a dataset.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param type: Required. Constant filled by server.
- :type type: str
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'type': {'key': 'type', 'type': 'str'},
- }
-
- def __init__(self, *, additional_properties=None, **kwargs) -> None:
- super(DatasetBZip2Compression, self).__init__(additional_properties=additional_properties, **kwargs)
- self.type = 'BZip2'
-
-
-class DatasetDeflateCompression(DatasetCompression):
- """The Deflate compression method used on a dataset.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param type: Required. Constant filled by server.
- :type type: str
- :param level: The Deflate compression level.
- :type level: object
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'type': {'key': 'type', 'type': 'str'},
- 'level': {'key': 'level', 'type': 'object'},
- }
-
- def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None:
- super(DatasetDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs)
- self.level = level
- self.type = 'Deflate'
-
-
-class DatasetFolder(Model):
- """The folder that this Dataset is in. If not specified, Dataset will appear
- at the root level.
-
- :param name: The name of the folder that this Dataset is in.
- :type name: str
- """
-
- _attribute_map = {
- 'name': {'key': 'name', 'type': 'str'},
- }
-
- def __init__(self, *, name: str=None, **kwargs) -> None:
- super(DatasetFolder, self).__init__(**kwargs)
- self.name = name
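
# Illustrative sketch (editor's addition): the _subtype_map above lets the
# serializer pick a DatasetCompression subclass from the 'type' discriminator;
# constructing a subclass fills the constant in automatically. The level value
# is a placeholder.
from azure.mgmt.datafactory.models import DatasetDeflateCompression

compression = DatasetDeflateCompression(level='Fastest')
assert compression.type == 'Deflate'  # set by the subclass __init__
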
- :type level: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None: - super(DatasetGZipCompression, self).__init__(additional_properties=additional_properties, **kwargs) - self.level = level - self.type = 'GZip' - - -class DatasetReference(Model): - """Dataset reference type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Dataset reference type. Default value: - "DatasetReference" . - :vartype type: str - :param reference_name: Required. Reference dataset name. - :type reference_name: str - :param parameters: Arguments for dataset. - :type parameters: dict[str, object] - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, - } - - type = "DatasetReference" - - def __init__(self, *, reference_name: str, parameters=None, **kwargs) -> None: - super(DatasetReference, self).__init__(**kwargs) - self.reference_name = reference_name - self.parameters = parameters - - -class SubResource(Model): - """Azure Data Factory nested resource, which belongs to a factory. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - } - - def __init__(self, **kwargs) -> None: - super(SubResource, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.etag = None - - -class DatasetResource(SubResource): - """Dataset resource type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Dataset properties. 
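# Illustrative sketch (not part of this patch): a DatasetReference as consumed
# by activities. 'MyInputDataset' and the 'folder' parameter are placeholders.
from azure.mgmt.datafactory.models import DatasetReference

input_ref = DatasetReference(
    reference_name='MyInputDataset',
    parameters={'folder': 'incoming'},  # arguments for dataset parameters
)
# 'type' is the class-level constant "DatasetReference" and is validated as such.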
- :type properties: ~azure.mgmt.datafactory.models.Dataset - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'Dataset'}, - } - - def __init__(self, *, properties, **kwargs) -> None: - super(DatasetResource, self).__init__(**kwargs) - self.properties = properties - - -class DatasetZipDeflateCompression(DatasetCompression): - """The ZipDeflate compression method used on a dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. - :type type: str - :param level: The ZipDeflate compression level. - :type level: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None: - super(DatasetZipDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs) - self.level = level - self.type = 'ZipDeflate' - - -class Db2LinkedService(LinkedService): - """Linked service for DB2 data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param server: Required. Server name for connection. Type: string (or - Expression with resultType string). - :type server: object - :param database: Required. Database name for connection. Type: string (or - Expression with resultType string). - :type database: object - :param authentication_type: AuthenticationType to be used for connection. - Possible values include: 'Basic' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.Db2AuthenticationType - :param username: Username for authentication. Type: string (or Expression - with resultType string). - :type username: object - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
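# Illustrative sketch (not part of this patch): wrapping dataset properties in
# the DatasetResource envelope. The id/name/type/etag fields are read-only and
# populated by the service on responses; only `properties` is sent. The linked
# service name below is a placeholder.
from azure.mgmt.datafactory.models import (
    DatasetResource, Db2TableDataset, LinkedServiceReference)

payload = DatasetResource(
    properties=Db2TableDataset(
        linked_service_name=LinkedServiceReference(reference_name='Db2LS')))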
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'database': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, server, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(Db2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.server = server - self.database = database - self.authentication_type = authentication_type - self.username = username - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'Db2' - - -class Db2Source(CopySource): - """A copy activity source for Db2 databases. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). 
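# Illustrative sketch (not part of this patch): a minimal DB2 linked service
# using basic authentication. SecureString is the inline SecretBase
# implementation from this same models package; host and credential values are
# placeholders.
from azure.mgmt.datafactory.models import Db2LinkedService, SecureString

db2_ls = Db2LinkedService(
    server='db2.contoso.com',      # Type: string or Expression
    database='SAMPLE',
    authentication_type='Basic',
    username='db2admin',
    password=SecureString(value='<password>'),
)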
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'Db2Source' - - -class Db2TableDataset(Dataset): - """The Db2 table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param db2_table_dataset_schema: The Db2 schema name. Type: string (or - Expression with resultType string). - :type db2_table_dataset_schema: object - :param table: The Db2 table name. Type: string (or Expression with - resultType string). 
- :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'db2_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, db2_table_dataset_schema=None, table=None, **kwargs) -> None: - super(Db2TableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.db2_table_dataset_schema = db2_table_dataset_schema - self.table = table - self.type = 'Db2Table' - - -class DeleteActivity(ExecutionActivity): - """Delete activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized to this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param recursive: If true, files or sub-folders under the current folder - path will be deleted recursively. Default is false. Type: boolean (or - Expression with resultType boolean). - :type recursive: object - :param max_concurrent_connections: The maximum number of concurrent - connections used to connect to the data source at the same time. - :type max_concurrent_connections: int - :param enable_logging: Whether to record detailed logs of delete-activity - execution. Default value is false. Type: boolean (or Expression with - resultType boolean). - :type enable_logging: object - :param log_storage_settings: Log storage settings the customer needs to - provide when enableLogging is true. - :type log_storage_settings: - ~azure.mgmt.datafactory.models.LogStorageSettings - :param dataset: Required. Delete activity dataset reference.
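# Illustrative sketch (not part of this patch): per the Db2TableDataset
# docstring above, table_name is being retired in favor of the schema + table
# pair, which serialize to typeProperties.schema and typeProperties.table.
# The schema/table names are placeholders.
from azure.mgmt.datafactory.models import Db2TableDataset, LinkedServiceReference

db2_orders = Db2TableDataset(
    linked_service_name=LinkedServiceReference(reference_name='Db2LS'),
    db2_table_dataset_schema='DB2INST1',
    table='ORDERS',
)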
- :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'max_concurrent_connections': {'minimum': 1}, - 'dataset': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'recursive': {'key': 'typeProperties.recursive', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'typeProperties.maxConcurrentConnections', 'type': 'int'}, - 'enable_logging': {'key': 'typeProperties.enableLogging', 'type': 'object'}, - 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, - } - - def __init__(self, *, name: str, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, recursive=None, max_concurrent_connections: int=None, enable_logging=None, log_storage_settings=None, **kwargs) -> None: - super(DeleteActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.recursive = recursive - self.max_concurrent_connections = max_concurrent_connections - self.enable_logging = enable_logging - self.log_storage_settings = log_storage_settings - self.dataset = dataset - self.type = 'Delete' - - -class DelimitedTextDataset(Dataset): - """Delimited text dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param location: Required. The location of the delimited text storage. - :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param column_delimiter: The column delimiter. 
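# Illustrative sketch (not part of this patch): the Delete activity defined
# above, wired to a placeholder dataset reference. Note that
# max_concurrent_connections is a plain int here (minimum 1), unlike the
# expression-typed counterpart on copy sources and sinks.
from azure.mgmt.datafactory.models import DeleteActivity, DatasetReference

cleanup = DeleteActivity(
    name='CleanupStaging',
    dataset=DatasetReference(reference_name='StagingFolder'),
    recursive=True,                 # also delete sub-folders
    max_concurrent_connections=4,
)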
Type: string (or Expression - with resultType string). - :type column_delimiter: object - :param row_delimiter: The row delimiter. Type: string (or Expression with - resultType string). - :type row_delimiter: object - :param encoding_name: The code page name of the preferred encoding. If not - specified, the default value is UTF-8, unless the BOM denotes another - Unicode encoding. Refer to the name column of the table in the following - link to set supported values: - https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string - (or Expression with resultType string). - :type encoding_name: object - :param compression_codec: - :type compression_codec: object - :param compression_level: The data compression method used for - DelimitedText. - :type compression_level: object - :param quote_char: The quote character. Type: string (or Expression with - resultType string). - :type quote_char: object - :param escape_char: The escape character. Type: string (or Expression with - resultType string). - :type escape_char: object - :param first_row_as_header: When used as input, treat the first row of - data as headers. When used as output, write the headers into the output as - the first row of data. The default value is false. Type: boolean (or - Expression with resultType boolean). - :type first_row_as_header: object - :param null_value: The null value string. Type: string (or Expression with - resultType string). - :type null_value: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'location': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'}, - 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, - 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, - 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'}, - 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, - 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, - 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, - 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, column_delimiter=None, row_delimiter=None, encoding_name=None, compression_codec=None, compression_level=None, quote_char=None, escape_char=None, first_row_as_header=None, null_value=None, **kwargs) -> None: - super(DelimitedTextDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name,
parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.location = location - self.column_delimiter = column_delimiter - self.row_delimiter = row_delimiter - self.encoding_name = encoding_name - self.compression_codec = compression_codec - self.compression_level = compression_level - self.quote_char = quote_char - self.escape_char = escape_char - self.first_row_as_header = first_row_as_header - self.null_value = null_value - self.type = 'DelimitedText' - - -class FormatReadSettings(Model): - """Format read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, *, type: str, additional_properties=None, **kwargs) -> None: - super(FormatReadSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type = type - - -class DelimitedTextReadSettings(FormatReadSettings): - """Delimited text read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param skip_line_count: Indicates the number of non-empty rows to skip - when reading data from input files. Type: integer (or Expression with - resultType integer). - :type skip_line_count: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, skip_line_count=None, **kwargs) -> None: - super(DelimitedTextReadSettings, self).__init__(additional_properties=additional_properties, type=type, **kwargs) - self.skip_line_count = skip_line_count - - -class DelimitedTextSink(CopySink): - """A copy activity DelimitedText sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). 
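# Illustrative sketch (not part of this patch): a CSV-style delimited text
# dataset. The DatasetLocation signature (type/folder_path/file_name) and the
# 'AzureBlobStorageLocation' type string are assumptions drawn from elsewhere
# in this package; the linked service name and paths are placeholders.
from azure.mgmt.datafactory.models import (
    DelimitedTextDataset, DatasetLocation, LinkedServiceReference)

csv_ds = DelimitedTextDataset(
    linked_service_name=LinkedServiceReference(reference_name='BlobLS'),
    location=DatasetLocation(
        type='AzureBlobStorageLocation',
        folder_path='container/incoming',
        file_name='data.csv',
    ),
    column_delimiter=',',
    first_row_as_header=True,
)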
- :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: DelimitedText store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - :param format_settings: DelimitedText format settings. - :type format_settings: - ~azure.mgmt.datafactory.models.DelimitedTextWriteSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: - super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.store_settings = store_settings - self.format_settings = format_settings - self.type = 'DelimitedTextSink' - - -class DelimitedTextSource(CopySource): - """A copy activity DelimitedText source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: DelimitedText store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - :param format_settings: DelimitedText format settings. 
- :type format_settings: - ~azure.mgmt.datafactory.models.DelimitedTextReadSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: - super(DelimitedTextSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.store_settings = store_settings - self.format_settings = format_settings - self.type = 'DelimitedTextSource' - - -class DelimitedTextWriteSettings(FormatWriteSettings): - """Delimited text write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str - :param quote_all_text: Indicates whether string values should always be - enclosed with quotes. Type: boolean (or Expression with resultType - boolean). - :type quote_all_text: object - :param file_extension: Required. The file extension used to create the - files. Type: string (or Expression with resultType string). - :type file_extension: object - """ - - _validation = { - 'type': {'required': True}, - 'file_extension': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, - 'file_extension': {'key': 'fileExtension', 'type': 'object'}, - } - - def __init__(self, *, type: str, file_extension, additional_properties=None, quote_all_text=None, **kwargs) -> None: - super(DelimitedTextWriteSettings, self).__init__(additional_properties=additional_properties, type=type, **kwargs) - self.quote_all_text = quote_all_text - self.file_extension = file_extension - - -class DependencyReference(Model): - """Referenced dependency. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SelfDependencyTumblingWindowTriggerReference, - TriggerDependencyReference - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SelfDependencyTumblingWindowTriggerReference': 'SelfDependencyTumblingWindowTriggerReference', 'TriggerDependencyReference': 'TriggerDependencyReference'} - } - - def __init__(self, **kwargs) -> None: - super(DependencyReference, self).__init__(**kwargs) - self.type = None - - -class DistcpSettings(Model): - """Distcp settings. 
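# Illustrative sketch (not part of this patch): pairing the DelimitedText copy
# source with its read settings. FormatReadSettings is not polymorphic in this
# version, so the type discriminator is passed explicitly;
# 'DelimitedTextReadSettings' is the assumed type string.
from azure.mgmt.datafactory.models import (
    DelimitedTextSource, DelimitedTextReadSettings)

source = DelimitedTextSource(
    format_settings=DelimitedTextReadSettings(
        type='DelimitedTextReadSettings',
        skip_line_count=2,  # skip two non-empty rows before reading data
    ),
)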
- - All required parameters must be populated in order to send to Azure. - - :param resource_manager_endpoint: Required. Specifies the Yarn - ResourceManager endpoint. Type: string (or Expression with resultType - string). - :type resource_manager_endpoint: object - :param temp_script_path: Required. Specifies an existing folder path which - will be used to store the temporary Distcp command script. The script file - is generated by ADF and will be removed after the copy job has finished. - Type: string (or Expression with resultType string). - :type temp_script_path: object - :param distcp_options: Specifies the Distcp options. Type: string (or - Expression with resultType string). - :type distcp_options: object - """ - - _validation = { - 'resource_manager_endpoint': {'required': True}, - 'temp_script_path': {'required': True}, - } - - _attribute_map = { - 'resource_manager_endpoint': {'key': 'resourceManagerEndpoint', 'type': 'object'}, - 'temp_script_path': {'key': 'tempScriptPath', 'type': 'object'}, - 'distcp_options': {'key': 'distcpOptions', 'type': 'object'}, - } - - def __init__(self, *, resource_manager_endpoint, temp_script_path, distcp_options=None, **kwargs) -> None: - super(DistcpSettings, self).__init__(**kwargs) - self.resource_manager_endpoint = resource_manager_endpoint - self.temp_script_path = temp_script_path - self.distcp_options = distcp_options - - -class DocumentDbCollectionDataset(Dataset): - """Microsoft Azure Document Database Collection dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized to this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param collection_name: Required. Document Database collection name. Type: - string (or Expression with resultType string).
- :type collection_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'collection_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, collection_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: - super(DocumentDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.collection_name = collection_name - self.type = 'DocumentDbCollection' - - -class DocumentDbCollectionSink(CopySink): - """A copy activity Document Database Collection sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param nesting_separator: Nested properties separator. Default is . (dot). - Type: string (or Expression with resultType string). - :type nesting_separator: object - :param write_behavior: Describes how to write data to Azure Cosmos DB. - Allowed values: insert and upsert. 
- :type write_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, nesting_separator=None, write_behavior=None, **kwargs) -> None: - super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.nesting_separator = nesting_separator - self.write_behavior = write_behavior - self.type = 'DocumentDbCollectionSink' - - -class DocumentDbCollectionSource(CopySource): - """A copy activity Document Database Collection source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Documents query. Type: string (or Expression with resultType - string). - :type query: object - :param nesting_separator: Nested properties separator. Type: string (or - Expression with resultType string). 
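# Illustrative sketch (not part of this patch): a Document Database collection
# sink that upserts rather than inserts, per the write_behavior values noted
# in the docstring above.
from azure.mgmt.datafactory.models import DocumentDbCollectionSink

sink = DocumentDbCollectionSink(
    nesting_separator='.',     # default nested-property separator
    write_behavior='upsert',   # allowed values: insert, upsert
)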
- :type nesting_separator: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, nesting_separator=None, **kwargs) -> None: - super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.nesting_separator = nesting_separator - self.type = 'DocumentDbCollectionSource' - - -class DrillLinkedService(LinkedService): - """Drill server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection - string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: - super(DrillLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.pwd = pwd - self.encrypted_credential = encrypted_credential - self.type = 'Drill' - - -class DrillSource(CopySource): - """A copy activity Drill server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'DrillSource' - - -class DrillTableDataset(Dataset): - """Drill server dataset. - - All required parameters must be populated in order to send to Azure. 
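# Illustrative sketch (not part of this patch): a Drill linked service whose
# connection-string password is pulled from Key Vault. The
# AzureKeyVaultSecretReference signature (store + secret_name) is an
# assumption drawn from elsewhere in this package; all names are placeholders.
from azure.mgmt.datafactory.models import (
    DrillLinkedService, AzureKeyVaultSecretReference, LinkedServiceReference)

drill_ls = DrillLinkedService(
    connection_string='DRIVER={Drill ODBC Driver};HOST=drill.contoso.com',
    pwd=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='KeyVaultLS'),
        secret_name='drill-password',
    ),
)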
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of the Drill. Type: string (or Expression - with resultType string). - :type table: object - :param drill_table_dataset_schema: The schema name of the Drill. Type: - string (or Expression with resultType string). - :type drill_table_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'drill_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, drill_table_dataset_schema=None, **kwargs) -> None: - super(DrillTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.table = table - self.drill_table_dataset_schema = drill_table_dataset_schema - self.type = 'DrillTable' - - -class DynamicsAXLinkedService(LinkedService): - """Dynamics AX linked service. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are - deserialized to this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param url: Required. The Dynamics AX (or Dynamics 365 Finance and - Operations) instance OData endpoint. - :type url: object - :param service_principal_id: Required. Specify the application's client - ID. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: Required. Specify the application's key. - Mark this field as a SecureString to store it securely in Data Factory, or - reference a secret stored in Azure Key Vault. Type: string (or Expression - with resultType string). - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: Required. Specify the tenant information (domain name or - tenant ID) under which your application resides. Retrieve it by hovering - the mouse over the top-right corner of the Azure portal. Type: string (or - Expression with resultType string). - :type tenant: object - :param aad_resource_id: Required. Specify the resource you are requesting - authorization for. Type: string (or Expression with resultType string). - :type aad_resource_id: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, - 'tenant': {'required': True}, - 'aad_resource_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, url, service_principal_id, service_principal_key, tenant, aad_resource_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, encrypted_credential=None, **kwargs) -> None: - super(DynamicsAXLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.url = url - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.aad_resource_id = aad_resource_id - self.encrypted_credential = encrypted_credential - self.type = 'DynamicsAX' - - -class DynamicsAXResourceDataset(Dataset): - """The path of the Dynamics AX OData entity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param path: Required. The path of the Dynamics AX OData entity. Type: - string (or Expression with resultType string). 
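# Illustrative sketch (not part of this patch): the Dynamics AX linked service
# above authenticates with a service principal; every value below is a
# placeholder, and SecureString is the inline SecretBase type from this
# package.
from azure.mgmt.datafactory.models import DynamicsAXLinkedService, SecureString

dynamics_ax_ls = DynamicsAXLinkedService(
    url='https://contoso.operations.dynamics.com/data',
    service_principal_id='<application-client-id>',
    service_principal_key=SecureString(value='<application-key>'),
    tenant='contoso.onmicrosoft.com',
    aad_resource_id='https://contoso.operations.dynamics.com',
)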
- :type path: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, path, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: - super(DynamicsAXResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.path = path - self.type = 'DynamicsAXResource' - - -class DynamicsAXSource(CopySource): - """A copy activity Dynamics AX source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(DynamicsAXSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'DynamicsAXSource' - - -class DynamicsCrmEntityDataset(Dataset): - """The Dynamics CRM entity dataset. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param entity_name: The logical name of the entity. Type: string (or - Expression with resultType string). - :type entity_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, entity_name=None, **kwargs) -> None: - super(DynamicsCrmEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.entity_name = entity_name - self.type = 'DynamicsCrmEntity' - - -class DynamicsCrmLinkedService(LinkedService): - """Dynamics CRM linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. 
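# A short sketch of the DynamicsCrmEntityDataset above; the linked service
# reference is hypothetical, and 'account' is the entity's logical name.
from azure.mgmt.datafactory.models import (
    DynamicsCrmEntityDataset, LinkedServiceReference)

crm_dataset = DynamicsCrmEntityDataset(
    linked_service_name=LinkedServiceReference(reference_name='DynamicsCrmLS'),
    entity_name='account',
)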
-    :type type: str
-    :param deployment_type: Required. The deployment type of the Dynamics CRM
-     instance. 'Online' for Dynamics CRM Online and 'OnPremisesWithIfd' for
-     Dynamics CRM on-premises with Ifd. Type: string (or Expression with
-     resultType string). Possible values include: 'Online', 'OnPremisesWithIfd'
-    :type deployment_type: str or
-     ~azure.mgmt.datafactory.models.DynamicsDeploymentType
-    :param host_name: The host name of the on-premises Dynamics CRM server.
-     The property is required for on-prem and not allowed for online. Type:
-     string (or Expression with resultType string).
-    :type host_name: object
-    :param port: The port of the on-premises Dynamics CRM server. The
-     property is required for on-prem and not allowed for online. Default is
-     443. Type: integer (or Expression with resultType integer), minimum: 0.
-    :type port: object
-    :param service_uri: The URL to the Microsoft Dynamics CRM server. The
-     property is required for online and not allowed for on-prem. Type:
-     string (or Expression with resultType string).
-    :type service_uri: object
-    :param organization_name: The organization name of the Dynamics CRM
-     instance. The property is required for on-prem, and required for online
-     when more than one Dynamics CRM instance is associated with the user.
-     Type: string (or Expression with resultType string).
-    :type organization_name: object
-    :param authentication_type: Required. The authentication type to connect
-     to the Dynamics CRM server. 'Office365' for the online scenario, 'Ifd'
-     for the on-premises with Ifd scenario. Type: string (or Expression with
-     resultType string). Possible values include: 'Office365', 'Ifd'
-    :type authentication_type: str or
-     ~azure.mgmt.datafactory.models.DynamicsAuthenticationType
-    :param username: Required. User name to access the Dynamics CRM instance.
-     Type: string (or Expression with resultType string).
-    :type username: object
-    :param password: Password to access the Dynamics CRM instance.
-    :type password: ~azure.mgmt.datafactory.models.SecretBase
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'deployment_type': {'required': True}, - 'authentication_type': {'required': True}, - 'username': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, - 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, - 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, deployment_type, authentication_type, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(DynamicsCrmLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.deployment_type = deployment_type - self.host_name = host_name - self.port = port - self.service_uri = service_uri - self.organization_name = organization_name - self.authentication_type = authentication_type - self.username = username - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'DynamicsCrm' - - -class DynamicsCrmSink(CopySink): - """A copy activity Dynamics CRM sink. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. 
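# A sketch of the online flavor of the DynamicsCrmLinkedService above:
# deployment_type 'Online' takes service_uri, whereas 'OnPremisesWithIfd'
# would take host_name/port instead. All values are hypothetical.
from azure.mgmt.datafactory.models import DynamicsCrmLinkedService, SecureString

crm_ls = DynamicsCrmLinkedService(
    deployment_type='Online',
    authentication_type='Office365',
    username='admin@contoso.onmicrosoft.com',
    password=SecureString(value='<password>'),
    service_uri='https://contoso.crm.dynamics.com',
)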
- :type type: str - :ivar write_behavior: Required. The write behavior for the operation. - Default value: "Upsert" . - :vartype write_behavior: str - :param ignore_null_values: The flag indicating whether to ignore null - values from input dataset (except key fields) during write operation. - Default is false. Type: boolean (or Expression with resultType boolean). - :type ignore_null_values: object - """ - - _validation = { - 'type': {'required': True}, - 'write_behavior': {'required': True, 'constant': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, - } - - write_behavior = "Upsert" - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None: - super(DynamicsCrmSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.ignore_null_values = ignore_null_values - self.type = 'DynamicsCrmSink' - - -class DynamicsCrmSource(CopySource): - """A copy activity Dynamics CRM source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: FetchXML is a proprietary query language that is used in - Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression - with resultType string). 
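# A minimal sketch of the DynamicsCrmSink above. write_behavior is the class
# constant "Upsert", so only the optional knobs are supplied.
from azure.mgmt.datafactory.models import DynamicsCrmSink

crm_sink = DynamicsCrmSink(
    write_batch_size=10,
    ignore_null_values=True,  # skip nulls so existing values are preserved
)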
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(DynamicsCrmSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'DynamicsCrmSource' - - -class DynamicsEntityDataset(Dataset): - """The Dynamics entity dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param entity_name: The logical name of the entity. Type: string (or - Expression with resultType string). 
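# A sketch of the DynamicsCrmSource above; the FetchXML query body is
# illustrative only.
from azure.mgmt.datafactory.models import DynamicsCrmSource

crm_source = DynamicsCrmSource(
    query=(
        '<fetch top="100">'
        '<entity name="account"><attribute name="name" /></entity>'
        '</fetch>'
    )
)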
- :type entity_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, entity_name=None, **kwargs) -> None: - super(DynamicsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.entity_name = entity_name - self.type = 'DynamicsEntity' - - -class DynamicsLinkedService(LinkedService): - """Dynamics linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param deployment_type: Required. The deployment type of the Dynamics - instance. 'Online' for Dynamics Online and 'OnPremisesWithIfd' for - Dynamics on-premises with Ifd. Type: string (or Expression with resultType - string). - :type deployment_type: object - :param host_name: The host name of the on-premises Dynamics server. The - property is required for on-prem and not allowed for online. Type: string - (or Expression with resultType string). - :type host_name: object - :param port: The port of on-premises Dynamics server. The property is - required for on-prem and not allowed for online. Default is 443. Type: - integer (or Expression with resultType integer), minimum: 0. - :type port: object - :param service_uri: The URL to the Microsoft Dynamics server. The property - is required for on-line and not allowed for on-prem. Type: string (or - Expression with resultType string). - :type service_uri: object - :param organization_name: The organization name of the Dynamics instance. - The property is required for on-prem and required for online when there - are more than one Dynamics instances associated with the user. Type: - string (or Expression with resultType string). - :type organization_name: object - :param authentication_type: Required. The authentication type to connect - to Dynamics server. 
'Office365' for online scenario, 'Ifd' for on-premises - with Ifd scenario. Type: string (or Expression with resultType string). - :type authentication_type: object - :param username: Required. User name to access the Dynamics instance. - Type: string (or Expression with resultType string). - :type username: object - :param password: Password to access the Dynamics instance. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'deployment_type': {'required': True}, - 'authentication_type': {'required': True}, - 'username': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, - 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, - 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, deployment_type, authentication_type, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(DynamicsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.deployment_type = deployment_type - self.host_name = host_name - self.port = port - self.service_uri = service_uri - self.organization_name = organization_name - self.authentication_type = authentication_type - self.username = username - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'Dynamics' - - -class DynamicsSink(CopySink): - """A copy activity Dynamics sink. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
-    :type write_batch_timeout: object
-    :param sink_retry_count: Sink retry count. Type: integer (or Expression
-     with resultType integer).
-    :type sink_retry_count: object
-    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
-     resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type sink_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the sink data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :ivar write_behavior: Required. The write behavior for the operation.
-     Default value: "Upsert".
-    :vartype write_behavior: str
-    :param ignore_null_values: The flag indicating whether to ignore null
-     values from the input dataset (except key fields) during a write
-     operation. Default is false. Type: boolean (or Expression with resultType
-     boolean).
-    :type ignore_null_values: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-        'write_behavior': {'required': True, 'constant': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
-        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
-        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
-        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
-        'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
-    }
-
-    write_behavior = "Upsert"
-
-    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None:
-        super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
-        self.ignore_null_values = ignore_null_values
-        self.type = 'DynamicsSink'
-
-
-class DynamicsSource(CopySource):
-    """A copy activity Dynamics source.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param source_retry_count: Source retry count. Type: integer (or
-     Expression with resultType integer).
-    :type source_retry_count: object
-    :param source_retry_wait: Source retry wait. Type: string (or Expression
-     with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type source_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the source data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param query: FetchXML is a proprietary query language that is used in
-     Microsoft Dynamics (online & on-premises). Type: string (or Expression
-     with resultType string).
-    :type query: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
-        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'query': {'key': 'query', 'type': 'object'},
-    }
-
-    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
-        super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
-        self.query = query
-        self.type = 'DynamicsSource'
-
-
-class EloquaLinkedService(LinkedService):
-    """Eloqua server linked service.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param endpoint: Required. The endpoint of the Eloqua server. (e.g.
-     eloqua.example.com)
-    :type endpoint: object
-    :param username: Required. The site name and user name of your Eloqua
-     account in the form: sitename/username. (e.g. Eloqua/Alice)
-    :type username: object
-    :param password: The password corresponding to the user name.
-    :type password: ~azure.mgmt.datafactory.models.SecretBase
-    :param use_encrypted_endpoints: Specifies whether the data source
-     endpoints are encrypted using HTTPS. The default value is true.
-    :type use_encrypted_endpoints: object
-    :param use_host_verification: Specifies whether to require the host name
-     in the server's certificate to match the host name of the server when
-     connecting over SSL. The default value is true.
-    :type use_host_verification: object
-    :param use_peer_verification: Specifies whether to verify the identity of
-     the server when connecting over SSL. The default value is true.
-    :type use_peer_verification: object
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
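# A hedged sketch wiring the DynamicsSource and DynamicsSink models above
# into a copy activity (e.g. migrating between two Dynamics instances).
# CopyActivity and DatasetReference are assumed from this same models
# package, and the dataset reference names are hypothetical.
from azure.mgmt.datafactory.models import (
    CopyActivity, DatasetReference, DynamicsSink, DynamicsSource)

copy_accounts = CopyActivity(
    name='CopyAccountsBetweenInstances',
    inputs=[DatasetReference(reference_name='SourceAccounts')],
    outputs=[DatasetReference(reference_name='TargetAccounts')],
    source=DynamicsSource(query='<fetch><entity name="account" /></fetch>'),
    sink=DynamicsSink(ignore_null_values=True),
)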
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'username': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, endpoint, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(EloquaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.endpoint = endpoint - self.username = username - self.password = password - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'Eloqua' - - -class EloquaObjectDataset(Dataset): - """Eloqua server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). 
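# A minimal sketch of the EloquaLinkedService above; the endpoint and account
# values are placeholders, with username in the documented sitename/username
# form.
from azure.mgmt.datafactory.models import EloquaLinkedService, SecureString

eloqua_ls = EloquaLinkedService(
    endpoint='eloqua.example.com',
    username='Eloqua/Alice',
    password=SecureString(value='<password>'),
    use_encrypted_endpoints=True,
)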
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(EloquaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'EloquaObject' - - -class EloquaSource(CopySource): - """A copy activity Eloqua server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'EloquaSource' - - -class EntityReference(Model): - """The entity reference. - - :param type: The type of this referenced entity. 
Possible values include: - 'IntegrationRuntimeReference', 'LinkedServiceReference' - :type type: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType - :param reference_name: The name of this referenced entity. - :type reference_name: str - """ - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - } - - def __init__(self, *, type=None, reference_name: str=None, **kwargs) -> None: - super(EntityReference, self).__init__(**kwargs) - self.type = type - self.reference_name = reference_name - - -class ExecutePipelineActivity(ControlActivity): - """Execute pipeline activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param pipeline: Required. Pipeline reference. - :type pipeline: ~azure.mgmt.datafactory.models.PipelineReference - :param parameters: Pipeline parameters. - :type parameters: dict[str, object] - :param wait_on_completion: Defines whether activity execution will wait - for the dependent pipeline execution to finish. Default is false. - :type wait_on_completion: bool - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'pipeline': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pipeline': {'key': 'typeProperties.pipeline', 'type': 'PipelineReference'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, - 'wait_on_completion': {'key': 'typeProperties.waitOnCompletion', 'type': 'bool'}, - } - - def __init__(self, *, name: str, pipeline, additional_properties=None, description: str=None, depends_on=None, user_properties=None, parameters=None, wait_on_completion: bool=None, **kwargs) -> None: - super(ExecutePipelineActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.pipeline = pipeline - self.parameters = parameters - self.wait_on_completion = wait_on_completion - self.type = 'ExecutePipeline' - - -class ExecuteSSISPackageActivity(ExecutionActivity): - """Execute SSIS package activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. 
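# A sketch of the ExecutePipelineActivity above, invoking a child pipeline by
# reference; the pipeline name and parameter values are hypothetical.
from azure.mgmt.datafactory.models import (
    ExecutePipelineActivity, PipelineReference)

run_child = ExecutePipelineActivity(
    name='RunIngestion',
    pipeline=PipelineReference(reference_name='IngestionPipeline'),
    parameters={'windowStart': '2019-06-01T00:00:00Z'},
    wait_on_completion=True,
)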
- :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param package_location: Required. SSIS package location. - :type package_location: ~azure.mgmt.datafactory.models.SSISPackageLocation - :param runtime: Specifies the runtime to execute SSIS package. The value - should be "x86" or "x64". Type: string (or Expression with resultType - string). - :type runtime: object - :param logging_level: The logging level of SSIS package execution. Type: - string (or Expression with resultType string). - :type logging_level: object - :param environment_path: The environment path to execute the SSIS package. - Type: string (or Expression with resultType string). - :type environment_path: object - :param execution_credential: The package execution credential. - :type execution_credential: - ~azure.mgmt.datafactory.models.SSISExecutionCredential - :param connect_via: Required. The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param project_parameters: The project level parameters to execute the - SSIS package. - :type project_parameters: dict[str, - ~azure.mgmt.datafactory.models.SSISExecutionParameter] - :param package_parameters: The package level parameters to execute the - SSIS package. - :type package_parameters: dict[str, - ~azure.mgmt.datafactory.models.SSISExecutionParameter] - :param project_connection_managers: The project level connection managers - to execute the SSIS package. - :type project_connection_managers: dict[str, dict[str, - ~azure.mgmt.datafactory.models.SSISExecutionParameter]] - :param package_connection_managers: The package level connection managers - to execute the SSIS package. - :type package_connection_managers: dict[str, dict[str, - ~azure.mgmt.datafactory.models.SSISExecutionParameter]] - :param property_overrides: The property overrides to execute the SSIS - package. - :type property_overrides: dict[str, - ~azure.mgmt.datafactory.models.SSISPropertyOverride] - :param log_location: SSIS package execution log location. 
- :type log_location: ~azure.mgmt.datafactory.models.SSISLogLocation - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'package_location': {'required': True}, - 'connect_via': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'package_location': {'key': 'typeProperties.packageLocation', 'type': 'SSISPackageLocation'}, - 'runtime': {'key': 'typeProperties.runtime', 'type': 'object'}, - 'logging_level': {'key': 'typeProperties.loggingLevel', 'type': 'object'}, - 'environment_path': {'key': 'typeProperties.environmentPath', 'type': 'object'}, - 'execution_credential': {'key': 'typeProperties.executionCredential', 'type': 'SSISExecutionCredential'}, - 'connect_via': {'key': 'typeProperties.connectVia', 'type': 'IntegrationRuntimeReference'}, - 'project_parameters': {'key': 'typeProperties.projectParameters', 'type': '{SSISExecutionParameter}'}, - 'package_parameters': {'key': 'typeProperties.packageParameters', 'type': '{SSISExecutionParameter}'}, - 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, - 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, - 'property_overrides': {'key': 'typeProperties.propertyOverrides', 'type': '{SSISPropertyOverride}'}, - 'log_location': {'key': 'typeProperties.logLocation', 'type': 'SSISLogLocation'}, - } - - def __init__(self, *, name: str, package_location, connect_via, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, runtime=None, logging_level=None, environment_path=None, execution_credential=None, project_parameters=None, package_parameters=None, project_connection_managers=None, package_connection_managers=None, property_overrides=None, log_location=None, **kwargs) -> None: - super(ExecuteSSISPackageActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.package_location = package_location - self.runtime = runtime - self.logging_level = logging_level - self.environment_path = environment_path - self.execution_credential = execution_credential - self.connect_via = connect_via - self.project_parameters = project_parameters - self.package_parameters = package_parameters - self.project_connection_managers = project_connection_managers - self.package_connection_managers = package_connection_managers - self.property_overrides = property_overrides - self.log_location = log_location - self.type = 'ExecuteSSISPackage' - - -class ExposureControlRequest(Model): - """The exposure control request. - - :param feature_name: The feature name. - :type feature_name: str - :param feature_type: The feature type. 
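# A hedged sketch of the ExecuteSSISPackageActivity above. The package path,
# integration runtime name, and parameter value are hypothetical, and
# SSISPackageLocation/SSISExecutionParameter are assumed from this package.
from azure.mgmt.datafactory.models import (
    ExecuteSSISPackageActivity, IntegrationRuntimeReference,
    SSISExecutionParameter, SSISPackageLocation)

run_package = ExecuteSSISPackageActivity(
    name='RunDailyETL',
    package_location=SSISPackageLocation(
        package_path='SSISDB/Finance/DailyETL.dtsx'),
    connect_via=IntegrationRuntimeReference(reference_name='AzureSsisIR'),
    logging_level='Basic',
    package_parameters={'RunDate': SSISExecutionParameter(value='2019-06-07')},
)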
- :type feature_type: str - """ - - _attribute_map = { - 'feature_name': {'key': 'featureName', 'type': 'str'}, - 'feature_type': {'key': 'featureType', 'type': 'str'}, - } - - def __init__(self, *, feature_name: str=None, feature_type: str=None, **kwargs) -> None: - super(ExposureControlRequest, self).__init__(**kwargs) - self.feature_name = feature_name - self.feature_type = feature_type - - -class ExposureControlResponse(Model): - """The exposure control response. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar feature_name: The feature name. - :vartype feature_name: str - :ivar value: The feature value. - :vartype value: str - """ - - _validation = { - 'feature_name': {'readonly': True}, - 'value': {'readonly': True}, - } - - _attribute_map = { - 'feature_name': {'key': 'featureName', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, - } - - def __init__(self, **kwargs) -> None: - super(ExposureControlResponse, self).__init__(**kwargs) - self.feature_name = None - self.value = None - - -class Expression(Model): - """Azure Data Factory expression definition. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Expression type. Default value: "Expression" . - :vartype type: str - :param value: Required. Expression value. - :type value: str - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'value': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, - } - - type = "Expression" - - def __init__(self, *, value: str, **kwargs) -> None: - super(Expression, self).__init__(**kwargs) - self.value = value - - -class Resource(Model): - """Azure Data Factory top-level resource. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :param location: The resource location. - :type location: str - :param tags: The resource tags. - :type tags: dict[str, str] - :ivar e_tag: Etag identifies change in the resource. - :vartype e_tag: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'e_tag': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'e_tag': {'key': 'eTag', 'type': 'str'}, - } - - def __init__(self, *, location: str=None, tags=None, **kwargs) -> None: - super(Resource, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.location = location - self.tags = tags - self.e_tag = None - - -class Factory(Resource): - """Factory resource type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :param location: The resource location. - :type location: str - :param tags: The resource tags. - :type tags: dict[str, str] - :ivar e_tag: Etag identifies change in the resource. 
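# A minimal sketch of the Expression model above; type is the class constant
# "Expression", so only value is supplied.
from azure.mgmt.datafactory.models import Expression

folder_expr = Expression(value="@concat('raw/', pipeline().RunId)")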
- :vartype e_tag: str - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param identity: Managed service identity of the factory. - :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity - :ivar provisioning_state: Factory provisioning state, example Succeeded. - :vartype provisioning_state: str - :ivar create_time: Time the factory was created in ISO8601 format. - :vartype create_time: datetime - :ivar version: Version of the factory. - :vartype version: str - :param repo_configuration: Git repo information of the factory. - :type repo_configuration: - ~azure.mgmt.datafactory.models.FactoryRepoConfiguration - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'e_tag': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - 'create_time': {'readonly': True}, - 'version': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'e_tag': {'key': 'eTag', 'type': 'str'}, - 'additional_properties': {'key': '', 'type': '{object}'}, - 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, - 'create_time': {'key': 'properties.createTime', 'type': 'iso-8601'}, - 'version': {'key': 'properties.version', 'type': 'str'}, - 'repo_configuration': {'key': 'properties.repoConfiguration', 'type': 'FactoryRepoConfiguration'}, - } - - def __init__(self, *, location: str=None, tags=None, additional_properties=None, identity=None, repo_configuration=None, **kwargs) -> None: - super(Factory, self).__init__(location=location, tags=tags, **kwargs) - self.additional_properties = additional_properties - self.identity = identity - self.provisioning_state = None - self.create_time = None - self.version = None - self.repo_configuration = repo_configuration - - -class FactoryRepoConfiguration(Model): - """Factory's git repo information. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: FactoryVSTSConfiguration, FactoryGitHubConfiguration - - All required parameters must be populated in order to send to Azure. - - :param account_name: Required. Account name. - :type account_name: str - :param repository_name: Required. Repository name. - :type repository_name: str - :param collaboration_branch: Required. Collaboration branch. - :type collaboration_branch: str - :param root_folder: Required. Root folder. - :type root_folder: str - :param last_commit_id: Last commit id. - :type last_commit_id: str - :param type: Required. Constant filled by server. 
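# A sketch of the Factory resource above with a system-assigned identity;
# location and tags are hypothetical, and the read-only fields (provisioning
# state, create time, version, eTag) are filled in by the service.
from azure.mgmt.datafactory.models import Factory, FactoryIdentity

factory = Factory(
    location='eastus',
    tags={'env': 'dev'},
    identity=FactoryIdentity(),
)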
- :type type: str - """ - - _validation = { - 'account_name': {'required': True}, - 'repository_name': {'required': True}, - 'collaboration_branch': {'required': True}, - 'root_folder': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'repository_name': {'key': 'repositoryName', 'type': 'str'}, - 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, - 'root_folder': {'key': 'rootFolder', 'type': 'str'}, - 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'FactoryVSTSConfiguration': 'FactoryVSTSConfiguration', 'FactoryGitHubConfiguration': 'FactoryGitHubConfiguration'} - } - - def __init__(self, *, account_name: str, repository_name: str, collaboration_branch: str, root_folder: str, last_commit_id: str=None, **kwargs) -> None: - super(FactoryRepoConfiguration, self).__init__(**kwargs) - self.account_name = account_name - self.repository_name = repository_name - self.collaboration_branch = collaboration_branch - self.root_folder = root_folder - self.last_commit_id = last_commit_id - self.type = None - - -class FactoryGitHubConfiguration(FactoryRepoConfiguration): - """Factory's GitHub repo information. - - All required parameters must be populated in order to send to Azure. - - :param account_name: Required. Account name. - :type account_name: str - :param repository_name: Required. Repository name. - :type repository_name: str - :param collaboration_branch: Required. Collaboration branch. - :type collaboration_branch: str - :param root_folder: Required. Root folder. - :type root_folder: str - :param last_commit_id: Last commit id. - :type last_commit_id: str - :param type: Required. Constant filled by server. - :type type: str - :param host_name: GitHub Enterprise host name. For example: - https://github.mydomain.com - :type host_name: str - """ - - _validation = { - 'account_name': {'required': True}, - 'repository_name': {'required': True}, - 'collaboration_branch': {'required': True}, - 'root_folder': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'repository_name': {'key': 'repositoryName', 'type': 'str'}, - 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, - 'root_folder': {'key': 'rootFolder', 'type': 'str'}, - 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host_name': {'key': 'hostName', 'type': 'str'}, - } - - def __init__(self, *, account_name: str, repository_name: str, collaboration_branch: str, root_folder: str, last_commit_id: str=None, host_name: str=None, **kwargs) -> None: - super(FactoryGitHubConfiguration, self).__init__(account_name=account_name, repository_name=repository_name, collaboration_branch=collaboration_branch, root_folder=root_folder, last_commit_id=last_commit_id, **kwargs) - self.host_name = host_name - self.type = 'FactoryGitHubConfiguration' - - -class FactoryIdentity(Model): - """Identity properties of the factory resource. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. The identity type. Currently the only supported type - is 'SystemAssigned'. Default value: "SystemAssigned" . - :vartype type: str - :ivar principal_id: The principal id of the identity. 
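# A sketch of the FactoryGitHubConfiguration above; repository details are
# hypothetical, and host_name would only be set for GitHub Enterprise.
from azure.mgmt.datafactory.models import FactoryGitHubConfiguration

github_repo = FactoryGitHubConfiguration(
    account_name='contoso',
    repository_name='adf-pipelines',
    collaboration_branch='master',
    root_folder='/',
)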
- :vartype principal_id: str - :ivar tenant_id: The client tenant id of the identity. - :vartype tenant_id: str - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'principal_id': {'readonly': True}, - 'tenant_id': {'readonly': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'principal_id': {'key': 'principalId', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - } - - type = "SystemAssigned" - - def __init__(self, **kwargs) -> None: - super(FactoryIdentity, self).__init__(**kwargs) - self.principal_id = None - self.tenant_id = None - - -class FactoryRepoUpdate(Model): - """Factory's git repo information. - - :param factory_resource_id: The factory resource id. - :type factory_resource_id: str - :param repo_configuration: Git repo information of the factory. - :type repo_configuration: - ~azure.mgmt.datafactory.models.FactoryRepoConfiguration - """ - - _attribute_map = { - 'factory_resource_id': {'key': 'factoryResourceId', 'type': 'str'}, - 'repo_configuration': {'key': 'repoConfiguration', 'type': 'FactoryRepoConfiguration'}, - } - - def __init__(self, *, factory_resource_id: str=None, repo_configuration=None, **kwargs) -> None: - super(FactoryRepoUpdate, self).__init__(**kwargs) - self.factory_resource_id = factory_resource_id - self.repo_configuration = repo_configuration - - -class FactoryUpdateParameters(Model): - """Parameters for updating a factory resource. - - :param tags: The resource tags. - :type tags: dict[str, str] - :param identity: Managed service identity of the factory. - :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity - """ - - _attribute_map = { - 'tags': {'key': 'tags', 'type': '{str}'}, - 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, - } - - def __init__(self, *, tags=None, identity=None, **kwargs) -> None: - super(FactoryUpdateParameters, self).__init__(**kwargs) - self.tags = tags - self.identity = identity - - -class FactoryVSTSConfiguration(FactoryRepoConfiguration): - """Factory's VSTS repo information. - - All required parameters must be populated in order to send to Azure. - - :param account_name: Required. Account name. - :type account_name: str - :param repository_name: Required. Repository name. - :type repository_name: str - :param collaboration_branch: Required. Collaboration branch. - :type collaboration_branch: str - :param root_folder: Required. Root folder. - :type root_folder: str - :param last_commit_id: Last commit id. - :type last_commit_id: str - :param type: Required. Constant filled by server. - :type type: str - :param project_name: Required. VSTS project name. - :type project_name: str - :param tenant_id: VSTS tenant id. 
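# A minimal sketch of the FactoryUpdateParameters above, as used when
# patching a factory's tags or identity.
from azure.mgmt.datafactory.models import (
    FactoryIdentity, FactoryUpdateParameters)

update = FactoryUpdateParameters(
    tags={'env': 'prod'},
    identity=FactoryIdentity(),
)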
- :type tenant_id: str - """ - - _validation = { - 'account_name': {'required': True}, - 'repository_name': {'required': True}, - 'collaboration_branch': {'required': True}, - 'root_folder': {'required': True}, - 'type': {'required': True}, - 'project_name': {'required': True}, - } - - _attribute_map = { - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'repository_name': {'key': 'repositoryName', 'type': 'str'}, - 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, - 'root_folder': {'key': 'rootFolder', 'type': 'str'}, - 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'project_name': {'key': 'projectName', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - } - - def __init__(self, *, account_name: str, repository_name: str, collaboration_branch: str, root_folder: str, project_name: str, last_commit_id: str=None, tenant_id: str=None, **kwargs) -> None: - super(FactoryVSTSConfiguration, self).__init__(account_name=account_name, repository_name=repository_name, collaboration_branch=collaboration_branch, root_folder=root_folder, last_commit_id=last_commit_id, **kwargs) - self.project_name = project_name - self.tenant_id = tenant_id - self.type = 'FactoryVSTSConfiguration' - - -class FileServerLinkedService(LinkedService): - """File system linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. Host name of the server. Type: string (or - Expression with resultType string). - :type host: object - :param user_id: User ID to logon the server. Type: string (or Expression - with resultType string). - :type user_id: object - :param password: Password to logon the server. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
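# Example (sketch): pairing FactoryVSTSConfiguration with FactoryRepoUpdate, both
# defined above. The resource id, account and project names are hypothetical.
from azure.mgmt.datafactory.models import FactoryRepoUpdate, FactoryVSTSConfiguration

repo_update = FactoryRepoUpdate(
    factory_resource_id='/subscriptions/<sub-id>/resourceGroups/<rg>/providers/Microsoft.DataFactory/factories/myFactory',
    repo_configuration=FactoryVSTSConfiguration(
        account_name='myVstsAccount',             # hypothetical
        project_name='DataPlatform',              # hypothetical
        repository_name='adf',
        collaboration_branch='master',
        root_folder='/',
    ),
)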
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'user_id': {'key': 'typeProperties.userId', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_id=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(FileServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.user_id = user_id - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'FileServer' - - -class FileServerLocation(DatasetLocation): - """The location of file server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). - :type file_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: - super(FileServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) - - -class FileServerReadSettings(StoreReadSettings): - """File server read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - :param wildcard_folder_path: FileServer wildcardFolderPath. Type: string - (or Expression with resultType string). 
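# Example (sketch): the file-server linked service defined above. SecureString is
# the inline SecretBase implementation from this models package; the UNC path and
# credentials are hypothetical.
from azure.mgmt.datafactory.models import FileServerLinkedService, SecureString

file_server_ls = FileServerLinkedService(
    host='\\\\fileserver01\\share',               # hypothetical UNC path
    user_id='copyuser',
    password=SecureString(value='<password>'),
)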
- :type wildcard_folder_path: object - :param wildcard_file_name: FileServer wildcardFileName. Type: string (or - Expression with resultType string). - :type wildcard_file_name: object - :param enable_partition_discovery: Indicates whether to enable partition - discovery. - :type enable_partition_discovery: bool - :param modified_datetime_start: The start of file's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: - string (or Expression with resultType string). - :type modified_datetime_end: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: - super(FileServerReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.recursive = recursive - self.wildcard_folder_path = wildcard_folder_path - self.wildcard_file_name = wildcard_file_name - self.enable_partition_discovery = enable_partition_discovery - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end - - -class FileServerWriteSettings(StoreWriteSettings): - """File server write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: - super(FileServerWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) - self.type = 'FileServerWriteSettings' - - -class FileShareDataset(Dataset): - """An on-premises file system dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param folder_path: The path of the on-premises file system. Type: string - (or Expression with resultType string). - :type folder_path: object - :param file_name: The name of the on-premises file system. Type: string - (or Expression with resultType string). - :type file_name: object - :param modified_datetime_start: The start of file's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: - string (or Expression with resultType string). - :type modified_datetime_end: object - :param format: The format of the files. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param file_filter: Specify a filter to be used to select a subset of - files in the folderPath rather than all files. Type: string (or Expression - with resultType string). - :type file_filter: object - :param compression: The data compression method used for the file system. 
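# Example (sketch): the read and write settings defined above. Note the asymmetry in
# this version of the models: FileServerReadSettings still takes the 'type'
# discriminator explicitly, while FileServerWriteSettings fills it in itself.
from azure.mgmt.datafactory.models import FileServerReadSettings, FileServerWriteSettings

read_settings = FileServerReadSettings(
    type='FileServerReadSettings',
    recursive=True,
    wildcard_folder_path='2019/06/*',             # hypothetical
    wildcard_file_name='*.csv',
)
write_settings = FileServerWriteSettings(
    max_concurrent_connections=4,
    copy_behavior='PreserveHierarchy',            # hypothetical copy behavior value
)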
- :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, - 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'file_filter': {'key': 'typeProperties.fileFilter', 'type': 'object'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, modified_datetime_start=None, modified_datetime_end=None, format=None, file_filter=None, compression=None, **kwargs) -> None: - super(FileShareDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.folder_path = folder_path - self.file_name = file_name - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end - self.format = format - self.file_filter = file_filter - self.compression = compression - self.type = 'FileShare' - - -class FileSystemSink(CopySink): - """A copy activity file system sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param copy_behavior: The type of copy behavior for copy sink. 
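# Example (sketch): the on-premises file share dataset defined above, referencing a
# linked service by name; the reference name and paths are hypothetical.
from azure.mgmt.datafactory.models import FileShareDataset, LinkedServiceReference

file_share_ds = FileShareDataset(
    linked_service_name=LinkedServiceReference(reference_name='FileServerLS'),
    folder_path='incoming/orders',                # hypothetical
    file_filter='orders_*.csv',                   # subset of files in folder_path
)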
- :type copy_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: - super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.copy_behavior = copy_behavior - self.type = 'FileSystemSink' - - -class FileSystemSource(CopySource): - """A copy activity file system source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, **kwargs) -> None: - super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.recursive = recursive - self.type = 'FileSystemSource' - - -class FilterActivity(ControlActivity): - """Filter and return results from input array based on the conditions. - - All required parameters must be populated in order to send to Azure. 
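# Example (sketch): wiring the file-system source and sink defined above into a copy
# activity. CopyActivity and DatasetReference live elsewhere in this package; the
# activity and dataset names are hypothetical.
from azure.mgmt.datafactory.models import (
    CopyActivity, DatasetReference, FileSystemSink, FileSystemSource)

copy_activity = CopyActivity(
    name='CopyFiles',
    source=FileSystemSource(recursive=True),
    sink=FileSystemSink(copy_behavior='PreserveHierarchy'),
    inputs=[DatasetReference(reference_name='SourceFiles')],
    outputs=[DatasetReference(reference_name='DestFiles')],
)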
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param items: Required. Input array on which filter should be applied. - :type items: ~azure.mgmt.datafactory.models.Expression - :param condition: Required. Condition to be used for filtering the input. - :type condition: ~azure.mgmt.datafactory.models.Expression - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'items': {'required': True}, - 'condition': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'items': {'key': 'typeProperties.items', 'type': 'Expression'}, - 'condition': {'key': 'typeProperties.condition', 'type': 'Expression'}, - } - - def __init__(self, *, name: str, items, condition, additional_properties=None, description: str=None, depends_on=None, user_properties=None, **kwargs) -> None: - super(FilterActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.items = items - self.condition = condition - self.type = 'Filter' - - -class ForEachActivity(ControlActivity): - """This activity is used for iterating over a collection and execute given - activities. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param is_sequential: Should the loop be executed in sequence or in - parallel (max 50) - :type is_sequential: bool - :param batch_count: Batch count to be used for controlling the number of - parallel execution (when isSequential is set to false). - :type batch_count: int - :param items: Required. Collection to iterate. - :type items: ~azure.mgmt.datafactory.models.Expression - :param activities: Required. List of activities to execute . 
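# Example (sketch): the FilterActivity defined above. Expression wraps a Data
# Factory expression string; the expressions here are hypothetical.
from azure.mgmt.datafactory.models import Expression, FilterActivity

filter_activity = FilterActivity(
    name='FilterCsvFiles',
    items=Expression(value="@activity('GetMetadata1').output.childItems"),
    condition=Expression(value="@endswith(item().name, '.csv')"),
)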
- :type activities: list[~azure.mgmt.datafactory.models.Activity] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'batch_count': {'maximum': 50}, - 'items': {'required': True}, - 'activities': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'is_sequential': {'key': 'typeProperties.isSequential', 'type': 'bool'}, - 'batch_count': {'key': 'typeProperties.batchCount', 'type': 'int'}, - 'items': {'key': 'typeProperties.items', 'type': 'Expression'}, - 'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'}, - } - - def __init__(self, *, name: str, items, activities, additional_properties=None, description: str=None, depends_on=None, user_properties=None, is_sequential: bool=None, batch_count: int=None, **kwargs) -> None: - super(ForEachActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.is_sequential = is_sequential - self.batch_count = batch_count - self.items = items - self.activities = activities - self.type = 'ForEach' - - -class FtpReadSettings(StoreReadSettings): - """Ftp read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - :param wildcard_folder_path: Ftp wildcardFolderPath. Type: string (or - Expression with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: Ftp wildcardFileName. Type: string (or - Expression with resultType string). - :type wildcard_file_name: object - :param use_binary_transfer: Specify whether to use binary transfer mode - for FTP stores. 
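# Example (sketch): the ForEachActivity defined above. batch_count only applies when
# is_sequential is false and is capped at 50 by the validation map; the inner
# activity reuses the copy_activity sketched earlier.
from azure.mgmt.datafactory.models import Expression, ForEachActivity

for_each = ForEachActivity(
    name='ForEachFile',
    items=Expression(value='@pipeline().parameters.fileNames'),
    is_sequential=False,
    batch_count=20,
    activities=[copy_activity],
)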
- :type use_binary_transfer: bool - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'}, - } - - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, use_binary_transfer: bool=None, **kwargs) -> None: - super(FtpReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.recursive = recursive - self.wildcard_folder_path = wildcard_folder_path - self.wildcard_file_name = wildcard_file_name - self.use_binary_transfer = use_binary_transfer - - -class FtpServerLinkedService(LinkedService): - """A FTP server Linked Service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. Host name of the FTP server. Type: string (or - Expression with resultType string). - :type host: object - :param port: The TCP port number that the FTP server uses to listen for - client connections. Default value is 21. Type: integer (or Expression with - resultType integer), minimum: 0. - :type port: object - :param authentication_type: The authentication type to be used to connect - to the FTP server. Possible values include: 'Basic', 'Anonymous' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.FtpAuthenticationType - :param user_name: Username to logon the FTP server. Type: string (or - Expression with resultType string). - :type user_name: object - :param password: Password to logon the FTP server. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - :param enable_ssl: If true, connect to the FTP server over SSL/TLS - channel. Default value is true. Type: boolean (or Expression with - resultType boolean). - :type enable_ssl: object - :param enable_server_certificate_validation: If true, validate the FTP - server SSL certificate when connect over SSL/TLS channel. Default value is - true. Type: boolean (or Expression with resultType boolean). 
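# Example (sketch): the FTP read settings defined above; like the file-server
# variant, 'type' is still an explicit argument in this version of the model.
from azure.mgmt.datafactory.models import FtpReadSettings

ftp_read = FtpReadSettings(
    type='FtpReadSettings',
    recursive=True,
    wildcard_file_name='*.zip',                   # hypothetical
    use_binary_transfer=True,
)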
- :type enable_server_certificate_validation: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, - } - - def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, enable_ssl=None, enable_server_certificate_validation=None, **kwargs) -> None: - super(FtpServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.port = port - self.authentication_type = authentication_type - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential - self.enable_ssl = enable_ssl - self.enable_server_certificate_validation = enable_server_certificate_validation - self.type = 'FtpServer' - - -class FtpServerLocation(DatasetLocation): - """The location of ftp server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). - :type file_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: - super(FtpServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) - - -class GetMetadataActivity(ExecutionActivity): - """Activity to get metadata of dataset. - - All required parameters must be populated in order to send to Azure. 
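# Example (sketch): the FTP linked service defined above, using basic authentication
# over an SSL/TLS channel; host and credentials are hypothetical.
from azure.mgmt.datafactory.models import FtpServerLinkedService, SecureString

ftp_ls = FtpServerLinkedService(
    host='ftp.example.com',
    port=21,
    authentication_type='Basic',
    user_name='ftpuser',
    password=SecureString(value='<password>'),
    enable_ssl=True,
)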
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param dataset: Required. GetMetadata activity dataset reference. - :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - :param field_list: Fields of metadata to get from dataset. - :type field_list: list[object] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'dataset': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, - 'field_list': {'key': 'typeProperties.fieldList', 'type': '[object]'}, - } - - def __init__(self, *, name: str, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, field_list=None, **kwargs) -> None: - super(GetMetadataActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.dataset = dataset - self.field_list = field_list - self.type = 'GetMetadata' - - -class GetSsisObjectMetadataRequest(Model): - """The request payload of get SSIS object metadata. - - :param metadata_path: Metadata path. - :type metadata_path: str - """ - - _attribute_map = { - 'metadata_path': {'key': 'metadataPath', 'type': 'str'}, - } - - def __init__(self, *, metadata_path: str=None, **kwargs) -> None: - super(GetSsisObjectMetadataRequest, self).__init__(**kwargs) - self.metadata_path = metadata_path - - -class GitHubAccessTokenRequest(Model): - """Get GitHub access token request definition. - - All required parameters must be populated in order to send to Azure. - - :param git_hub_access_code: Required. GitHub access code. - :type git_hub_access_code: str - :param git_hub_client_id: GitHub application client ID. - :type git_hub_client_id: str - :param git_hub_access_token_base_url: Required. GitHub access token base - URL. 
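# Example (sketch): the Get Metadata activity defined above; 'childItems' and
# 'lastModified' are standard field-list entries, the dataset name is hypothetical.
from azure.mgmt.datafactory.models import DatasetReference, GetMetadataActivity

get_metadata = GetMetadataActivity(
    name='GetMetadata1',
    dataset=DatasetReference(reference_name='SourceFiles'),
    field_list=['childItems', 'lastModified'],
)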
- :type git_hub_access_token_base_url: str - """ - - _validation = { - 'git_hub_access_code': {'required': True}, - 'git_hub_access_token_base_url': {'required': True}, - } - - _attribute_map = { - 'git_hub_access_code': {'key': 'gitHubAccessCode', 'type': 'str'}, - 'git_hub_client_id': {'key': 'gitHubClientId', 'type': 'str'}, - 'git_hub_access_token_base_url': {'key': 'gitHubAccessTokenBaseUrl', 'type': 'str'}, - } - - def __init__(self, *, git_hub_access_code: str, git_hub_access_token_base_url: str, git_hub_client_id: str=None, **kwargs) -> None: - super(GitHubAccessTokenRequest, self).__init__(**kwargs) - self.git_hub_access_code = git_hub_access_code - self.git_hub_client_id = git_hub_client_id - self.git_hub_access_token_base_url = git_hub_access_token_base_url - - -class GitHubAccessTokenResponse(Model): - """Get GitHub access token response definition. - - :param git_hub_access_token: GitHub access token. - :type git_hub_access_token: str - """ - - _attribute_map = { - 'git_hub_access_token': {'key': 'gitHubAccessToken', 'type': 'str'}, - } - - def __init__(self, *, git_hub_access_token: str=None, **kwargs) -> None: - super(GitHubAccessTokenResponse, self).__init__(**kwargs) - self.git_hub_access_token = git_hub_access_token - - -class GoogleAdWordsLinkedService(LinkedService): - """Google AdWords service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param client_customer_id: Required. The Client customer ID of the AdWords - account that you want to fetch report data for. - :type client_customer_id: object - :param developer_token: Required. The developer token associated with the - manager account that you use to grant access to the AdWords API. - :type developer_token: ~azure.mgmt.datafactory.models.SecretBase - :param authentication_type: Required. The OAuth 2.0 authentication - mechanism used for authentication. ServiceAuthentication can only be used - on self-hosted IR. Possible values include: 'ServiceAuthentication', - 'UserAuthentication' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.GoogleAdWordsAuthenticationType - :param refresh_token: The refresh token obtained from Google for - authorizing access to AdWords for UserAuthentication. - :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase - :param client_id: The client id of the google application used to acquire - the refresh token. - :type client_id: ~azure.mgmt.datafactory.models.SecretBase - :param client_secret: The client secret of the google application used to - acquire the refresh token. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param email: The service account email ID that is used for - ServiceAuthentication and can only be used on self-hosted IR. 
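# Example (sketch): the GitHub access-token request defined above; the access code
# comes from the OAuth redirect and is a hypothetical placeholder here.
from azure.mgmt.datafactory.models import GitHubAccessTokenRequest

token_request = GitHubAccessTokenRequest(
    git_hub_access_code='<code-from-oauth-redirect>',
    git_hub_access_token_base_url='https://github.mydomain.com',
)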
- :type email: object
- :param key_file_path: The full path to the .p12 key file that is used to
- authenticate the service account email address and can only be used on
- self-hosted IR.
- :type key_file_path: object
- :param trusted_cert_path: The full path of the .pem file containing
- trusted CA certificates for verifying the server when connecting over SSL.
- This property can only be set when using SSL on self-hosted IR. The
- default value is the cacerts.pem file installed with the IR.
- :type trusted_cert_path: object
- :param use_system_trust_store: Specifies whether to use a CA certificate
- from the system trust store or from a specified PEM file. The default
- value is false.
- :type use_system_trust_store: object
- :param encrypted_credential: The encrypted credential used for
- authentication. Credentials are encrypted using the integration runtime
- credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object
- """
-
- _validation = {
- 'type': {'required': True},
- 'client_customer_id': {'required': True},
- 'developer_token': {'required': True},
- 'authentication_type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
- 'description': {'key': 'description', 'type': 'str'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'client_customer_id': {'key': 'typeProperties.clientCustomerID', 'type': 'object'},
- 'developer_token': {'key': 'typeProperties.developerToken', 'type': 'SecretBase'},
- 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
- 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'},
- 'client_id': {'key': 'typeProperties.clientId', 'type': 'SecretBase'},
- 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'},
- 'email': {'key': 'typeProperties.email', 'type': 'object'},
- 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'},
- 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'},
- 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'},
- 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
- }
-
- def __init__(self, *, client_customer_id, developer_token, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, refresh_token=None, client_id=None, client_secret=None, email=None, key_file_path=None, trusted_cert_path=None, use_system_trust_store=None, encrypted_credential=None, **kwargs) -> None:
- super(GoogleAdWordsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
- self.client_customer_id = client_customer_id
- self.developer_token = developer_token
- self.authentication_type = authentication_type
- self.refresh_token = refresh_token
- self.client_id = client_id
- self.client_secret = client_secret
- self.email = email
- self.key_file_path = key_file_path
- self.trusted_cert_path = trusted_cert_path
- self.use_system_trust_store = use_system_trust_store
- self.encrypted_credential = encrypted_credential
- self.type = 'GoogleAdWords'
-
-
-class GoogleAdWordsObjectDataset(Dataset):
- """Google AdWords service dataset.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param description: Dataset description.
- :type description: str
- :param structure: Columns that define the structure of the dataset. Type:
- array (or Expression with resultType array), itemType: DatasetDataElement.
- :type structure: object
- :param schema: Columns that define the physical type schema of the
- dataset. Type: array (or Expression with resultType array), itemType:
- DatasetSchemaDataElement.
- :type schema: object
- :param linked_service_name: Required. Linked service reference.
- :type linked_service_name:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param parameters: Parameters for dataset.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- Dataset.
- :type annotations: list[object]
- :param folder: The folder that this Dataset is in. If not specified,
- Dataset will appear at the root level.
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
- :param type: Required. Constant filled by server.
- :type type: str
- :param table_name: The table name. Type: string (or Expression with
- resultType string).
- :type table_name: object
- """
-
- _validation = {
- 'linked_service_name': {'required': True},
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'description': {'key': 'description', 'type': 'str'},
- 'structure': {'key': 'structure', 'type': 'object'},
- 'schema': {'key': 'schema', 'type': 'object'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
- 'type': {'key': 'type', 'type': 'str'},
- 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
- }
-
- def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None:
- super(GoogleAdWordsObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
- self.table_name = table_name
- self.type = 'GoogleAdWordsObject'
-
-
-class GoogleAdWordsSource(CopySource):
- """A copy activity Google AdWords service source.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param source_retry_count: Source retry count. Type: integer (or
- Expression with resultType integer).
- :type source_retry_count: object
- :param source_retry_wait: Source retry wait. Type: string (or Expression
- with resultType string), pattern:
- ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
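# Example (sketch): the Google AdWords linked service and dataset defined above,
# using UserAuthentication; every id, token and name is a hypothetical placeholder.
from azure.mgmt.datafactory.models import (
    GoogleAdWordsLinkedService, GoogleAdWordsObjectDataset,
    LinkedServiceReference, SecureString)

adwords_ls = GoogleAdWordsLinkedService(
    client_customer_id='123-456-7890',
    developer_token=SecureString(value='<developer-token>'),
    authentication_type='UserAuthentication',
    refresh_token=SecureString(value='<refresh-token>'),
    client_id=SecureString(value='<client-id>'),
    client_secret=SecureString(value='<client-secret>'),
)
adwords_ds = GoogleAdWordsObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='GoogleAdWordsLS'),
    table_name='CAMPAIGN_PERFORMANCE_REPORT',     # hypothetical
)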
- :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'GoogleAdWordsSource' - - -class GoogleBigQueryLinkedService(LinkedService): - """Google BigQuery service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param project: Required. The default BigQuery project to query against. - :type project: object - :param additional_projects: A comma-separated list of public BigQuery - projects to access. - :type additional_projects: object - :param request_google_drive_scope: Whether to request access to Google - Drive. Allowing Google Drive access enables support for federated tables - that combine BigQuery data with data from Google Drive. The default value - is false. - :type request_google_drive_scope: object - :param authentication_type: Required. The OAuth 2.0 authentication - mechanism used for authentication. ServiceAuthentication can only be used - on self-hosted IR. Possible values include: 'ServiceAuthentication', - 'UserAuthentication' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.GoogleBigQueryAuthenticationType - :param refresh_token: The refresh token obtained from Google for - authorizing access to BigQuery for UserAuthentication. - :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase - :param client_id: The client id of the google application used to acquire - the refresh token. - :type client_id: ~azure.mgmt.datafactory.models.SecretBase - :param client_secret: The client secret of the google application used to - acquire the refresh token. 
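# Example (sketch): the AdWords copy source defined above with an AWQL-style query;
# the query text is hypothetical.
from azure.mgmt.datafactory.models import GoogleAdWordsSource

adwords_source = GoogleAdWordsSource(
    query='SELECT CampaignId, Clicks FROM CAMPAIGN_PERFORMANCE_REPORT',
)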
- :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
- :param email: The service account email ID that is used for
- ServiceAuthentication and can only be used on self-hosted IR.
- :type email: object
- :param key_file_path: The full path to the .p12 key file that is used to
- authenticate the service account email address and can only be used on
- self-hosted IR.
- :type key_file_path: object
- :param trusted_cert_path: The full path of the .pem file containing
- trusted CA certificates for verifying the server when connecting over SSL.
- This property can only be set when using SSL on self-hosted IR. The
- default value is the cacerts.pem file installed with the IR.
- :type trusted_cert_path: object
- :param use_system_trust_store: Specifies whether to use a CA certificate
- from the system trust store or from a specified PEM file. The default
- value is false.
- :type use_system_trust_store: object
- :param encrypted_credential: The encrypted credential used for
- authentication. Credentials are encrypted using the integration runtime
- credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object
- """
-
- _validation = {
- 'type': {'required': True},
- 'project': {'required': True},
- 'authentication_type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
- 'description': {'key': 'description', 'type': 'str'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'project': {'key': 'typeProperties.project', 'type': 'object'},
- 'additional_projects': {'key': 'typeProperties.additionalProjects', 'type': 'object'},
- 'request_google_drive_scope': {'key': 'typeProperties.requestGoogleDriveScope', 'type': 'object'},
- 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
- 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'},
- 'client_id': {'key': 'typeProperties.clientId', 'type': 'SecretBase'},
- 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'},
- 'email': {'key': 'typeProperties.email', 'type': 'object'},
- 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'},
- 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'},
- 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'},
- 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
- }
-
- def __init__(self, *, project, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, additional_projects=None, request_google_drive_scope=None, refresh_token=None, client_id=None, client_secret=None, email=None, key_file_path=None, trusted_cert_path=None, use_system_trust_store=None, encrypted_credential=None, **kwargs) -> None:
- super(GoogleBigQueryLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
- self.project = project
- self.additional_projects = additional_projects
- self.request_google_drive_scope = request_google_drive_scope
- self.authentication_type = authentication_type
- self.refresh_token = refresh_token
- self.client_id = client_id
- self.client_secret = client_secret
- self.email = email
- self.key_file_path = key_file_path
- self.trusted_cert_path = trusted_cert_path
- self.use_system_trust_store = use_system_trust_store
- self.encrypted_credential = encrypted_credential
- self.type = 'GoogleBigQuery'
-
-
-class GoogleBigQueryObjectDataset(Dataset):
- """Google BigQuery service dataset.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param description: Dataset description.
- :type description: str
- :param structure: Columns that define the structure of the dataset. Type:
- array (or Expression with resultType array), itemType: DatasetDataElement.
- :type structure: object
- :param schema: Columns that define the physical type schema of the
- dataset. Type: array (or Expression with resultType array), itemType:
- DatasetSchemaDataElement.
- :type schema: object
- :param linked_service_name: Required. Linked service reference.
- :type linked_service_name:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param parameters: Parameters for dataset.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- Dataset.
- :type annotations: list[object]
- :param folder: The folder that this Dataset is in. If not specified,
- Dataset will appear at the root level.
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
- :param type: Required. Constant filled by server.
- :type type: str
- :param table_name: This property will be retired. Please consider using
- database + table properties instead.
- :type table_name: object
- :param table: The table name of the Google BigQuery. Type: string (or
- Expression with resultType string).
- :type table: object
- :param dataset: The database name of the Google BigQuery. Type: string (or
- Expression with resultType string).
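# Example (sketch): the BigQuery linked service defined above with
# ServiceAuthentication, which per the docstring requires a self-hosted integration
# runtime; the project, email and key file path are hypothetical.
from azure.mgmt.datafactory.models import GoogleBigQueryLinkedService

bigquery_ls = GoogleBigQueryLinkedService(
    project='my-gcp-project',
    authentication_type='ServiceAuthentication',
    email='svc-account@my-gcp-project.iam.gserviceaccount.com',
    key_file_path='C:\\keys\\bigquery.p12',
)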
- :type dataset: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, dataset=None, **kwargs) -> None: - super(GoogleBigQueryObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.table = table - self.dataset = dataset - self.type = 'GoogleBigQueryObject' - - -class GoogleBigQuerySource(CopySource): - """A copy activity Google BigQuery service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
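# Example (sketch): the BigQuery dataset defined above; per the docstring, prefer
# the 'dataset' + 'table' pair over the retired 'table_name'. Names are hypothetical.
from azure.mgmt.datafactory.models import GoogleBigQueryObjectDataset, LinkedServiceReference

bigquery_ds = GoogleBigQueryObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='GoogleBigQueryLS'),
    dataset='analytics',
    table='events',
)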
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'GoogleBigQuerySource' - - -class GreenplumLinkedService(LinkedService): - """Greenplum Database linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection - string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
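For orientation, a minimal sketch of how the two Google BigQuery models above fit together in a copy scenario; the linked service name 'GoogleBigQueryLS' and the table identifiers are hypothetical:

from azure.mgmt.datafactory.models import (
    GoogleBigQueryObjectDataset, GoogleBigQuerySource, LinkedServiceReference)

# Dataset: 'dataset' carries the BigQuery database name and 'table' the table
# name, since tableName on its own is being retired.
bq_dataset = GoogleBigQueryObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='GoogleBigQueryLS'),
    dataset='billing',
    table='daily_costs')

# Source: any property typed 'object' also accepts an ADF expression.
bq_source = GoogleBigQuerySource(query='SELECT * FROM billing.daily_costs')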
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: - super(GreenplumLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.pwd = pwd - self.encrypted_credential = encrypted_credential - self.type = 'Greenplum' - - -class GreenplumSource(CopySource): - """A copy activity Greenplum Database source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'GreenplumSource' - - -class GreenplumTableDataset(Dataset): - """Greenplum Database dataset. - - All required parameters must be populated in order to send to Azure. 
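A minimal sketch of the Greenplum linked service and source above, assuming a Key Vault linked service named 'AzureKeyVaultLS'; the connection string and secret name are likewise hypothetical:

from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference, GreenplumLinkedService, GreenplumSource,
    LinkedServiceReference)

# Keep the password out of the ODBC connection string and resolve it from
# Key Vault at runtime through the 'pwd' reference instead.
greenplum_ls = GreenplumLinkedService(
    connection_string='HOST=gp.example.com;PORT=5432;DB=analytics;UID=loader',
    pwd=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='AzureKeyVaultLS'),
        secret_name='greenplum-password'))

greenplum_source = GreenplumSource(query='SELECT * FROM sales.orders')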
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of Greenplum. Type: string (or Expression - with resultType string). - :type table: object - :param greenplum_table_dataset_schema: The schema name of Greenplum. Type: - string (or Expression with resultType string). - :type greenplum_table_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'greenplum_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, greenplum_table_dataset_schema=None, **kwargs) -> None: - super(GreenplumTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.table = table - self.greenplum_table_dataset_schema = greenplum_table_dataset_schema - self.type = 'GreenplumTable' - - -class HBaseLinkedService(LinkedService): - """HBase server linked service. - - All required parameters must be populated in order to send to Azure. 
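Note the naming in the Greenplum dataset above: greenplum_table_dataset_schema serializes to typeProperties.schema (the database schema that owns the table) and is deliberately distinct from the top-level schema property, which holds column metadata. A short sketch with hypothetical names:

from azure.mgmt.datafactory.models import (
    GreenplumTableDataset, LinkedServiceReference)

# 'sales.orders' expressed as schema + table rather than the retiring
# tableName property.
gp_dataset = GreenplumTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='GreenplumLS'),
    greenplum_table_dataset_schema='sales',
    table='orders')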
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The IP address or host name of the HBase server. - (i.e. 192.168.222.160) - :type host: object - :param port: The TCP port that the HBase instance uses to listen for - client connections. The default value is 9090. - :type port: object - :param http_path: The partial URL corresponding to the HBase server. (i.e. - /gateway/sandbox/hbase/version) - :type http_path: object - :param authentication_type: Required. The authentication mechanism to use - to connect to the HBase server. Possible values include: 'Anonymous', - 'Basic' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.HBaseAuthenticationType - :param username: The user name used to connect to the HBase instance. - :type username: object - :param password: The password corresponding to the user name. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are - encrypted using SSL. The default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing - trusted CA certificates for verifying the server when connecting over SSL. - This property can only be set when using SSL on self-hosted IR. The - default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param allow_host_name_cn_mismatch: Specifies whether to require a - CA-issued SSL certificate name to match the host name of the server when - connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow - self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, http_path=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: - super(HBaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.port = port - self.http_path = http_path - self.authentication_type = authentication_type - self.username = username - self.password = password - self.enable_ssl = enable_ssl - self.trusted_cert_path = trusted_cert_path - self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch - self.allow_self_signed_server_cert = allow_self_signed_server_cert - self.encrypted_credential = encrypted_credential - self.type = 'HBase' - - -class HBaseObjectDataset(Dataset): - """HBase server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
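A minimal sketch of the HBase linked service defined above, using Basic authentication over SSL; the endpoint and credentials are hypothetical:

from azure.mgmt.datafactory.models import HBaseLinkedService, SecureString

hbase_ls = HBaseLinkedService(
    host='192.168.222.160',
    port=443,
    http_path='/gateway/sandbox/hbase/version',
    authentication_type='Basic',
    username='hbase_reader',
    password=SecureString(value='<password>'),
    enable_ssl=True)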
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(HBaseObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'HBaseObject' - - -class HBaseSource(CopySource): - """A copy activity HBase server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'HBaseSource' - - -class HdfsLinkedService(LinkedService): - """Hadoop Distributed File System (HDFS) linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param url: Required. The URL of the HDFS service endpoint, e.g. - http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with - resultType string). - :type url: object - :param authentication_type: Type of authentication used to connect to the - HDFS. Possible values are: Anonymous and Windows. Type: string (or - Expression with resultType string). - :type authentication_type: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - :param user_name: User name for Windows authentication. Type: string (or - Expression with resultType string). - :type user_name: object - :param password: Password for Windows authentication. 
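The HBase source above is normally embedded in a copy activity; a sketch, assuming the CopyActivity, DatasetReference and BlobSink models defined elsewhere in this package, with hypothetical dataset names:

from azure.mgmt.datafactory.models import (
    BlobSink, CopyActivity, DatasetReference, HBaseSource)

# Copy query results from HBase into blob storage.
copy_from_hbase = CopyActivity(
    name='CopyHBaseToBlob',
    inputs=[DatasetReference(reference_name='HBaseInputDataset')],
    outputs=[DatasetReference(reference_name='BlobOutputDataset')],
    source=HBaseSource(query='SELECT * FROM events'),
    sink=BlobSink())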
- :type password: ~azure.mgmt.datafactory.models.SecretBase - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - } - - def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, encrypted_credential=None, user_name=None, password=None, **kwargs) -> None: - super(HdfsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.url = url - self.authentication_type = authentication_type - self.encrypted_credential = encrypted_credential - self.user_name = user_name - self.password = password - self.type = 'Hdfs' - - -class HdfsLocation(DatasetLocation): - """The location of HDFS. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). - :type file_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: - super(HdfsLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) - - -class HdfsReadSettings(StoreReadSettings): - """HDFS read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). 
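A minimal sketch of the HDFS models above: a Windows-authenticated WebHDFS endpoint plus a dataset location. The URL, account and paths are hypothetical; note that in this version DatasetLocation subclasses still take their discriminator through the explicit type argument:

from azure.mgmt.datafactory.models import (
    HdfsLinkedService, HdfsLocation, SecureString)

hdfs_ls = HdfsLinkedService(
    url='http://namenode.example.com:50070/webhdfs/v1',
    authentication_type='Windows',
    user_name='CORP\\svc_adf',
    password=SecureString(value='<password>'))

hdfs_location = HdfsLocation(
    type='HdfsLocation',
    folder_path='/data/raw/2019/06',
    file_name='events.csv')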
- :type recursive: object - :param wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or - Expression with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: HDFS wildcardFileName. Type: string (or - Expression with resultType string). - :type wildcard_file_name: object - :param enable_partition_discovery: Indicates whether to enable partition - discovery. - :type enable_partition_discovery: bool - :param modified_datetime_start: The start of file's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: - string (or Expression with resultType string). - :type modified_datetime_end: object - :param distcp_settings: Specifies Distcp-related settings. - :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, - } - - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, distcp_settings=None, **kwargs) -> None: - super(HdfsReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.recursive = recursive - self.wildcard_folder_path = wildcard_folder_path - self.wildcard_file_name = wildcard_file_name - self.enable_partition_discovery = enable_partition_discovery - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end - self.distcp_settings = distcp_settings - - -class HdfsSource(CopySource): - """A copy activity HDFS source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param recursive: If true, files under the folder path will be read - recursively. Default is true. 
Type: boolean (or Expression with resultType - boolean). - :type recursive: object - :param distcp_settings: Specifies Distcp-related settings. - :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, distcp_settings=None, **kwargs) -> None: - super(HdfsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.recursive = recursive - self.distcp_settings = distcp_settings - self.type = 'HdfsSource' - - -class HDInsightHiveActivity(ExecutionActivity): - """HDInsight Hive activity type. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: - list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: 'None', - 'Always', 'Failure' - :type get_debug_info: str or - ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param script_path: Script path. Type: string (or Expression with - resultType string). - :type script_path: object - :param script_linked_service: Script linked service reference. - :type script_linked_service: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param defines: Allows user to specify defines for Hive job request. - :type defines: dict[str, object] - :param variables: User specified arguments under hivevar namespace. - :type variables: list[object] - :param query_timeout: Query timeout value (in minutes). 
Effective when the HDInsight cluster is created
-     with ESP (Enterprise Security Package).
-    :type query_timeout: int
-    """
-
-    _validation = {
-        'name': {'required': True},
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'name': {'key': 'name', 'type': 'str'},
-        'description': {'key': 'description', 'type': 'str'},
-        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
-        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
-        'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'},
-        'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'},
-        'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'},
-        'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'},
-        'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'},
-        'defines': {'key': 'typeProperties.defines', 'type': '{object}'},
-        'variables': {'key': 'typeProperties.variables', 'type': '[object]'},
-        'query_timeout': {'key': 'typeProperties.queryTimeout', 'type': 'int'},
-    }
-
-    def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, script_path=None, script_linked_service=None, defines=None, variables=None, query_timeout: int=None, **kwargs) -> None:
-        super(HDInsightHiveActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
-        self.storage_linked_services = storage_linked_services
-        self.arguments = arguments
-        self.get_debug_info = get_debug_info
-        self.script_path = script_path
-        self.script_linked_service = script_linked_service
-        self.defines = defines
-        self.variables = variables
-        self.query_timeout = query_timeout
-        self.type = 'HDInsightHive'
-
-
-class HDInsightLinkedService(LinkedService):
-    """HDInsight linked service.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param cluster_uri: Required. HDInsight cluster URI. Type: string (or
-     Expression with resultType string).
-    :type cluster_uri: object
-    :param user_name: HDInsight cluster user name. Type: string (or Expression
-     with resultType string).
-    :type user_name: object
-    :param password: HDInsight cluster password.
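A minimal sketch of the Hive activity defined above; the linked service names, script path and defines are hypothetical. query_timeout is only honored on ESP clusters, per the docstring:

from azure.mgmt.datafactory.models import (
    HDInsightHiveActivity, LinkedServiceReference)

hive_activity = HDInsightHiveActivity(
    name='RunDailyHiveScript',
    linked_service_name=LinkedServiceReference(reference_name='HDInsightLS'),
    script_path='scripts/daily_rollup.hql',
    script_linked_service=LinkedServiceReference(reference_name='ScriptStorageLS'),
    defines={'inputTable': 'raw_events', 'runDate': '2019-06-07'},
    get_debug_info='Failure',
    query_timeout=120)  # minutes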
- :type password: ~azure.mgmt.datafactory.models.SecretBase - :param linked_service_name: The Azure Storage linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param hcatalog_linked_service_name: A reference to the Azure SQL linked - service that points to the HCatalog database. - :type hcatalog_linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - :param is_esp_enabled: Specify if the HDInsight is created with ESP - (Enterprise Security Package). Type: Boolean. - :type is_esp_enabled: object - :param file_system: Specify the FileSystem if the main storage for the - HDInsight is ADLS Gen2. Type: string (or Expression with resultType - string). - :type file_system: object - """ - - _validation = { - 'type': {'required': True}, - 'cluster_uri': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'cluster_uri': {'key': 'typeProperties.clusterUri', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, - 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'is_esp_enabled': {'key': 'typeProperties.isEspEnabled', 'type': 'object'}, - 'file_system': {'key': 'typeProperties.fileSystem', 'type': 'object'}, - } - - def __init__(self, *, cluster_uri, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_name=None, password=None, linked_service_name=None, hcatalog_linked_service_name=None, encrypted_credential=None, is_esp_enabled=None, file_system=None, **kwargs) -> None: - super(HDInsightLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.cluster_uri = cluster_uri - self.user_name = user_name - self.password = password - self.linked_service_name = linked_service_name - self.hcatalog_linked_service_name = hcatalog_linked_service_name - self.encrypted_credential = encrypted_credential - self.is_esp_enabled = is_esp_enabled - self.file_system = file_system - self.type = 'HDInsight' - - -class HDInsightMapReduceActivity(ExecutionActivity): - """HDInsight MapReduce activity type. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. 
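The file_system property above is what this change adds for BYOC clusters backed by ADLS Gen2: point linked_service_name at an AzureBlobFS linked service and name the file system. A sketch with hypothetical values:

from azure.mgmt.datafactory.models import (
    HDInsightLinkedService, LinkedServiceReference, SecureString)

hdi_byoc_ls = HDInsightLinkedService(
    cluster_uri='https://mycluster.azurehdinsight.net',
    user_name='admin',
    password=SecureString(value='<password>'),
    # Reference to an AzureBlobFS (ADLS Gen2) linked service plus the file
    # system used as the cluster's primary storage.
    linked_service_name=LinkedServiceReference(reference_name='AdlsGen2LS'),
    file_system='clusterfs',
    is_esp_enabled=False)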
- :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: - list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: 'None', - 'Always', 'Failure' - :type get_debug_info: str or - ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param class_name: Required. Class name. Type: string (or Expression with - resultType string). - :type class_name: object - :param jar_file_path: Required. Jar path. Type: string (or Expression with - resultType string). - :type jar_file_path: object - :param jar_linked_service: Jar linked service reference. - :type jar_linked_service: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param jar_libs: Jar libs. - :type jar_libs: list[object] - :param defines: Allows user to specify defines for the MapReduce job - request. - :type defines: dict[str, object] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'class_name': {'required': True}, - 'jar_file_path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'class_name': {'key': 'typeProperties.className', 'type': 'object'}, - 'jar_file_path': {'key': 'typeProperties.jarFilePath', 'type': 'object'}, - 'jar_linked_service': {'key': 'typeProperties.jarLinkedService', 'type': 'LinkedServiceReference'}, - 'jar_libs': {'key': 'typeProperties.jarLibs', 'type': '[object]'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, - } - - def __init__(self, *, name: str, class_name, jar_file_path, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, jar_linked_service=None, jar_libs=None, defines=None, **kwargs) -> None: - super(HDInsightMapReduceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - 
self.storage_linked_services = storage_linked_services
-        self.arguments = arguments
-        self.get_debug_info = get_debug_info
-        self.class_name = class_name
-        self.jar_file_path = jar_file_path
-        self.jar_linked_service = jar_linked_service
-        self.jar_libs = jar_libs
-        self.defines = defines
-        self.type = 'HDInsightMapReduce'
-
-
-class HDInsightOnDemandLinkedService(LinkedService):
-    """HDInsight on-demand linked service.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param cluster_size: Required. Number of worker/data nodes in the cluster.
-     Suggested value: 4. Type: string (or Expression with resultType string).
-    :type cluster_size: object
-    :param time_to_live: Required. The allowed idle time for the on-demand
-     HDInsight cluster. Specifies how long the on-demand HDInsight cluster
-     stays alive after completion of an activity run if there are no other
-     active jobs in the cluster. The minimum value is 5 mins. Type: string (or
-     Expression with resultType string).
-    :type time_to_live: object
-    :param version: Required. Version of the HDInsight cluster. Type: string
-     (or Expression with resultType string).
-    :type version: object
-    :param linked_service_name: Required. Azure Storage linked service to be
-     used by the on-demand cluster for storing and processing data.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param host_subscription_id: Required. The customer’s subscription to host
-     the cluster. Type: string (or Expression with resultType string).
-    :type host_subscription_id: object
-    :param service_principal_id: The service principal id for the
-     hostSubscriptionId. Type: string (or Expression with resultType string).
-    :type service_principal_id: object
-    :param service_principal_key: The key for the service principal id.
-    :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
-    :param tenant: Required. The Tenant id/name to which the service principal
-     belongs. Type: string (or Expression with resultType string).
-    :type tenant: object
-    :param cluster_resource_group: Required. The resource group where the
-     cluster belongs. Type: string (or Expression with resultType string).
-    :type cluster_resource_group: object
-    :param cluster_name_prefix: The prefix of the cluster name; a timestamp
-     postfix makes the name distinct. Type: string (or Expression with
-     resultType string).
-    :type cluster_name_prefix: object
-    :param cluster_user_name: The username to access the cluster. Type: string
-     (or Expression with resultType string).
-    :type cluster_user_name: object
-    :param cluster_password: The password to access the cluster.
-    :type cluster_password: ~azure.mgmt.datafactory.models.SecretBase
-    :param cluster_ssh_user_name: The username used to SSH remotely into the
-     cluster’s node (for Linux).
Type: string (or Expression with resultType
-     string).
-    :type cluster_ssh_user_name: object
-    :param cluster_ssh_password: The password used to SSH remotely into the
-     cluster’s node (for Linux).
-    :type cluster_ssh_password: ~azure.mgmt.datafactory.models.SecretBase
-    :param additional_linked_service_names: Specifies additional storage
-     accounts for the HDInsight linked service so that the Data Factory
-     service can register them on your behalf.
-    :type additional_linked_service_names:
-     list[~azure.mgmt.datafactory.models.LinkedServiceReference]
-    :param hcatalog_linked_service_name: The name of the Azure SQL linked
-     service that points to the HCatalog database. The on-demand HDInsight
-     cluster is created by using the Azure SQL database as the metastore.
-    :type hcatalog_linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param cluster_type: The cluster type. Type: string (or Expression with
-     resultType string).
-    :type cluster_type: object
-    :param spark_version: The version of Spark if the cluster type is 'spark'.
-     Type: string (or Expression with resultType string).
-    :type spark_version: object
-    :param core_configuration: Specifies the core configuration parameters (as
-     in core-site.xml) for the HDInsight cluster to be created.
-    :type core_configuration: object
-    :param h_base_configuration: Specifies the HBase configuration parameters
-     (hbase-site.xml) for the HDInsight cluster.
-    :type h_base_configuration: object
-    :param hdfs_configuration: Specifies the HDFS configuration parameters
-     (hdfs-site.xml) for the HDInsight cluster.
-    :type hdfs_configuration: object
-    :param hive_configuration: Specifies the Hive configuration parameters
-     (hive-site.xml) for the HDInsight cluster.
-    :type hive_configuration: object
-    :param map_reduce_configuration: Specifies the MapReduce configuration
-     parameters (mapred-site.xml) for the HDInsight cluster.
-    :type map_reduce_configuration: object
-    :param oozie_configuration: Specifies the Oozie configuration parameters
-     (oozie-site.xml) for the HDInsight cluster.
-    :type oozie_configuration: object
-    :param storm_configuration: Specifies the Storm configuration parameters
-     (storm-site.xml) for the HDInsight cluster.
-    :type storm_configuration: object
-    :param yarn_configuration: Specifies the Yarn configuration parameters
-     (yarn-site.xml) for the HDInsight cluster.
-    :type yarn_configuration: object
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
-    :type encrypted_credential: object
-    :param head_node_size: Specifies the size of the head node for the
-     HDInsight cluster.
-    :type head_node_size: object
-    :param data_node_size: Specifies the size of the data node for the
-     HDInsight cluster.
-    :type data_node_size: object
-    :param zookeeper_node_size: Specifies the size of the ZooKeeper node for
-     the HDInsight cluster.
-    :type zookeeper_node_size: object
-    :param script_actions: Custom script actions to run on the HDI on-demand
-     cluster once it's up. Please refer to
-     https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions.
- :type script_actions: list[~azure.mgmt.datafactory.models.ScriptAction] - :param virtual_network_id: The ARM resource ID for the vNet to which the - cluster should be joined after creation. Type: string (or Expression with - resultType string). - :type virtual_network_id: object - :param subnet_name: The ARM resource ID for the subnet in the vNet. If - virtualNetworkId was specified, then this property is required. Type: - string (or Expression with resultType string). - :type subnet_name: object - """ - - _validation = { - 'type': {'required': True}, - 'cluster_size': {'required': True}, - 'time_to_live': {'required': True}, - 'version': {'required': True}, - 'linked_service_name': {'required': True}, - 'host_subscription_id': {'required': True}, - 'tenant': {'required': True}, - 'cluster_resource_group': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'cluster_size': {'key': 'typeProperties.clusterSize', 'type': 'object'}, - 'time_to_live': {'key': 'typeProperties.timeToLive', 'type': 'object'}, - 'version': {'key': 'typeProperties.version', 'type': 'object'}, - 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, - 'host_subscription_id': {'key': 'typeProperties.hostSubscriptionId', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'cluster_resource_group': {'key': 'typeProperties.clusterResourceGroup', 'type': 'object'}, - 'cluster_name_prefix': {'key': 'typeProperties.clusterNamePrefix', 'type': 'object'}, - 'cluster_user_name': {'key': 'typeProperties.clusterUserName', 'type': 'object'}, - 'cluster_password': {'key': 'typeProperties.clusterPassword', 'type': 'SecretBase'}, - 'cluster_ssh_user_name': {'key': 'typeProperties.clusterSshUserName', 'type': 'object'}, - 'cluster_ssh_password': {'key': 'typeProperties.clusterSshPassword', 'type': 'SecretBase'}, - 'additional_linked_service_names': {'key': 'typeProperties.additionalLinkedServiceNames', 'type': '[LinkedServiceReference]'}, - 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, - 'cluster_type': {'key': 'typeProperties.clusterType', 'type': 'object'}, - 'spark_version': {'key': 'typeProperties.sparkVersion', 'type': 'object'}, - 'core_configuration': {'key': 'typeProperties.coreConfiguration', 'type': 'object'}, - 'h_base_configuration': {'key': 'typeProperties.hBaseConfiguration', 'type': 'object'}, - 'hdfs_configuration': {'key': 'typeProperties.hdfsConfiguration', 'type': 'object'}, - 'hive_configuration': {'key': 'typeProperties.hiveConfiguration', 'type': 'object'}, - 'map_reduce_configuration': {'key': 'typeProperties.mapReduceConfiguration', 'type': 'object'}, - 'oozie_configuration': {'key': 'typeProperties.oozieConfiguration', 'type': 'object'}, - 'storm_configuration': {'key': 'typeProperties.stormConfiguration', 'type': 'object'}, - 'yarn_configuration': {'key': 'typeProperties.yarnConfiguration', 'type': 'object'}, - 
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'head_node_size': {'key': 'typeProperties.headNodeSize', 'type': 'object'}, - 'data_node_size': {'key': 'typeProperties.dataNodeSize', 'type': 'object'}, - 'zookeeper_node_size': {'key': 'typeProperties.zookeeperNodeSize', 'type': 'object'}, - 'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'}, - 'virtual_network_id': {'key': 'typeProperties.virtualNetworkId', 'type': 'object'}, - 'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'}, - } - - def __init__(self, *, cluster_size, time_to_live, version, linked_service_name, host_subscription_id, tenant, cluster_resource_group, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, cluster_name_prefix=None, cluster_user_name=None, cluster_password=None, cluster_ssh_user_name=None, cluster_ssh_password=None, additional_linked_service_names=None, hcatalog_linked_service_name=None, cluster_type=None, spark_version=None, core_configuration=None, h_base_configuration=None, hdfs_configuration=None, hive_configuration=None, map_reduce_configuration=None, oozie_configuration=None, storm_configuration=None, yarn_configuration=None, encrypted_credential=None, head_node_size=None, data_node_size=None, zookeeper_node_size=None, script_actions=None, virtual_network_id=None, subnet_name=None, **kwargs) -> None: - super(HDInsightOnDemandLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.cluster_size = cluster_size - self.time_to_live = time_to_live - self.version = version - self.linked_service_name = linked_service_name - self.host_subscription_id = host_subscription_id - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.cluster_resource_group = cluster_resource_group - self.cluster_name_prefix = cluster_name_prefix - self.cluster_user_name = cluster_user_name - self.cluster_password = cluster_password - self.cluster_ssh_user_name = cluster_ssh_user_name - self.cluster_ssh_password = cluster_ssh_password - self.additional_linked_service_names = additional_linked_service_names - self.hcatalog_linked_service_name = hcatalog_linked_service_name - self.cluster_type = cluster_type - self.spark_version = spark_version - self.core_configuration = core_configuration - self.h_base_configuration = h_base_configuration - self.hdfs_configuration = hdfs_configuration - self.hive_configuration = hive_configuration - self.map_reduce_configuration = map_reduce_configuration - self.oozie_configuration = oozie_configuration - self.storm_configuration = storm_configuration - self.yarn_configuration = yarn_configuration - self.encrypted_credential = encrypted_credential - self.head_node_size = head_node_size - self.data_node_size = data_node_size - self.zookeeper_node_size = zookeeper_node_size - self.script_actions = script_actions - self.virtual_network_id = virtual_network_id - self.subnet_name = subnet_name - self.type = 'HDInsightOnDemand' - - -class HDInsightPigActivity(ExecutionActivity): - """HDInsight Pig activity type. - - All required parameters must be populated in order to send to Azure. 
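virtual_network_id and subnet_name above are the other half of this change: they let the on-demand cluster be created inside an existing vNet, and subnet_name becomes required as soon as virtualNetworkId is set. A sketch with hypothetical IDs and names:

from azure.mgmt.datafactory.models import (
    HDInsightOnDemandLinkedService, LinkedServiceReference, SecureString)

vnet_id = ('/subscriptions/<subscription-id>/resourceGroups/rg-net'
           '/providers/Microsoft.Network/virtualNetworks/vnet-data')

ondemand_ls = HDInsightOnDemandLinkedService(
    cluster_size=4,
    time_to_live='00:15:00',
    version='3.6',
    linked_service_name=LinkedServiceReference(reference_name='StorageLS'),
    host_subscription_id='<subscription-id>',
    service_principal_id='<sp-app-id>',
    service_principal_key=SecureString(value='<sp-key>'),
    tenant='<tenant-id>',
    cluster_resource_group='rg-hdi-ondemand',
    virtual_network_id=vnet_id,
    # Per the docstring, this is the full ARM resource ID of the subnet,
    # not just its short name.
    subnet_name=vnet_id + '/subnets/hdi')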
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: - list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: 'None', - 'Always', 'Failure' - :type get_debug_info: str or - ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param script_path: Script path. Type: string (or Expression with - resultType string). - :type script_path: object - :param script_linked_service: Script linked service reference. - :type script_linked_service: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param defines: Allows user to specify defines for Pig job request. - :type defines: dict[str, object] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, - 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, - } - - def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, script_path=None, script_linked_service=None, defines=None, **kwargs) -> None: - super(HDInsightPigActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.storage_linked_services = storage_linked_services - self.arguments = arguments - self.get_debug_info = get_debug_info - self.script_path = script_path - self.script_linked_service = 
script_linked_service - self.defines = defines - self.type = 'HDInsightPig' - - -class HDInsightSparkActivity(ExecutionActivity): - """HDInsight Spark activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param root_path: Required. The root path in 'sparkJobLinkedService' for - all the job’s files. Type: string (or Expression with resultType string). - :type root_path: object - :param entry_file_path: Required. The relative path to the root folder of - the code/package to be executed. Type: string (or Expression with - resultType string). - :type entry_file_path: object - :param arguments: The user-specified arguments to HDInsightSparkActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: 'None', - 'Always', 'Failure' - :type get_debug_info: str or - ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param spark_job_linked_service: The storage linked service for uploading - the entry file and dependencies, and for receiving logs. - :type spark_job_linked_service: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param class_name: The application's Java/Spark main class. - :type class_name: str - :param proxy_user: The user to impersonate that will execute the job. - Type: string (or Expression with resultType string). - :type proxy_user: object - :param spark_config: Spark configuration property. 
- :type spark_config: dict[str, object] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'root_path': {'required': True}, - 'entry_file_path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'root_path': {'key': 'typeProperties.rootPath', 'type': 'object'}, - 'entry_file_path': {'key': 'typeProperties.entryFilePath', 'type': 'object'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'spark_job_linked_service': {'key': 'typeProperties.sparkJobLinkedService', 'type': 'LinkedServiceReference'}, - 'class_name': {'key': 'typeProperties.className', 'type': 'str'}, - 'proxy_user': {'key': 'typeProperties.proxyUser', 'type': 'object'}, - 'spark_config': {'key': 'typeProperties.sparkConfig', 'type': '{object}'}, - } - - def __init__(self, *, name: str, root_path, entry_file_path, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, arguments=None, get_debug_info=None, spark_job_linked_service=None, class_name: str=None, proxy_user=None, spark_config=None, **kwargs) -> None: - super(HDInsightSparkActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.root_path = root_path - self.entry_file_path = entry_file_path - self.arguments = arguments - self.get_debug_info = get_debug_info - self.spark_job_linked_service = spark_job_linked_service - self.class_name = class_name - self.proxy_user = proxy_user - self.spark_config = spark_config - self.type = 'HDInsightSpark' - - -class HDInsightStreamingActivity(ExecutionActivity): - """HDInsight streaming activity type. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: - list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. 
- :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: 'None', - 'Always', 'Failure' - :type get_debug_info: str or - ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param mapper: Required. Mapper executable name. Type: string (or - Expression with resultType string). - :type mapper: object - :param reducer: Required. Reducer executable name. Type: string (or - Expression with resultType string). - :type reducer: object - :param input: Required. Input blob path. Type: string (or Expression with - resultType string). - :type input: object - :param output: Required. Output blob path. Type: string (or Expression - with resultType string). - :type output: object - :param file_paths: Required. Paths to streaming job files. Can be - directories. - :type file_paths: list[object] - :param file_linked_service: Linked service reference where the files are - located. - :type file_linked_service: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param combiner: Combiner executable name. Type: string (or Expression - with resultType string). - :type combiner: object - :param command_environment: Command line environment values. - :type command_environment: list[object] - :param defines: Allows user to specify defines for streaming job request. - :type defines: dict[str, object] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'mapper': {'required': True}, - 'reducer': {'required': True}, - 'input': {'required': True}, - 'output': {'required': True}, - 'file_paths': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'mapper': {'key': 'typeProperties.mapper', 'type': 'object'}, - 'reducer': {'key': 'typeProperties.reducer', 'type': 'object'}, - 'input': {'key': 'typeProperties.input', 'type': 'object'}, - 'output': {'key': 'typeProperties.output', 'type': 'object'}, - 'file_paths': {'key': 'typeProperties.filePaths', 'type': '[object]'}, - 'file_linked_service': {'key': 'typeProperties.fileLinkedService', 'type': 'LinkedServiceReference'}, - 'combiner': {'key': 'typeProperties.combiner', 'type': 'object'}, - 'command_environment': {'key': 'typeProperties.commandEnvironment', 'type': '[object]'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, - } - - def __init__(self, *, name: str, mapper, reducer, input, output, file_paths, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, file_linked_service=None, combiner=None, command_environment=None, defines=None, **kwargs) -> None: - super(HDInsightStreamingActivity, self).__init__(additional_properties=additional_properties, name=name, 
description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.storage_linked_services = storage_linked_services - self.arguments = arguments - self.get_debug_info = get_debug_info - self.mapper = mapper - self.reducer = reducer - self.input = input - self.output = output - self.file_paths = file_paths - self.file_linked_service = file_linked_service - self.combiner = combiner - self.command_environment = command_environment - self.defines = defines - self.type = 'HDInsightStreaming' - - -class HiveLinkedService(LinkedService): - """Hive Server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. IP address or host name of the Hive server, - separated by ';' for multiple hosts (only when serviceDiscoveryMode is - enabled). - :type host: object - :param port: The TCP port that the Hive server uses to listen for client - connections. - :type port: object - :param server_type: The type of Hive server. Possible values include: - 'HiveServer1', 'HiveServer2', 'HiveThriftServer' - :type server_type: str or ~azure.mgmt.datafactory.models.HiveServerType - :param thrift_transport_protocol: The transport protocol to use in the - Thrift layer. Possible values include: 'Binary', 'SASL', 'HTTP ' - :type thrift_transport_protocol: str or - ~azure.mgmt.datafactory.models.HiveThriftTransportProtocol - :param authentication_type: Required. The authentication method used to - access the Hive server. Possible values include: 'Anonymous', 'Username', - 'UsernameAndPassword', 'WindowsAzureHDInsightService' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.HiveAuthenticationType - :param service_discovery_mode: true to indicate using the ZooKeeper - service, false if not. - :type service_discovery_mode: object - :param zoo_keeper_name_space: The namespace on ZooKeeper under which Hive - Server 2 nodes are added. - :type zoo_keeper_name_space: object - :param use_native_query: Specifies whether the driver uses native HiveQL - queries, or converts them into an equivalent form in HiveQL. - :type use_native_query: object - :param username: The user name that you use to access Hive Server. - :type username: object - :param password: The password corresponding to the user name that you - provided in the Username field. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param http_path: The partial URL corresponding to the Hive server. - :type http_path: object - :param enable_ssl: Specifies whether the connections to the server are - encrypted using SSL. The default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing - trusted CA certificates for verifying the server when connecting over SSL.
- This property can only be set when using SSL on self-hosted IR. The - default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate - from the system trust store or from a specified PEM file. The default - value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a - CA-issued SSL certificate name to match the host name of the server when - connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow - self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, - 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'service_discovery_mode': {'key': 'typeProperties.serviceDiscoveryMode', 'type': 'object'}, - 'zoo_keeper_name_space': {'key': 'typeProperties.zooKeeperNameSpace', 'type': 'object'}, - 'use_native_query': {'key': 'typeProperties.useNativeQuery', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, server_type=None, thrift_transport_protocol=None, service_discovery_mode=None, zoo_keeper_name_space=None, use_native_query=None, username=None, password=None, http_path=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: - super(HiveLinkedService, 
self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.port = port - self.server_type = server_type - self.thrift_transport_protocol = thrift_transport_protocol - self.authentication_type = authentication_type - self.service_discovery_mode = service_discovery_mode - self.zoo_keeper_name_space = zoo_keeper_name_space - self.use_native_query = use_native_query - self.username = username - self.password = password - self.http_path = http_path - self.enable_ssl = enable_ssl - self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch - self.allow_self_signed_server_cert = allow_self_signed_server_cert - self.encrypted_credential = encrypted_credential - self.type = 'Hive' - - -class HiveObjectDataset(Dataset): - """Hive Server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of the Hive. Type: string (or Expression with - resultType string). - :type table: object - :param hive_object_dataset_schema: The schema name of the Hive. Type: - string (or Expression with resultType string). 
- :type hive_object_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'hive_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, hive_object_dataset_schema=None, **kwargs) -> None: - super(HiveObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.table = table - self.hive_object_dataset_schema = hive_object_dataset_schema - self.type = 'HiveObject' - - -class HiveSource(CopySource): - """A copy activity Hive Server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'HiveSource' - - -class HttpDataset(Dataset): - """A file on an HTTP web server. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param relative_url: The relative URL based on the URL in the - HttpLinkedService that refers to an HTTP file. Type: string (or Expression with - resultType string). - :type relative_url: object - :param request_method: The HTTP method for the HTTP request. Type: string - (or Expression with resultType string). - :type request_method: object - :param request_body: The body for the HTTP request. Type: string (or - Expression with resultType string). - :type request_body: object - :param additional_headers: The headers for the HTTP Request. e.g. - request-header-name-1:request-header-value-1 - ... - request-header-name-n:request-header-value-n. Type: string (or Expression - with resultType string). - :type additional_headers: object - :param format: The format of files. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used on files.
- :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, - 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, - 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, - 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, relative_url=None, request_method=None, request_body=None, additional_headers=None, format=None, compression=None, **kwargs) -> None: - super(HttpDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.relative_url = relative_url - self.request_method = request_method - self.request_body = request_body - self.additional_headers = additional_headers - self.format = format - self.compression = compression - self.type = 'HttpFile' - - -class HttpLinkedService(LinkedService): - """Linked service for an HTTP source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param url: Required. The base URL of the HTTP endpoint, e.g. - http://www.microsoft.com. Type: string (or Expression with resultType - string). - :type url: object - :param authentication_type: The authentication type to be used to connect - to the HTTP server. Possible values include: 'Basic', 'Anonymous', - 'Digest', 'Windows', 'ClientCertificate' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.HttpAuthenticationType - :param user_name: User name for Basic, Digest, or Windows authentication. - Type: string (or Expression with resultType string). 
- :type user_name: object - :param password: Password for Basic, Digest, Windows, or ClientCertificate - with EmbeddedCertData authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param embedded_cert_data: Base64 encoded certificate data for - ClientCertificate authentication. For on-premises copy with - ClientCertificate authentication, either CertThumbprint or - EmbeddedCertData/Password should be specified. Type: string (or Expression - with resultType string). - :type embedded_cert_data: object - :param cert_thumbprint: Thumbprint of certificate for ClientCertificate - authentication. Only valid for on-premises copy. For on-premises copy with - ClientCertificate authentication, either CertThumbprint or - EmbeddedCertData/Password should be specified. Type: string (or Expression - with resultType string). - :type cert_thumbprint: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - :param enable_server_certificate_validation: If true, validate the HTTPS - server SSL certificate. Default value is true. Type: boolean (or - Expression with resultType boolean). - :type enable_server_certificate_validation: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'embedded_cert_data': {'key': 'typeProperties.embeddedCertData', 'type': 'object'}, - 'cert_thumbprint': {'key': 'typeProperties.certThumbprint', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, - } - - def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, user_name=None, password=None, embedded_cert_data=None, cert_thumbprint=None, encrypted_credential=None, enable_server_certificate_validation=None, **kwargs) -> None: - super(HttpLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.url = url - self.authentication_type = authentication_type - self.user_name = user_name - self.password = password - self.embedded_cert_data = embedded_cert_data - self.cert_thumbprint = cert_thumbprint - self.encrypted_credential = encrypted_credential - self.enable_server_certificate_validation = enable_server_certificate_validation - self.type = 'HttpServer' - - -class HttpReadSettings(StoreReadSettings): - """Http read settings.
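A minimal sketch of constructing the HttpLinkedService defined above, assuming Basic authentication; the endpoint and credentials are placeholders, not values from this patch:

from azure.mgmt.datafactory.models import HttpLinkedService, SecureString

http_ls = HttpLinkedService(
    url='https://example.com/data/',   # placeholder base URL
    authentication_type='Basic',
    user_name='reader',
    password=SecureString(value='<password>'),
    # Leave HTTPS certificate validation on unless the server uses a self-signed cert.
    enable_server_certificate_validation=True,
)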
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param request_method: The HTTP method used to call the RESTful API. The - default is GET. Type: string (or Expression with resultType string). - :type request_method: object - :param request_body: The HTTP request body to the RESTful API if - requestMethod is POST. Type: string (or Expression with resultType - string). - :type request_body: object - :param additional_headers: The additional HTTP headers in the request to - the RESTful API. Type: string (or Expression with resultType string). - :type additional_headers: object - :param request_timeout: Specifies the timeout for an HTTP client to get - an HTTP response from the HTTP server. - :type request_timeout: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'request_method': {'key': 'requestMethod', 'type': 'object'}, - 'request_body': {'key': 'requestBody', 'type': 'object'}, - 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, - 'request_timeout': {'key': 'requestTimeout', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, request_method=None, request_body=None, additional_headers=None, request_timeout=None, **kwargs) -> None: - super(HttpReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.request_method = request_method - self.request_body = request_body - self.additional_headers = additional_headers - self.request_timeout = request_timeout - - -class HttpServerLocation(DatasetLocation): - """The location of an HTTP server. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). - :type file_name: object - :param relative_url: Specify the relativeUrl of the HTTP server.
Type: string - (or Expression with resultType string) - :type relative_url: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - 'relative_url': {'key': 'relativeUrl', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, relative_url=None, **kwargs) -> None: - super(HttpServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) - self.relative_url = relative_url - - -class HttpSource(CopySource): - """A copy activity source for an HTTP file. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param http_request_timeout: Specifies the timeout for an HTTP client to - get an HTTP response from the HTTP server. The default value is equivalent to - System.Net.HttpWebRequest.Timeout. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, http_request_timeout=None, **kwargs) -> None: - super(HttpSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.http_request_timeout = http_request_timeout - self.type = 'HttpSource' - - -class HubspotLinkedService(LinkedService): - """Hubspot Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description.
- :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param client_id: Required. The client ID associated with your Hubspot - application. - :type client_id: object - :param client_secret: The client secret associated with your Hubspot - application. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param access_token: The access token obtained when initially - authenticating your OAuth integration. - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param refresh_token: The refresh token obtained when initially - authenticating your OAuth integration. - :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, access_token=None, refresh_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(HubspotLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.client_id = client_id - 
self.client_secret = client_secret - self.access_token = access_token - self.refresh_token = refresh_token - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'Hubspot' - - -class HubspotObjectDataset(Dataset): - """Hubspot Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(HubspotObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'HubspotObject' - - -class HubspotSource(CopySource): - """A copy activity Hubspot Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). 
- :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'HubspotSource' - - -class IfConditionActivity(ControlActivity): - """This activity evaluates a boolean expression and executes either the - activities under the ifTrueActivities property or the ifFalseActivities - property depending on the result of the expression. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param expression: Required. An expression that would evaluate to Boolean. - This is used to determine the block of activities (ifTrueActivities or - ifFalseActivities) that will be executed. - :type expression: ~azure.mgmt.datafactory.models.Expression - :param if_true_activities: List of activities to execute if expression is - evaluated to true. This is an optional property and if not provided, the - activity will exit without any action. - :type if_true_activities: list[~azure.mgmt.datafactory.models.Activity] - :param if_false_activities: List of activities to execute if expression is - evaluated to false. This is an optional property and if not provided, the - activity will exit without any action. 
- :type if_false_activities: list[~azure.mgmt.datafactory.models.Activity] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'expression': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'expression': {'key': 'typeProperties.expression', 'type': 'Expression'}, - 'if_true_activities': {'key': 'typeProperties.ifTrueActivities', 'type': '[Activity]'}, - 'if_false_activities': {'key': 'typeProperties.ifFalseActivities', 'type': '[Activity]'}, - } - - def __init__(self, *, name: str, expression, additional_properties=None, description: str=None, depends_on=None, user_properties=None, if_true_activities=None, if_false_activities=None, **kwargs) -> None: - super(IfConditionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.expression = expression - self.if_true_activities = if_true_activities - self.if_false_activities = if_false_activities - self.type = 'IfCondition' - - -class ImpalaLinkedService(LinkedService): - """Impala server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The IP address or host name of the Impala server. - (e.g. 192.168.222.160) - :type host: object - :param port: The TCP port that the Impala server uses to listen for client - connections. The default value is 21050. - :type port: object - :param authentication_type: Required. The authentication type to use. - Possible values include: 'Anonymous', 'SASLUsername', - 'UsernameAndPassword' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.ImpalaAuthenticationType - :param username: The user name used to access the Impala server. The - default value is anonymous when using SASLUsername. - :type username: object - :param password: The password corresponding to the user name when using - UsernameAndPassword. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are - encrypted using SSL. The default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing - trusted CA certificates for verifying the server when connecting over SSL. - This property can only be set when using SSL on self-hosted IR. The - default value is the cacerts.pem file installed with the IR.
- :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate - from the system trust store or from a specified PEM file. The default - value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a - CA-issued SSL certificate name to match the host name of the server when - connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow - self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: - super(ImpalaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.port = port - self.authentication_type = authentication_type - self.username = username - self.password = password - self.enable_ssl = enable_ssl - self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch - self.allow_self_signed_server_cert = allow_self_signed_server_cert - self.encrypted_credential = encrypted_credential - self.type = 'Impala' - - -class ImpalaObjectDataset(Dataset): - """Impala server dataset. - - All required parameters must be populated in order to send to Azure. 
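A minimal sketch of the ImpalaLinkedService defined above with SSL enabled; the credentials are placeholders, and the host reuses the docstring's example address:

from azure.mgmt.datafactory.models import ImpalaLinkedService, SecureString

impala_ls = ImpalaLinkedService(
    host='192.168.222.160',        # example address from the docstring
    port=21050,                    # default Impala client port
    authentication_type='UsernameAndPassword',
    username='analyst',
    password=SecureString(value='<password>'),
    enable_ssl=True,
    use_system_trust_store=True,   # trust the system CA store instead of a PEM file
)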
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of the Impala. Type: string (or Expression - with resultType string). - :type table: object - :param impala_object_dataset_schema: The schema name of the Impala. Type: - string (or Expression with resultType string). - :type impala_object_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'impala_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, impala_object_dataset_schema=None, **kwargs) -> None: - super(ImpalaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.table = table - self.impala_object_dataset_schema = impala_object_dataset_schema - self.type = 'ImpalaObject' - - -class ImpalaSource(CopySource): - """A copy activity Impala server source. - - All required parameters must be populated in order to send to Azure. 
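Because tableName is being retired in favor of the schema + table pair, a brief sketch of the ImpalaObjectDataset above using the newer properties (the reference and table names are placeholders):

from azure.mgmt.datafactory.models import (
    ImpalaObjectDataset, LinkedServiceReference)

impala_ds = ImpalaObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='ImpalaLinkedService'),
    # Prefer the schema + table pair over the retired tableName property.
    impala_object_dataset_schema='default',
    table='events',
)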
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'ImpalaSource' - - -class InformixLinkedService(LinkedService): - """Informix linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The non-access credential portion of - the connection string as well as an optional encrypted credential. Type: - string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param authentication_type: Type of authentication used to connect to the - Informix as ODBC data store. Possible values are: Anonymous and Basic. - Type: string (or Expression with resultType string). - :type authentication_type: object - :param credential: The access credential portion of the connection string - specified in driver-specific property-value format. - :type credential: ~azure.mgmt.datafactory.models.SecretBase - :param user_name: User name for Basic authentication. Type: string (or - Expression with resultType string). - :type user_name: object - :param password: Password for Basic authentication. 
- :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, credential=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(InformixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.authentication_type = authentication_type - self.credential = credential - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'Informix' - - -class InformixSink(CopySink): - """A copy activity Informix sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param pre_copy_script: A query to execute before starting the copy. Type: - string (or Expression with resultType string). 
- :type pre_copy_script: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: - super(InformixSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.pre_copy_script = pre_copy_script - self.type = 'InformixSink' - - -class InformixSource(CopySource): - """A copy activity source for Informix. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'InformixSource' - - -class InformixTableDataset(Dataset): - """The Informix table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. 
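A sketch pairing the Informix source and sink defined above; the query and pre-copy script are hypothetical, and the retry wait follows the timespan pattern quoted in the docstrings:

    from azure.mgmt.datafactory.models import InformixSink, InformixSource

    src = InformixSource(
        query='SELECT * FROM customer',   # hypothetical query
        source_retry_count=3,
        source_retry_wait='00:00:30',
    )
    snk = InformixSink(
        pre_copy_script='DELETE FROM customer_stage',  # hypothetical script
        write_batch_size=1000,
    )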
- :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The Informix table name. Type: string (or Expression - with resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(InformixTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'InformixTable' - - -class IntegrationRuntime(Model): - """Azure Data Factory nested object which serves as a compute resource for - activities. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SelfHostedIntegrationRuntime, ManagedIntegrationRuntime - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Integration runtime description. - :type description: str - :param type: Required. Constant filled by server. 
- :type type: str
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'description': {'key': 'description', 'type': 'str'},
- 'type': {'key': 'type', 'type': 'str'},
- }
-
- _subtype_map = {
- 'type': {'SelfHosted': 'SelfHostedIntegrationRuntime', 'Managed': 'ManagedIntegrationRuntime'}
- }
-
- def __init__(self, *, additional_properties=None, description: str=None, **kwargs) -> None:
- super(IntegrationRuntime, self).__init__(**kwargs)
- self.additional_properties = additional_properties
- self.description = description
- self.type = None
-
-
-class IntegrationRuntimeAuthKeys(Model):
- """The integration runtime authentication keys.
-
- :param auth_key1: The primary integration runtime authentication key.
- :type auth_key1: str
- :param auth_key2: The secondary integration runtime authentication key.
- :type auth_key2: str
- """
-
- _attribute_map = {
- 'auth_key1': {'key': 'authKey1', 'type': 'str'},
- 'auth_key2': {'key': 'authKey2', 'type': 'str'},
- }
-
- def __init__(self, *, auth_key1: str=None, auth_key2: str=None, **kwargs) -> None:
- super(IntegrationRuntimeAuthKeys, self).__init__(**kwargs)
- self.auth_key1 = auth_key1
- self.auth_key2 = auth_key2
-
-
-class IntegrationRuntimeComputeProperties(Model):
- """The compute resource properties for managed integration runtime.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param location: The location for managed integration runtime. The
- supported regions can be found at
- https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement-activities
- :type location: str
- :param node_size: The node size requirement for the managed integration
- runtime.
- :type node_size: str
- :param number_of_nodes: The required number of nodes for the managed
- integration runtime.
- :type number_of_nodes: int
- :param max_parallel_executions_per_node: The maximum parallel execution
- count per node for the managed integration runtime.
- :type max_parallel_executions_per_node: int
- :param v_net_properties: VNet properties for managed integration runtime.
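The _subtype_map above is the msrest discriminator hook: constructing a known subclass pins the 'type' constant, which is what drives model selection on deserialization. A sketch, assuming ManagedIntegrationRuntime accepts the usual description keyword:

    from azure.mgmt.datafactory.models import ManagedIntegrationRuntime

    ir = ManagedIntegrationRuntime(description='On-demand HDI runtime')
    assert ir.type == 'Managed'   # set by the subclass constructor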
- :type v_net_properties: - ~azure.mgmt.datafactory.models.IntegrationRuntimeVNetProperties - """ - - _validation = { - 'number_of_nodes': {'minimum': 1}, - 'max_parallel_executions_per_node': {'minimum': 1}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'location': {'key': 'location', 'type': 'str'}, - 'node_size': {'key': 'nodeSize', 'type': 'str'}, - 'number_of_nodes': {'key': 'numberOfNodes', 'type': 'int'}, - 'max_parallel_executions_per_node': {'key': 'maxParallelExecutionsPerNode', 'type': 'int'}, - 'v_net_properties': {'key': 'vNetProperties', 'type': 'IntegrationRuntimeVNetProperties'}, - } - - def __init__(self, *, additional_properties=None, location: str=None, node_size: str=None, number_of_nodes: int=None, max_parallel_executions_per_node: int=None, v_net_properties=None, **kwargs) -> None: - super(IntegrationRuntimeComputeProperties, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.location = location - self.node_size = node_size - self.number_of_nodes = number_of_nodes - self.max_parallel_executions_per_node = max_parallel_executions_per_node - self.v_net_properties = v_net_properties - - -class IntegrationRuntimeConnectionInfo(Model): - """Connection information for encrypting the on-premises data source - credentials. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar service_token: The token generated in service. Callers use this - token to authenticate to integration runtime. - :vartype service_token: str - :ivar identity_cert_thumbprint: The integration runtime SSL certificate - thumbprint. Click-Once application uses it to do server validation. - :vartype identity_cert_thumbprint: str - :ivar host_service_uri: The on-premises integration runtime host URL. - :vartype host_service_uri: str - :ivar version: The integration runtime version. - :vartype version: str - :ivar public_key: The public key for encrypting a credential when - transferring the credential to the integration runtime. - :vartype public_key: str - :ivar is_identity_cert_exprired: Whether the identity certificate is - expired. 
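A sketch of managed integration runtime compute properties joined to a vNet; the node size, vNet ID, and subnet are placeholders, and the node counts respect the minimum-of-1 validation above:

    from azure.mgmt.datafactory.models import (
        IntegrationRuntimeComputeProperties, IntegrationRuntimeVNetProperties)

    compute = IntegrationRuntimeComputeProperties(
        location='West US',
        node_size='Standard_D2_v3',        # hypothetical node size
        number_of_nodes=2,
        max_parallel_executions_per_node=4,
        v_net_properties=IntegrationRuntimeVNetProperties(
            v_net_id='<vnet-resource-id>',
            subnet='default',
        ),
    )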
- :vartype is_identity_cert_exprired: bool - """ - - _validation = { - 'service_token': {'readonly': True}, - 'identity_cert_thumbprint': {'readonly': True}, - 'host_service_uri': {'readonly': True}, - 'version': {'readonly': True}, - 'public_key': {'readonly': True}, - 'is_identity_cert_exprired': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'service_token': {'key': 'serviceToken', 'type': 'str'}, - 'identity_cert_thumbprint': {'key': 'identityCertThumbprint', 'type': 'str'}, - 'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'}, - 'version': {'key': 'version', 'type': 'str'}, - 'public_key': {'key': 'publicKey', 'type': 'str'}, - 'is_identity_cert_exprired': {'key': 'isIdentityCertExprired', 'type': 'bool'}, - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(IntegrationRuntimeConnectionInfo, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.service_token = None - self.identity_cert_thumbprint = None - self.host_service_uri = None - self.version = None - self.public_key = None - self.is_identity_cert_exprired = None - - -class IntegrationRuntimeCustomSetupScriptProperties(Model): - """Custom setup script properties for a managed dedicated integration runtime. - - :param blob_container_uri: The URI of the Azure blob container that - contains the custom setup script. - :type blob_container_uri: str - :param sas_token: The SAS token of the Azure blob container. - :type sas_token: ~azure.mgmt.datafactory.models.SecureString - """ - - _attribute_map = { - 'blob_container_uri': {'key': 'blobContainerUri', 'type': 'str'}, - 'sas_token': {'key': 'sasToken', 'type': 'SecureString'}, - } - - def __init__(self, *, blob_container_uri: str=None, sas_token=None, **kwargs) -> None: - super(IntegrationRuntimeCustomSetupScriptProperties, self).__init__(**kwargs) - self.blob_container_uri = blob_container_uri - self.sas_token = sas_token - - -class IntegrationRuntimeDataProxyProperties(Model): - """Data proxy properties for a managed dedicated integration runtime. - - :param connect_via: The self-hosted integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.EntityReference - :param staging_linked_service: The staging linked service reference. - :type staging_linked_service: - ~azure.mgmt.datafactory.models.EntityReference - :param path: The path to contain the staged data in the Blob storage. - :type path: str - """ - - _attribute_map = { - 'connect_via': {'key': 'connectVia', 'type': 'EntityReference'}, - 'staging_linked_service': {'key': 'stagingLinkedService', 'type': 'EntityReference'}, - 'path': {'key': 'path', 'type': 'str'}, - } - - def __init__(self, *, connect_via=None, staging_linked_service=None, path: str=None, **kwargs) -> None: - super(IntegrationRuntimeDataProxyProperties, self).__init__(**kwargs) - self.connect_via = connect_via - self.staging_linked_service = staging_linked_service - self.path = path - - -class IntegrationRuntimeMonitoringData(Model): - """Get monitoring data response. - - :param name: Integration runtime name. - :type name: str - :param nodes: Integration runtime node monitoring data. 
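An illustrative construction of the custom setup script properties above; the blob container URI and SAS token are placeholders:

    from azure.mgmt.datafactory.models import (
        IntegrationRuntimeCustomSetupScriptProperties, SecureString)

    setup_script = IntegrationRuntimeCustomSetupScriptProperties(
        blob_container_uri='https://mystore.blob.core.windows.net/setup',
        sas_token=SecureString(value='<sas-token>'),
    )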
- :type nodes: - list[~azure.mgmt.datafactory.models.IntegrationRuntimeNodeMonitoringData] - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'nodes': {'key': 'nodes', 'type': '[IntegrationRuntimeNodeMonitoringData]'}, - } - - def __init__(self, *, name: str=None, nodes=None, **kwargs) -> None: - super(IntegrationRuntimeMonitoringData, self).__init__(**kwargs) - self.name = name - self.nodes = nodes - - -class IntegrationRuntimeNodeIpAddress(Model): - """The IP address of self-hosted integration runtime node. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar ip_address: The IP address of self-hosted integration runtime node. - :vartype ip_address: str - """ - - _validation = { - 'ip_address': {'readonly': True}, - } - - _attribute_map = { - 'ip_address': {'key': 'ipAddress', 'type': 'str'}, - } - - def __init__(self, **kwargs) -> None: - super(IntegrationRuntimeNodeIpAddress, self).__init__(**kwargs) - self.ip_address = None - - -class IntegrationRuntimeNodeMonitoringData(Model): - """Monitoring data for integration runtime node. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar node_name: Name of the integration runtime node. - :vartype node_name: str - :ivar available_memory_in_mb: Available memory (MB) on the integration - runtime node. - :vartype available_memory_in_mb: int - :ivar cpu_utilization: CPU percentage on the integration runtime node. - :vartype cpu_utilization: int - :ivar concurrent_jobs_limit: Maximum concurrent jobs on the integration - runtime node. - :vartype concurrent_jobs_limit: int - :ivar concurrent_jobs_running: The number of jobs currently running on the - integration runtime node. - :vartype concurrent_jobs_running: int - :ivar max_concurrent_jobs: The maximum concurrent jobs in this integration - runtime. - :vartype max_concurrent_jobs: int - :ivar sent_bytes: Sent bytes on the integration runtime node. - :vartype sent_bytes: float - :ivar received_bytes: Received bytes on the integration runtime node. 
- :vartype received_bytes: float - """ - - _validation = { - 'node_name': {'readonly': True}, - 'available_memory_in_mb': {'readonly': True}, - 'cpu_utilization': {'readonly': True}, - 'concurrent_jobs_limit': {'readonly': True}, - 'concurrent_jobs_running': {'readonly': True}, - 'max_concurrent_jobs': {'readonly': True}, - 'sent_bytes': {'readonly': True}, - 'received_bytes': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'node_name': {'key': 'nodeName', 'type': 'str'}, - 'available_memory_in_mb': {'key': 'availableMemoryInMB', 'type': 'int'}, - 'cpu_utilization': {'key': 'cpuUtilization', 'type': 'int'}, - 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, - 'concurrent_jobs_running': {'key': 'concurrentJobsRunning', 'type': 'int'}, - 'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'}, - 'sent_bytes': {'key': 'sentBytes', 'type': 'float'}, - 'received_bytes': {'key': 'receivedBytes', 'type': 'float'}, - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(IntegrationRuntimeNodeMonitoringData, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.node_name = None - self.available_memory_in_mb = None - self.cpu_utilization = None - self.concurrent_jobs_limit = None - self.concurrent_jobs_running = None - self.max_concurrent_jobs = None - self.sent_bytes = None - self.received_bytes = None - - -class IntegrationRuntimeReference(Model): - """Integration runtime reference type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Type of integration runtime. Default value: - "IntegrationRuntimeReference" . - :vartype type: str - :param reference_name: Required. Reference integration runtime name. - :type reference_name: str - :param parameters: Arguments for integration runtime. - :type parameters: dict[str, object] - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, - } - - type = "IntegrationRuntimeReference" - - def __init__(self, *, reference_name: str, parameters=None, **kwargs) -> None: - super(IntegrationRuntimeReference, self).__init__(**kwargs) - self.reference_name = reference_name - self.parameters = parameters - - -class IntegrationRuntimeRegenerateKeyParameters(Model): - """Parameters to regenerate the authentication key. - - :param key_name: The name of the authentication key to regenerate. - Possible values include: 'authKey1', 'authKey2' - :type key_name: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeyName - """ - - _attribute_map = { - 'key_name': {'key': 'keyName', 'type': 'str'}, - } - - def __init__(self, *, key_name=None, **kwargs) -> None: - super(IntegrationRuntimeRegenerateKeyParameters, self).__init__(**kwargs) - self.key_name = key_name - - -class IntegrationRuntimeResource(SubResource): - """Integration runtime resource type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. 
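Note that IntegrationRuntimeReference carries its type as a class-level constant, so only the reference name is supplied; a sketch with a placeholder name:

    from azure.mgmt.datafactory.models import IntegrationRuntimeReference

    ir_ref = IntegrationRuntimeReference(reference_name='MySelfHostedIR')
    assert ir_ref.type == 'IntegrationRuntimeReference'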
- :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Integration runtime properties. - :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntime - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'IntegrationRuntime'}, - } - - def __init__(self, *, properties, **kwargs) -> None: - super(IntegrationRuntimeResource, self).__init__(**kwargs) - self.properties = properties - - -class IntegrationRuntimeSsisCatalogInfo(Model): - """Catalog information for managed dedicated integration runtime. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param catalog_server_endpoint: The catalog database server URL. - :type catalog_server_endpoint: str - :param catalog_admin_user_name: The administrator user name of catalog - database. - :type catalog_admin_user_name: str - :param catalog_admin_password: The password of the administrator user - account of the catalog database. - :type catalog_admin_password: ~azure.mgmt.datafactory.models.SecureString - :param catalog_pricing_tier: The pricing tier for the catalog database. - The valid values could be found in - https://azure.microsoft.com/en-us/pricing/details/sql-database/. Possible - values include: 'Basic', 'Standard', 'Premium', 'PremiumRS' - :type catalog_pricing_tier: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogPricingTier - """ - - _validation = { - 'catalog_admin_user_name': {'max_length': 128, 'min_length': 1}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'catalog_server_endpoint': {'key': 'catalogServerEndpoint', 'type': 'str'}, - 'catalog_admin_user_name': {'key': 'catalogAdminUserName', 'type': 'str'}, - 'catalog_admin_password': {'key': 'catalogAdminPassword', 'type': 'SecureString'}, - 'catalog_pricing_tier': {'key': 'catalogPricingTier', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, catalog_server_endpoint: str=None, catalog_admin_user_name: str=None, catalog_admin_password=None, catalog_pricing_tier=None, **kwargs) -> None: - super(IntegrationRuntimeSsisCatalogInfo, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.catalog_server_endpoint = catalog_server_endpoint - self.catalog_admin_user_name = catalog_admin_user_name - self.catalog_admin_password = catalog_admin_password - self.catalog_pricing_tier = catalog_pricing_tier - - -class IntegrationRuntimeSsisProperties(Model): - """SSIS properties for managed integration runtime. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param catalog_info: Catalog information for managed dedicated integration - runtime. - :type catalog_info: - ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogInfo - :param license_type: License type for bringing your own license scenario. 
- Possible values include: 'BasePrice', 'LicenseIncluded'
- :type license_type: str or
- ~azure.mgmt.datafactory.models.IntegrationRuntimeLicenseType
- :param custom_setup_script_properties: Custom setup script properties for
- a managed dedicated integration runtime.
- :type custom_setup_script_properties:
- ~azure.mgmt.datafactory.models.IntegrationRuntimeCustomSetupScriptProperties
- :param data_proxy_properties: Data proxy properties for a managed
- dedicated integration runtime.
- :type data_proxy_properties:
- ~azure.mgmt.datafactory.models.IntegrationRuntimeDataProxyProperties
- :param edition: The edition for the SSIS Integration Runtime. Possible
- values include: 'Standard', 'Enterprise'
- :type edition: str or
- ~azure.mgmt.datafactory.models.IntegrationRuntimeEdition
- """
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'catalog_info': {'key': 'catalogInfo', 'type': 'IntegrationRuntimeSsisCatalogInfo'},
- 'license_type': {'key': 'licenseType', 'type': 'str'},
- 'custom_setup_script_properties': {'key': 'customSetupScriptProperties', 'type': 'IntegrationRuntimeCustomSetupScriptProperties'},
- 'data_proxy_properties': {'key': 'dataProxyProperties', 'type': 'IntegrationRuntimeDataProxyProperties'},
- 'edition': {'key': 'edition', 'type': 'str'},
- }
-
- def __init__(self, *, additional_properties=None, catalog_info=None, license_type=None, custom_setup_script_properties=None, data_proxy_properties=None, edition=None, **kwargs) -> None:
- super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs)
- self.additional_properties = additional_properties
- self.catalog_info = catalog_info
- self.license_type = license_type
- self.custom_setup_script_properties = custom_setup_script_properties
- self.data_proxy_properties = data_proxy_properties
- self.edition = edition
-
-
-class IntegrationRuntimeStatus(Model):
- """Integration runtime status.
-
- You probably want to use the sub-classes and not this class directly. Known
- sub-classes are: SelfHostedIntegrationRuntimeStatus,
- ManagedIntegrationRuntimeStatus
-
- Variables are only populated by the server, and will be ignored when
- sending a request.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :ivar data_factory_name: The name of the data factory to which the
- integration runtime belongs.
- :vartype data_factory_name: str
- :ivar state: The state of the integration runtime. Possible values
- include: 'Initial', 'Stopped', 'Started', 'Starting', 'Stopping',
- 'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied'
- :vartype state: str or
- ~azure.mgmt.datafactory.models.IntegrationRuntimeState
- :param type: Required. Constant filled by server.
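A sketch assembling the SSIS properties described above; the catalog endpoint and credentials are placeholders, and 'BasePrice' selects the bring-your-own-license rate:

    from azure.mgmt.datafactory.models import (
        IntegrationRuntimeSsisCatalogInfo, IntegrationRuntimeSsisProperties,
        SecureString)

    ssis_props = IntegrationRuntimeSsisProperties(
        catalog_info=IntegrationRuntimeSsisCatalogInfo(
            catalog_server_endpoint='myserver.database.windows.net',
            catalog_admin_user_name='ssisadmin',   # 1-128 chars per validation
            catalog_admin_password=SecureString(value='<password>'),
            catalog_pricing_tier='Standard',
        ),
        license_type='BasePrice',
        edition='Enterprise',
    )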
- :type type: str - """ - - _validation = { - 'data_factory_name': {'readonly': True}, - 'state': {'readonly': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, - 'state': {'key': 'state', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SelfHosted': 'SelfHostedIntegrationRuntimeStatus', 'Managed': 'ManagedIntegrationRuntimeStatus'} - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(IntegrationRuntimeStatus, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.data_factory_name = None - self.state = None - self.type = None - - -class IntegrationRuntimeStatusListResponse(Model): - """A list of integration runtime status. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of integration runtime status. - :type value: - list[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse] - :param next_link: The link to the next page of results, if any remaining - results exist. - :type next_link: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[IntegrationRuntimeStatusResponse]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__(self, *, value, next_link: str=None, **kwargs) -> None: - super(IntegrationRuntimeStatusListResponse, self).__init__(**kwargs) - self.value = value - self.next_link = next_link - - -class IntegrationRuntimeStatusResponse(Model): - """Integration runtime status response. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar name: The integration runtime name. - :vartype name: str - :param properties: Required. Integration runtime properties. - :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatus - """ - - _validation = { - 'name': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'IntegrationRuntimeStatus'}, - } - - def __init__(self, *, properties, **kwargs) -> None: - super(IntegrationRuntimeStatusResponse, self).__init__(**kwargs) - self.name = None - self.properties = properties - - -class IntegrationRuntimeVNetProperties(Model): - """VNet properties for managed integration runtime. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param v_net_id: The ID of the VNet that this integration runtime will - join. - :type v_net_id: str - :param subnet: The name of the subnet this integration runtime will join. - :type subnet: str - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'v_net_id': {'key': 'vNetId', 'type': 'str'}, - 'subnet': {'key': 'subnet', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, v_net_id: str=None, subnet: str=None, **kwargs) -> None: - super(IntegrationRuntimeVNetProperties, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.v_net_id = v_net_id - self.subnet = subnet - - -class JiraLinkedService(LinkedService): - """Jira Service linked service. 
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The IP address or host name of the Jira service. - (e.g. jira.example.com) - :type host: object - :param port: The TCP port that the Jira server uses to listen for client - connections. The default value is 443 if connecting through HTTPS, or 8080 - if connecting through HTTP. - :type port: object - :param username: Required. The user name that you use to access Jira - Service. - :type username: object - :param password: The password corresponding to the user name that you - provided in the username field. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'username': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, password=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(JiraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.port = port - self.username = username - self.password = password - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'Jira' - - -class JiraObjectDataset(Dataset): - """Jira Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). 
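A sketch of the Jira linked service above, reusing the docstring's own example host; the user name and secret are placeholders, and port falls back to 443 (HTTPS) or 8080 (HTTP) when omitted:

    from azure.mgmt.datafactory.models import JiraLinkedService, SecureString

    jira_ls = JiraLinkedService(
        host='jira.example.com',
        username='jira-user',
        password=SecureString(value='<api-token>'),
    )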
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(JiraObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'JiraObject' - - -class JiraSource(CopySource): - """A copy activity Jira Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'JiraSource' - - -class JsonDataset(Dataset): - """Json dataset. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param location: Required. The location of the json data storage. - :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param encoding_name: The code page name of the preferred encoding. If not - specified, the default value is UTF-8, unless BOM denotes another Unicode - encoding. Refer to the name column of the table in the following link to - set supported values: - https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string - (or Expression with resultType string). - :type encoding_name: object - :param compression: The data compression method used for the json dataset. 
- :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'location': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, encoding_name=None, compression=None, **kwargs) -> None: - super(JsonDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.location = location - self.encoding_name = encoding_name - self.compression = compression - self.type = 'Json' - - -class JsonFormat(DatasetStorageFormat): - """The data stored in JSON format. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param serializer: Serializer. Type: string (or Expression with resultType - string). - :type serializer: object - :param deserializer: Deserializer. Type: string (or Expression with - resultType string). - :type deserializer: object - :param type: Required. Constant filled by server. - :type type: str - :param file_pattern: File pattern of JSON. To be more specific, the way of - separating a collection of JSON objects. The default value is - 'setOfObjects'. It is case-sensitive. - :type file_pattern: object - :param nesting_separator: The character used to separate nesting levels. - Default value is '.' (dot). Type: string (or Expression with resultType - string). - :type nesting_separator: object - :param encoding_name: The code page name of the preferred encoding. If not - provided, the default value is 'utf-8', unless the byte order mark (BOM) - denotes another Unicode encoding. The full list of supported values can be - found in the 'Name' column of the table of encodings in the following - reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string - (or Expression with resultType string). - :type encoding_name: object - :param json_node_reference: The JSONPath of the JSON array element to be - flattened. Example: "$.ArrayPath". Type: string (or Expression with - resultType string). - :type json_node_reference: object - :param json_path_definition: The JSONPath definition for each column - mapping with a customized column name to extract data from JSON file. 
- For fields under root object, start with "$"; for fields inside the array
- chosen by jsonNodeReference property, start from the array element.
- Example: {"Column1": "$.Column1Path", "Column2": "Column2PathInArray"}.
- Type: object (or Expression with resultType object).
- :type json_path_definition: object
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'serializer': {'key': 'serializer', 'type': 'object'},
- 'deserializer': {'key': 'deserializer', 'type': 'object'},
- 'type': {'key': 'type', 'type': 'str'},
- 'file_pattern': {'key': 'filePattern', 'type': 'object'},
- 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'},
- 'encoding_name': {'key': 'encodingName', 'type': 'object'},
- 'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'},
- 'json_path_definition': {'key': 'jsonPathDefinition', 'type': 'object'},
- }
-
- def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, file_pattern=None, nesting_separator=None, encoding_name=None, json_node_reference=None, json_path_definition=None, **kwargs) -> None:
- super(JsonFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs)
- self.file_pattern = file_pattern
- self.nesting_separator = nesting_separator
- self.encoding_name = encoding_name
- self.json_node_reference = json_node_reference
- self.json_path_definition = json_path_definition
- self.type = 'JsonFormat'
-
-
-class JsonSink(CopySink):
- """A copy activity Json sink.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param write_batch_size: Write batch size. Type: integer (or Expression
- with resultType integer), minimum: 0.
- :type write_batch_size: object
- :param write_batch_timeout: Write batch timeout. Type: string (or
- Expression with resultType string), pattern:
- ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type write_batch_timeout: object
- :param sink_retry_count: Sink retry count. Type: integer (or Expression
- with resultType integer).
- :type sink_retry_count: object
- :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
- resultType string), pattern:
- ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type sink_retry_wait: object
- :param max_concurrent_connections: The maximum concurrent connection count
- for the sink data store. Type: integer (or Expression with resultType
- integer).
- :type max_concurrent_connections: object
- :param type: Required. Constant filled by server.
- :type type: str
- :param store_settings: Json store settings.
- :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings
- :param format_settings: Json format settings.
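A sketch of the JsonFormat flattening described above, reusing the docstring's jsonPathDefinition example; the array path is hypothetical:

    from azure.mgmt.datafactory.models import JsonFormat

    fmt = JsonFormat(
        file_pattern='setOfObjects',
        json_node_reference='$.records',   # hypothetical array path
        json_path_definition={
            'Column1': '$.Column1Path',
            'Column2': 'Column2PathInArray',
        },
    )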
- :type format_settings: ~azure.mgmt.datafactory.models.JsonWriteSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'JsonWriteSettings'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: - super(JsonSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.store_settings = store_settings - self.format_settings = format_settings - self.type = 'JsonSink' - - -class JsonSource(CopySource): - """A copy activity Json source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: Json store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: - super(JsonSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.store_settings = store_settings - self.type = 'JsonSource' - - -class JsonWriteSettings(FormatWriteSettings): - """Json write settings. - - All required parameters must be populated in order to send to Azure. 
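A sketch wiring the JsonSink above to the JsonWriteSettings defined just below; 'arrayOfObjects' is assumed to emit one JSON array while the default 'setOfObjects' emits individual objects, and the value is case-sensitive:

    from azure.mgmt.datafactory.models import JsonSink, JsonWriteSettings

    sink = JsonSink(
        format_settings=JsonWriteSettings(
            type='JsonWriteSettings',      # required write-setting type tag
            file_pattern='arrayOfObjects',
        ),
    )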
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param type: Required. The write setting type.
-    :type type: str
-    :param file_pattern: File pattern of JSON. This setting controls the way a
-     collection of JSON objects will be treated. The default value is
-     'setOfObjects'. It is case-sensitive. Possible values include:
-     'setOfObjects', 'arrayOfObjects'
-    :type file_pattern: str or
-     ~azure.mgmt.datafactory.models.JsonWriteFilePattern
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'type': {'key': 'type', 'type': 'str'},
-        'file_pattern': {'key': 'filePattern', 'type': 'str'},
-    }
-
-    def __init__(self, *, type: str, additional_properties=None, file_pattern=None, **kwargs) -> None:
-        super(JsonWriteSettings, self).__init__(additional_properties=additional_properties, type=type, **kwargs)
-        self.file_pattern = file_pattern
-
-
-class LinkedIntegrationRuntime(Model):
-    """The linked integration runtime information.
-
-    Variables are only populated by the server, and will be ignored when
-    sending a request.
-
-    :ivar name: The name of the linked integration runtime.
-    :vartype name: str
-    :ivar subscription_id: The subscription ID to which the linked
-     integration runtime belongs.
-    :vartype subscription_id: str
-    :ivar data_factory_name: The name of the data factory to which the linked
-     integration runtime belongs.
-    :vartype data_factory_name: str
-    :ivar data_factory_location: The location of the data factory to which
-     the linked integration runtime belongs.
-    :vartype data_factory_location: str
-    :ivar create_time: The time when the linked integration runtime was
-     created.
-    :vartype create_time: datetime
-    """
-
-    _validation = {
-        'name': {'readonly': True},
-        'subscription_id': {'readonly': True},
-        'data_factory_name': {'readonly': True},
-        'data_factory_location': {'readonly': True},
-        'create_time': {'readonly': True},
-    }
-
-    _attribute_map = {
-        'name': {'key': 'name', 'type': 'str'},
-        'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
-        'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'},
-        'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'},
-        'create_time': {'key': 'createTime', 'type': 'iso-8601'},
-    }
-
-    def __init__(self, **kwargs) -> None:
-        super(LinkedIntegrationRuntime, self).__init__(**kwargs)
-        self.name = None
-        self.subscription_id = None
-        self.data_factory_name = None
-        self.data_factory_location = None
-        self.create_time = None
-
-
-class LinkedIntegrationRuntimeType(Model):
-    """The base definition of a linked integration runtime.
-
-    You probably want to use the sub-classes and not this class directly.
-    Known sub-classes are: LinkedIntegrationRuntimeRbacAuthorization,
-    LinkedIntegrationRuntimeKeyAuthorization
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param authorization_type: Required. Constant filled by server.
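# Sketch: wiring the JSON write settings into a JSON sink, as the two models
# above suggest. Assumes the models are importable from
# azure.mgmt.datafactory.models; the 'JsonWriteSettings' type string and the
# batch size are illustrative values, not mandated by this patch.
from azure.mgmt.datafactory.models import JsonSink, JsonWriteSettings

write_settings = JsonWriteSettings(
    type='JsonWriteSettings',       # 'type' is required by FormatWriteSettings
    file_pattern='arrayOfObjects')  # service default is 'setOfObjects'
sink = JsonSink(write_batch_size=1000, format_settings=write_settings)
assert sink.type == 'JsonSink'      # discriminator set by the constructor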
- :type authorization_type: str - """ - - _validation = { - 'authorization_type': {'required': True}, - } - - _attribute_map = { - 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, - } - - _subtype_map = { - 'authorization_type': {'RBAC': 'LinkedIntegrationRuntimeRbacAuthorization', 'Key': 'LinkedIntegrationRuntimeKeyAuthorization'} - } - - def __init__(self, **kwargs) -> None: - super(LinkedIntegrationRuntimeType, self).__init__(**kwargs) - self.authorization_type = None - - -class LinkedIntegrationRuntimeKeyAuthorization(LinkedIntegrationRuntimeType): - """The key authorization type integration runtime. - - All required parameters must be populated in order to send to Azure. - - :param authorization_type: Required. Constant filled by server. - :type authorization_type: str - :param key: Required. The key used for authorization. - :type key: ~azure.mgmt.datafactory.models.SecureString - """ - - _validation = { - 'authorization_type': {'required': True}, - 'key': {'required': True}, - } - - _attribute_map = { - 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, - 'key': {'key': 'key', 'type': 'SecureString'}, - } - - def __init__(self, *, key, **kwargs) -> None: - super(LinkedIntegrationRuntimeKeyAuthorization, self).__init__(**kwargs) - self.key = key - self.authorization_type = 'Key' - - -class LinkedIntegrationRuntimeRbacAuthorization(LinkedIntegrationRuntimeType): - """The role based access control (RBAC) authorization type integration - runtime. - - All required parameters must be populated in order to send to Azure. - - :param authorization_type: Required. Constant filled by server. - :type authorization_type: str - :param resource_id: Required. The resource identifier of the integration - runtime to be shared. - :type resource_id: str - """ - - _validation = { - 'authorization_type': {'required': True}, - 'resource_id': {'required': True}, - } - - _attribute_map = { - 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - } - - def __init__(self, *, resource_id: str, **kwargs) -> None: - super(LinkedIntegrationRuntimeRbacAuthorization, self).__init__(**kwargs) - self.resource_id = resource_id - self.authorization_type = 'RBAC' - - -class LinkedIntegrationRuntimeRequest(Model): - """Data factory name for linked integration runtime request. - - All required parameters must be populated in order to send to Azure. - - :param linked_factory_name: Required. The data factory name for linked - integration runtime. - :type linked_factory_name: str - """ - - _validation = { - 'linked_factory_name': {'required': True}, - } - - _attribute_map = { - 'linked_factory_name': {'key': 'factoryName', 'type': 'str'}, - } - - def __init__(self, *, linked_factory_name: str, **kwargs) -> None: - super(LinkedIntegrationRuntimeRequest, self).__init__(**kwargs) - self.linked_factory_name = linked_factory_name - - -class LinkedServiceReference(Model): - """Linked service reference type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Linked service reference type. Default value: - "LinkedServiceReference" . - :vartype type: str - :param reference_name: Required. Reference LinkedService name. - :type reference_name: str - :param parameters: Arguments for LinkedService. 
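# Sketch: the subtype map above means callers pick one of the two concrete
# authorization models and the discriminator is filled in automatically.
# Assumes SecureString (a SecretBase subtype defined elsewhere in this
# package); the key and resource ID are placeholders.
from azure.mgmt.datafactory.models import (
    LinkedIntegrationRuntimeKeyAuthorization,
    LinkedIntegrationRuntimeRbacAuthorization, SecureString)

key_auth = LinkedIntegrationRuntimeKeyAuthorization(
    key=SecureString(value='<authorization-key>'))
rbac_auth = LinkedIntegrationRuntimeRbacAuthorization(
    resource_id='/subscriptions/<sub>/resourceGroups/<rg>/providers/'
                'Microsoft.DataFactory/factories/<df>/integrationRuntimes/<ir>')
assert key_auth.authorization_type == 'Key'
assert rbac_auth.authorization_type == 'RBAC'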
- :type parameters: dict[str, object] - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, - } - - type = "LinkedServiceReference" - - def __init__(self, *, reference_name: str, parameters=None, **kwargs) -> None: - super(LinkedServiceReference, self).__init__(**kwargs) - self.reference_name = reference_name - self.parameters = parameters - - -class LinkedServiceResource(SubResource): - """Linked service resource type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Properties of linked service. - :type properties: ~azure.mgmt.datafactory.models.LinkedService - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'LinkedService'}, - } - - def __init__(self, *, properties, **kwargs) -> None: - super(LinkedServiceResource, self).__init__(**kwargs) - self.properties = properties - - -class LogStorageSettings(Model): - """Log storage settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param linked_service_name: Required. Log storage linked service - reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param path: The path to storage for storing detailed logs of activity - execution. Type: string (or Expression with resultType string). - :type path: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'path': {'key': 'path', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, path=None, **kwargs) -> None: - super(LogStorageSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.linked_service_name = linked_service_name - self.path = path - - -class LookupActivity(ExecutionActivity): - """Lookup activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. 
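# Sketch: a LinkedServiceReference is the handle other models use to point at
# a linked service, for example from LogStorageSettings above. The names and
# path are placeholders; the import path follows the rest of this package.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, LogStorageSettings)

ls_ref = LinkedServiceReference(
    reference_name='MyBlobStorageLS',
    parameters={'container': 'logs'})   # optional arguments for the service
log_settings = LogStorageSettings(linked_service_name=ls_ref, path='copy/logs')
# 'type' is a constant class attribute, so it needs no constructor argument:
assert ls_ref.type == 'LinkedServiceReference'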
- :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param source: Required. Dataset-specific source properties, same as copy - activity source. - :type source: ~azure.mgmt.datafactory.models.CopySource - :param dataset: Required. Lookup activity dataset reference. - :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - :param first_row_only: Whether to return first row or all rows. Default - value is true. Type: boolean (or Expression with resultType boolean). - :type first_row_only: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'source': {'required': True}, - 'dataset': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, - 'first_row_only': {'key': 'typeProperties.firstRowOnly', 'type': 'object'}, - } - - def __init__(self, *, name: str, source, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, first_row_only=None, **kwargs) -> None: - super(LookupActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.source = source - self.dataset = dataset - self.first_row_only = first_row_only - self.type = 'Lookup' - - -class MagentoLinkedService(LinkedService): - """Magento server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The URL of the Magento instance. (i.e. - 192.168.222.110/magento3) - :type host: object - :param access_token: The access token from Magento. 
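# Sketch: a Lookup activity pairs a copy source with a dataset reference, per
# the docstring above. SqlSource and DatasetReference are assumed from
# elsewhere in this package; the query and names are placeholders.
from azure.mgmt.datafactory.models import (
    DatasetReference, LookupActivity, SqlSource)

lookup = LookupActivity(
    name='LookupWatermark',
    source=SqlSource(sql_reader_query='SELECT MAX(ModifiedDate) FROM dbo.T'),
    dataset=DatasetReference(reference_name='WatermarkDataset'),
    first_row_only=True)   # the default; set False to return all rows
assert lookup.type == 'Lookup'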
- :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(MagentoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.access_token = access_token - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'Magento' - - -class MagentoObjectDataset(Dataset): - """Magento server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(MagentoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'MagentoObject' - - -class MagentoSource(CopySource): - """A copy activity Magento server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'MagentoSource' - - -class ManagedIntegrationRuntime(IntegrationRuntime): - """Managed integration runtime, including managed elastic and managed - dedicated integration runtimes. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Integration runtime description. - :type description: str - :param type: Required. Constant filled by server. - :type type: str - :ivar state: Integration runtime state, only valid for managed dedicated - integration runtime. Possible values include: 'Initial', 'Stopped', - 'Started', 'Starting', 'Stopping', 'NeedRegistration', 'Online', - 'Limited', 'Offline', 'AccessDenied' - :vartype state: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeState - :param compute_properties: The compute resource for managed integration - runtime. - :type compute_properties: - ~azure.mgmt.datafactory.models.IntegrationRuntimeComputeProperties - :param ssis_properties: SSIS properties for managed integration runtime. - :type ssis_properties: - ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisProperties - """ - - _validation = { - 'type': {'required': True}, - 'state': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'state': {'key': 'state', 'type': 'str'}, - 'compute_properties': {'key': 'typeProperties.computeProperties', 'type': 'IntegrationRuntimeComputeProperties'}, - 'ssis_properties': {'key': 'typeProperties.ssisProperties', 'type': 'IntegrationRuntimeSsisProperties'}, - } - - def __init__(self, *, additional_properties=None, description: str=None, compute_properties=None, ssis_properties=None, **kwargs) -> None: - super(ManagedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description, **kwargs) - self.state = None - self.compute_properties = compute_properties - self.ssis_properties = ssis_properties - self.type = 'Managed' - - -class ManagedIntegrationRuntimeError(Model): - """Error definition for managed integration runtime. - - Variables are only populated by the server, and will be ignored when - sending a request. 
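# Sketch: the three Magento models above compose into the usual linked
# service -> dataset -> source chain. The host, token and names are
# placeholders; SecureString is assumed from elsewhere in this package.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, MagentoLinkedService, MagentoObjectDataset,
    MagentoSource, SecureString)

magento_ls = MagentoLinkedService(
    host='192.168.222.110/magento3',
    access_token=SecureString(value='<access-token>'))
orders = MagentoObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='MagentoLS'),
    table_name='orders')
source = MagentoSource(query='SELECT * FROM orders')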
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar time: The time when the error occurred. - :vartype time: datetime - :ivar code: Error code. - :vartype code: str - :ivar parameters: Managed integration runtime error parameters. - :vartype parameters: list[str] - :ivar message: Error message. - :vartype message: str - """ - - _validation = { - 'time': {'readonly': True}, - 'code': {'readonly': True}, - 'parameters': {'readonly': True}, - 'message': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'time': {'key': 'time', 'type': 'iso-8601'}, - 'code': {'key': 'code', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '[str]'}, - 'message': {'key': 'message', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(ManagedIntegrationRuntimeError, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.time = None - self.code = None - self.parameters = None - self.message = None - - -class ManagedIntegrationRuntimeNode(Model): - """Properties of integration runtime node. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar node_id: The managed integration runtime node id. - :vartype node_id: str - :ivar status: The managed integration runtime node status. Possible values - include: 'Starting', 'Available', 'Recycling', 'Unavailable' - :vartype status: str or - ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNodeStatus - :param errors: The errors that occurred on this integration runtime node. - :type errors: - list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError] - """ - - _validation = { - 'node_id': {'readonly': True}, - 'status': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'node_id': {'key': 'nodeId', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'errors': {'key': 'errors', 'type': '[ManagedIntegrationRuntimeError]'}, - } - - def __init__(self, *, additional_properties=None, errors=None, **kwargs) -> None: - super(ManagedIntegrationRuntimeNode, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.node_id = None - self.status = None - self.errors = errors - - -class ManagedIntegrationRuntimeOperationResult(Model): - """Properties of managed integration runtime operation result. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar type: The operation type. Could be start or stop. - :vartype type: str - :ivar start_time: The start time of the operation. - :vartype start_time: datetime - :ivar result: The operation result. - :vartype result: str - :ivar error_code: The error code. - :vartype error_code: str - :ivar parameters: Managed integration runtime error parameters. - :vartype parameters: list[str] - :ivar activity_id: The activity id for the operation request. 
-    :vartype activity_id: str
-    """
-
-    _validation = {
-        'type': {'readonly': True},
-        'start_time': {'readonly': True},
-        'result': {'readonly': True},
-        'error_code': {'readonly': True},
-        'parameters': {'readonly': True},
-        'activity_id': {'readonly': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'type': {'key': 'type', 'type': 'str'},
-        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
-        'result': {'key': 'result', 'type': 'str'},
-        'error_code': {'key': 'errorCode', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '[str]'},
-        'activity_id': {'key': 'activityId', 'type': 'str'},
-    }
-
-    def __init__(self, *, additional_properties=None, **kwargs) -> None:
-        super(ManagedIntegrationRuntimeOperationResult, self).__init__(**kwargs)
-        self.additional_properties = additional_properties
-        self.type = None
-        self.start_time = None
-        self.result = None
-        self.error_code = None
-        self.parameters = None
-        self.activity_id = None
-
-
-class ManagedIntegrationRuntimeStatus(IntegrationRuntimeStatus):
-    """Managed integration runtime status.
-
-    Variables are only populated by the server, and will be ignored when
-    sending a request.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :ivar data_factory_name: The name of the data factory to which the
-     integration runtime belongs.
-    :vartype data_factory_name: str
-    :ivar state: The state of the integration runtime. Possible values
-     include: 'Initial', 'Stopped', 'Started', 'Starting', 'Stopping',
-     'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied'
-    :vartype state: str or
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeState
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :ivar create_time: The time at which the integration runtime was created,
-     in ISO8601 format.
-    :vartype create_time: datetime
-    :ivar nodes: The list of nodes for managed integration runtime.
-    :vartype nodes:
-     list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNode]
-    :ivar other_errors: The errors that occurred on this integration runtime.
-    :vartype other_errors:
-     list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError]
-    :ivar last_operation: The last operation result that occurred on this
-     integration runtime.
- :vartype last_operation: - ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeOperationResult - """ - - _validation = { - 'data_factory_name': {'readonly': True}, - 'state': {'readonly': True}, - 'type': {'required': True}, - 'create_time': {'readonly': True}, - 'nodes': {'readonly': True}, - 'other_errors': {'readonly': True}, - 'last_operation': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, - 'state': {'key': 'state', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'create_time': {'key': 'typeProperties.createTime', 'type': 'iso-8601'}, - 'nodes': {'key': 'typeProperties.nodes', 'type': '[ManagedIntegrationRuntimeNode]'}, - 'other_errors': {'key': 'typeProperties.otherErrors', 'type': '[ManagedIntegrationRuntimeError]'}, - 'last_operation': {'key': 'typeProperties.lastOperation', 'type': 'ManagedIntegrationRuntimeOperationResult'}, - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(ManagedIntegrationRuntimeStatus, self).__init__(additional_properties=additional_properties, **kwargs) - self.create_time = None - self.nodes = None - self.other_errors = None - self.last_operation = None - self.type = 'Managed' - - -class MariaDBLinkedService(LinkedService): - """MariaDB server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection - string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
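# Sketch: only the mutable properties of a managed integration runtime are
# set client-side; the readonly ivars above stay None until a response is
# deserialized. IntegrationRuntimeComputeProperties and its keyword names are
# assumed from elsewhere in this package; sizes and location are placeholders.
from azure.mgmt.datafactory.models import (
    IntegrationRuntimeComputeProperties, ManagedIntegrationRuntime)

ir = ManagedIntegrationRuntime(
    description='Azure-SSIS integration runtime',
    compute_properties=IntegrationRuntimeComputeProperties(
        location='WestUS', node_size='Standard_D2_v3', number_of_nodes=2))
assert ir.state is None and ir.type == 'Managed'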
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: - super(MariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.pwd = pwd - self.encrypted_credential = encrypted_credential - self.type = 'MariaDB' - - -class MariaDBSource(CopySource): - """A copy activity MariaDB server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(MariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'MariaDBSource' - - -class MariaDBTableDataset(Dataset): - """MariaDB server dataset. - - All required parameters must be populated in order to send to Azure. 
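# Sketch: per the docstring above, the MariaDB password can live in Key Vault
# while the rest of the ODBC connection string is passed inline.
# AzureKeyVaultSecretReference is assumed from elsewhere in this package; the
# connection string and secret name are placeholders.
from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference, LinkedServiceReference, MariaDBLinkedService)

mariadb_ls = MariaDBLinkedService(
    connection_string='Server=db1;Port=3306;Database=sales;UID=loader;',
    pwd=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='MyKeyVaultLS'),
        secret_name='mariadb-password'))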
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(MariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'MariaDBTable' - - -class MarketoLinkedService(LinkedService): - """Marketo server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param endpoint: Required. 
The endpoint of the Marketo server. (i.e. - 123-ABC-321.mktorest.com) - :type endpoint: object - :param client_id: Required. The client Id of your Marketo service. - :type client_id: object - :param client_secret: The client secret of your Marketo service. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, endpoint, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(MarketoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.endpoint = endpoint - self.client_id = client_id - self.client_secret = client_secret - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'Marketo' - - -class MarketoObjectDataset(Dataset): - """Marketo server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. 
Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(MarketoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'MarketoObject' - - -class MarketoSource(CopySource): - """A copy activity Marketo server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
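# Sketch: a Marketo linked service needs the REST endpoint and client ID; the
# client secret rides in a SecretBase. The endpoint and credentials shown are
# placeholders; SecureString is assumed from elsewhere in this package.
from azure.mgmt.datafactory.models import MarketoLinkedService, SecureString

marketo_ls = MarketoLinkedService(
    endpoint='123-ABC-321.mktorest.com',
    client_id='<client-id>',
    client_secret=SecureString(value='<client-secret>'))
assert marketo_ls.type == 'Marketo'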
-    :type query: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
-        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'query': {'key': 'query', 'type': 'object'},
-    }
-
-    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
-        super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
-        self.query = query
-        self.type = 'MarketoSource'
-
-
-class MicrosoftAccessLinkedService(LinkedService):
-    """Microsoft Access linked service.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param connection_string: Required. The non-access credential portion of
-     the connection string as well as an optional encrypted credential. Type:
-     string, SecureString or AzureKeyVaultSecretReference.
-    :type connection_string: object
-    :param authentication_type: Type of authentication used to connect to
-     Microsoft Access as an ODBC data store. Possible values are: Anonymous
-     and Basic. Type: string (or Expression with resultType string).
-    :type authentication_type: object
-    :param credential: The access credential portion of the connection string
-     specified in driver-specific property-value format.
-    :type credential: ~azure.mgmt.datafactory.models.SecretBase
-    :param user_name: User name for Basic authentication. Type: string (or
-     Expression with resultType string).
-    :type user_name: object
-    :param password: Password for Basic authentication.
-    :type password: ~azure.mgmt.datafactory.models.SecretBase
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, credential=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(MicrosoftAccessLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.authentication_type = authentication_type - self.credential = credential - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'MicrosoftAccess' - - -class MicrosoftAccessSink(CopySink): - """A copy activity Microsoft Access sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param pre_copy_script: A query to execute before starting the copy. Type: - string (or Expression with resultType string). 
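# Sketch: Basic authentication against Microsoft Access via ODBC, following
# the parameters documented above. The DSN, user name and password are
# placeholders; SecureString is assumed from elsewhere in this package.
from azure.mgmt.datafactory.models import (
    MicrosoftAccessLinkedService, SecureString)

access_ls = MicrosoftAccessLinkedService(
    connection_string='DSN=NorthwindAccess;',
    authentication_type='Basic',
    user_name='reporting',
    password=SecureString(value='<password>'))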
- :type pre_copy_script: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: - super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.pre_copy_script = pre_copy_script - self.type = 'MicrosoftAccessSink' - - -class MicrosoftAccessSource(CopySource): - """A copy activity source for Microsoft Access. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(MicrosoftAccessSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'MicrosoftAccessSource' - - -class MicrosoftAccessTableDataset(Dataset): - """The Microsoft Access table dataset. - - All required parameters must be populated in order to send to Azure. 
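# Sketch: a Microsoft Access source/sink pair as used inside a copy activity;
# the pre-copy script clears a staging table before rows are written. The
# table names and batch size are placeholders.
from azure.mgmt.datafactory.models import (
    MicrosoftAccessSink, MicrosoftAccessSource)

access_src = MicrosoftAccessSource(query='SELECT * FROM Customers')
access_dst = MicrosoftAccessSink(
    pre_copy_script='DELETE FROM CustomersStaging',  # runs once, before copy
    write_batch_size=500)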
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The Microsoft Access table name. Type: string (or - Expression with resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(MicrosoftAccessTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'MicrosoftAccessTable' - - -class MongoDbCollectionDataset(Dataset): - """The MongoDB database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param parameters: Parameters for dataset.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- Dataset.
- :type annotations: list[object]
- :param folder: The folder that this Dataset is in. If not specified,
- Dataset will appear at the root level.
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
- :param type: Required. Constant filled by server.
- :type type: str
- :param collection_name: Required. The collection name of the MongoDB
- database. Type: string (or Expression with resultType string).
- :type collection_name: object
- """
-
- _validation = {
- 'linked_service_name': {'required': True},
- 'type': {'required': True},
- 'collection_name': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'description': {'key': 'description', 'type': 'str'},
- 'structure': {'key': 'structure', 'type': 'object'},
- 'schema': {'key': 'schema', 'type': 'object'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
- 'type': {'key': 'type', 'type': 'str'},
- 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'},
- }
-
- def __init__(self, *, linked_service_name, collection_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None:
- super(MongoDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
- self.collection_name = collection_name
- self.type = 'MongoDbCollection'
-
-
- class MongoDbCursorMethodsProperties(Model):
- """Cursor methods for MongoDB query.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param project: Specifies the fields to return in the documents that match
- the query filter. To return all fields in the matching documents, omit
- this parameter. Type: string (or Expression with resultType string).
- :type project: object
- :param sort: Specifies the order in which the query returns matching
- documents. Type: string (or Expression with resultType string).
- :type sort: object
- :param skip: Specifies how many documents to skip before MongoDB begins
- returning results. This approach may be useful in implementing paginated
- results. Type: integer (or Expression with resultType integer).
- :type skip: object
- :param limit: Specifies the maximum number of documents the server
- returns. limit() is analogous to the LIMIT statement in a SQL database.
- Type: integer (or Expression with resultType integer).
- :type limit: object - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'project': {'key': 'project', 'type': 'object'}, - 'sort': {'key': 'sort', 'type': 'object'}, - 'skip': {'key': 'skip', 'type': 'object'}, - 'limit': {'key': 'limit', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, project=None, sort=None, skip=None, limit=None, **kwargs) -> None: - super(MongoDbCursorMethodsProperties, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.project = project - self.sort = sort - self.skip = skip - self.limit = limit - - -class MongoDbLinkedService(LinkedService): - """Linked service for MongoDb data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param server: Required. The IP address or server name of the MongoDB - server. Type: string (or Expression with resultType string). - :type server: object - :param authentication_type: The authentication type to be used to connect - to the MongoDB database. Possible values include: 'Basic', 'Anonymous' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.MongoDbAuthenticationType - :param database_name: Required. The name of the MongoDB database that you - want to access. Type: string (or Expression with resultType string). - :type database_name: object - :param username: Username for authentication. Type: string (or Expression - with resultType string). - :type username: object - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param auth_source: Database to verify the username and password. Type: - string (or Expression with resultType string). - :type auth_source: object - :param port: The TCP port number that the MongoDB server uses to listen - for client connections. The default value is 27017. Type: integer (or - Expression with resultType integer), minimum: 0. - :type port: object - :param enable_ssl: Specifies whether the connections to the server are - encrypted using SSL. The default value is false. Type: boolean (or - Expression with resultType boolean). - :type enable_ssl: object - :param allow_self_signed_server_cert: Specifies whether to allow - self-signed certificates from the server. The default value is false. - Type: boolean (or Expression with resultType boolean). - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
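# --- Editor's note (not part of the patch): the MongoDbCursorMethodsProperties
# bag completed above mirrors MongoDB's project/sort/skip/limit cursor
# modifiers. A minimal sketch with illustrative values:
from azure.mgmt.datafactory.models import MongoDbCursorMethodsProperties

cursor = MongoDbCursorMethodsProperties(
    project='{"name": 1, "_id": 0}',  # return only the `name` field
    sort='{"name": 1}',               # ascending by `name`
    skip=100,                         # page offset
    limit=50,                         # page size
)
# --- End editor's note.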
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'database_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'database_name': {'key': 'typeProperties.databaseName', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'auth_source': {'key': 'typeProperties.authSource', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, server, database_name, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, username=None, password=None, auth_source=None, port=None, enable_ssl=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: - super(MongoDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.server = server - self.authentication_type = authentication_type - self.database_name = database_name - self.username = username - self.password = password - self.auth_source = auth_source - self.port = port - self.enable_ssl = enable_ssl - self.allow_self_signed_server_cert = allow_self_signed_server_cert - self.encrypted_credential = encrypted_credential - self.type = 'MongoDb' - - -class MongoDbSource(CopySource): - """A copy activity source for a MongoDB database. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Should be a SQL-92 query expression. Type: - string (or Expression with resultType string). 
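# --- Editor's note (not part of the patch): a minimal MongoDbLinkedService,
# built only from the constructor parameters shown above. The host name and
# credentials are assumptions; SecureString is the plain SecretBase subtype
# from the same models package.
from azure.mgmt.datafactory.models import MongoDbLinkedService, SecureString

mongo_ls = MongoDbLinkedService(
    server="mongo.contoso.com",          # required
    database_name="sales",               # required
    authentication_type="Basic",
    username="etl_user",
    password=SecureString(value="<secret>"),
    port=27017,
    enable_ssl=True,
)
# --- End editor's note.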
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'MongoDbSource' - - -class MongoDbV2CollectionDataset(Dataset): - """The MongoDB database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param collection: Required. The collection name of the MongoDB database. - Type: string (or Expression with resultType string). 
- :type collection: object
- """
-
- _validation = {
- 'linked_service_name': {'required': True},
- 'type': {'required': True},
- 'collection': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'description': {'key': 'description', 'type': 'str'},
- 'structure': {'key': 'structure', 'type': 'object'},
- 'schema': {'key': 'schema', 'type': 'object'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
- 'type': {'key': 'type', 'type': 'str'},
- 'collection': {'key': 'typeProperties.collection', 'type': 'object'},
- }
-
- def __init__(self, *, linked_service_name, collection, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None:
- super(MongoDbV2CollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
- self.collection = collection
- self.type = 'MongoDbV2Collection'
-
-
- class MongoDbV2LinkedService(LinkedService):
- """Linked service for MongoDB data source.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param connect_via: The integration runtime reference.
- :type connect_via:
- ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
- :param description: Linked service description.
- :type description: str
- :param parameters: Parameters for linked service.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- linked service.
- :type annotations: list[object]
- :param type: Required. Constant filled by server.
- :type type: str
- :param connection_string: Required. The MongoDB connection string. Type:
- string, SecureString or AzureKeyVaultSecretReference.
- :type connection_string: object
- :param database: Required. The name of the MongoDB database that you want
- to access. Type: string (or Expression with resultType string).
- :type database: object
- """
-
- _validation = {
- 'type': {'required': True},
- 'connection_string': {'required': True},
- 'database': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
- 'description': {'key': 'description', 'type': 'str'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
- 'database': {'key': 'typeProperties.database', 'type': 'object'},
- }
-
- def __init__(self, *, connection_string, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None:
- super(MongoDbV2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
- self.connection_string = connection_string
- self.database = database
- self.type = 'MongoDbV2'
-
-
- class MongoDbV2Source(CopySource):
- """A copy activity source for a MongoDB database.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param source_retry_count: Source retry count. Type: integer (or
- Expression with resultType integer).
- :type source_retry_count: object
- :param source_retry_wait: Source retry wait. Type: string (or Expression
- with resultType string), pattern:
- ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type source_retry_wait: object
- :param max_concurrent_connections: The maximum concurrent connection count
- for the source data store. Type: integer (or Expression with resultType
- integer).
- :type max_concurrent_connections: object
- :param type: Required. Constant filled by server.
- :type type: str
- :param filter: Specifies selection filter using query operators. To return
- all documents in a collection, omit this parameter or pass an empty
- document ({}). Type: string (or Expression with resultType string).
- :type filter: object
- :param cursor_methods: Cursor methods for MongoDB query
- :type cursor_methods:
- ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties
- :param batch_size: Specifies the number of documents to return in each
- batch of the response from the MongoDB instance. In most cases, modifying
- the batch size will not affect the user or the application. This
- property's main purpose is to avoid hitting the response size limit.
- Type: integer (or Expression with resultType integer).
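# --- Editor's note (not part of the patch): a usage sketch pairing the
# MongoDbV2LinkedService completed above with the MongoDbV2CollectionDataset
# defined earlier. Reference names and connection values are hypothetical.
from azure.mgmt.datafactory.models import (
    MongoDbV2LinkedService, MongoDbV2CollectionDataset, LinkedServiceReference)

mongo_v2_ls = MongoDbV2LinkedService(
    connection_string="mongodb://mongo.contoso.com:27017",  # required
    database="sales",                                       # required
)
orders_ds = MongoDbV2CollectionDataset(
    linked_service_name=LinkedServiceReference(reference_name="MongoDbV2Ls"),
    collection="orders",                                    # required
)
# --- End editor's note.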
- :type batch_size: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'filter': {'key': 'filter', 'type': 'object'}, - 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, - 'batch_size': {'key': 'batchSize', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None, **kwargs) -> None: - super(MongoDbV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.filter = filter - self.cursor_methods = cursor_methods - self.batch_size = batch_size - self.type = 'MongoDbV2Source' - - -class MySqlLinkedService(LinkedService): - """Linked service for MySQL data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The connection string. - :type connection_string: ~azure.mgmt.datafactory.models.SecretBase - :param password: The Azure key vault secret reference of password in - connection string. - :type password: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
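# --- Editor's note (not part of the patch): a MongoDbV2Source combining the
# filter, cursor methods, and batch size documented above. Filter and values
# are illustrative.
from azure.mgmt.datafactory.models import (
    MongoDbV2Source, MongoDbCursorMethodsProperties)

v2_source = MongoDbV2Source(
    filter='{"status": "shipped"}',  # omit (or pass "{}") to return all docs
    cursor_methods=MongoDbCursorMethodsProperties(sort='{"_id": 1}', limit=1000),
    batch_size=500,                  # tune only to stay under the response size limit
)
# --- End editor's note.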
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'SecretBase'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(MySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'MySql' - - -class MySqlSource(CopySource): - """A copy activity source for MySQL databases. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'MySqlSource' - - -class MySqlTableDataset(Dataset): - """The MySQL table dataset. - - All required parameters must be populated in order to send to Azure. 
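# --- Editor's note (not part of the patch): a MySQL linked service plus query
# source, using the constructors completed above. Note that in this version
# connection_string is typed SecretBase. The Key Vault reference signature
# (store + secret_name) is an assumption from the same models package; all
# names are placeholders.
from azure.mgmt.datafactory.models import (
    MySqlLinkedService, MySqlSource, SecureString,
    AzureKeyVaultSecretReference, LinkedServiceReference)

mysql_ls = MySqlLinkedService(
    connection_string=SecureString(
        value="Server=mysql.contoso.com;Database=sales;UID=etl_user"),
    password=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name="AkvLs"),
        secret_name="mysql-password"),
)
mysql_source = MySqlSource(query="SELECT id, total FROM orders")
# --- End editor's note.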
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The MySQL table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(MySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'MySqlTable' - - -class NetezzaLinkedService(LinkedService): - """Netezza linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. 
Type: string,
- SecureString or AzureKeyVaultSecretReference.
- :type connection_string: object
- :param pwd: The Azure key vault secret reference of password in connection
- string.
- :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
- :param encrypted_credential: The encrypted credential used for
- authentication. Credentials are encrypted using the integration runtime
- credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
- 'description': {'key': 'description', 'type': 'str'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
- 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'},
- 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
- }
-
- def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None:
- super(NetezzaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
- self.connection_string = connection_string
- self.pwd = pwd
- self.encrypted_credential = encrypted_credential
- self.type = 'Netezza'
-
-
- class NetezzaPartitionSettings(Model):
- """The settings that will be leveraged for Netezza source partitioning.
-
- :param partition_column_name: The name of the column in integer type that
- will be used for range partitioning. Type: string (or Expression with
- resultType string).
- :type partition_column_name: object
- :param partition_upper_bound: The maximum value of the column specified in
- partitionColumnName that will be used for range partitioning. Type:
- string (or Expression with resultType string).
- :type partition_upper_bound: object
- :param partition_lower_bound: The minimum value of the column specified in
- partitionColumnName that will be used for range partitioning. Type:
- string (or Expression with resultType string).
- :type partition_lower_bound: object
- """
-
- _attribute_map = {
- 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'},
- 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'},
- 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'},
- }
-
- def __init__(self, *, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None:
- super(NetezzaPartitionSettings, self).__init__(**kwargs)
- self.partition_column_name = partition_column_name
- self.partition_upper_bound = partition_upper_bound
- self.partition_lower_bound = partition_lower_bound
-
-
- class NetezzaSource(CopySource):
- """A copy activity Netezza source.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param source_retry_count: Source retry count.
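# --- Editor's note (not part of the patch): range-partition settings for a
# parallel Netezza read, per the NetezzaPartitionSettings model completed
# above. Column name and bounds are assumptions.
from azure.mgmt.datafactory.models import NetezzaPartitionSettings

nz_partition = NetezzaPartitionSettings(
    partition_column_name="order_id",  # integer column to range-partition on
    partition_lower_bound="1",
    partition_upper_bound="1000000",
)
# --- End editor's note.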
Type: integer (or
- Expression with resultType integer).
- :type source_retry_count: object
- :param source_retry_wait: Source retry wait. Type: string (or Expression
- with resultType string), pattern:
- ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type source_retry_wait: object
- :param max_concurrent_connections: The maximum concurrent connection count
- for the source data store. Type: integer (or Expression with resultType
- integer).
- :type max_concurrent_connections: object
- :param type: Required. Constant filled by server.
- :type type: str
- :param query: A query to retrieve data from source. Type: string (or
- Expression with resultType string).
- :type query: object
- :param partition_option: The partition mechanism that will be used to read
- Netezza data in parallel. Possible values include: 'None', 'DataSlice',
- 'DynamicRange'
- :type partition_option: str or
- ~azure.mgmt.datafactory.models.NetezzaPartitionOption
- :param partition_settings: The settings that will be leveraged for Netezza
- source partitioning.
- :type partition_settings:
- ~azure.mgmt.datafactory.models.NetezzaPartitionSettings
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
- 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
- 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
- 'type': {'key': 'type', 'type': 'str'},
- 'query': {'key': 'query', 'type': 'object'},
- 'partition_option': {'key': 'partitionOption', 'type': 'str'},
- 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'},
- }
-
- def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, partition_option=None, partition_settings=None, **kwargs) -> None:
- super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
- self.query = query
- self.partition_option = partition_option
- self.partition_settings = partition_settings
- self.type = 'NetezzaSource'
-
-
- class NetezzaTableDataset(Dataset):
- """Netezza dataset.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param description: Dataset description.
- :type description: str
- :param structure: Columns that define the structure of the dataset. Type:
- array (or Expression with resultType array), itemType: DatasetDataElement.
- :type structure: object
- :param schema: Columns that define the physical type schema of the
- dataset. Type: array (or Expression with resultType array), itemType:
- DatasetSchemaDataElement.
- :type schema: object
- :param linked_service_name: Required. Linked service reference.
- :type linked_service_name:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param parameters: Parameters for dataset.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- Dataset.
- :type annotations: list[object]
- :param folder: The folder that this Dataset is in.
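# --- Editor's note (not part of the patch): a NetezzaSource that opts into
# dynamic-range partitioning using settings like those sketched earlier.
# Query and bounds are illustrative.
from azure.mgmt.datafactory.models import NetezzaSource, NetezzaPartitionSettings

nz_source = NetezzaSource(
    query="SELECT * FROM orders",
    partition_option="DynamicRange",  # or 'None' / 'DataSlice'
    partition_settings=NetezzaPartitionSettings(
        partition_column_name="order_id",
        partition_lower_bound="1",
        partition_upper_bound="1000000"),
)
# --- End editor's note.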
If not specified,
- Dataset will appear at the root level.
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
- :param type: Required. Constant filled by server.
- :type type: str
- :param table_name: This property will be retired. Please consider using
- schema + table properties instead.
- :type table_name: object
- :param table: The table name of the Netezza database. Type: string (or
- Expression with resultType string).
- :type table: object
- :param netezza_table_dataset_schema: The schema name of the Netezza
- database. Type: string (or Expression with resultType string).
- :type netezza_table_dataset_schema: object
- """
-
- _validation = {
- 'linked_service_name': {'required': True},
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'description': {'key': 'description', 'type': 'str'},
- 'structure': {'key': 'structure', 'type': 'object'},
- 'schema': {'key': 'schema', 'type': 'object'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
- 'type': {'key': 'type', 'type': 'str'},
- 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
- 'table': {'key': 'typeProperties.table', 'type': 'object'},
- 'netezza_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
- }
-
- def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, netezza_table_dataset_schema=None, **kwargs) -> None:
- super(NetezzaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
- self.table_name = table_name
- self.table = table
- self.netezza_table_dataset_schema = netezza_table_dataset_schema
- self.type = 'NetezzaTable'
-
-
- class ODataLinkedService(LinkedService):
- """Open Data Protocol (OData) linked service.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param connect_via: The integration runtime reference.
- :type connect_via:
- ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
- :param description: Linked service description.
- :type description: str
- :param parameters: Parameters for linked service.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- linked service.
- :type annotations: list[object]
- :param type: Required. Constant filled by server.
- :type type: str
- :param url: Required. The URL of the OData service endpoint. Type: string
- (or Expression with resultType string).
- :type url: object
- :param authentication_type: Type of authentication used to connect to the
- OData service. Possible values include: 'Basic', 'Anonymous', 'Windows',
- 'AadServicePrincipal', 'ManagedServiceIdentity'
- :type authentication_type: str or
- ~azure.mgmt.datafactory.models.ODataAuthenticationType
- :param user_name: User name of the OData service.
Type: string (or
- Expression with resultType string).
- :type user_name: object
- :param password: Password of the OData service.
- :type password: ~azure.mgmt.datafactory.models.SecretBase
- :param tenant: Specify the tenant information (domain name or tenant ID)
- under which your application resides. Type: string (or Expression with
- resultType string).
- :type tenant: object
- :param service_principal_id: Specify the application id of your
- application registered in Azure Active Directory. Type: string (or
- Expression with resultType string).
- :type service_principal_id: object
- :param aad_resource_id: Specify the resource you are requesting
- authorization to use. Type: string (or Expression with resultType
- string).
- :type aad_resource_id: object
- :param aad_service_principal_credential_type: Specify the credential type
- (key or cert) that is used for the service principal. Possible values
- include: 'ServicePrincipalKey', 'ServicePrincipalCert'
- :type aad_service_principal_credential_type: str or
- ~azure.mgmt.datafactory.models.ODataAadServicePrincipalCredentialType
- :param service_principal_key: Specify the secret of your application
- registered in Azure Active Directory. Type: string (or Expression with
- resultType string).
- :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
- :param service_principal_embedded_cert: Specify the base64 encoded
- certificate of your application registered in Azure Active Directory.
- Type: string (or Expression with resultType string).
- :type service_principal_embedded_cert:
- ~azure.mgmt.datafactory.models.SecretBase
- :param service_principal_embedded_cert_password: Specify the password of
- your certificate if your certificate has a password and you are using
- AadServicePrincipal authentication. Type: string (or Expression with
- resultType string).
- :type service_principal_embedded_cert_password:
- ~azure.mgmt.datafactory.models.SecretBase
- :param encrypted_credential: The encrypted credential used for
- authentication. Credentials are encrypted using the integration runtime
- credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, - 'aad_service_principal_credential_type': {'key': 'typeProperties.aadServicePrincipalCredentialType', 'type': 'str'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'service_principal_embedded_cert': {'key': 'typeProperties.servicePrincipalEmbeddedCert', 'type': 'SecretBase'}, - 'service_principal_embedded_cert_password': {'key': 'typeProperties.servicePrincipalEmbeddedCertPassword', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, user_name=None, password=None, tenant=None, service_principal_id=None, aad_resource_id=None, aad_service_principal_credential_type=None, service_principal_key=None, service_principal_embedded_cert=None, service_principal_embedded_cert_password=None, encrypted_credential=None, **kwargs) -> None: - super(ODataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.url = url - self.authentication_type = authentication_type - self.user_name = user_name - self.password = password - self.tenant = tenant - self.service_principal_id = service_principal_id - self.aad_resource_id = aad_resource_id - self.aad_service_principal_credential_type = aad_service_principal_credential_type - self.service_principal_key = service_principal_key - self.service_principal_embedded_cert = service_principal_embedded_cert - self.service_principal_embedded_cert_password = service_principal_embedded_cert_password - self.encrypted_credential = encrypted_credential - self.type = 'OData' - - -class ODataResourceDataset(Dataset): - """The Open Data Protocol (OData) resource dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. 
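# --- Editor's note (not part of the patch): an ODataLinkedService using AAD
# service-principal key authentication, per the parameters completed above.
# The endpoint URL, tenant, and IDs are placeholders.
from azure.mgmt.datafactory.models import ODataLinkedService, SecureString

odata_ls = ODataLinkedService(
    url="https://services.odata.org/V4/Northwind/Northwind.svc",  # required
    authentication_type="AadServicePrincipal",
    service_principal_id="<app-id>",
    tenant="<tenant-id>",
    aad_service_principal_credential_type="ServicePrincipalKey",
    service_principal_key=SecureString(value="<app-secret>"),
    aad_resource_id="<resource-uri>",
)
# --- End editor's note.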
Type: array (or Expression with resultType array), itemType:
- DatasetSchemaDataElement.
- :type schema: object
- :param linked_service_name: Required. Linked service reference.
- :type linked_service_name:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param parameters: Parameters for dataset.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- Dataset.
- :type annotations: list[object]
- :param folder: The folder that this Dataset is in. If not specified,
- Dataset will appear at the root level.
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
- :param type: Required. Constant filled by server.
- :type type: str
- :param path: The OData resource path. Type: string (or Expression with
- resultType string).
- :type path: object
- """
-
- _validation = {
- 'linked_service_name': {'required': True},
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'description': {'key': 'description', 'type': 'str'},
- 'structure': {'key': 'structure', 'type': 'object'},
- 'schema': {'key': 'schema', 'type': 'object'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
- 'type': {'key': 'type', 'type': 'str'},
- 'path': {'key': 'typeProperties.path', 'type': 'object'},
- }
-
- def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, path=None, **kwargs) -> None:
- super(ODataResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
- self.path = path
- self.type = 'ODataResource'
-
-
- class ODataSource(CopySource):
- """A copy activity source for an OData service.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param source_retry_count: Source retry count. Type: integer (or
- Expression with resultType integer).
- :type source_retry_count: object
- :param source_retry_wait: Source retry wait. Type: string (or Expression
- with resultType string), pattern:
- ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type source_retry_wait: object
- :param max_concurrent_connections: The maximum concurrent connection count
- for the source data store. Type: integer (or Expression with resultType
- integer).
- :type max_concurrent_connections: object
- :param type: Required. Constant filled by server.
- :type type: str
- :param query: OData query. For example, "$top=1". Type: string (or
- Expression with resultType string).
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(ODataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'ODataSource' - - -class OdbcLinkedService(LinkedService): - """Open Database Connectivity (ODBC) linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The non-access credential portion of - the connection string as well as an optional encrypted credential. Type: - string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param authentication_type: Type of authentication used to connect to the - ODBC data store. Possible values are: Anonymous and Basic. Type: string - (or Expression with resultType string). - :type authentication_type: object - :param credential: The access credential portion of the connection string - specified in driver-specific property-value format. - :type credential: ~azure.mgmt.datafactory.models.SecretBase - :param user_name: User name for Basic authentication. Type: string (or - Expression with resultType string). - :type user_name: object - :param password: Password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
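# --- Editor's note (not part of the patch): an ODataResourceDataset plus an
# ODataSource with a query option, as documented above. The reference name
# and resource path are hypothetical.
from azure.mgmt.datafactory.models import (
    ODataResourceDataset, ODataSource, LinkedServiceReference)

odata_ds = ODataResourceDataset(
    linked_service_name=LinkedServiceReference(reference_name="ODataLs"),
    path="Customers",                 # resource path on the service
)
odata_source = ODataSource(query="$top=1")  # e.g. fetch a single entity
# --- End editor's note.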
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, credential=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(OdbcLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.authentication_type = authentication_type - self.credential = credential - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'Odbc' - - -class OdbcSink(CopySink): - """A copy activity ODBC sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param pre_copy_script: A query to execute before starting the copy. Type: - string (or Expression with resultType string). 
- :type pre_copy_script: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: - super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.pre_copy_script = pre_copy_script - self.type = 'OdbcSink' - - -class OdbcSource(CopySource): - """A copy activity source for ODBC databases. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'OdbcSource' - - -class OdbcTableDataset(Dataset): - """The ODBC table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. 
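# --- Editor's note (not part of the patch): the ODBC copy endpoints completed
# above. The pre-copy script and query are illustrative values.
from azure.mgmt.datafactory.models import OdbcSink, OdbcSource

odbc_sink = OdbcSink(
    pre_copy_script="TRUNCATE TABLE staging_orders",  # runs before the copy
    write_batch_size=1000,
)
odbc_source = OdbcSource(query="SELECT * FROM orders")
# --- End editor's note.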
- :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The ODBC table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(OdbcTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'OdbcTable' - - -class Office365Dataset(Dataset): - """The Office365 account. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. 
- :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: Required. Name of the dataset to extract from Office - 365. Type: string (or Expression with resultType string). - :type table_name: object - :param predicate: A predicate expression that can be used to filter the - specific rows to extract from Office 365. Type: string (or Expression with - resultType string). - :type predicate: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'table_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'predicate': {'key': 'typeProperties.predicate', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, predicate=None, **kwargs) -> None: - super(Office365Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.predicate = predicate - self.type = 'Office365Table' - - -class Office365LinkedService(LinkedService): - """Office365 linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param office365_tenant_id: Required. Azure tenant ID to which the Office - 365 account belongs. Type: string (or Expression with resultType string). - :type office365_tenant_id: object - :param service_principal_tenant_id: Required. Specify the tenant - information under which your Azure AD web application resides. Type: - string (or Expression with resultType string). - :type service_principal_tenant_id: object - :param service_principal_id: Required. Specify the application's client - ID. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: Required. 
Specify the application's key. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'office365_tenant_id': {'required': True}, - 'service_principal_tenant_id': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'office365_tenant_id': {'key': 'typeProperties.office365TenantId', 'type': 'object'}, - 'service_principal_tenant_id': {'key': 'typeProperties.servicePrincipalTenantId', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, office365_tenant_id, service_principal_tenant_id, service_principal_id, service_principal_key, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, encrypted_credential=None, **kwargs) -> None: - super(Office365LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.office365_tenant_id = office365_tenant_id - self.service_principal_tenant_id = service_principal_tenant_id - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.encrypted_credential = encrypted_credential - self.type = 'Office365' - - -class Office365Source(CopySource): - """A copy activity source for an Office365 service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param allowed_groups: The groups containing all the users. Type: array of - strings (or Expression with resultType array of strings). - :type allowed_groups: object - :param user_scope_filter_uri: The user scope uri. Type: string (or - Expression with resultType string). 
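# Illustrative sketch: populating the four required Office365LinkedService
# fields from the class above. The angle-bracket placeholders are assumptions
# to be replaced with real tenant/application values; SecureString is one
# SecretBase subtype suitable for the service principal key.
from azure.mgmt.datafactory.models import Office365LinkedService, SecureString

office365_ls = Office365LinkedService(
    office365_tenant_id="<office-365-tenant-id>",
    service_principal_tenant_id="<aad-tenant-id>",
    service_principal_id="<application-client-id>",
    service_principal_key=SecureString(value="<application-key>"),
)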
- :type user_scope_filter_uri: object - :param date_filter_column: The column on which to apply the start and end - time filters. Type: string (or Expression with resultType string). - :type date_filter_column: object - :param start_time: Start time of the requested range for this dataset. - Type: string (or Expression with resultType string). - :type start_time: object - :param end_time: End time of the requested range for this dataset. Type: - string (or Expression with resultType string). - :type end_time: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'allowed_groups': {'key': 'allowedGroups', 'type': 'object'}, - 'user_scope_filter_uri': {'key': 'userScopeFilterUri', 'type': 'object'}, - 'date_filter_column': {'key': 'dateFilterColumn', 'type': 'object'}, - 'start_time': {'key': 'startTime', 'type': 'object'}, - 'end_time': {'key': 'endTime', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, allowed_groups=None, user_scope_filter_uri=None, date_filter_column=None, start_time=None, end_time=None, **kwargs) -> None: - super(Office365Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.allowed_groups = allowed_groups - self.user_scope_filter_uri = user_scope_filter_uri - self.date_filter_column = date_filter_column - self.start_time = start_time - self.end_time = end_time - self.type = 'Office365Source' - - -class Operation(Model): - """Azure Data Factory API operation definition. - - :param name: Operation name: {provider}/{resource}/{operation} - :type name: str - :param origin: The intended executor of the operation. - :type origin: str - :param display: Metadata associated with the operation. - :type display: ~azure.mgmt.datafactory.models.OperationDisplay - :param service_specification: Details about a service operation. - :type service_specification: - ~azure.mgmt.datafactory.models.OperationServiceSpecification - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'origin': {'key': 'origin', 'type': 'str'}, - 'display': {'key': 'display', 'type': 'OperationDisplay'}, - 'service_specification': {'key': 'properties.serviceSpecification', 'type': 'OperationServiceSpecification'}, - } - - def __init__(self, *, name: str=None, origin: str=None, display=None, service_specification=None, **kwargs) -> None: - super(Operation, self).__init__(**kwargs) - self.name = name - self.origin = origin - self.display = display - self.service_specification = service_specification - - -class OperationDisplay(Model): - """Metadata associated with the operation. - - :param description: The description of the operation. - :type description: str - :param provider: The name of the provider. - :type provider: str - :param resource: The name of the resource type on which the operation is - performed. - :type resource: str - :param operation: The type of operation: get, read, delete, etc.
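# Illustrative sketch: an Office365Source (defined above) whose extraction
# window is bounded by start_time and end_time applied over date_filter_column.
# The column name and timestamps are assumed examples.
from azure.mgmt.datafactory.models import Office365Source

office365_source = Office365Source(
    date_filter_column="CreatedDateTime",
    start_time="2019-04-01T00:00:00Z",
    end_time="2019-05-01T00:00:00Z",
)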
- :type operation: str - """ - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'provider': {'key': 'provider', 'type': 'str'}, - 'resource': {'key': 'resource', 'type': 'str'}, - 'operation': {'key': 'operation', 'type': 'str'}, - } - - def __init__(self, *, description: str=None, provider: str=None, resource: str=None, operation: str=None, **kwargs) -> None: - super(OperationDisplay, self).__init__(**kwargs) - self.description = description - self.provider = provider - self.resource = resource - self.operation = operation - - -class OperationLogSpecification(Model): - """Details about an operation related to logs. - - :param name: The name of the log category. - :type name: str - :param display_name: Localized display name. - :type display_name: str - :param blob_duration: Blobs created in the customer storage account, per - hour. - :type blob_duration: str - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'blob_duration': {'key': 'blobDuration', 'type': 'str'}, - } - - def __init__(self, *, name: str=None, display_name: str=None, blob_duration: str=None, **kwargs) -> None: - super(OperationLogSpecification, self).__init__(**kwargs) - self.name = name - self.display_name = display_name - self.blob_duration = blob_duration - - -class OperationMetricAvailability(Model): - """Defines how often data for a metric becomes available. - - :param time_grain: The granularity for the metric. - :type time_grain: str - :param blob_duration: Blob created in the customer storage account, per - hour. - :type blob_duration: str - """ - - _attribute_map = { - 'time_grain': {'key': 'timeGrain', 'type': 'str'}, - 'blob_duration': {'key': 'blobDuration', 'type': 'str'}, - } - - def __init__(self, *, time_grain: str=None, blob_duration: str=None, **kwargs) -> None: - super(OperationMetricAvailability, self).__init__(**kwargs) - self.time_grain = time_grain - self.blob_duration = blob_duration - - -class OperationMetricDimension(Model): - """Defines the metric dimension. - - :param name: The name of the dimension for the metric. - :type name: str - :param display_name: The display name of the metric dimension. - :type display_name: str - :param to_be_exported_for_shoebox: Whether the dimension should be - exported to Azure Monitor. - :type to_be_exported_for_shoebox: bool - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'to_be_exported_for_shoebox': {'key': 'toBeExportedForShoebox', 'type': 'bool'}, - } - - def __init__(self, *, name: str=None, display_name: str=None, to_be_exported_for_shoebox: bool=None, **kwargs) -> None: - super(OperationMetricDimension, self).__init__(**kwargs) - self.name = name - self.display_name = display_name - self.to_be_exported_for_shoebox = to_be_exported_for_shoebox - - -class OperationMetricSpecification(Model): - """Details about an operation related to metrics. - - :param name: The name of the metric. - :type name: str - :param display_name: Localized display name of the metric. - :type display_name: str - :param display_description: The description of the metric. - :type display_description: str - :param unit: The unit that the metric is measured in. - :type unit: str - :param aggregation_type: The type of metric aggregation. - :type aggregation_type: str - :param enable_regional_mdm_account: Whether or not the service is using - regional MDM accounts. 
- :type enable_regional_mdm_account: str - :param source_mdm_account: The name of the MDM account. - :type source_mdm_account: str - :param source_mdm_namespace: The name of the MDM namespace. - :type source_mdm_namespace: str - :param availabilities: Defines how often data for metrics becomes - available. - :type availabilities: - list[~azure.mgmt.datafactory.models.OperationMetricAvailability] - :param dimensions: Defines the metric dimension. - :type dimensions: - list[~azure.mgmt.datafactory.models.OperationMetricDimension] - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'display_description': {'key': 'displayDescription', 'type': 'str'}, - 'unit': {'key': 'unit', 'type': 'str'}, - 'aggregation_type': {'key': 'aggregationType', 'type': 'str'}, - 'enable_regional_mdm_account': {'key': 'enableRegionalMdmAccount', 'type': 'str'}, - 'source_mdm_account': {'key': 'sourceMdmAccount', 'type': 'str'}, - 'source_mdm_namespace': {'key': 'sourceMdmNamespace', 'type': 'str'}, - 'availabilities': {'key': 'availabilities', 'type': '[OperationMetricAvailability]'}, - 'dimensions': {'key': 'dimensions', 'type': '[OperationMetricDimension]'}, - } - - def __init__(self, *, name: str=None, display_name: str=None, display_description: str=None, unit: str=None, aggregation_type: str=None, enable_regional_mdm_account: str=None, source_mdm_account: str=None, source_mdm_namespace: str=None, availabilities=None, dimensions=None, **kwargs) -> None: - super(OperationMetricSpecification, self).__init__(**kwargs) - self.name = name - self.display_name = display_name - self.display_description = display_description - self.unit = unit - self.aggregation_type = aggregation_type - self.enable_regional_mdm_account = enable_regional_mdm_account - self.source_mdm_account = source_mdm_account - self.source_mdm_namespace = source_mdm_namespace - self.availabilities = availabilities - self.dimensions = dimensions - - -class OperationServiceSpecification(Model): - """Details about a service operation. - - :param log_specifications: Details about operations related to logs. - :type log_specifications: - list[~azure.mgmt.datafactory.models.OperationLogSpecification] - :param metric_specifications: Details about operations related to metrics. - :type metric_specifications: - list[~azure.mgmt.datafactory.models.OperationMetricSpecification] - """ - - _attribute_map = { - 'log_specifications': {'key': 'logSpecifications', 'type': '[OperationLogSpecification]'}, - 'metric_specifications': {'key': 'metricSpecifications', 'type': '[OperationMetricSpecification]'}, - } - - def __init__(self, *, log_specifications=None, metric_specifications=None, **kwargs) -> None: - super(OperationServiceSpecification, self).__init__(**kwargs) - self.log_specifications = log_specifications - self.metric_specifications = metric_specifications - - -class OracleLinkedService(LinkedService): - """Oracle database. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure Key Vault secret reference of the password in - the connection string. - :type password: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(OracleLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'Oracle' - - -class OraclePartitionSettings(Model): - """The settings that will be leveraged for Oracle source partitioning. - - :param partition_names: Names of the physical partitions of the Oracle - table. - :type partition_names: object - :param partition_column_name: The name of the integer-typed column that - will be used to proceed with range partitioning. Type: string (or - Expression with resultType string). - :type partition_column_name: object - :param partition_upper_bound: The maximum value of the column specified in - partitionColumnName that will be used to proceed with range partitioning. - Type: string (or Expression with resultType string). - :type partition_upper_bound: object - :param partition_lower_bound: The minimum value of the column specified in - partitionColumnName that will be used to proceed with range partitioning. - Type: string (or Expression with resultType string).
- :type partition_lower_bound: object - """ - - _attribute_map = { - 'partition_names': {'key': 'partitionNames', 'type': 'object'}, - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, - 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, - 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, - } - - def __init__(self, *, partition_names=None, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None: - super(OraclePartitionSettings, self).__init__(**kwargs) - self.partition_names = partition_names - self.partition_column_name = partition_column_name - self.partition_upper_bound = partition_upper_bound - self.partition_lower_bound = partition_lower_bound - - -class OracleServiceCloudLinkedService(LinkedService): - """Oracle Service Cloud linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The URL of the Oracle Service Cloud instance. - :type host: object - :param username: Required. The user name that you use to access Oracle - Service Cloud server. - :type username: object - :param password: Required. The password corresponding to the user name - that you provided in the username key. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. Type: - boolean (or Expression with resultType boolean). - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. Type: boolean (or - Expression with resultType boolean). - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. Type: - boolean (or Expression with resultType boolean). - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
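# Illustrative sketch: OraclePartitionSettings (defined above) with explicit
# physical partition names; partition_names is assumed to accept a list of
# partition name strings, since its wire type is a bare object.
from azure.mgmt.datafactory.models import OraclePartitionSettings

physical_partitions = OraclePartitionSettings(
    partition_names=["P_2019_Q1", "P_2019_Q2"],  # assumed example partition names
)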
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'username': {'required': True}, - 'password': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, username, password, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(OracleServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.username = username - self.password = password - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'OracleServiceCloud' - - -class OracleServiceCloudObjectDataset(Dataset): - """Oracle Service Cloud dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). 
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(OracleServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'OracleServiceCloudObject' - - -class OracleServiceCloudSource(CopySource): - """A copy activity Oracle Service Cloud source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'OracleServiceCloudSource' - - -class OracleSink(CopySink): - """A copy activity Oracle sink. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression - with resultType string). - :type pre_copy_script: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: - super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.pre_copy_script = pre_copy_script - self.type = 'OracleSink' - - -class OracleSource(CopySource): - """A copy activity Oracle source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param oracle_reader_query: Oracle reader query. Type: string (or - Expression with resultType string). - :type oracle_reader_query: object - :param query_timeout: Query timeout. 
Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param partition_option: The partition mechanism that will be used for - Oracle read in parallel. Possible values include: 'None', - 'PhysicalPartitionsOfTable', 'DynamicRange' - :type partition_option: str or - ~azure.mgmt.datafactory.models.OraclePartitionOption - :param partition_settings: The settings that will be leveraged for Oracle - source partitioning. - :type partition_settings: - ~azure.mgmt.datafactory.models.OraclePartitionSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, oracle_reader_query=None, query_timeout=None, partition_option=None, partition_settings=None, **kwargs) -> None: - super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.oracle_reader_query = oracle_reader_query - self.query_timeout = query_timeout - self.partition_option = partition_option - self.partition_settings = partition_settings - self.type = 'OracleSource' - - -class OracleTableDataset(Dataset): - """The on-premises Oracle database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. 
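# Illustrative sketch: an OracleSource (defined above) configured for parallel
# reads with dynamic range partitioning; the column name and bounds are
# assumed examples.
from azure.mgmt.datafactory.models import OraclePartitionSettings, OracleSource

oracle_source = OracleSource(
    partition_option='DynamicRange',
    partition_settings=OraclePartitionSettings(
        partition_column_name="ORDER_ID",   # integer-typed partitioning column
        partition_lower_bound="1",
        partition_upper_bound="1000000",
    ),
)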
- :type table_name: object - :param oracle_table_dataset_schema: The schema name of the on-premises - Oracle database. Type: string (or Expression with resultType string). - :type oracle_table_dataset_schema: object - :param table: The table name of the on-premises Oracle database. Type: - string (or Expression with resultType string). - :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'oracle_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, oracle_table_dataset_schema=None, table=None, **kwargs) -> None: - super(OracleTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.oracle_table_dataset_schema = oracle_table_dataset_schema - self.table = table - self.type = 'OracleTable' - - -class OrcFormat(DatasetStorageFormat): - """The data stored in Optimized Row Columnar (ORC) format. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param serializer: Serializer. Type: string (or Expression with resultType - string). - :type serializer: object - :param deserializer: Deserializer. Type: string (or Expression with - resultType string). - :type deserializer: object - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, **kwargs) -> None: - super(OrcFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) - self.type = 'OrcFormat' - - -class ParameterSpecification(Model): - """Definition of a single parameter for an entity. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Parameter type. 
Possible values include: 'Object', - 'String', 'Int', 'Float', 'Bool', 'Array', 'SecureString' - :type type: str or ~azure.mgmt.datafactory.models.ParameterType - :param default_value: Default value of parameter. - :type default_value: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'default_value': {'key': 'defaultValue', 'type': 'object'}, - } - - def __init__(self, *, type, default_value=None, **kwargs) -> None: - super(ParameterSpecification, self).__init__(**kwargs) - self.type = type - self.default_value = default_value - - -class ParquetDataset(Dataset): - """Parquet dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param location: Required. The location of the parquet storage. 
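# Illustrative sketch: declaring entity parameters with the
# ParameterSpecification model above; the names and default value are assumed
# examples. Such a dict is what the `parameters` properties of datasets and
# linked services in this module expect.
from azure.mgmt.datafactory.models import ParameterSpecification

dataset_parameters = {
    "windowStart": ParameterSpecification(type='String'),
    "retryCount": ParameterSpecification(type='Int', default_value=3),
}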
- :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param compression_codec: - :type compression_codec: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'location': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, compression_codec=None, **kwargs) -> None: - super(ParquetDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.location = location - self.compression_codec = compression_codec - self.type = 'Parquet' - - -class ParquetFormat(DatasetStorageFormat): - """The data stored in Parquet format. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param serializer: Serializer. Type: string (or Expression with resultType - string). - :type serializer: object - :param deserializer: Deserializer. Type: string (or Expression with - resultType string). - :type deserializer: object - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, **kwargs) -> None: - super(ParquetFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) - self.type = 'ParquetFormat' - - -class ParquetSink(CopySink): - """A copy activity Parquet sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). 
- :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: Parquet store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: - super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.store_settings = store_settings - self.type = 'ParquetSink' - - -class ParquetSource(CopySource): - """A copy activity Parquet source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: Parquet store settings. 
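# Illustrative sketch: a ParquetSink (defined above) using only the batching
# and retry knobs inherited from CopySink; the values are assumed examples.
from azure.mgmt.datafactory.models import ParquetSink

parquet_sink = ParquetSink(
    write_batch_size=10000,     # integer (or Expression), minimum: 0
    sink_retry_count=2,
    sink_retry_wait="00:00:30",
)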
- :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: - super(ParquetSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.store_settings = store_settings - self.type = 'ParquetSource' - - -class PaypalLinkedService(LinkedService): - """Paypal Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized to this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The URL of the PayPal instance. (e.g. - api.sandbox.paypal.com) - :type host: object - :param client_id: Required. The client ID associated with your PayPal - application. - :type client_id: object - :param client_secret: The client secret associated with your PayPal - application. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(PaypalLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.client_id = client_id - self.client_secret = client_secret - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'Paypal' - - -class PaypalObjectDataset(Dataset): - """Paypal Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). 
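# Illustrative sketch: the required PaypalLinkedService fields plus the client
# secret. The sandbox host comes from the docstring above; the placeholder
# credentials are assumptions.
from azure.mgmt.datafactory.models import PaypalLinkedService, SecureString

paypal_ls = PaypalLinkedService(
    host="api.sandbox.paypal.com",
    client_id="<paypal-client-id>",
    client_secret=SecureString(value="<paypal-client-secret>"),
)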
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(PaypalObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'PaypalObject' - - -class PaypalSource(CopySource): - """A copy activity Paypal Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'PaypalSource' - - -class PhoenixLinkedService(LinkedService): - """Phoenix server linked service. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The IP address or host name of the Phoenix server. - (i.e. 192.168.222.160) - :type host: object - :param port: The TCP port that the Phoenix server uses to listen for - client connections. The default value is 8765. - :type port: object - :param http_path: The partial URL corresponding to the Phoenix server. - (i.e. /gateway/sandbox/phoenix/version). The default value is hbasephoenix - if using WindowsAzureHDInsightService. - :type http_path: object - :param authentication_type: Required. The authentication mechanism used to - connect to the Phoenix server. Possible values include: 'Anonymous', - 'UsernameAndPassword', 'WindowsAzureHDInsightService' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.PhoenixAuthenticationType - :param username: The user name used to connect to the Phoenix server. - :type username: object - :param password: The password corresponding to the user name. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are - encrypted using SSL. The default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing - trusted CA certificates for verifying the server when connecting over SSL. - This property can only be set when using SSL on self-hosted IR. The - default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate - from the system trust store or from a specified PEM file. The default - value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a - CA-issued SSL certificate name to match the host name of the server when - connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow - self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, http_path=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: - super(PhoenixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.port = port - self.http_path = http_path - self.authentication_type = authentication_type - self.username = username - self.password = password - self.enable_ssl = enable_ssl - self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch - self.allow_self_signed_server_cert = allow_self_signed_server_cert - self.encrypted_credential = encrypted_credential - self.type = 'Phoenix' - - -class PhoenixObjectDataset(Dataset): - """Phoenix server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
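The Phoenix linked service above packs its SSL and gateway options into typeProperties. A hedged sketch with placeholder values, assuming an HDInsight-hosted cluster where http_path defaults to hbasephoenix:

    from azure.mgmt.datafactory.models import PhoenixLinkedService, SecureString

    # HDInsight-hosted Phoenix is reached over SSL through the gateway
    # path, so enable_ssl and http_path go together here.
    phoenix_ls = PhoenixLinkedService(
        host='cluster.azurehdinsight.net',                  # placeholder
        authentication_type='WindowsAzureHDInsightService',
        http_path='hbasephoenix',                           # gateway sub-path
        port=443,
        enable_ssl=True,
        username='admin',                                   # placeholder
        password=SecureString(value='<password>'))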
- :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of the Phoenix. Type: string (or Expression - with resultType string). - :type table: object - :param phoenix_object_dataset_schema: The schema name of the Phoenix. - Type: string (or Expression with resultType string). - :type phoenix_object_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'phoenix_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, phoenix_object_dataset_schema=None, **kwargs) -> None: - super(PhoenixObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.table = table - self.phoenix_object_dataset_schema = phoenix_object_dataset_schema - self.type = 'PhoenixObject' - - -class PhoenixSource(CopySource): - """A copy activity Phoenix server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. 
- :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'PhoenixSource' - - -class PipelineFolder(Model): - """The folder that this Pipeline is in. If not specified, Pipeline will appear - at the root level. - - :param name: The name of the folder that this Pipeline is in. - :type name: str - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - } - - def __init__(self, *, name: str=None, **kwargs) -> None: - super(PipelineFolder, self).__init__(**kwargs) - self.name = name - - -class PipelineReference(Model): - """Pipeline reference type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Pipeline reference type. Default value: - "PipelineReference" . - :vartype type: str - :param reference_name: Required. Reference pipeline name. - :type reference_name: str - :param name: Reference name. - :type name: str - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - } - - type = "PipelineReference" - - def __init__(self, *, reference_name: str, name: str=None, **kwargs) -> None: - super(PipelineReference, self).__init__(**kwargs) - self.reference_name = reference_name - self.name = name - - -class PipelineResource(SubResource): - """Pipeline resource type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: The description of the pipeline. - :type description: str - :param activities: List of activities in pipeline. - :type activities: list[~azure.mgmt.datafactory.models.Activity] - :param parameters: List of parameters for pipeline. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param variables: List of variables for pipeline. 
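Since PipelineReference.type is a class-level constant, callers only supply reference_name. A small sketch of how a reference is typically consumed, here by an ExecutePipelineActivity from the same models module; the pipeline name and parameter are invented:

    from azure.mgmt.datafactory.models import (
        ExecutePipelineActivity, PipelineReference)

    # type is pinned by the class; only the referenced pipeline's name
    # (a placeholder below) is passed by the caller.
    ref = PipelineReference(reference_name='CopyFromPaypal')
    assert PipelineReference.type == 'PipelineReference'

    # A reference is usually consumed by another resource, e.g. an
    # execute-pipeline activity that fans out to the referenced pipeline.
    exec_act = ExecutePipelineActivity(
        name='RunCopy',
        pipeline=ref,
        parameters={'windowStart': '2019-06-01T00:00:00Z'},
        wait_on_completion=True)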
- :type variables: dict[str, - ~azure.mgmt.datafactory.models.VariableSpecification] - :param concurrency: The max number of concurrent runs for the pipeline. - :type concurrency: int - :param annotations: List of tags that can be used for describing the - Pipeline. - :type annotations: list[object] - :param run_dimensions: Dimensions emitted by Pipeline. - :type run_dimensions: dict[str, object] - :param folder: The folder that this Pipeline is in. If not specified, - Pipeline will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.PipelineFolder - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'concurrency': {'minimum': 1}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'properties.description', 'type': 'str'}, - 'activities': {'key': 'properties.activities', 'type': '[Activity]'}, - 'parameters': {'key': 'properties.parameters', 'type': '{ParameterSpecification}'}, - 'variables': {'key': 'properties.variables', 'type': '{VariableSpecification}'}, - 'concurrency': {'key': 'properties.concurrency', 'type': 'int'}, - 'annotations': {'key': 'properties.annotations', 'type': '[object]'}, - 'run_dimensions': {'key': 'properties.runDimensions', 'type': '{object}'}, - 'folder': {'key': 'properties.folder', 'type': 'PipelineFolder'}, - } - - def __init__(self, *, additional_properties=None, description: str=None, activities=None, parameters=None, variables=None, concurrency: int=None, annotations=None, run_dimensions=None, folder=None, **kwargs) -> None: - super(PipelineResource, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.description = description - self.activities = activities - self.parameters = parameters - self.variables = variables - self.concurrency = concurrency - self.annotations = annotations - self.run_dimensions = run_dimensions - self.folder = folder - - -class PipelineRun(Model): - """Information about a pipeline run. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar run_id: Identifier of a run. - :vartype run_id: str - :ivar run_group_id: Identifier that correlates all the recovery runs of a - pipeline run. - :vartype run_group_id: str - :ivar is_latest: Indicates if the recovered pipeline run is the latest in - its group. - :vartype is_latest: bool - :ivar pipeline_name: The pipeline name. - :vartype pipeline_name: str - :ivar parameters: The full or partial list of parameter name, value pair - used in the pipeline run. - :vartype parameters: dict[str, str] - :ivar run_dimensions: Run dimensions emitted by Pipeline run. - :vartype run_dimensions: dict[str, str] - :ivar invoked_by: Entity that started the pipeline run. - :vartype invoked_by: ~azure.mgmt.datafactory.models.PipelineRunInvokedBy - :ivar last_updated: The last updated timestamp for the pipeline run event - in ISO8601 format. - :vartype last_updated: datetime - :ivar run_start: The start time of a pipeline run in ISO8601 format. - :vartype run_start: datetime - :ivar run_end: The end time of a pipeline run in ISO8601 format. 
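To see the PipelineResource pieces in context, a rough end-to-end sketch follows; the service-principal credentials and every resource name are placeholders, and pipelines.create_or_update is the operations-group call this SDK exposes for publishing:

    from azure.common.credentials import ServicePrincipalCredentials
    from azure.mgmt.datafactory import DataFactoryManagementClient
    from azure.mgmt.datafactory.models import (
        BlobSink, BlobSource, CopyActivity, DatasetReference,
        ParameterSpecification, PipelineFolder, PipelineResource)

    client = DataFactoryManagementClient(
        ServicePrincipalCredentials(client_id='<app-id>', secret='<secret>',
                                    tenant='<tenant-id>'),
        '<subscription-id>')

    copy = CopyActivity(
        name='CopyBlobToBlob',
        inputs=[DatasetReference(reference_name='InputBlobs')],    # placeholder
        outputs=[DatasetReference(reference_name='OutputBlobs')],  # placeholder
        source=BlobSource(),
        sink=BlobSink())

    pipeline = PipelineResource(
        description='Nightly blob copy',
        activities=[copy],
        parameters={'runDate': ParameterSpecification(type='String')},
        concurrency=1,                      # validation enforces a minimum of 1
        folder=PipelineFolder(name='ingest'))

    client.pipelines.create_or_update('my-rg', 'my-factory', 'NightlyCopy', pipeline)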
-    :vartype run_end: datetime
-    :ivar duration_in_ms: The duration of a pipeline run.
-    :vartype duration_in_ms: int
-    :ivar status: The status of a pipeline run.
-    :vartype status: str
-    :ivar message: The message from a pipeline run.
-    :vartype message: str
-    """
-
-    _validation = {
-        'run_id': {'readonly': True},
-        'run_group_id': {'readonly': True},
-        'is_latest': {'readonly': True},
-        'pipeline_name': {'readonly': True},
-        'parameters': {'readonly': True},
-        'run_dimensions': {'readonly': True},
-        'invoked_by': {'readonly': True},
-        'last_updated': {'readonly': True},
-        'run_start': {'readonly': True},
-        'run_end': {'readonly': True},
-        'duration_in_ms': {'readonly': True},
-        'status': {'readonly': True},
-        'message': {'readonly': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'run_id': {'key': 'runId', 'type': 'str'},
-        'run_group_id': {'key': 'runGroupId', 'type': 'str'},
-        'is_latest': {'key': 'isLatest', 'type': 'bool'},
-        'pipeline_name': {'key': 'pipelineName', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{str}'},
-        'run_dimensions': {'key': 'runDimensions', 'type': '{str}'},
-        'invoked_by': {'key': 'invokedBy', 'type': 'PipelineRunInvokedBy'},
-        'last_updated': {'key': 'lastUpdated', 'type': 'iso-8601'},
-        'run_start': {'key': 'runStart', 'type': 'iso-8601'},
-        'run_end': {'key': 'runEnd', 'type': 'iso-8601'},
-        'duration_in_ms': {'key': 'durationInMs', 'type': 'int'},
-        'status': {'key': 'status', 'type': 'str'},
-        'message': {'key': 'message', 'type': 'str'},
-    }
-
-    def __init__(self, *, additional_properties=None, **kwargs) -> None:
-        super(PipelineRun, self).__init__(**kwargs)
-        self.additional_properties = additional_properties
-        self.run_id = None
-        self.run_group_id = None
-        self.is_latest = None
-        self.pipeline_name = None
-        self.parameters = None
-        self.run_dimensions = None
-        self.invoked_by = None
-        self.last_updated = None
-        self.run_start = None
-        self.run_end = None
-        self.duration_in_ms = None
-        self.status = None
-        self.message = None
-
-
-class PipelineRunInvokedBy(Model):
-    """Provides entity name and id that started the pipeline run.
-
-    Variables are only populated by the server, and will be ignored when
-    sending a request.
-
-    :ivar name: Name of the entity that started the pipeline run.
-    :vartype name: str
-    :ivar id: The ID of the entity that started the run.
-    :vartype id: str
-    :ivar invoked_by_type: The type of the entity that started the run.
-    :vartype invoked_by_type: str
-    """
-
-    _validation = {
-        'name': {'readonly': True},
-        'id': {'readonly': True},
-        'invoked_by_type': {'readonly': True},
-    }
-
-    _attribute_map = {
-        'name': {'key': 'name', 'type': 'str'},
-        'id': {'key': 'id', 'type': 'str'},
-        'invoked_by_type': {'key': 'invokedByType', 'type': 'str'},
-    }
-
-    def __init__(self, **kwargs) -> None:
-        super(PipelineRunInvokedBy, self).__init__(**kwargs)
-        self.name = None
-        self.id = None
-        self.invoked_by_type = None
-
-
-class PipelineRunsQueryResponse(Model):
-    """A list of pipeline runs.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param value: Required. List of pipeline runs.
-    :type value: list[~azure.mgmt.datafactory.models.PipelineRun]
-    :param continuation_token: The continuation token for getting the next
-     page of results, if any remaining results exist, null otherwise.
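PipelineRun and PipelineRunInvokedBy are response-only shapes. A sketch of fetching one run and reading the server-populated fields, reusing the client built in the pipeline sketch above; names are placeholders:

    # The run id would come from a create_run response in real use.
    run_id = client.pipelines.create_run('my-rg', 'my-factory', 'NightlyCopy').run_id
    run = client.pipeline_runs.get('my-rg', 'my-factory', run_id)

    # All PipelineRun fields are readonly: populated by the service and
    # ignored if supplied in a request.
    print(run.pipeline_name, run.status, run.duration_in_ms)
    if run.invoked_by is not None:
        print(run.invoked_by.name, run.invoked_by.invoked_by_type)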
- :type continuation_token: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[PipelineRun]'}, - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, - } - - def __init__(self, *, value, continuation_token: str=None, **kwargs) -> None: - super(PipelineRunsQueryResponse, self).__init__(**kwargs) - self.value = value - self.continuation_token = continuation_token - - -class PolybaseSettings(Model): - """PolyBase settings. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param reject_type: Reject type. Possible values include: 'value', - 'percentage' - :type reject_type: str or - ~azure.mgmt.datafactory.models.PolybaseSettingsRejectType - :param reject_value: Specifies the value or the percentage of rows that - can be rejected before the query fails. Type: number (or Expression with - resultType number), minimum: 0. - :type reject_value: object - :param reject_sample_value: Determines the number of rows to attempt to - retrieve before the PolyBase recalculates the percentage of rejected rows. - Type: integer (or Expression with resultType integer), minimum: 0. - :type reject_sample_value: object - :param use_type_default: Specifies how to handle missing values in - delimited text files when PolyBase retrieves data from the text file. - Type: boolean (or Expression with resultType boolean). - :type use_type_default: object - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'reject_type': {'key': 'rejectType', 'type': 'str'}, - 'reject_value': {'key': 'rejectValue', 'type': 'object'}, - 'reject_sample_value': {'key': 'rejectSampleValue', 'type': 'object'}, - 'use_type_default': {'key': 'useTypeDefault', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, reject_type=None, reject_value=None, reject_sample_value=None, use_type_default=None, **kwargs) -> None: - super(PolybaseSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.reject_type = reject_type - self.reject_value = reject_value - self.reject_sample_value = reject_sample_value - self.use_type_default = use_type_default - - -class PostgreSqlLinkedService(LinkedService): - """Linked service for PostgreSQL data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The connection string. - :type connection_string: ~azure.mgmt.datafactory.models.SecretBase - :param password: The Azure key vault secret reference of password in - connection string. 
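The continuation_token is what drives paging. A sketch of a query loop, assuming RunFilterParameters (defined elsewhere in this module) and the client from the earlier sketches:

    from datetime import datetime, timedelta
    from azure.mgmt.datafactory.models import RunFilterParameters

    # Page through the last day of runs.
    filters = RunFilterParameters(
        last_updated_after=datetime.utcnow() - timedelta(days=1),
        last_updated_before=datetime.utcnow())

    while True:
        page = client.pipeline_runs.query_by_factory('my-rg', 'my-factory', filters)
        for run in page.value:
            print(run.run_id, run.status)
        if not page.continuation_token:   # null when no results remain
            break
        filters.continuation_token = page.continuation_token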
- :type password: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'SecretBase'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(PostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'PostgreSql' - - -class PostgreSqlSource(CopySource): - """A copy activity source for PostgreSQL databases. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). 
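Note that for this connector connection_string is typed as SecretBase rather than object, and the password can be factored out into Key Vault. A sketch with placeholder server and secret names:

    from azure.mgmt.datafactory.models import (
        AzureKeyVaultSecretReference, LinkedServiceReference,
        PostgreSqlLinkedService, SecureString)

    # connection_string is a SecretBase here, so it is wrapped in
    # SecureString; the password can instead be resolved from Key Vault
    # at runtime via an AzureKeyVaultSecretReference.
    pg_ls = PostgreSqlLinkedService(
        connection_string=SecureString(
            value='host=pg.example.com;port=5432;database=orders;uid=etl'),
        password=AzureKeyVaultSecretReference(
            store=LinkedServiceReference(reference_name='MyKeyVault'),
            secret_name='pg-password'))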
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'PostgreSqlSource' - - -class PostgreSqlTableDataset(Dataset): - """The PostgreSQL table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The PostgreSQL table name. Type: string (or Expression with - resultType string). - :type table: object - :param postgre_sql_table_dataset_schema: The PostgreSQL schema name. Type: - string (or Expression with resultType string). 
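Because query is typed as object, it can carry either a literal string or an ADF expression payload. A sketch showing both forms; the table and parameter names are invented:

    from azure.mgmt.datafactory.models import PostgreSqlSource

    # A plain string and an expression dict serialize into the same
    # "query" property.
    literal_src = PostgreSqlSource(query='SELECT * FROM public.orders')
    expr_src = PostgreSqlSource(query={
        'value': '@pipeline().parameters.sourceQuery',
        'type': 'Expression'})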
-    :type postgre_sql_table_dataset_schema: object
-    """
-
-    _validation = {
-        'linked_service_name': {'required': True},
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'description': {'key': 'description', 'type': 'str'},
-        'structure': {'key': 'structure', 'type': 'object'},
-        'schema': {'key': 'schema', 'type': 'object'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
-        'type': {'key': 'type', 'type': 'str'},
-        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
-        'table': {'key': 'typeProperties.table', 'type': 'object'},
-        'postgre_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
-    }
-
-    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, postgre_sql_table_dataset_schema=None, **kwargs) -> None:
-        super(PostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
-        self.table_name = table_name
-        self.table = table
-        self.postgre_sql_table_dataset_schema = postgre_sql_table_dataset_schema
-        self.type = 'PostgreSqlTable'
-
-
-class PrestoLinkedService(LinkedService):
-    """Presto server linked service.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param host: Required. The IP address or host name of the Presto server.
-     (i.e. 192.168.222.160)
-    :type host: object
-    :param server_version: Required. The version of the Presto server. (i.e.
-     0.148-t)
-    :type server_version: object
-    :param catalog: Required. The catalog context for all requests against the
-     server.
-    :type catalog: object
-    :param port: The TCP port that the Presto server uses to listen for client
-     connections. The default value is 8080.
-    :type port: object
-    :param authentication_type: Required. The authentication mechanism used to
-     connect to the Presto server. Possible values include: 'Anonymous', 'LDAP'
-    :type authentication_type: str or
-     ~azure.mgmt.datafactory.models.PrestoAuthenticationType
-    :param username: The user name used to connect to the Presto server.
-    :type username: object
-    :param password: The password corresponding to the user name.
-    :type password: ~azure.mgmt.datafactory.models.SecretBase
-    :param enable_ssl: Specifies whether the connections to the server are
-     encrypted using SSL.
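The awkward postgre_sql_table_dataset_schema name exists because Dataset already claims schema for the physical column layout; it still serializes to typeProperties.schema. A sketch with placeholder names, preferring table/schema over the retired table_name:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, PostgreSqlTableDataset)

    # The long attribute carries the PostgreSQL schema name and maps to
    # typeProperties.schema on the wire.
    pg_ds = PostgreSqlTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='PostgresLS'),
        table='orders',
        postgre_sql_table_dataset_schema='public')  # not the retired table_name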
The default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing - trusted CA certificates for verifying the server when connecting over SSL. - This property can only be set when using SSL on self-hosted IR. The - default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate - from the system trust store or from a specified PEM file. The default - value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a - CA-issued SSL certificate name to match the host name of the server when - connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow - self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object - :param time_zone_id: The local time zone used by the connection. Valid - values for this option are specified in the IANA Time Zone Database. The - default value is the system time zone. - :type time_zone_id: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'server_version': {'required': True}, - 'catalog': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'server_version': {'key': 'typeProperties.serverVersion', 'type': 'object'}, - 'catalog': {'key': 'typeProperties.catalog', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'time_zone_id': {'key': 'typeProperties.timeZoneID', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, server_version, catalog, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, 
time_zone_id=None, encrypted_credential=None, **kwargs) -> None: - super(PrestoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.server_version = server_version - self.catalog = catalog - self.port = port - self.authentication_type = authentication_type - self.username = username - self.password = password - self.enable_ssl = enable_ssl - self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch - self.allow_self_signed_server_cert = allow_self_signed_server_cert - self.time_zone_id = time_zone_id - self.encrypted_credential = encrypted_credential - self.type = 'Presto' - - -class PrestoObjectDataset(Dataset): - """Presto server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of the Presto. Type: string (or Expression - with resultType string). - :type table: object - :param presto_object_dataset_schema: The schema name of the Presto. Type: - string (or Expression with resultType string). 
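Of the Presto properties, only host, server_version, catalog, and authentication_type are required. A hedged sketch of an LDAP-over-SSL configuration with placeholder values:

    from azure.mgmt.datafactory.models import PrestoLinkedService, SecureString

    # Everything outside the four required properties has a service-side
    # default (port 8080, no SSL, system time zone).
    presto_ls = PrestoLinkedService(
        host='presto.example.com',        # placeholder
        server_version='0.148-t',
        catalog='hive',
        authentication_type='LDAP',
        username='etl',                   # placeholder
        password=SecureString(value='<password>'),
        enable_ssl=True,
        time_zone_id='UTC')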
- :type presto_object_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'presto_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, presto_object_dataset_schema=None, **kwargs) -> None: - super(PrestoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.table = table - self.presto_object_dataset_schema = presto_object_dataset_schema - self.type = 'PrestoObject' - - -class PrestoSource(CopySource): - """A copy activity Presto server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'PrestoSource' - - -class QuickBooksLinkedService(LinkedService): - """QuickBooks server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param endpoint: Required. The endpoint of the QuickBooks server. (i.e. - quickbooks.api.intuit.com) - :type endpoint: object - :param company_id: Required. The company ID of the QuickBooks company to - authorize. - :type company_id: object - :param consumer_key: Required. The consumer key for OAuth 1.0 - authentication. - :type consumer_key: object - :param consumer_secret: Required. The consumer secret for OAuth 1.0 - authentication. - :type consumer_secret: ~azure.mgmt.datafactory.models.SecretBase - :param access_token: Required. The access token for OAuth 1.0 - authentication. - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param access_token_secret: Required. The access token secret for OAuth - 1.0 authentication. - :type access_token_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'company_id': {'required': True}, - 'consumer_key': {'required': True}, - 'consumer_secret': {'required': True}, - 'access_token': {'required': True}, - 'access_token_secret': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'company_id': {'key': 'typeProperties.companyId', 'type': 'object'}, - 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'object'}, - 'consumer_secret': {'key': 'typeProperties.consumerSecret', 'type': 'SecretBase'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'access_token_secret': {'key': 'typeProperties.accessTokenSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, endpoint, company_id, consumer_key, consumer_secret, access_token, access_token_secret, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, use_encrypted_endpoints=None, encrypted_credential=None, **kwargs) -> None: - super(QuickBooksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.endpoint = endpoint - self.company_id = company_id - self.consumer_key = consumer_key - self.consumer_secret = consumer_secret - self.access_token = access_token - self.access_token_secret = access_token_secret - self.use_encrypted_endpoints = use_encrypted_endpoints - self.encrypted_credential = encrypted_credential - self.type = 'QuickBooks' - - -class QuickBooksObjectDataset(Dataset): - """QuickBooks server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. 
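QuickBooks is unusual in requiring all six connection properties, three of them secrets for the OAuth 1.0 handshake. A sketch with placeholder values, again using SecureString for the SecretBase slots:

    from azure.mgmt.datafactory.models import (
        QuickBooksLinkedService, SecureString)

    # endpoint, company_id, and consumer_key are plain values; the
    # consumer secret, access token, and access token secret are SecretBase.
    qb_ls = QuickBooksLinkedService(
        endpoint='quickbooks.api.intuit.com',
        company_id='123145812345678',           # placeholder
        consumer_key='my-consumer-key',         # placeholder
        consumer_secret=SecureString(value='<consumer-secret>'),
        access_token=SecureString(value='<access-token>'),
        access_token_secret=SecureString(value='<access-token-secret>'))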
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(QuickBooksObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'QuickBooksObject' - - -class QuickBooksSource(CopySource): - """A copy activity QuickBooks server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'QuickBooksSource' - - -class RecurrenceSchedule(Model): - """The recurrence schedule. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param minutes: The minutes. - :type minutes: list[int] - :param hours: The hours. - :type hours: list[int] - :param week_days: The days of the week. - :type week_days: list[str or ~azure.mgmt.datafactory.models.DaysOfWeek] - :param month_days: The month days. - :type month_days: list[int] - :param monthly_occurrences: The monthly occurrences. - :type monthly_occurrences: - list[~azure.mgmt.datafactory.models.RecurrenceScheduleOccurrence] - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'minutes': {'key': 'minutes', 'type': '[int]'}, - 'hours': {'key': 'hours', 'type': '[int]'}, - 'week_days': {'key': 'weekDays', 'type': '[DaysOfWeek]'}, - 'month_days': {'key': 'monthDays', 'type': '[int]'}, - 'monthly_occurrences': {'key': 'monthlyOccurrences', 'type': '[RecurrenceScheduleOccurrence]'}, - } - - def __init__(self, *, additional_properties=None, minutes=None, hours=None, week_days=None, month_days=None, monthly_occurrences=None, **kwargs) -> None: - super(RecurrenceSchedule, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.minutes = minutes - self.hours = hours - self.week_days = week_days - self.month_days = month_days - self.monthly_occurrences = monthly_occurrences - - -class RecurrenceScheduleOccurrence(Model): - """The recurrence schedule occurrence. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param day: The day of the week. Possible values include: 'Sunday', - 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday' - :type day: str or ~azure.mgmt.datafactory.models.DayOfWeek - :param occurrence: The occurrence. - :type occurrence: int - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'day': {'key': 'day', 'type': 'DayOfWeek'}, - 'occurrence': {'key': 'occurrence', 'type': 'int'}, - } - - def __init__(self, *, additional_properties=None, day=None, occurrence: int=None, **kwargs) -> None: - super(RecurrenceScheduleOccurrence, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.day = day - self.occurrence = occurrence - - -class RedirectIncompatibleRowSettings(Model): - """Redirect incompatible row settings. 
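A sketch of how RecurrenceSchedule and RecurrenceScheduleOccurrence compose, assuming the ScheduleTrigger and ScheduleTriggerRecurrence models defined elsewhere in this module; the times are arbitrary:

    from datetime import datetime
    from azure.mgmt.datafactory.models import (
        RecurrenceSchedule, RecurrenceScheduleOccurrence,
        ScheduleTrigger, ScheduleTriggerRecurrence)

    # Fire at 06:30 on the first Monday of each month.
    schedule = RecurrenceSchedule(
        minutes=[30],
        hours=[6],
        monthly_occurrences=[
            RecurrenceScheduleOccurrence(day='Monday', occurrence=1)])

    recurrence = ScheduleTriggerRecurrence(
        frequency='Month', interval=1,
        start_time=datetime(2019, 6, 1), time_zone='UTC',
        schedule=schedule)

    trigger = ScheduleTrigger(recurrence=recurrence)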
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param linked_service_name: Required. Name of the Azure Storage, Storage
-     SAS, or Azure Data Lake Store linked service used for redirecting
-     incompatible rows. Must be specified if redirectIncompatibleRowSettings
-     is specified. Type: string (or Expression with resultType string).
-    :type linked_service_name: object
-    :param path: The path for storing the redirected incompatible row data.
-     Type: string (or Expression with resultType string).
-    :type path: object
-    """
-
-    _validation = {
-        'linked_service_name': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'object'},
-        'path': {'key': 'path', 'type': 'object'},
-    }
-
-    def __init__(self, *, linked_service_name, additional_properties=None, path=None, **kwargs) -> None:
-        super(RedirectIncompatibleRowSettings, self).__init__(**kwargs)
-        self.additional_properties = additional_properties
-        self.linked_service_name = linked_service_name
-        self.path = path
-
-
-class RedshiftUnloadSettings(Model):
-    """The Amazon S3 settings needed for the interim Amazon S3 when copying
-    from Amazon Redshift with unload. With this, data from the Amazon Redshift
-    source is first unloaded into the interim S3 and then copied from there
-    into the targeted sink.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param s3_linked_service_name: Required. The name of the Amazon S3 linked
-     service which will be used for the unload operation when copying from the
-     Amazon Redshift source.
-    :type s3_linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param bucket_name: Required. The bucket of the interim Amazon S3 which
-     will be used to store the unloaded data from the Amazon Redshift source.
-     The bucket must be in the same region as the Amazon Redshift source.
-     Type: string (or Expression with resultType string).
-    :type bucket_name: object
-    """
-
-    _validation = {
-        's3_linked_service_name': {'required': True},
-        'bucket_name': {'required': True},
-    }
-
-    _attribute_map = {
-        's3_linked_service_name': {'key': 's3LinkedServiceName', 'type': 'LinkedServiceReference'},
-        'bucket_name': {'key': 'bucketName', 'type': 'object'},
-    }
-
-    def __init__(self, *, s3_linked_service_name, bucket_name, **kwargs) -> None:
-        super(RedshiftUnloadSettings, self).__init__(**kwargs)
-        self.s3_linked_service_name = s3_linked_service_name
-        self.bucket_name = bucket_name
-
-
-class RelationalSource(CopySource):
-    """A copy activity source for various relational databases.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param source_retry_count: Source retry count. Type: integer (or
-     Expression with resultType integer).
-    :type source_retry_count: object
-    :param source_retry_wait: Source retry wait. Type: string (or Expression
-     with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type source_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the source data store.
Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'RelationalSource' - - -class RelationalTableDataset(Dataset): - """The relational table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The relational table name. Type: string (or Expression - with resultType string). 
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(RelationalTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'RelationalTable' - - -class RerunTriggerResource(SubResource): - """RerunTrigger resource type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Properties of the rerun trigger. - :type properties: - ~azure.mgmt.datafactory.models.RerunTumblingWindowTrigger - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'RerunTumblingWindowTrigger'}, - } - - def __init__(self, *, properties, **kwargs) -> None: - super(RerunTriggerResource, self).__init__(**kwargs) - self.properties = properties - - -class RerunTumblingWindowTrigger(Trigger): - """Trigger that schedules pipeline reruns for all fixed time interval windows - from a requested start time to requested end time. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when - Start/Stop APIs are called on the Trigger. Possible values include: - 'Started', 'Stopped', 'Disabled' - :vartype runtime_state: str or - ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the - trigger. - :type annotations: list[object] - :param type: Required. Constant filled by server. 
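# A minimal usage sketch for RelationalTableDataset above; the linked
# service name and table are placeholders.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, RelationalTableDataset)

dataset = RelationalTableDataset(
    linked_service_name=LinkedServiceReference(
        reference_name='MyRelationalLinkedService'),
    table_name='dbo.Orders',  # string, or an Expression resolving to one
)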
- :type type: str - :param parent_trigger: The parent trigger reference. - :type parent_trigger: object - :param requested_start_time: Required. The start time for the time period - for which restatement is initiated. Only UTC time is currently supported. - :type requested_start_time: datetime - :param requested_end_time: Required. The end time for the time period for - which restatement is initiated. Only UTC time is currently supported. - :type requested_end_time: datetime - :param max_concurrency: Required. The max number of parallel time windows - (ready for execution) for which a rerun is triggered. - :type max_concurrency: int - """ - - _validation = { - 'runtime_state': {'readonly': True}, - 'type': {'required': True}, - 'requested_start_time': {'required': True}, - 'requested_end_time': {'required': True}, - 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, - 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, - 'requested_end_time': {'key': 'typeProperties.requestedEndTime', 'type': 'iso-8601'}, - 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, - } - - def __init__(self, *, requested_start_time, requested_end_time, max_concurrency: int, additional_properties=None, description: str=None, annotations=None, parent_trigger=None, **kwargs) -> None: - super(RerunTumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) - self.parent_trigger = parent_trigger - self.requested_start_time = requested_start_time - self.requested_end_time = requested_end_time - self.max_concurrency = max_concurrency - self.type = 'RerunTumblingWindowTrigger' - - -class RerunTumblingWindowTriggerActionParameters(Model): - """Rerun tumbling window trigger Parameters. - - All required parameters must be populated in order to send to Azure. - - :param start_time: Required. The start time for the time period for which - restatement is initiated. Only UTC time is currently supported. - :type start_time: datetime - :param end_time: Required. The end time for the time period for which - restatement is initiated. Only UTC time is currently supported. - :type end_time: datetime - :param max_concurrency: Required. The max number of parallel time windows - (ready for execution) for which a rerun is triggered. - :type max_concurrency: int - """ - - _validation = { - 'start_time': {'required': True}, - 'end_time': {'required': True}, - 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, - } - - _attribute_map = { - 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, - 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, - 'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'}, - } - - def __init__(self, *, start_time, end_time, max_concurrency: int, **kwargs) -> None: - super(RerunTumblingWindowTriggerActionParameters, self).__init__(**kwargs) - self.start_time = start_time - self.end_time = end_time - self.max_concurrency = max_concurrency - - -class ResponsysLinkedService(LinkedService): - """Responsys linked service. 
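# A minimal usage sketch for the rerun parameters above; datetimes must be
# UTC per the docstrings, and max_concurrency is validated to the 1-50 range.
from datetime import datetime
from azure.mgmt.datafactory.models import (
    RerunTumblingWindowTriggerActionParameters)

rerun_params = RerunTumblingWindowTriggerActionParameters(
    start_time=datetime(2019, 6, 1, 0, 0),
    end_time=datetime(2019, 6, 2, 0, 0),
    max_concurrency=10,
)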
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param endpoint: Required. The endpoint of the Responsys server. - :type endpoint: object - :param client_id: Required. The client ID associated with the Responsys - application. Type: string (or Expression with resultType string). - :type client_id: object - :param client_secret: The client secret associated with the Responsys - application. Type: string (or Expression with resultType string). - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. Type: - boolean (or Expression with resultType boolean). - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. Type: boolean (or - Expression with resultType boolean). - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. Type: - boolean (or Expression with resultType boolean). - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, endpoint, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(ResponsysLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.endpoint = endpoint - self.client_id = client_id - self.client_secret = client_secret - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'Responsys' - - -class ResponsysObjectDataset(Dataset): - """Responsys dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). 
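# A minimal usage sketch for ResponsysLinkedService above. SecureString is
# the plain SecretBase subtype from this package; the endpoint and client
# credentials are placeholders.
from azure.mgmt.datafactory.models import (
    ResponsysLinkedService, SecureString)

responsys_ls = ResponsysLinkedService(
    endpoint='https://<tenant>.responsys.net',
    client_id='<client-id>',
    client_secret=SecureString(value='<client-secret>'),
)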
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(ResponsysObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'ResponsysObject' - - -class ResponsysSource(CopySource): - """A copy activity Responsys source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'ResponsysSource' - - -class RestResourceDataset(Dataset): - """A Rest service dataset. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param relative_url: The relative URL to the resource that the RESTful API - provides. Type: string (or Expression with resultType string). - :type relative_url: object - :param request_method: The HTTP method used to call the RESTful API. The - default is GET. Type: string (or Expression with resultType string). - :type request_method: object - :param request_body: The HTTP request body to the RESTful API if - requestMethod is POST. Type: string (or Expression with resultType - string). - :type request_body: object - :param additional_headers: The additional HTTP headers in the request to - the RESTful API. Type: string (or Expression with resultType string). - :type additional_headers: object - :param pagination_rules: The pagination rules to compose next page - requests. Type: string (or Expression with resultType string). 
- :type pagination_rules: object
- """
-
- _validation = {
- 'linked_service_name': {'required': True},
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'description': {'key': 'description', 'type': 'str'},
- 'structure': {'key': 'structure', 'type': 'object'},
- 'schema': {'key': 'schema', 'type': 'object'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
- 'type': {'key': 'type', 'type': 'str'},
- 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'},
- 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'},
- 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'},
- 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'},
- 'pagination_rules': {'key': 'typeProperties.paginationRules', 'type': 'object'},
- }
-
- def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, relative_url=None, request_method=None, request_body=None, additional_headers=None, pagination_rules=None, **kwargs) -> None:
- super(RestResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
- self.relative_url = relative_url
- self.request_method = request_method
- self.request_body = request_body
- self.additional_headers = additional_headers
- self.pagination_rules = pagination_rules
- self.type = 'RestResource'
-
-
-class RestServiceLinkedService(LinkedService):
- """Rest Service linked service.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param connect_via: The integration runtime reference.
- :type connect_via:
- ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
- :param description: Linked service description.
- :type description: str
- :param parameters: Parameters for linked service.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- linked service.
- :type annotations: list[object]
- :param type: Required. Constant filled by server.
- :type type: str
- :param url: Required. The base URL of the REST service.
- :type url: object
- :param enable_server_certificate_validation: Whether to validate the
- server-side SSL certificate when connecting to the endpoint. The default
- value is true. Type: boolean (or Expression with resultType boolean).
- :type enable_server_certificate_validation: object
- :param authentication_type: Required. Type of authentication used to
- connect to the REST service. Possible values include: 'Anonymous',
- 'Basic', 'AadServicePrincipal', 'ManagedServiceIdentity'
- :type authentication_type: str or
- ~azure.mgmt.datafactory.models.RestServiceAuthenticationType
- :param user_name: The user name used in Basic authentication type.
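# A minimal usage sketch for RestResourceDataset above; the linked service
# name and relative URL are placeholders.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, RestResourceDataset)

rest_dataset = RestResourceDataset(
    linked_service_name=LinkedServiceReference(
        reference_name='MyRestServiceLinkedService'),
    relative_url='customers',
    request_method='GET',
)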
- :type user_name: object - :param password: The password used in Basic authentication type. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param service_principal_id: The application's client ID used in - AadServicePrincipal authentication type. - :type service_principal_id: object - :param service_principal_key: The application's key used in - AadServicePrincipal authentication type. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The tenant information (domain name or tenant ID) used in - AadServicePrincipal authentication type under which your application - resides. - :type tenant: object - :param aad_resource_id: The resource you are requesting authorization to - use. - :type aad_resource_id: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, url, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, enable_server_certificate_validation=None, user_name=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, aad_resource_id=None, encrypted_credential=None, **kwargs) -> None: - super(RestServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.url = url - self.enable_server_certificate_validation = enable_server_certificate_validation - self.authentication_type = authentication_type - self.user_name = user_name - self.password = password - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.aad_resource_id = aad_resource_id - self.encrypted_credential = encrypted_credential - self.type = 'RestService' - - -class RestSource(CopySource): - """A copy activity Rest service source. - - All required parameters must be populated in order to send to Azure. 
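# A minimal usage sketch for RestServiceLinkedService above, using the
# AadServicePrincipal authentication type; the URL, IDs, tenant, and AAD
# resource values are placeholders.
from azure.mgmt.datafactory.models import (
    RestServiceLinkedService, SecureString)

rest_ls = RestServiceLinkedService(
    url='https://api.example.com/odata',
    authentication_type='AadServicePrincipal',
    service_principal_id='<application-client-id>',
    service_principal_key=SecureString(value='<application-key>'),
    tenant='<tenant-id>',
    aad_resource_id='<aad-resource-id>',
)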
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param source_retry_count: Source retry count. Type: integer (or
- Expression with resultType integer).
- :type source_retry_count: object
- :param source_retry_wait: Source retry wait. Type: string (or Expression
- with resultType string), pattern:
- ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type source_retry_wait: object
- :param max_concurrent_connections: The maximum concurrent connection count
- for the source data store. Type: integer (or Expression with resultType
- integer).
- :type max_concurrent_connections: object
- :param type: Required. Constant filled by server.
- :type type: str
- :param request_method: The HTTP method used to call the RESTful API. The
- default is GET. Type: string (or Expression with resultType string).
- :type request_method: object
- :param request_body: The HTTP request body to the RESTful API if
- requestMethod is POST. Type: string (or Expression with resultType
- string).
- :type request_body: object
- :param additional_headers: The additional HTTP headers in the request to
- the RESTful API. Type: string (or Expression with resultType string).
- :type additional_headers: object
- :param pagination_rules: The pagination rules to compose next page
- requests. Type: string (or Expression with resultType string).
- :type pagination_rules: object
- :param http_request_timeout: The timeout (TimeSpan) to get an HTTP
- response. It is the timeout to get a response, not the timeout to read
- response data. Default value: 00:01:40. Type: string (or Expression with
- resultType string), pattern:
- ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type http_request_timeout: object
- :param request_interval: The time to wait before sending the next page
- request.
- :type request_interval: object
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
- 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
- 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
- 'type': {'key': 'type', 'type': 'str'},
- 'request_method': {'key': 'requestMethod', 'type': 'object'},
- 'request_body': {'key': 'requestBody', 'type': 'object'},
- 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'},
- 'pagination_rules': {'key': 'paginationRules', 'type': 'object'},
- 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
- 'request_interval': {'key': 'requestInterval', 'type': 'object'},
- }
-
- def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, request_method=None, request_body=None, additional_headers=None, pagination_rules=None, http_request_timeout=None, request_interval=None, **kwargs) -> None:
- super(RestSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
- self.request_method = request_method
- self.request_body = request_body
- self.additional_headers = additional_headers
- self.pagination_rules = pagination_rules
- self.http_request_timeout = http_request_timeout
- self.request_interval = request_interval
- self.type = 'RestSource'
-
-
-class RetryPolicy(Model):
- """Execution policy for an activity.
-
- :param count: Maximum ordinary retry attempts. Default is 0. Type: integer
- (or Expression with resultType integer), minimum: 0.
- :type count: object
- :param interval_in_seconds: Interval between retries in seconds. Default
- is 30.
- :type interval_in_seconds: int
- """
-
- _validation = {
- 'interval_in_seconds': {'maximum': 86400, 'minimum': 30},
- }
-
- _attribute_map = {
- 'count': {'key': 'count', 'type': 'object'},
- 'interval_in_seconds': {'key': 'intervalInSeconds', 'type': 'int'},
- }
-
- def __init__(self, *, count=None, interval_in_seconds: int=None, **kwargs) -> None:
- super(RetryPolicy, self).__init__(**kwargs)
- self.count = count
- self.interval_in_seconds = interval_in_seconds
-
-
-class RunFilterParameters(Model):
- """Query parameters for listing runs.
-
- All required parameters must be populated in order to send to Azure.
-
- :param continuation_token: The continuation token for getting the next
- page of results. Null for the first page.
- :type continuation_token: str
- :param last_updated_after: Required. The time at or after which the run
- event was updated in 'ISO 8601' format.
- :type last_updated_after: datetime
- :param last_updated_before: Required. The time at or before which the run
- event was updated in 'ISO 8601' format.
- :type last_updated_before: datetime
- :param filters: List of filters.
- :type filters: list[~azure.mgmt.datafactory.models.RunQueryFilter]
- :param order_by: List of OrderBy options.
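# A minimal usage sketch for RestSource above; the header value and
# pagination rule are illustrative only, and request_interval is assumed
# here to be an interval in milliseconds.
from azure.mgmt.datafactory.models import RestSource

rest_source = RestSource(
    request_method='GET',
    additional_headers='Accept: application/json',
    pagination_rules='AbsoluteUrl:$.nextLink',
    http_request_timeout='00:01:40',  # the documented default
    request_interval=1000,
)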
- :type order_by: list[~azure.mgmt.datafactory.models.RunQueryOrderBy] - """ - - _validation = { - 'last_updated_after': {'required': True}, - 'last_updated_before': {'required': True}, - } - - _attribute_map = { - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, - 'last_updated_after': {'key': 'lastUpdatedAfter', 'type': 'iso-8601'}, - 'last_updated_before': {'key': 'lastUpdatedBefore', 'type': 'iso-8601'}, - 'filters': {'key': 'filters', 'type': '[RunQueryFilter]'}, - 'order_by': {'key': 'orderBy', 'type': '[RunQueryOrderBy]'}, - } - - def __init__(self, *, last_updated_after, last_updated_before, continuation_token: str=None, filters=None, order_by=None, **kwargs) -> None: - super(RunFilterParameters, self).__init__(**kwargs) - self.continuation_token = continuation_token - self.last_updated_after = last_updated_after - self.last_updated_before = last_updated_before - self.filters = filters - self.order_by = order_by - - -class RunQueryFilter(Model): - """Query filter option for listing runs. - - All required parameters must be populated in order to send to Azure. - - :param operand: Required. Parameter name to be used for filter. The - allowed operands to query pipeline runs are PipelineName, RunStart, RunEnd - and Status; to query activity runs are ActivityName, ActivityRunStart, - ActivityRunEnd, ActivityType and Status, and to query trigger runs are - TriggerName, TriggerRunTimestamp and Status. Possible values include: - 'PipelineName', 'Status', 'RunStart', 'RunEnd', 'ActivityName', - 'ActivityRunStart', 'ActivityRunEnd', 'ActivityType', 'TriggerName', - 'TriggerRunTimestamp', 'RunGroupId', 'LatestOnly' - :type operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand - :param operator: Required. Operator to be used for filter. Possible values - include: 'Equals', 'NotEquals', 'In', 'NotIn' - :type operator: str or - ~azure.mgmt.datafactory.models.RunQueryFilterOperator - :param values: Required. List of filter values. - :type values: list[str] - """ - - _validation = { - 'operand': {'required': True}, - 'operator': {'required': True}, - 'values': {'required': True}, - } - - _attribute_map = { - 'operand': {'key': 'operand', 'type': 'str'}, - 'operator': {'key': 'operator', 'type': 'str'}, - 'values': {'key': 'values', 'type': '[str]'}, - } - - def __init__(self, *, operand, operator, values, **kwargs) -> None: - super(RunQueryFilter, self).__init__(**kwargs) - self.operand = operand - self.operator = operator - self.values = values - - -class RunQueryOrderBy(Model): - """An object to provide order by options for listing runs. - - All required parameters must be populated in order to send to Azure. - - :param order_by: Required. Parameter name to be used for order by. The - allowed parameters to order by for pipeline runs are PipelineName, - RunStart, RunEnd and Status; for activity runs are ActivityName, - ActivityRunStart, ActivityRunEnd and Status; for trigger runs are - TriggerName, TriggerRunTimestamp and Status. Possible values include: - 'RunStart', 'RunEnd', 'PipelineName', 'Status', 'ActivityName', - 'ActivityRunStart', 'ActivityRunEnd', 'TriggerName', 'TriggerRunTimestamp' - :type order_by: str or ~azure.mgmt.datafactory.models.RunQueryOrderByField - :param order: Required. Sorting order of the parameter. 
Possible values - include: 'ASC', 'DESC' - :type order: str or ~azure.mgmt.datafactory.models.RunQueryOrder - """ - - _validation = { - 'order_by': {'required': True}, - 'order': {'required': True}, - } - - _attribute_map = { - 'order_by': {'key': 'orderBy', 'type': 'str'}, - 'order': {'key': 'order', 'type': 'str'}, - } - - def __init__(self, *, order_by, order, **kwargs) -> None: - super(RunQueryOrderBy, self).__init__(**kwargs) - self.order_by = order_by - self.order = order - - -class SalesforceLinkedService(LinkedService): - """Linked service for Salesforce. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param environment_url: The URL of Salesforce instance. Default is - 'https://login.salesforce.com'. To copy data from sandbox, specify - 'https://test.salesforce.com'. To copy data from custom domain, specify, - for example, 'https://[domain].my.salesforce.com'. Type: string (or - Expression with resultType string). - :type environment_url: object - :param username: The username for Basic authentication of the Salesforce - instance. Type: string (or Expression with resultType string). - :type username: object - :param password: The password for Basic authentication of the Salesforce - instance. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param security_token: The security token is required to remotely access - Salesforce instance. - :type security_token: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
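# A minimal usage sketch combining the three run-query models above to ask
# for the last day's runs of one pipeline, newest first; the pipeline name
# is a placeholder.
from datetime import datetime, timedelta
from azure.mgmt.datafactory.models import (
    RunFilterParameters, RunQueryFilter, RunQueryOrderBy)

filter_params = RunFilterParameters(
    last_updated_after=datetime.utcnow() - timedelta(days=1),
    last_updated_before=datetime.utcnow(),
    filters=[RunQueryFilter(
        operand='PipelineName', operator='Equals', values=['copyPipeline'])],
    order_by=[RunQueryOrderBy(order_by='RunStart', order='DESC')],
)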
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, environment_url=None, username=None, password=None, security_token=None, encrypted_credential=None, **kwargs) -> None: - super(SalesforceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.environment_url = environment_url - self.username = username - self.password = password - self.security_token = security_token - self.encrypted_credential = encrypted_credential - self.type = 'Salesforce' - - -class SalesforceMarketingCloudLinkedService(LinkedService): - """Salesforce Marketing Cloud linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param client_id: Required. The client ID associated with the Salesforce - Marketing Cloud application. Type: string (or Expression with resultType - string). - :type client_id: object - :param client_secret: The client secret associated with the Salesforce - Marketing Cloud application. Type: string (or Expression with resultType - string). - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. Type: - boolean (or Expression with resultType boolean). - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. Type: boolean (or - Expression with resultType boolean). - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. 
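# A minimal usage sketch for SalesforceLinkedService above, pointed at a
# sandbox org; the credentials are placeholders.
from azure.mgmt.datafactory.models import (
    SalesforceLinkedService, SecureString)

salesforce_ls = SalesforceLinkedService(
    environment_url='https://test.salesforce.com',
    username='<user-name>',
    password=SecureString(value='<password>'),
    security_token=SecureString(value='<security-token>'),
)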
Type: - boolean (or Expression with resultType boolean). - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(SalesforceMarketingCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.client_id = client_id - self.client_secret = client_secret - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'SalesforceMarketingCloud' - - -class SalesforceMarketingCloudObjectDataset(Dataset): - """Salesforce Marketing Cloud dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. 
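# A minimal usage sketch for SalesforceMarketingCloudLinkedService above;
# the client credentials are placeholders.
from azure.mgmt.datafactory.models import (
    SalesforceMarketingCloudLinkedService, SecureString)

sfmc_ls = SalesforceMarketingCloudLinkedService(
    client_id='<client-id>',
    client_secret=SecureString(value='<client-secret>'),
)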
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(SalesforceMarketingCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'SalesforceMarketingCloudObject' - - -class SalesforceMarketingCloudSource(CopySource): - """A copy activity Salesforce Marketing Cloud source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'SalesforceMarketingCloudSource' - - -class SalesforceObjectDataset(Dataset): - """The Salesforce object dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param object_api_name: The Salesforce object API name. Type: string (or - Expression with resultType string). 
- :type object_api_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, object_api_name=None, **kwargs) -> None: - super(SalesforceObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.object_api_name = object_api_name - self.type = 'SalesforceObject' - - -class SalesforceServiceCloudLinkedService(LinkedService): - """Linked service for Salesforce Service Cloud. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param environment_url: The URL of Salesforce Service Cloud instance. - Default is 'https://login.salesforce.com'. To copy data from sandbox, - specify 'https://test.salesforce.com'. To copy data from custom domain, - specify, for example, 'https://[domain].my.salesforce.com'. Type: string - (or Expression with resultType string). - :type environment_url: object - :param username: The username for Basic authentication of the Salesforce - instance. Type: string (or Expression with resultType string). - :type username: object - :param password: The password for Basic authentication of the Salesforce - instance. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param security_token: The security token is required to remotely access - Salesforce instance. - :type security_token: ~azure.mgmt.datafactory.models.SecretBase - :param extended_properties: Extended properties appended to the connection - string. Type: string (or Expression with resultType string). - :type extended_properties: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
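# A minimal usage sketch for SalesforceObjectDataset above; note that it
# takes the object's API name (e.g. a __c custom-object name), not its
# display label. The names here are placeholders.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, SalesforceObjectDataset)

sf_dataset = SalesforceObjectDataset(
    linked_service_name=LinkedServiceReference(
        reference_name='SalesforceLinkedService'),
    object_api_name='Invoice__c',
)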
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, - 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, environment_url=None, username=None, password=None, security_token=None, extended_properties=None, encrypted_credential=None, **kwargs) -> None: - super(SalesforceServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.environment_url = environment_url - self.username = username - self.password = password - self.security_token = security_token - self.extended_properties = extended_properties - self.encrypted_credential = encrypted_credential - self.type = 'SalesforceServiceCloud' - - -class SalesforceServiceCloudObjectDataset(Dataset): - """The Salesforce Service Cloud object dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param object_api_name: The Salesforce Service Cloud object API name. - Type: string (or Expression with resultType string). 
- :type object_api_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, object_api_name=None, **kwargs) -> None: - super(SalesforceServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.object_api_name = object_api_name - self.type = 'SalesforceServiceCloudObject' - - -class SalesforceServiceCloudSink(CopySink): - """A copy activity Salesforce Service Cloud sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param write_behavior: The write behavior for the operation. Default is - Insert. Possible values include: 'Insert', 'Upsert' - :type write_behavior: str or - ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior - :param external_id_field_name: The name of the external ID field for - upsert operation. Default value is 'Id' column. Type: string (or - Expression with resultType string). - :type external_id_field_name: object - :param ignore_null_values: The flag indicating whether or not to ignore - null values from input dataset (except key fields) during write operation. - Default value is false. 
If set to true, ADF leaves the data
- in the destination object unchanged when doing an upsert/update operation
- and inserts the defined default value when doing an insert operation; if
- set to false, ADF updates the data in the destination object to NULL when
- doing an upsert/update operation and inserts NULL when doing an insert
- operation. Type: boolean (or Expression with resultType boolean).
- :type ignore_null_values: object
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
- 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
- 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
- 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
- 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
- 'type': {'key': 'type', 'type': 'str'},
- 'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
- 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'},
- 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
- }
-
- def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None:
- super(SalesforceServiceCloudSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
- self.write_behavior = write_behavior
- self.external_id_field_name = external_id_field_name
- self.ignore_null_values = ignore_null_values
- self.type = 'SalesforceServiceCloudSink'
-
-
-class SalesforceServiceCloudSource(CopySource):
- """A copy activity Salesforce Service Cloud source.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param source_retry_count: Source retry count. Type: integer (or
- Expression with resultType integer).
- :type source_retry_count: object
- :param source_retry_wait: Source retry wait. Type: string (or Expression
- with resultType string), pattern:
- ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type source_retry_wait: object
- :param max_concurrent_connections: The maximum concurrent connection count
- for the source data store. Type: integer (or Expression with resultType
- integer).
- :type max_concurrent_connections: object
- :param type: Required. Constant filled by server.
- :type type: str
- :param query: Database query. Type: string (or Expression with resultType
- string).
- :type query: object
- :param read_behavior: The read behavior for the operation. Default is
- Query.
Possible values include: 'Query', 'QueryAll'
- :type read_behavior: str or
- ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
- 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
- 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
- 'type': {'key': 'type', 'type': 'str'},
- 'query': {'key': 'query', 'type': 'object'},
- 'read_behavior': {'key': 'readBehavior', 'type': 'str'},
- }
-
- def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, read_behavior=None, **kwargs) -> None:
- super(SalesforceServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
- self.query = query
- self.read_behavior = read_behavior
- self.type = 'SalesforceServiceCloudSource'
-
-
-class SalesforceSink(CopySink):
- """A copy activity Salesforce sink.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param write_batch_size: Write batch size. Type: integer (or Expression
- with resultType integer), minimum: 0.
- :type write_batch_size: object
- :param write_batch_timeout: Write batch timeout. Type: string (or
- Expression with resultType string), pattern:
- ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type write_batch_timeout: object
- :param sink_retry_count: Sink retry count. Type: integer (or Expression
- with resultType integer).
- :type sink_retry_count: object
- :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
- resultType string), pattern:
- ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type sink_retry_wait: object
- :param max_concurrent_connections: The maximum concurrent connection count
- for the sink data store. Type: integer (or Expression with resultType
- integer).
- :type max_concurrent_connections: object
- :param type: Required. Constant filled by server.
- :type type: str
- :param write_behavior: The write behavior for the operation. Default is
- Insert. Possible values include: 'Insert', 'Upsert'
- :type write_behavior: str or
- ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior
- :param external_id_field_name: The name of the external ID field for
- upsert operation. Default value is the 'Id' column. Type: string (or
- Expression with resultType string).
- :type external_id_field_name: object
- :param ignore_null_values: The flag indicating whether or not to ignore
- null values from the input dataset (except key fields) during a write
- operation. Default value is false. If set to true, ADF leaves the data
- in the destination object unchanged when doing an upsert/update operation
- and inserts the defined default value when doing an insert operation; if
- set to false, ADF updates the data in the destination object to NULL when
- doing an upsert/update operation and inserts NULL when doing an insert
- operation. Type: boolean (or Expression with resultType boolean).
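# Usage sketch: pairing the Service Cloud object dataset with a Service Cloud
# source; 'QueryAll' also returns deleted records. The object, query, and
# reference names are placeholders, and LinkedServiceReference is assumed
# from this same models package.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, SalesforceServiceCloudObjectDataset,
    SalesforceServiceCloudSource)

case_dataset = SalesforceServiceCloudObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='SfServiceCloudLS'),
    object_api_name='Case')

case_source = SalesforceServiceCloudSource(
    query='SELECT Id, Subject FROM Case',  # SOQL; omit to read the whole object
    read_behavior='QueryAll')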
- :type ignore_null_values: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, - 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None: - super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.write_behavior = write_behavior - self.external_id_field_name = external_id_field_name - self.ignore_null_values = ignore_null_values - self.type = 'SalesforceSink' - - -class SalesforceSource(CopySource): - """A copy activity Salesforce source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - :param read_behavior: The read behavior for the operation. Default is - Query. 
Possible values include: 'Query', 'QueryAll' - :type read_behavior: str or - ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, read_behavior=None, **kwargs) -> None: - super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.read_behavior = read_behavior - self.type = 'SalesforceSource' - - -class SapBwCubeDataset(Dataset): - """The SAP BW cube dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. 
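# Usage sketch: a Salesforce sink configured for upsert, matching the
# ignore_null_values semantics described above (true keeps destination data
# unchanged when the incoming value is NULL). The external ID field name is
# a placeholder.
from azure.mgmt.datafactory.models import SalesforceSink

upsert_sink = SalesforceSink(
    write_behavior='Upsert',
    external_id_field_name='External_Id__c',
    ignore_null_values=True)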
- :type type: str - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: - super(SapBwCubeDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'SapBwCube' - - -class SapBWLinkedService(LinkedService): - """SAP Business Warehouse Linked Service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param server: Required. Host name of the SAP BW instance. Type: string - (or Expression with resultType string). - :type server: object - :param system_number: Required. System number of the BW system. (Usually a - two-digit decimal number represented as a string.) Type: string (or - Expression with resultType string). - :type system_number: object - :param client_id: Required. Client ID of the client on the BW system. - (Usually a three-digit decimal number represented as a string) Type: - string (or Expression with resultType string). - :type client_id: object - :param user_name: Username to access the SAP BW server. Type: string (or - Expression with resultType string). - :type user_name: object - :param password: Password to access the SAP BW server. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object
- """
-
- _validation = {
- 'type': {'required': True},
- 'server': {'required': True},
- 'system_number': {'required': True},
- 'client_id': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
- 'description': {'key': 'description', 'type': 'str'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'server': {'key': 'typeProperties.server', 'type': 'object'},
- 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'},
- 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'},
- 'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
- 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
- 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
- }
-
- def __init__(self, *, server, system_number, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None:
- super(SapBWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
- self.server = server
- self.system_number = system_number
- self.client_id = client_id
- self.user_name = user_name
- self.password = password
- self.encrypted_credential = encrypted_credential
- self.type = 'SapBW'
-
-
-class SapBwSource(CopySource):
- """A copy activity source for the SAP BW server via MDX.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param source_retry_count: Source retry count. Type: integer (or
- Expression with resultType integer).
- :type source_retry_count: object
- :param source_retry_wait: Source retry wait. Type: string (or Expression
- with resultType string), pattern:
- ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type source_retry_wait: object
- :param max_concurrent_connections: The maximum concurrent connection count
- for the source data store. Type: integer (or Expression with resultType
- integer).
- :type max_concurrent_connections: object
- :param type: Required. Constant filled by server.
- :type type: str
- :param query: MDX query. Type: string (or Expression with resultType
- string).
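# Usage sketch: the SAP BW linked service with its three required type
# properties, plus the property-less BW cube dataset. Server, credentials,
# and reference names are placeholders; SecureString and
# LinkedServiceReference are assumed from this same models package.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, SapBwCubeDataset, SapBWLinkedService, SecureString)

sap_bw_ls = SapBWLinkedService(
    server='sapbw.example.com',
    system_number='00',   # two-digit string
    client_id='800',      # three-digit string
    user_name='bwuser',
    password=SecureString(value='<password>'))

bw_cube = SapBwCubeDataset(
    linked_service_name=LinkedServiceReference(reference_name='SapBWLS'))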
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'SapBwSource' - - -class SapCloudForCustomerLinkedService(LinkedService): - """Linked service for SAP Cloud for Customer. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param url: Required. The URL of SAP Cloud for Customer OData API. For - example, '[https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1]'. Type: - string (or Expression with resultType string). - :type url: object - :param username: The username for Basic authentication. Type: string (or - Expression with resultType string). - :type username: object - :param password: The password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Either encryptedCredential or username/password must - be provided. Type: string (or Expression with resultType string). 
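# Usage sketch: an MDX query for the SapBwSource defined above; the cube and
# query names in the MDX string are placeholders.
from azure.mgmt.datafactory.models import SapBwSource

bw_source = SapBwSource(
    query='SELECT [Measures].MEMBERS ON COLUMNS FROM [SomeCube/SomeQuery]')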
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(SapCloudForCustomerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.url = url - self.username = username - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'SapCloudForCustomer' - - -class SapCloudForCustomerResourceDataset(Dataset): - """The path of the SAP Cloud for Customer OData entity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param path: Required. The path of the SAP Cloud for Customer OData - entity. Type: string (or Expression with resultType string). 
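# Usage sketch: the SAP Cloud for Customer linked service with Basic
# credentials. The tenant URL and credentials are placeholders; SecureString
# is assumed from this same models package.
from azure.mgmt.datafactory.models import (
    SapCloudForCustomerLinkedService, SecureString)

c4c_ls = SapCloudForCustomerLinkedService(
    url='https://mytenant.crm.ondemand.com/sap/c4c/odata/v1',
    username='c4cuser',
    password=SecureString(value='<password>'))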
- :type path: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, path, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: - super(SapCloudForCustomerResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.path = path - self.type = 'SapCloudForCustomerResource' - - -class SapCloudForCustomerSink(CopySink): - """A copy activity SAP Cloud for Customer sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param write_behavior: The write behavior for the operation. Default is - 'Insert'. 
Possible values include: 'Insert', 'Update'
- :type write_behavior: str or
- ~azure.mgmt.datafactory.models.SapCloudForCustomerSinkWriteBehavior
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
- 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
- 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
- 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
- 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
- 'type': {'key': 'type', 'type': 'str'},
- 'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
- }
-
- def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None:
- super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
- self.write_behavior = write_behavior
- self.type = 'SapCloudForCustomerSink'
-
-
-class SapCloudForCustomerSource(CopySource):
- """A copy activity source for SAP Cloud for Customer.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param source_retry_count: Source retry count. Type: integer (or
- Expression with resultType integer).
- :type source_retry_count: object
- :param source_retry_wait: Source retry wait. Type: string (or Expression
- with resultType string), pattern:
- ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type source_retry_wait: object
- :param max_concurrent_connections: The maximum concurrent connection count
- for the source data store. Type: integer (or Expression with resultType
- integer).
- :type max_concurrent_connections: object
- :param type: Required. Constant filled by server.
- :type type: str
- :param query: SAP Cloud for Customer OData query. For example, "$top=1".
- Type: string (or Expression with resultType string).
- :type query: object
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
- 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
- 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
- 'type': {'key': 'type', 'type': 'str'},
- 'query': {'key': 'query', 'type': 'object'},
- }
-
- def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
- super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
- self.query = query
- self.type = 'SapCloudForCustomerSource'
-
-
-class SapEccLinkedService(LinkedService):
- """Linked service for SAP ERP Central Component (SAP ECC).
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param url: Required. The URL of SAP ECC OData API. For example, - '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: string (or - Expression with resultType string). - :type url: str - :param username: The username for Basic authentication. Type: string (or - Expression with resultType string). - :type username: str - :param password: The password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Either encryptedCredential or username/password must - be provided. Type: string (or Expression with resultType string). - :type encrypted_credential: str - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'str'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, - } - - def __init__(self, *, url: str, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, username: str=None, password=None, encrypted_credential: str=None, **kwargs) -> None: - super(SapEccLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.url = url - self.username = username - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'SapEcc' - - -class SapEccResourceDataset(Dataset): - """The path of the SAP ECC OData entity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. 
Type: array (or Expression with resultType array), itemType:
- DatasetSchemaDataElement.
- :type schema: object
- :param linked_service_name: Required. Linked service reference.
- :type linked_service_name:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param parameters: Parameters for dataset.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- Dataset.
- :type annotations: list[object]
- :param folder: The folder that this Dataset is in. If not specified,
- Dataset will appear at the root level.
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
- :param type: Required. Constant filled by server.
- :type type: str
- :param path: Required. The path of the SAP ECC OData entity. Type: string
- (or Expression with resultType string).
- :type path: object
- """
-
- _validation = {
- 'linked_service_name': {'required': True},
- 'type': {'required': True},
- 'path': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'description': {'key': 'description', 'type': 'str'},
- 'structure': {'key': 'structure', 'type': 'object'},
- 'schema': {'key': 'schema', 'type': 'object'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
- 'type': {'key': 'type', 'type': 'str'},
- 'path': {'key': 'typeProperties.path', 'type': 'object'},
- }
-
- def __init__(self, *, linked_service_name, path, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None:
- super(SapEccResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
- self.path = path
- self.type = 'SapEccResource'
-
-
-class SapEccSource(CopySource):
- """A copy activity source for SAP ECC.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param source_retry_count: Source retry count. Type: integer (or
- Expression with resultType integer).
- :type source_retry_count: object
- :param source_retry_wait: Source retry wait. Type: string (or Expression
- with resultType string), pattern:
- ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type source_retry_wait: object
- :param max_concurrent_connections: The maximum concurrent connection count
- for the source data store. Type: integer (or Expression with resultType
- integer).
- :type max_concurrent_connections: object
- :param type: Required. Constant filled by server.
- :type type: str
- :param query: SAP ECC OData query. For example, "$top=1". Type: string (or
- Expression with resultType string).
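# Usage sketch: SAP ECC linked service and entity dataset. Note that, unlike
# most linked services here, SapEccLinkedService declares url, username, and
# encrypted_credential as plain str rather than object. Names are
# placeholders; LinkedServiceReference and SecureString are assumed from
# this same models package.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, SapEccLinkedService, SapEccResourceDataset,
    SecureString)

ecc_ls = SapEccLinkedService(
    url='https://sapecc.example.com:8000/sap/opu/odata/sap/zservice/',
    username='eccuser',
    password=SecureString(value='<password>'))

ecc_entity = SapEccResourceDataset(
    linked_service_name=LinkedServiceReference(reference_name='SapEccLS'),
    path='Products')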
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'SapEccSource' - - -class SapHanaLinkedService(LinkedService): - """SAP HANA Linked Service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: SAP HANA ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param server: Required. Host name of the SAP HANA server. Type: string - (or Expression with resultType string). - :type server: object - :param authentication_type: The authentication type to be used to connect - to the SAP HANA server. Possible values include: 'Basic', 'Windows' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.SapHanaAuthenticationType - :param user_name: Username to access the SAP HANA server. Type: string (or - Expression with resultType string). - :type user_name: object - :param password: Password to access the SAP HANA server. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object
- """
-
- _validation = {
- 'type': {'required': True},
- 'server': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
- 'description': {'key': 'description', 'type': 'str'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
- 'server': {'key': 'typeProperties.server', 'type': 'object'},
- 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
- 'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
- 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
- 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
- }
-
- def __init__(self, *, server, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None:
- super(SapHanaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
- self.connection_string = connection_string
- self.server = server
- self.authentication_type = authentication_type
- self.user_name = user_name
- self.password = password
- self.encrypted_credential = encrypted_credential
- self.type = 'SapHana'
-
-
-class SapHanaSource(CopySource):
- """A copy activity source for SAP HANA.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param source_retry_count: Source retry count. Type: integer (or
- Expression with resultType integer).
- :type source_retry_count: object
- :param source_retry_wait: Source retry wait. Type: string (or Expression
- with resultType string), pattern:
- ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type source_retry_wait: object
- :param max_concurrent_connections: The maximum concurrent connection count
- for the source data store. Type: integer (or Expression with resultType
- integer).
- :type max_concurrent_connections: object
- :param type: Required. Constant filled by server.
- :type type: str
- :param query: SAP HANA SQL query. Type: string (or Expression with
- resultType string).
- :type query: object
- :param packet_size: The packet size of data read from SAP HANA. Type:
- integer (or Expression with resultType integer).
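# Usage sketch: the SAP HANA linked service using Basic authentication;
# server and credentials are placeholders, and SecureString is assumed from
# this same models package.
from azure.mgmt.datafactory.models import SapHanaLinkedService, SecureString

hana_ls = SapHanaLinkedService(
    server='hana.example.com:30015',
    authentication_type='Basic',
    user_name='hanauser',
    password=SecureString(value='<password>'))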
- :type packet_size: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'packet_size': {'key': 'packetSize', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, packet_size=None, **kwargs) -> None: - super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.packet_size = packet_size - self.type = 'SapHanaSource' - - -class SapHanaTableDataset(Dataset): - """SAP HANA Table properties. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param sap_hana_table_dataset_schema: The schema name of SAP HANA. Type: - string (or Expression with resultType string). - :type sap_hana_table_dataset_schema: object - :param table: The table name of SAP HANA. Type: string (or Expression with - resultType string). 
- :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sap_hana_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, sap_hana_table_dataset_schema=None, table=None, **kwargs) -> None: - super(SapHanaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.sap_hana_table_dataset_schema = sap_hana_table_dataset_schema - self.table = table - self.type = 'SapHanaTable' - - -class SapOpenHubLinkedService(LinkedService): - """SAP Business Warehouse Open Hub Destination Linked Service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param server: Required. Host name of the SAP BW instance where the open - hub destination is located. Type: string (or Expression with resultType - string). - :type server: object - :param system_number: Required. System number of the BW system where the - open hub destination is located. (Usually a two-digit decimal number - represented as a string.) Type: string (or Expression with resultType - string). - :type system_number: object - :param client_id: Required. Client ID of the client on the BW system where - the open hub destination is located. (Usually a three-digit decimal number - represented as a string) Type: string (or Expression with resultType - string). - :type client_id: object - :param language: Language of the BW system where the open hub destination - is located. The default value is EN. Type: string (or Expression with - resultType string). - :type language: object - :param user_name: Username to access the SAP BW server where the open hub - destination is located. Type: string (or Expression with resultType - string). - :type user_name: object - :param password: Password to access the SAP BW server where the open hub - destination is located. 
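# Usage sketch: the HANA table dataset (note the renamed
# sap_hana_table_dataset_schema property, which maps to
# typeProperties.schema) and a source with an explicit packet size. Schema,
# table, and reference names are placeholders; LinkedServiceReference is
# assumed from this same models package.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, SapHanaSource, SapHanaTableDataset)

hana_table = SapHanaTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='SapHanaLS'),
    sap_hana_table_dataset_schema='SAPABAP1',
    table='MARA')

hana_source = SapHanaSource(
    query='SELECT * FROM "SAPABAP1"."MARA"',
    packet_size=2097152)  # read packet size; unit not documented here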
- :type password: ~azure.mgmt.datafactory.models.SecretBase
- :param encrypted_credential: The encrypted credential used for
- authentication. Credentials are encrypted using the integration runtime
- credential manager. Type: string (or Expression with resultType string).
- :type encrypted_credential: object
- """
-
- _validation = {
- 'type': {'required': True},
- 'server': {'required': True},
- 'system_number': {'required': True},
- 'client_id': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
- 'description': {'key': 'description', 'type': 'str'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'server': {'key': 'typeProperties.server', 'type': 'object'},
- 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'},
- 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'},
- 'language': {'key': 'typeProperties.language', 'type': 'object'},
- 'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
- 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
- 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
- }
-
- def __init__(self, *, server, system_number, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, language=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None:
- super(SapOpenHubLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
- self.server = server
- self.system_number = system_number
- self.client_id = client_id
- self.language = language
- self.user_name = user_name
- self.password = password
- self.encrypted_credential = encrypted_credential
- self.type = 'SapOpenHub'
-
-
-class SapOpenHubSource(CopySource):
- """A copy activity source for SAP Business Warehouse Open Hub
- Destination.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param source_retry_count: Source retry count. Type: integer (or
- Expression with resultType integer).
- :type source_retry_count: object
- :param source_retry_wait: Source retry wait. Type: string (or Expression
- with resultType string), pattern:
- ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type source_retry_wait: object
- :param max_concurrent_connections: The maximum concurrent connection count
- for the source data store. Type: integer (or Expression with resultType
- integer).
- :type max_concurrent_connections: object
- :param type: Required. Constant filled by server.
- :type type: str
- :param exclude_last_request: Whether to exclude the records of the last
- request. The default value is true. Type: boolean (or Expression with
- resultType boolean).
- :type exclude_last_request: object
- :param base_request_id: The ID of the request for delta loading. Once it is
- set, only data with requestId larger than the value of this property will
- be retrieved. The default value is 0. Type: integer (or Expression with
- resultType integer).
- :type base_request_id: object
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
- 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
- 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
- 'type': {'key': 'type', 'type': 'str'},
- 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'},
- 'base_request_id': {'key': 'baseRequestId', 'type': 'object'},
- }
-
- def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, exclude_last_request=None, base_request_id=None, **kwargs) -> None:
- super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
- self.exclude_last_request = exclude_last_request
- self.base_request_id = base_request_id
- self.type = 'SapOpenHubSource'
-
-
-class SapOpenHubTableDataset(Dataset):
- """SAP Business Warehouse Open Hub Destination Table properties.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized to this collection
- :type additional_properties: dict[str, object]
- :param description: Dataset description.
- :type description: str
- :param structure: Columns that define the structure of the dataset. Type:
- array (or Expression with resultType array), itemType: DatasetDataElement.
- :type structure: object
- :param schema: Columns that define the physical type schema of the
- dataset. Type: array (or Expression with resultType array), itemType:
- DatasetSchemaDataElement.
- :type schema: object
- :param linked_service_name: Required. Linked service reference.
- :type linked_service_name:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param parameters: Parameters for dataset.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- Dataset.
- :type annotations: list[object]
- :param folder: The folder that this Dataset is in. If not specified,
- Dataset will appear at the root level.
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
- :param type: Required. Constant filled by server.
- :type type: str
- :param open_hub_destination_name: Required. The name of the Open Hub
- Destination with destination type as Database Table. Type: string (or
- Expression with resultType string).
- :type open_hub_destination_name: object
- :param exclude_last_request: Whether to exclude the records of the last
- request. The default value is true. Type: boolean (or Expression with
- resultType boolean).
- :type exclude_last_request: object
- :param base_request_id: The ID of the request for delta loading. Once it is
- set, only data with requestId larger than the value of this property will
- be retrieved. The default value is 0. Type: integer (or Expression with
- resultType integer).
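# Usage sketch: the Open Hub linked service plus a delta-style Open Hub read.
# Only requests with an ID greater than base_request_id are retrieved; all
# values are placeholders, and SecureString is assumed from this same models
# package.
from azure.mgmt.datafactory.models import (
    SapOpenHubLinkedService, SapOpenHubSource, SecureString)

open_hub_ls = SapOpenHubLinkedService(
    server='sapbw.example.com',
    system_number='00',
    client_id='800',
    user_name='bwuser',
    password=SecureString(value='<password>'))

delta_source = SapOpenHubSource(
    exclude_last_request=True,  # skip the (possibly still running) latest request
    base_request_id=42)         # read only requests with ID > 42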
- :type base_request_id: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'open_hub_destination_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'open_hub_destination_name': {'key': 'typeProperties.openHubDestinationName', 'type': 'object'}, - 'exclude_last_request': {'key': 'typeProperties.excludeLastRequest', 'type': 'object'}, - 'base_request_id': {'key': 'typeProperties.baseRequestId', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, open_hub_destination_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, exclude_last_request=None, base_request_id=None, **kwargs) -> None: - super(SapOpenHubTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.open_hub_destination_name = open_hub_destination_name - self.exclude_last_request = exclude_last_request - self.base_request_id = base_request_id - self.type = 'SapOpenHubTable' - - -class SapTableLinkedService(LinkedService): - """SAP Table Linked Service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param server: Host name of the SAP instance where the table is located. - Type: string (or Expression with resultType string). - :type server: object - :param system_number: System number of the SAP system where the table is - located. (Usually a two-digit decimal number represented as a string.) - Type: string (or Expression with resultType string). - :type system_number: object - :param client_id: Client ID of the client on the SAP system where the - table is located. (Usually a three-digit decimal number represented as a - string) Type: string (or Expression with resultType string). - :type client_id: object - :param language: Language of the SAP system where the table is located. - The default value is EN. Type: string (or Expression with resultType - string). - :type language: object - :param system_id: SystemID of the SAP system where the table is located. - Type: string (or Expression with resultType string). 
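# Editor's sketch (not part of the generated code above): how the Open Hub
# models combine for a delta load. The linked service name and request ID are
# illustrative assumptions.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, SapOpenHubSource, SapOpenHubTableDataset)

dataset = SapOpenHubTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='SapBwOpenHub'),
    # Must name an Open Hub Destination whose destination type is
    # Database Table.
    open_hub_destination_name='MY_OHD')
# Only requests with an ID greater than base_request_id are read, which
# gives incremental (delta) extraction across runs.
source = SapOpenHubSource(exclude_last_request=True, base_request_id=12345)
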
-class SapTableLinkedService(LinkedService):
-    """SAP Table Linked Service.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param server: Host name of the SAP instance where the table is located.
-     Type: string (or Expression with resultType string).
-    :type server: object
-    :param system_number: System number of the SAP system where the table is
-     located. (Usually a two-digit decimal number represented as a string.)
-     Type: string (or Expression with resultType string).
-    :type system_number: object
-    :param client_id: Client ID of the client on the SAP system where the
-     table is located. (Usually a three-digit decimal number represented as a
-     string.) Type: string (or Expression with resultType string).
-    :type client_id: object
-    :param language: Language of the SAP system where the table is located.
-     The default value is EN. Type: string (or Expression with resultType
-     string).
-    :type language: object
-    :param system_id: SystemID of the SAP system where the table is located.
-     Type: string (or Expression with resultType string).
-    :type system_id: object
-    :param user_name: Username to access the SAP server where the table is
-     located. Type: string (or Expression with resultType string).
-    :type user_name: object
-    :param password: Password to access the SAP server where the table is
-     located.
-    :type password: ~azure.mgmt.datafactory.models.SecretBase
-    :param message_server: The hostname of the SAP Message Server. Type:
-     string (or Expression with resultType string).
-    :type message_server: object
-    :param message_server_service: The service name or port number of the
-     Message Server. Type: string (or Expression with resultType string).
-    :type message_server_service: object
-    :param snc_mode: SNC activation indicator to access the SAP server where
-     the table is located. Must be either 0 (off) or 1 (on). Type: string (or
-     Expression with resultType string).
-    :type snc_mode: object
-    :param snc_my_name: Initiator's SNC name to access the SAP server where
-     the table is located. Type: string (or Expression with resultType string).
-    :type snc_my_name: object
-    :param snc_partner_name: Communication partner's SNC name to access the
-     SAP server where the table is located. Type: string (or Expression with
-     resultType string).
-    :type snc_partner_name: object
-    :param snc_library_path: External security product's library to access the
-     SAP server where the table is located. Type: string (or Expression with
-     resultType string).
-    :type snc_library_path: object
-    :param snc_qop: SNC Quality of Protection. Allowed values include: 1, 2,
-     3, 8, 9. Type: string (or Expression with resultType string).
-    :type snc_qop: object
-    :param logon_group: The Logon Group for the SAP System. Type: string (or
-     Expression with resultType string).
-    :type logon_group: object
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
-    :type encrypted_credential: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
-        'description': {'key': 'description', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'server': {'key': 'typeProperties.server', 'type': 'object'},
-        'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'},
-        'client_id': {'key': 'typeProperties.clientId', 'type': 'object'},
-        'language': {'key': 'typeProperties.language', 'type': 'object'},
-        'system_id': {'key': 'typeProperties.systemId', 'type': 'object'},
-        'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
-        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
-        'message_server': {'key': 'typeProperties.messageServer', 'type': 'object'},
-        'message_server_service': {'key': 'typeProperties.messageServerService', 'type': 'object'},
-        'snc_mode': {'key': 'typeProperties.sncMode', 'type': 'object'},
-        'snc_my_name': {'key': 'typeProperties.sncMyName', 'type': 'object'},
-        'snc_partner_name': {'key': 'typeProperties.sncPartnerName', 'type': 'object'},
-        'snc_library_path': {'key': 'typeProperties.sncLibraryPath', 'type': 'object'},
-        'snc_qop': {'key': 'typeProperties.sncQop', 'type': 'object'},
-        'logon_group': {'key': 'typeProperties.logonGroup', 'type': 'object'},
-        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
-    }
-
-    def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, server=None, system_number=None, client_id=None, language=None, system_id=None, user_name=None, password=None, message_server=None, message_server_service=None, snc_mode=None, snc_my_name=None, snc_partner_name=None, snc_library_path=None, snc_qop=None, logon_group=None, encrypted_credential=None, **kwargs) -> None:
-        super(SapTableLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
-        self.server = server
-        self.system_number = system_number
-        self.client_id = client_id
-        self.language = language
-        self.system_id = system_id
-        self.user_name = user_name
-        self.password = password
-        self.message_server = message_server
-        self.message_server_service = message_server_service
-        self.snc_mode = snc_mode
-        self.snc_my_name = snc_my_name
-        self.snc_partner_name = snc_partner_name
-        self.snc_library_path = snc_library_path
-        self.snc_qop = snc_qop
-        self.logon_group = logon_group
-        self.encrypted_credential = encrypted_credential
-        self.type = 'SapTable'
-
-
-class SapTablePartitionSettings(Model):
-    """The settings that will be leveraged for SAP table source partitioning.
-
-    :param partition_column_name: The name of the column that will be used for
-     range partitioning. Type: string (or Expression with resultType string).
-    :type partition_column_name: object
-    :param partition_upper_bound: The maximum value of the column specified in
-     partitionColumnName that will be used for range partitioning. Type:
-     string (or Expression with resultType string).
-    :type partition_upper_bound: object
-    :param partition_lower_bound: The minimum value of the column specified in
-     partitionColumnName that will be used for range partitioning. Type:
-     string (or Expression with resultType string).
-    :type partition_lower_bound: object
-    :param max_partitions_number: The maximum number of partitions the table
-     will be split into. Type: integer (or Expression with resultType
-     integer).
-    :type max_partitions_number: object
-    """
-
-    _attribute_map = {
-        'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'},
-        'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'},
-        'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'},
-        'max_partitions_number': {'key': 'maxPartitionsNumber', 'type': 'object'},
-    }
-
-    def __init__(self, *, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, max_partitions_number=None, **kwargs) -> None:
-        super(SapTablePartitionSettings, self).__init__(**kwargs)
-        self.partition_column_name = partition_column_name
-        self.partition_upper_bound = partition_upper_bound
-        self.partition_lower_bound = partition_lower_bound
-        self.max_partitions_number = max_partitions_number
-
-
-class SapTableResourceDataset(Dataset):
-    """SAP Table Resource properties.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param description: Dataset description.
-    :type description: str
-    :param structure: Columns that define the structure of the dataset. Type:
-     array (or Expression with resultType array), itemType: DatasetDataElement.
-    :type structure: object
-    :param schema: Columns that define the physical type schema of the
-     dataset. Type: array (or Expression with resultType array), itemType:
-     DatasetSchemaDataElement.
-    :type schema: object
-    :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param parameters: Parameters for dataset.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     Dataset.
-    :type annotations: list[object]
-    :param folder: The folder that this Dataset is in. If not specified,
-     Dataset will appear at the root level.
-    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param table_name: Required. The name of the SAP Table. Type: string (or
-     Expression with resultType string).
-    :type table_name: object
-    """
-
-    _validation = {
-        'linked_service_name': {'required': True},
-        'type': {'required': True},
-        'table_name': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'description': {'key': 'description', 'type': 'str'},
-        'structure': {'key': 'structure', 'type': 'object'},
-        'schema': {'key': 'schema', 'type': 'object'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
-        'type': {'key': 'type', 'type': 'str'},
-        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
-    }
-
-    def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None:
-        super(SapTableResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
-        self.table_name = table_name
-        self.type = 'SapTableResource'
-
-
-class SapTableSource(CopySource):
-    """A copy activity source for SAP Table.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param source_retry_count: Source retry count. Type: integer (or
-     Expression with resultType integer).
-    :type source_retry_count: object
-    :param source_retry_wait: Source retry wait. Type: string (or Expression
-     with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type source_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the source data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param row_count: The number of rows to be retrieved. Type: integer (or
-     Expression with resultType integer).
-    :type row_count: object
-    :param row_skips: The number of rows that will be skipped. Type: integer
-     (or Expression with resultType integer).
-    :type row_skips: object
-    :param rfc_table_fields: The fields of the SAP table that will be
-     retrieved. For example, column0, column1. Type: string (or Expression
-     with resultType string).
-    :type rfc_table_fields: object
-    :param rfc_table_options: The options for the filtering of the SAP Table.
-     For example, COLUMN0 EQ SOME VALUE. Type: string (or Expression with
-     resultType string).
-    :type rfc_table_options: object
-    :param batch_size: Specifies the maximum number of rows that will be
-     retrieved at a time when retrieving data from SAP Table. Type: integer
-     (or Expression with resultType integer).
-    :type batch_size: object
-    :param custom_rfc_read_table_function_module: Specifies the custom RFC
-     function module that will be used to read data from SAP Table. Type:
-     string (or Expression with resultType string).
-    :type custom_rfc_read_table_function_module: object
-    :param partition_option: The partition mechanism that will be used for SAP
-     table read in parallel. Possible values include: 'None',
-     'PartitionOnInt', 'PartitionOnCalendarYear', 'PartitionOnCalendarMonth',
-     'PartitionOnCalendarDate', 'PartitionOnTime'
-    :type partition_option: str or
-     ~azure.mgmt.datafactory.models.SapTablePartitionOption
-    :param partition_settings: The settings that will be leveraged for SAP
-     table source partitioning.
-    :type partition_settings:
-     ~azure.mgmt.datafactory.models.SapTablePartitionSettings
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
-        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'row_count': {'key': 'rowCount', 'type': 'object'},
-        'row_skips': {'key': 'rowSkips', 'type': 'object'},
-        'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'},
-        'rfc_table_options': {'key': 'rfcTableOptions', 'type': 'object'},
-        'batch_size': {'key': 'batchSize', 'type': 'object'},
-        'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'},
-        'partition_option': {'key': 'partitionOption', 'type': 'str'},
-        'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'},
-    }
-
-    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, row_count=None, row_skips=None, rfc_table_fields=None, rfc_table_options=None, batch_size=None, custom_rfc_read_table_function_module=None, partition_option=None, partition_settings=None, **kwargs) -> None:
-        super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
-        self.row_count = row_count
-        self.row_skips = row_skips
-        self.rfc_table_fields = rfc_table_fields
-        self.rfc_table_options = rfc_table_options
-        self.batch_size = batch_size
-        self.custom_rfc_read_table_function_module = custom_rfc_read_table_function_module
-        self.partition_option = partition_option
-        self.partition_settings = partition_settings
-        self.type = 'SapTableSource'
-
-
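# Editor's sketch (not part of the generated code above): a parallel SAP
# table read partitioned on a calendar-date column. Column name, bounds and
# partition count are illustrative assumptions.
from azure.mgmt.datafactory.models import (
    SapTablePartitionSettings, SapTableSource)

partition_settings = SapTablePartitionSettings(
    partition_column_name='BUDAT',        # posting date column
    partition_lower_bound='20190101',
    partition_upper_bound='20191231',
    max_partitions_number=10)             # split into at most 10 reads
source = SapTableSource(
    rfc_table_options="BUDAT GE '20190101'",  # SAP-style filter string
    partition_option='PartitionOnCalendarDate',
    partition_settings=partition_settings)
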
-class ScheduleTrigger(MultiplePipelineTrigger):
-    """Trigger that creates pipeline runs periodically, on schedule.
-
-    Variables are only populated by the server, and will be ignored when
-    sending a request.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param description: Trigger description.
-    :type description: str
-    :ivar runtime_state: Indicates if trigger is running or not. Updated when
-     Start/Stop APIs are called on the Trigger. Possible values include:
-     'Started', 'Stopped', 'Disabled'
-    :vartype runtime_state: str or
-     ~azure.mgmt.datafactory.models.TriggerRuntimeState
-    :param annotations: List of tags that can be used for describing the
-     trigger.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param pipelines: Pipelines that need to be started.
-    :type pipelines:
-     list[~azure.mgmt.datafactory.models.TriggerPipelineReference]
-    :param recurrence: Required. Recurrence schedule configuration.
-    :type recurrence: ~azure.mgmt.datafactory.models.ScheduleTriggerRecurrence
-    """
-
-    _validation = {
-        'runtime_state': {'readonly': True},
-        'type': {'required': True},
-        'recurrence': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'description': {'key': 'description', 'type': 'str'},
-        'runtime_state': {'key': 'runtimeState', 'type': 'str'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'},
-        'recurrence': {'key': 'typeProperties.recurrence', 'type': 'ScheduleTriggerRecurrence'},
-    }
-
-    def __init__(self, *, recurrence, additional_properties=None, description: str=None, annotations=None, pipelines=None, **kwargs) -> None:
-        super(ScheduleTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs)
-        self.recurrence = recurrence
-        self.type = 'ScheduleTrigger'
-
-
-class ScheduleTriggerRecurrence(Model):
-    """The workflow trigger recurrence.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param frequency: The frequency. Possible values include: 'NotSpecified',
-     'Minute', 'Hour', 'Day', 'Week', 'Month', 'Year'
-    :type frequency: str or ~azure.mgmt.datafactory.models.RecurrenceFrequency
-    :param interval: The interval.
-    :type interval: int
-    :param start_time: The start time.
-    :type start_time: datetime
-    :param end_time: The end time.
-    :type end_time: datetime
-    :param time_zone: The time zone.
-    :type time_zone: str
-    :param schedule: The recurrence schedule.
-    :type schedule: ~azure.mgmt.datafactory.models.RecurrenceSchedule
-    """
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'frequency': {'key': 'frequency', 'type': 'str'},
-        'interval': {'key': 'interval', 'type': 'int'},
-        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
-        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
-        'time_zone': {'key': 'timeZone', 'type': 'str'},
-        'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'},
-    }
-
-    def __init__(self, *, additional_properties=None, frequency=None, interval: int=None, start_time=None, end_time=None, time_zone: str=None, schedule=None, **kwargs) -> None:
-        super(ScheduleTriggerRecurrence, self).__init__(**kwargs)
-        self.additional_properties = additional_properties
-        self.frequency = frequency
-        self.interval = interval
-        self.start_time = start_time
-        self.end_time = end_time
-        self.time_zone = time_zone
-        self.schedule = schedule
-
-
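# Editor's sketch (not part of the generated code above): a trigger that
# starts one pipeline every 15 minutes; the pipeline name is an illustrative
# assumption.
from datetime import datetime
from azure.mgmt.datafactory.models import (
    PipelineReference, ScheduleTrigger, ScheduleTriggerRecurrence,
    TriggerPipelineReference)

recurrence = ScheduleTriggerRecurrence(
    frequency='Minute', interval=15,
    start_time=datetime(2019, 6, 7), time_zone='UTC')
trigger = ScheduleTrigger(
    recurrence=recurrence,
    pipelines=[TriggerPipelineReference(
        pipeline_reference=PipelineReference(reference_name='MyPipeline'))])
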
-class ScriptAction(Model):
-    """Custom script action to run on an HDInsight on-demand cluster once
-    it's up.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param name: Required. The user provided name of the script action.
-    :type name: str
-    :param uri: Required. The URI for the script action.
-    :type uri: str
-    :param roles: Required. The node types on which the script action should
-     be executed.
-    :type roles: object
-    :param parameters: The parameters for the script action.
-    :type parameters: str
-    """
-
-    _validation = {
-        'name': {'required': True},
-        'uri': {'required': True},
-        'roles': {'required': True},
-    }
-
-    _attribute_map = {
-        'name': {'key': 'name', 'type': 'str'},
-        'uri': {'key': 'uri', 'type': 'str'},
-        'roles': {'key': 'roles', 'type': 'object'},
-        'parameters': {'key': 'parameters', 'type': 'str'},
-    }
-
-    def __init__(self, *, name: str, uri: str, roles, parameters: str=None, **kwargs) -> None:
-        super(ScriptAction, self).__init__(**kwargs)
-        self.name = name
-        self.uri = uri
-        self.roles = roles
-        self.parameters = parameters
-
-
-class SecureString(SecretBase):
-    """Azure Data Factory secure string definition. The string value will be
-    masked with asterisks '*' during Get or List API calls.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param value: Required. Value of secure string.
-    :type value: str
-    """
-
-    _validation = {
-        'type': {'required': True},
-        'value': {'required': True},
-    }
-
-    _attribute_map = {
-        'type': {'key': 'type', 'type': 'str'},
-        'value': {'key': 'value', 'type': 'str'},
-    }
-
-    def __init__(self, *, value: str, **kwargs) -> None:
-        super(SecureString, self).__init__(**kwargs)
-        self.value = value
-        self.type = 'SecureString'
-
-
-class SelfDependencyTumblingWindowTriggerReference(DependencyReference):
-    """Self referenced tumbling window trigger dependency.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param offset: Required. Timespan applied to the start time of a tumbling
-     window when evaluating dependency.
-    :type offset: str
-    :param size: The size of the window when evaluating the dependency. If
-     undefined the frequency of the tumbling window will be used.
-    :type size: str
-    """
-
-    _validation = {
-        'type': {'required': True},
-        'offset': {'required': True, 'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'},
-        'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'},
-    }
-
-    _attribute_map = {
-        'type': {'key': 'type', 'type': 'str'},
-        'offset': {'key': 'offset', 'type': 'str'},
-        'size': {'key': 'size', 'type': 'str'},
-    }
-
-    def __init__(self, *, offset: str, size: str=None, **kwargs) -> None:
-        super(SelfDependencyTumblingWindowTriggerReference, self).__init__(**kwargs)
-        self.offset = offset
-        self.size = size
-        self.type = 'SelfDependencyTumblingWindowTriggerReference'
-
-
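# Editor's sketch (not part of the generated code above): SecureString wraps
# an inline secret. Because Get/List responses mask the value with asterisks,
# a resource read back from the service cannot be redeployed with the
# original secret; the value below is a placeholder.
from azure.mgmt.datafactory.models import SecureString

password = SecureString(value='<my-secret>')
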
-class SelfHostedIntegrationRuntime(IntegrationRuntime):
-    """Self-hosted integration runtime.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param description: Integration runtime description.
-    :type description: str
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param linked_info:
-    :type linked_info:
-     ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeType
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'description': {'key': 'description', 'type': 'str'},
-        'type': {'key': 'type', 'type': 'str'},
-        'linked_info': {'key': 'typeProperties.linkedInfo', 'type': 'LinkedIntegrationRuntimeType'},
-    }
-
-    def __init__(self, *, additional_properties=None, description: str=None, linked_info=None, **kwargs) -> None:
-        super(SelfHostedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description, **kwargs)
-        self.linked_info = linked_info
-        self.type = 'SelfHosted'
-
-
-class SelfHostedIntegrationRuntimeNode(Model):
-    """Properties of Self-hosted integration runtime node.
-
-    Variables are only populated by the server, and will be ignored when
-    sending a request.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :ivar node_name: Name of the integration runtime node.
-    :vartype node_name: str
-    :ivar machine_name: Machine name of the integration runtime node.
-    :vartype machine_name: str
-    :ivar host_service_uri: URI for the host machine of the integration
-     runtime.
-    :vartype host_service_uri: str
-    :ivar status: Status of the integration runtime node. Possible values
-     include: 'NeedRegistration', 'Online', 'Limited', 'Offline', 'Upgrading',
-     'Initializing', 'InitializeFailed'
-    :vartype status: str or
-     ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNodeStatus
-    :ivar capabilities: The integration runtime capabilities dictionary.
-    :vartype capabilities: dict[str, str]
-    :ivar version_status: Status of the integration runtime node version.
-    :vartype version_status: str
-    :ivar version: Version of the integration runtime node.
-    :vartype version: str
-    :ivar register_time: The time at which the integration runtime node was
-     registered in ISO8601 format.
-    :vartype register_time: datetime
-    :ivar last_connect_time: The most recent time at which the integration
-     runtime was connected in ISO8601 format.
-    :vartype last_connect_time: datetime
-    :ivar expiry_time: The time at which the integration runtime will expire
-     in ISO8601 format.
-    :vartype expiry_time: datetime
-    :ivar last_start_time: The time the node last started up.
-    :vartype last_start_time: datetime
-    :ivar last_stop_time: The integration runtime node last stop time.
-    :vartype last_stop_time: datetime
-    :ivar last_update_result: The result of the last integration runtime node
-     update. Possible values include: 'None', 'Succeed', 'Fail'
-    :vartype last_update_result: str or
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeUpdateResult
-    :ivar last_start_update_time: The last time for the integration runtime
-     node update start.
-    :vartype last_start_update_time: datetime
-    :ivar last_end_update_time: The last time for the integration runtime node
-     update end.
-    :vartype last_end_update_time: datetime
-    :ivar is_active_dispatcher: Indicates whether this node is the active
-     dispatcher for integration runtime requests.
-    :vartype is_active_dispatcher: bool
-    :ivar concurrent_jobs_limit: Maximum concurrent jobs on the integration
-     runtime node.
-    :vartype concurrent_jobs_limit: int
-    :ivar max_concurrent_jobs: The maximum concurrent jobs in this integration
-     runtime.
-    :vartype max_concurrent_jobs: int
-    """
-
-    _validation = {
-        'node_name': {'readonly': True},
-        'machine_name': {'readonly': True},
-        'host_service_uri': {'readonly': True},
-        'status': {'readonly': True},
-        'capabilities': {'readonly': True},
-        'version_status': {'readonly': True},
-        'version': {'readonly': True},
-        'register_time': {'readonly': True},
-        'last_connect_time': {'readonly': True},
-        'expiry_time': {'readonly': True},
-        'last_start_time': {'readonly': True},
-        'last_stop_time': {'readonly': True},
-        'last_update_result': {'readonly': True},
-        'last_start_update_time': {'readonly': True},
-        'last_end_update_time': {'readonly': True},
-        'is_active_dispatcher': {'readonly': True},
-        'concurrent_jobs_limit': {'readonly': True},
-        'max_concurrent_jobs': {'readonly': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'node_name': {'key': 'nodeName', 'type': 'str'},
-        'machine_name': {'key': 'machineName', 'type': 'str'},
-        'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'},
-        'status': {'key': 'status', 'type': 'str'},
-        'capabilities': {'key': 'capabilities', 'type': '{str}'},
-        'version_status': {'key': 'versionStatus', 'type': 'str'},
-        'version': {'key': 'version', 'type': 'str'},
-        'register_time': {'key': 'registerTime', 'type': 'iso-8601'},
-        'last_connect_time': {'key': 'lastConnectTime', 'type': 'iso-8601'},
-        'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'},
-        'last_start_time': {'key': 'lastStartTime', 'type': 'iso-8601'},
-        'last_stop_time': {'key': 'lastStopTime', 'type': 'iso-8601'},
-        'last_update_result': {'key': 'lastUpdateResult', 'type': 'str'},
-        'last_start_update_time': {'key': 'lastStartUpdateTime', 'type': 'iso-8601'},
-        'last_end_update_time': {'key': 'lastEndUpdateTime', 'type': 'iso-8601'},
-        'is_active_dispatcher': {'key': 'isActiveDispatcher', 'type': 'bool'},
-        'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'},
-        'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'},
-    }
-
-    def __init__(self, *, additional_properties=None, **kwargs) -> None:
-        super(SelfHostedIntegrationRuntimeNode, self).__init__(**kwargs)
-        self.additional_properties = additional_properties
-        self.node_name = None
-        self.machine_name = None
-        self.host_service_uri = None
-        self.status = None
-        self.capabilities = None
-        self.version_status = None
-        self.version = None
-        self.register_time = None
-        self.last_connect_time = None
-        self.expiry_time = None
-        self.last_start_time = None
-        self.last_stop_time = None
-        self.last_update_result = None
-        self.last_start_update_time = None
-        self.last_end_update_time = None
-        self.is_active_dispatcher = None
-        self.concurrent_jobs_limit = None
-        self.max_concurrent_jobs = None
-
-
-class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus):
-    """Self-hosted integration runtime status.
-
-    Variables are only populated by the server, and will be ignored when
-    sending a request.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :ivar data_factory_name: The data factory name which the integration
-     runtime belongs to.
-    :vartype data_factory_name: str
-    :ivar state: The state of integration runtime. Possible values include:
-     'Initial', 'Stopped', 'Started', 'Starting', 'Stopping',
-     'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied'
-    :vartype state: str or
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeState
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :ivar create_time: The time at which the integration runtime was created,
-     in ISO8601 format.
-    :vartype create_time: datetime
-    :ivar task_queue_id: The task queue id of the integration runtime.
-    :vartype task_queue_id: str
-    :ivar internal_channel_encryption: It is used to set the encryption mode
-     for the node-to-node communication channel (when more than two
-     self-hosted integration runtime nodes exist). Possible values include:
-     'NotSet', 'SslEncrypted', 'NotEncrypted'
-    :vartype internal_channel_encryption: str or
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeInternalChannelEncryptionMode
-    :ivar version: Version of the integration runtime.
-    :vartype version: str
-    :param nodes: The list of nodes for this integration runtime.
-    :type nodes:
-     list[~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode]
-    :ivar scheduled_update_date: The date at which the integration runtime
-     will be scheduled to update, in ISO8601 format.
-    :vartype scheduled_update_date: datetime
-    :ivar update_delay_offset: The time offset within the scheduled update
-     date at which the service will update the integration runtime, e.g.,
-     PT03H means 3 hours.
-    :vartype update_delay_offset: str
-    :ivar local_time_zone_offset: The local time zone offset in hours.
-    :vartype local_time_zone_offset: str
-    :ivar capabilities: Object with additional information about integration
-     runtime capabilities.
-    :vartype capabilities: dict[str, str]
-    :ivar service_urls: The URLs for the services used in integration runtime
-     backend service.
-    :vartype service_urls: list[str]
-    :ivar auto_update: Whether self-hosted integration runtime auto-update has
-     been turned on. Possible values include: 'On', 'Off'
-    :vartype auto_update: str or
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate
-    :ivar version_status: Status of the integration runtime version.
-    :vartype version_status: str
-    :param links: The list of linked integration runtimes that are created to
-     share with this integration runtime.
-    :type links: list[~azure.mgmt.datafactory.models.LinkedIntegrationRuntime]
-    :ivar pushed_version: The version that the integration runtime is going to
-     update to.
-    :vartype pushed_version: str
-    :ivar latest_version: The latest version on download center.
-    :vartype latest_version: str
-    :ivar auto_update_eta: The estimated time when the self-hosted integration
-     runtime will be updated.
-    :vartype auto_update_eta: datetime
-    """
-
-    _validation = {
-        'data_factory_name': {'readonly': True},
-        'state': {'readonly': True},
-        'type': {'required': True},
-        'create_time': {'readonly': True},
-        'task_queue_id': {'readonly': True},
-        'internal_channel_encryption': {'readonly': True},
-        'version': {'readonly': True},
-        'scheduled_update_date': {'readonly': True},
-        'update_delay_offset': {'readonly': True},
-        'local_time_zone_offset': {'readonly': True},
-        'capabilities': {'readonly': True},
-        'service_urls': {'readonly': True},
-        'auto_update': {'readonly': True},
-        'version_status': {'readonly': True},
-        'pushed_version': {'readonly': True},
-        'latest_version': {'readonly': True},
-        'auto_update_eta': {'readonly': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'},
-        'state': {'key': 'state', 'type': 'str'},
-        'type': {'key': 'type', 'type': 'str'},
-        'create_time': {'key': 'typeProperties.createTime', 'type': 'iso-8601'},
-        'task_queue_id': {'key': 'typeProperties.taskQueueId', 'type': 'str'},
-        'internal_channel_encryption': {'key': 'typeProperties.internalChannelEncryption', 'type': 'str'},
-        'version': {'key': 'typeProperties.version', 'type': 'str'},
-        'nodes': {'key': 'typeProperties.nodes', 'type': '[SelfHostedIntegrationRuntimeNode]'},
-        'scheduled_update_date': {'key': 'typeProperties.scheduledUpdateDate', 'type': 'iso-8601'},
-        'update_delay_offset': {'key': 'typeProperties.updateDelayOffset', 'type': 'str'},
-        'local_time_zone_offset': {'key': 'typeProperties.localTimeZoneOffset', 'type': 'str'},
-        'capabilities': {'key': 'typeProperties.capabilities', 'type': '{str}'},
-        'service_urls': {'key': 'typeProperties.serviceUrls', 'type': '[str]'},
-        'auto_update': {'key': 'typeProperties.autoUpdate', 'type': 'str'},
-        'version_status': {'key': 'typeProperties.versionStatus', 'type': 'str'},
-        'links': {'key': 'typeProperties.links', 'type': '[LinkedIntegrationRuntime]'},
-        'pushed_version': {'key': 'typeProperties.pushedVersion', 'type': 'str'},
-        'latest_version': {'key': 'typeProperties.latestVersion', 'type': 'str'},
-        'auto_update_eta': {'key': 'typeProperties.autoUpdateETA', 'type': 'iso-8601'},
-    }
-
-    def __init__(self, *, additional_properties=None, nodes=None, links=None, **kwargs) -> None:
-        super(SelfHostedIntegrationRuntimeStatus, self).__init__(additional_properties=additional_properties, **kwargs)
-        self.create_time = None
-        self.task_queue_id = None
-        self.internal_channel_encryption = None
-        self.version = None
-        self.nodes = nodes
-        self.scheduled_update_date = None
-        self.update_delay_offset = None
-        self.local_time_zone_offset = None
-        self.capabilities = None
-        self.service_urls = None
-        self.auto_update = None
-        self.version_status = None
-        self.links = links
-        self.pushed_version = None
-        self.latest_version = None
-        self.auto_update_eta = None
-        self.type = 'SelfHosted'
-
-
-class ServiceNowLinkedService(LinkedService):
-    """ServiceNow server linked service.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param endpoint: Required. The endpoint of the ServiceNow server. (i.e.
-     <instance>.service-now.com)
-    :type endpoint: object
-    :param authentication_type: Required. The authentication type to use.
-     Possible values include: 'Basic', 'OAuth2'
-    :type authentication_type: str or
-     ~azure.mgmt.datafactory.models.ServiceNowAuthenticationType
-    :param username: The user name used to connect to the ServiceNow server
-     for Basic and OAuth2 authentication.
-    :type username: object
-    :param password: The password corresponding to the user name for Basic and
-     OAuth2 authentication.
-    :type password: ~azure.mgmt.datafactory.models.SecretBase
-    :param client_id: The client id for OAuth2 authentication.
-    :type client_id: object
-    :param client_secret: The client secret for OAuth2 authentication.
-    :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
-    :param use_encrypted_endpoints: Specifies whether the data source
-     endpoints are encrypted using HTTPS. The default value is true.
-    :type use_encrypted_endpoints: object
-    :param use_host_verification: Specifies whether to require the host name
-     in the server's certificate to match the host name of the server when
-     connecting over SSL. The default value is true.
-    :type use_host_verification: object
-    :param use_peer_verification: Specifies whether to verify the identity of
-     the server when connecting over SSL. The default value is true.
-    :type use_peer_verification: object
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
-    :type encrypted_credential: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-        'endpoint': {'required': True},
-        'authentication_type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
-        'description': {'key': 'description', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'},
-        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
-        'username': {'key': 'typeProperties.username', 'type': 'object'},
-        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
-        'client_id': {'key': 'typeProperties.clientId', 'type': 'object'},
-        'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'},
-        'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
-        'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
-        'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
-        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
-    }
-
-    def __init__(self, *, endpoint, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, username=None, password=None, client_id=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None:
-        super(ServiceNowLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
-        self.endpoint = endpoint
-        self.authentication_type = authentication_type
-        self.username = username
-        self.password = password
-        self.client_id = client_id
-        self.client_secret = client_secret
-        self.use_encrypted_endpoints = use_encrypted_endpoints
-        self.use_host_verification = use_host_verification
-        self.use_peer_verification = use_peer_verification
-        self.encrypted_credential = encrypted_credential
-        self.type = 'ServiceNow'
-
-
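# Editor's sketch (not part of the generated code above): a ServiceNow linked
# service using OAuth2. Endpoint and credentials are illustrative
# assumptions; per the docstring, user name and password also apply to
# OAuth2 authentication.
from azure.mgmt.datafactory.models import SecureString, ServiceNowLinkedService

service_now = ServiceNowLinkedService(
    endpoint='http://myinstance.service-now.com',
    authentication_type='OAuth2',
    username='integration.user',
    password=SecureString(value='<password>'),
    client_id='<client-id>',
    client_secret=SecureString(value='<client-secret>'))
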
-class ServiceNowObjectDataset(Dataset):
-    """ServiceNow server dataset.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param description: Dataset description.
-    :type description: str
-    :param structure: Columns that define the structure of the dataset. Type:
-     array (or Expression with resultType array), itemType: DatasetDataElement.
-    :type structure: object
-    :param schema: Columns that define the physical type schema of the
-     dataset. Type: array (or Expression with resultType array), itemType:
-     DatasetSchemaDataElement.
-    :type schema: object
-    :param linked_service_name: Required. Linked service reference.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param parameters: Parameters for dataset.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     Dataset.
-    :type annotations: list[object]
-    :param folder: The folder that this Dataset is in. If not specified,
-     Dataset will appear at the root level.
-    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param table_name: The table name. Type: string (or Expression with
-     resultType string).
-    :type table_name: object
-    """
-
-    _validation = {
-        'linked_service_name': {'required': True},
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'description': {'key': 'description', 'type': 'str'},
-        'structure': {'key': 'structure', 'type': 'object'},
-        'schema': {'key': 'schema', 'type': 'object'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
-        'type': {'key': 'type', 'type': 'str'},
-        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
-    }
-
-    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None:
-        super(ServiceNowObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
-        self.table_name = table_name
-        self.type = 'ServiceNowObject'
-
-
-class ServiceNowSource(CopySource):
-    """A copy activity ServiceNow server source.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param source_retry_count: Source retry count. Type: integer (or
-     Expression with resultType integer).
-    :type source_retry_count: object
-    :param source_retry_wait: Source retry wait. Type: string (or Expression
-     with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type source_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the source data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param query: A query to retrieve data from source. Type: string (or
-     Expression with resultType string).
-    :type query: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
-        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'query': {'key': 'query', 'type': 'object'},
-    }
-
-    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
-        super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
-        self.query = query
-        self.type = 'ServiceNowSource'
-
-
-class SetVariableActivity(ControlActivity):
-    """Set value for a Variable.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param name: Required. Activity name.
-    :type name: str
-    :param description: Activity description.
-    :type description: str
-    :param depends_on: Activity depends on condition.
-    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
-    :param user_properties: Activity user properties.
-    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param variable_name: Name of the variable whose value needs to be set.
-    :type variable_name: str
-    :param value: Value to be set. Could be a static value or an Expression.
-    :type value: object
-    """
-
-    _validation = {
-        'name': {'required': True},
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'name': {'key': 'name', 'type': 'str'},
-        'description': {'key': 'description', 'type': 'str'},
-        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
-        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'},
-        'value': {'key': 'typeProperties.value', 'type': 'object'},
-    }
-
-    def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, variable_name: str=None, value=None, **kwargs) -> None:
-        super(SetVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs)
-        self.variable_name = variable_name
-        self.value = value
-        self.type = 'SetVariable'
-
-
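# Editor's sketch (not part of the generated code above): the value can be a
# static literal or an ADF expression; the dict below uses the service's
# standard expression JSON shape.
from azure.mgmt.datafactory.models import SetVariableActivity

set_run_id = SetVariableActivity(
    name='CaptureRunId',
    variable_name='runId',
    value={'value': '@pipeline().RunId', 'type': 'Expression'})
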
-class SftpLocation(DatasetLocation):
-    """The location of SFTP dataset.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param type: Required. Type of dataset storage location.
-    :type type: str
-    :param folder_path: Specify the folder path of dataset. Type: string (or
-     Expression with resultType string).
-    :type folder_path: object
-    :param file_name: Specify the file name of dataset. Type: string (or
-     Expression with resultType string).
-    :type file_name: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'type': {'key': 'type', 'type': 'str'},
-        'folder_path': {'key': 'folderPath', 'type': 'object'},
-        'file_name': {'key': 'fileName', 'type': 'object'},
-    }
-
-    def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None:
-        super(SftpLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs)
-
-
-class SftpReadSettings(StoreReadSettings):
-    """Sftp read settings.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param type: Required. The read setting type.
-    :type type: str
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the source data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param recursive: If true, files under the folder path will be read
-     recursively. Default is true. Type: boolean (or Expression with
-     resultType boolean).
-    :type recursive: object
-    :param wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or
-     Expression with resultType string).
-    :type wildcard_folder_path: object
-    :param wildcard_file_name: Sftp wildcardFileName. Type: string (or
-     Expression with resultType string).
-    :type wildcard_file_name: object
-    :param modified_datetime_start: The start of file's modified datetime.
-     Type: string (or Expression with resultType string).
-    :type modified_datetime_start: object
-    :param modified_datetime_end: The end of file's modified datetime. Type:
-     string (or Expression with resultType string).
-    :type modified_datetime_end: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'type': {'key': 'type', 'type': 'str'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'recursive': {'key': 'recursive', 'type': 'object'},
-        'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
-        'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
-        'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'},
-        'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'},
-    }
-
-    def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None:
-        super(SftpReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs)
-        self.recursive = recursive
-        self.wildcard_folder_path = wildcard_folder_path
-        self.wildcard_file_name = wildcard_file_name
-        self.modified_datetime_start = modified_datetime_start
-        self.modified_datetime_end = modified_datetime_end
-
-
-class SftpServerLinkedService(LinkedService):
-    """A linked service for an SSH File Transfer Protocol (SFTP) server.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized to this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param host: Required. The SFTP server host name. Type: string (or
-     Expression with resultType string).
-    :type host: object
-    :param port: The TCP port number that the SFTP server uses to listen for
-     client connections. Default value is 22. Type: integer (or Expression
-     with resultType integer), minimum: 0.
-    :type port: object
-    :param authentication_type: The authentication type to be used to connect
-     to the SFTP server. Possible values include: 'Basic', 'SshPublicKey'
-    :type authentication_type: str or
-     ~azure.mgmt.datafactory.models.SftpAuthenticationType
-    :param user_name: The username used to log on to the SFTP server. Type:
-     string (or Expression with resultType string).
-    :type user_name: object
-    :param password: Password to log on to the SFTP server for Basic
-     authentication.
-    :type password: ~azure.mgmt.datafactory.models.SecretBase
-    :param encrypted_credential: The encrypted credential used for
-     authentication. Credentials are encrypted using the integration runtime
-     credential manager. Type: string (or Expression with resultType string).
-    :type encrypted_credential: object
-    :param private_key_path: The SSH private key file path for SshPublicKey
-     authentication. Only valid for on-premises copy. For on-premises copy
-     with SshPublicKey authentication, either PrivateKeyPath or
-     PrivateKeyContent should be specified. SSH private key should be OpenSSH
-     format. Type: string (or Expression with resultType string).
-    :type private_key_path: object
-    :param private_key_content: Base64 encoded SSH private key content for
-     SshPublicKey authentication. For on-premises copy with SshPublicKey
-     authentication, either PrivateKeyPath or PrivateKeyContent should be
-     specified. SSH private key should be OpenSSH format.
-    :type private_key_content: ~azure.mgmt.datafactory.models.SecretBase
-    :param pass_phrase: The password to decrypt the SSH private key if the SSH
-     private key is encrypted.
-    :type pass_phrase: ~azure.mgmt.datafactory.models.SecretBase
-    :param skip_host_key_validation: If true, skip the SSH host key
-     validation. Default value is false. Type: boolean (or Expression with
-     resultType boolean).
-    :type skip_host_key_validation: object
-    :param host_key_fingerprint: The host key finger-print of the SFTP server.
-     When SkipHostKeyValidation is false, HostKeyFingerprint should be
-     specified. Type: string (or Expression with resultType string).
-    :type host_key_fingerprint: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-        'host': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
-        'description': {'key': 'description', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'host': {'key': 'typeProperties.host', 'type': 'object'},
-        'port': {'key': 'typeProperties.port', 'type': 'object'},
-        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
-        'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
-        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
-        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
-        'private_key_path': {'key': 'typeProperties.privateKeyPath', 'type': 'object'},
-        'private_key_content': {'key': 'typeProperties.privateKeyContent', 'type': 'SecretBase'},
-        'pass_phrase': {'key': 'typeProperties.passPhrase', 'type': 'SecretBase'},
-        'skip_host_key_validation': {'key': 'typeProperties.skipHostKeyValidation', 'type': 'object'},
-        'host_key_fingerprint': {'key': 'typeProperties.hostKeyFingerprint', 'type': 'object'},
-    }
-
-    def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, private_key_path=None, private_key_content=None, pass_phrase=None, skip_host_key_validation=None, host_key_fingerprint=None, **kwargs) -> None:
-        super(SftpServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
-        self.host = host
-        self.port = port
-        self.authentication_type = authentication_type
-        self.user_name = user_name
-        self.password = password
-        self.encrypted_credential = encrypted_credential
-        self.private_key_path = private_key_path
-        self.private_key_content = private_key_content
-        self.pass_phrase = pass_phrase
-        self.skip_host_key_validation = skip_host_key_validation
-        self.host_key_fingerprint = host_key_fingerprint
-        self.type = 'Sftp'
-
-
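# Editor's sketch (not part of the generated code above): an SFTP linked
# service using SshPublicKey authentication. Host, user and key material are
# illustrative assumptions.
from azure.mgmt.datafactory.models import SecureString, SftpServerLinkedService

sftp = SftpServerLinkedService(
    host='sftp.example.com',
    port=22,
    authentication_type='SshPublicKey',
    user_name='loader',
    # For SshPublicKey, either private_key_path or private_key_content must
    # be supplied; the key must be in OpenSSH format.
    private_key_content=SecureString(value='<base64-encoded-openssh-key>'),
    pass_phrase=SecureString(value='<passphrase>'),
    # With host key validation on, the expected fingerprint must be given.
    skip_host_key_validation=False,
    host_key_fingerprint='ssh-rsa 2048 12:34:56:...')
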
- :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(ShopifyLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.access_token = access_token - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'Shopify' - - -class ShopifyObjectDataset(Dataset): - """Shopify Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
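A similar sketch for the ShopifyLinkedService above; the host and token values are placeholders:

    from azure.mgmt.datafactory.models import SecureString, ShopifyLinkedService

    # Illustrative values only; the access token is an offline-mode API token.
    shopify_ls = ShopifyLinkedService(
        host='mystore.myshopify.com',
        access_token=SecureString(value='<api-access-token>'),
        use_encrypted_endpoints=True)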
- :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(ShopifyObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'ShopifyObject' - - -class ShopifySource(CopySource): - """A copy activity Shopify Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'ShopifySource' - - -class SparkLinkedService(LinkedService): - """Spark Server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. IP address or host name of the Spark server - :type host: object - :param port: Required. The TCP port that the Spark server uses to listen - for client connections. - :type port: object - :param server_type: The type of Spark server. Possible values include: - 'SharkServer', 'SharkServer2', 'SparkThriftServer' - :type server_type: str or ~azure.mgmt.datafactory.models.SparkServerType - :param thrift_transport_protocol: The transport protocol to use in the - Thrift layer. Possible values include: 'Binary', 'SASL', 'HTTP ' - :type thrift_transport_protocol: str or - ~azure.mgmt.datafactory.models.SparkThriftTransportProtocol - :param authentication_type: Required. The authentication method used to - access the Spark server. Possible values include: 'Anonymous', 'Username', - 'UsernameAndPassword', 'WindowsAzureHDInsightService' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.SparkAuthenticationType - :param username: The user name that you use to access Spark Server. - :type username: object - :param password: The password corresponding to the user name that you - provided in the Username field - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param http_path: The partial URL corresponding to the Spark server. - :type http_path: object - :param enable_ssl: Specifies whether the connections to the server are - encrypted using SSL. The default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing - trusted CA certificates for verifying the server when connecting over SSL. - This property can only be set when using SSL on self-hosted IR. The - default value is the cacerts.pem file installed with the IR. 
- :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate - from the system trust store or from a specified PEM file. The default - value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a - CA-issued SSL certificate name to match the host name of the server when - connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow - self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'port': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, - 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, port, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, server_type=None, thrift_transport_protocol=None, username=None, password=None, http_path=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: - super(SparkLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.port = port - self.server_type = server_type - self.thrift_transport_protocol = thrift_transport_protocol - self.authentication_type = authentication_type - self.username = username - self.password = password - self.http_path = http_path - self.enable_ssl = enable_ssl - 
self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch - self.allow_self_signed_server_cert = allow_self_signed_server_cert - self.encrypted_credential = encrypted_credential - self.type = 'Spark' - - -class SparkObjectDataset(Dataset): - """Spark Server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized to this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of the Spark server. Type: string (or Expression - with resultType string). - :type table: object - :param spark_object_dataset_schema: The schema name of the Spark server. Type: - string (or Expression with resultType string). 
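A minimal sketch for the SparkLinkedService completed above; host, port and authentication_type are the required properties, and all values here are placeholders:

    from azure.mgmt.datafactory.models import SecureString, SparkLinkedService

    # Illustrative values only: username/password auth to a Spark Thrift
    # server with SSL enabled.
    spark_ls = SparkLinkedService(
        host='spark.example.com',
        port=443,
        server_type='SparkThriftServer',
        thrift_transport_protocol='Binary',
        authentication_type='UsernameAndPassword',
        username='analyst',
        password=SecureString(value='<password>'),
        enable_ssl=True)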
- :type spark_object_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'spark_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, spark_object_dataset_schema=None, **kwargs) -> None: - super(SparkObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.table = table - self.spark_object_dataset_schema = spark_object_dataset_schema - self.type = 'SparkObject' - - -class SparkSource(CopySource): - """A copy activity Spark Server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'SparkSource' - - -class SqlDWSink(CopySink): - """A copy activity SQL Data Warehouse sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression - with resultType string). - :type pre_copy_script: object - :param allow_poly_base: Indicates to use PolyBase to copy data into SQL - Data Warehouse when applicable. Type: boolean (or Expression with - resultType boolean). - :type allow_poly_base: object - :param poly_base_settings: Specifies PolyBase-related settings when - allowPolyBase is true. - :type poly_base_settings: ~azure.mgmt.datafactory.models.PolybaseSettings - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). 
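A sketch for the SparkObjectDataset above, assuming a linked service named 'SparkLS' registered elsewhere; note that spark_object_dataset_schema serializes to typeProperties.schema, distinct from the dataset-level schema property:

    from azure.mgmt.datafactory.models import LinkedServiceReference, SparkObjectDataset

    # Illustrative values only: schema + table instead of the retired table_name.
    spark_ds = SparkObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name='SparkLS'),
        spark_object_dataset_schema='default',
        table='events')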
- :type table_option: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, - 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, allow_poly_base=None, poly_base_settings=None, table_option=None, **kwargs) -> None: - super(SqlDWSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.pre_copy_script = pre_copy_script - self.allow_poly_base = allow_poly_base - self.poly_base_settings = poly_base_settings - self.table_option = table_option - self.type = 'SqlDWSink' - - -class SqlDWSource(CopySource): - """A copy activity SQL Data Warehouse source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or - Expression with resultType string). - :type sql_reader_query: object - :param sql_reader_stored_procedure_name: Name of the stored procedure for - a SQL Data Warehouse source. This cannot be used at the same time as - SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: object - :param stored_procedure_parameters: Value and type setting for stored - procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". - Type: object (or Expression with resultType object), itemType: - StoredProcedureParameter. 
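A sketch for the SqlDWSink above, showing a PolyBase load with automatic table creation; the PolybaseSettings values are illustrative:

    from azure.mgmt.datafactory.models import PolybaseSettings, SqlDWSink

    # Illustrative values only: per the docstring, 'autoCreate' is currently
    # the only supported table_option value.
    dw_sink = SqlDWSink(
        allow_poly_base=True,
        poly_base_settings=PolybaseSettings(
            reject_type='percentage',
            reject_value=5.0,
            reject_sample_value=100,
            use_type_default=True),
        table_option='autoCreate')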
- :type stored_procedure_parameters: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, - 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: - super(SqlDWSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.sql_reader_query = sql_reader_query - self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name - self.stored_procedure_parameters = stored_procedure_parameters - self.type = 'SqlDWSource' - - -class SqlMISink(CopySink): - """A copy activity Azure SQL Managed Instance sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_writer_stored_procedure_name: SQL writer stored procedure name. - Type: string (or Expression with resultType string). - :type sql_writer_stored_procedure_name: object - :param sql_writer_table_type: SQL writer table type. Type: string (or - Expression with resultType string). - :type sql_writer_table_type: object - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression - with resultType string). - :type pre_copy_script: object - :param stored_procedure_parameters: SQL stored procedure parameters. - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param stored_procedure_table_type_parameter_name: The stored procedure - parameter name of the table type. Type: string (or Expression with - resultType string). 
- :type stored_procedure_table_type_parameter_name: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, - 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None, **kwargs) -> None: - super(SqlMISink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name - self.sql_writer_table_type = sql_writer_table_type - self.pre_copy_script = pre_copy_script - self.stored_procedure_parameters = stored_procedure_parameters - self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name - self.table_option = table_option - self.type = 'SqlMISink' - - -class SqlMISource(CopySource): - """A copy activity Azure SQL Managed Instance source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_reader_query: SQL reader query. Type: string (or Expression - with resultType string). 
- :type sql_reader_query: object - :param sql_reader_stored_procedure_name: Name of the stored procedure for - an Azure SQL Managed Instance source. This cannot be used at the same time - as SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: object - :param stored_procedure_parameters: Value and type setting for stored - procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param produce_additional_types: Which additional types to produce. - :type produce_additional_types: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, - 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: - super(SqlMISource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.sql_reader_query = sql_reader_query - self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name - self.stored_procedure_parameters = stored_procedure_parameters - self.produce_additional_types = produce_additional_types - self.type = 'SqlMISource' - - -class SqlServerLinkedService(LinkedService): - """SQL Server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized to this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param user_name: The on-premises Windows authentication user name. Type: - string (or Expression with resultType string). - :type user_name: object - :param password: The on-premises Windows authentication password. 
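A sketch for the SqlMISource completed above, reading via a stored procedure; the procedure and parameter names are placeholders. Per the docstring, sql_reader_query must be left unset when the stored procedure name is used:

    from azure.mgmt.datafactory.models import SqlMISource, StoredProcedureParameter

    # Illustrative values only.
    mi_source = SqlMISource(
        sql_reader_stored_procedure_name='usp_GetChangedRows',
        stored_procedure_parameters={
            'Parameter1': StoredProcedureParameter(value='1', type='Int')})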
- :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(SqlServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'SqlServer' - - -class SqlServerSink(CopySink): - """A copy activity SQL server sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_writer_stored_procedure_name: SQL writer stored procedure name. - Type: string (or Expression with resultType string). - :type sql_writer_stored_procedure_name: object - :param sql_writer_table_type: SQL writer table type. Type: string (or - Expression with resultType string). - :type sql_writer_table_type: object - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression - with resultType string). - :type pre_copy_script: object - :param stored_procedure_parameters: SQL stored procedure parameters. 
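A sketch for the SqlServerLinkedService above, using on-premises Windows authentication; the connection string and credentials are placeholders:

    from azure.mgmt.datafactory.models import SecureString, SqlServerLinkedService

    # Illustrative values only: connection_string is the sole required
    # type property.
    sql_ls = SqlServerLinkedService(
        connection_string='Data Source=myserver;Initial Catalog=mydb;'
                          'Integrated Security=True',
        user_name='CONTOSO\\loader',
        password=SecureString(value='<password>'))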
- :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param stored_procedure_table_type_parameter_name: The stored procedure - parameter name of the table type. Type: string (or Expression with - resultType string). - :type stored_procedure_table_type_parameter_name: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, - 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None, **kwargs) -> None: - super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name - self.sql_writer_table_type = sql_writer_table_type - self.pre_copy_script = pre_copy_script - self.stored_procedure_parameters = stored_procedure_parameters - self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name - self.table_option = table_option - self.type = 'SqlServerSink' - - -class SqlServerSource(CopySource): - """A copy activity SQL server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). 
- :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_reader_query: SQL reader query. Type: string (or Expression - with resultType string). - :type sql_reader_query: object - :param sql_reader_stored_procedure_name: Name of the stored procedure for - a SQL Database source. This cannot be used at the same time as - SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: object - :param stored_procedure_parameters: Value and type setting for stored - procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param produce_additional_types: Which additional types to produce. - :type produce_additional_types: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, - 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: - super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.sql_reader_query = sql_reader_query - self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name - self.stored_procedure_parameters = stored_procedure_parameters - self.produce_additional_types = produce_additional_types - self.type = 'SqlServerSource' - - -class SqlServerStoredProcedureActivity(ExecutionActivity): - """SQL stored procedure activity type. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param stored_procedure_name: Required. Stored procedure name. 
Type: - string (or Expression with resultType string). - :type stored_procedure_name: object - :param stored_procedure_parameters: Value and type setting for stored - procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'stored_procedure_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'stored_procedure_name': {'key': 'typeProperties.storedProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'typeProperties.storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - } - - def __init__(self, *, name: str, stored_procedure_name, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, stored_procedure_parameters=None, **kwargs) -> None: - super(SqlServerStoredProcedureActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.stored_procedure_name = stored_procedure_name - self.stored_procedure_parameters = stored_procedure_parameters - self.type = 'SqlServerStoredProcedure' - - -class SqlServerTableDataset(Dataset): - """The on-premises SQL Server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param sql_server_table_dataset_schema: The schema name of the SQL Server - dataset. Type: string (or Expression with resultType string). 
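A sketch for the SqlServerStoredProcedureActivity completed above; the activity, linked service, and procedure names are placeholders:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, SqlServerStoredProcedureActivity,
        StoredProcedureParameter)

    # Illustrative values only; name and stored_procedure_name are required.
    sp_activity = SqlServerStoredProcedureActivity(
        name='RefreshAggregates',
        linked_service_name=LinkedServiceReference(reference_name='SqlServerLS'),
        stored_procedure_name='usp_RefreshAggregates',
        stored_procedure_parameters={
            'BatchId': StoredProcedureParameter(value='42', type='Int')})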
- :type sql_server_table_dataset_schema: object - :param table: The table name of the SQL Server dataset. Type: string (or - Expression with resultType string). - :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'sql_server_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, sql_server_table_dataset_schema=None, table=None, **kwargs) -> None: - super(SqlServerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.sql_server_table_dataset_schema = sql_server_table_dataset_schema - self.table = table - self.type = 'SqlServerTable' - - -class SqlSink(CopySink): - """A copy activity SQL sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_writer_stored_procedure_name: SQL writer stored procedure name. - Type: string (or Expression with resultType string). - :type sql_writer_stored_procedure_name: object - :param sql_writer_table_type: SQL writer table type. Type: string (or - Expression with resultType string). - :type sql_writer_table_type: object - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression - with resultType string). - :type pre_copy_script: object - :param stored_procedure_parameters: SQL stored procedure parameters. 
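A sketch for the SqlServerTableDataset completed above, using the schema + table pair in place of the retired table_name property; all names are placeholders:

    from azure.mgmt.datafactory.models import LinkedServiceReference, SqlServerTableDataset

    # Illustrative values only; sql_server_table_dataset_schema serializes
    # to typeProperties.schema.
    sql_ds = SqlServerTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='SqlServerLS'),
        sql_server_table_dataset_schema='dbo',
        table='Customers')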
- :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param stored_procedure_table_type_parameter_name: The stored procedure - parameter name of the table type. Type: string (or Expression with - resultType string). - :type stored_procedure_table_type_parameter_name: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, - 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None, **kwargs) -> None: - super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name - self.sql_writer_table_type = sql_writer_table_type - self.pre_copy_script = pre_copy_script - self.stored_procedure_parameters = stored_procedure_parameters - self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name - self.table_option = table_option - self.type = 'SqlSink' - - -class SqlSource(CopySource): - """A copy activity SQL source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. 
Constant filled by server. - :type type: str - :param sql_reader_query: SQL reader query. Type: string (or Expression - with resultType string). - :type sql_reader_query: object - :param sql_reader_stored_procedure_name: Name of the stored procedure for - a SQL Database source. This cannot be used at the same time as - SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: object - :param stored_procedure_parameters: Value and type setting for stored - procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, - 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: - super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.sql_reader_query = sql_reader_query - self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name - self.stored_procedure_parameters = stored_procedure_parameters - self.type = 'SqlSource' - - -class SquareLinkedService(LinkedService): - """Square Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The URL of the Square instance. (i.e. - mystore.mysquare.com) - :type host: object - :param client_id: Required. The client ID associated with your Square - application. - :type client_id: object - :param client_secret: The client secret associated with your Square - application. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param redirect_uri: Required. The redirect URL assigned in the Square - application dashboard. (i.e. http://localhost:2500) - :type redirect_uri: object - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. 
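A sketch wiring the SqlSource and SqlSink completed above into a copy activity; CopyActivity and DatasetReference are assumed from elsewhere in this models package, and the dataset names and query are placeholders:

    from azure.mgmt.datafactory.models import (
        CopyActivity, DatasetReference, SqlSink, SqlSource)

    # Illustrative values only: copy the result of a reader query into a
    # sink table, creating the table if needed.
    copy = CopyActivity(
        name='CopySqlToSql',
        inputs=[DatasetReference(reference_name='SourceTableDS')],
        outputs=[DatasetReference(reference_name='SinkTableDS')],
        source=SqlSource(sql_reader_query='SELECT * FROM dbo.Customers'),
        sink=SqlSink(write_batch_size=10000, table_option='autoCreate'))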
The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'client_id': {'required': True}, - 'redirect_uri': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'redirect_uri': {'key': 'typeProperties.redirectUri', 'type': 'object'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, client_id, redirect_uri, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(SquareLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.client_id = client_id - self.client_secret = client_secret - self.redirect_uri = redirect_uri - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'Square' - - -class SquareObjectDataset(Dataset): - """Square Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. 
- :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(SquareObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'SquareObject' - - -class SquareSource(CopySource): - """A copy activity Square Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
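
A minimal sketch of constructing the SquareLinkedService defined above (not part of the generated patch; all credential values are placeholders, and SecureString is the secret model used throughout this package):

    from azure.mgmt.datafactory.models import SecureString, SquareLinkedService

    # host, client_id and redirect_uri are the required Square properties.
    square_ls = SquareLinkedService(
        host='mystore.mysquare.com',
        client_id='<client-id>',
        client_secret=SecureString(value='<client-secret>'),
        redirect_uri='http://localhost:2500',
    )
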
- :type query: object
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
- 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
- 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
- 'type': {'key': 'type', 'type': 'str'},
- 'query': {'key': 'query', 'type': 'object'},
- }
-
- def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
- super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
- self.query = query
- self.type = 'SquareSource'
-
-
-class SSISAccessCredential(Model):
- """SSIS access credential.
-
- All required parameters must be populated in order to send to Azure.
-
- :param domain: Required. Domain for windows authentication.
- :type domain: object
- :param user_name: Required. UserName for windows authentication.
- :type user_name: object
- :param password: Required. Password for windows authentication.
- :type password: ~azure.mgmt.datafactory.models.SecureString
- """
-
- _validation = {
- 'domain': {'required': True},
- 'user_name': {'required': True},
- 'password': {'required': True},
- }
-
- _attribute_map = {
- 'domain': {'key': 'domain', 'type': 'object'},
- 'user_name': {'key': 'userName', 'type': 'object'},
- 'password': {'key': 'password', 'type': 'SecureString'},
- }
-
- def __init__(self, *, domain, user_name, password, **kwargs) -> None:
- super(SSISAccessCredential, self).__init__(**kwargs)
- self.domain = domain
- self.user_name = user_name
- self.password = password
-
-
-class SsisObjectMetadata(Model):
- """SSIS object metadata.
-
- You probably want to use the sub-classes and not this class directly. Known
- sub-classes are: SsisEnvironment, SsisPackage, SsisProject, SsisFolder
-
- All required parameters must be populated in order to send to Azure.
-
- :param id: Metadata id.
- :type id: long
- :param name: Metadata name.
- :type name: str
- :param description: Metadata description.
- :type description: str
- :param type: Required. Constant filled by server.
- :type type: str
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'id': {'key': 'id', 'type': 'long'},
- 'name': {'key': 'name', 'type': 'str'},
- 'description': {'key': 'description', 'type': 'str'},
- 'type': {'key': 'type', 'type': 'str'},
- }
-
- _subtype_map = {
- 'type': {'Environment': 'SsisEnvironment', 'Package': 'SsisPackage', 'Project': 'SsisProject', 'Folder': 'SsisFolder'}
- }
-
- def __init__(self, *, id: int=None, name: str=None, description: str=None, **kwargs) -> None:
- super(SsisObjectMetadata, self).__init__(**kwargs)
- self.id = id
- self.name = name
- self.description = description
- self.type = None
-
-
-class SsisEnvironment(SsisObjectMetadata):
- """Ssis environment.
-
- All required parameters must be populated in order to send to Azure.
-
- :param id: Metadata id.
- :type id: long
- :param name: Metadata name.
- :type name: str
- :param description: Metadata description.
- :type description: str
- :param type: Required. Constant filled by server.
- :type type: str
- :param folder_id: Folder id which contains environment.
- :type folder_id: long
- :param variables: Variables in the environment.
- :type variables: list[~azure.mgmt.datafactory.models.SsisVariable]
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'id': {'key': 'id', 'type': 'long'},
- 'name': {'key': 'name', 'type': 'str'},
- 'description': {'key': 'description', 'type': 'str'},
- 'type': {'key': 'type', 'type': 'str'},
- 'folder_id': {'key': 'folderId', 'type': 'long'},
- 'variables': {'key': 'variables', 'type': '[SsisVariable]'},
- }
-
- def __init__(self, *, id: int=None, name: str=None, description: str=None, folder_id: int=None, variables=None, **kwargs) -> None:
- super(SsisEnvironment, self).__init__(id=id, name=name, description=description, **kwargs)
- self.folder_id = folder_id
- self.variables = variables
- self.type = 'Environment'
-
-
-class SsisEnvironmentReference(Model):
- """Ssis environment reference.
-
- :param id: Environment reference id.
- :type id: long
- :param environment_folder_name: Environment folder name.
- :type environment_folder_name: str
- :param environment_name: Environment name.
- :type environment_name: str
- :param reference_type: Reference type.
- :type reference_type: str
- """
-
- _attribute_map = {
- 'id': {'key': 'id', 'type': 'long'},
- 'environment_folder_name': {'key': 'environmentFolderName', 'type': 'str'},
- 'environment_name': {'key': 'environmentName', 'type': 'str'},
- 'reference_type': {'key': 'referenceType', 'type': 'str'},
- }
-
- def __init__(self, *, id: int=None, environment_folder_name: str=None, environment_name: str=None, reference_type: str=None, **kwargs) -> None:
- super(SsisEnvironmentReference, self).__init__(**kwargs)
- self.id = id
- self.environment_folder_name = environment_folder_name
- self.environment_name = environment_name
- self.reference_type = reference_type
-
-
-class SSISExecutionCredential(Model):
- """SSIS package execution credential.
-
- All required parameters must be populated in order to send to Azure.
-
- :param domain: Required. Domain for windows authentication.
- :type domain: object
- :param user_name: Required. UserName for windows authentication.
- :type user_name: object
- :param password: Required. Password for windows authentication.
- :type password: ~azure.mgmt.datafactory.models.SecureString
- """
-
- _validation = {
- 'domain': {'required': True},
- 'user_name': {'required': True},
- 'password': {'required': True},
- }
-
- _attribute_map = {
- 'domain': {'key': 'domain', 'type': 'object'},
- 'user_name': {'key': 'userName', 'type': 'object'},
- 'password': {'key': 'password', 'type': 'SecureString'},
- }
-
- def __init__(self, *, domain, user_name, password, **kwargs) -> None:
- super(SSISExecutionCredential, self).__init__(**kwargs)
- self.domain = domain
- self.user_name = user_name
- self.password = password
-
-
-class SSISExecutionParameter(Model):
- """SSIS execution parameter.
-
- All required parameters must be populated in order to send to Azure.
-
- :param value: Required. SSIS package execution parameter value. Type:
- string (or Expression with resultType string).
- :type value: object
- """
-
- _validation = {
- 'value': {'required': True},
- }
-
- _attribute_map = {
- 'value': {'key': 'value', 'type': 'object'},
- }
-
- def __init__(self, *, value, **kwargs) -> None:
- super(SSISExecutionParameter, self).__init__(**kwargs)
- self.value = value
-
-
-class SsisFolder(SsisObjectMetadata):
- """Ssis folder.
-
- All required parameters must be populated in order to send to Azure.
-
- :param id: Metadata id.
- :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. - :type description: str - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, *, id: int=None, name: str=None, description: str=None, **kwargs) -> None: - super(SsisFolder, self).__init__(id=id, name=name, description=description, **kwargs) - self.type = 'Folder' - - -class SSISLogLocation(Model): - """SSIS package execution log location. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param log_path: Required. The SSIS package execution log path. Type: - string (or Expression with resultType string). - :type log_path: object - :ivar type: Required. The type of SSIS log location. Default value: "File" - . - :vartype type: str - :param access_credential: The package execution log access credential. - :type access_credential: - ~azure.mgmt.datafactory.models.SSISAccessCredential - :param log_refresh_interval: Specifies the interval to refresh log. The - default interval is 5 minutes. Type: string (or Expression with resultType - string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type log_refresh_interval: object - """ - - _validation = { - 'log_path': {'required': True}, - 'type': {'required': True, 'constant': True}, - } - - _attribute_map = { - 'log_path': {'key': 'logPath', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, - 'log_refresh_interval': {'key': 'typeProperties.logRefreshInterval', 'type': 'object'}, - } - - type = "File" - - def __init__(self, *, log_path, access_credential=None, log_refresh_interval=None, **kwargs) -> None: - super(SSISLogLocation, self).__init__(**kwargs) - self.log_path = log_path - self.access_credential = access_credential - self.log_refresh_interval = log_refresh_interval - - -class SsisObjectMetadataListResponse(Model): - """A list of SSIS object metadata. - - :param value: List of SSIS object metadata. - :type value: list[~azure.mgmt.datafactory.models.SsisObjectMetadata] - :param next_link: The link to the next page of results, if any remaining - results exist. - :type next_link: str - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[SsisObjectMetadata]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__(self, *, value=None, next_link: str=None, **kwargs) -> None: - super(SsisObjectMetadataListResponse, self).__init__(**kwargs) - self.value = value - self.next_link = next_link - - -class SsisObjectMetadataStatusResponse(Model): - """The status of the operation. - - :param status: The status of the operation. - :type status: str - :param name: The operation name. - :type name: str - :param properties: The operation properties. - :type properties: str - :param error: The operation error message. 
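
A minimal sketch of the SSISLogLocation model above (not part of the generated patch; the UNC path is hypothetical, and the refresh interval follows the documented timespan pattern):

    from azure.mgmt.datafactory.models import SSISLogLocation

    log_location = SSISLogLocation(
        log_path='\\\\myserver\\ssislogs',  # hypothetical file share
        log_refresh_interval='00:10:00',    # refresh the log every 10 minutes
    )
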
- :type error: str - """ - - _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'str'}, - } - - def __init__(self, *, status: str=None, name: str=None, properties: str=None, error: str=None, **kwargs) -> None: - super(SsisObjectMetadataStatusResponse, self).__init__(**kwargs) - self.status = status - self.name = name - self.properties = properties - self.error = error - - -class SsisPackage(SsisObjectMetadata): - """Ssis Package. - - All required parameters must be populated in order to send to Azure. - - :param id: Metadata id. - :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. - :type description: str - :param type: Required. Constant filled by server. - :type type: str - :param folder_id: Folder id which contains package. - :type folder_id: long - :param project_version: Project version which contains package. - :type project_version: long - :param project_id: Project id which contains package. - :type project_id: long - :param parameters: Parameters in package - :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_id': {'key': 'folderId', 'type': 'long'}, - 'project_version': {'key': 'projectVersion', 'type': 'long'}, - 'project_id': {'key': 'projectId', 'type': 'long'}, - 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, - } - - def __init__(self, *, id: int=None, name: str=None, description: str=None, folder_id: int=None, project_version: int=None, project_id: int=None, parameters=None, **kwargs) -> None: - super(SsisPackage, self).__init__(id=id, name=name, description=description, **kwargs) - self.folder_id = folder_id - self.project_version = project_version - self.project_id = project_id - self.parameters = parameters - self.type = 'Package' - - -class SSISPackageLocation(Model): - """SSIS package location. - - All required parameters must be populated in order to send to Azure. - - :param package_path: Required. The SSIS package path. Type: string (or - Expression with resultType string). - :type package_path: object - :param type: The type of SSIS package location. Possible values include: - 'SSISDB', 'File' - :type type: str or ~azure.mgmt.datafactory.models.SsisPackageLocationType - :param package_password: Password of the package. - :type package_password: ~azure.mgmt.datafactory.models.SecureString - :param access_credential: The package access credential. - :type access_credential: - ~azure.mgmt.datafactory.models.SSISAccessCredential - :param configuration_path: The configuration file of the package - execution. Type: string (or Expression with resultType string). 
- :type configuration_path: object - """ - - _validation = { - 'package_path': {'required': True}, - } - - _attribute_map = { - 'package_path': {'key': 'packagePath', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'package_password': {'key': 'typeProperties.packagePassword', 'type': 'SecureString'}, - 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, - 'configuration_path': {'key': 'typeProperties.configurationPath', 'type': 'object'}, - } - - def __init__(self, *, package_path, type=None, package_password=None, access_credential=None, configuration_path=None, **kwargs) -> None: - super(SSISPackageLocation, self).__init__(**kwargs) - self.package_path = package_path - self.type = type - self.package_password = package_password - self.access_credential = access_credential - self.configuration_path = configuration_path - - -class SsisParameter(Model): - """Ssis parameter. - - :param id: Parameter id. - :type id: long - :param name: Parameter name. - :type name: str - :param description: Parameter description. - :type description: str - :param data_type: Parameter type. - :type data_type: str - :param required: Whether parameter is required. - :type required: bool - :param sensitive: Whether parameter is sensitive. - :type sensitive: bool - :param design_default_value: Design default value of parameter. - :type design_default_value: str - :param default_value: Default value of parameter. - :type default_value: str - :param sensitive_default_value: Default sensitive value of parameter. - :type sensitive_default_value: str - :param value_type: Parameter value type. - :type value_type: str - :param value_set: Parameter value set. - :type value_set: bool - :param variable: Parameter reference variable. - :type variable: str - """ - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'required': {'key': 'required', 'type': 'bool'}, - 'sensitive': {'key': 'sensitive', 'type': 'bool'}, - 'design_default_value': {'key': 'designDefaultValue', 'type': 'str'}, - 'default_value': {'key': 'defaultValue', 'type': 'str'}, - 'sensitive_default_value': {'key': 'sensitiveDefaultValue', 'type': 'str'}, - 'value_type': {'key': 'valueType', 'type': 'str'}, - 'value_set': {'key': 'valueSet', 'type': 'bool'}, - 'variable': {'key': 'variable', 'type': 'str'}, - } - - def __init__(self, *, id: int=None, name: str=None, description: str=None, data_type: str=None, required: bool=None, sensitive: bool=None, design_default_value: str=None, default_value: str=None, sensitive_default_value: str=None, value_type: str=None, value_set: bool=None, variable: str=None, **kwargs) -> None: - super(SsisParameter, self).__init__(**kwargs) - self.id = id - self.name = name - self.description = description - self.data_type = data_type - self.required = required - self.sensitive = sensitive - self.design_default_value = design_default_value - self.default_value = default_value - self.sensitive_default_value = sensitive_default_value - self.value_type = value_type - self.value_set = value_set - self.variable = variable - - -class SsisProject(SsisObjectMetadata): - """Ssis project. - - All required parameters must be populated in order to send to Azure. - - :param id: Metadata id. - :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. 
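
A minimal sketch of an SSISPackageLocation for a file-system package, combined with the SSISAccessCredential model defined earlier (not part of the generated patch; every value is a placeholder):

    from azure.mgmt.datafactory.models import (
        SSISAccessCredential, SSISPackageLocation, SecureString)

    package_location = SSISPackageLocation(
        package_path='\\\\myshare\\packages\\Package.dtsx',  # hypothetical path
        type='File',  # the other documented value is 'SSISDB'
        access_credential=SSISAccessCredential(
            domain='corp',          # hypothetical Windows domain
            user_name='ssis_user',  # hypothetical user
            password=SecureString(value='<password>'),
        ),
    )
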
- :type description: str
- :param type: Required. Constant filled by server.
- :type type: str
- :param folder_id: Folder id which contains project.
- :type folder_id: long
- :param version: Project version.
- :type version: long
- :param environment_refs: Environment references in the project.
- :type environment_refs:
- list[~azure.mgmt.datafactory.models.SsisEnvironmentReference]
- :param parameters: Parameters in the project.
- :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter]
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'id': {'key': 'id', 'type': 'long'},
- 'name': {'key': 'name', 'type': 'str'},
- 'description': {'key': 'description', 'type': 'str'},
- 'type': {'key': 'type', 'type': 'str'},
- 'folder_id': {'key': 'folderId', 'type': 'long'},
- 'version': {'key': 'version', 'type': 'long'},
- 'environment_refs': {'key': 'environmentRefs', 'type': '[SsisEnvironmentReference]'},
- 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'},
- }
-
- def __init__(self, *, id: int=None, name: str=None, description: str=None, folder_id: int=None, version: int=None, environment_refs=None, parameters=None, **kwargs) -> None:
- super(SsisProject, self).__init__(id=id, name=name, description=description, **kwargs)
- self.folder_id = folder_id
- self.version = version
- self.environment_refs = environment_refs
- self.parameters = parameters
- self.type = 'Project'
-
-
-class SSISPropertyOverride(Model):
- """SSIS property override.
-
- All required parameters must be populated in order to send to Azure.
-
- :param value: Required. SSIS package property override value. Type: string
- (or Expression with resultType string).
- :type value: object
- :param is_sensitive: Whether the SSIS package property override value is
- sensitive data. The value will be encrypted in SSISDB if it is true.
- :type is_sensitive: bool
- """
-
- _validation = {
- 'value': {'required': True},
- }
-
- _attribute_map = {
- 'value': {'key': 'value', 'type': 'object'},
- 'is_sensitive': {'key': 'isSensitive', 'type': 'bool'},
- }
-
- def __init__(self, *, value, is_sensitive: bool=None, **kwargs) -> None:
- super(SSISPropertyOverride, self).__init__(**kwargs)
- self.value = value
- self.is_sensitive = is_sensitive
-
-
-class SsisVariable(Model):
- """Ssis variable.
-
- :param id: Variable id.
- :type id: long
- :param name: Variable name.
- :type name: str
- :param description: Variable description.
- :type description: str
- :param data_type: Variable type.
- :type data_type: str
- :param sensitive: Whether variable is sensitive.
- :type sensitive: bool
- :param value: Variable value.
- :type value: str
- :param sensitive_value: Variable sensitive value.
- :type sensitive_value: str - """ - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'sensitive': {'key': 'sensitive', 'type': 'bool'}, - 'value': {'key': 'value', 'type': 'str'}, - 'sensitive_value': {'key': 'sensitiveValue', 'type': 'str'}, - } - - def __init__(self, *, id: int=None, name: str=None, description: str=None, data_type: str=None, sensitive: bool=None, value: str=None, sensitive_value: str=None, **kwargs) -> None: - super(SsisVariable, self).__init__(**kwargs) - self.id = id - self.name = name - self.description = description - self.data_type = data_type - self.sensitive = sensitive - self.value = value - self.sensitive_value = sensitive_value - - -class StagingSettings(Model): - """Staging settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param linked_service_name: Required. Staging linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param path: The path to storage for storing the interim data. Type: - string (or Expression with resultType string). - :type path: object - :param enable_compression: Specifies whether to use compression when - copying data via an interim staging. Default value is false. Type: boolean - (or Expression with resultType boolean). - :type enable_compression: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'path': {'key': 'path', 'type': 'object'}, - 'enable_compression': {'key': 'enableCompression', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, path=None, enable_compression=None, **kwargs) -> None: - super(StagingSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.linked_service_name = linked_service_name - self.path = path - self.enable_compression = enable_compression - - -class StoredProcedureParameter(Model): - """SQL stored procedure parameter. - - :param value: Stored procedure parameter value. Type: string (or - Expression with resultType string). - :type value: object - :param type: Stored procedure parameter type. Possible values include: - 'String', 'Int', 'Int64', 'Decimal', 'Guid', 'Boolean', 'Date' - :type type: str or - ~azure.mgmt.datafactory.models.StoredProcedureParameterType - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, *, value=None, type=None, **kwargs) -> None: - super(StoredProcedureParameter, self).__init__(**kwargs) - self.value = value - self.type = type - - -class SybaseLinkedService(LinkedService): - """Linked service for Sybase data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. 
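
A minimal sketch of the StagingSettings model above (not part of the generated patch; 'MyStagingStorage' is a hypothetical linked service name, and LinkedServiceReference is assumed to take reference_name as it does elsewhere in this package):

    from azure.mgmt.datafactory.models import LinkedServiceReference, StagingSettings

    staging = StagingSettings(
        linked_service_name=LinkedServiceReference(reference_name='MyStagingStorage'),
        path='staging/interim',   # hypothetical container/folder for interim data
        enable_compression=True,  # compress data while it sits in staging
    )
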
- :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param server: Required. Server name for connection. Type: string (or - Expression with resultType string). - :type server: object - :param database: Required. Database name for connection. Type: string (or - Expression with resultType string). - :type database: object - :param schema: Schema name for connection. Type: string (or Expression - with resultType string). - :type schema: object - :param authentication_type: AuthenticationType to be used for connection. - Possible values include: 'Basic', 'Windows' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.SybaseAuthenticationType - :param username: Username for authentication. Type: string (or Expression - with resultType string). - :type username: object - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'database': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, server, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, schema=None, authentication_type=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(SybaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.server = server - self.database = database - self.schema = schema - self.authentication_type = authentication_type - self.username = username - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'Sybase' - - -class SybaseSource(CopySource): - """A copy activity source for Sybase databases. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'SybaseSource' - - -class SybaseTableDataset(Dataset): - """The Sybase table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The Sybase table name. Type: string (or Expression with - resultType string). 
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(SybaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'SybaseTable' - - -class TeradataLinkedService(LinkedService): - """Linked service for Teradata data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Teradata ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param server: Server name for connection. Type: string (or Expression - with resultType string). - :type server: object - :param authentication_type: AuthenticationType to be used for connection. - Possible values include: 'Basic', 'Windows' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.TeradataAuthenticationType - :param username: Username for authentication. Type: string (or Expression - with resultType string). - :type username: object - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
- 'description': {'key': 'description', 'type': 'str'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
- 'server': {'key': 'typeProperties.server', 'type': 'object'},
- 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
- 'username': {'key': 'typeProperties.username', 'type': 'object'},
- 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
- 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
- }
-
- def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, server=None, authentication_type=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None:
- super(TeradataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
- self.connection_string = connection_string
- self.server = server
- self.authentication_type = authentication_type
- self.username = username
- self.password = password
- self.encrypted_credential = encrypted_credential
- self.type = 'Teradata'
-
-
-class TeradataPartitionSettings(Model):
- """The settings that will be leveraged for Teradata source partitioning.
-
- :param partition_column_name: The name of the column that will be used for
- range or hash partitioning. Type: string (or Expression with
- resultType string).
- :type partition_column_name: object
- :param partition_upper_bound: The maximum value of the column specified in
- partitionColumnName that will be used for range partitioning.
- Type: string (or Expression with resultType string).
- :type partition_upper_bound: object
- :param partition_lower_bound: The minimum value of the column specified in
- partitionColumnName that will be used for range partitioning.
- Type: string (or Expression with resultType string).
- :type partition_lower_bound: object
- """
-
- _attribute_map = {
- 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'},
- 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'},
- 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'},
- }
-
- def __init__(self, *, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None:
- super(TeradataPartitionSettings, self).__init__(**kwargs)
- self.partition_column_name = partition_column_name
- self.partition_upper_bound = partition_upper_bound
- self.partition_lower_bound = partition_lower_bound
-
-
-class TeradataSource(CopySource):
- """A copy activity Teradata source.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param source_retry_count: Source retry count. Type: integer (or
- Expression with resultType integer).
- :type source_retry_count: object
- :param source_retry_wait: Source retry wait. Type: string (or Expression
- with resultType string), pattern:
- ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- :type source_retry_wait: object
- :param max_concurrent_connections: The maximum concurrent connection count
- for the source data store. Type: integer (or Expression with resultType
- integer).
- :type max_concurrent_connections: object
- :param type: Required. Constant filled by server.
- :type type: str
- :param query: Teradata query. Type: string (or Expression with resultType
- string).
- :type query: object
- :param partition_option: The partition mechanism that will be used for
- Teradata read in parallel. Possible values include: 'None', 'Hash',
- 'DynamicRange'
- :type partition_option: str or
- ~azure.mgmt.datafactory.models.TeradataPartitionOption
- :param partition_settings: The settings that will be leveraged for
- Teradata source partitioning.
- :type partition_settings:
- ~azure.mgmt.datafactory.models.TeradataPartitionSettings
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
- 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
- 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
- 'type': {'key': 'type', 'type': 'str'},
- 'query': {'key': 'query', 'type': 'object'},
- 'partition_option': {'key': 'partitionOption', 'type': 'str'},
- 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'},
- }
-
- def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, partition_option=None, partition_settings=None, **kwargs) -> None:
- super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
- self.query = query
- self.partition_option = partition_option
- self.partition_settings = partition_settings
- self.type = 'TeradataSource'
-
-
-class TeradataTableDataset(Dataset):
- """The Teradata database dataset.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param description: Dataset description.
- :type description: str
- :param structure: Columns that define the structure of the dataset. Type:
- array (or Expression with resultType array), itemType: DatasetDataElement.
- :type structure: object
- :param schema: Columns that define the physical type schema of the
- dataset. Type: array (or Expression with resultType array), itemType:
- DatasetSchemaDataElement.
- :type schema: object
- :param linked_service_name: Required. Linked service reference.
- :type linked_service_name:
- ~azure.mgmt.datafactory.models.LinkedServiceReference
- :param parameters: Parameters for dataset.
- :type parameters: dict[str,
- ~azure.mgmt.datafactory.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the
- Dataset.
- :type annotations: list[object]
- :param folder: The folder that this Dataset is in. If not specified,
- Dataset will appear at the root level.
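
A minimal sketch showing how the TeradataPartitionSettings model above drives a range-partitioned TeradataSource (not part of the generated patch; the column name and bounds are hypothetical):

    from azure.mgmt.datafactory.models import TeradataPartitionSettings, TeradataSource

    source = TeradataSource(
        partition_option='DynamicRange',  # other documented values: 'Hash', 'None'
        partition_settings=TeradataPartitionSettings(
            partition_column_name='order_id',  # hypothetical integer column
            partition_lower_bound='1',
            partition_upper_bound='1000000',
        ),
    )
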
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
- :param type: Required. Constant filled by server.
- :type type: str
- :param database: The database name of Teradata. Type: string (or
- Expression with resultType string).
- :type database: object
- :param table: The table name of Teradata. Type: string (or Expression with
- resultType string).
- :type table: object
- """
-
- _validation = {
- 'linked_service_name': {'required': True},
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'description': {'key': 'description', 'type': 'str'},
- 'structure': {'key': 'structure', 'type': 'object'},
- 'schema': {'key': 'schema', 'type': 'object'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
- 'type': {'key': 'type', 'type': 'str'},
- 'database': {'key': 'typeProperties.database', 'type': 'object'},
- 'table': {'key': 'typeProperties.table', 'type': 'object'},
- }
-
- def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, database=None, table=None, **kwargs) -> None:
- super(TeradataTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
- self.database = database
- self.table = table
- self.type = 'TeradataTable'
-
-
-class TextFormat(DatasetStorageFormat):
- """The data stored in text format.
-
- All required parameters must be populated in order to send to Azure.
-
- :param additional_properties: Unmatched properties from the message are
- deserialized this collection
- :type additional_properties: dict[str, object]
- :param serializer: Serializer. Type: string (or Expression with resultType
- string).
- :type serializer: object
- :param deserializer: Deserializer. Type: string (or Expression with
- resultType string).
- :type deserializer: object
- :param type: Required. Constant filled by server.
- :type type: str
- :param column_delimiter: The column delimiter. Type: string (or Expression
- with resultType string).
- :type column_delimiter: object
- :param row_delimiter: The row delimiter. Type: string (or Expression with
- resultType string).
- :type row_delimiter: object
- :param escape_char: The escape character. Type: string (or Expression with
- resultType string).
- :type escape_char: object
- :param quote_char: The quote character. Type: string (or Expression with
- resultType string).
- :type quote_char: object
- :param null_value: The null value string. Type: string (or Expression with
- resultType string).
- :type null_value: object
- :param encoding_name: The code page name of the preferred encoding. If
- not specified, the default value is "utf-8", unless the BOM denotes another
- Unicode encoding. Refer to the "Name" column of the table in the following
- link to set supported values:
- https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string
- (or Expression with resultType string).
- :type encoding_name: object
- :param treat_empty_as_null: Treat empty column values in the text file as
- null. The default value is true.
Type: boolean (or Expression with
- resultType boolean).
- :type treat_empty_as_null: object
- :param skip_line_count: The number of lines/rows to be skipped when
- parsing text files. The default value is 0. Type: integer (or Expression
- with resultType integer).
- :type skip_line_count: object
- :param first_row_as_header: When used as input, treat the first row of
- data as headers. When used as output, write the headers into the output as
- the first row of data. The default value is false. Type: boolean (or
- Expression with resultType boolean).
- :type first_row_as_header: object
- """
-
- _validation = {
- 'type': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'serializer': {'key': 'serializer', 'type': 'object'},
- 'deserializer': {'key': 'deserializer', 'type': 'object'},
- 'type': {'key': 'type', 'type': 'str'},
- 'column_delimiter': {'key': 'columnDelimiter', 'type': 'object'},
- 'row_delimiter': {'key': 'rowDelimiter', 'type': 'object'},
- 'escape_char': {'key': 'escapeChar', 'type': 'object'},
- 'quote_char': {'key': 'quoteChar', 'type': 'object'},
- 'null_value': {'key': 'nullValue', 'type': 'object'},
- 'encoding_name': {'key': 'encodingName', 'type': 'object'},
- 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'},
- 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'},
- 'first_row_as_header': {'key': 'firstRowAsHeader', 'type': 'object'},
- }
-
- def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, column_delimiter=None, row_delimiter=None, escape_char=None, quote_char=None, null_value=None, encoding_name=None, treat_empty_as_null=None, skip_line_count=None, first_row_as_header=None, **kwargs) -> None:
- super(TextFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs)
- self.column_delimiter = column_delimiter
- self.row_delimiter = row_delimiter
- self.escape_char = escape_char
- self.quote_char = quote_char
- self.null_value = null_value
- self.encoding_name = encoding_name
- self.treat_empty_as_null = treat_empty_as_null
- self.skip_line_count = skip_line_count
- self.first_row_as_header = first_row_as_header
- self.type = 'TextFormat'
-
-
-class TriggerDependencyReference(DependencyReference):
- """Trigger referenced dependency.
-
- You probably want to use the sub-classes and not this class directly. Known
- sub-classes are: TumblingWindowTriggerDependencyReference
-
- All required parameters must be populated in order to send to Azure.
-
- :param type: Required. Constant filled by server.
- :type type: str
- :param reference_trigger: Required. Referenced trigger.
- :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference
- """
-
- _validation = {
- 'type': {'required': True},
- 'reference_trigger': {'required': True},
- }
-
- _attribute_map = {
- 'type': {'key': 'type', 'type': 'str'},
- 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'},
- }
-
- _subtype_map = {
- 'type': {'TumblingWindowTriggerDependencyReference': 'TumblingWindowTriggerDependencyReference'}
- }
-
- def __init__(self, *, reference_trigger, **kwargs) -> None:
- super(TriggerDependencyReference, self).__init__(**kwargs)
- self.reference_trigger = reference_trigger
- self.type = 'TriggerDependencyReference'
-
-
-class TriggerPipelineReference(Model):
- """Pipeline that needs to be triggered with the given parameters.
-
- :param pipeline_reference: Pipeline reference.
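
A minimal sketch of the TextFormat model above for a pipe-delimited file (not part of the generated patch; all settings are illustrative):

    from azure.mgmt.datafactory.models import TextFormat

    text_format = TextFormat(
        column_delimiter='|',
        row_delimiter='\n',
        null_value='NULL',         # literal that should be read as null
        encoding_name='utf-8',
        first_row_as_header=True,  # read/write a header row
        skip_line_count=0,
    )
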
- :type pipeline_reference: ~azure.mgmt.datafactory.models.PipelineReference - :param parameters: Pipeline parameters. - :type parameters: dict[str, object] - """ - - _attribute_map = { - 'pipeline_reference': {'key': 'pipelineReference', 'type': 'PipelineReference'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, - } - - def __init__(self, *, pipeline_reference=None, parameters=None, **kwargs) -> None: - super(TriggerPipelineReference, self).__init__(**kwargs) - self.pipeline_reference = pipeline_reference - self.parameters = parameters - - -class TriggerReference(Model): - """Trigger reference type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Trigger reference type. Default value: - "TriggerReference" . - :vartype type: str - :param reference_name: Required. Reference trigger name. - :type reference_name: str - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - } - - type = "TriggerReference" - - def __init__(self, *, reference_name: str, **kwargs) -> None: - super(TriggerReference, self).__init__(**kwargs) - self.reference_name = reference_name - - -class TriggerResource(SubResource): - """Trigger resource type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Properties of the trigger. - :type properties: ~azure.mgmt.datafactory.models.Trigger - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'Trigger'}, - } - - def __init__(self, *, properties, **kwargs) -> None: - super(TriggerResource, self).__init__(**kwargs) - self.properties = properties - - -class TriggerRun(Model): - """Trigger runs. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar trigger_run_id: Trigger run id. - :vartype trigger_run_id: str - :ivar trigger_name: Trigger name. - :vartype trigger_name: str - :ivar trigger_type: Trigger type. - :vartype trigger_type: str - :ivar trigger_run_timestamp: Trigger run start time. - :vartype trigger_run_timestamp: datetime - :ivar status: Trigger run status. Possible values include: 'Succeeded', - 'Failed', 'Inprogress' - :vartype status: str or ~azure.mgmt.datafactory.models.TriggerRunStatus - :ivar message: Trigger error message. - :vartype message: str - :ivar properties: List of property name and value related to trigger run. - Name, value pair depends on type of trigger. 
- :vartype properties: dict[str, str] - :ivar triggered_pipelines: List of pipeline name and run Id triggered by - the trigger run. - :vartype triggered_pipelines: dict[str, str] - :ivar run_dimension: Run dimension for which trigger was fired. - :vartype run_dimension: dict[str, str] - :ivar dependency_status: Status of the upstream pipelines. - :vartype dependency_status: dict[str, object] - """ - - _validation = { - 'trigger_run_id': {'readonly': True}, - 'trigger_name': {'readonly': True}, - 'trigger_type': {'readonly': True}, - 'trigger_run_timestamp': {'readonly': True}, - 'status': {'readonly': True}, - 'message': {'readonly': True}, - 'properties': {'readonly': True}, - 'triggered_pipelines': {'readonly': True}, - 'run_dimension': {'readonly': True}, - 'dependency_status': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'trigger_run_id': {'key': 'triggerRunId', 'type': 'str'}, - 'trigger_name': {'key': 'triggerName', 'type': 'str'}, - 'trigger_type': {'key': 'triggerType', 'type': 'str'}, - 'trigger_run_timestamp': {'key': 'triggerRunTimestamp', 'type': 'iso-8601'}, - 'status': {'key': 'status', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'triggered_pipelines': {'key': 'triggeredPipelines', 'type': '{str}'}, - 'run_dimension': {'key': 'runDimension', 'type': '{str}'}, - 'dependency_status': {'key': 'dependencyStatus', 'type': '{object}'}, - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(TriggerRun, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.trigger_run_id = None - self.trigger_name = None - self.trigger_type = None - self.trigger_run_timestamp = None - self.status = None - self.message = None - self.properties = None - self.triggered_pipelines = None - self.run_dimension = None - self.dependency_status = None - - -class TriggerRunsQueryResponse(Model): - """A list of trigger runs. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of trigger runs. - :type value: list[~azure.mgmt.datafactory.models.TriggerRun] - :param continuation_token: The continuation token for getting the next - page of results, if any remaining results exist, null otherwise. - :type continuation_token: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[TriggerRun]'}, - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, - } - - def __init__(self, *, value, continuation_token: str=None, **kwargs) -> None: - super(TriggerRunsQueryResponse, self).__init__(**kwargs) - self.value = value - self.continuation_token = continuation_token - - -class TriggerSubscriptionOperationStatus(Model): - """Defines the response of a trigger subscription operation. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar trigger_name: Trigger name. - :vartype trigger_name: str - :ivar status: Event Subscription Status. 
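As a usage sketch of the continuation-token paging that TriggerRunsQueryResponse describes, assuming an authenticated DataFactoryManagementClient (client), its trigger_runs.query_by_factory operation, and the RunFilterParameters model from this package; the resource group and factory names are placeholders:

from datetime import datetime, timedelta
from azure.mgmt.datafactory.models import RunFilterParameters

filters = RunFilterParameters(
    last_updated_after=datetime.utcnow() - timedelta(days=1),
    last_updated_before=datetime.utcnow())
runs = []
while True:
    page = client.trigger_runs.query_by_factory('myRg', 'myFactory', filters)
    runs.extend(page.value)              # list[TriggerRun]
    if not page.continuation_token:      # null when no results remain
        break
    filters.continuation_token = page.continuation_token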
Possible values include: - 'Enabled', 'Provisioning', 'Deprovisioning', 'Disabled', 'Unknown' - :vartype status: str or - ~azure.mgmt.datafactory.models.EventSubscriptionStatus - """ - - _validation = { - 'trigger_name': {'readonly': True}, - 'status': {'readonly': True}, - } - - _attribute_map = { - 'trigger_name': {'key': 'triggerName', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - } - - def __init__(self, **kwargs) -> None: - super(TriggerSubscriptionOperationStatus, self).__init__(**kwargs) - self.trigger_name = None - self.status = None - - -class TumblingWindowTrigger(Trigger): - """Trigger that schedules pipeline runs for all fixed time interval windows - from a start time without gaps and also supports backfill scenarios (when - start time is in the past). - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized to this collection - :type additional_properties: dict[str, object] - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when - Start/Stop APIs are called on the Trigger. Possible values include: - 'Started', 'Stopped', 'Disabled' - :vartype runtime_state: str or - ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the - trigger. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param pipeline: Required. Pipeline for which runs are created when an - event is fired for trigger window that is ready. - :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference - :param frequency: Required. The frequency of the time windows. Possible - values include: 'Minute', 'Hour' - :type frequency: str or - ~azure.mgmt.datafactory.models.TumblingWindowFrequency - :param interval: Required. The interval of the time windows. The minimum - interval allowed is 15 Minutes. - :type interval: int - :param start_time: Required. The start time for the time period for the - trigger during which events are fired for windows that are ready. Only UTC - time is currently supported. - :type start_time: datetime - :param end_time: The end time for the time period for the trigger during - which events are fired for windows that are ready. Only UTC time is - currently supported. - :type end_time: datetime - :param delay: Specifies how long the trigger waits past the due time before - triggering a new run. It doesn't alter window start and end time. The - default is 0. Type: string (or Expression with resultType string), - pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type delay: object - :param max_concurrency: Required. The max number of parallel time windows - (ready for execution) for which a new run is triggered. - :type max_concurrency: int - :param retry_policy: Retry policy that will be applied for failed pipeline - runs. - :type retry_policy: ~azure.mgmt.datafactory.models.RetryPolicy - :param depends_on: Triggers that this trigger depends on. Only tumbling - window triggers are supported.
- :type depends_on: list[~azure.mgmt.datafactory.models.DependencyReference] - """ - - _validation = { - 'runtime_state': {'readonly': True}, - 'type': {'required': True}, - 'pipeline': {'required': True}, - 'frequency': {'required': True}, - 'interval': {'required': True}, - 'start_time': {'required': True}, - 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, - 'frequency': {'key': 'typeProperties.frequency', 'type': 'str'}, - 'interval': {'key': 'typeProperties.interval', 'type': 'int'}, - 'start_time': {'key': 'typeProperties.startTime', 'type': 'iso-8601'}, - 'end_time': {'key': 'typeProperties.endTime', 'type': 'iso-8601'}, - 'delay': {'key': 'typeProperties.delay', 'type': 'object'}, - 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, - 'retry_policy': {'key': 'typeProperties.retryPolicy', 'type': 'RetryPolicy'}, - 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[DependencyReference]'}, - } - - def __init__(self, *, pipeline, frequency, interval: int, start_time, max_concurrency: int, additional_properties=None, description: str=None, annotations=None, end_time=None, delay=None, retry_policy=None, depends_on=None, **kwargs) -> None: - super(TumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) - self.pipeline = pipeline - self.frequency = frequency - self.interval = interval - self.start_time = start_time - self.end_time = end_time - self.delay = delay - self.max_concurrency = max_concurrency - self.retry_policy = retry_policy - self.depends_on = depends_on - self.type = 'TumblingWindowTrigger' - - -class TumblingWindowTriggerDependencyReference(TriggerDependencyReference): - """Referenced tumbling window trigger dependency. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. - :type type: str - :param reference_trigger: Required. Referenced trigger. - :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference - :param offset: Timespan applied to the start time of a tumbling window - when evaluating dependency. - :type offset: str - :param size: The size of the window when evaluating the dependency. If - undefined the frequency of the tumbling window will be used. 
- :type size: str - """ - - _validation = { - 'type': {'required': True}, - 'reference_trigger': {'required': True}, - 'offset': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, - 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'}, - 'offset': {'key': 'offset', 'type': 'str'}, - 'size': {'key': 'size', 'type': 'str'}, - } - - def __init__(self, *, reference_trigger, offset: str=None, size: str=None, **kwargs) -> None: - super(TumblingWindowTriggerDependencyReference, self).__init__(reference_trigger=reference_trigger, **kwargs) - self.offset = offset - self.size = size - self.type = 'TumblingWindowTriggerDependencyReference' - - -class UntilActivity(ControlActivity): - """This activity executes inner activities until the specified boolean - expression evaluates to true or timeout is reached, whichever is earlier. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized to this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param expression: Required. An expression that would evaluate to Boolean. - The loop will continue until this expression evaluates to true. - :type expression: ~azure.mgmt.datafactory.models.Expression - :param timeout: Specifies the timeout for the activity to run. If there is - no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 - week as default. Type: string (or Expression with resultType string), - pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type timeout: object - :param activities: Required. List of activities to execute.
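Putting TumblingWindowTrigger and TumblingWindowTriggerDependencyReference together, a minimal sketch of an hourly window trigger that waits on an upstream trigger; all names (CopyPipeline, UpstreamTrigger) are hypothetical, and the PipelineReference and RetryPolicy constructors are assumed from elsewhere in this package:

from datetime import datetime
from azure.mgmt.datafactory.models import (
    PipelineReference, RetryPolicy, TriggerPipelineReference,
    TriggerReference, TumblingWindowTrigger,
    TumblingWindowTriggerDependencyReference)

trigger = TumblingWindowTrigger(
    pipeline=TriggerPipelineReference(
        pipeline_reference=PipelineReference(reference_name='CopyPipeline'),
        parameters={'windowStart': '@trigger().outputs.windowStartTime'}),
    frequency='Hour',
    interval=1,
    start_time=datetime(2019, 6, 1),   # a past start time triggers backfill
    max_concurrency=10,                # must be 1..50 per the validation above
    retry_policy=RetryPolicy(count=3, interval_in_seconds=60),
    depends_on=[TumblingWindowTriggerDependencyReference(
        reference_trigger=TriggerReference(reference_name='UpstreamTrigger'),
        offset='01:00:00', size='01:00:00')])  # timespan pattern (d+.)hh:mm:ss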
- :type activities: list[~azure.mgmt.datafactory.models.Activity] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'expression': {'required': True}, - 'activities': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'expression': {'key': 'typeProperties.expression', 'type': 'Expression'}, - 'timeout': {'key': 'typeProperties.timeout', 'type': 'object'}, - 'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'}, - } - - def __init__(self, *, name: str, expression, activities, additional_properties=None, description: str=None, depends_on=None, user_properties=None, timeout=None, **kwargs) -> None: - super(UntilActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.expression = expression - self.timeout = timeout - self.activities = activities - self.type = 'Until' - - -class UpdateIntegrationRuntimeNodeRequest(Model): - """Update integration runtime node request. - - :param concurrent_jobs_limit: The number of concurrent jobs permitted to - run on the integration runtime node. Values between 1 and - maxConcurrentJobs (inclusive) are allowed. - :type concurrent_jobs_limit: int - """ - - _validation = { - 'concurrent_jobs_limit': {'minimum': 1}, - } - - _attribute_map = { - 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, - } - - def __init__(self, *, concurrent_jobs_limit: int=None, **kwargs) -> None: - super(UpdateIntegrationRuntimeNodeRequest, self).__init__(**kwargs) - self.concurrent_jobs_limit = concurrent_jobs_limit - - -class UpdateIntegrationRuntimeRequest(Model): - """Update integration runtime request. - - :param auto_update: Enables or disables the auto-update feature of the - self-hosted integration runtime. See - https://go.microsoft.com/fwlink/?linkid=854189. Possible values include: - 'On', 'Off' - :type auto_update: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate - :param update_delay_offset: The time offset (in hours) in the day, e.g., - PT03H is 3 hours. The integration runtime auto-update will happen at that - time. - :type update_delay_offset: str - """ - - _attribute_map = { - 'auto_update': {'key': 'autoUpdate', 'type': 'str'}, - 'update_delay_offset': {'key': 'updateDelayOffset', 'type': 'str'}, - } - - def __init__(self, *, auto_update=None, update_delay_offset: str=None, **kwargs) -> None: - super(UpdateIntegrationRuntimeRequest, self).__init__(**kwargs) - self.auto_update = auto_update - self.update_delay_offset = update_delay_offset - - -class UserAccessPolicy(Model): - """Get Data Plane read only token request definition. - - :param permissions: The string with permissions for Data Plane access. - Currently only 'r' is supported which grants read only access. - :type permissions: str - :param access_resource_path: The resource path to get access relative to - factory. Currently only empty string is supported which corresponds to the - factory resource. - :type access_resource_path: str - :param profile_name: The name of the profile. Currently only the default - is supported. The default value is DefaultProfile.
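As a sketch of the UntilActivity defined above: loop a 30-second wait until a pipeline variable flips to true, with an explicit 12-hour timeout; the variable name done is hypothetical, and the Expression model is assumed from this package:

from azure.mgmt.datafactory.models import Expression, UntilActivity, WaitActivity

until = UntilActivity(
    name='WaitForFlag',
    expression=Expression(value="@equals(variables('done'), true)"),
    timeout='0.12:00:00',   # d.hh:mm:ss; defaults to 7 days when omitted
    activities=[WaitActivity(name='Pause', wait_time_in_seconds=30)])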
- :type profile_name: str - :param start_time: Start time for the token. If not specified, the current - time will be used. - :type start_time: str - :param expire_time: Expiration time for the token. Maximum duration for - the token is eight hours and by default the token will expire in eight - hours. - :type expire_time: str - """ - - _attribute_map = { - 'permissions': {'key': 'permissions', 'type': 'str'}, - 'access_resource_path': {'key': 'accessResourcePath', 'type': 'str'}, - 'profile_name': {'key': 'profileName', 'type': 'str'}, - 'start_time': {'key': 'startTime', 'type': 'str'}, - 'expire_time': {'key': 'expireTime', 'type': 'str'}, - } - - def __init__(self, *, permissions: str=None, access_resource_path: str=None, profile_name: str=None, start_time: str=None, expire_time: str=None, **kwargs) -> None: - super(UserAccessPolicy, self).__init__(**kwargs) - self.permissions = permissions - self.access_resource_path = access_resource_path - self.profile_name = profile_name - self.start_time = start_time - self.expire_time = expire_time - - -class UserProperty(Model): - """User property. - - All required parameters must be populated in order to send to Azure. - - :param name: Required. User property name. - :type name: str - :param value: Required. User property value. Type: string (or Expression - with resultType string). - :type value: object - """ - - _validation = { - 'name': {'required': True}, - 'value': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'object'}, - } - - def __init__(self, *, name: str, value, **kwargs) -> None: - super(UserProperty, self).__init__(**kwargs) - self.name = name - self.value = value - - -class ValidationActivity(ControlActivity): - """This activity verifies that an external resource exists. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized to this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param timeout: Specifies the timeout for the activity to run. If there is - no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 - week as default. Type: string (or Expression with resultType string), - pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type timeout: object - :param sleep: A delay in seconds between validation attempts. If no value - is specified, 10 seconds will be used as the default. Type: integer (or - Expression with resultType integer). - :type sleep: object - :param minimum_size: Can be used if dataset points to a file. The file - must be greater than or equal in size to the value specified. Type: - integer (or Expression with resultType integer). - :type minimum_size: object - :param child_items: Can be used if dataset points to a folder. If set to - true, the folder must have at least one file. If set to false, the folder - must be empty. Type: boolean (or Expression with resultType boolean). - :type child_items: object - :param dataset: Required.
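A hedged sketch of requesting a data plane read-only token with the UserAccessPolicy above; it assumes the management client exposes a factories.get_data_plane_access operation for this API version (that operation name is an assumption here), and the timestamps are ISO-8601 strings:

from azure.mgmt.datafactory.models import UserAccessPolicy

policy = UserAccessPolicy(
    permissions='r',                      # read-only is the only supported value
    access_resource_path='',              # empty string = the factory itself
    start_time='2019-06-07T00:00:00Z',
    expire_time='2019-06-07T08:00:00Z')   # at most eight hours after start
# token = client.factories.get_data_plane_access('myRg', 'myFactory', policy)  # assumed operation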
Validation activity dataset reference. - :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'dataset': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'timeout': {'key': 'typeProperties.timeout', 'type': 'object'}, - 'sleep': {'key': 'typeProperties.sleep', 'type': 'object'}, - 'minimum_size': {'key': 'typeProperties.minimumSize', 'type': 'object'}, - 'child_items': {'key': 'typeProperties.childItems', 'type': 'object'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, - } - - def __init__(self, *, name: str, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, timeout=None, sleep=None, minimum_size=None, child_items=None, **kwargs) -> None: - super(ValidationActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.timeout = timeout - self.sleep = sleep - self.minimum_size = minimum_size - self.child_items = child_items - self.dataset = dataset - self.type = 'Validation' - - -class VariableSpecification(Model): - """Definition of a single variable for a Pipeline. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Variable type. Possible values include: 'String', - 'Bool', 'Array' - :type type: str or ~azure.mgmt.datafactory.models.VariableType - :param default_value: Default value of variable. - :type default_value: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'default_value': {'key': 'defaultValue', 'type': 'object'}, - } - - def __init__(self, *, type, default_value=None, **kwargs) -> None: - super(VariableSpecification, self).__init__(**kwargs) - self.type = type - self.default_value = default_value - - -class VerticaLinkedService(LinkedService): - """Vertica linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized to this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection - string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication.
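For example, the ValidationActivity above can poll for a non-empty input file before downstream activities run; the dataset name and intervals are illustrative:

from azure.mgmt.datafactory.models import DatasetReference, ValidationActivity

check = ValidationActivity(
    name='WaitForInputFile',
    dataset=DatasetReference(reference_name='InputBlobDataset'),
    timeout='0.01:00:00',   # give up after one hour
    sleep=30,               # recheck every 30 seconds
    minimum_size=1)         # the file must be at least one byte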
Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: - super(VerticaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.pwd = pwd - self.encrypted_credential = encrypted_credential - self.type = 'Vertica' - - -class VerticaSource(CopySource): - """A copy activity Vertica source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized to this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'VerticaSource' - - -class VerticaTableDataset(Dataset): - """Vertica dataset. - - All required parameters must be populated in order to send to Azure.
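A minimal sketch of the VerticaLinkedService above with the password kept in Key Vault; the connection string, vault and secret names are placeholders, and the AzureKeyVaultSecretReference constructor (store, secret_name) is assumed from elsewhere in this package:

from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference, LinkedServiceReference, VerticaLinkedService)

vertica_ls = VerticaLinkedService(
    connection_string='Server=vertica.example.com;Port=5433;Database=sales;UID=loader',
    pwd=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='MyKeyVault'),
        secret_name='vertica-password'))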
- - :param additional_properties: Unmatched properties from the message are - deserialized to this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of the Vertica database. Type: string (or Expression - with resultType string). - :type table: object - :param vertica_table_dataset_schema: The schema name of the Vertica database. Type: - string (or Expression with resultType string). - :type vertica_table_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'vertica_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, vertica_table_dataset_schema=None, **kwargs) -> None: - super(VerticaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.table = table - self.vertica_table_dataset_schema = vertica_table_dataset_schema - self.type = 'VerticaTable' - - -class WaitActivity(ControlActivity): - """This activity suspends pipeline execution for the specified interval. - - All required parameters must be populated in order to send to Azure.
- - :param additional_properties: Unmatched properties from the message are - deserialized to this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param wait_time_in_seconds: Required. Duration in seconds. - :type wait_time_in_seconds: int - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'wait_time_in_seconds': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'wait_time_in_seconds': {'key': 'typeProperties.waitTimeInSeconds', 'type': 'int'}, - } - - def __init__(self, *, name: str, wait_time_in_seconds: int, additional_properties=None, description: str=None, depends_on=None, user_properties=None, **kwargs) -> None: - super(WaitActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.wait_time_in_seconds = wait_time_in_seconds - self.type = 'Wait' - - -class WebActivity(ExecutionActivity): - """Web activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized to this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param method: Required. REST API method for target endpoint. Possible - values include: 'GET', 'POST', 'PUT', 'DELETE' - :type method: str or ~azure.mgmt.datafactory.models.WebActivityMethod - :param url: Required. Web activity target endpoint and path. Type: string - (or Expression with resultType string). - :type url: object - :param headers: Represents the headers that will be sent to the request. - For example, to set the language and type on a request: "headers" : { - "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: - string (or Expression with resultType string). - :type headers: object - :param body: Represents the payload that will be sent to the endpoint. - Required for POST/PUT method, not allowed for GET method. Type: string (or - Expression with resultType string).
- :type body: object - :param authentication: Authentication method used for calling the - endpoint. - :type authentication: - ~azure.mgmt.datafactory.models.WebActivityAuthentication - :param datasets: List of datasets passed to web endpoint. - :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference] - :param linked_services: List of linked services passed to web endpoint. - :type linked_services: - list[~azure.mgmt.datafactory.models.LinkedServiceReference] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'method': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'method': {'key': 'typeProperties.method', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, - 'body': {'key': 'typeProperties.body', 'type': 'object'}, - 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'}, - 'datasets': {'key': 'typeProperties.datasets', 'type': '[DatasetReference]'}, - 'linked_services': {'key': 'typeProperties.linkedServices', 'type': '[LinkedServiceReference]'}, - } - - def __init__(self, *, name: str, method, url, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, headers=None, body=None, authentication=None, datasets=None, linked_services=None, **kwargs) -> None: - super(WebActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.method = method - self.url = url - self.headers = headers - self.body = body - self.authentication = authentication - self.datasets = datasets - self.linked_services = linked_services - self.type = 'WebActivity' - - -class WebActivityAuthentication(Model): - """Web activity authentication properties. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Web activity authentication - (Basic/ClientCertificate/MSI) - :type type: str - :param pfx: Base64-encoded contents of a PFX file. - :type pfx: ~azure.mgmt.datafactory.models.SecureString - :param username: Web activity authentication user name for basic - authentication. - :type username: str - :param password: Password for the PFX file or basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecureString - :param resource: Resource for which Azure Auth token will be requested - when using MSI Authentication. 
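To illustrate the WebActivity above, a sketch that POSTs a small JSON payload using managed identity (MSI) authentication; the URL, resource, and payload are placeholders:

from azure.mgmt.datafactory.models import WebActivity, WebActivityAuthentication

notify = WebActivity(
    name='PostStatus',
    method='POST',
    url='https://example.com/api/status',
    headers={'Content-Type': 'application/json'},
    body='{"status": "done"}',               # required for POST/PUT
    authentication=WebActivityAuthentication(
        type='MSI', resource='https://management.azure.com/'))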
- :type resource: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'pfx': {'key': 'pfx', 'type': 'SecureString'}, - 'username': {'key': 'username', 'type': 'str'}, - 'password': {'key': 'password', 'type': 'SecureString'}, - 'resource': {'key': 'resource', 'type': 'str'}, - } - - def __init__(self, *, type: str, pfx=None, username: str=None, password=None, resource: str=None, **kwargs) -> None: - super(WebActivityAuthentication, self).__init__(**kwargs) - self.type = type - self.pfx = pfx - self.username = username - self.password = password - self.resource = resource - - -class WebLinkedServiceTypeProperties(Model): - """Base definition of WebLinkedServiceTypeProperties; this typeProperties is - polymorphic based on authenticationType, so it is not flattened in SDK models. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: WebClientCertificateAuthentication, - WebBasicAuthentication, WebAnonymousAuthentication - - All required parameters must be populated in order to send to Azure. - - :param url: Required. The URL of the web service endpoint, e.g. - http://www.microsoft.com . Type: string (or Expression with resultType - string). - :type url: object - :param authentication_type: Required. Constant filled by server. - :type authentication_type: str - """ - - _validation = { - 'url': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'url': {'key': 'url', 'type': 'object'}, - 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, - } - - _subtype_map = { - 'authentication_type': {'ClientCertificate': 'WebClientCertificateAuthentication', 'Basic': 'WebBasicAuthentication', 'Anonymous': 'WebAnonymousAuthentication'} - } - - def __init__(self, *, url, **kwargs) -> None: - super(WebLinkedServiceTypeProperties, self).__init__(**kwargs) - self.url = url - self.authentication_type = None - - -class WebAnonymousAuthentication(WebLinkedServiceTypeProperties): - """A WebLinkedService that uses anonymous authentication to communicate with - an HTTP endpoint. - - All required parameters must be populated in order to send to Azure. - - :param url: Required. The URL of the web service endpoint, e.g. - http://www.microsoft.com . Type: string (or Expression with resultType - string). - :type url: object - :param authentication_type: Required. Constant filled by server. - :type authentication_type: str - """ - - _validation = { - 'url': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'url': {'key': 'url', 'type': 'object'}, - 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, - } - - def __init__(self, *, url, **kwargs) -> None: - super(WebAnonymousAuthentication, self).__init__(url=url, **kwargs) - self.authentication_type = 'Anonymous' - - -class WebBasicAuthentication(WebLinkedServiceTypeProperties): - """A WebLinkedService that uses basic authentication to communicate with an - HTTP endpoint. - - All required parameters must be populated in order to send to Azure. - - :param url: Required. The URL of the web service endpoint, e.g. - http://www.microsoft.com . Type: string (or Expression with resultType - string). - :type url: object - :param authentication_type: Required. Constant filled by server. - :type authentication_type: str - :param username: Required. User name for Basic authentication. Type: - string (or Expression with resultType string).
- :type username: object - :param password: Required. The password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - """ - - _validation = { - 'url': {'required': True}, - 'authentication_type': {'required': True}, - 'username': {'required': True}, - 'password': {'required': True}, - } - - _attribute_map = { - 'url': {'key': 'url', 'type': 'object'}, - 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, - 'username': {'key': 'username', 'type': 'object'}, - 'password': {'key': 'password', 'type': 'SecretBase'}, - } - - def __init__(self, *, url, username, password, **kwargs) -> None: - super(WebBasicAuthentication, self).__init__(url=url, **kwargs) - self.username = username - self.password = password - self.authentication_type = 'Basic' - - -class WebClientCertificateAuthentication(WebLinkedServiceTypeProperties): - """A WebLinkedService that uses client certificate based authentication to - communicate with an HTTP endpoint. This scheme follows mutual - authentication; the server must also provide valid credentials to the - client. - - All required parameters must be populated in order to send to Azure. - - :param url: Required. The URL of the web service endpoint, e.g. - http://www.microsoft.com . Type: string (or Expression with resultType - string). - :type url: object - :param authentication_type: Required. Constant filled by server. - :type authentication_type: str - :param pfx: Required. Base64-encoded contents of a PFX file. - :type pfx: ~azure.mgmt.datafactory.models.SecretBase - :param password: Required. Password for the PFX file. - :type password: ~azure.mgmt.datafactory.models.SecretBase - """ - - _validation = { - 'url': {'required': True}, - 'authentication_type': {'required': True}, - 'pfx': {'required': True}, - 'password': {'required': True}, - } - - _attribute_map = { - 'url': {'key': 'url', 'type': 'object'}, - 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, - 'pfx': {'key': 'pfx', 'type': 'SecretBase'}, - 'password': {'key': 'password', 'type': 'SecretBase'}, - } - - def __init__(self, *, url, pfx, password, **kwargs) -> None: - super(WebClientCertificateAuthentication, self).__init__(url=url, **kwargs) - self.pfx = pfx - self.password = password - self.authentication_type = 'ClientCertificate' - - -class WebHookActivity(ControlActivity): - """WebHook activity. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized to this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :ivar method: Required. REST API method for target endpoint. Default - value: "POST". - :vartype method: str - :param url: Required. WebHook activity target endpoint and path. Type: - string (or Expression with resultType string). - :type url: object - :param timeout: The timeout within which the webhook should be called - back.
If there is no value specified, it defaults to 10 minutes. Type: - string. Pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type timeout: str - :param headers: Represents the headers that will be sent to the request. - For example, to set the language and type on a request: "headers" : { - "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: - string (or Expression with resultType string). - :type headers: object - :param body: Represents the payload that will be sent to the endpoint. - Required for POST/PUT method, not allowed for GET method. Type: string (or - Expression with resultType string). - :type body: object - :param authentication: Authentication method used for calling the - endpoint. - :type authentication: - ~azure.mgmt.datafactory.models.WebActivityAuthentication - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'method': {'required': True, 'constant': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'method': {'key': 'typeProperties.method', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'timeout': {'key': 'typeProperties.timeout', 'type': 'str'}, - 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, - 'body': {'key': 'typeProperties.body', 'type': 'object'}, - 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'}, - } - - method = "POST" - - def __init__(self, *, name: str, url, additional_properties=None, description: str=None, depends_on=None, user_properties=None, timeout: str=None, headers=None, body=None, authentication=None, **kwargs) -> None: - super(WebHookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.url = url - self.timeout = timeout - self.headers = headers - self.body = body - self.authentication = authentication - self.type = 'WebHook' - - -class WebLinkedService(LinkedService): - """Web linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized to this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param type_properties: Required. Web linked service properties.
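A short sketch of the WebHookActivity above; the method is fixed to POST by the model, so only the endpoint, an optional callback timeout, and the payload are supplied (all values hypothetical):

from azure.mgmt.datafactory.models import WebHookActivity

hook = WebHookActivity(
    name='AwaitApproval',
    url='https://example.com/api/approve',
    timeout='00:10:00',                        # callback window, hh:mm:ss
    body='{"runId": "@{pipeline().RunId}"}')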
- :type type_properties: - ~azure.mgmt.datafactory.models.WebLinkedServiceTypeProperties - """ - - _validation = { - 'type': {'required': True}, - 'type_properties': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'type_properties': {'key': 'typeProperties', 'type': 'WebLinkedServiceTypeProperties'}, - } - - def __init__(self, *, type_properties, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: - super(WebLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type_properties = type_properties - self.type = 'Web' - - -class WebSource(CopySource): - """A copy activity source for web page table. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized to this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: - super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type = 'WebSource' - - -class WebTableDataset(Dataset): - """The dataset points to an HTML table in the web page. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized to this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset.
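Tying the authentication subclasses to WebLinkedService, a sketch using basic authentication; the endpoint and credentials are placeholders, and SecureString is this package's in-line secret model:

from azure.mgmt.datafactory.models import (
    SecureString, WebBasicAuthentication, WebLinkedService)

web_ls = WebLinkedService(
    type_properties=WebBasicAuthentication(
        url='https://example.com/data',
        username='svc-reader',
        password=SecureString(value='example-password')))  # placeholder; avoid hardcoding real secrets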
Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param index: Required. The zero-based index of the table in the web page. - Type: integer (or Expression with resultType integer), minimum: 0. - :type index: object - :param path: The relative URL to the web page from the linked service URL. - Type: string (or Expression with resultType string). - :type path: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'index': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'index': {'key': 'typeProperties.index', 'type': 'object'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, index, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, path=None, **kwargs) -> None: - super(WebTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.index = index - self.path = path - self.type = 'WebTable' - - -class XeroLinkedService(LinkedService): - """Xero Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized to this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The endpoint of the Xero server. (i.e. - api.xero.com) - :type host: object - :param consumer_key: The consumer key associated with the Xero - application.
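For instance, the WebTableDataset above addresses a single HTML table by zero-based index, relative to the linked service URL; the names are illustrative:

from azure.mgmt.datafactory.models import LinkedServiceReference, WebTableDataset

table_ds = WebTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='MyWebService'),
    index=0,                     # first table on the page
    path='marketdata/prices')    # relative URL, optional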
- :type consumer_key: ~azure.mgmt.datafactory.models.SecretBase - :param private_key: The private key from the .pem file that was generated - for your Xero private application. You must include all the text from the - .pem file, including the Unix line endings(\n). - :type private_key: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'SecretBase'}, - 'private_key': {'key': 'typeProperties.privateKey', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, consumer_key=None, private_key=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(XeroLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.consumer_key = consumer_key - self.private_key = private_key - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'Xero' - - -class XeroObjectDataset(Dataset): - """Xero Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized to this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset.
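A sketch of the XeroLinkedService above for a private application; the key material is placeholder text, with SecureString standing in for any SecretBase:

from azure.mgmt.datafactory.models import SecureString, XeroLinkedService

xero_ls = XeroLinkedService(
    host='api.xero.com',
    consumer_key=SecureString(value='<consumer-key>'),
    private_key=SecureString(value='<entire .pem file text, with \n line endings>'))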
Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(XeroObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'XeroObject' - - -class XeroSource(CopySource): - """A copy activity Xero Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized to this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string).
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'XeroSource' - - -class ZohoLinkedService(LinkedService): - """Zoho server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param endpoint: Required. The endpoint of the Zoho server. (i.e. - crm.zoho.com/crm/private) - :type endpoint: object - :param access_token: The access token for Zoho authentication. - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, endpoint, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(ZohoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.endpoint = endpoint - self.access_token = access_token - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'Zoho' - - -class ZohoObjectDataset(Dataset): - """Zoho server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). 
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(ZohoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'ZohoObject' - - -class ZohoSource(CopySource): - """A copy activity Zoho server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
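[Editor's aside, not part of the patch: the Zoho trio above composes like any other connector, with a linked service holding the connection, a dataset naming the table, and a source used inside a copy activity. The reference name 'ZohoCrm' and the table are placeholders.]

from azure.mgmt.datafactory.models import (
    ZohoLinkedService, ZohoObjectDataset, ZohoSource,
    LinkedServiceReference, SecureString)

zoho_ls = ZohoLinkedService(
    endpoint='crm.zoho.com/crm/private',               # required
    access_token=SecureString(value='<access-token>'),
)
dataset = ZohoObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='ZohoCrm'),
    table_name='Accounts',
)
source = ZohoSource(query='SELECT * FROM Accounts')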
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'ZohoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_paged_models.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_paged_models.py deleted file mode 100644 index 4092d2143a7c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_paged_models.py +++ /dev/null @@ -1,118 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.paging import Paged - - -class OperationPaged(Paged): - """ - A paging container for iterating over a list of :class:`Operation ` object - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'current_page': {'key': 'value', 'type': '[Operation]'} - } - - def __init__(self, *args, **kwargs): - - super(OperationPaged, self).__init__(*args, **kwargs) -class FactoryPaged(Paged): - """ - A paging container for iterating over a list of :class:`Factory ` object - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'current_page': {'key': 'value', 'type': '[Factory]'} - } - - def __init__(self, *args, **kwargs): - - super(FactoryPaged, self).__init__(*args, **kwargs) -class IntegrationRuntimeResourcePaged(Paged): - """ - A paging container for iterating over a list of :class:`IntegrationRuntimeResource ` object - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'current_page': {'key': 'value', 'type': '[IntegrationRuntimeResource]'} - } - - def __init__(self, *args, **kwargs): - - super(IntegrationRuntimeResourcePaged, self).__init__(*args, **kwargs) -class LinkedServiceResourcePaged(Paged): - """ - A paging container for iterating over a list of :class:`LinkedServiceResource ` object - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'current_page': {'key': 'value', 'type': '[LinkedServiceResource]'} - } - - def __init__(self, *args, **kwargs): - - super(LinkedServiceResourcePaged, self).__init__(*args, **kwargs) -class DatasetResourcePaged(Paged): - """ - A paging container for iterating over a list of :class:`DatasetResource ` object - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 
'current_page': {'key': 'value', 'type': '[DatasetResource]'} - } - - def __init__(self, *args, **kwargs): - - super(DatasetResourcePaged, self).__init__(*args, **kwargs) -class PipelineResourcePaged(Paged): - """ - A paging container for iterating over a list of :class:`PipelineResource ` object - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'current_page': {'key': 'value', 'type': '[PipelineResource]'} - } - - def __init__(self, *args, **kwargs): - - super(PipelineResourcePaged, self).__init__(*args, **kwargs) -class TriggerResourcePaged(Paged): - """ - A paging container for iterating over a list of :class:`TriggerResource ` object - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'current_page': {'key': 'value', 'type': '[TriggerResource]'} - } - - def __init__(self, *args, **kwargs): - - super(TriggerResourcePaged, self).__init__(*args, **kwargs) -class RerunTriggerResourcePaged(Paged): - """ - A paging container for iterating over a list of :class:`RerunTriggerResource ` object - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'current_page': {'key': 'value', 'type': '[RerunTriggerResource]'} - } - - def __init__(self, *args, **kwargs): - - super(RerunTriggerResourcePaged, self).__init__(*args, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response.py new file mode 100644 index 000000000000..033d0fd9591f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response.py @@ -0,0 +1,36 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class AccessPolicyResponse(Model): + """Get Data Plane read only token response definition. + + :param policy: The user access policy. + :type policy: ~azure.mgmt.datafactory.models.UserAccessPolicy + :param access_token: Data Plane read only access token. + :type access_token: str + :param data_plane_url: Data Plane service base URL. 
+ :type data_plane_url: str + """ + + _attribute_map = { + 'policy': {'key': 'policy', 'type': 'UserAccessPolicy'}, + 'access_token': {'key': 'accessToken', 'type': 'str'}, + 'data_plane_url': {'key': 'dataPlaneUrl', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AccessPolicyResponse, self).__init__(**kwargs) + self.policy = kwargs.get('policy', None) + self.access_token = kwargs.get('access_token', None) + self.data_plane_url = kwargs.get('data_plane_url', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response_py3.py new file mode 100644 index 000000000000..2932f547ff26 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response_py3.py @@ -0,0 +1,36 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class AccessPolicyResponse(Model): + """Get Data Plane read only token response definition. + + :param policy: The user access policy. + :type policy: ~azure.mgmt.datafactory.models.UserAccessPolicy + :param access_token: Data Plane read only access token. + :type access_token: str + :param data_plane_url: Data Plane service base URL. + :type data_plane_url: str + """ + + _attribute_map = { + 'policy': {'key': 'policy', 'type': 'UserAccessPolicy'}, + 'access_token': {'key': 'accessToken', 'type': 'str'}, + 'data_plane_url': {'key': 'dataPlaneUrl', 'type': 'str'}, + } + + def __init__(self, *, policy=None, access_token: str=None, data_plane_url: str=None, **kwargs) -> None: + super(AccessPolicyResponse, self).__init__(**kwargs) + self.policy = policy + self.access_token = access_token + self.data_plane_url = data_plane_url diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity.py new file mode 100644 index 000000000000..72d920f1d04c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class Activity(Model): + """A pipeline activity. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ExecutionActivity, ControlActivity + + All required parameters must be populated in order to send to Azure. 
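[Editor's aside, not part of the patch: a hedged sketch of how AccessPolicyResponse is obtained, assuming the factories.get_data_plane_access operation that accompanies this model in the same SDK update. Credentials, resource names, and times are placeholders.]

from azure.common.credentials import ServicePrincipalCredentials
from azure.mgmt.datafactory import DataFactoryManagementClient
from azure.mgmt.datafactory.models import UserAccessPolicy

credentials = ServicePrincipalCredentials(
    client_id='<client-id>', secret='<secret>', tenant='<tenant>')
client = DataFactoryManagementClient(credentials, '<subscription-id>')

policy = UserAccessPolicy(
    permissions='r',                        # read-only token
    access_resource_path='',                # empty path = the whole factory
    expire_time='2019-06-08T13:20:00Z',
)
response = client.factories.get_data_plane_access('myRg', 'myFactory', policy)
print(response.access_token, response.data_plane_url)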
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Execution': 'ExecutionActivity', 'Container': 'ControlActivity'} + } + + def __init__(self, **kwargs): + super(Activity, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.name = kwargs.get('name', None) + self.description = kwargs.get('description', None) + self.depends_on = kwargs.get('depends_on', None) + self.user_properties = kwargs.get('user_properties', None) + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency.py new file mode 100644 index 000000000000..a15b34acc24f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ActivityDependency(Model): + """Activity dependency information. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param activity: Required. Activity name. + :type activity: str + :param dependency_conditions: Required. Match-Condition for the + dependency. 
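[Editor's aside, not part of the patch: the _subtype_map above is what lets msrest resolve a raw payload into the right Activity subclass via the 'type' discriminator. A small sketch, with an illustrative WaitActivity payload.]

from msrest import Deserializer
from azure.mgmt.datafactory import models

# Same class-registry pattern the generated client itself uses.
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
deserialize = Deserializer(client_models)

payload = {'name': 'wait1', 'type': 'Wait',
           'typeProperties': {'waitTimeInSeconds': 5}}
activity = deserialize('Activity', payload)
print(type(activity).__name__)   # WaitActivity, found through the flattened subtype maps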
+ :type dependency_conditions: list[str or + ~azure.mgmt.datafactory.models.DependencyCondition] + """ + + _validation = { + 'activity': {'required': True}, + 'dependency_conditions': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'activity': {'key': 'activity', 'type': 'str'}, + 'dependency_conditions': {'key': 'dependencyConditions', 'type': '[str]'}, + } + + def __init__(self, **kwargs): + super(ActivityDependency, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.activity = kwargs.get('activity', None) + self.dependency_conditions = kwargs.get('dependency_conditions', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency_py3.py new file mode 100644 index 000000000000..2883a81a0adc --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ActivityDependency(Model): + """Activity dependency information. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param activity: Required. Activity name. + :type activity: str + :param dependency_conditions: Required. Match-Condition for the + dependency. + :type dependency_conditions: list[str or + ~azure.mgmt.datafactory.models.DependencyCondition] + """ + + _validation = { + 'activity': {'required': True}, + 'dependency_conditions': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'activity': {'key': 'activity', 'type': 'str'}, + 'dependency_conditions': {'key': 'dependencyConditions', 'type': '[str]'}, + } + + def __init__(self, *, activity: str, dependency_conditions, additional_properties=None, **kwargs) -> None: + super(ActivityDependency, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.activity = activity + self.dependency_conditions = dependency_conditions diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy.py new file mode 100644 index 000000000000..4475cdbd9bea --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy.py @@ -0,0 +1,59 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
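[Editor's aside, not part of the patch: for example, a dependency gating one activity on another's outcome. The activity names are hypothetical, and WaitActivity's signature is assumed from the same generated models.]

from azure.mgmt.datafactory.models import ActivityDependency, WaitActivity

wait2 = WaitActivity(
    name='wait2',
    wait_time_in_seconds=1,
    depends_on=[ActivityDependency(activity='wait1',
                                   dependency_conditions=['Succeeded'])],
)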
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ActivityPolicy(Model): + """Execution policy for an activity. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param timeout: Specifies the timeout for the activity to run. The default + timeout is 7 days. Type: string (or Expression with resultType string), + pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type timeout: object + :param retry: Maximum ordinary retry attempts. Default is 0. Type: integer + (or Expression with resultType integer), minimum: 0. + :type retry: object + :param retry_interval_in_seconds: Interval between each retry attempt (in + seconds). The default is 30 sec. + :type retry_interval_in_seconds: int + :param secure_input: When set to true, Input from activity is considered + as secure and will not be logged to monitoring. + :type secure_input: bool + :param secure_output: When set to true, Output from activity is considered + as secure and will not be logged to monitoring. + :type secure_output: bool + """ + + _validation = { + 'retry_interval_in_seconds': {'maximum': 86400, 'minimum': 30}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'timeout': {'key': 'timeout', 'type': 'object'}, + 'retry': {'key': 'retry', 'type': 'object'}, + 'retry_interval_in_seconds': {'key': 'retryIntervalInSeconds', 'type': 'int'}, + 'secure_input': {'key': 'secureInput', 'type': 'bool'}, + 'secure_output': {'key': 'secureOutput', 'type': 'bool'}, + } + + def __init__(self, **kwargs): + super(ActivityPolicy, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.timeout = kwargs.get('timeout', None) + self.retry = kwargs.get('retry', None) + self.retry_interval_in_seconds = kwargs.get('retry_interval_in_seconds', None) + self.secure_input = kwargs.get('secure_input', None) + self.secure_output = kwargs.get('secure_output', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy_py3.py new file mode 100644 index 000000000000..52d469679974 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy_py3.py @@ -0,0 +1,59 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ActivityPolicy(Model): + """Execution policy for an activity. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param timeout: Specifies the timeout for the activity to run. The default + timeout is 7 days. 
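[Editor's aside, not part of the patch: a short sketch of the policy documented above. Note the _validation map enforces retry_interval_in_seconds within [30, 86400], and timeout follows the d.hh:mm:ss pattern.]

from azure.mgmt.datafactory.models import ActivityPolicy

policy = ActivityPolicy(
    timeout='0.01:00:00',            # 1 hour, matching the d.hh:mm:ss pattern
    retry=3,                         # up to three ordinary retries
    retry_interval_in_seconds=60,    # must lie within [30, 86400]
    secure_output=True,              # keep activity output out of monitoring logs
)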
Type: string (or Expression with resultType string), + pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type timeout: object + :param retry: Maximum ordinary retry attempts. Default is 0. Type: integer + (or Expression with resultType integer), minimum: 0. + :type retry: object + :param retry_interval_in_seconds: Interval between each retry attempt (in + seconds). The default is 30 sec. + :type retry_interval_in_seconds: int + :param secure_input: When set to true, Input from activity is considered + as secure and will not be logged to monitoring. + :type secure_input: bool + :param secure_output: When set to true, Output from activity is considered + as secure and will not be logged to monitoring. + :type secure_output: bool + """ + + _validation = { + 'retry_interval_in_seconds': {'maximum': 86400, 'minimum': 30}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'timeout': {'key': 'timeout', 'type': 'object'}, + 'retry': {'key': 'retry', 'type': 'object'}, + 'retry_interval_in_seconds': {'key': 'retryIntervalInSeconds', 'type': 'int'}, + 'secure_input': {'key': 'secureInput', 'type': 'bool'}, + 'secure_output': {'key': 'secureOutput', 'type': 'bool'}, + } + + def __init__(self, *, additional_properties=None, timeout=None, retry=None, retry_interval_in_seconds: int=None, secure_input: bool=None, secure_output: bool=None, **kwargs) -> None: + super(ActivityPolicy, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.timeout = timeout + self.retry = retry + self.retry_interval_in_seconds = retry_interval_in_seconds + self.secure_input = secure_input + self.secure_output = secure_output diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_py3.py new file mode 100644 index 000000000000..b5997c9352e1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_py3.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class Activity(Model): + """A pipeline activity. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ExecutionActivity, ControlActivity + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Execution': 'ExecutionActivity', 'Container': 'ControlActivity'} + } + + def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, **kwargs) -> None: + super(Activity, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.name = name + self.description = description + self.depends_on = depends_on + self.user_properties = user_properties + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run.py new file mode 100644 index 000000000000..901ffe23cd4e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run.py @@ -0,0 +1,102 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ActivityRun(Model): + """Information about an activity run in a pipeline. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar pipeline_name: The name of the pipeline. + :vartype pipeline_name: str + :ivar pipeline_run_id: The id of the pipeline run. + :vartype pipeline_run_id: str + :ivar activity_name: The name of the activity. + :vartype activity_name: str + :ivar activity_type: The type of the activity. + :vartype activity_type: str + :ivar activity_run_id: The id of the activity run. + :vartype activity_run_id: str + :ivar linked_service_name: The name of the compute linked service. + :vartype linked_service_name: str + :ivar status: The status of the activity run. + :vartype status: str + :ivar activity_run_start: The start time of the activity run in 'ISO 8601' + format. + :vartype activity_run_start: datetime + :ivar activity_run_end: The end time of the activity run in 'ISO 8601' + format. + :vartype activity_run_end: datetime + :ivar duration_in_ms: The duration of the activity run. + :vartype duration_in_ms: int + :ivar input: The input for the activity. + :vartype input: object + :ivar output: The output for the activity. + :vartype output: object + :ivar error: The error if any from the activity run. 
+ :vartype error: object + """ + + _validation = { + 'pipeline_name': {'readonly': True}, + 'pipeline_run_id': {'readonly': True}, + 'activity_name': {'readonly': True}, + 'activity_type': {'readonly': True}, + 'activity_run_id': {'readonly': True}, + 'linked_service_name': {'readonly': True}, + 'status': {'readonly': True}, + 'activity_run_start': {'readonly': True}, + 'activity_run_end': {'readonly': True}, + 'duration_in_ms': {'readonly': True}, + 'input': {'readonly': True}, + 'output': {'readonly': True}, + 'error': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, + 'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'}, + 'activity_name': {'key': 'activityName', 'type': 'str'}, + 'activity_type': {'key': 'activityType', 'type': 'str'}, + 'activity_run_id': {'key': 'activityRunId', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'activity_run_start': {'key': 'activityRunStart', 'type': 'iso-8601'}, + 'activity_run_end': {'key': 'activityRunEnd', 'type': 'iso-8601'}, + 'duration_in_ms': {'key': 'durationInMs', 'type': 'int'}, + 'input': {'key': 'input', 'type': 'object'}, + 'output': {'key': 'output', 'type': 'object'}, + 'error': {'key': 'error', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ActivityRun, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.pipeline_name = None + self.pipeline_run_id = None + self.activity_name = None + self.activity_type = None + self.activity_run_id = None + self.linked_service_name = None + self.status = None + self.activity_run_start = None + self.activity_run_end = None + self.duration_in_ms = None + self.input = None + self.output = None + self.error = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run_py3.py new file mode 100644 index 000000000000..488e822de957 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run_py3.py @@ -0,0 +1,102 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ActivityRun(Model): + """Information about an activity run in a pipeline. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar pipeline_name: The name of the pipeline. + :vartype pipeline_name: str + :ivar pipeline_run_id: The id of the pipeline run. + :vartype pipeline_run_id: str + :ivar activity_name: The name of the activity. + :vartype activity_name: str + :ivar activity_type: The type of the activity. + :vartype activity_type: str + :ivar activity_run_id: The id of the activity run. 
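[Editor's aside, not part of the patch: because the fields above are read-only, a locally constructed ActivityRun carries only additional_properties; everything else stays None until the service populates it.]

from azure.mgmt.datafactory.models import ActivityRun

run = ActivityRun(additional_properties={'note': 'local'})
assert run.status is None          # readonly: populated only by the server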
+ :vartype activity_run_id: str + :ivar linked_service_name: The name of the compute linked service. + :vartype linked_service_name: str + :ivar status: The status of the activity run. + :vartype status: str + :ivar activity_run_start: The start time of the activity run in 'ISO 8601' + format. + :vartype activity_run_start: datetime + :ivar activity_run_end: The end time of the activity run in 'ISO 8601' + format. + :vartype activity_run_end: datetime + :ivar duration_in_ms: The duration of the activity run. + :vartype duration_in_ms: int + :ivar input: The input for the activity. + :vartype input: object + :ivar output: The output for the activity. + :vartype output: object + :ivar error: The error if any from the activity run. + :vartype error: object + """ + + _validation = { + 'pipeline_name': {'readonly': True}, + 'pipeline_run_id': {'readonly': True}, + 'activity_name': {'readonly': True}, + 'activity_type': {'readonly': True}, + 'activity_run_id': {'readonly': True}, + 'linked_service_name': {'readonly': True}, + 'status': {'readonly': True}, + 'activity_run_start': {'readonly': True}, + 'activity_run_end': {'readonly': True}, + 'duration_in_ms': {'readonly': True}, + 'input': {'readonly': True}, + 'output': {'readonly': True}, + 'error': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, + 'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'}, + 'activity_name': {'key': 'activityName', 'type': 'str'}, + 'activity_type': {'key': 'activityType', 'type': 'str'}, + 'activity_run_id': {'key': 'activityRunId', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'activity_run_start': {'key': 'activityRunStart', 'type': 'iso-8601'}, + 'activity_run_end': {'key': 'activityRunEnd', 'type': 'iso-8601'}, + 'duration_in_ms': {'key': 'durationInMs', 'type': 'int'}, + 'input': {'key': 'input', 'type': 'object'}, + 'output': {'key': 'output', 'type': 'object'}, + 'error': {'key': 'error', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(ActivityRun, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.pipeline_name = None + self.pipeline_run_id = None + self.activity_name = None + self.activity_type = None + self.activity_run_id = None + self.linked_service_name = None + self.status = None + self.activity_run_start = None + self.activity_run_end = None + self.duration_in_ms = None + self.input = None + self.output = None + self.error = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response.py new file mode 100644 index 000000000000..2fcd25a5ced2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ActivityRunsQueryResponse(Model): + """A list of activity runs. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of activity runs. + :type value: list[~azure.mgmt.datafactory.models.ActivityRun] + :param continuation_token: The continuation token for getting the next + page of results, if any remaining results exist, null otherwise. + :type continuation_token: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[ActivityRun]'}, + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ActivityRunsQueryResponse, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.continuation_token = kwargs.get('continuation_token', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response_py3.py new file mode 100644 index 000000000000..ee3eae141635 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response_py3.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ActivityRunsQueryResponse(Model): + """A list of activity runs. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of activity runs. + :type value: list[~azure.mgmt.datafactory.models.ActivityRun] + :param continuation_token: The continuation token for getting the next + page of results, if any remaining results exist, null otherwise. + :type continuation_token: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[ActivityRun]'}, + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + } + + def __init__(self, *, value, continuation_token: str=None, **kwargs) -> None: + super(ActivityRunsQueryResponse, self).__init__(**kwargs) + self.value = value + self.continuation_token = continuation_token diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service.py new file mode 100644 index 000000000000..b1e5ed533bba --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service.py @@ -0,0 +1,106 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator.
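[Editor's aside, not part of the patch: a hedged sketch of where ActivityRunsQueryResponse comes from, assuming the activity_runs.query_by_pipeline_run operation and RunFilterParameters model of this SDK vintage; `client` is the DataFactoryManagementClient from the earlier sketch and the names are placeholders.]

import datetime
from azure.mgmt.datafactory.models import RunFilterParameters

filters = RunFilterParameters(
    last_updated_after=datetime.datetime(2019, 6, 1),
    last_updated_before=datetime.datetime(2019, 6, 7),
)
response = client.activity_runs.query_by_pipeline_run(
    'myRg', 'myFactory', '<pipeline-run-id>', filters)
for run in response.value:                      # required list[ActivityRun]
    print(run.activity_name, run.status)
# response.continuation_token, when not None, feeds the next page request.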
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AmazonMWSLinkedService(LinkedService): + """Amazon Marketplace Web Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of the Amazon MWS server, (i.e. + mws.amazonservices.com) + :type endpoint: object + :param marketplace_id: Required. The Amazon Marketplace ID you want to + retrieve data from. To retrieve data from multiple Marketplace IDs, + separate them with a comma (,). (i.e. A2EUQ1WTGCTBG2) + :type marketplace_id: object + :param seller_id: Required. The Amazon seller ID. + :type seller_id: object + :param mws_auth_token: The Amazon MWS authentication token. + :type mws_auth_token: ~azure.mgmt.datafactory.models.SecretBase + :param access_key_id: Required. The access key id used to access data. + :type access_key_id: object + :param secret_key: The secret key used to access data. + :type secret_key: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'marketplace_id': {'required': True}, + 'seller_id': {'required': True}, + 'access_key_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'marketplace_id': {'key': 'typeProperties.marketplaceID', 'type': 'object'}, + 'seller_id': {'key': 'typeProperties.sellerID', 'type': 'object'}, + 'mws_auth_token': {'key': 'typeProperties.mwsAuthToken', 'type': 'SecretBase'}, + 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, + 'secret_key': {'key': 'typeProperties.secretKey', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AmazonMWSLinkedService, self).__init__(**kwargs) + self.endpoint = kwargs.get('endpoint', None) + self.marketplace_id = kwargs.get('marketplace_id', None) + self.seller_id = kwargs.get('seller_id', None) + self.mws_auth_token = kwargs.get('mws_auth_token', None) + self.access_key_id = kwargs.get('access_key_id', None) + self.secret_key = kwargs.get('secret_key', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AmazonMWS' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service_py3.py new file mode 100644 index 000000000000..a8db63933154 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service_py3.py @@ -0,0 +1,106 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AmazonMWSLinkedService(LinkedService): + """Amazon Marketplace Web Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of the Amazon MWS server, (i.e. + mws.amazonservices.com) + :type endpoint: object + :param marketplace_id: Required. The Amazon Marketplace ID you want to + retrieve data from. To retrieve data from multiple Marketplace IDs, + separate them with a comma (,). (i.e. A2EUQ1WTGCTBG2) + :type marketplace_id: object + :param seller_id: Required. The Amazon seller ID. + :type seller_id: object + :param mws_auth_token: The Amazon MWS authentication token. + :type mws_auth_token: ~azure.mgmt.datafactory.models.SecretBase + :param access_key_id: Required. The access key id used to access data. + :type access_key_id: object + :param secret_key: The secret key used to access data. + :type secret_key: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'marketplace_id': {'required': True}, + 'seller_id': {'required': True}, + 'access_key_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'marketplace_id': {'key': 'typeProperties.marketplaceID', 'type': 'object'}, + 'seller_id': {'key': 'typeProperties.sellerID', 'type': 'object'}, + 'mws_auth_token': {'key': 'typeProperties.mwsAuthToken', 'type': 'SecretBase'}, + 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, + 'secret_key': {'key': 'typeProperties.secretKey', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, endpoint, marketplace_id, seller_id, access_key_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, mws_auth_token=None, secret_key=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(AmazonMWSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.endpoint = endpoint + self.marketplace_id = marketplace_id + self.seller_id = seller_id + self.mws_auth_token = mws_auth_token + self.access_key_id = access_key_id + self.secret_key = secret_key + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'AmazonMWS' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset.py new file mode 100644 index 000000000000..9885f5c77d8c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AmazonMWSObjectDataset(Dataset): + """Amazon Marketplace Web Service dataset. + + All required parameters must be populated in order to send to Azure. 
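[Editor's aside, not part of the patch: an illustrative construction of the linked service above. The required keyword arguments mirror its _validation map, and the secret uses SecureString.]

from azure.mgmt.datafactory.models import AmazonMWSLinkedService, SecureString

mws_ls = AmazonMWSLinkedService(
    endpoint='mws.amazonservices.com',
    marketplace_id='A2EUQ1WTGCTBG2',      # comma-separate multiple Marketplace IDs
    seller_id='<seller-id>',
    access_key_id='<access-key-id>',
    secret_key=SecureString(value='<secret-key>'),
)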
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AmazonMWSObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'AmazonMWSObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset_py3.py new file mode 100644 index 000000000000..015ed9401c15 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AmazonMWSObjectDataset(Dataset): + """Amazon Marketplace Web Service dataset. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(AmazonMWSObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'AmazonMWSObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source.py new file mode 100644 index 000000000000..f9d034e610d4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
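As an illustrative sketch (not part of the patch), the dataset above is typically bound to an existing linked service by reference; the linked service name here is hypothetical.

    from azure.mgmt.datafactory.models import (
        AmazonMWSObjectDataset, LinkedServiceReference)

    # 'table_name' names the MWS object to read; 'Orders' is illustrative only.
    mws_dataset = AmazonMWSObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name='AmazonMWSLinkedService'),
        table_name='Orders')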
+# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class AmazonMWSSource(CopySource): + """A copy activity Amazon Marketplace Web Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AmazonMWSSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'AmazonMWSSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source_py3.py new file mode 100644 index 000000000000..9ef7f5b30244 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AmazonMWSSource(CopySource): + """A copy activity Amazon Marketplace Web Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. 
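A hedged sketch of the copy-activity source defined above: per its _validation map only 'type' is required, and the constructor sets that itself, so 'query' may be omitted entirely.

    from azure.mgmt.datafactory.models import AmazonMWSSource

    # The query text is passed through to the connector; this one is illustrative.
    mws_source = AmazonMWSSource(query='SELECT * FROM Orders')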
Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(AmazonMWSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'AmazonMWSSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service.py new file mode 100644 index 000000000000..4272b28c13f5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AmazonRedshiftLinkedService(LinkedService): + """Linked service for Amazon Redshift. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. The name of the Amazon Redshift server. Type: + string (or Expression with resultType string). + :type server: object + :param username: The username of the Amazon Redshift source. Type: string + (or Expression with resultType string). + :type username: object + :param password: The password of the Amazon Redshift source. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param database: Required. The database name of the Amazon Redshift + source. 
Type: string (or Expression with resultType string). + :type database: object + :param port: The TCP port number that the Amazon Redshift server uses to + listen for client connections. The default value is 5439. Type: integer + (or Expression with resultType integer). + :type port: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AmazonRedshiftLinkedService, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.database = kwargs.get('database', None) + self.port = kwargs.get('port', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AmazonRedshift' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service_py3.py new file mode 100644 index 000000000000..3b84583c6c86 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AmazonRedshiftLinkedService(LinkedService): + """Linked service for Amazon Redshift. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. The name of the Amazon Redshift server. Type: + string (or Expression with resultType string). + :type server: object + :param username: The username of the Amazon Redshift source. Type: string + (or Expression with resultType string). + :type username: object + :param password: The password of the Amazon Redshift source. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param database: Required. The database name of the Amazon Redshift + source. Type: string (or Expression with resultType string). + :type database: object + :param port: The TCP port number that the Amazon Redshift server uses to + listen for client connections. The default value is 5439. Type: integer + (or Expression with resultType integer). + :type port: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, server, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, username=None, password=None, port=None, encrypted_credential=None, **kwargs) -> None: + super(AmazonRedshiftLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.server = server + self.username = username + self.password = password + self.database = database + self.port = port + self.encrypted_credential = encrypted_credential + self.type = 'AmazonRedshift' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source.py new file mode 100644 index 000000000000..d4fdfa4aa2ba --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
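A minimal sketch (not part of the patch) of the Redshift linked service above; the docstring notes the port defaults to 5439, so it is shown only for clarity, and the host and credentials are placeholders.

    from azure.mgmt.datafactory.models import AmazonRedshiftLinkedService, SecureString

    redshift_linked_service = AmazonRedshiftLinkedService(
        server='<cluster>.redshift.amazonaws.com',  # required
        database='dev',                             # required
        username='admin',
        password=SecureString(value='<password>'),
        port=5439)                                  # default per the docstring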
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class AmazonRedshiftSource(CopySource): + """A copy activity Amazon Redshift source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + :param redshift_unload_settings: The Amazon S3 settings needed for the + interim Amazon S3 when copying from Amazon Redshift with unload. With + this, data from Amazon Redshift source will be unloaded into S3 first and + then copied into the targeted sink from the interim S3. + :type redshift_unload_settings: + ~azure.mgmt.datafactory.models.RedshiftUnloadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, + } + + def __init__(self, **kwargs): + super(AmazonRedshiftSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.redshift_unload_settings = kwargs.get('redshift_unload_settings', None) + self.type = 'AmazonRedshiftSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source_py3.py new file mode 100644 index 000000000000..9b34b2ef5b97 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source_py3.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AmazonRedshiftSource(CopySource): + """A copy activity Amazon Redshift source.
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + :param redshift_unload_settings: The Amazon S3 settings needed for the + interim Amazon S3 when copying from Amazon Redshift with unload. With + this, data from Amazon Redshift source will be unloaded into S3 first and + then copied into the targeted sink from the interim S3. + :type redshift_unload_settings: + ~azure.mgmt.datafactory.models.RedshiftUnloadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, redshift_unload_settings=None, **kwargs) -> None: + super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.redshift_unload_settings = redshift_unload_settings + self.type = 'AmazonRedshiftSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_table_dataset.py new file mode 100644 index 000000000000..987151367421 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_table_dataset.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AmazonRedshiftTableDataset(Dataset): + """The Amazon Redshift table dataset. + + All required parameters must be populated in order to send to Azure. 
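Illustrative only: wiring the source above for an UNLOAD-staged copy, where rows are first unloaded to an interim S3 bucket and then copied to the sink. The linked service and bucket names are hypothetical.

    from azure.mgmt.datafactory.models import (
        AmazonRedshiftSource, LinkedServiceReference, RedshiftUnloadSettings)

    redshift_source = AmazonRedshiftSource(
        query='select * from public.lineitem',
        redshift_unload_settings=RedshiftUnloadSettings(
            s3_linked_service_name=LinkedServiceReference(reference_name='StagingS3'),
            bucket_name='staging-bucket'))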
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The Amazon Redshift table name. Type: string (or Expression + with resultType string). + :type table: object + :param amazon_redshift_table_dataset_schema: The Amazon Redshift schema + name. Type: string (or Expression with resultType string). + :type amazon_redshift_table_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'amazon_redshift_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AmazonRedshiftTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.amazon_redshift_table_dataset_schema = kwargs.get('amazon_redshift_table_dataset_schema', None) + self.type = 'AmazonRedshiftTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_table_dataset_py3.py new file mode 100644 index 000000000000..ceceaaba43e4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_table_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AmazonRedshiftTableDataset(Dataset): + """The Amazon Redshift table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The Amazon Redshift table name. Type: string (or Expression + with resultType string). + :type table: object + :param amazon_redshift_table_dataset_schema: The Amazon Redshift schema + name. Type: string (or Expression with resultType string). 
+ :type amazon_redshift_table_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'amazon_redshift_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, amazon_redshift_table_dataset_schema=None, **kwargs) -> None: + super(AmazonRedshiftTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.table = table + self.amazon_redshift_table_dataset_schema = amazon_redshift_table_dataset_schema + self.type = 'AmazonRedshiftTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset.py new file mode 100644 index 000000000000..e91a5ba26131 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset.py @@ -0,0 +1,107 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AmazonS3Dataset(Dataset): + """A single Amazon Simple Storage Service (S3) object or a set of S3 objects. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
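Since 'table_name' is flagged above as headed for retirement, a sketch (illustrative, with hypothetical names) using the preferred schema-plus-table pair:

    from azure.mgmt.datafactory.models import (
        AmazonRedshiftTableDataset, LinkedServiceReference)

    redshift_dataset = AmazonRedshiftTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='RedshiftLinkedService'),
        table='lineitem',
        amazon_redshift_table_dataset_schema='public')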
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param bucket_name: Required. The name of the Amazon S3 bucket. Type: + string (or Expression with resultType string). + :type bucket_name: object + :param key: The key of the Amazon S3 object. Type: string (or Expression + with resultType string). + :type key: object + :param prefix: The prefix filter for the S3 object name. Type: string (or + Expression with resultType string). + :type prefix: object + :param version: The version for the S3 object. Type: string (or Expression + with resultType string). + :type version: object + :param modified_datetime_start: The start of S3 object's modified + datetime. Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of S3 object's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_end: object + :param format: The format of files. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param compression: The data compression method used for the Amazon S3 + object. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'bucket_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'bucket_name': {'key': 'typeProperties.bucketName', 'type': 'object'}, + 'key': {'key': 'typeProperties.key', 'type': 'object'}, + 'prefix': {'key': 'typeProperties.prefix', 'type': 'object'}, + 'version': {'key': 'typeProperties.version', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, **kwargs): + super(AmazonS3Dataset, self).__init__(**kwargs) + self.bucket_name = kwargs.get('bucket_name', None) + self.key = kwargs.get('key', None) + self.prefix = kwargs.get('prefix', None) + self.version = kwargs.get('version', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + self.format = kwargs.get('format', None) + self.compression = kwargs.get('compression', None) + self.type = 'AmazonS3Object' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset_py3.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset_py3.py new file mode 100644 index 000000000000..d84ae48b2a46 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset_py3.py @@ -0,0 +1,107 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AmazonS3Dataset(Dataset): + """A single Amazon Simple Storage Service (S3) object or a set of S3 objects. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param bucket_name: Required. The name of the Amazon S3 bucket. Type: + string (or Expression with resultType string). + :type bucket_name: object + :param key: The key of the Amazon S3 object. Type: string (or Expression + with resultType string). + :type key: object + :param prefix: The prefix filter for the S3 object name. Type: string (or + Expression with resultType string). + :type prefix: object + :param version: The version for the S3 object. Type: string (or Expression + with resultType string). + :type version: object + :param modified_datetime_start: The start of S3 object's modified + datetime. Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of S3 object's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_end: object + :param format: The format of files. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param compression: The data compression method used for the Amazon S3 + object. 
+ :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'bucket_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'bucket_name': {'key': 'typeProperties.bucketName', 'type': 'object'}, + 'key': {'key': 'typeProperties.key', 'type': 'object'}, + 'prefix': {'key': 'typeProperties.prefix', 'type': 'object'}, + 'version': {'key': 'typeProperties.version', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, *, linked_service_name, bucket_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, key=None, prefix=None, version=None, modified_datetime_start=None, modified_datetime_end=None, format=None, compression=None, **kwargs) -> None: + super(AmazonS3Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.bucket_name = bucket_name + self.key = key + self.prefix = prefix + self.version = version + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + self.format = format + self.compression = compression + self.type = 'AmazonS3Object' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service.py new file mode 100644 index 000000000000..250518c1a7ec --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AmazonS3LinkedService(LinkedService): + """Linked service for Amazon S3. + + All required parameters must be populated in order to send to Azure. 
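A minimal sketch (not part of the patch) of the S3 dataset above; 'key' pins a single object while 'prefix' would filter a set, and all names are placeholders.

    from azure.mgmt.datafactory.models import AmazonS3Dataset, LinkedServiceReference

    s3_dataset = AmazonS3Dataset(
        linked_service_name=LinkedServiceReference(reference_name='AmazonS3LinkedService'),
        bucket_name='my-bucket',      # required
        key='raw/2019/06/data.csv')   # optional: a single S3 object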
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param access_key_id: The access key identifier of the Amazon S3 Identity + and Access Management (IAM) user. Type: string (or Expression with + resultType string). + :type access_key_id: object + :param secret_access_key: The secret access key of the Amazon S3 Identity + and Access Management (IAM) user. + :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase + :param service_url: This value specifies the endpoint to access with the + S3 Connector. This is an optional property; change it only if you want to + try a different service endpoint or want to switch between https and http. + Type: string (or Expression with resultType string). + :type service_url: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, + 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, + 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AmazonS3LinkedService, self).__init__(**kwargs) + self.access_key_id = kwargs.get('access_key_id', None) + self.secret_access_key = kwargs.get('secret_access_key', None) + self.service_url = kwargs.get('service_url', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AmazonS3' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service_py3.py new file mode 100644 index 000000000000..8d136bb71fc0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AmazonS3LinkedService(LinkedService): + """Linked service for Amazon S3. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param access_key_id: The access key identifier of the Amazon S3 Identity + and Access Management (IAM) user. Type: string (or Expression with + resultType string). + :type access_key_id: object + :param secret_access_key: The secret access key of the Amazon S3 Identity + and Access Management (IAM) user. + :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase + :param service_url: This value specifies the endpoint to access with the + S3 Connector. This is an optional property; change it only if you want to + try a different service endpoint or want to switch between https and http. + Type: string (or Expression with resultType string). + :type service_url: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, + 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, + 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_key_id=None, secret_access_key=None, service_url=None, encrypted_credential=None, **kwargs) -> None: + super(AmazonS3LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.access_key_id = access_key_id + self.secret_access_key = secret_access_key + self.service_url = service_url + self.encrypted_credential = encrypted_credential + self.type = 'AmazonS3' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location.py new file mode 100644 index 000000000000..74c77a16f0f2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location.py @@ -0,0 +1,55 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class AmazonS3Location(DatasetLocation): + """The location of amazon S3 dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param bucket_name: Specify the bucketName of amazon S3. Type: string (or + Expression with resultType string) + :type bucket_name: object + :param version: Specify the version of amazon S3. Type: string (or + Expression with resultType string). 
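Illustrative usage of the S3 linked service above, with placeholder credentials; 'service_url' is left unset so the default S3 endpoint applies.

    from azure.mgmt.datafactory.models import AmazonS3LinkedService, SecureString

    s3_linked_service = AmazonS3LinkedService(
        access_key_id='<access-key-id>',
        secret_access_key=SecureString(value='<secret-access-key>'))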
+ :type version: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + 'version': {'key': 'version', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AmazonS3Location, self).__init__(**kwargs) + self.bucket_name = kwargs.get('bucket_name', None) + self.version = kwargs.get('version', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location_py3.py new file mode 100644 index 000000000000..36afce341ada --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location_py3.py @@ -0,0 +1,55 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class AmazonS3Location(DatasetLocation): + """The location of amazon S3 dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param bucket_name: Specify the bucketName of amazon S3. Type: string (or + Expression with resultType string) + :type bucket_name: object + :param version: Specify the version of amazon S3. Type: string (or + Expression with resultType string). 
+ :type version: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + 'version': {'key': 'version', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, bucket_name=None, version=None, **kwargs) -> None: + super(AmazonS3Location, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) + self.bucket_name = bucket_name + self.version = version diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings.py new file mode 100644 index 000000000000..e83910136070 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings import StoreReadSettings + + +class AmazonS3ReadSettings(StoreReadSettings): + """Amazon S3 read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param prefix: The prefix filter for the S3 object name. Type: string (or + Expression with resultType string). + :type prefix: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string).
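A hedged sketch of the location model defined just above. Unlike most models here, 'type' is caller-supplied; the discriminator string 'AmazonS3Location' is an assumption from the class name rather than something stated in this patch.

    from azure.mgmt.datafactory.models import AmazonS3Location

    s3_location = AmazonS3Location(
        type='AmazonS3Location',   # assumed discriminator value
        bucket_name='my-bucket',
        folder_path='raw/2019',
        file_name='data.csv')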
+ :type modified_datetime_end: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'recursive': {'key': 'recursive', 'type': 'object'},
+ 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
+ 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
+ 'prefix': {'key': 'prefix', 'type': 'object'},
+ 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'},
+ 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'},
+ 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(AmazonS3ReadSettings, self).__init__(**kwargs)
+ self.recursive = kwargs.get('recursive', None)
+ self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None)
+ self.wildcard_file_name = kwargs.get('wildcard_file_name', None)
+ self.prefix = kwargs.get('prefix', None)
+ self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None)
+ self.modified_datetime_start = kwargs.get('modified_datetime_start', None)
+ self.modified_datetime_end = kwargs.get('modified_datetime_end', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings_py3.py
new file mode 100644
index 000000000000..79645a869ac8
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings_py3.py
@@ -0,0 +1,78 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .store_read_settings_py3 import StoreReadSettings
+
+
+class AmazonS3ReadSettings(StoreReadSettings):
+ """Amazon S3 read settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. The read setting type.
+ :type type: str
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param recursive: If true, files under the folder path will be read
+ recursively. Default is true. Type: boolean (or Expression with resultType
+ boolean).
+ :type recursive: object
+ :param wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or
+ Expression with resultType string).
+ :type wildcard_folder_path: object
+ :param wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or
+ Expression with resultType string).
+ :type wildcard_file_name: object
+ :param prefix: The prefix filter for the S3 object name. Type: string (or
+ Expression with resultType string).
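A minimal sketch of the new read settings in use, assuming AmazonS3ReadSettings is exported from azure.mgmt.datafactory.models; the wildcard and prefix values are hypothetical, and each field may also be an ADF expression per the docstrings above:

    from azure.mgmt.datafactory.models import AmazonS3ReadSettings

    # Recursive wildcard read over a bucket prefix; values are placeholders.
    read_settings = AmazonS3ReadSettings(
        type='AmazonS3ReadSettings',
        recursive=True,
        wildcard_folder_path='raw/2019/*',
        wildcard_file_name='*.csv',
        enable_partition_discovery=False,
    )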
+ :type prefix: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'prefix': {'key': 'prefix', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(AmazonS3ReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.prefix = prefix + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity.py new file mode 100644 index 000000000000..36a25e959061 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity import ControlActivity + + +class AppendVariableActivity(ControlActivity): + """Append value for a Variable of type Array. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. 
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param variable_name: Name of the variable whose value needs to be + appended to. + :type variable_name: str + :param value: Value to be appended. Could be a static value or Expression + :type value: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, + 'value': {'key': 'typeProperties.value', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AppendVariableActivity, self).__init__(**kwargs) + self.variable_name = kwargs.get('variable_name', None) + self.value = kwargs.get('value', None) + self.type = 'AppendVariable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity_py3.py new file mode 100644 index 000000000000..4526a6e4a45e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity_py3.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity_py3 import ControlActivity + + +class AppendVariableActivity(ControlActivity): + """Append value for a Variable of type Array. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param variable_name: Name of the variable whose value needs to be + appended to. + :type variable_name: str + :param value: Value to be appended. 
Could be a static value or Expression + :type value: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, + 'value': {'key': 'typeProperties.value', 'type': 'object'}, + } + + def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, variable_name: str=None, value=None, **kwargs) -> None: + super(AppendVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.variable_name = variable_name + self.value = value + self.type = 'AppendVariable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset.py new file mode 100644 index 000000000000..d206ac99ab85 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset.py @@ -0,0 +1,83 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AvroDataset(Dataset): + """Avro dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the avro storage. 
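A usage sketch for the append-variable activity just defined, assuming the model is exported from azure.mgmt.datafactory.models; the variable name and expression below are hypothetical:

    from azure.mgmt.datafactory.models import AppendVariableActivity

    # Appends the current run id to an array variable defined on the pipeline.
    append_activity = AppendVariableActivity(
        name='AppendRunId',
        variable_name='run_ids',        # hypothetical pipeline variable
        value='@pipeline().RunId',      # static value or expression
    )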
+ :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param avro_compression_codec: Possible values include: 'none', 'deflate', + 'snappy', 'xz', 'bzip2' + :type avro_compression_codec: str or + ~azure.mgmt.datafactory.models.AvroCompressionCodec + :param avro_compression_level: + :type avro_compression_level: int + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + 'avro_compression_level': {'maximum': 9, 'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'}, + 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(AvroDataset, self).__init__(**kwargs) + self.location = kwargs.get('location', None) + self.avro_compression_codec = kwargs.get('avro_compression_codec', None) + self.avro_compression_level = kwargs.get('avro_compression_level', None) + self.type = 'Avro' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset_py3.py new file mode 100644 index 000000000000..f0f44dbbd786 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset_py3.py @@ -0,0 +1,83 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AvroDataset(Dataset): + """Avro dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the avro storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param avro_compression_codec: Possible values include: 'none', 'deflate', + 'snappy', 'xz', 'bzip2' + :type avro_compression_codec: str or + ~azure.mgmt.datafactory.models.AvroCompressionCodec + :param avro_compression_level: + :type avro_compression_level: int + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + 'avro_compression_level': {'maximum': 9, 'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'}, + 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, + } + + def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, avro_compression_codec=None, avro_compression_level: int=None, **kwargs) -> None: + super(AvroDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.location = location + self.avro_compression_codec = avro_compression_codec + self.avro_compression_level = avro_compression_level + self.type = 'Avro' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format.py new file mode 100644 index 000000000000..f0346a76080c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_storage_format import DatasetStorageFormat + + +class AvroFormat(DatasetStorageFormat): + """The data stored in Avro format. 
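A sketch tying the Avro dataset to the S3 location type shown earlier in this patch, assuming both models are exported from azure.mgmt.datafactory.models; the linked service and bucket names are hypothetical. Note that avro_compression_level is constrained to the 1-9 range declared in the _validation map:

    from azure.mgmt.datafactory.models import (
        AmazonS3Location, AvroDataset, LinkedServiceReference)

    avro_dataset = AvroDataset(
        linked_service_name=LinkedServiceReference(reference_name='MyS3Service'),
        location=AmazonS3Location(type='AmazonS3Location', bucket_name='my-bucket'),
        avro_compression_codec='deflate',
        avro_compression_level=5,  # must be between 1 and 9
    )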
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param serializer: Serializer. Type: string (or Expression with resultType + string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with + resultType string). + :type deserializer: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AvroFormat, self).__init__(**kwargs) + self.type = 'AvroFormat' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format_py3.py new file mode 100644 index 000000000000..35d459c4b2a6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_storage_format_py3 import DatasetStorageFormat + + +class AvroFormat(DatasetStorageFormat): + """The data stored in Avro format. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param serializer: Serializer. Type: string (or Expression with resultType + string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with + resultType string). + :type deserializer: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, **kwargs) -> None: + super(AvroFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) + self.type = 'AvroFormat' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink.py new file mode 100644 index 000000000000..34d4ceb1e0f6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class AvroSink(CopySink): + """A copy activity Avro sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Avro store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :param format_settings: Avro format settings. + :type format_settings: ~azure.mgmt.datafactory.models.AvroWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'}, + } + + def __init__(self, **kwargs): + super(AvroSink, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) + self.type = 'AvroSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink_py3.py new file mode 100644 index 000000000000..16363092dff2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AvroSink(CopySink): + """A copy activity Avro sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Avro store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :param format_settings: Avro format settings. + :type format_settings: ~azure.mgmt.datafactory.models.AvroWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: + super(AvroSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.format_settings = format_settings + self.type = 'AvroSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source.py new file mode 100644 index 000000000000..3ea2e7a2a76f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
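A sketch of the sink together with its format settings; AvroWriteSettings is introduced further down in this patch, the record name and namespace are hypothetical, and both models are assumed to be exported from azure.mgmt.datafactory.models:

    from azure.mgmt.datafactory.models import AvroSink, AvroWriteSettings

    avro_sink = AvroSink(
        write_batch_size=10000,
        format_settings=AvroWriteSettings(
            type='AvroWriteSettings',
            record_name='AdfRecord',             # top-level Avro record name
            record_namespace='com.example.adf',  # hypothetical namespace
        ),
    )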
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class AvroSource(CopySource): + """A copy activity Avro source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Avro store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__(self, **kwargs): + super(AvroSource, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.type = 'AvroSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source_py3.py new file mode 100644 index 000000000000..74b5e6db0fe2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source_py3.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AvroSource(CopySource): + """A copy activity Avro source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. 
Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Avro store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + super(AvroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.type = 'AvroSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings.py new file mode 100644 index 000000000000..ec068ee29885 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .format_write_settings import FormatWriteSettings + + +class AvroWriteSettings(FormatWriteSettings): + """Avro write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param record_name: Top level record name in write result, which is + required in AVRO spec. + :type record_name: str + :param record_namespace: Record namespace in the write result. 
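A matching source-side sketch, reusing the S3 read settings from earlier in this patch (assuming exports from azure.mgmt.datafactory.models; the wildcard filter is hypothetical):

    from azure.mgmt.datafactory.models import AmazonS3ReadSettings, AvroSource

    avro_source = AvroSource(
        store_settings=AmazonS3ReadSettings(
            type='AmazonS3ReadSettings',
            wildcard_file_name='*.avro',  # hypothetical filter
        ),
        max_concurrent_connections=4,
    )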
+ :type record_namespace: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'record_name': {'key': 'recordName', 'type': 'str'}, + 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AvroWriteSettings, self).__init__(**kwargs) + self.record_name = kwargs.get('record_name', None) + self.record_namespace = kwargs.get('record_namespace', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings_py3.py new file mode 100644 index 000000000000..d14ebc4d1d29 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .format_write_settings_py3 import FormatWriteSettings + + +class AvroWriteSettings(FormatWriteSettings): + """Avro write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param record_name: Top level record name in write result, which is + required in AVRO spec. + :type record_name: str + :param record_namespace: Record namespace in the write result. + :type record_namespace: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'record_name': {'key': 'recordName', 'type': 'str'}, + 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, + } + + def __init__(self, *, type: str, additional_properties=None, record_name: str=None, record_namespace: str=None, **kwargs) -> None: + super(AvroWriteSettings, self).__init__(additional_properties=additional_properties, type=type, **kwargs) + self.record_name = record_name + self.record_namespace = record_namespace diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service.py new file mode 100644 index 000000000000..986023308e23 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service.py @@ -0,0 +1,88 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureBatchLinkedService(LinkedService): + """Azure Batch linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param account_name: Required. The Azure Batch account name. Type: string + (or Expression with resultType string). + :type account_name: object + :param access_key: The Azure Batch account access key. + :type access_key: ~azure.mgmt.datafactory.models.SecretBase + :param batch_uri: Required. The Azure Batch URI. Type: string (or + Expression with resultType string). + :type batch_uri: object + :param pool_name: Required. The Azure Batch pool name. Type: string (or + Expression with resultType string). + :type pool_name: object + :param linked_service_name: Required. The Azure Storage linked service + reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'account_name': {'required': True}, + 'batch_uri': {'required': True}, + 'pool_name': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, + 'access_key': {'key': 'typeProperties.accessKey', 'type': 'SecretBase'}, + 'batch_uri': {'key': 'typeProperties.batchUri', 'type': 'object'}, + 'pool_name': {'key': 'typeProperties.poolName', 'type': 'object'}, + 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBatchLinkedService, self).__init__(**kwargs) + self.account_name = kwargs.get('account_name', None) + self.access_key = kwargs.get('access_key', None) + self.batch_uri = kwargs.get('batch_uri', None) + self.pool_name = kwargs.get('pool_name', None) + self.linked_service_name = kwargs.get('linked_service_name', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureBatch' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service_py3.py new file mode 100644 index 000000000000..e7d33dfb342a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service_py3.py @@ -0,0 +1,88 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureBatchLinkedService(LinkedService): + """Azure Batch linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param account_name: Required. The Azure Batch account name. Type: string + (or Expression with resultType string). 
+ :type account_name: object + :param access_key: The Azure Batch account access key. + :type access_key: ~azure.mgmt.datafactory.models.SecretBase + :param batch_uri: Required. The Azure Batch URI. Type: string (or + Expression with resultType string). + :type batch_uri: object + :param pool_name: Required. The Azure Batch pool name. Type: string (or + Expression with resultType string). + :type pool_name: object + :param linked_service_name: Required. The Azure Storage linked service + reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'account_name': {'required': True}, + 'batch_uri': {'required': True}, + 'pool_name': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, + 'access_key': {'key': 'typeProperties.accessKey', 'type': 'SecretBase'}, + 'batch_uri': {'key': 'typeProperties.batchUri', 'type': 'object'}, + 'pool_name': {'key': 'typeProperties.poolName', 'type': 'object'}, + 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, account_name, batch_uri, pool_name, linked_service_name, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_key=None, encrypted_credential=None, **kwargs) -> None: + super(AzureBatchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.account_name = account_name + self.access_key = access_key + self.batch_uri = batch_uri + self.pool_name = pool_name + self.linked_service_name = linked_service_name + self.encrypted_credential = encrypted_credential + self.type = 'AzureBatch' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset.py new file mode 100644 index 000000000000..01814cf8f9a9 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset.py @@ -0,0 +1,100 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
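A construction sketch for the Batch linked service, assuming exports from azure.mgmt.datafactory.models and that SecureString is the SecretBase implementation used for inline secrets; all account values below are placeholders:

    from azure.mgmt.datafactory.models import (
        AzureBatchLinkedService, LinkedServiceReference, SecureString)

    batch_service = AzureBatchLinkedService(
        account_name='mybatchaccount',
        batch_uri='https://mybatchaccount.westus2.batch.azure.com',
        pool_name='adf-pool',
        linked_service_name=LinkedServiceReference(reference_name='MyStorage'),
        access_key=SecureString(value='<account-key>'),  # placeholder secret
    )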
+# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AzureBlobDataset(Dataset): + """The Azure Blob storage. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param folder_path: The path of the Azure Blob storage. Type: string (or + Expression with resultType string). + :type folder_path: object + :param table_root_location: The root of blob path. Type: string (or + Expression with resultType string). + :type table_root_location: object + :param file_name: The name of the Azure Blob. Type: string (or Expression + with resultType string). + :type file_name: object + :param modified_datetime_start: The start of Azure Blob's modified + datetime. Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of Azure Blob's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_end: object + :param format: The format of the Azure Blob storage. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param compression: The data compression method used for the blob storage. 
+ :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'table_root_location': {'key': 'typeProperties.tableRootLocation', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, **kwargs): + super(AzureBlobDataset, self).__init__(**kwargs) + self.folder_path = kwargs.get('folder_path', None) + self.table_root_location = kwargs.get('table_root_location', None) + self.file_name = kwargs.get('file_name', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + self.format = kwargs.get('format', None) + self.compression = kwargs.get('compression', None) + self.type = 'AzureBlob' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset_py3.py new file mode 100644 index 000000000000..706c39deb289 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset_py3.py @@ -0,0 +1,100 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureBlobDataset(Dataset): + """The Azure Blob storage. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. 
Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param folder_path: The path of the Azure Blob storage. Type: string (or + Expression with resultType string). + :type folder_path: object + :param table_root_location: The root of blob path. Type: string (or + Expression with resultType string). + :type table_root_location: object + :param file_name: The name of the Azure Blob. Type: string (or Expression + with resultType string). + :type file_name: object + :param modified_datetime_start: The start of Azure Blob's modified + datetime. Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of Azure Blob's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_end: object + :param format: The format of the Azure Blob storage. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param compression: The data compression method used for the blob storage. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'table_root_location': {'key': 'typeProperties.tableRootLocation', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, table_root_location=None, file_name=None, modified_datetime_start=None, modified_datetime_end=None, format=None, compression=None, **kwargs) -> None: + super(AzureBlobDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.folder_path = folder_path + self.table_root_location = 
table_root_location + self.file_name = file_name + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + self.format = format + self.compression = compression + self.type = 'AzureBlob' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset.py new file mode 100644 index 000000000000..0ef62ff7122f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AzureBlobFSDataset(Dataset): + """The Azure Data Lake Storage Gen2 storage. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param folder_path: The path of the Azure Data Lake Storage Gen2 storage. + Type: string (or Expression with resultType string). + :type folder_path: object + :param file_name: The name of the Azure Data Lake Storage Gen2. Type: + string (or Expression with resultType string). + :type file_name: object + :param format: The format of the Azure Data Lake Storage Gen2 storage. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param compression: The data compression method used for the blob storage. 
+ :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSDataset, self).__init__(**kwargs) + self.folder_path = kwargs.get('folder_path', None) + self.file_name = kwargs.get('file_name', None) + self.format = kwargs.get('format', None) + self.compression = kwargs.get('compression', None) + self.type = 'AzureBlobFSFile' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset_py3.py new file mode 100644 index 000000000000..82136a683fd3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset_py3.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureBlobFSDataset(Dataset): + """The Azure Data Lake Storage Gen2 storage. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. 
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param folder_path: The path of the Azure Data Lake Storage Gen2 storage.
+     Type: string (or Expression with resultType string).
+    :type folder_path: object
+    :param file_name: The name of the file in the Azure Data Lake Storage
+     Gen2 storage. Type: string (or Expression with resultType string).
+    :type file_name: object
+    :param format: The format of the Azure Data Lake Storage Gen2 storage.
+    :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat
+    :param compression: The data compression method used for the blob storage.
+    :type compression: ~azure.mgmt.datafactory.models.DatasetCompression
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'},
+        'file_name': {'key': 'typeProperties.fileName', 'type': 'object'},
+        'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'},
+        'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'},
+    }
+
+    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, format=None, compression=None, **kwargs) -> None:
+        super(AzureBlobFSDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+        self.folder_path = folder_path
+        self.file_name = file_name
+        self.format = format
+        self.compression = compression
+        self.type = 'AzureBlobFSFile'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service.py
new file mode 100644
index 000000000000..262ce976227b
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service.py
@@ -0,0 +1,86 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class AzureBlobFSLinkedService(LinkedService):
+    """Azure Data Lake Storage Gen2 linked service.
+
+    All required parameters must be populated in order to send to Azure.
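+
+    Example (an illustrative sketch only, not generated code; the account
+    URL, application ID, key and tenant below are hypothetical
+    placeholders):
+
+    .. code-block:: python
+
+        from azure.mgmt.datafactory.models import (
+            AzureBlobFSLinkedService, SecureString)
+
+        linked_service = AzureBlobFSLinkedService(
+            url='https://myaccount.dfs.core.windows.net',
+            service_principal_id='<application-id>',
+            service_principal_key=SecureString(value='<application-key>'),
+            tenant='<tenant-id>')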
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param url: Required. Endpoint for the Azure Data Lake Storage Gen2
+     service. Type: string (or Expression with resultType string).
+    :type url: object
+    :param account_key: Account key for the Azure Data Lake Storage Gen2
+     service. Type: string (or Expression with resultType string).
+    :type account_key: object
+    :param service_principal_id: The ID of the application used to
+     authenticate against the Azure Data Lake Storage Gen2 account. Type:
+     string (or Expression with resultType string).
+    :type service_principal_id: object
+    :param service_principal_key: The key of the application used to
+     authenticate against the Azure Data Lake Storage Gen2 account.
+    :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
+    :param tenant: The name or ID of the tenant to which the service principal
+     belongs. Type: string (or Expression with resultType string).
+    :type tenant: object
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.account_key = kwargs.get('account_key', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureBlobFS' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service_py3.py new file mode 100644 index 000000000000..f0d555078bf7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureBlobFSLinkedService(LinkedService): + """Azure Data Lake Storage Gen2 linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. Endpoint for the Azure Data Lake Storage Gen2 + service. Type: string (or Expression with resultType string). + :type url: object + :param account_key: Account key for the Azure Data Lake Storage Gen2 + service. 
Type: string (or Expression with resultType string). + :type account_key: object + :param service_principal_id: The ID of the application used to + authenticate against the Azure Data Lake Storage Gen2 account. Type: + string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The Key of the application used to + authenticate against the Azure Data Lake Storage Gen2 account. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, account_key=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: + super(AzureBlobFSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.account_key = account_key + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential + self.type = 'AzureBlobFS' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location.py new file mode 100644 index 000000000000..c21525bbac4c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class AzureBlobFSLocation(DatasetLocation): + """The location of azure blobFS dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param file_system: Specify the fileSystem of azure blobFS. Type: string + (or Expression with resultType string). + :type file_system: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'file_system': {'key': 'fileSystem', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSLocation, self).__init__(**kwargs) + self.file_system = kwargs.get('file_system', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location_py3.py new file mode 100644 index 000000000000..afbae52fdeb0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location_py3.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class AzureBlobFSLocation(DatasetLocation): + """The location of azure blobFS dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param file_system: Specify the fileSystem of azure blobFS. Type: string + (or Expression with resultType string). 
+ :type file_system: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'file_system': {'key': 'fileSystem', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, file_system=None, **kwargs) -> None: + super(AzureBlobFSLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) + self.file_system = file_system diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings.py new file mode 100644 index 000000000000..6d80ce72ea57 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings import StoreReadSettings + + +class AzureBlobFSReadSettings(StoreReadSettings): + """Azure blobFS read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Azure blobFS wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). 
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSReadSettings, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings_py3.py new file mode 100644 index 000000000000..af4746e84f8e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings_py3 import StoreReadSettings + + +class AzureBlobFSReadSettings(StoreReadSettings): + """Azure blobFS read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Azure blobFS wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. 
+ Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(AzureBlobFSReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py new file mode 100644 index 000000000000..a47b173c6581 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class AzureBlobFSSink(CopySink): + """A copy activity Azure Data Lake Storage Gen2 sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. 
Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSSink, self).__init__(**kwargs) + self.copy_behavior = kwargs.get('copy_behavior', None) + self.type = 'AzureBlobFSSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py new file mode 100644 index 000000000000..e2b28bf30a8c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzureBlobFSSink(CopySink): + """A copy activity Azure Data Lake Storage Gen2 sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
+ :type type: str + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(AzureBlobFSSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.copy_behavior = copy_behavior + self.type = 'AzureBlobFSSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source.py new file mode 100644 index 000000000000..0252ffd5ba8f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class AzureBlobFSSource(CopySource): + """A copy activity Azure BlobFS source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param treat_empty_as_null: Treat empty as null. Type: boolean (or + Expression with resultType boolean). + :type treat_empty_as_null: object + :param skip_header_line_count: Number of header lines to skip from each + blob. Type: integer (or Expression with resultType integer). + :type skip_header_line_count: object + :param recursive: If true, files under the folder path will be read + recursively. 
Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, + 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSSource, self).__init__(**kwargs) + self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None) + self.skip_header_line_count = kwargs.get('skip_header_line_count', None) + self.recursive = kwargs.get('recursive', None) + self.type = 'AzureBlobFSSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source_py3.py new file mode 100644 index 000000000000..5b512c1f334f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source_py3.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AzureBlobFSSource(CopySource): + """A copy activity Azure BlobFS source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param treat_empty_as_null: Treat empty as null. Type: boolean (or + Expression with resultType boolean). + :type treat_empty_as_null: object + :param skip_header_line_count: Number of header lines to skip from each + blob. Type: integer (or Expression with resultType integer). + :type skip_header_line_count: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). 
+ :type recursive: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, + 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, treat_empty_as_null=None, skip_header_line_count=None, recursive=None, **kwargs) -> None: + super(AzureBlobFSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.treat_empty_as_null = treat_empty_as_null + self.skip_header_line_count = skip_header_line_count + self.recursive = recursive + self.type = 'AzureBlobFSSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings.py new file mode 100644 index 000000000000..f91971b829f7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_write_settings import StoreWriteSettings + + +class AzureBlobFSWriteSettings(StoreWriteSettings): + """Azure blobFS write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param type: Required. Constant filled by server. + :type type: str + :param block_size_in_mb: Indicates the block size(MB) when writing data to + blob. Type: integer (or Expression with resultType integer). 
+ :type block_size_in_mb: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSWriteSettings, self).__init__(**kwargs) + self.block_size_in_mb = kwargs.get('block_size_in_mb', None) + self.type = 'AzureBlobFSWriteSettings' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings_py3.py new file mode 100644 index 000000000000..351eae467183 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings_py3.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_write_settings_py3 import StoreWriteSettings + + +class AzureBlobFSWriteSettings(StoreWriteSettings): + """Azure blobFS write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param type: Required. Constant filled by server. + :type type: str + :param block_size_in_mb: Indicates the block size(MB) when writing data to + blob. Type: integer (or Expression with resultType integer). 
+ :type block_size_in_mb: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, block_size_in_mb=None, **kwargs) -> None: + super(AzureBlobFSWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + self.block_size_in_mb = block_size_in_mb + self.type = 'AzureBlobFSWriteSettings' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service.py new file mode 100644 index 000000000000..5246e02ab9b4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service.py @@ -0,0 +1,104 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureBlobStorageLinkedService(LinkedService): + """The azure blob storage linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: The connection string. It is mutually exclusive + with sasUri, serviceEndpoint property. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param account_key: The Azure key vault secret reference of accountKey in + connection string. + :type account_key: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param sas_uri: SAS URI of the Azure Blob Storage resource. It is mutually + exclusive with connectionString, serviceEndpoint property. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type sas_uri: object + :param sas_token: The Azure key vault secret reference of sasToken in sas + uri. + :type sas_token: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param service_endpoint: Blob service endpoint of the Azure Blob Storage + resource. 
It is mutually exclusive with connectionString, sasUri property. + :type service_endpoint: str + :param service_principal_id: The ID of the service principal used to + authenticate against Azure SQL Data Warehouse. Type: string (or Expression + with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to + authenticate against Azure SQL Data Warehouse. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, + 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, + 'service_endpoint': {'key': 'typeProperties.serviceEndpoint', 'type': 'str'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AzureBlobStorageLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.account_key = kwargs.get('account_key', None) + self.sas_uri = kwargs.get('sas_uri', None) + self.sas_token = kwargs.get('sas_token', None) + self.service_endpoint = kwargs.get('service_endpoint', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureBlobStorage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service_py3.py new file mode 100644 index 000000000000..ba0a511532b4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service_py3.py @@ -0,0 +1,104 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class AzureBlobStorageLinkedService(LinkedService):
+    """The Azure Blob Storage linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param connection_string: The connection string. It is mutually exclusive
+     with sasUri, serviceEndpoint property. Type: string, SecureString or
+     AzureKeyVaultSecretReference.
+    :type connection_string: object
+    :param account_key: The Azure key vault secret reference of accountKey in
+     connection string.
+    :type account_key:
+     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+    :param sas_uri: SAS URI of the Azure Blob Storage resource. It is mutually
+     exclusive with connectionString, serviceEndpoint property. Type: string,
+     SecureString or AzureKeyVaultSecretReference.
+    :type sas_uri: object
+    :param sas_token: The Azure key vault secret reference of sasToken in the
+     SAS URI.
+    :type sas_token:
+     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+    :param service_endpoint: Blob service endpoint of the Azure Blob Storage
+     resource. It is mutually exclusive with connectionString, sasUri property.
+    :type service_endpoint: str
+    :param service_principal_id: The ID of the service principal used to
+     authenticate against Azure Blob Storage. Type: string (or Expression
+     with resultType string).
+    :type service_principal_id: object
+    :param service_principal_key: The key of the service principal used to
+     authenticate against Azure Blob Storage.
+    :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
+    :param tenant: The name or ID of the tenant to which the service principal
+     belongs. Type: string (or Expression with resultType string).
+    :type tenant: object
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: str
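+
+    Example (an illustrative sketch only, not generated code; the connection
+    string below is a hypothetical placeholder):
+
+    .. code-block:: python
+
+        from azure.mgmt.datafactory.models import (
+            AzureBlobStorageLinkedService, SecureString)
+
+        linked_service = AzureBlobStorageLinkedService(
+            connection_string=SecureString(
+                value='DefaultEndpointsProtocol=https;AccountName=<name>;'
+                      'AccountKey=<key>;EndpointSuffix=core.windows.net'))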
+ :type encrypted_credential: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, + 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, + 'service_endpoint': {'key': 'typeProperties.serviceEndpoint', 'type': 'str'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, account_key=None, sas_uri=None, sas_token=None, service_endpoint: str=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential: str=None, **kwargs) -> None: + super(AzureBlobStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.account_key = account_key + self.sas_uri = sas_uri + self.sas_token = sas_token + self.service_endpoint = service_endpoint + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential + self.type = 'AzureBlobStorage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location.py new file mode 100644 index 000000000000..1efbbeaec352 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class AzureBlobStorageLocation(DatasetLocation): + """The location of azure blob dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. 
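The connectionString, sasUri and serviceEndpoint options above are mutually exclusive ways of addressing the same account. As a minimal usage sketch of the linked service just defined (assuming the usual azure.mgmt.datafactory.models import path; every endpoint, ID and secret below is a placeholder, not a value from this patch):

from azure.mgmt.datafactory.models import (
    AzureBlobStorageLinkedService, SecureString)

# Service-principal authentication via the blob service endpoint;
# connection_string and sas_uri stay unset because the three options
# are mutually exclusive. All values are placeholders.
blob_ls = AzureBlobStorageLinkedService(
    service_endpoint='https://examplestorage.blob.core.windows.net',
    service_principal_id='<application-id>',
    service_principal_key=SecureString(value='<service-principal-key>'),
    tenant='<tenant-id>')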
+ :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param container: Specify the container of azure blob. Type: string (or + Expression with resultType string). + :type container: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'container': {'key': 'container', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobStorageLocation, self).__init__(**kwargs) + self.container = kwargs.get('container', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location_py3.py new file mode 100644 index 000000000000..63b122573039 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location_py3.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class AzureBlobStorageLocation(DatasetLocation): + """The location of azure blob dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param container: Specify the container of azure blob. Type: string (or + Expression with resultType string). 
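A short construction sketch for the location model above (container, folder and file values are made up; note that this constructor requires the type discriminator explicitly):

from azure.mgmt.datafactory.models import AzureBlobStorageLocation

# Points a dataset at container 'raw', folder 'events/2019',
# file 'day1.csv'; all three values are illustrative.
location = AzureBlobStorageLocation(
    type='AzureBlobStorageLocation',
    container='raw',
    folder_path='events/2019',
    file_name='day1.csv')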
+ :type container: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'container': {'key': 'container', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, container=None, **kwargs) -> None: + super(AzureBlobStorageLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) + self.container = container diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings.py new file mode 100644 index 000000000000..42b11cc6de16 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings import StoreReadSettings + + +class AzureBlobStorageReadSettings(StoreReadSettings): + """Azure blob read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Azure blob wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blob wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). 
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobStorageReadSettings, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings_py3.py new file mode 100644 index 000000000000..495ea16afd98 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings_py3 import StoreReadSettings + + +class AzureBlobStorageReadSettings(StoreReadSettings): + """Azure blob read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Azure blob wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blob wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. 
+ Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(AzureBlobStorageReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings.py new file mode 100644 index 000000000000..c2834839f28a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_write_settings import StoreWriteSettings + + +class AzureBlobStorageWriteSettings(StoreWriteSettings): + """Azure blob write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param type: Required. Constant filled by server. + :type type: str + :param block_size_in_mb: Indicates the block size(MB) when writing data to + blob. Type: integer (or Expression with resultType integer). 
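A usage sketch combining the wildcard and modified-datetime filters described above; all values are illustrative:

from azure.mgmt.datafactory.models import AzureBlobStorageReadSettings

# Recursive wildcard read limited to files modified inside a
# one-day window; every value here is a placeholder.
read_settings = AzureBlobStorageReadSettings(
    type='AzureBlobStorageReadSettings',
    recursive=True,
    wildcard_folder_path='events/*',
    wildcard_file_name='*.csv',
    modified_datetime_start='2019-06-01T00:00:00Z',
    modified_datetime_end='2019-06-02T00:00:00Z')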
+ :type block_size_in_mb: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobStorageWriteSettings, self).__init__(**kwargs) + self.block_size_in_mb = kwargs.get('block_size_in_mb', None) + self.type = 'AzureBlobStorageWriteSettings' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings_py3.py new file mode 100644 index 000000000000..a37c83039a8c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings_py3.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_write_settings_py3 import StoreWriteSettings + + +class AzureBlobStorageWriteSettings(StoreWriteSettings): + """Azure blob write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param type: Required. Constant filled by server. + :type type: str + :param block_size_in_mb: Indicates the block size(MB) when writing data to + blob. Type: integer (or Expression with resultType integer). 
+ :type block_size_in_mb: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, block_size_in_mb=None, **kwargs) -> None: + super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + self.block_size_in_mb = block_size_in_mb + self.type = 'AzureBlobStorageWriteSettings' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity.py new file mode 100644 index 000000000000..308d445d1726 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity import ExecutionActivity + + +class AzureDataExplorerCommandActivity(ExecutionActivity): + """Azure Data Explorer command activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param command: Required. A control command, according to the Azure Data + Explorer command syntax. Type: string (or Expression with resultType + string). + :type command: object + :param command_timeout: Control command timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..) 
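For illustration, a write-settings instance with an arbitrary block size; unlike the read settings, the type constant here is filled in by the model itself:

from azure.mgmt.datafactory.models import AzureBlobStorageWriteSettings

# blockSizeInMB tunes block-blob upload chunking; 8 is an arbitrary
# example value, and omitting it falls back to the service default.
write_settings = AzureBlobStorageWriteSettings(
    max_concurrent_connections=4,
    block_size_in_mb=8)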
+ :type command_timeout: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'command': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'command': {'key': 'typeProperties.command', 'type': 'object'}, + 'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataExplorerCommandActivity, self).__init__(**kwargs) + self.command = kwargs.get('command', None) + self.command_timeout = kwargs.get('command_timeout', None) + self.type = 'AzureDataExplorerCommand' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity_py3.py new file mode 100644 index 000000000000..2f04dfddf08f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity_py3.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class AzureDataExplorerCommandActivity(ExecutionActivity): + """Azure Data Explorer command activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param command: Required. A control command, according to the Azure Data + Explorer command syntax. Type: string (or Expression with resultType + string). + :type command: object + :param command_timeout: Control command timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..) 
+ :type command_timeout: object
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'command': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+ 'command': {'key': 'typeProperties.command', 'type': 'object'},
+ 'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'},
+ }
+
+ def __init__(self, *, name: str, command, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, command_timeout=None, **kwargs) -> None:
+ super(AzureDataExplorerCommandActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
+ self.command = command
+ self.command_timeout = command_timeout
+ self.type = 'AzureDataExplorerCommand'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service.py
new file mode 100644
index 000000000000..5e5a9f7560c6
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service.py
@@ -0,0 +1,86 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class AzureDataExplorerLinkedService(LinkedService):
+ """Azure Data Explorer (Kusto) linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param endpoint: Required. The endpoint of Azure Data Explorer (the
+ engine's endpoint). URL will be in the format
+ https://<clusterName>.<regionName>.kusto.windows.net. Type: string (or
+ Expression with resultType string).
+ :type endpoint: object
+ :param service_principal_id: Required.
The ID of the service principal + used to authenticate against Azure Data Explorer. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. The key of the service principal + used to authenticate against Kusto. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param database: Required. Database name for connection. Type: string (or + Expression with resultType string). + :type database: object + :param tenant: Required. The name or ID of the tenant to which the service + principal belongs. Type: string (or Expression with resultType string). + :type tenant: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + 'database': {'required': True}, + 'tenant': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataExplorerLinkedService, self).__init__(**kwargs) + self.endpoint = kwargs.get('endpoint', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.database = kwargs.get('database', None) + self.tenant = kwargs.get('tenant', None) + self.type = 'AzureDataExplorer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service_py3.py new file mode 100644 index 000000000000..3cd8ab9c3c19 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureDataExplorerLinkedService(LinkedService): + """Azure Data Explorer (Kusto) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param endpoint: Required. The endpoint of Azure Data Explorer (the
+ engine's endpoint). URL will be in the format
+ https://<clusterName>.<regionName>.kusto.windows.net. Type: string (or
+ Expression with resultType string).
+ :type endpoint: object
+ :param service_principal_id: Required. The ID of the service principal
+ used to authenticate against Azure Data Explorer. Type: string (or
+ Expression with resultType string).
+ :type service_principal_id: object
+ :param service_principal_key: Required. The key of the service principal
+ used to authenticate against Kusto.
+ :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
+ :param database: Required. Database name for connection. Type: string (or
+ Expression with resultType string).
+ :type database: object
+ :param tenant: Required. The name or ID of the tenant to which the service
+ principal belongs. Type: string (or Expression with resultType string).
+ :type tenant: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'endpoint': {'required': True},
+ 'service_principal_id': {'required': True},
+ 'service_principal_key': {'required': True},
+ 'database': {'required': True},
+ 'tenant': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'},
+ 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
+ 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
+ 'database': {'key': 'typeProperties.database', 'type': 'object'},
+ 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
+ }
+
+ def __init__(self, *, endpoint, service_principal_id, service_principal_key, database, tenant, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None:
+ super(AzureDataExplorerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+ self.endpoint = endpoint
+ self.service_principal_id = service_principal_id
+ self.service_principal_key = service_principal_key
+ self.database = database
+ self.tenant = tenant
+ self.type = 'AzureDataExplorer'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink.py
new file mode 100644
index 000000000000..5c204ab769e4
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink.py
@@ -0,0 +1,76 @@
+# coding=utf-8
+#
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class AzureDataExplorerSink(CopySink): + """A copy activity Azure Data Explorer sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param ingestion_mapping_name: A name of a pre-created csv mapping that + was defined on the target Kusto table. Type: string. + :type ingestion_mapping_name: object + :param ingestion_mapping_as_json: An explicit column mapping description + provided in a json format. Type: string. + :type ingestion_mapping_as_json: object + :param flush_immediately: If set to true, any aggregation will be skipped. + Default is false. Type: boolean. 
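To show how the command activity and the Kusto linked service defined above fit together, a hedged sketch; SecureString and LinkedServiceReference are other models in this package, and the cluster URL, credentials and names are all placeholders:

from azure.mgmt.datafactory.models import (
    AzureDataExplorerCommandActivity, AzureDataExplorerLinkedService,
    LinkedServiceReference, SecureString)

# A Kusto linked service and a control-command activity targeting it;
# every concrete value below is a placeholder.
adx_ls = AzureDataExplorerLinkedService(
    endpoint='https://mycluster.westus2.kusto.windows.net',
    service_principal_id='<application-id>',
    service_principal_key=SecureString(value='<service-principal-key>'),
    database='mydb',
    tenant='<tenant-id>')

show_tables = AzureDataExplorerCommandActivity(
    name='ShowTables',
    command='.show tables',
    command_timeout='00:10:00',
    linked_service_name=LinkedServiceReference(
        reference_name='AzureDataExplorerLinkedService'))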
+ :type flush_immediately: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'}, + 'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'}, + 'flush_immediately': {'key': 'flushImmediately', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataExplorerSink, self).__init__(**kwargs) + self.ingestion_mapping_name = kwargs.get('ingestion_mapping_name', None) + self.ingestion_mapping_as_json = kwargs.get('ingestion_mapping_as_json', None) + self.flush_immediately = kwargs.get('flush_immediately', None) + self.type = 'AzureDataExplorerSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink_py3.py new file mode 100644 index 000000000000..e5cb67bc79b8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink_py3.py @@ -0,0 +1,76 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzureDataExplorerSink(CopySink): + """A copy activity Azure Data Explorer sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param ingestion_mapping_name: A name of a pre-created csv mapping that + was defined on the target Kusto table. Type: string. 
+ :type ingestion_mapping_name: object + :param ingestion_mapping_as_json: An explicit column mapping description + provided in a json format. Type: string. + :type ingestion_mapping_as_json: object + :param flush_immediately: If set to true, any aggregation will be skipped. + Default is false. Type: boolean. + :type flush_immediately: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'}, + 'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'}, + 'flush_immediately': {'key': 'flushImmediately', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ingestion_mapping_name=None, ingestion_mapping_as_json=None, flush_immediately=None, **kwargs) -> None: + super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.ingestion_mapping_name = ingestion_mapping_name + self.ingestion_mapping_as_json = ingestion_mapping_as_json + self.flush_immediately = flush_immediately + self.type = 'AzureDataExplorerSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source.py new file mode 100644 index 000000000000..2caaa517efd5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class AzureDataExplorerSource(CopySource): + """A copy activity Azure Data Explorer (Kusto) source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
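A minimal sink sketch using a pre-created ingestion mapping on the target Kusto table; the mapping name is a placeholder:

from azure.mgmt.datafactory.models import AzureDataExplorerSink

# Ingest through an existing CSV mapping and skip aggregation;
# 'csv_mapping_1' is an illustrative name only.
adx_sink = AzureDataExplorerSink(
    ingestion_mapping_name='csv_mapping_1',
    flush_immediately=True)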
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Required. Database query. Should be a Kusto Query Language + (KQL) query. Type: string (or Expression with resultType string). + :type query: object + :param no_truncation: The name of the Boolean option that controls whether + truncation is applied to result-sets that go beyond a certain row-count + limit. + :type no_truncation: object + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. + :type query_timeout: object + """ + + _validation = { + 'type': {'required': True}, + 'query': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'no_truncation': {'key': 'noTruncation', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataExplorerSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.no_truncation = kwargs.get('no_truncation', None) + self.query_timeout = kwargs.get('query_timeout', None) + self.type = 'AzureDataExplorerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source_py3.py new file mode 100644 index 000000000000..55a6bc78ee04 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AzureDataExplorerSource(CopySource): + """A copy activity Azure Data Explorer (Kusto) source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). 
+ :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Required. Database query. Should be a Kusto Query Language + (KQL) query. Type: string (or Expression with resultType string). + :type query: object + :param no_truncation: The name of the Boolean option that controls whether + truncation is applied to result-sets that go beyond a certain row-count + limit. + :type no_truncation: object + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. + :type query_timeout: object + """ + + _validation = { + 'type': {'required': True}, + 'query': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'no_truncation': {'key': 'noTruncation', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + } + + def __init__(self, *, query, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, no_truncation=None, query_timeout=None, **kwargs) -> None: + super(AzureDataExplorerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.no_truncation = no_truncation + self.query_timeout = query_timeout + self.type = 'AzureDataExplorerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset.py new file mode 100644 index 000000000000..594d22171f48 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AzureDataExplorerTableDataset(Dataset): + """The Azure Data Explorer (Kusto) dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. 
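And a sketch wiring this source into a copy activity; CopyActivity is defined elsewhere in this package and its required name/source/sink parameters are assumed here, with a placeholder KQL query:

from azure.mgmt.datafactory.models import (
    AzureDataExplorerSink, AzureDataExplorerSource, CopyActivity)

# Copy one day of rows out of Kusto; the query, timeout and activity
# name are placeholders.
adx_source = AzureDataExplorerSource(
    query='MyTable | where Timestamp > ago(1d)',
    query_timeout='00:05:00')

copy_from_kusto = CopyActivity(
    name='CopyFromKusto',
    source=adx_source,
    sink=AzureDataExplorerSink(flush_immediately=True))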
+ :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table: The table name of the Azure Data Explorer database. Type: + string (or Expression with resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataExplorerTableDataset, self).__init__(**kwargs) + self.table = kwargs.get('table', None) + self.type = 'AzureDataExplorerTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset_py3.py new file mode 100644 index 000000000000..d36b0f39c2fe --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureDataExplorerTableDataset(Dataset): + """The Azure Data Explorer (Kusto) dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table: The table name of the Azure Data Explorer database. Type: + string (or Expression with resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table=None, **kwargs) -> None: + super(AzureDataExplorerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table = table + self.type = 'AzureDataExplorerTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service.py new file mode 100644 index 000000000000..0381e1b1de65 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureDataLakeAnalyticsLinkedService(LinkedService): + """Azure Data Lake Analytics linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. 
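A dataset sketch binding the model above to a single Kusto table; both reference names are placeholders:

from azure.mgmt.datafactory.models import (
    AzureDataExplorerTableDataset, LinkedServiceReference)

# Dataset pointing at one table of the linked Kusto database;
# the linked-service and table names are illustrative.
adx_table = AzureDataExplorerTableDataset(
    linked_service_name=LinkedServiceReference(
        reference_name='AzureDataExplorerLinkedService'),
    table='MyTable')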
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param account_name: Required. The Azure Data Lake Analytics account name.
+     Type: string (or Expression with resultType string).
+    :type account_name: object
+    :param service_principal_id: The ID of the application used to
+     authenticate against the Azure Data Lake Analytics account. Type: string
+     (or Expression with resultType string).
+    :type service_principal_id: object
+    :param service_principal_key: The Key of the application used to
+     authenticate against the Azure Data Lake Analytics account.
+    :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
+    :param tenant: Required. The name or ID of the tenant to which the service
+     principal belongs. Type: string (or Expression with resultType string).
+    :type tenant: object
+    :param subscription_id: Data Lake Analytics account subscription ID (if
+     different from Data Factory account). Type: string (or Expression with
+     resultType string).
+    :type subscription_id: object
+    :param resource_group_name: Data Lake Analytics account resource group
+     name (if different from Data Factory account). Type: string (or
+     Expression with resultType string).
+    :type resource_group_name: object
+    :param data_lake_analytics_uri: Azure Data Lake Analytics URI. Type:
+     string (or Expression with resultType string).
+    :type data_lake_analytics_uri: object
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'account_name': {'required': True}, + 'tenant': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, + 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, + 'data_lake_analytics_uri': {'key': 'typeProperties.dataLakeAnalyticsUri', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataLakeAnalyticsLinkedService, self).__init__(**kwargs) + self.account_name = kwargs.get('account_name', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.subscription_id = kwargs.get('subscription_id', None) + self.resource_group_name = kwargs.get('resource_group_name', None) + self.data_lake_analytics_uri = kwargs.get('data_lake_analytics_uri', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureDataLakeAnalytics' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service_py3.py new file mode 100644 index 000000000000..93250e2cef76 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service_py3.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureDataLakeAnalyticsLinkedService(LinkedService): + """Azure Data Lake Analytics linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param account_name: Required. The Azure Data Lake Analytics account name.
+     Type: string (or Expression with resultType string).
+    :type account_name: object
+    :param service_principal_id: The ID of the application used to
+     authenticate against the Azure Data Lake Analytics account. Type: string
+     (or Expression with resultType string).
+    :type service_principal_id: object
+    :param service_principal_key: The Key of the application used to
+     authenticate against the Azure Data Lake Analytics account.
+    :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
+    :param tenant: Required. The name or ID of the tenant to which the service
+     principal belongs. Type: string (or Expression with resultType string).
+    :type tenant: object
+    :param subscription_id: Data Lake Analytics account subscription ID (if
+     different from Data Factory account). Type: string (or Expression with
+     resultType string).
+    :type subscription_id: object
+    :param resource_group_name: Data Lake Analytics account resource group
+     name (if different from Data Factory account). Type: string (or
+     Expression with resultType string).
+    :type resource_group_name: object
+    :param data_lake_analytics_uri: Azure Data Lake Analytics URI. Type:
+     string (or Expression with resultType string).
+    :type data_lake_analytics_uri: object
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'account_name': {'required': True}, + 'tenant': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, + 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, + 'data_lake_analytics_uri': {'key': 'typeProperties.dataLakeAnalyticsUri', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, account_name, tenant, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, subscription_id=None, resource_group_name=None, data_lake_analytics_uri=None, encrypted_credential=None, **kwargs) -> None: + super(AzureDataLakeAnalyticsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.account_name = account_name + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.subscription_id = subscription_id + self.resource_group_name = resource_group_name + self.data_lake_analytics_uri = data_lake_analytics_uri + self.encrypted_credential = encrypted_credential + self.type = 'AzureDataLakeAnalytics' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset.py new file mode 100644 index 000000000000..de15057f78ed --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AzureDataLakeStoreDataset(Dataset): + """Azure Data Lake Store dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. 
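# --- Editor's note: illustrative usage sketch, not part of the generated patch. ---
# Building the AzureDataLakeAnalyticsLinkedService defined above with its two
# required properties plus service principal credentials. All identifiers
# below are hypothetical placeholders.
from azure.mgmt.datafactory.models import (
    AzureDataLakeAnalyticsLinkedService,
    SecureString,
)

adla_linked_service = AzureDataLakeAnalyticsLinkedService(
    account_name='myadlaaccount',       # required
    tenant='<tenant-id>',               # required
    service_principal_id='<application-id>',
    # SecretBase accepts a SecureString (or a key vault secret reference).
    service_principal_key=SecureString(value='<application-key>'),
)
# --- End editor's note. ---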
+ :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param folder_path: Path to the folder in the Azure Data Lake Store. Type: + string (or Expression with resultType string). + :type folder_path: object + :param file_name: The name of the file in the Azure Data Lake Store. Type: + string (or Expression with resultType string). + :type file_name: object + :param format: The format of the Data Lake Store. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param compression: The data compression method used for the item(s) in + the Azure Data Lake Store. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, **kwargs): + super(AzureDataLakeStoreDataset, self).__init__(**kwargs) + self.folder_path = kwargs.get('folder_path', None) + self.file_name = kwargs.get('file_name', None) + self.format = kwargs.get('format', None) + self.compression = kwargs.get('compression', None) + self.type = 'AzureDataLakeStoreFile' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset_py3.py new file mode 100644 index 000000000000..d2df0ffebe7e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureDataLakeStoreDataset(Dataset): + """Azure Data Lake Store dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param folder_path: Path to the folder in the Azure Data Lake Store. Type: + string (or Expression with resultType string). + :type folder_path: object + :param file_name: The name of the file in the Azure Data Lake Store. Type: + string (or Expression with resultType string). + :type file_name: object + :param format: The format of the Data Lake Store. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param compression: The data compression method used for the item(s) in + the Azure Data Lake Store. 
+ :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, format=None, compression=None, **kwargs) -> None: + super(AzureDataLakeStoreDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.folder_path = folder_path + self.file_name = file_name + self.format = format + self.compression = compression + self.type = 'AzureDataLakeStoreFile' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service.py new file mode 100644 index 000000000000..f08e086cb500 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service.py @@ -0,0 +1,98 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureDataLakeStoreLinkedService(LinkedService): + """Azure Data Lake Store linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. 
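# --- Editor's note: illustrative usage sketch, not part of the generated patch. ---
# Constructing the AzureDataLakeStoreDataset defined above. folder_path and
# file_name land under typeProperties in the serialized payload; the names
# used here are hypothetical placeholders.
from azure.mgmt.datafactory.models import (
    AzureDataLakeStoreDataset,
    LinkedServiceReference,
)

adls_dataset = AzureDataLakeStoreDataset(
    linked_service_name=LinkedServiceReference(
        reference_name='MyAzureDataLakeStoreLinkedService'),
    folder_path='raw/events/2019/06',
    file_name='events.csv',
)
# --- End editor's note. ---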
Constant filled by server. + :type type: str + :param data_lake_store_uri: Required. Data Lake Store service URI. Type: + string (or Expression with resultType string). + :type data_lake_store_uri: object + :param service_principal_id: The ID of the application used to + authenticate against the Azure Data Lake Store account. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The Key of the application used to + authenticate against the Azure Data Lake Store account. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param account_name: Data Lake Store account name. Type: string (or + Expression with resultType string). + :type account_name: object + :param subscription_id: Data Lake Store account subscription ID (if + different from Data Factory account). Type: string (or Expression with + resultType string). + :type subscription_id: object + :param resource_group_name: Data Lake Store account resource group name + (if different from Data Factory account). Type: string (or Expression with + resultType string). + :type resource_group_name: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'data_lake_store_uri': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'data_lake_store_uri': {'key': 'typeProperties.dataLakeStoreUri', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, + 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, + 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataLakeStoreLinkedService, self).__init__(**kwargs) + self.data_lake_store_uri = kwargs.get('data_lake_store_uri', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.account_name = kwargs.get('account_name', None) + self.subscription_id = kwargs.get('subscription_id', None) + self.resource_group_name = kwargs.get('resource_group_name', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureDataLakeStore' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service_py3.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service_py3.py new file mode 100644 index 000000000000..7b8ab293c0cf --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service_py3.py @@ -0,0 +1,98 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureDataLakeStoreLinkedService(LinkedService): + """Azure Data Lake Store linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param data_lake_store_uri: Required. Data Lake Store service URI. Type: + string (or Expression with resultType string). + :type data_lake_store_uri: object + :param service_principal_id: The ID of the application used to + authenticate against the Azure Data Lake Store account. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The Key of the application used to + authenticate against the Azure Data Lake Store account. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param account_name: Data Lake Store account name. Type: string (or + Expression with resultType string). + :type account_name: object + :param subscription_id: Data Lake Store account subscription ID (if + different from Data Factory account). Type: string (or Expression with + resultType string). + :type subscription_id: object + :param resource_group_name: Data Lake Store account resource group name + (if different from Data Factory account). Type: string (or Expression with + resultType string). + :type resource_group_name: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'data_lake_store_uri': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'data_lake_store_uri': {'key': 'typeProperties.dataLakeStoreUri', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, + 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, + 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, data_lake_store_uri, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, tenant=None, account_name=None, subscription_id=None, resource_group_name=None, encrypted_credential=None, **kwargs) -> None: + super(AzureDataLakeStoreLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.data_lake_store_uri = data_lake_store_uri + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.account_name = account_name + self.subscription_id = subscription_id + self.resource_group_name = resource_group_name + self.encrypted_credential = encrypted_credential + self.type = 'AzureDataLakeStore' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location.py new file mode 100644 index 000000000000..a4bf521a2005 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class AzureDataLakeStoreLocation(DatasetLocation): + """The location of azure data lake store dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. 
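# --- Editor's note: illustrative usage sketch, not part of the generated patch. ---
# The AzureDataLakeStoreLinkedService defined above requires only
# data_lake_store_uri; the service principal settings are optional. All
# values are hypothetical placeholders.
from azure.mgmt.datafactory.models import (
    AzureDataLakeStoreLinkedService,
    SecureString,
)

adls_linked_service = AzureDataLakeStoreLinkedService(
    data_lake_store_uri='adl://myadlsaccount.azuredatalakestore.net/',
    service_principal_id='<application-id>',
    service_principal_key=SecureString(value='<application-key>'),
    tenant='<tenant-id>',
)
# --- End editor's note. ---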
+    :type type: str
+    :param folder_path: Specify the folder path of the dataset. Type: string
+     (or Expression with resultType string).
+    :type folder_path: object
+    :param file_name: Specify the file name of the dataset. Type: string (or
+     Expression with resultType string).
+    :type file_name: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'folder_path': {'key': 'folderPath', 'type': 'object'},
+        'file_name': {'key': 'fileName', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureDataLakeStoreLocation, self).__init__(**kwargs)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location_py3.py
new file mode 100644
index 000000000000..e7955731fc31
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location_py3.py
@@ -0,0 +1,45 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_location_py3 import DatasetLocation
+
+
+class AzureDataLakeStoreLocation(DatasetLocation):
+    """The location of an Azure Data Lake Store dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of dataset storage location.
+    :type type: str
+    :param folder_path: Specify the folder path of the dataset. Type: string
+     (or Expression with resultType string).
+    :type folder_path: object
+    :param file_name: Specify the file name of the dataset. Type: string (or
+     Expression with resultType string).
+    :type file_name: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'folder_path': {'key': 'folderPath', 'type': 'object'},
+        'file_name': {'key': 'fileName', 'type': 'object'},
+    }
+
+    def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None:
+        super(AzureDataLakeStoreLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings.py
new file mode 100644
index 000000000000..213d69966baf
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings.py
@@ -0,0 +1,73 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings import StoreReadSettings + + +class AzureDataLakeStoreReadSettings(StoreReadSettings): + """Azure data lake store read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: ADLS wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: ADLS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). 
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataLakeStoreReadSettings, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings_py3.py new file mode 100644 index 000000000000..b4bccc5e78a3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings_py3 import StoreReadSettings + + +class AzureDataLakeStoreReadSettings(StoreReadSettings): + """Azure data lake store read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: ADLS wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: ADLS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. 
+ Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(AzureDataLakeStoreReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py new file mode 100644 index 000000000000..e882698c2ca6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class AzureDataLakeStoreSink(CopySink): + """A copy activity Azure Data Lake Store sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. 
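# --- Editor's note: illustrative usage sketch, not part of the generated patch. ---
# AzureDataLakeStoreReadSettings as defined above: in this version of the
# model, 'type' is a required free-form string rather than a server-filled
# constant, so it is passed explicitly here (the discriminator value used is
# an assumption).
from azure.mgmt.datafactory.models import AzureDataLakeStoreReadSettings

read_settings = AzureDataLakeStoreReadSettings(
    type='AzureDataLakeStoreReadSettings',
    recursive=True,                 # read files under the folder path recursively
    wildcard_file_name='*.csv',     # ADLS wildcardFileName
    enable_partition_discovery=False,
)
# --- End editor's note. ---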
Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param enable_adls_single_file_parallel: Single File Parallel. + :type enable_adls_single_file_parallel: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataLakeStoreSink, self).__init__(**kwargs) + self.copy_behavior = kwargs.get('copy_behavior', None) + self.enable_adls_single_file_parallel = kwargs.get('enable_adls_single_file_parallel', None) + self.type = 'AzureDataLakeStoreSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py new file mode 100644 index 000000000000..0f96cea725e2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzureDataLakeStoreSink(CopySink): + """A copy activity Azure Data Lake Store sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param enable_adls_single_file_parallel: Single File Parallel. + :type enable_adls_single_file_parallel: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, enable_adls_single_file_parallel=None, **kwargs) -> None: + super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.copy_behavior = copy_behavior + self.enable_adls_single_file_parallel = enable_adls_single_file_parallel + self.type = 'AzureDataLakeStoreSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source.py new file mode 100644 index 000000000000..9d2046049a30 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class AzureDataLakeStoreSource(CopySource): + """A copy activity Azure Data Lake source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
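# --- Editor's note: illustrative usage sketch, not part of the generated patch. ---
# A copy sink targeting Azure Data Lake Store, using the AzureDataLakeStoreSink
# defined above. 'PreserveHierarchy' is one of the documented copy behavior
# values; the connection count chosen here is arbitrary.
from azure.mgmt.datafactory.models import AzureDataLakeStoreSink

adls_sink = AzureDataLakeStoreSink(
    copy_behavior='PreserveHierarchy',
    max_concurrent_connections=4,
)
# --- End editor's note. ---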
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataLakeStoreSource, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.type = 'AzureDataLakeStoreSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source_py3.py new file mode 100644 index 000000000000..e1d883972220 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source_py3.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AzureDataLakeStoreSource(CopySource): + """A copy activity Azure Data Lake source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). 
+ :type recursive: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, **kwargs) -> None: + super(AzureDataLakeStoreSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.type = 'AzureDataLakeStoreSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings.py new file mode 100644 index 000000000000..6cf8deeacb07 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_write_settings import StoreWriteSettings + + +class AzureDataLakeStoreWriteSettings(StoreWriteSettings): + """Azure data lake store write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param type: Required. Constant filled by server. 
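# --- Editor's note: illustrative usage sketch, not part of the generated patch. ---
# Wiring the AzureDataLakeStoreSource defined above (together with the sink
# shown earlier) into a copy activity. The dataset and activity names are
# hypothetical placeholders.
from azure.mgmt.datafactory.models import (
    AzureDataLakeStoreSink,
    AzureDataLakeStoreSource,
    CopyActivity,
    DatasetReference,
)

copy_activity = CopyActivity(
    name='CopyFromAdlsToAdls',
    inputs=[DatasetReference(reference_name='InputAdlsDataset')],
    outputs=[DatasetReference(reference_name='OutputAdlsDataset')],
    source=AzureDataLakeStoreSource(recursive=True),
    sink=AzureDataLakeStoreSink(copy_behavior='FlattenHierarchy'),
)
# --- End editor's note. ---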
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AzureDataLakeStoreWriteSettings, self).__init__(**kwargs) + self.type = 'AzureDataLakeStoreWriteSettings' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings_py3.py new file mode 100644 index 000000000000..0b9a0e38e41c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_write_settings_py3 import StoreWriteSettings + + +class AzureDataLakeStoreWriteSettings(StoreWriteSettings): + """Azure data lake store write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + self.type = 'AzureDataLakeStoreWriteSettings' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py new file mode 100644 index 000000000000..6cc4c12674cb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py @@ -0,0 +1,126 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureDatabricksLinkedService(LinkedService): + """Azure Databricks linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param domain: Required. .azuredatabricks.net, domain name of your + Databricks deployment. Type: string (or Expression with resultType + string). + :type domain: object + :param access_token: Required. Access token for databricks REST API. Refer + to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: + string (or Expression with resultType string). + :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param existing_cluster_id: The id of an existing cluster that will be + used for all runs of this job. Type: string (or Expression with resultType + string). + :type existing_cluster_id: object + :param new_cluster_version: The Spark version of new cluster. Type: string + (or Expression with resultType string). + :type new_cluster_version: object + :param new_cluster_num_of_worker: Number of worker nodes that new cluster + should have. A string formatted Int32, like '1' means numOfWorker is 1 or + '1:10' means auto-scale from 1 as min and 10 as max. Type: string (or + Expression with resultType string). + :type new_cluster_num_of_worker: object + :param new_cluster_node_type: The node types of new cluster. Type: string + (or Expression with resultType string). + :type new_cluster_node_type: object + :param new_cluster_spark_conf: A set of optional, user-specified Spark + configuration key-value pairs. + :type new_cluster_spark_conf: dict[str, object] + :param new_cluster_spark_env_vars: A set of optional, user-specified Spark + environment variables key-value pairs. + :type new_cluster_spark_env_vars: dict[str, object] + :param new_cluster_custom_tags: Additional tags for cluster resources. + :type new_cluster_custom_tags: dict[str, object] + :param new_cluster_driver_node_type: The driver node type for the new + cluster. Type: string (or Expression with resultType string). + :type new_cluster_driver_node_type: object + :param new_cluster_init_scripts: User-defined initialization scripts for + the new cluster. Type: array of strings (or Expression with resultType + array of strings). + :type new_cluster_init_scripts: object + :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new + cluster. Type: boolean (or Expression with resultType boolean). 
+ :type new_cluster_enable_elastic_disk: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'domain': {'required': True}, + 'access_token': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, + 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, + 'new_cluster_num_of_worker': {'key': 'typeProperties.newClusterNumOfWorker', 'type': 'object'}, + 'new_cluster_node_type': {'key': 'typeProperties.newClusterNodeType', 'type': 'object'}, + 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, + 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, + 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, + 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, + 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, + 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDatabricksLinkedService, self).__init__(**kwargs) + self.domain = kwargs.get('domain', None) + self.access_token = kwargs.get('access_token', None) + self.existing_cluster_id = kwargs.get('existing_cluster_id', None) + self.new_cluster_version = kwargs.get('new_cluster_version', None) + self.new_cluster_num_of_worker = kwargs.get('new_cluster_num_of_worker', None) + self.new_cluster_node_type = kwargs.get('new_cluster_node_type', None) + self.new_cluster_spark_conf = kwargs.get('new_cluster_spark_conf', None) + self.new_cluster_spark_env_vars = kwargs.get('new_cluster_spark_env_vars', None) + self.new_cluster_custom_tags = kwargs.get('new_cluster_custom_tags', None) + self.new_cluster_driver_node_type = kwargs.get('new_cluster_driver_node_type', None) + self.new_cluster_init_scripts = kwargs.get('new_cluster_init_scripts', None) + self.new_cluster_enable_elastic_disk = kwargs.get('new_cluster_enable_elastic_disk', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureDatabricks' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service_py3.py new file mode 100644 index 000000000000..6299dac1e3f2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service_py3.py @@ -0,0 +1,126 @@ +# 
coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureDatabricksLinkedService(LinkedService): + """Azure Databricks linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param domain: Required. .azuredatabricks.net, domain name of your + Databricks deployment. Type: string (or Expression with resultType + string). + :type domain: object + :param access_token: Required. Access token for databricks REST API. Refer + to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: + string (or Expression with resultType string). + :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param existing_cluster_id: The id of an existing cluster that will be + used for all runs of this job. Type: string (or Expression with resultType + string). + :type existing_cluster_id: object + :param new_cluster_version: The Spark version of new cluster. Type: string + (or Expression with resultType string). + :type new_cluster_version: object + :param new_cluster_num_of_worker: Number of worker nodes that new cluster + should have. A string formatted Int32, like '1' means numOfWorker is 1 or + '1:10' means auto-scale from 1 as min and 10 as max. Type: string (or + Expression with resultType string). + :type new_cluster_num_of_worker: object + :param new_cluster_node_type: The node types of new cluster. Type: string + (or Expression with resultType string). + :type new_cluster_node_type: object + :param new_cluster_spark_conf: A set of optional, user-specified Spark + configuration key-value pairs. + :type new_cluster_spark_conf: dict[str, object] + :param new_cluster_spark_env_vars: A set of optional, user-specified Spark + environment variables key-value pairs. + :type new_cluster_spark_env_vars: dict[str, object] + :param new_cluster_custom_tags: Additional tags for cluster resources. + :type new_cluster_custom_tags: dict[str, object] + :param new_cluster_driver_node_type: The driver node type for the new + cluster. Type: string (or Expression with resultType string). + :type new_cluster_driver_node_type: object + :param new_cluster_init_scripts: User-defined initialization scripts for + the new cluster. Type: array of strings (or Expression with resultType + array of strings). 
+ :type new_cluster_init_scripts: object + :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new + cluster. Type: boolean (or Expression with resultType boolean). + :type new_cluster_enable_elastic_disk: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'domain': {'required': True}, + 'access_token': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, + 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, + 'new_cluster_num_of_worker': {'key': 'typeProperties.newClusterNumOfWorker', 'type': 'object'}, + 'new_cluster_node_type': {'key': 'typeProperties.newClusterNodeType', 'type': 'object'}, + 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, + 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, + 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, + 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, + 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, + 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, domain, access_token, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, existing_cluster_id=None, new_cluster_version=None, new_cluster_num_of_worker=None, new_cluster_node_type=None, new_cluster_spark_conf=None, new_cluster_spark_env_vars=None, new_cluster_custom_tags=None, new_cluster_driver_node_type=None, new_cluster_init_scripts=None, new_cluster_enable_elastic_disk=None, encrypted_credential=None, **kwargs) -> None: + super(AzureDatabricksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.domain = domain + self.access_token = access_token + self.existing_cluster_id = existing_cluster_id + self.new_cluster_version = new_cluster_version + self.new_cluster_num_of_worker = new_cluster_num_of_worker + self.new_cluster_node_type = new_cluster_node_type + self.new_cluster_spark_conf = new_cluster_spark_conf + self.new_cluster_spark_env_vars = new_cluster_spark_env_vars + self.new_cluster_custom_tags = new_cluster_custom_tags + self.new_cluster_driver_node_type = new_cluster_driver_node_type + self.new_cluster_init_scripts = new_cluster_init_scripts + self.new_cluster_enable_elastic_disk = 
new_cluster_enable_elastic_disk + self.encrypted_credential = encrypted_credential + self.type = 'AzureDatabricks' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity.py new file mode 100644 index 000000000000..68b02e5f771f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity import ExecutionActivity + + +class AzureFunctionActivity(ExecutionActivity): + """Azure Function activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param method: Required. Rest API method for target endpoint. Possible + values include: 'GET', 'POST', 'PUT', 'DELETE', 'OPTIONS', 'HEAD', 'TRACE' + :type method: str or + ~azure.mgmt.datafactory.models.AzureFunctionActivityMethod + :param function_name: Required. Name of the Function that the Azure + Function Activity will call. Type: string (or Expression with resultType + string) + :type function_name: object + :param headers: Represents the headers that will be sent to the request. + For example, to set the language and type on a request: "headers" : { + "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: + string (or Expression with resultType string). + :type headers: object + :param body: Represents the payload that will be sent to the endpoint. + Required for POST/PUT method, not allowed for GET method Type: string (or + Expression with resultType string). 
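A sketch of constructing the new AzureDatabricksLinkedService with an on-demand cluster (workspace URL, token, runtime version, and node type below are placeholders; SecureString is the package's in-line SecretBase implementation):

    from azure.mgmt.datafactory.models import (
        AzureDatabricksLinkedService, SecureString)

    databricks_ls = AzureDatabricksLinkedService(
        domain='https://westeurope.azuredatabricks.net',  # placeholder workspace
        access_token=SecureString(value='<dapi-token>'),  # placeholder secret
        new_cluster_version='5.3.x-scala2.11',            # placeholder runtime
        new_cluster_num_of_worker='1:4',                  # autoscale 1..4, per docstring
        new_cluster_node_type='Standard_DS3_v2',          # placeholder node type
        new_cluster_enable_elastic_disk=True)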
+ :type body: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'method': {'required': True}, + 'function_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'method': {'key': 'typeProperties.method', 'type': 'str'}, + 'function_name': {'key': 'typeProperties.functionName', 'type': 'object'}, + 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, + 'body': {'key': 'typeProperties.body', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureFunctionActivity, self).__init__(**kwargs) + self.method = kwargs.get('method', None) + self.function_name = kwargs.get('function_name', None) + self.headers = kwargs.get('headers', None) + self.body = kwargs.get('body', None) + self.type = 'AzureFunctionActivity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity_py3.py new file mode 100644 index 000000000000..95bb1ca260e7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity_py3.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class AzureFunctionActivity(ExecutionActivity): + """Azure Function activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param method: Required. Rest API method for target endpoint. Possible + values include: 'GET', 'POST', 'PUT', 'DELETE', 'OPTIONS', 'HEAD', 'TRACE' + :type method: str or + ~azure.mgmt.datafactory.models.AzureFunctionActivityMethod + :param function_name: Required. 
Name of the Function that the Azure + Function Activity will call. Type: string (or Expression with resultType + string) + :type function_name: object + :param headers: Represents the headers that will be sent to the request. + For example, to set the language and type on a request: "headers" : { + "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: + string (or Expression with resultType string). + :type headers: object + :param body: Represents the payload that will be sent to the endpoint. + Required for POST/PUT method, not allowed for GET method Type: string (or + Expression with resultType string). + :type body: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'method': {'required': True}, + 'function_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'method': {'key': 'typeProperties.method', 'type': 'str'}, + 'function_name': {'key': 'typeProperties.functionName', 'type': 'object'}, + 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, + 'body': {'key': 'typeProperties.body', 'type': 'object'}, + } + + def __init__(self, *, name: str, method, function_name, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, headers=None, body=None, **kwargs) -> None: + super(AzureFunctionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.method = method + self.function_name = function_name + self.headers = headers + self.body = body + self.type = 'AzureFunctionActivity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service.py new file mode 100644 index 000000000000..2ed5b870a778 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureFunctionLinkedService(LinkedService): + """Azure Function linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
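A usage sketch for the new AzureFunctionActivity (activity and function names are placeholders). Note that the docstring above requires body for POST/PUT and forbids it for GET:

    from azure.mgmt.datafactory.models import AzureFunctionActivity

    fn_activity = AzureFunctionActivity(
        name='CallMyFunction',                              # placeholder
        method='POST',
        function_name='HttpTriggerJob',                     # placeholder
        headers={'Content-Type': 'application/json'},
        body={'runDate': '@pipeline().parameters.runDate'})  # required for POST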
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param function_app_url: Required. The endpoint of the Azure Function App. + URL will be in the format https://.azurewebsites.net. + :type function_app_url: object + :param function_key: Function or Host key for Azure Function App. + :type function_key: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'function_app_url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'function_app_url': {'key': 'typeProperties.functionAppUrl', 'type': 'object'}, + 'function_key': {'key': 'typeProperties.functionKey', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureFunctionLinkedService, self).__init__(**kwargs) + self.function_app_url = kwargs.get('function_app_url', None) + self.function_key = kwargs.get('function_key', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureFunction' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service_py3.py new file mode 100644 index 000000000000..a1bfdbe8b6c1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureFunctionLinkedService(LinkedService): + """Azure Function linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param function_app_url: Required. The endpoint of the Azure Function App. + URL will be in the format https://.azurewebsites.net. + :type function_app_url: object + :param function_key: Function or Host key for Azure Function App. + :type function_key: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'function_app_url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'function_app_url': {'key': 'typeProperties.functionAppUrl', 'type': 'object'}, + 'function_key': {'key': 'typeProperties.functionKey', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, function_app_url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, function_key=None, encrypted_credential=None, **kwargs) -> None: + super(AzureFunctionLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.function_app_url = function_app_url + self.function_key = function_key + self.encrypted_credential = encrypted_credential + self.type = 'AzureFunction' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service.py new file mode 100644 index 000000000000..768f0d83ae93 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureKeyVaultLinkedService(LinkedService): + """Azure Key Vault linked service. + + All required parameters must be populated in order to send to Azure. 
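A sketch of the matching AzureFunctionLinkedService (the app URL and key are placeholders; SecureString again stands in for any SecretBase):

    from azure.mgmt.datafactory.models import (
        AzureFunctionLinkedService, SecureString)

    function_ls = AzureFunctionLinkedService(
        function_app_url='https://myfunctionapp.azurewebsites.net',  # placeholder
        function_key=SecureString(value='<function-key>'))           # placeholder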
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param base_url: Required. The base URL of the Azure Key Vault. e.g. + https://myakv.vault.azure.net Type: string (or Expression with resultType + string). + :type base_url: object + """ + + _validation = { + 'type': {'required': True}, + 'base_url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'base_url': {'key': 'typeProperties.baseUrl', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureKeyVaultLinkedService, self).__init__(**kwargs) + self.base_url = kwargs.get('base_url', None) + self.type = 'AzureKeyVault' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service_py3.py new file mode 100644 index 000000000000..50f4a58a5a1b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service_py3.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureKeyVaultLinkedService(LinkedService): + """Azure Key Vault linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param base_url: Required. The base URL of the Azure Key Vault. e.g. 
+ https://myakv.vault.azure.net Type: string (or Expression with resultType + string). + :type base_url: object + """ + + _validation = { + 'type': {'required': True}, + 'base_url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'base_url': {'key': 'typeProperties.baseUrl', 'type': 'object'}, + } + + def __init__(self, *, base_url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: + super(AzureKeyVaultLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.base_url = base_url + self.type = 'AzureKeyVault' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference.py new file mode 100644 index 000000000000..28d3e7d31cee --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .secret_base import SecretBase + + +class AzureKeyVaultSecretReference(SecretBase): + """Azure Key Vault secret reference. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param store: Required. The Azure Key Vault linked service reference. + :type store: ~azure.mgmt.datafactory.models.LinkedServiceReference + :param secret_name: Required. The name of the secret in Azure Key Vault. + Type: string (or Expression with resultType string). + :type secret_name: object + :param secret_version: The version of the secret in Azure Key Vault. The + default value is the latest version of the secret. Type: string (or + Expression with resultType string). 
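A one-line sketch of the AzureKeyVaultLinkedService, using the vault URL format given in its own docstring:

    from azure.mgmt.datafactory.models import AzureKeyVaultLinkedService

    akv_ls = AzureKeyVaultLinkedService(base_url='https://myakv.vault.azure.net')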
+ :type secret_version: object + """ + + _validation = { + 'type': {'required': True}, + 'store': {'required': True}, + 'secret_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'store': {'key': 'store', 'type': 'LinkedServiceReference'}, + 'secret_name': {'key': 'secretName', 'type': 'object'}, + 'secret_version': {'key': 'secretVersion', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureKeyVaultSecretReference, self).__init__(**kwargs) + self.store = kwargs.get('store', None) + self.secret_name = kwargs.get('secret_name', None) + self.secret_version = kwargs.get('secret_version', None) + self.type = 'AzureKeyVaultSecret' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference_py3.py new file mode 100644 index 000000000000..c5fe4c7afbd4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference_py3.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .secret_base_py3 import SecretBase + + +class AzureKeyVaultSecretReference(SecretBase): + """Azure Key Vault secret reference. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param store: Required. The Azure Key Vault linked service reference. + :type store: ~azure.mgmt.datafactory.models.LinkedServiceReference + :param secret_name: Required. The name of the secret in Azure Key Vault. + Type: string (or Expression with resultType string). + :type secret_name: object + :param secret_version: The version of the secret in Azure Key Vault. The + default value is the latest version of the secret. Type: string (or + Expression with resultType string). 
+ :type secret_version: object + """ + + _validation = { + 'type': {'required': True}, + 'store': {'required': True}, + 'secret_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'store': {'key': 'store', 'type': 'LinkedServiceReference'}, + 'secret_name': {'key': 'secretName', 'type': 'object'}, + 'secret_version': {'key': 'secretVersion', 'type': 'object'}, + } + + def __init__(self, *, store, secret_name, secret_version=None, **kwargs) -> None: + super(AzureKeyVaultSecretReference, self).__init__(**kwargs) + self.store = store + self.secret_name = secret_name + self.secret_version = secret_version + self.type = 'AzureKeyVaultSecret' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service.py new file mode 100644 index 000000000000..d2dc7db88851 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureMariaDBLinkedService(LinkedService): + """Azure Database for MariaDB linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
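A sketch of resolving a secret through the new AzureKeyVaultSecretReference (the linked service and secret names are placeholders; omitting secret_version selects the latest version, per the docstring):

    from azure.mgmt.datafactory.models import (
        AzureKeyVaultSecretReference, LinkedServiceReference)

    secret = AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='MyAzureKeyVaultLS'),  # placeholder
        secret_name='storage-connection-string')                           # placeholder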
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureMariaDBLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.pwd = kwargs.get('pwd', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureMariaDB' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service_py3.py new file mode 100644 index 000000000000..c80015ed6b45 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureMariaDBLinkedService(LinkedService): + """Azure Database for MariaDB linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: + super(AzureMariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.pwd = pwd + self.encrypted_credential = encrypted_credential + self.type = 'AzureMariaDB' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source.py new file mode 100644 index 000000000000..229e6f4311e3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class AzureMariaDBSource(CopySource): + """A copy activity Azure MariaDB source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
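A sketch of the new AzureMariaDBLinkedService, combining a plain ODBC connection string with a Key Vault reference for the password (server, database, and secret names are placeholders):

    from azure.mgmt.datafactory.models import (
        AzureKeyVaultSecretReference, AzureMariaDBLinkedService,
        LinkedServiceReference)

    mariadb_ls = AzureMariaDBLinkedService(
        connection_string=('Server=myserver.mariadb.database.azure.com;'
                           'Port=3306;Database=mydb;UID=myuser'),  # placeholder
        pwd=AzureKeyVaultSecretReference(
            store=LinkedServiceReference(reference_name='MyAzureKeyVaultLS'),
            secret_name='mariadb-password'))                       # placeholder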
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureMariaDBSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'AzureMariaDBSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source_py3.py new file mode 100644 index 000000000000..11358f899e51 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AzureMariaDBSource(CopySource): + """A copy activity Azure MariaDB source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(AzureMariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'AzureMariaDBSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset.py new file mode 100644 index 000000000000..a06c722279f2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AzureMariaDBTableDataset(Dataset): + """Azure Database for MariaDB dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
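A sketch of the copy-activity source for Azure Database for MariaDB added above (the query is a placeholder):

    from azure.mgmt.datafactory.models import AzureMariaDBSource

    mariadb_source = AzureMariaDBSource(
        query='SELECT id, name FROM sales LIMIT 10')  # placeholder query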
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureMariaDBTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'AzureMariaDBTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset_py3.py new file mode 100644 index 000000000000..9c6fd648af20 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureMariaDBTableDataset(Dataset): + """Azure Database for MariaDB dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(AzureMariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'AzureMariaDBTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity.py new file mode 100644 index 000000000000..f6c7c75a1299 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity import ExecutionActivity + + +class AzureMLBatchExecutionActivity(ExecutionActivity): + """Azure ML Batch Execution activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param global_parameters: Key,Value pairs to be passed to the Azure ML + Batch Execution Service endpoint. Keys must match the names of web service + parameters defined in the published Azure ML web service. 
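For reference, a minimal usage sketch of the two new MariaDB models defined above (all reference and table names are illustrative, and the LinkedServiceReference(reference_name=...) constructor is assumed from this same models package, which the docstrings reference):

    from azure.mgmt.datafactory.models import (
        AzureMariaDBSource, AzureMariaDBTableDataset, LinkedServiceReference)

    # Dataset bound to an existing Azure Database for MariaDB linked service.
    dataset = AzureMariaDBTableDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='AzureMariaDBLinkedService'),
        table_name='inventory')

    # Copy activity source; 'query' may be a literal string or an Expression,
    # per the docstring.
    source = AzureMariaDBSource(query='SELECT * FROM inventory')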
Values will be
+ passed in the GlobalParameters property of the Azure ML batch execution
+ request.
+ :type global_parameters: dict[str, object]
+ :param web_service_outputs: Key,Value pairs, mapping the names of Azure ML
+ endpoint's Web Service Outputs to AzureMLWebServiceFile objects specifying
+ the output Blob locations. This information will be passed in the
+ WebServiceOutputs property of the Azure ML batch execution request.
+ :type web_service_outputs: dict[str,
+ ~azure.mgmt.datafactory.models.AzureMLWebServiceFile]
+ :param web_service_inputs: Key,Value pairs, mapping the names of Azure ML
+ endpoint's Web Service Inputs to AzureMLWebServiceFile objects specifying
+ the input Blob locations. This information will be passed in the
+ WebServiceInputs property of the Azure ML batch execution request.
+ :type web_service_inputs: dict[str,
+ ~azure.mgmt.datafactory.models.AzureMLWebServiceFile]
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+ 'global_parameters': {'key': 'typeProperties.globalParameters', 'type': '{object}'},
+ 'web_service_outputs': {'key': 'typeProperties.webServiceOutputs', 'type': '{AzureMLWebServiceFile}'},
+ 'web_service_inputs': {'key': 'typeProperties.webServiceInputs', 'type': '{AzureMLWebServiceFile}'},
+ }
+
+ def __init__(self, **kwargs):
+ super(AzureMLBatchExecutionActivity, self).__init__(**kwargs)
+ self.global_parameters = kwargs.get('global_parameters', None)
+ self.web_service_outputs = kwargs.get('web_service_outputs', None)
+ self.web_service_inputs = kwargs.get('web_service_inputs', None)
+ self.type = 'AzureMLBatchExecution'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity_py3.py
new file mode 100644
index 000000000000..e273c0b38128
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity_py3.py
@@ -0,0 +1,82 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .execution_activity_py3 import ExecutionActivity
+
+
+class AzureMLBatchExecutionActivity(ExecutionActivity):
+ """Azure ML Batch Execution activity.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
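A sketch of constructing the new batch-execution activity, assuming the AzureMLWebServiceFile and LinkedServiceReference models from this package (names, paths, and dictionary keys are illustrative; per the docstring, keys must match the published web service's parameter and input names):

    from azure.mgmt.datafactory.models import (
        AzureMLBatchExecutionActivity, AzureMLWebServiceFile,
        LinkedServiceReference)

    score = AzureMLBatchExecutionActivity(
        name='ScoreBatch',
        linked_service_name=LinkedServiceReference(
            reference_name='AzureMLLinkedService'),
        # Keys must match parameters published on the Azure ML web service.
        global_parameters={'Database query': 'SELECT 1'},
        web_service_inputs={
            'input1': AzureMLWebServiceFile(
                file_path='mycontainer/input.csv',
                linked_service_name=LinkedServiceReference(
                    reference_name='AzureStorageLinkedService'))})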
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param linked_service_name: Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param policy: Activity policy.
+ :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+ :param global_parameters: Key,Value pairs to be passed to the Azure ML
+ Batch Execution Service endpoint. Keys must match the names of web service
+ parameters defined in the published Azure ML web service. Values will be
+ passed in the GlobalParameters property of the Azure ML batch execution
+ request.
+ :type global_parameters: dict[str, object]
+ :param web_service_outputs: Key,Value pairs, mapping the names of Azure ML
+ endpoint's Web Service Outputs to AzureMLWebServiceFile objects specifying
+ the output Blob locations. This information will be passed in the
+ WebServiceOutputs property of the Azure ML batch execution request.
+ :type web_service_outputs: dict[str,
+ ~azure.mgmt.datafactory.models.AzureMLWebServiceFile]
+ :param web_service_inputs: Key,Value pairs, mapping the names of Azure ML
+ endpoint's Web Service Inputs to AzureMLWebServiceFile objects specifying
+ the input Blob locations. This information will be passed in the
+ WebServiceInputs property of the Azure ML batch execution request.
+ :type web_service_inputs: dict[str,
+ ~azure.mgmt.datafactory.models.AzureMLWebServiceFile]
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+ 'global_parameters': {'key': 'typeProperties.globalParameters', 'type': '{object}'},
+ 'web_service_outputs': {'key': 'typeProperties.webServiceOutputs', 'type': '{AzureMLWebServiceFile}'},
+ 'web_service_inputs': {'key': 'typeProperties.webServiceInputs', 'type': '{AzureMLWebServiceFile}'},
+ }
+
+ def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, global_parameters=None, web_service_outputs=None, web_service_inputs=None, **kwargs) -> None:
+ super(AzureMLBatchExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
+ self.global_parameters = global_parameters
+ self.web_service_outputs = web_service_outputs
+ self.web_service_inputs = web_service_inputs
+ self.type = 'AzureMLBatchExecution'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service.py
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service.py new file mode 100644 index 000000000000..08dfec98a6bf --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service.py @@ -0,0 +1,94 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureMLLinkedService(LinkedService): + """Azure ML Web Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param ml_endpoint: Required. The Batch Execution REST URL for an Azure ML + Web Service endpoint. Type: string (or Expression with resultType string). + :type ml_endpoint: object + :param api_key: Required. The API key for accessing the Azure ML model + endpoint. + :type api_key: ~azure.mgmt.datafactory.models.SecretBase + :param update_resource_endpoint: The Update Resource REST URL for an Azure + ML Web Service endpoint. Type: string (or Expression with resultType + string). + :type update_resource_endpoint: object + :param service_principal_id: The ID of the service principal used to + authenticate against the ARM-based updateResourceEndpoint of an Azure ML + web service. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to + authenticate against the ARM-based updateResourceEndpoint of an Azure ML + web service. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'ml_endpoint': {'required': True}, + 'api_key': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'ml_endpoint': {'key': 'typeProperties.mlEndpoint', 'type': 'object'}, + 'api_key': {'key': 'typeProperties.apiKey', 'type': 'SecretBase'}, + 'update_resource_endpoint': {'key': 'typeProperties.updateResourceEndpoint', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureMLLinkedService, self).__init__(**kwargs) + self.ml_endpoint = kwargs.get('ml_endpoint', None) + self.api_key = kwargs.get('api_key', None) + self.update_resource_endpoint = kwargs.get('update_resource_endpoint', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureML' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service_py3.py new file mode 100644 index 000000000000..c77a692adc03 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service_py3.py @@ -0,0 +1,94 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureMLLinkedService(LinkedService): + """Azure ML Web Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param ml_endpoint: Required. 
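A minimal construction sketch for the AzureMLLinkedService above; the endpoint URL and key are placeholders, and SecureString is assumed as the SecretBase implementation for api_key (an AzureKeyVaultSecretReference would satisfy it equally):

    from azure.mgmt.datafactory.models import AzureMLLinkedService, SecureString

    aml = AzureMLLinkedService(
        # Batch Execution REST URL of the published web service (illustrative).
        ml_endpoint='https://ussouthcentral.services.azureml.net/workspaces/<ws>/services/<svc>/jobs',
        api_key=SecureString(value='<api-key>'))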
The Batch Execution REST URL for an Azure ML + Web Service endpoint. Type: string (or Expression with resultType string). + :type ml_endpoint: object + :param api_key: Required. The API key for accessing the Azure ML model + endpoint. + :type api_key: ~azure.mgmt.datafactory.models.SecretBase + :param update_resource_endpoint: The Update Resource REST URL for an Azure + ML Web Service endpoint. Type: string (or Expression with resultType + string). + :type update_resource_endpoint: object + :param service_principal_id: The ID of the service principal used to + authenticate against the ARM-based updateResourceEndpoint of an Azure ML + web service. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to + authenticate against the ARM-based updateResourceEndpoint of an Azure ML + web service. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'ml_endpoint': {'required': True}, + 'api_key': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'ml_endpoint': {'key': 'typeProperties.mlEndpoint', 'type': 'object'}, + 'api_key': {'key': 'typeProperties.apiKey', 'type': 'SecretBase'}, + 'update_resource_endpoint': {'key': 'typeProperties.updateResourceEndpoint', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, ml_endpoint, api_key, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, update_resource_endpoint=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: + super(AzureMLLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.ml_endpoint = ml_endpoint + self.api_key = api_key + self.update_resource_endpoint = update_resource_endpoint + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential + self.type = 'AzureML' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity.py new 
file mode 100644 index 000000000000..c47a2d81648e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity.py @@ -0,0 +1,81 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity import ExecutionActivity + + +class AzureMLUpdateResourceActivity(ExecutionActivity): + """Azure ML Update Resource management activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param trained_model_name: Required. Name of the Trained Model module in + the Web Service experiment to be updated. Type: string (or Expression with + resultType string). + :type trained_model_name: object + :param trained_model_linked_service_name: Required. Name of Azure Storage + linked service holding the .ilearner file that will be uploaded by the + update operation. + :type trained_model_linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param trained_model_file_path: Required. The relative file path in + trainedModelLinkedService to represent the .ilearner file that will be + uploaded by the update operation. Type: string (or Expression with + resultType string). 
+ :type trained_model_file_path: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'trained_model_name': {'required': True}, + 'trained_model_linked_service_name': {'required': True}, + 'trained_model_file_path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'trained_model_name': {'key': 'typeProperties.trainedModelName', 'type': 'object'}, + 'trained_model_linked_service_name': {'key': 'typeProperties.trainedModelLinkedServiceName', 'type': 'LinkedServiceReference'}, + 'trained_model_file_path': {'key': 'typeProperties.trainedModelFilePath', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureMLUpdateResourceActivity, self).__init__(**kwargs) + self.trained_model_name = kwargs.get('trained_model_name', None) + self.trained_model_linked_service_name = kwargs.get('trained_model_linked_service_name', None) + self.trained_model_file_path = kwargs.get('trained_model_file_path', None) + self.type = 'AzureMLUpdateResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity_py3.py new file mode 100644 index 000000000000..50a5932f0bf0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity_py3.py @@ -0,0 +1,81 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class AzureMLUpdateResourceActivity(ExecutionActivity): + """Azure ML Update Resource management activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param trained_model_name: Required. 
Name of the Trained Model module in + the Web Service experiment to be updated. Type: string (or Expression with + resultType string). + :type trained_model_name: object + :param trained_model_linked_service_name: Required. Name of Azure Storage + linked service holding the .ilearner file that will be uploaded by the + update operation. + :type trained_model_linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param trained_model_file_path: Required. The relative file path in + trainedModelLinkedService to represent the .ilearner file that will be + uploaded by the update operation. Type: string (or Expression with + resultType string). + :type trained_model_file_path: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'trained_model_name': {'required': True}, + 'trained_model_linked_service_name': {'required': True}, + 'trained_model_file_path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'trained_model_name': {'key': 'typeProperties.trainedModelName', 'type': 'object'}, + 'trained_model_linked_service_name': {'key': 'typeProperties.trainedModelLinkedServiceName', 'type': 'LinkedServiceReference'}, + 'trained_model_file_path': {'key': 'typeProperties.trainedModelFilePath', 'type': 'object'}, + } + + def __init__(self, *, name: str, trained_model_name, trained_model_linked_service_name, trained_model_file_path, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, **kwargs) -> None: + super(AzureMLUpdateResourceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.trained_model_name = trained_model_name + self.trained_model_linked_service_name = trained_model_linked_service_name + self.trained_model_file_path = trained_model_file_path + self.type = 'AzureMLUpdateResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file.py new file mode 100644 index 000000000000..682b24fed830 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class AzureMLWebServiceFile(Model): + """Azure ML WebService Input/Output file. 
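A sketch of the update-resource activity above; its validation map requires all three trainedModel* properties, and every name and path here is illustrative:

    from azure.mgmt.datafactory.models import (
        AzureMLUpdateResourceActivity, LinkedServiceReference)

    update = AzureMLUpdateResourceActivity(
        name='UpdateModel',
        trained_model_name='Trained Model',
        # Azure Storage linked service holding the new .ilearner file.
        trained_model_linked_service_name=LinkedServiceReference(
            reference_name='AzureStorageLinkedService'),
        trained_model_file_path='models/retrained.ilearner')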
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param file_path: Required. The relative file path, including container
+ name, in the Azure Blob Storage specified by the LinkedService. Type:
+ string (or Expression with resultType string).
+ :type file_path: object
+ :param linked_service_name: Required. Reference to an Azure Storage
+ LinkedService, where the Azure ML WebService Input/Output file is located.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ """
+
+ _validation = {
+ 'file_path': {'required': True},
+ 'linked_service_name': {'required': True},
+ }
+
+ _attribute_map = {
+ 'file_path': {'key': 'filePath', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ }
+
+ def __init__(self, **kwargs):
+ super(AzureMLWebServiceFile, self).__init__(**kwargs)
+ self.file_path = kwargs.get('file_path', None)
+ self.linked_service_name = kwargs.get('linked_service_name', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file_py3.py
new file mode 100644
index 000000000000..abe75d9d9bf2
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file_py3.py
@@ -0,0 +1,43 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class AzureMLWebServiceFile(Model):
+ """Azure ML WebService Input/Output file.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param file_path: Required. The relative file path, including container
+ name, in the Azure Blob Storage specified by the LinkedService. Type:
+ string (or Expression with resultType string).
+ :type file_path: object
+ :param linked_service_name: Required. Reference to an Azure Storage
+ LinkedService, where the Azure ML WebService Input/Output file is located.
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + """ + + _validation = { + 'file_path': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'file_path': {'key': 'filePath', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + } + + def __init__(self, *, file_path, linked_service_name, **kwargs) -> None: + super(AzureMLWebServiceFile, self).__init__(**kwargs) + self.file_path = file_path + self.linked_service_name = linked_service_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service.py new file mode 100644 index 000000000000..aedbdbb73eb5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureMySqlLinkedService(LinkedService): + """Azure MySQL database linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
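The same file model describes outputs as well; a sketch of a web_service_outputs mapping for the batch-execution activity (names illustrative; the key must match the endpoint's Web Service Output name, and the path includes the blob container, per the docstring):

    from azure.mgmt.datafactory.models import (
        AzureMLWebServiceFile, LinkedServiceReference)

    web_service_outputs = {
        'output1': AzureMLWebServiceFile(
            file_path='mycontainer/scored/output.csv',
            linked_service_name=LinkedServiceReference(
                reference_name='AzureStorageLinkedService'))}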
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureMySqlLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureMySql' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service_py3.py new file mode 100644 index 000000000000..57692275f564 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service_py3.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureMySqlLinkedService(LinkedService): + """Azure MySQL database linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(AzureMySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'AzureMySql' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_sink.py new file mode 100644 index 000000000000..b3ee0bbc8645 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_sink.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class AzureMySqlSink(CopySink): + """A copy activity Azure MySql sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
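A sketch of the MySQL linked service above with the password kept out of the connection string via Key Vault; the AzureKeyVaultSecretReference(store=..., secret_name=...) signature is assumed from this package, and server, database, and secret names are illustrative:

    from azure.mgmt.datafactory.models import (
        AzureKeyVaultSecretReference, AzureMySqlLinkedService,
        LinkedServiceReference)

    mysql = AzureMySqlLinkedService(
        connection_string='Server=myserver.mysql.database.azure.com;'
                          'Port=3306;Database=mydb;UID=user@myserver',
        password=AzureKeyVaultSecretReference(
            store=LinkedServiceReference(reference_name='AzureKeyVault'),
            secret_name='mysql-password'))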
+ :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). + :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureMySqlSink, self).__init__(**kwargs) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.type = 'AzureMySqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_sink_py3.py new file mode 100644 index 000000000000..340c10f5988b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_sink_py3.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzureMySqlSink(CopySink): + """A copy activity Azure MySql sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). 
+ :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(AzureMySqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.pre_copy_script = pre_copy_script + self.type = 'AzureMySqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source.py new file mode 100644 index 000000000000..823336432567 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class AzureMySqlSource(CopySource): + """A copy activity Azure MySQL source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
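A minimal sketch of the sink above (table name illustrative); pre_copy_script runs once before the copy starts, and write_batch_timeout follows the documented timespan pattern:

    from azure.mgmt.datafactory.models import AzureMySqlSink

    sink = AzureMySqlSink(
        pre_copy_script='TRUNCATE TABLE staging_orders',
        write_batch_size=10000,
        write_batch_timeout='00:30:00')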
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureMySqlSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'AzureMySqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source_py3.py new file mode 100644 index 000000000000..7030738d2615 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AzureMySqlSource(CopySource): + """A copy activity Azure MySQL source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'AzureMySqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset.py new file mode 100644 index 000000000000..8f5d43478089 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AzureMySqlTableDataset(Dataset): + """The Azure MySQL database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Azure MySQL database table name. Type: string (or + Expression with resultType string). 
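A sketch of how the new source and sink meet inside a copy activity, assuming the CopyActivity and DatasetReference models from this same package; all reference names are illustrative:

    from azure.mgmt.datafactory.models import (
        AzureMySqlSink, AzureMySqlSource, CopyActivity, DatasetReference)

    copy = CopyActivity(
        name='CopyOrders',
        inputs=[DatasetReference(reference_name='OrdersInputDataset')],
        outputs=[DatasetReference(reference_name='OrdersOutputDataset')],
        source=AzureMySqlSource(query='SELECT * FROM orders'),
        sink=AzureMySqlSink(pre_copy_script='TRUNCATE TABLE orders'))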
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureMySqlTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'AzureMySqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset_py3.py new file mode 100644 index 000000000000..7bd7eb6f17f8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureMySqlTableDataset(Dataset): + """The Azure MySQL database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Azure MySQL database table name. Type: string (or + Expression with resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(AzureMySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'AzureMySqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service.py new file mode 100644 index 000000000000..92359d6d6a10 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzurePostgreSqlLinkedService(LinkedService): + """Azure PostgreSQL linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. 
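Assuming a linked service named 'AzureMySqlLS' already exists in the factory (a placeholder for this sketch), the new table dataset can be wired up like so:

    # Illustrative only: reference the linked service by name and pick a table.
    from azure.mgmt.datafactory.models import (
        AzureMySqlTableDataset,
        LinkedServiceReference,
    )

    dataset = AzureMySqlTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='AzureMySqlLS'),
        table_name='customers',  # serialized as typeProperties.tableName
    )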
Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzurePostgreSqlLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzurePostgreSql' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service_py3.py new file mode 100644 index 000000000000..47f8f17980f8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzurePostgreSqlLinkedService(LinkedService): + """Azure PostgreSQL linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. 
Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(AzurePostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'AzurePostgreSql' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink.py new file mode 100644 index 000000000000..6214e1ba1f22 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class AzurePostgreSqlSink(CopySink): + """A copy activity Azure PostgreSQL sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. 
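A hedged sketch of the Key Vault pattern the password property above is designed for; the server, database, and secret names are invented for illustration:

    # Keep the password out of the connection string and pull it from Key Vault.
    from azure.mgmt.datafactory.models import (
        AzureKeyVaultSecretReference,
        AzurePostgreSqlLinkedService,
        LinkedServiceReference,
    )

    ls = AzurePostgreSqlLinkedService(
        connection_string='host=myserver.postgres.database.azure.com;'
                          'port=5432;database=mydb;uid=myuser',
        password=AzureKeyVaultSecretReference(
            store=LinkedServiceReference(reference_name='AzureKeyVaultLS'),
            secret_name='pg-password',
        ),
    )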
Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). + :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzurePostgreSqlSink, self).__init__(**kwargs) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.type = 'AzurePostgreSqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink_py3.py new file mode 100644 index 000000000000..b7cd0ec51a29 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink_py3.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzurePostgreSqlSink(CopySink): + """A copy activity Azure PostgreSQL sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). 
+ :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.pre_copy_script = pre_copy_script + self.type = 'AzurePostgreSqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source.py new file mode 100644 index 000000000000..e0cd62fd8028 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class AzurePostgreSqlSource(CopySource): + """A copy activity Azure PostgreSQL source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
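As a sketch, the pre_copy_script hook above is typically used to clear a staging table before the copy runs; the table name here is a placeholder:

    from azure.mgmt.datafactory.models import AzurePostgreSqlSink

    sink = AzurePostgreSqlSink(
        pre_copy_script='TRUNCATE TABLE public.staging_orders',  # runs once, before the copy
        write_batch_size=10000,  # integer or ADF Expression
    )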
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzurePostgreSqlSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'AzurePostgreSqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source_py3.py new file mode 100644 index 000000000000..0362b0dca390 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AzurePostgreSqlSource(CopySource): + """A copy activity Azure PostgreSQL source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'AzurePostgreSqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset.py new file mode 100644 index 000000000000..933264b57a9b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset.py @@ -0,0 +1,84 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AzurePostgreSqlTableDataset(Dataset): + """Azure PostgreSQL dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name of the Azure PostgreSQL database which + includes both schema and table. Type: string (or Expression with + resultType string). + :type table_name: object + :param table: The table name of the Azure PostgreSQL database. 
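To show where the source model slots in, a hedged sketch of a copy activity pairing the new PostgreSQL source and sink; the dataset reference names are hypothetical:

    from azure.mgmt.datafactory.models import (
        AzurePostgreSqlSink,
        AzurePostgreSqlSource,
        CopyActivity,
        DatasetReference,
    )

    activity = CopyActivity(
        name='CopyFromAzurePostgres',
        inputs=[DatasetReference(reference_name='PgInputDataset')],
        outputs=[DatasetReference(reference_name='PgOutputDataset')],
        source=AzurePostgreSqlSource(query='SELECT * FROM public.orders'),
        sink=AzurePostgreSqlSink(),
    )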
Type: + string (or Expression with resultType string). + :type table: object + :param azure_postgre_sql_table_dataset_schema: The schema name of the + Azure PostgreSQL database. Type: string (or Expression with resultType + string). + :type azure_postgre_sql_table_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'azure_postgre_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzurePostgreSqlTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.azure_postgre_sql_table_dataset_schema = kwargs.get('azure_postgre_sql_table_dataset_schema', None) + self.type = 'AzurePostgreSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset_py3.py new file mode 100644 index 000000000000..485dc3efb102 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset_py3.py @@ -0,0 +1,84 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzurePostgreSqlTableDataset(Dataset): + """Azure PostgreSQL dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name of the Azure PostgreSQL database which + includes both schema and table. Type: string (or Expression with + resultType string). + :type table_name: object + :param table: The table name of the Azure PostgreSQL database. Type: + string (or Expression with resultType string). + :type table: object + :param azure_postgre_sql_table_dataset_schema: The schema name of the + Azure PostgreSQL database. Type: string (or Expression with resultType + string). + :type azure_postgre_sql_table_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'azure_postgre_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, azure_postgre_sql_table_dataset_schema=None, **kwargs) -> None: + super(AzurePostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.table = table + self.azure_postgre_sql_table_dataset_schema = azure_postgre_sql_table_dataset_schema + self.type = 'AzurePostgreSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink.py new file mode 100644 index 000000000000..9f3a63db4978 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink.py @@ -0,0 +1,61 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
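Note that the long azure_postgre_sql_table_dataset_schema name exists because the base Dataset class already claims the schema attribute for column metadata: per the attribute maps above, the base attribute serializes to the top-level schema key while the long one serializes to typeProperties.schema. A sketch with placeholder names:

    from azure.mgmt.datafactory.models import (
        AzurePostgreSqlTableDataset,
        LinkedServiceReference,
    )

    dataset = AzurePostgreSqlTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='AzurePostgreSqlLS'),
        table='orders',                                   # typeProperties.table
        azure_postgre_sql_table_dataset_schema='public',  # typeProperties.schema
    )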
+# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class AzureQueueSink(CopySink): + """A copy activity Azure Queue sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AzureQueueSink, self).__init__(**kwargs) + self.type = 'AzureQueueSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink_py3.py new file mode 100644 index 000000000000..db2fb60ddb1e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink_py3.py @@ -0,0 +1,61 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzureQueueSink(CopySink): + """A copy activity Azure Queue sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. 
Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: + super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureQueueSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset.py new file mode 100644 index 000000000000..1239bbad78fc --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AzureSearchIndexDataset(Dataset): + """The Azure Search Index. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. 
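AzureQueueSink adds no type properties of its own beyond the shared CopySink settings, so construction is trivial; a one-line sketch:

    from azure.mgmt.datafactory.models import AzureQueueSink

    sink = AzureQueueSink(max_concurrent_connections=4)  # all kwargs optional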
Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param index_name: Required. The name of the Azure Search Index. Type: + string (or Expression with resultType string). + :type index_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'index_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'index_name': {'key': 'typeProperties.indexName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSearchIndexDataset, self).__init__(**kwargs) + self.index_name = kwargs.get('index_name', None) + self.type = 'AzureSearchIndex' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset_py3.py new file mode 100644 index 000000000000..da5e92dd2edd --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureSearchIndexDataset(Dataset): + """The Azure Search Index. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param index_name: Required. The name of the Azure Search Index. Type: + string (or Expression with resultType string). + :type index_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'index_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'index_name': {'key': 'typeProperties.indexName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, index_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(AzureSearchIndexDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.index_name = index_name + self.type = 'AzureSearchIndex' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py new file mode 100644 index 000000000000..9aae64af8da0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class AzureSearchIndexSink(CopySink): + """A copy activity Azure Search Index sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. 
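Unlike most dataset type properties, index_name is required here (see the _validation map above); a sketch with invented names:

    from azure.mgmt.datafactory.models import (
        AzureSearchIndexDataset,
        LinkedServiceReference,
    )

    dataset = AzureSearchIndexDataset(
        linked_service_name=LinkedServiceReference(reference_name='AzureSearchLS'),
        index_name='products',  # required; omitting it fails client-side validation
    )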
Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param write_behavior: Specify the write behavior when upserting documents + into Azure Search Index. Possible values include: 'Merge', 'Upload' + :type write_behavior: str or + ~azure.mgmt.datafactory.models.AzureSearchIndexWriteBehaviorType + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AzureSearchIndexSink, self).__init__(**kwargs) + self.write_behavior = kwargs.get('write_behavior', None) + self.type = 'AzureSearchIndexSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py new file mode 100644 index 000000000000..3cd887a2512c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzureSearchIndexSink(CopySink): + """A copy activity Azure Search Index sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). 
+ :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param write_behavior: Specify the write behavior when upserting documents + into Azure Search Index. Possible values include: 'Merge', 'Upload' + :type write_behavior: str or + ~azure.mgmt.datafactory.models.AzureSearchIndexWriteBehaviorType + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: + super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.write_behavior = write_behavior + self.type = 'AzureSearchIndexSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service.py new file mode 100644 index 000000000000..782799cd5b28 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureSearchLinkedService(LinkedService): + """Linked service for Windows Azure Search Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
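The write_behavior enum above controls upsert semantics; a minimal sketch:

    from azure.mgmt.datafactory.models import AzureSearchIndexSink

    # 'Merge' updates fields on existing documents; 'Upload' replaces them.
    sink = AzureSearchIndexSink(write_behavior='Merge')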
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. URL for Azure Search service. Type: string (or + Expression with resultType string). + :type url: object + :param key: Admin Key for Azure Search service + :type key: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'key': {'key': 'typeProperties.key', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSearchLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.key = kwargs.get('key', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureSearch' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service_py3.py new file mode 100644 index 000000000000..8589c3aead91 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureSearchLinkedService(LinkedService): + """Linked service for Windows Azure Search Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. 
+ :type type: str + :param url: Required. URL for Azure Search service. Type: string (or + Expression with resultType string). + :type url: object + :param key: Admin Key for Azure Search service + :type key: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'key': {'key': 'typeProperties.key', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, key=None, encrypted_credential=None, **kwargs) -> None: + super(AzureSearchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.key = key + self.encrypted_credential = encrypted_credential + self.type = 'AzureSearch' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service.py new file mode 100644 index 000000000000..0da66637a04f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureSqlDatabaseLinkedService(LinkedService): + """Microsoft Azure SQL Database linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. 
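A sketch of registering the linked service, assuming the 0.x client signature used in the ADF Python samples; the client, resource group, and factory names are placeholders:

    from azure.mgmt.datafactory.models import (
        AzureSearchLinkedService,
        SecureString,
    )

    ls = AzureSearchLinkedService(
        url='https://mysearch.search.windows.net',  # required
        key=SecureString(value='<admin-key>'),      # any SecretBase works here
    )
    # client is an already-constructed DataFactoryManagementClient.
    client.linked_services.create_or_update('myRG', 'myFactory', 'AzureSearchLS', ls)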
+ :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param service_principal_id: The ID of the service principal used to + authenticate against Azure SQL Database. Type: string (or Expression with + resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to + authenticate against Azure SQL Database. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSqlDatabaseLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureSqlDatabase' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service_py3.py new file mode 100644 index 000000000000..dbcf6c88b134 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service_py3.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
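Before the py3 variant below, a hedged sketch of the model just defined; the linked-service name, server, and secret name are placeholders, not values from this patch.

# Minimal sketch: password resolved from Key Vault at runtime (placeholder names).
from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference, AzureSqlDatabaseLinkedService,
    LinkedServiceReference)

akv = LinkedServiceReference(reference_name='ExampleKeyVaultLS')  # hypothetical AKV linked service
sql_db_ls = AzureSqlDatabaseLinkedService(
    connection_string='Server=tcp:example.database.windows.net;Database=exampledb;User ID=admin;',
    password=AzureKeyVaultSecretReference(store=akv, secret_name='sql-password'),
)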
+# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureSqlDatabaseLinkedService(LinkedService): + """Microsoft Azure SQL Database linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param service_principal_id: The ID of the service principal used to + authenticate against Azure SQL Database. Type: string (or Expression with + resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to + authenticate against Azure SQL Database. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: + super(AzureSqlDatabaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.password = password + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential + self.type = 'AzureSqlDatabase' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service.py new file mode 100644 index 000000000000..cc7c9d58d19f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service.py @@ -0,0 +1,88 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureSqlDWLinkedService(LinkedService): + """Azure SQL Data Warehouse linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. 
+ :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param service_principal_id: The ID of the service principal used to + authenticate against Azure SQL Data Warehouse. Type: string (or Expression + with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to + authenticate against Azure SQL Data Warehouse. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSqlDWLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureSqlDW' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service_py3.py new file mode 100644 index 000000000000..5c75f3904b37 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service_py3.py @@ -0,0 +1,88 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information.
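A hedged sketch of service-principal authentication with the class above; every identifier below (server, app ID, key, tenant) is a placeholder.

# Minimal sketch: AzureSqlDWLinkedService with service principal credentials.
from azure.mgmt.datafactory.models import AzureSqlDWLinkedService, SecureString

dw_ls = AzureSqlDWLinkedService(
    connection_string='Server=tcp:example-dw.database.windows.net;Database=exampledw;',
    service_principal_id='00000000-0000-0000-0000-000000000000',  # placeholder app ID
    service_principal_key=SecureString(value='<sp-key>'),
    tenant='contoso.onmicrosoft.com',  # placeholder tenant
)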
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureSqlDWLinkedService(LinkedService): + """Azure SQL Data Warehouse linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param service_principal_id: The ID of the service principal used to + authenticate against Azure SQL Data Warehouse. Type: string (or Expression + with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to + authenticate against Azure SQL Data Warehouse. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: + super(AzureSqlDWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.password = password + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential + self.type = 'AzureSqlDW' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py new file mode 100644 index 000000000000..ed9fe8904d73 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AzureSqlDWTableDataset(Dataset): + """The Azure SQL Data Warehouse dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param azure_sql_dw_table_dataset_schema: The schema name of the Azure SQL + Data Warehouse. Type: string (or Expression with resultType string). + :type azure_sql_dw_table_dataset_schema: object + :param table: The table name of the Azure SQL Data Warehouse. Type: string + (or Expression with resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'azure_sql_dw_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSqlDWTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.azure_sql_dw_table_dataset_schema = kwargs.get('azure_sql_dw_table_dataset_schema', None) + self.table = kwargs.get('table', None) + self.type = 'AzureSqlDWTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py new file mode 100644 index 000000000000..a38e4ab479c9 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureSqlDWTableDataset(Dataset): + """The Azure SQL Data Warehouse dataset. + + All required parameters must be populated in order to send to Azure. 
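A sketch of the recommended addressing for the dataset above, using the newer schema + table pair rather than the retiring table_name; the names below are placeholders.

# Minimal sketch: DW table dataset addressed by schema + table.
from azure.mgmt.datafactory.models import (
    AzureSqlDWTableDataset, LinkedServiceReference)

dw_dataset = AzureSqlDWTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='ExampleDWLS'),  # hypothetical
    azure_sql_dw_table_dataset_schema='dbo',  # maps to typeProperties.schema
    table='FactSales',                        # maps to typeProperties.table
)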
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param azure_sql_dw_table_dataset_schema: The schema name of the Azure SQL + Data Warehouse. Type: string (or Expression with resultType string). + :type azure_sql_dw_table_dataset_schema: object + :param table: The table name of the Azure SQL Data Warehouse. Type: string + (or Expression with resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'azure_sql_dw_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, azure_sql_dw_table_dataset_schema=None, table=None, **kwargs) -> None: + super(AzureSqlDWTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.azure_sql_dw_table_dataset_schema = azure_sql_dw_table_dataset_schema + self.table = table + self.type = 'AzureSqlDWTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service.py new file mode 100644 index 
000000000000..2aab3a145ff2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureSqlMILinkedService(LinkedService): + """Azure SQL Managed Instance linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param service_principal_id: The ID of the service principal used to + authenticate against Azure SQL Managed Instance. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to + authenticate against Azure SQL Managed Instance. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSqlMILinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureSqlMI' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service_py3.py new file mode 100644 index 000000000000..ec1a2e5e8549 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service_py3.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureSqlMILinkedService(LinkedService): + """Azure SQL Managed Instance linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. 
+ :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param service_principal_id: The ID of the service principal used to + authenticate against Azure SQL Managed Instance. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to + authenticate against Azure SQL Managed Instance. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: + super(AzureSqlMILinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.password = password + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential + self.type = 'AzureSqlMI' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset.py new file mode 100644 index 000000000000..1128a9e8cb06 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
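As with the Database and DW variants, a placeholder-valued sketch of the Managed Instance linked service defined above.

# Minimal sketch: AzureSqlMILinkedService (all values are placeholders).
from azure.mgmt.datafactory.models import AzureSqlMILinkedService, SecureString

mi_ls = AzureSqlMILinkedService(
    connection_string='Server=example-mi.public.dns.zone,3342;Database=exampledb;',
    service_principal_id='<app-id>',
    service_principal_key=SecureString(value='<sp-key>'),
    tenant='<tenant-id>',
)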
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AzureSqlMITableDataset(Dataset): + """The Azure SQL Managed Instance dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param azure_sql_mi_table_dataset_schema: The schema name of the Azure SQL + Managed Instance. Type: string (or Expression with resultType string). + :type azure_sql_mi_table_dataset_schema: object + :param table: The table name of the Azure SQL Managed Instance dataset. + Type: string (or Expression with resultType string). 
+ :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'azure_sql_mi_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSqlMITableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.azure_sql_mi_table_dataset_schema = kwargs.get('azure_sql_mi_table_dataset_schema', None) + self.table = kwargs.get('table', None) + self.type = 'AzureSqlMITable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset_py3.py new file mode 100644 index 000000000000..ac72614e3ed4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureSqlMITableDataset(Dataset): + """The Azure SQL Managed Instance dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. 
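A sketch mirroring the DW dataset example, for the Managed Instance dataset just defined; names are placeholders.

# Minimal sketch: MI table dataset addressed by schema + table.
from azure.mgmt.datafactory.models import (
    AzureSqlMITableDataset, LinkedServiceReference)

mi_dataset = AzureSqlMITableDataset(
    linked_service_name=LinkedServiceReference(reference_name='ExampleMILS'),  # hypothetical
    azure_sql_mi_table_dataset_schema='sales',
    table='Orders',
)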
+ :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param azure_sql_mi_table_dataset_schema: The schema name of the Azure SQL + Managed Instance. Type: string (or Expression with resultType string). + :type azure_sql_mi_table_dataset_schema: object + :param table: The table name of the Azure SQL Managed Instance dataset. + Type: string (or Expression with resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'azure_sql_mi_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, azure_sql_mi_table_dataset_schema=None, table=None, **kwargs) -> None: + super(AzureSqlMITableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.azure_sql_mi_table_dataset_schema = azure_sql_mi_table_dataset_schema + self.table = table + self.type = 'AzureSqlMITable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink.py new file mode 100644 index 000000000000..5d93df3d790a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class AzureSqlSink(CopySink): + """A copy activity Azure SQL sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. 
Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). 
+ :type table_option: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSqlSink, self).__init__(**kwargs) + self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) + self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.table_option = kwargs.get('table_option', None) + self.type = 'AzureSqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink_py3.py new file mode 100644 index 000000000000..e4d5e66e18c5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink_py3.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzureSqlSink(CopySink): + """A copy activity Azure SQL sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. 
Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None, **kwargs) -> None: + super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name + self.sql_writer_table_type = sql_writer_table_type + self.pre_copy_script = pre_copy_script + self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = 
stored_procedure_table_type_parameter_name + self.table_option = table_option + self.type = 'AzureSqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source.py new file mode 100644 index 000000000000..b6c62f9a3164 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class AzureSqlSource(CopySource): + """A copy activity Azure SQL source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Database source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. 
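A sketch of the sink defined above; the script and batch size are illustrative, while table_option='autoCreate' is the one value the docstring says is currently supported.

# Minimal sketch: AzureSqlSink with an auto-created target table.
from azure.mgmt.datafactory.models import AzureSqlSink

sql_sink = AzureSqlSink(
    pre_copy_script='TRUNCATE TABLE staging.Orders',  # placeholder pre-copy script
    table_option='autoCreate',                        # only supported value per the docstring
    write_batch_size=10000,
)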
+ :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSqlSource, self).__init__(**kwargs) + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.produce_additional_types = kwargs.get('produce_additional_types', None) + self.type = 'AzureSqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source_py3.py new file mode 100644 index 000000000000..cb5c33d28bb2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AzureSqlSource(CopySource): + """A copy activity Azure SQL source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Database source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). 
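A sketch of the source defined above, reading through a stored procedure; the procedure name and parameter are placeholders, and the 'String' type tag is assumed from the StoredProcedureParameter model.

# Minimal sketch: AzureSqlSource driven by a stored procedure.
from azure.mgmt.datafactory.models import (
    AzureSqlSource, StoredProcedureParameter)

sql_source = AzureSqlSource(
    sql_reader_stored_procedure_name='dbo.usp_GetOrders',  # cannot be combined with sql_reader_query
    stored_procedure_parameters={
        'Region': StoredProcedureParameter(value='EMEA', type='String'),
    },
)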
+ :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. + :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: + super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.produce_additional_types = produce_additional_types + self.type = 'AzureSqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py new file mode 100644 index 000000000000..ce8b08944f3a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AzureSqlTableDataset(Dataset): + """The Azure SQL Server database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. 
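# Illustrative usage (editor's sketch; not part of the generated diff, and the
# names and values below are hypothetical). It shows an AzureSqlSource reading
# through a stored procedure rather than a query; per the docstring above the
# two are mutually exclusive. StoredProcedureParameter is the sibling model
# that the stored_procedure_parameters docstring references, and the
# value/type shape mirrors the example given there.
from azure.mgmt.datafactory.models import AzureSqlSource, StoredProcedureParameter

source = AzureSqlSource(
    sql_reader_stored_procedure_name='usp_GetChangedRows',
    stored_procedure_parameters={
        'Parameter1': StoredProcedureParameter(value='1', type='int'),
    },
    source_retry_count=3,
    source_retry_wait='00:00:30',  # must match the timespan pattern documented above
)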
Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param azure_sql_table_dataset_schema: The schema name of the Azure SQL + database. Type: string (or Expression with resultType string). + :type azure_sql_table_dataset_schema: object + :param table: The table name of the Azure SQL database. Type: string (or + Expression with resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'azure_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSqlTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.azure_sql_table_dataset_schema = kwargs.get('azure_sql_table_dataset_schema', None) + self.table = kwargs.get('table', None) + self.type = 'AzureSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py new file mode 100644 index 000000000000..3ed19ee47e7e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureSqlTableDataset(Dataset): + """The Azure SQL Server database dataset. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param azure_sql_table_dataset_schema: The schema name of the Azure SQL + database. Type: string (or Expression with resultType string). + :type azure_sql_table_dataset_schema: object + :param table: The table name of the Azure SQL database. Type: string (or + Expression with resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'azure_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, azure_sql_table_dataset_schema=None, table=None, **kwargs) -> None: + super(AzureSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.azure_sql_table_dataset_schema = azure_sql_table_dataset_schema + self.table = table + self.type = 'AzureSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service.py new file mode 100644 index 000000000000..202dd7229b90 --- /dev/null 
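# Illustrative usage (editor's sketch; not part of the generated diff). This
# builds an AzureSqlTableDataset addressed with the newer schema + table pair
# instead of the retiring table_name property. LinkedServiceReference is the
# reference model named in the docstring; its reference_name keyword and the
# service/table names here are assumptions for illustration.
from azure.mgmt.datafactory.models import AzureSqlTableDataset, LinkedServiceReference

dataset = AzureSqlTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='AzureSqlDatabaseLS'),
    azure_sql_table_dataset_schema='dbo',  # serialized as typeProperties.schema
    table='SalesOrders',                   # serialized as typeProperties.table
)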
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service.py @@ -0,0 +1,83 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureStorageLinkedService(LinkedService): + """The storage account linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: The connection string. It is mutually exclusive + with sasUri property. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param account_key: The Azure key vault secret reference of accountKey in + connection string. + :type account_key: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param sas_uri: SAS URI of the Azure Storage resource. It is mutually + exclusive with connectionString property. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type sas_uri: object + :param sas_token: The Azure key vault secret reference of sasToken in sas + uri. + :type sas_token: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, + 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AzureStorageLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.account_key = kwargs.get('account_key', None) + self.sas_uri = kwargs.get('sas_uri', None) + self.sas_token = kwargs.get('sas_token', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureStorage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service_py3.py new file mode 100644 index 000000000000..4fac19b70849 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service_py3.py @@ -0,0 +1,83 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureStorageLinkedService(LinkedService): + """The storage account linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: The connection string. It is mutually exclusive + with sasUri property. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param account_key: The Azure key vault secret reference of accountKey in + connection string. 
+ :type account_key: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param sas_uri: SAS URI of the Azure Storage resource. It is mutually + exclusive with connectionString property. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type sas_uri: object + :param sas_token: The Azure key vault secret reference of sasToken in sas + uri. + :type sas_token: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, + 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, account_key=None, sas_uri=None, sas_token=None, encrypted_credential: str=None, **kwargs) -> None: + super(AzureStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.account_key = account_key + self.sas_uri = sas_uri + self.sas_token = sas_token + self.encrypted_credential = encrypted_credential + self.type = 'AzureStorage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset.py new file mode 100644 index 000000000000..eb8dacbfbb98 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AzureTableDataset(Dataset): + """The Azure Table storage dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. 
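# Illustrative usage (editor's sketch; not part of the generated diff, account
# values are placeholders). An AzureStorageLinkedService keyed by connection
# string; connection_string and sas_uri are mutually exclusive per the
# docstring, so only one is set. SecureString is one of the secret models the
# connection_string docstring lists as acceptable.
from azure.mgmt.datafactory.models import AzureStorageLinkedService, SecureString

linked_service = AzureStorageLinkedService(
    connection_string=SecureString(
        value='DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=<redacted>'
    ),
    description='Storage account used for staging copies',
)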
+ :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: Required. The table name of the Azure Table storage. + Type: string (or Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'table_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'AzureTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset_py3.py new file mode 100644 index 000000000000..d70a15fdd6f1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureTableDataset(Dataset): + """The Azure Table storage dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. 
Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: Required. The table name of the Azure Table storage. + Type: string (or Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'table_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(AzureTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'AzureTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink.py new file mode 100644 index 000000000000..3459c9ad3ba1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink.py @@ -0,0 +1,81 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class AzureTableSink(CopySink): + """A copy activity Azure Table sink. + + All required parameters must be populated in order to send to Azure. 
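# Illustrative usage (editor's sketch; not part of the generated diff, names
# are hypothetical). Unlike the Azure SQL dataset above, table_name is a
# required type property on AzureTableDataset, and the _validation map
# enforces it. LinkedServiceReference usage is the same assumption as in the
# earlier sketches.
from azure.mgmt.datafactory.models import AzureTableDataset, LinkedServiceReference

dataset = AzureTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='AzureTableStorageLS'),
    table_name='DeviceTelemetry',  # required; omit it and validation fails client-side
)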
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param azure_table_default_partition_key_value: Azure Table default + partition key value. Type: string (or Expression with resultType string). + :type azure_table_default_partition_key_value: object + :param azure_table_partition_key_name: Azure Table partition key name. + Type: string (or Expression with resultType string). + :type azure_table_partition_key_name: object + :param azure_table_row_key_name: Azure Table row key name. Type: string + (or Expression with resultType string). + :type azure_table_row_key_name: object + :param azure_table_insert_type: Azure Table insert type. Type: string (or + Expression with resultType string). + :type azure_table_insert_type: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'}, + 'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'}, + 'azure_table_row_key_name': {'key': 'azureTableRowKeyName', 'type': 'object'}, + 'azure_table_insert_type': {'key': 'azureTableInsertType', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureTableSink, self).__init__(**kwargs) + self.azure_table_default_partition_key_value = kwargs.get('azure_table_default_partition_key_value', None) + self.azure_table_partition_key_name = kwargs.get('azure_table_partition_key_name', None) + self.azure_table_row_key_name = kwargs.get('azure_table_row_key_name', None) + self.azure_table_insert_type = kwargs.get('azure_table_insert_type', None) + self.type = 'AzureTableSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink_py3.py new file mode 100644 index 000000000000..a15247544879 --- /dev/null +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink_py3.py @@ -0,0 +1,81 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzureTableSink(CopySink): + """A copy activity Azure Table sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param azure_table_default_partition_key_value: Azure Table default + partition key value. Type: string (or Expression with resultType string). + :type azure_table_default_partition_key_value: object + :param azure_table_partition_key_name: Azure Table partition key name. + Type: string (or Expression with resultType string). + :type azure_table_partition_key_name: object + :param azure_table_row_key_name: Azure Table row key name. Type: string + (or Expression with resultType string). + :type azure_table_row_key_name: object + :param azure_table_insert_type: Azure Table insert type. Type: string (or + Expression with resultType string). 
+ :type azure_table_insert_type: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'}, + 'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'}, + 'azure_table_row_key_name': {'key': 'azureTableRowKeyName', 'type': 'object'}, + 'azure_table_insert_type': {'key': 'azureTableInsertType', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, azure_table_default_partition_key_value=None, azure_table_partition_key_name=None, azure_table_row_key_name=None, azure_table_insert_type=None, **kwargs) -> None: + super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.azure_table_default_partition_key_value = azure_table_default_partition_key_value + self.azure_table_partition_key_name = azure_table_partition_key_name + self.azure_table_row_key_name = azure_table_row_key_name + self.azure_table_insert_type = azure_table_insert_type + self.type = 'AzureTableSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source.py new file mode 100644 index 000000000000..fa7ead73eaa9 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class AzureTableSource(CopySource): + """A copy activity Azure Table source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
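# Illustrative usage (editor's sketch; not part of the generated diff). An
# AzureTableSink that controls how written entities are keyed. All four
# azure_table_* properties accept literal strings or ADF expressions; the
# 'merge' insert type is an assumption based on Table storage write semantics,
# since this diff does not enumerate the allowed values.
from azure.mgmt.datafactory.models import AzureTableSink

sink = AzureTableSink(
    azure_table_partition_key_name='DeviceId',
    azure_table_row_key_name='EventId',
    azure_table_insert_type='merge',  # assumed value; 'replace' would overwrite instead
    write_batch_size=100,
)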
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param azure_table_source_query: Azure Table source query. Type: string + (or Expression with resultType string). + :type azure_table_source_query: object + :param azure_table_source_ignore_table_not_found: Azure Table source + ignore table not found. Type: boolean (or Expression with resultType + boolean). + :type azure_table_source_ignore_table_not_found: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'}, + 'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureTableSource, self).__init__(**kwargs) + self.azure_table_source_query = kwargs.get('azure_table_source_query', None) + self.azure_table_source_ignore_table_not_found = kwargs.get('azure_table_source_ignore_table_not_found', None) + self.type = 'AzureTableSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source_py3.py new file mode 100644 index 000000000000..efbac5613219 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source_py3.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AzureTableSource(CopySource): + """A copy activity Azure Table source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param azure_table_source_query: Azure Table source query. 
Type: string + (or Expression with resultType string). + :type azure_table_source_query: object + :param azure_table_source_ignore_table_not_found: Azure Table source + ignore table not found. Type: boolean (or Expression with resultType + boolean). + :type azure_table_source_ignore_table_not_found: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'}, + 'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, azure_table_source_query=None, azure_table_source_ignore_table_not_found=None, **kwargs) -> None: + super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.azure_table_source_query = azure_table_source_query + self.azure_table_source_ignore_table_not_found = azure_table_source_ignore_table_not_found + self.type = 'AzureTableSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service.py new file mode 100644 index 000000000000..c2a8c2498ea6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service.py @@ -0,0 +1,83 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureTableStorageLinkedService(LinkedService): + """The azure table storage linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: The connection string. It is mutually exclusive + with sasUri property. 
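# Illustrative usage (editor's sketch; not part of the generated diff, the
# query text is a made-up example). An AzureTableSource with a filter query
# and a tolerant read: with azure_table_source_ignore_table_not_found set, a
# missing target table yields zero rows rather than a failed copy.
from azure.mgmt.datafactory.models import AzureTableSource

source = AzureTableSource(
    azure_table_source_query="PartitionKey eq 'device-001'",
    azure_table_source_ignore_table_not_found=True,
)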
Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param account_key: The Azure key vault secret reference of accountKey in + connection string. + :type account_key: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param sas_uri: SAS URI of the Azure Storage resource. It is mutually + exclusive with connectionString property. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type sas_uri: object + :param sas_token: The Azure key vault secret reference of sasToken in sas + uri. + :type sas_token: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, + 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AzureTableStorageLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.account_key = kwargs.get('account_key', None) + self.sas_uri = kwargs.get('sas_uri', None) + self.sas_token = kwargs.get('sas_token', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureTableStorage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service_py3.py new file mode 100644 index 000000000000..8d4e62c4f3e6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service_py3.py @@ -0,0 +1,83 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureTableStorageLinkedService(LinkedService): + """The azure table storage linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: The connection string. It is mutually exclusive + with sasUri property. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param account_key: The Azure key vault secret reference of accountKey in + connection string. + :type account_key: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param sas_uri: SAS URI of the Azure Storage resource. It is mutually + exclusive with connectionString property. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type sas_uri: object + :param sas_token: The Azure key vault secret reference of sasToken in sas + uri. + :type sas_token: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, + 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, account_key=None, sas_uri=None, sas_token=None, encrypted_credential: str=None, **kwargs) -> None: + super(AzureTableStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.account_key = account_key + self.sas_uri = sas_uri + self.sas_token = sas_token + self.encrypted_credential = encrypted_credential + self.type = 'AzureTableStorage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset.py new file mode 100644 index 000000000000..5f0f8ef96696 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
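# Illustrative usage (editor's sketch; not part of the generated diff). The
# SAS-based alternative to a connection string: sas_uri plus a Key Vault
# reference for the token, which the sas_token docstring types as
# AzureKeyVaultSecretReference. The store/secret_name keywords and all names
# here are assumptions for illustration.
from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference,
    AzureTableStorageLinkedService,
    LinkedServiceReference,
)

linked_service = AzureTableStorageLinkedService(
    sas_uri='https://myaccount.table.core.windows.net/',
    sas_token=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='MyKeyVaultLS'),
        secret_name='table-sas-token',
    ),
)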
All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class BinaryDataset(Dataset): + """Binary dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the Binary storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param compression: The data compression method used for the binary + dataset. 
+ :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, **kwargs): + super(BinaryDataset, self).__init__(**kwargs) + self.location = kwargs.get('location', None) + self.compression = kwargs.get('compression', None) + self.type = 'Binary' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset_py3.py new file mode 100644 index 000000000000..7d26b216fd7a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class BinaryDataset(Dataset): + """Binary dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the Binary storage. 
+ :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param compression: The data compression method used for the binary + dataset. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, compression=None, **kwargs) -> None: + super(BinaryDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.location = location + self.compression = compression + self.type = 'Binary' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink.py new file mode 100644 index 000000000000..b991bfee53c7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class BinarySink(CopySink): + """A copy activity Binary sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
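# Illustrative usage (editor's sketch; not part of the generated diff).
# BinaryDataset requires a DatasetLocation; AzureBlobStorageLocation is
# assumed here to be one of the generated DatasetLocation subclasses in this
# package, and the container/path values are placeholders. For another store,
# the matching location model would be substituted.
from azure.mgmt.datafactory.models import (
    AzureBlobStorageLocation,  # assumed sibling model
    BinaryDataset,
    LinkedServiceReference,
)

dataset = BinaryDataset(
    linked_service_name=LinkedServiceReference(reference_name='MyBlobStorageLS'),
    location=AzureBlobStorageLocation(
        container='archives',
        folder_path='incoming',
        file_name='payload.bin',
    ),
)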
+ :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Binary store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + } + + def __init__(self, **kwargs): + super(BinarySink, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.type = 'BinarySink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink_py3.py new file mode 100644 index 000000000000..80421d161aed --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink_py3.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class BinarySink(CopySink): + """A copy activity Binary sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Binary store settings. 
+ :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + super(BinarySink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.type = 'BinarySink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source.py new file mode 100644 index 000000000000..48e78e7d24bf --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class BinarySource(CopySource): + """A copy activity Binary source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Binary store settings. 
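# A minimal sketch of configuring the BinarySink defined above, using only
# parameters documented in its docstring; all of them are optional.
# store_settings is omitted because the concrete StoreWriteSettings subtype
# depends on the destination store.
from azure.mgmt.datafactory.models import BinarySink

binary_sink = BinarySink(
    write_batch_size=1000,
    sink_retry_count=2,
    # Timespan literal matching the documented pattern
    # ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))
    sink_retry_wait='00:00:30',
    max_concurrent_connections=4,
)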
+ :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__(self, **kwargs): + super(BinarySource, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.type = 'BinarySource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source_py3.py new file mode 100644 index 000000000000..aa9a9f1412ab --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source_py3.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class BinarySource(CopySource): + """A copy activity Binary source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Binary store settings. 
+ :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + super(BinarySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.type = 'BinarySource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger.py new file mode 100644 index 000000000000..673d34167fed --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .multiple_pipeline_trigger import MultiplePipelineTrigger + + +class BlobEventsTrigger(MultiplePipelineTrigger): + """Trigger that runs every time a Blob event occurs. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param pipelines: Pipelines that need to be started. + :type pipelines: + list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :param blob_path_begins_with: The blob path must begin with the pattern + provided for trigger to fire. For example, '/records/blobs/december/' will + only fire the trigger for blobs in the december folder under the records + container. At least one of these must be provided: blobPathBeginsWith, + blobPathEndsWith. + :type blob_path_begins_with: str + :param blob_path_ends_with: The blob path must end with the pattern + provided for trigger to fire. 
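# Companion sketch for the BinarySource defined above; the retry values
# follow the same literal-or-Expression convention as the sink, and
# store_settings is again left out as store-specific.
from azure.mgmt.datafactory.models import BinarySource

binary_source = BinarySource(
    source_retry_count=3,
    source_retry_wait='00:00:10',
    max_concurrent_connections=2,
)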
For example, 'december/boxes.csv' will only + fire the trigger for blobs named boxes in a december folder. At least one + of these must be provided: blobPathBeginsWith, blobPathEndsWith. + :type blob_path_ends_with: str + :param events: Required. The type of events that cause this trigger to + fire. + :type events: list[str or ~azure.mgmt.datafactory.models.BlobEventTypes] + :param scope: Required. The ARM resource ID of the Storage Account. + :type scope: str + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'events': {'required': True}, + 'scope': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'blob_path_begins_with': {'key': 'typeProperties.blobPathBeginsWith', 'type': 'str'}, + 'blob_path_ends_with': {'key': 'typeProperties.blobPathEndsWith', 'type': 'str'}, + 'events': {'key': 'typeProperties.events', 'type': '[str]'}, + 'scope': {'key': 'typeProperties.scope', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(BlobEventsTrigger, self).__init__(**kwargs) + self.blob_path_begins_with = kwargs.get('blob_path_begins_with', None) + self.blob_path_ends_with = kwargs.get('blob_path_ends_with', None) + self.events = kwargs.get('events', None) + self.scope = kwargs.get('scope', None) + self.type = 'BlobEventsTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger_py3.py new file mode 100644 index 000000000000..fb65a420a2cd --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger_py3.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .multiple_pipeline_trigger_py3 import MultiplePipelineTrigger + + +class BlobEventsTrigger(MultiplePipelineTrigger): + """Trigger that runs every time a Blob event occurs. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. 
+ :type type: str + :param pipelines: Pipelines that need to be started. + :type pipelines: + list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :param blob_path_begins_with: The blob path must begin with the pattern + provided for trigger to fire. For example, '/records/blobs/december/' will + only fire the trigger for blobs in the december folder under the records + container. At least one of these must be provided: blobPathBeginsWith, + blobPathEndsWith. + :type blob_path_begins_with: str + :param blob_path_ends_with: The blob path must end with the pattern + provided for trigger to fire. For example, 'december/boxes.csv' will only + fire the trigger for blobs named boxes in a december folder. At least one + of these must be provided: blobPathBeginsWith, blobPathEndsWith. + :type blob_path_ends_with: str + :param events: Required. The type of events that cause this trigger to + fire. + :type events: list[str or ~azure.mgmt.datafactory.models.BlobEventTypes] + :param scope: Required. The ARM resource ID of the Storage Account. + :type scope: str + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'events': {'required': True}, + 'scope': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'blob_path_begins_with': {'key': 'typeProperties.blobPathBeginsWith', 'type': 'str'}, + 'blob_path_ends_with': {'key': 'typeProperties.blobPathEndsWith', 'type': 'str'}, + 'events': {'key': 'typeProperties.events', 'type': '[str]'}, + 'scope': {'key': 'typeProperties.scope', 'type': 'str'}, + } + + def __init__(self, *, events, scope: str, additional_properties=None, description: str=None, annotations=None, pipelines=None, blob_path_begins_with: str=None, blob_path_ends_with: str=None, **kwargs) -> None: + super(BlobEventsTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) + self.blob_path_begins_with = blob_path_begins_with + self.blob_path_ends_with = blob_path_ends_with + self.events = events + self.scope = scope + self.type = 'BlobEventsTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py new file mode 100644 index 000000000000..284e0fcecde5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py @@ -0,0 +1,80 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class BlobSink(CopySink): + """A copy activity Azure Blob sink. + + All required parameters must be populated in order to send to Azure. 
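# Hedged sketch of the BlobEventsTrigger defined above: start a hypothetical
# pipeline whenever a blob is created under /records/blobs/december/. The
# subscription, resource group and storage account segments are placeholders,
# and 'Microsoft.Storage.BlobCreated' is an assumed BlobEventTypes value.
from azure.mgmt.datafactory.models import (
    BlobEventsTrigger, PipelineReference, TriggerPipelineReference)

blob_events_trigger = BlobEventsTrigger(
    events=['Microsoft.Storage.BlobCreated'],
    scope=('/subscriptions/<subscription-id>/resourceGroups/<rg>'
           '/providers/Microsoft.Storage/storageAccounts/<account>'),
    blob_path_begins_with='/records/blobs/december/',
    pipelines=[TriggerPipelineReference(
        pipeline_reference=PipelineReference(reference_name='MyPipeline'))],
)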
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param blob_writer_overwrite_files: Blob writer overwrite files. Type: + boolean (or Expression with resultType boolean). + :type blob_writer_overwrite_files: object + :param blob_writer_date_time_format: Blob writer date time format. Type: + string (or Expression with resultType string). + :type blob_writer_date_time_format: object + :param blob_writer_add_header: Blob writer add header. Type: boolean (or + Expression with resultType boolean). + :type blob_writer_add_header: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, + 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, + 'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(BlobSink, self).__init__(**kwargs) + self.blob_writer_overwrite_files = kwargs.get('blob_writer_overwrite_files', None) + self.blob_writer_date_time_format = kwargs.get('blob_writer_date_time_format', None) + self.blob_writer_add_header = kwargs.get('blob_writer_add_header', None) + self.copy_behavior = kwargs.get('copy_behavior', None) + self.type = 'BlobSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py new file mode 100644 index 000000000000..370acc72e017 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py @@ -0,0 +1,80 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class BlobSink(CopySink): + """A copy activity Azure Blob sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param blob_writer_overwrite_files: Blob writer overwrite files. Type: + boolean (or Expression with resultType boolean). + :type blob_writer_overwrite_files: object + :param blob_writer_date_time_format: Blob writer date time format. Type: + string (or Expression with resultType string). + :type blob_writer_date_time_format: object + :param blob_writer_add_header: Blob writer add header. Type: boolean (or + Expression with resultType boolean). + :type blob_writer_add_header: object + :param copy_behavior: The type of copy behavior for copy sink. 
+ :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, + 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, + 'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, blob_writer_overwrite_files=None, blob_writer_date_time_format=None, blob_writer_add_header=None, copy_behavior=None, **kwargs) -> None: + super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.blob_writer_overwrite_files = blob_writer_overwrite_files + self.blob_writer_date_time_format = blob_writer_date_time_format + self.blob_writer_add_header = blob_writer_add_header + self.copy_behavior = copy_behavior + self.type = 'BlobSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source.py new file mode 100644 index 000000000000..ab4313a890cb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class BlobSource(CopySource): + """A copy activity Azure Blob source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
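# Minimal sketch for the BlobSink defined above. copy_behavior is typed as a
# free-form object; 'PreserveHierarchy' is an assumed, commonly documented
# value, not one confirmed by this patch.
from azure.mgmt.datafactory.models import BlobSink

blob_sink = BlobSink(
    blob_writer_overwrite_files=True,
    blob_writer_add_header=False,
    copy_behavior='PreserveHierarchy',
)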
+ :type type: str + :param treat_empty_as_null: Treat empty as null. Type: boolean (or + Expression with resultType boolean). + :type treat_empty_as_null: object + :param skip_header_line_count: Number of header lines to skip from each + blob. Type: integer (or Expression with resultType integer). + :type skip_header_line_count: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, + 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(BlobSource, self).__init__(**kwargs) + self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None) + self.skip_header_line_count = kwargs.get('skip_header_line_count', None) + self.recursive = kwargs.get('recursive', None) + self.type = 'BlobSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source_py3.py new file mode 100644 index 000000000000..78d90cc61e13 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source_py3.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class BlobSource(CopySource): + """A copy activity Azure Blob source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param treat_empty_as_null: Treat empty as null. Type: boolean (or + Expression with resultType boolean). + :type treat_empty_as_null: object + :param skip_header_line_count: Number of header lines to skip from each + blob. Type: integer (or Expression with resultType integer). 
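# Sketch for the BlobSource defined above, restricted to parameters its
# docstring documents.
from azure.mgmt.datafactory.models import BlobSource

blob_source = BlobSource(
    treat_empty_as_null=True,
    skip_header_line_count=1,  # skip one header line per blob
    recursive=True,            # read files under the folder path recursively
)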
+ :type skip_header_line_count: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, + 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, treat_empty_as_null=None, skip_header_line_count=None, recursive=None, **kwargs) -> None: + super(BlobSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.treat_empty_as_null = treat_empty_as_null + self.skip_header_line_count = skip_header_line_count + self.recursive = recursive + self.type = 'BlobSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger.py new file mode 100644 index 000000000000..4fd5b5c17100 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .multiple_pipeline_trigger import MultiplePipelineTrigger + + +class BlobTrigger(MultiplePipelineTrigger): + """Trigger that runs every time the selected Blob container changes. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param pipelines: Pipelines that need to be started. + :type pipelines: + list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :param folder_path: Required. 
The path of the container/folder that will + trigger the pipeline. + :type folder_path: str + :param max_concurrency: Required. The max number of parallel files to + handle when it is triggered. + :type max_concurrency: int + :param linked_service: Required. The Azure Storage linked service + reference. + :type linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'folder_path': {'required': True}, + 'max_concurrency': {'required': True}, + 'linked_service': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'str'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + 'linked_service': {'key': 'typeProperties.linkedService', 'type': 'LinkedServiceReference'}, + } + + def __init__(self, **kwargs): + super(BlobTrigger, self).__init__(**kwargs) + self.folder_path = kwargs.get('folder_path', None) + self.max_concurrency = kwargs.get('max_concurrency', None) + self.linked_service = kwargs.get('linked_service', None) + self.type = 'BlobTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger_py3.py new file mode 100644 index 000000000000..cccffd881bfb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger_py3.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .multiple_pipeline_trigger_py3 import MultiplePipelineTrigger + + +class BlobTrigger(MultiplePipelineTrigger): + """Trigger that runs every time the selected Blob container changes. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param pipelines: Pipelines that need to be started. 
+ :type pipelines: + list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :param folder_path: Required. The path of the container/folder that will + trigger the pipeline. + :type folder_path: str + :param max_concurrency: Required. The max number of parallel files to + handle when it is triggered. + :type max_concurrency: int + :param linked_service: Required. The Azure Storage linked service + reference. + :type linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'folder_path': {'required': True}, + 'max_concurrency': {'required': True}, + 'linked_service': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'str'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + 'linked_service': {'key': 'typeProperties.linkedService', 'type': 'LinkedServiceReference'}, + } + + def __init__(self, *, folder_path: str, max_concurrency: int, linked_service, additional_properties=None, description: str=None, annotations=None, pipelines=None, **kwargs) -> None: + super(BlobTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) + self.folder_path = folder_path + self.max_concurrency = max_concurrency + self.linked_service = linked_service + self.type = 'BlobTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service.py new file mode 100644 index 000000000000..ebba2be42028 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service.py @@ -0,0 +1,84 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class CassandraLinkedService(LinkedService): + """Linked service for Cassandra data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. 
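# Sketch for the BlobTrigger defined above: watch a container path and allow
# up to ten files to be handled in parallel. The linked service name is
# hypothetical.
from azure.mgmt.datafactory.models import BlobTrigger, LinkedServiceReference

blob_trigger = BlobTrigger(
    folder_path='mycontainer/incoming',
    max_concurrency=10,
    linked_service=LinkedServiceReference(reference_name='MyAzureStorage'),
)
# Like datasets, triggers are wrapped (in a TriggerResource) before being
# sent through the management client's triggers.create_or_update.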
+ :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. Host name for connection. Type: string (or + Expression with resultType string). + :type host: object + :param authentication_type: AuthenticationType to be used for connection. + Type: string (or Expression with resultType string). + :type authentication_type: object + :param port: The port for the connection. Type: integer (or Expression + with resultType integer). + :type port: object + :param username: Username for authentication. Type: string (or Expression + with resultType string). + :type username: object + :param password: Password for authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CassandraLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.port = kwargs.get('port', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Cassandra' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service_py3.py new file mode 100644 index 000000000000..f22f303cc61d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service_py3.py @@ -0,0 +1,84 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class CassandraLinkedService(LinkedService): + """Linked service for Cassandra data source. + + All required parameters must be populated in order to send to Azure. 
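# Sketch for the CassandraLinkedService defined above. Host and credentials
# are placeholders; 'Basic' is an assumed authenticationType value, and
# SecureString is the SDK's in-line secret wrapper for SecretBase fields.
from azure.mgmt.datafactory.models import CassandraLinkedService, SecureString

cassandra_linked_service = CassandraLinkedService(
    host='cassandra.internal.example.com',
    port=9042,
    authentication_type='Basic',
    username='reader',
    password=SecureString(value='<password>'),
)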
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. Host name for connection. Type: string (or + Expression with resultType string). + :type host: object + :param authentication_type: AuthenticationType to be used for connection. + Type: string (or Expression with resultType string). + :type authentication_type: object + :param port: The port for the connection. Type: integer (or Expression + with resultType integer). + :type port: object + :param username: Username for authentication. Type: string (or Expression + with resultType string). + :type username: object + :param password: Password for authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, port=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(CassandraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.authentication_type = authentication_type + self.port = port + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'Cassandra' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py new file mode 100644 index 000000000000..e7ba96c18682 --- /dev/null +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class CassandraSource(CopySource): + """A copy activity source for a Cassandra database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Should be a SQL-92 query expression or + Cassandra Query Language (CQL) command. Type: string (or Expression with + resultType string). + :type query: object + :param consistency_level: The consistency level specifies how many + Cassandra servers must respond to a read request before returning data to + the client application. Cassandra checks the specified number of Cassandra + servers for data to satisfy the read request. Must be one of + cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is + case-insensitive. 
Possible values include: 'ALL', 'EACH_QUORUM', 'QUORUM', + 'LOCAL_QUORUM', 'ONE', 'TWO', 'THREE', 'LOCAL_ONE', 'SERIAL', + 'LOCAL_SERIAL' + :type consistency_level: str or + ~azure.mgmt.datafactory.models.CassandraSourceReadConsistencyLevels + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(CassandraSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.consistency_level = kwargs.get('consistency_level', None) + self.type = 'CassandraSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py new file mode 100644 index 000000000000..bd95d158b868 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class CassandraSource(CopySource): + """A copy activity source for a Cassandra database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Should be a SQL-92 query expression or + Cassandra Query Language (CQL) command. Type: string (or Expression with + resultType string). + :type query: object + :param consistency_level: The consistency level specifies how many + Cassandra servers must respond to a read request before returning data to + the client application. Cassandra checks the specified number of Cassandra + servers for data to satisfy the read request. Must be one of + cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is + case-insensitive. 
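# Sketch for the CassandraSource defined above, pairing a CQL query with one
# of the consistency levels enumerated in the docstring.
from azure.mgmt.datafactory.models import CassandraSource

cassandra_source = CassandraSource(
    query='SELECT * FROM telemetry.events',
    consistency_level='LOCAL_QUORUM',
)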
Possible values include: 'ALL', 'EACH_QUORUM', 'QUORUM', + 'LOCAL_QUORUM', 'ONE', 'TWO', 'THREE', 'LOCAL_ONE', 'SERIAL', + 'LOCAL_SERIAL' + :type consistency_level: str or + ~azure.mgmt.datafactory.models.CassandraSourceReadConsistencyLevels + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, consistency_level=None, **kwargs) -> None: + super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.consistency_level = consistency_level + self.type = 'CassandraSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset.py new file mode 100644 index 000000000000..b89c324fd4d4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class CassandraTableDataset(Dataset): + """The Cassandra database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. 
Constant filled by server. + :type type: str + :param table_name: The table name of the Cassandra database. Type: string + (or Expression with resultType string). + :type table_name: object + :param keyspace: The keyspace of the Cassandra database. Type: string (or + Expression with resultType string). + :type keyspace: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'keyspace': {'key': 'typeProperties.keyspace', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CassandraTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.keyspace = kwargs.get('keyspace', None) + self.type = 'CassandraTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset_py3.py new file mode 100644 index 000000000000..256358ce50cb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class CassandraTableDataset(Dataset): + """The Cassandra database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. 
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name of the Cassandra database. Type: string + (or Expression with resultType string). + :type table_name: object + :param keyspace: The keyspace of the Cassandra database. Type: string (or + Expression with resultType string). + :type keyspace: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'keyspace': {'key': 'typeProperties.keyspace', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, keyspace=None, **kwargs) -> None: + super(CassandraTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.keyspace = keyspace + self.type = 'CassandraTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset.py new file mode 100644 index 000000000000..c7cd4c315852 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class CommonDataServiceForAppsEntityDataset(Dataset): + """The Common Data Service for Apps entity dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. 
+ :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param entity_name: The logical name of the entity. Type: string (or + Expression with resultType string). + :type entity_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CommonDataServiceForAppsEntityDataset, self).__init__(**kwargs) + self.entity_name = kwargs.get('entity_name', None) + self.type = 'CommonDataServiceForAppsEntity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset_py3.py new file mode 100644 index 000000000000..050bdb836963 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class CommonDataServiceForAppsEntityDataset(Dataset): + """The Common Data Service for Apps entity dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. 
+ :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param entity_name: The logical name of the entity. Type: string (or + Expression with resultType string). + :type entity_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, entity_name=None, **kwargs) -> None: + super(CommonDataServiceForAppsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.entity_name = entity_name + self.type = 'CommonDataServiceForAppsEntity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service.py new file mode 100644 index 000000000000..bbc8b7a0de65 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service.py @@ -0,0 +1,115 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class CommonDataServiceForAppsLinkedService(LinkedService): + """Common Data Service for Apps linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param deployment_type: Required. The deployment type of the Common Data + Service for Apps instance. 'Online' for Common Data Service for Apps + Online and 'OnPremisesWithIfd' for Common Data Service for Apps + on-premises with Ifd. Type: string (or Expression with resultType string). + Possible values include: 'Online', 'OnPremisesWithIfd' + :type deployment_type: str or + ~azure.mgmt.datafactory.models.DynamicsDeploymentType + :param host_name: The host name of the on-premises Common Data Service for + Apps server. The property is required for on-prem and not allowed for + online. Type: string (or Expression with resultType string). + :type host_name: object + :param port: The port of on-premises Common Data Service for Apps server. + The property is required for on-prem and not allowed for online. Default + is 443. Type: integer (or Expression with resultType integer), minimum: 0. + :type port: object + :param service_uri: The URL to the Microsoft Common Data Service for Apps + server. The property is required for on-line and not allowed for on-prem. + Type: string (or Expression with resultType string). + :type service_uri: object + :param organization_name: The organization name of the Common Data Service + for Apps instance. The property is required for on-prem and required for + online when there are more than one Common Data Service for Apps instances + associated with the user. Type: string (or Expression with resultType + string). + :type organization_name: object + :param authentication_type: Required. The authentication type to connect + to Common Data Service for Apps server. 'Office365' for online scenario, + 'Ifd' for on-premises with Ifd scenario. Type: string (or Expression with + resultType string). Possible values include: 'Office365', 'Ifd' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.DynamicsAuthenticationType + :param username: Required. User name to access the Common Data Service for + Apps instance. Type: string (or Expression with resultType string). + :type username: object + :param password: Password to access the Common Data Service for Apps + instance. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CommonDataServiceForAppsLinkedService, self).__init__(**kwargs) + self.deployment_type = kwargs.get('deployment_type', None) + self.host_name = kwargs.get('host_name', None) + self.port = kwargs.get('port', None) + self.service_uri = kwargs.get('service_uri', None) + self.organization_name = kwargs.get('organization_name', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'CommonDataServiceForApps' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service_py3.py new file mode 100644 index 000000000000..1c4897c09868 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service_py3.py @@ -0,0 +1,115 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class CommonDataServiceForAppsLinkedService(LinkedService): + """Common Data Service for Apps linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. 
+ :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param deployment_type: Required. The deployment type of the Common Data + Service for Apps instance. 'Online' for Common Data Service for Apps + Online and 'OnPremisesWithIfd' for Common Data Service for Apps + on-premises with Ifd. Type: string (or Expression with resultType string). + Possible values include: 'Online', 'OnPremisesWithIfd' + :type deployment_type: str or + ~azure.mgmt.datafactory.models.DynamicsDeploymentType + :param host_name: The host name of the on-premises Common Data Service for + Apps server. The property is required for on-prem and not allowed for + online. Type: string (or Expression with resultType string). + :type host_name: object + :param port: The port of on-premises Common Data Service for Apps server. + The property is required for on-prem and not allowed for online. Default + is 443. Type: integer (or Expression with resultType integer), minimum: 0. + :type port: object + :param service_uri: The URL to the Microsoft Common Data Service for Apps + server. The property is required for on-line and not allowed for on-prem. + Type: string (or Expression with resultType string). + :type service_uri: object + :param organization_name: The organization name of the Common Data Service + for Apps instance. The property is required for on-prem and required for + online when there are more than one Common Data Service for Apps instances + associated with the user. Type: string (or Expression with resultType + string). + :type organization_name: object + :param authentication_type: Required. The authentication type to connect + to Common Data Service for Apps server. 'Office365' for online scenario, + 'Ifd' for on-premises with Ifd scenario. Type: string (or Expression with + resultType string). Possible values include: 'Office365', 'Ifd' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.DynamicsAuthenticationType + :param username: Required. User name to access the Common Data Service for + Apps instance. Type: string (or Expression with resultType string). + :type username: object + :param password: Password to access the Common Data Service for Apps + instance. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, deployment_type, authentication_type, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(CommonDataServiceForAppsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.deployment_type = deployment_type + self.host_name = host_name + self.port = port + self.service_uri = service_uri + self.organization_name = organization_name + self.authentication_type = authentication_type + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'CommonDataServiceForApps' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink.py new file mode 100644 index 000000000000..0df48841cccc --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class CommonDataServiceForAppsSink(CopySink): + """A copy activity Common Data Service for Apps sink. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. 
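The keyword-only constructor just shown (deployment_type, authentication_type and username required, everything else optional) suggests a minimal usage pattern for the online scenario. This is an illustrative sketch only: SecureString is assumed to be the concrete SecretBase implementation exported elsewhere in this package, and the account names and URL are placeholders.

    from azure.mgmt.datafactory.models import (
        CommonDataServiceForAppsLinkedService,
        SecureString,  # assumed SecretBase subtype from this package
    )

    # Online deployment: service_uri applies here, while host_name and
    # port are only meaningful for the on-premises (Ifd) scenario.
    linked_service = CommonDataServiceForAppsLinkedService(
        deployment_type='Online',
        authentication_type='Office365',
        username='admin@contoso.example',
        password=SecureString(value='<placeholder>'),
        service_uri='https://contoso.crm.dynamics.com',
    )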
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :ivar write_behavior: Required. The write behavior for the operation. + Default value: "Upsert" . + :vartype write_behavior: str + :param ignore_null_values: The flag indicating whether to ignore null + values from input dataset (except key fields) during write operation. + Default is false. Type: boolean (or Expression with resultType boolean). + :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + 'write_behavior': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + write_behavior = "Upsert" + + def __init__(self, **kwargs): + super(CommonDataServiceForAppsSink, self).__init__(**kwargs) + self.ignore_null_values = kwargs.get('ignore_null_values', None) + self.type = 'CommonDataServiceForAppsSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink_py3.py new file mode 100644 index 000000000000..80f85e6d5809 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class CommonDataServiceForAppsSink(CopySink): + """A copy activity Common Data Service for Apps sink. 
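One design point worth noting in the sink above: write_behavior is declared as a required constant ("Upsert") at class level, so callers never pass it; ignore_null_values is the only sink-specific setting. A minimal sketch of what that implies:

    from azure.mgmt.datafactory.models import CommonDataServiceForAppsSink

    sink = CommonDataServiceForAppsSink(ignore_null_values=True)

    # The constant is supplied by the class, not by the caller.
    assert sink.write_behavior == 'Upsert'
    assert sink.type == 'CommonDataServiceForAppsSink'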
+ + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :ivar write_behavior: Required. The write behavior for the operation. + Default value: "Upsert" . + :vartype write_behavior: str + :param ignore_null_values: The flag indicating whether to ignore null + values from input dataset (except key fields) during write operation. + Default is false. Type: boolean (or Expression with resultType boolean). + :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + 'write_behavior': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + write_behavior = "Upsert" + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None: + super(CommonDataServiceForAppsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.ignore_null_values = ignore_null_values + self.type = 'CommonDataServiceForAppsSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source.py new file mode 100644 index 000000000000..13d2a6b921bb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class CommonDataServiceForAppsSource(CopySource): + """A copy activity Common Data Service for Apps source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: FetchXML is a proprietary query language that is used in + Microsoft Common Data Service for Apps (online & on-premises). Type: + string (or Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CommonDataServiceForAppsSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'CommonDataServiceForAppsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source_py3.py new file mode 100644 index 000000000000..713db90f9623 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source_py3.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class CommonDataServiceForAppsSource(CopySource): + """A copy activity Common Data Service for Apps source. + + All required parameters must be populated in order to send to Azure. 
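Because the source's query property carries FetchXML rather than SQL, a typical value is a small XML document. A minimal sketch, using a hypothetical 'account' entity:

    from azure.mgmt.datafactory.models import CommonDataServiceForAppsSource

    # FetchXML query; the entity and attribute names are placeholders.
    source = CommonDataServiceForAppsSource(
        query=(
            "<fetch mapping='logical'>"
            "<entity name='account'>"
            "<attribute name='name'/>"
            "</entity>"
            "</fetch>"
        ),
    )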
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: FetchXML is a proprietary query language that is used in + Microsoft Common Data Service for Apps (online & on-premises). Type: + string (or Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(CommonDataServiceForAppsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'CommonDataServiceForAppsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service.py new file mode 100644 index 000000000000..04179d0d1f53 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service.py @@ -0,0 +1,92 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class ConcurLinkedService(LinkedService): + """Concur Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param client_id: Required. Application client_id supplied by Concur App + Management. + :type client_id: object + :param username: Required. The user name that you use to access Concur + Service. + :type username: object + :param password: The password corresponding to the user name that you + provided in the username field. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_id': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ConcurLinkedService, self).__init__(**kwargs) + self.client_id = kwargs.get('client_id', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Concur' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service_py3.py new file mode 100644 index 000000000000..4411db6d2856 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service_py3.py @@ 
-0,0 +1,92 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class ConcurLinkedService(LinkedService): + """Concur Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param client_id: Required. Application client_id supplied by Concur App + Management. + :type client_id: object + :param username: Required. The user name that you use to access Concur + Service. + :type username: object + :param password: The password corresponding to the user name that you + provided in the username field. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_id': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, client_id, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(ConcurLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.client_id = client_id + self.username = username + self.password = password + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Concur' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset.py new file mode 100644 index 000000000000..e2595f9d8aff --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class ConcurObjectDataset(Dataset): + """Concur Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. 
Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ConcurObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'ConcurObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset_py3.py new file mode 100644 index 000000000000..9543a6395a32 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class ConcurObjectDataset(Dataset): + """Concur Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(ConcurObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'ConcurObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source.py new file mode 100644 index 000000000000..11ae557c0cda --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class ConcurSource(CopySource): + """A copy activity Concur Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ConcurSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'ConcurSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source_py3.py new file mode 100644 index 000000000000..ac8ae8fb5a91 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class ConcurSource(CopySource): + """A copy activity Concur Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
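# A minimal usage sketch for the two Concur models above. The linked
# service name and table are illustrative placeholders, not values taken
# from this patch.
from azure.mgmt.datafactory.models import (
    ConcurObjectDataset, ConcurSource, LinkedServiceReference)

# Dataset bound to a Concur table; table_name accepts a plain string or
# an ADF expression object.
dataset = ConcurObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='ConcurLinkedService'),
    table_name='Trips')

# Copy-activity source; when query is set it is typically used in place
# of the dataset's table_name.
source = ConcurSource(query='SELECT * FROM Trips')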
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'ConcurSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity.py new file mode 100644 index 000000000000..2242bc36beb2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .activity import Activity + + +class ControlActivity(Activity): + """Base class for all control activities like IfCondition, ForEach , Until. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: WebHookActivity, AppendVariableActivity, + SetVariableActivity, FilterActivity, ValidationActivity, UntilActivity, + WaitActivity, ForEachActivity, IfConditionActivity, ExecutePipelineActivity + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'WebHook': 'WebHookActivity', 'AppendVariable': 'AppendVariableActivity', 'SetVariable': 'SetVariableActivity', 'Filter': 'FilterActivity', 'Validation': 'ValidationActivity', 'Until': 'UntilActivity', 'Wait': 'WaitActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'ExecutePipeline': 'ExecutePipelineActivity'} + } + + def __init__(self, **kwargs): + super(ControlActivity, self).__init__(**kwargs) + self.type = 'Container' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity_py3.py new file mode 100644 index 000000000000..0aabd99d741f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity_py3.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .activity_py3 import Activity + + +class ControlActivity(Activity): + """Base class for all control activities like IfCondition, ForEach , Until. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: WebHookActivity, AppendVariableActivity, + SetVariableActivity, FilterActivity, ValidationActivity, UntilActivity, + WaitActivity, ForEachActivity, IfConditionActivity, ExecutePipelineActivity + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. 
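# ControlActivity is an abstract discriminator base: the wire value
# 'Container' marks the category, and _subtype_map routes each concrete
# 'type' value (e.g. 'Wait') to its subclass during deserialization.
# A sketch using WaitActivity, assuming its generated signature follows
# the usual pattern (required name plus wait_time_in_seconds):
from azure.mgmt.datafactory.models import WaitActivity

# 'type' is pinned by the subclass constructor; callers never set it.
wait = WaitActivity(name='PauseFiveMinutes', wait_time_in_seconds=300)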
+ :type type: str + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'WebHook': 'WebHookActivity', 'AppendVariable': 'AppendVariableActivity', 'SetVariable': 'SetVariableActivity', 'Filter': 'FilterActivity', 'Validation': 'ValidationActivity', 'Until': 'UntilActivity', 'Wait': 'WaitActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'ExecutePipeline': 'ExecutePipelineActivity'} + } + + def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, **kwargs) -> None: + super(ControlActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.type = 'Container' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py new file mode 100644 index 000000000000..2e7c00d551ba --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py @@ -0,0 +1,124 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity import ExecutionActivity + + +class CopyActivity(ExecutionActivity): + """Copy activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param source: Required. Copy activity source. + :type source: ~azure.mgmt.datafactory.models.CopySource + :param sink: Required. Copy activity sink. + :type sink: ~azure.mgmt.datafactory.models.CopySink + :param translator: Copy activity translator. If not specified, tabular + translator is used. + :type translator: object + :param enable_staging: Specifies whether to copy data via an interim + staging. Default value is false. 
Type: boolean (or Expression with + resultType boolean). + :type enable_staging: object + :param staging_settings: Specifies interim staging settings when + EnableStaging is true. + :type staging_settings: ~azure.mgmt.datafactory.models.StagingSettings + :param parallel_copies: Maximum number of concurrent sessions opened on + the source or sink to avoid overloading the data store. Type: integer (or + Expression with resultType integer), minimum: 0. + :type parallel_copies: object + :param data_integration_units: Maximum number of data integration units + that can be used to perform this data movement. Type: integer (or + Expression with resultType integer), minimum: 0. + :type data_integration_units: object + :param enable_skip_incompatible_row: Whether to skip incompatible row. + Default value is false. Type: boolean (or Expression with resultType + boolean). + :type enable_skip_incompatible_row: object + :param redirect_incompatible_row_settings: Redirect incompatible row + settings when EnableSkipIncompatibleRow is true. + :type redirect_incompatible_row_settings: + ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings + :param preserve_rules: Preserve Rules. + :type preserve_rules: list[object] + :param preserve: Preserve rules. + :type preserve: list[object] + :param inputs: List of inputs for the activity. + :type inputs: list[~azure.mgmt.datafactory.models.DatasetReference] + :param outputs: List of outputs for the activity. + :type outputs: list[~azure.mgmt.datafactory.models.DatasetReference] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'source': {'required': True}, + 'sink': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, + 'sink': {'key': 'typeProperties.sink', 'type': 'CopySink'}, + 'translator': {'key': 'typeProperties.translator', 'type': 'object'}, + 'enable_staging': {'key': 'typeProperties.enableStaging', 'type': 'object'}, + 'staging_settings': {'key': 'typeProperties.stagingSettings', 'type': 'StagingSettings'}, + 'parallel_copies': {'key': 'typeProperties.parallelCopies', 'type': 'object'}, + 'data_integration_units': {'key': 'typeProperties.dataIntegrationUnits', 'type': 'object'}, + 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, + 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, + 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, + 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, + 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, + 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, + } + + def __init__(self, **kwargs): + super(CopyActivity, self).__init__(**kwargs) + self.source = kwargs.get('source', None) + self.sink = kwargs.get('sink', None) + self.translator = kwargs.get('translator', None) + self.enable_staging = kwargs.get('enable_staging', None) + 
self.staging_settings = kwargs.get('staging_settings', None) + self.parallel_copies = kwargs.get('parallel_copies', None) + self.data_integration_units = kwargs.get('data_integration_units', None) + self.enable_skip_incompatible_row = kwargs.get('enable_skip_incompatible_row', None) + self.redirect_incompatible_row_settings = kwargs.get('redirect_incompatible_row_settings', None) + self.preserve_rules = kwargs.get('preserve_rules', None) + self.preserve = kwargs.get('preserve', None) + self.inputs = kwargs.get('inputs', None) + self.outputs = kwargs.get('outputs', None) + self.type = 'Copy' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py new file mode 100644 index 000000000000..f8a1fee5625d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py @@ -0,0 +1,124 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class CopyActivity(ExecutionActivity): + """Copy activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param source: Required. Copy activity source. + :type source: ~azure.mgmt.datafactory.models.CopySource + :param sink: Required. Copy activity sink. + :type sink: ~azure.mgmt.datafactory.models.CopySink + :param translator: Copy activity translator. If not specified, tabular + translator is used. + :type translator: object + :param enable_staging: Specifies whether to copy data via an interim + staging. Default value is false. Type: boolean (or Expression with + resultType boolean). + :type enable_staging: object + :param staging_settings: Specifies interim staging settings when + EnableStaging is true. + :type staging_settings: ~azure.mgmt.datafactory.models.StagingSettings + :param parallel_copies: Maximum number of concurrent sessions opened on + the source or sink to avoid overloading the data store. Type: integer (or + Expression with resultType integer), minimum: 0. + :type parallel_copies: object + :param data_integration_units: Maximum number of data integration units + that can be used to perform this data movement. 
Type: integer (or + Expression with resultType integer), minimum: 0. + :type data_integration_units: object + :param enable_skip_incompatible_row: Whether to skip incompatible row. + Default value is false. Type: boolean (or Expression with resultType + boolean). + :type enable_skip_incompatible_row: object + :param redirect_incompatible_row_settings: Redirect incompatible row + settings when EnableSkipIncompatibleRow is true. + :type redirect_incompatible_row_settings: + ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings + :param preserve_rules: Preserve Rules. + :type preserve_rules: list[object] + :param preserve: Preserve rules. + :type preserve: list[object] + :param inputs: List of inputs for the activity. + :type inputs: list[~azure.mgmt.datafactory.models.DatasetReference] + :param outputs: List of outputs for the activity. + :type outputs: list[~azure.mgmt.datafactory.models.DatasetReference] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'source': {'required': True}, + 'sink': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, + 'sink': {'key': 'typeProperties.sink', 'type': 'CopySink'}, + 'translator': {'key': 'typeProperties.translator', 'type': 'object'}, + 'enable_staging': {'key': 'typeProperties.enableStaging', 'type': 'object'}, + 'staging_settings': {'key': 'typeProperties.stagingSettings', 'type': 'StagingSettings'}, + 'parallel_copies': {'key': 'typeProperties.parallelCopies', 'type': 'object'}, + 'data_integration_units': {'key': 'typeProperties.dataIntegrationUnits', 'type': 'object'}, + 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, + 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, + 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, + 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, + 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, + 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, + } + + def __init__(self, *, name: str, source, sink, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, translator=None, enable_staging=None, staging_settings=None, parallel_copies=None, data_integration_units=None, enable_skip_incompatible_row=None, redirect_incompatible_row_settings=None, preserve_rules=None, preserve=None, inputs=None, outputs=None, **kwargs) -> None: + super(CopyActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.source = source + self.sink = sink + self.translator = translator + self.enable_staging = enable_staging + self.staging_settings = staging_settings + self.parallel_copies = parallel_copies + 
self.data_integration_units = data_integration_units + self.enable_skip_incompatible_row = enable_skip_incompatible_row + self.redirect_incompatible_row_settings = redirect_incompatible_row_settings + self.preserve_rules = preserve_rules + self.preserve = preserve + self.inputs = inputs + self.outputs = outputs + self.type = 'Copy' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py new file mode 100644 index 000000000000..cbe8f2ecf8f7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class CopySink(Model): + """A copy activity sink. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: CosmosDbMongoDbApiSink, SalesforceServiceCloudSink, + SalesforceSink, AzureDataExplorerSink, CommonDataServiceForAppsSink, + DynamicsCrmSink, DynamicsSink, MicrosoftAccessSink, InformixSink, OdbcSink, + AzureSearchIndexSink, AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, + SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, + DocumentDbCollectionSink, FileSystemSink, BlobSink, BinarySink, + ParquetSink, AvroSink, AzureTableSink, AzureQueueSink, + SapCloudForCustomerSink, AzureMySqlSink, AzurePostgreSqlSink, JsonSink, + DelimitedTextSink + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
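# A hedged sketch of wiring the CopyActivity defined above. The dataset
# reference names are placeholders, and BlobSource/BlobSink stand in for
# any of the generated source/sink pairs.
from azure.mgmt.datafactory.models import (
    BlobSink, BlobSource, CopyActivity, DatasetReference)

copy = CopyActivity(
    name='CopyBlobToBlob',
    source=BlobSource(),
    sink=BlobSink(),
    inputs=[DatasetReference(reference_name='InputBlobDataset')],
    outputs=[DatasetReference(reference_name='OutputBlobDataset')],
    # Optional tuning knobs serialized under typeProperties:
    data_integration_units=4,
    enable_skip_incompatible_row=True)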
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} + } + + def __init__(self, **kwargs): + super(CopySink, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.write_batch_size = kwargs.get('write_batch_size', None) + self.write_batch_timeout = kwargs.get('write_batch_timeout', None) + self.sink_retry_count = kwargs.get('sink_retry_count', None) + self.sink_retry_wait = kwargs.get('sink_retry_wait', None) + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py new file mode 100644 index 000000000000..3720bece5674 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class CopySink(Model): + """A copy activity sink. + + You probably want to use the sub-classes and not this class directly. 
Known + sub-classes are: CosmosDbMongoDbApiSink, SalesforceServiceCloudSink, + SalesforceSink, AzureDataExplorerSink, CommonDataServiceForAppsSink, + DynamicsCrmSink, DynamicsSink, MicrosoftAccessSink, InformixSink, OdbcSink, + AzureSearchIndexSink, AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, + SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, + DocumentDbCollectionSink, FileSystemSink, BlobSink, BinarySink, + ParquetSink, AvroSink, AzureTableSink, AzureQueueSink, + SapCloudForCustomerSink, AzureMySqlSink, AzurePostgreSqlSink, JsonSink, + DelimitedTextSink + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 
'DelimitedTextSink'} + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: + super(CopySink, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.write_batch_size = write_batch_size + self.write_batch_timeout = write_batch_timeout + self.sink_retry_count = sink_retry_count + self.sink_retry_wait = sink_retry_wait + self.max_concurrent_connections = max_concurrent_connections + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py new file mode 100644 index 000000000000..b37bca86de5e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py @@ -0,0 +1,83 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class CopySource(Model): + """A copy activity source. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AmazonRedshiftSource, GoogleAdWordsSource, + OracleServiceCloudSource, DynamicsAXSource, ResponsysSource, + SalesforceMarketingCloudSource, VerticaSource, NetezzaSource, ZohoSource, + XeroSource, SquareSource, SparkSource, ShopifySource, ServiceNowSource, + QuickBooksSource, PrestoSource, PhoenixSource, PaypalSource, MarketoSource, + AzureMariaDBSource, MariaDBSource, MagentoSource, JiraSource, ImpalaSource, + HubspotSource, HiveSource, HBaseSource, GreenplumSource, + GoogleBigQuerySource, EloquaSource, DrillSource, CouchbaseSource, + ConcurSource, AzurePostgreSqlSource, AmazonMWSSource, HttpSource, + AzureBlobFSSource, AzureDataLakeStoreSource, Office365Source, + CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, CassandraSource, + WebSource, TeradataSource, OracleSource, AzureDataExplorerSource, + AzureMySqlSource, HdfsSource, FileSystemSource, SqlDWSource, SqlMISource, + AzureSqlSource, SqlServerSource, SqlSource, RestSource, SapTableSource, + SapOpenHubSource, SapHanaSource, SapEccSource, SapCloudForCustomerSource, + SalesforceServiceCloudSource, SalesforceSource, ODataSource, SapBwSource, + SybaseSource, PostgreSqlSource, MySqlSource, OdbcSource, Db2Source, + MicrosoftAccessSource, InformixSource, RelationalSource, + CommonDataServiceForAppsSource, DynamicsCrmSource, DynamicsSource, + DocumentDbCollectionSource, BlobSource, AzureTableSource, BinarySource, + JsonSource, DelimitedTextSource, ParquetSource, AvroSource + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. 
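# The base CopySink knobs are inherited by every concrete sink, so they
# can be set on any subclass. A sketch with BlobSink; the values are
# illustrative:
from azure.mgmt.datafactory.models import BlobSink

sink = BlobSink(
    write_batch_size=500,
    write_batch_timeout='00:05:00',  # matches the timespan pattern above
    max_concurrent_connections=4)
# sink.type is fixed to 'BlobSink' by the subclass constructor.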
Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'ODataSource': 'ODataSource', 'SapBwSource': 'SapBwSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 
'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'BinarySource': 'BinarySource', 'JsonSource': 'JsonSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'AvroSource': 'AvroSource'} + } + + def __init__(self, **kwargs): + super(CopySource, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.source_retry_count = kwargs.get('source_retry_count', None) + self.source_retry_wait = kwargs.get('source_retry_wait', None) + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py new file mode 100644 index 000000000000..22b8c590498e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py @@ -0,0 +1,83 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class CopySource(Model): + """A copy activity source. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AmazonRedshiftSource, GoogleAdWordsSource, + OracleServiceCloudSource, DynamicsAXSource, ResponsysSource, + SalesforceMarketingCloudSource, VerticaSource, NetezzaSource, ZohoSource, + XeroSource, SquareSource, SparkSource, ShopifySource, ServiceNowSource, + QuickBooksSource, PrestoSource, PhoenixSource, PaypalSource, MarketoSource, + AzureMariaDBSource, MariaDBSource, MagentoSource, JiraSource, ImpalaSource, + HubspotSource, HiveSource, HBaseSource, GreenplumSource, + GoogleBigQuerySource, EloquaSource, DrillSource, CouchbaseSource, + ConcurSource, AzurePostgreSqlSource, AmazonMWSSource, HttpSource, + AzureBlobFSSource, AzureDataLakeStoreSource, Office365Source, + CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, CassandraSource, + WebSource, TeradataSource, OracleSource, AzureDataExplorerSource, + AzureMySqlSource, HdfsSource, FileSystemSource, SqlDWSource, SqlMISource, + AzureSqlSource, SqlServerSource, SqlSource, RestSource, SapTableSource, + SapOpenHubSource, SapHanaSource, SapEccSource, SapCloudForCustomerSource, + SalesforceServiceCloudSource, SalesforceSource, ODataSource, SapBwSource, + SybaseSource, PostgreSqlSource, MySqlSource, OdbcSource, Db2Source, + MicrosoftAccessSource, InformixSource, RelationalSource, + CommonDataServiceForAppsSource, DynamicsCrmSource, DynamicsSource, + DocumentDbCollectionSource, BlobSource, AzureTableSource, BinarySource, + JsonSource, DelimitedTextSource, ParquetSource, AvroSource + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 
'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'ODataSource': 'ODataSource', 'SapBwSource': 'SapBwSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'BinarySource': 'BinarySource', 'JsonSource': 'JsonSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'AvroSource': 'AvroSource'} + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: + super(CopySource, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.source_retry_count = source_retry_count + self.source_retry_wait = source_retry_wait + self.max_concurrent_connections = max_concurrent_connections + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service.py new file mode 100644 index 000000000000..6a8a462f6f46 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class CosmosDbLinkedService(LinkedService): + """Microsoft Azure Cosmos Database (CosmosDB) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param account_key: The Azure key vault secret reference of accountKey in + connection string. + :type account_key: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. 
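# A sketch of how the 'type' discriminator drives deserialization in the
# msrest-based clients of this vintage: the generated client builds a
# Deserializer over the models module, and _subtype_map picks the
# concrete class from the payload's 'type' value.
from msrest import Deserializer

from azure.mgmt.datafactory import models

client_models = {k: v for k, v in models.__dict__.items()
                 if isinstance(v, type)}
deserialize = Deserializer(client_models)

src = deserialize('CopySource', {'type': 'ConcurSource', 'query': 'SELECT 1'})
# isinstance(src, models.ConcurSource) -> True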
Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CosmosDbLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.account_key = kwargs.get('account_key', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'CosmosDb' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service_py3.py new file mode 100644 index 000000000000..57dab80e06b9 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service_py3.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class CosmosDbLinkedService(LinkedService): + """Microsoft Azure Cosmos Database (CosmosDB) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param account_key: The Azure key vault secret reference of accountKey in + connection string. + :type account_key: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. 
Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, account_key=None, encrypted_credential=None, **kwargs) -> None: + super(CosmosDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.account_key = account_key + self.encrypted_credential = encrypted_credential + self.type = 'CosmosDb' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset.py new file mode 100644 index 000000000000..d86648eb5eee --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class CosmosDbMongoDbApiCollectionDataset(Dataset): + """The CosmosDB (MongoDB API) database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection: Required. The collection name of the CosmosDB (MongoDB + API) database. Type: string (or Expression with resultType string). + :type collection: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'collection': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CosmosDbMongoDbApiCollectionDataset, self).__init__(**kwargs) + self.collection = kwargs.get('collection', None) + self.type = 'CosmosDbMongoDbApiCollection' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset_py3.py new file mode 100644 index 000000000000..de2650fa14b4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class CosmosDbMongoDbApiCollectionDataset(Dataset): + """The CosmosDB (MongoDB API) database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection: Required. The collection name of the CosmosDB (MongoDB + API) database. Type: string (or Expression with resultType string). + :type collection: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'collection': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, collection, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(CosmosDbMongoDbApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.collection = collection + self.type = 'CosmosDbMongoDbApiCollection' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service.py new file mode 100644 index 000000000000..f76e7c5f5a41 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class CosmosDbMongoDbApiLinkedService(LinkedService): + """Linked service for CosmosDB (MongoDB API) data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The CosmosDB (MongoDB API) connection + string. Type: string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param database: Required. The name of the CosmosDB (MongoDB API) database + that you want to access. Type: string (or Expression with resultType + string). + :type database: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CosmosDbMongoDbApiLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.database = kwargs.get('database', None) + self.type = 'CosmosDbMongoDbApi'
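As a quick illustration of the new model, the linked service above might be wired up as follows. This is a minimal sketch, not part of the generated patch: the account name, secret value, and database name are placeholders, and SecureString is assumed as the secret type in use.

from azure.mgmt.datafactory.models import (
    CosmosDbMongoDbApiLinkedService,
    SecureString,
)

# Placeholder connection string; in practice this would come from a secure
# configuration source rather than source code.
ls = CosmosDbMongoDbApiLinkedService(
    connection_string=SecureString(
        value='mongodb://myaccount:<key>@myaccount.documents.azure.com:10255/'),
    database='orders',  # required: the (MongoDB API) database to access
)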
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service_py3.py new file mode 100644 index 000000000000..2a72bfce4ff2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service_py3.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class CosmosDbMongoDbApiLinkedService(LinkedService): + """Linked service for CosmosDB (MongoDB API) data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The CosmosDB (MongoDB API) connection + string. Type: string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param database: Required. The name of the CosmosDB (MongoDB API) database + that you want to access. Type: string (or Expression with resultType + string). + :type database: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + } + + def __init__(self, *, connection_string, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: + super(CosmosDbMongoDbApiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.database = database + self.type = 'CosmosDbMongoDbApi' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink.py new file mode 100644 index 000000000000..0d40b52add80 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class CosmosDbMongoDbApiSink(CopySink): + """A copy activity sink for a CosmosDB (MongoDB API) database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param write_behavior: Specifies whether the document with the same key is + to be overwritten (upsert) rather than an exception thrown (insert). The + default value is "insert". Type: string (or Expression with resultType + string). + :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CosmosDbMongoDbApiSink, self).__init__(**kwargs) + self.write_behavior = kwargs.get('write_behavior', None) + self.type = 'CosmosDbMongoDbApiSink'
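A short usage sketch for the sink above, not part of the generated patch. The tuning values are placeholders; the 'insert'/'upsert' strings follow the writeBehavior description in the docstring.

from azure.mgmt.datafactory.models import CosmosDbMongoDbApiSink

sink = CosmosDbMongoDbApiSink(
    write_behavior='upsert',         # overwrite documents with the same key; default is 'insert'
    write_batch_size=500,            # optional CopySink tuning knob
    write_batch_timeout='00:05:00',  # TimeSpan-style pattern per the docstring
)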
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink_py3.py new file mode 100644 index 000000000000..5db1ee5c9d36 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink_py3.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class CosmosDbMongoDbApiSink(CopySink): + """A copy activity sink for a CosmosDB (MongoDB API) database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param write_behavior: Specifies whether the document with the same key is + to be overwritten (upsert) rather than an exception thrown (insert). The + default value is "insert". Type: string (or Expression with resultType + string). + :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: + super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.write_behavior = write_behavior + self.type = 'CosmosDbMongoDbApiSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py new file mode 100644 index 000000000000..44dc7443427b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class CosmosDbMongoDbApiSource(CopySource): + """A copy activity source for a CosmosDB (MongoDB API) database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server.
+ :type type: str + :param filter: Specifies selection filter using query operators. To return + all documents in a collection, omit this parameter or pass an empty + document ({}). Type: string (or Expression with resultType string). + :type filter: object + :param cursor_methods: Cursor methods for MongoDB query. + :type cursor_methods: + ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties + :param batch_size: Specifies the number of documents to return in each + batch of the response from the MongoDB instance. In most cases, modifying + the batch size will not affect the user or the application. This + property's main purpose is to avoid hitting the limitation of response + size. Type: integer (or Expression with resultType integer). + :type batch_size: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'filter': {'key': 'filter', 'type': 'object'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CosmosDbMongoDbApiSource, self).__init__(**kwargs) + self.filter = kwargs.get('filter', None) + self.cursor_methods = kwargs.get('cursor_methods', None) + self.batch_size = kwargs.get('batch_size', None) + self.type = 'CosmosDbMongoDbApiSource'
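A minimal sketch of the source above, not part of the generated patch. The filter document and batch size are placeholder values; the keyword arguments mirror the py3 signature that follows.

from azure.mgmt.datafactory.models import CosmosDbMongoDbApiSource

source = CosmosDbMongoDbApiSource(
    filter='{"status": "active"}',  # omit, or pass '{}', to return all documents
    batch_size=1000,                # documents per response batch; a tuning knob only
)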
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source_py3.py new file mode 100644 index 000000000000..7d180f105abf --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source_py3.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class CosmosDbMongoDbApiSource(CopySource): + """A copy activity source for a CosmosDB (MongoDB API) database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param filter: Specifies selection filter using query operators. To return + all documents in a collection, omit this parameter or pass an empty + document ({}). Type: string (or Expression with resultType string). + :type filter: object + :param cursor_methods: Cursor methods for MongoDB query. + :type cursor_methods: + ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties + :param batch_size: Specifies the number of documents to return in each + batch of the response from the MongoDB instance. In most cases, modifying + the batch size will not affect the user or the application. This + property's main purpose is to avoid hitting the limitation of response + size. Type: integer (or Expression with resultType integer). + :type batch_size: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'filter': {'key': 'filter', 'type': 'object'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None, **kwargs) -> None: + super(CosmosDbMongoDbApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.filter = filter + self.cursor_methods = cursor_methods + self.batch_size = batch_size + self.type = 'CosmosDbMongoDbApiSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service.py new file mode 100644 index 000000000000..76e45648f941 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class CouchbaseLinkedService(LinkedService): + """Couchbase server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service.
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param cred_string: The Azure key vault secret reference of credString in + connection string. + :type cred_string: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'cred_string': {'key': 'typeProperties.credString', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CouchbaseLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.cred_string = kwargs.get('cred_string', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Couchbase' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service_py3.py new file mode 100644 index 000000000000..afe336f666de --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class CouchbaseLinkedService(LinkedService): + """Couchbase server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param cred_string: The Azure key vault secret reference of credString in + connection string. + :type cred_string: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'cred_string': {'key': 'typeProperties.credString', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, cred_string=None, encrypted_credential=None, **kwargs) -> None: + super(CouchbaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.cred_string = cred_string + self.encrypted_credential = encrypted_credential + self.type = 'Couchbase' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source.py new file mode 100644 index 000000000000..b355605417d1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class CouchbaseSource(CopySource): + """A copy activity Couchbase server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). 
+ :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CouchbaseSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'CouchbaseSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source_py3.py new file mode 100644 index 000000000000..1358fc20feba --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class CouchbaseSource(CopySource): + """A copy activity Couchbase server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'CouchbaseSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset.py new file mode 100644 index 000000000000..821274b9aae4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class CouchbaseTableDataset(Dataset): + """Couchbase server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CouchbaseTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'CouchbaseTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset_py3.py new file mode 100644 index 000000000000..cf5299fd55a5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class CouchbaseTableDataset(Dataset): + """Couchbase server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(CouchbaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'CouchbaseTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request.py new file mode 100644 index 000000000000..0e7002dcf68a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class CreateLinkedIntegrationRuntimeRequest(Model): + """The linked integration runtime information. + + :param name: The name of the linked integration runtime. + :type name: str + :param subscription_id: The ID of the subscription that the linked + integration runtime belongs to. + :type subscription_id: str + :param data_factory_name: The name of the data factory that the linked + integration runtime belongs to. + :type data_factory_name: str + :param data_factory_location: The location of the data factory that the + linked integration runtime belongs to. 
+ :type data_factory_location: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, + 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, + 'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(CreateLinkedIntegrationRuntimeRequest, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.subscription_id = kwargs.get('subscription_id', None) + self.data_factory_name = kwargs.get('data_factory_name', None) + self.data_factory_location = kwargs.get('data_factory_location', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request_py3.py new file mode 100644 index 000000000000..aad7d6fa5ac0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request_py3.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class CreateLinkedIntegrationRuntimeRequest(Model): + """The linked integration runtime information. + + :param name: The name of the linked integration runtime. + :type name: str + :param subscription_id: The ID of the subscription that the linked + integration runtime belongs to. + :type subscription_id: str + :param data_factory_name: The name of the data factory that the linked + integration runtime belongs to. + :type data_factory_name: str + :param data_factory_location: The location of the data factory that the + linked integration runtime belongs to. + :type data_factory_location: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, + 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, + 'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'}, + } + + def __init__(self, *, name: str=None, subscription_id: str=None, data_factory_name: str=None, data_factory_location: str=None, **kwargs) -> None: + super(CreateLinkedIntegrationRuntimeRequest, self).__init__(**kwargs) + self.name = name + self.subscription_id = subscription_id + self.data_factory_name = data_factory_name + self.data_factory_location = data_factory_location diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response.py new file mode 100644 index 000000000000..18ec9f963e65 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class CreateRunResponse(Model): + """Response body with a run identifier. + + All required parameters must be populated in order to send to Azure. + + :param run_id: Required. Identifier of a run. + :type run_id: str + """ + + _validation = { + 'run_id': {'required': True}, + } + + _attribute_map = { + 'run_id': {'key': 'runId', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(CreateRunResponse, self).__init__(**kwargs) + self.run_id = kwargs.get('run_id', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response_py3.py new file mode 100644 index 000000000000..bb280441ae90 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response_py3.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class CreateRunResponse(Model): + """Response body with a run identifier. + + All required parameters must be populated in order to send to Azure. + + :param run_id: Required. Identifier of a run. + :type run_id: str + """ + + _validation = { + 'run_id': {'required': True}, + } + + _attribute_map = { + 'run_id': {'key': 'runId', 'type': 'str'}, + } + + def __init__(self, *, run_id: str, **kwargs) -> None: + super(CreateRunResponse, self).__init__(**kwargs) + self.run_id = run_id diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity.py new file mode 100644 index 000000000000..01cfb7335d37 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity.py @@ -0,0 +1,91 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity import ExecutionActivity + + +class CustomActivity(ExecutionActivity): + """Custom activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. 
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param command: Required. Command for custom activity. Type: string (or + Expression with resultType string). + :type command: object + :param resource_linked_service: Resource linked service reference. + :type resource_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param folder_path: Folder path for resource files. Type: string (or + Expression with resultType string). + :type folder_path: object + :param reference_objects: Reference objects. + :type reference_objects: + ~azure.mgmt.datafactory.models.CustomActivityReferenceObject + :param extended_properties: User defined property bag. There is no + restriction on the keys or values that can be used. The user-specified + custom activity has the full responsibility to consume and interpret the + content defined. + :type extended_properties: dict[str, object] + :param retention_time_in_days: The retention time for the files submitted + for custom activity. Type: double (or Expression with resultType double). + :type retention_time_in_days: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'command': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'command': {'key': 'typeProperties.command', 'type': 'object'}, + 'resource_linked_service': {'key': 'typeProperties.resourceLinkedService', 'type': 'LinkedServiceReference'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'}, + 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'}, + 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CustomActivity, self).__init__(**kwargs) + self.command = kwargs.get('command', None) + self.resource_linked_service = kwargs.get('resource_linked_service', None) + self.folder_path = kwargs.get('folder_path', None) + self.reference_objects = kwargs.get('reference_objects', None) + self.extended_properties = kwargs.get('extended_properties', None) + self.retention_time_in_days = kwargs.get('retention_time_in_days', None) + self.type = 'Custom'
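A usage sketch for CustomActivity, not part of the generated patch. The activity name, command, folder path, and linked service name are placeholders, and constructing LinkedServiceReference with reference_name is an assumption based on the reference types cited in the docstring.

from azure.mgmt.datafactory.models import CustomActivity, LinkedServiceReference

activity = CustomActivity(
    name='RunScoringJob',        # required
    command='python score.py',   # required: what the compute target executes
    linked_service_name=LinkedServiceReference(
        reference_name='AzureBatchLinkedService'),  # assumed compute target
    folder_path='jobs/scoring/',  # where the resource files live
    retention_time_in_days=30,    # how long submitted files are retained
)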
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_py3.py @@ -0,0 +1,91 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class CustomActivity(ExecutionActivity): + """Custom activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param command: Required. Command for custom activity Type: string (or + Expression with resultType string). + :type command: object + :param resource_linked_service: Resource linked service reference. + :type resource_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param folder_path: Folder path for resource files Type: string (or + Expression with resultType string). + :type folder_path: object + :param reference_objects: Reference objects + :type reference_objects: + ~azure.mgmt.datafactory.models.CustomActivityReferenceObject + :param extended_properties: User defined property bag. There is no + restriction on the keys or values that can be used. The user specified + custom activity has the full responsibility to consume and interpret the + content defined. + :type extended_properties: dict[str, object] + :param retention_time_in_days: The retention time for the files submitted + for custom activity. Type: double (or Expression with resultType double). 
+ :type retention_time_in_days: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'command': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'command': {'key': 'typeProperties.command', 'type': 'object'}, + 'resource_linked_service': {'key': 'typeProperties.resourceLinkedService', 'type': 'LinkedServiceReference'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'}, + 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'}, + 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'}, + } + + def __init__(self, *, name: str, command, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, resource_linked_service=None, folder_path=None, reference_objects=None, extended_properties=None, retention_time_in_days=None, **kwargs) -> None: + super(CustomActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.command = command + self.resource_linked_service = resource_linked_service + self.folder_path = folder_path + self.reference_objects = reference_objects + self.extended_properties = extended_properties + self.retention_time_in_days = retention_time_in_days + self.type = 'Custom' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object.py new file mode 100644 index 000000000000..5f95a54612dd --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class CustomActivityReferenceObject(Model): + """Reference objects for custom activity. + + :param linked_services: Linked service references. + :type linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :param datasets: Dataset references. 
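A construction sketch for the CustomActivity model above, using its py3 signature; illustrative only: the reference names are hypothetical, and LinkedServiceReference and DatasetReference are assumed to accept a reference_name, as they do elsewhere in this package.

    from azure.mgmt.datafactory.models import (
        CustomActivity, CustomActivityReferenceObject, DatasetReference,
        LinkedServiceReference)

    activity = CustomActivity(
        name='MyCustomActivity',          # required
        command='cmd /c echo hello',      # required; serialized to typeProperties.command
        linked_service_name=LinkedServiceReference(reference_name='AzureBatchLS'),
        reference_objects=CustomActivityReferenceObject(
            linked_services=[LinkedServiceReference(reference_name='StorageLS')],
            datasets=[DatasetReference(reference_name='InputDataset')]),
        retention_time_in_days=30)        # Type: double per the docstring
    # The discriminator is fixed by the constructor: activity.type == 'Custom'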
+ :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference] + """ + + _attribute_map = { + 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceReference]'}, + 'datasets': {'key': 'datasets', 'type': '[DatasetReference]'}, + } + + def __init__(self, **kwargs): + super(CustomActivityReferenceObject, self).__init__(**kwargs) + self.linked_services = kwargs.get('linked_services', None) + self.datasets = kwargs.get('datasets', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object_py3.py new file mode 100644 index 000000000000..f860f0141bd0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object_py3.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class CustomActivityReferenceObject(Model): + """Reference objects for custom activity. + + :param linked_services: Linked service references. + :type linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :param datasets: Dataset references. + :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference] + """ + + _attribute_map = { + 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceReference]'}, + 'datasets': {'key': 'datasets', 'type': '[DatasetReference]'}, + } + + def __init__(self, *, linked_services=None, datasets=None, **kwargs) -> None: + super(CustomActivityReferenceObject, self).__init__(**kwargs) + self.linked_services = linked_services + self.datasets = datasets diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service.py new file mode 100644 index 000000000000..db14a05e7ad1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class CustomDataSourceLinkedService(LinkedService): + """Custom linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param type_properties: Required. Custom linked service properties. + :type type_properties: object + """ + + _validation = { + 'type': {'required': True}, + 'type_properties': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'type_properties': {'key': 'typeProperties', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CustomDataSourceLinkedService, self).__init__(**kwargs) + self.type_properties = kwargs.get('type_properties', None) + self.type = 'CustomDataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service_py3.py new file mode 100644 index 000000000000..f7633ee28cbd --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service_py3.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class CustomDataSourceLinkedService(LinkedService): + """Custom linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param type_properties: Required. Custom linked service properties. 
+ :type type_properties: object + """ + + _validation = { + 'type': {'required': True}, + 'type_properties': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'type_properties': {'key': 'typeProperties', 'type': 'object'}, + } + + def __init__(self, *, type_properties, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: + super(CustomDataSourceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type_properties = type_properties + self.type = 'CustomDataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset.py new file mode 100644 index 000000000000..a242309c3fd1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class CustomDataset(Dataset): + """The custom dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param type_properties: Custom dataset properties. 
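A construction sketch for the CustomDataSourceLinkedService above; illustrative only: the keys inside type_properties are invented, since the model deliberately treats them as an opaque object passed through to the service.

    from azure.mgmt.datafactory.models import CustomDataSourceLinkedService

    linked_service = CustomDataSourceLinkedService(
        type_properties={                  # required; serialized as-is under typeProperties
            'endpoint': 'https://example.invalid/api',
            'apiKey': '<placeholder>',
        },
        description='Connector with service-specific settings')
    # linked_service.type is fixed to 'CustomDataSource' by the constructor.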
+ :type type_properties: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'type_properties': {'key': 'typeProperties', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CustomDataset, self).__init__(**kwargs) + self.type_properties = kwargs.get('type_properties', None) + self.type = 'CustomDataset' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset_py3.py new file mode 100644 index 000000000000..c00dae2b2c56 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset_py3.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class CustomDataset(Dataset): + """The custom dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param type_properties: Custom dataset properties. 
+ :type type_properties: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'type_properties': {'key': 'typeProperties', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, type_properties=None, **kwargs) -> None: + super(CustomDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type_properties = type_properties + self.type = 'CustomDataset' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py similarity index 100% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity.py new file mode 100644 index 000000000000..364dfd79d71a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity.py @@ -0,0 +1,98 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity import ExecutionActivity + + +class DataLakeAnalyticsUSQLActivity(ExecutionActivity): + """Data Lake Analytics U-SQL activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. 
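A construction sketch for the CustomDataset above; illustrative only: the reference name and property bag are placeholders, and LinkedServiceReference is assumed to take a reference_name.

    from azure.mgmt.datafactory.models import CustomDataset, LinkedServiceReference

    dataset = CustomDataset(
        linked_service_name=LinkedServiceReference(reference_name='CustomLS'),  # required
        type_properties={'path': 'container/folder'},  # opaque, connector-specific bag
        annotations=['example'])
    # dataset.type is fixed to 'CustomDataset' by the constructor.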
+ :type type: str
+ :param linked_service_name: Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param policy: Activity policy.
+ :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+ :param script_path: Required. Case-sensitive path to folder that contains
+ the U-SQL script. Type: string (or Expression with resultType string).
+ :type script_path: object
+ :param script_linked_service: Required. Script linked service reference.
+ :type script_linked_service:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param degree_of_parallelism: The maximum number of nodes simultaneously
+ used to run the job. Default value is 1. Type: integer (or Expression with
+ resultType integer), minimum: 1.
+ :type degree_of_parallelism: object
+ :param priority: Determines which jobs out of all that are queued should
+ be selected to run first. The lower the number, the higher the priority.
+ Default value is 1000. Type: integer (or Expression with resultType
+ integer), minimum: 1.
+ :type priority: object
+ :param parameters: Parameters for U-SQL job request.
+ :type parameters: dict[str, object]
+ :param runtime_version: Runtime version of the U-SQL engine to use. Type:
+ string (or Expression with resultType string).
+ :type runtime_version: object
+ :param compilation_mode: Compilation mode of U-SQL. Must be one of these
+ values: Semantic, Full and SingleBox. Type: string (or Expression with
+ resultType string).
+ :type compilation_mode: object
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'script_path': {'required': True},
+ 'script_linked_service': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+ 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'},
+ 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'},
+ 'degree_of_parallelism': {'key': 'typeProperties.degreeOfParallelism', 'type': 'object'},
+ 'priority': {'key': 'typeProperties.priority', 'type': 'object'},
+ 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'},
+ 'runtime_version': {'key': 'typeProperties.runtimeVersion', 'type': 'object'},
+ 'compilation_mode': {'key': 'typeProperties.compilationMode', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(DataLakeAnalyticsUSQLActivity, self).__init__(**kwargs)
+ self.script_path = kwargs.get('script_path', None)
+ self.script_linked_service = kwargs.get('script_linked_service', None)
+ self.degree_of_parallelism = kwargs.get('degree_of_parallelism', None)
+ self.priority = kwargs.get('priority', None)
+ self.parameters = kwargs.get('parameters', None)
+ self.runtime_version = kwargs.get('runtime_version', None)
+ self.compilation_mode = kwargs.get('compilation_mode', None)
+ self.type = 'DataLakeAnalyticsU-SQL'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity_py3.py
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity_py3.py
new file mode 100644
index 000000000000..22623aa3622c
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity_py3.py
@@ -0,0 +1,98 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .execution_activity_py3 import ExecutionActivity
+
+
+class DataLakeAnalyticsUSQLActivity(ExecutionActivity):
+ """Data Lake Analytics U-SQL activity.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param linked_service_name: Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param policy: Activity policy.
+ :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+ :param script_path: Required. Case-sensitive path to folder that contains
+ the U-SQL script. Type: string (or Expression with resultType string).
+ :type script_path: object
+ :param script_linked_service: Required. Script linked service reference.
+ :type script_linked_service:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param degree_of_parallelism: The maximum number of nodes simultaneously
+ used to run the job. Default value is 1. Type: integer (or Expression with
+ resultType integer), minimum: 1.
+ :type degree_of_parallelism: object
+ :param priority: Determines which jobs out of all that are queued should
+ be selected to run first. The lower the number, the higher the priority.
+ Default value is 1000. Type: integer (or Expression with resultType
+ integer), minimum: 1.
+ :type priority: object
+ :param parameters: Parameters for U-SQL job request.
+ :type parameters: dict[str, object]
+ :param runtime_version: Runtime version of the U-SQL engine to use. Type:
+ string (or Expression with resultType string).
+ :type runtime_version: object
+ :param compilation_mode: Compilation mode of U-SQL. Must be one of these
+ values: Semantic, Full and SingleBox. Type: string (or Expression with
+ resultType string).
+ :type compilation_mode: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'script_path': {'required': True}, + 'script_linked_service': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, + 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, + 'degree_of_parallelism': {'key': 'typeProperties.degreeOfParallelism', 'type': 'object'}, + 'priority': {'key': 'typeProperties.priority', 'type': 'object'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, + 'runtime_version': {'key': 'typeProperties.runtimeVersion', 'type': 'object'}, + 'compilation_mode': {'key': 'typeProperties.compilationMode', 'type': 'object'}, + } + + def __init__(self, *, name: str, script_path, script_linked_service, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, degree_of_parallelism=None, priority=None, parameters=None, runtime_version=None, compilation_mode=None, **kwargs) -> None: + super(DataLakeAnalyticsUSQLActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.script_path = script_path + self.script_linked_service = script_linked_service + self.degree_of_parallelism = degree_of_parallelism + self.priority = priority + self.parameters = parameters + self.runtime_version = runtime_version + self.compilation_mode = compilation_mode + self.type = 'DataLakeAnalyticsU-SQL' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity.py new file mode 100644 index 000000000000..a49bd973e2b9 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity.py @@ -0,0 +1,76 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity import ExecutionActivity + + +class DatabricksNotebookActivity(ExecutionActivity): + """DatabricksNotebook activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. 
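A construction sketch for the DataLakeAnalyticsUSQLActivity above; illustrative only: paths and reference names are placeholders, and LinkedServiceReference is assumed to take a reference_name.

    from azure.mgmt.datafactory.models import (
        DataLakeAnalyticsUSQLActivity, LinkedServiceReference)

    usql = DataLakeAnalyticsUSQLActivity(
        name='TransformLogs',
        script_path='scripts/transform.usql',  # required; the folder path is case-sensitive
        script_linked_service=LinkedServiceReference(reference_name='AdlsStoreLS'),  # required
        degree_of_parallelism=3,
        priority=100,                          # lower number runs first
        parameters={'inputPath': '/raw/2019-06-07'},
        compilation_mode='Semantic')           # Semantic, Full or SingleBox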
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param linked_service_name: Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param policy: Activity policy.
+ :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+ :param notebook_path: Required. The absolute path of the notebook to be
+ run in the Databricks Workspace. This path must begin with a slash. Type:
+ string (or Expression with resultType string).
+ :type notebook_path: object
+ :param base_parameters: Base parameters to be used for each run of this
+ job. If the notebook takes a parameter that is not specified, the default
+ value from the notebook will be used.
+ :type base_parameters: dict[str, object]
+ :param libraries: A list of libraries to be installed on the cluster that
+ will execute the job.
+ :type libraries: list[dict[str, object]]
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'notebook_path': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+ 'notebook_path': {'key': 'typeProperties.notebookPath', 'type': 'object'},
+ 'base_parameters': {'key': 'typeProperties.baseParameters', 'type': '{object}'},
+ 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'},
+ }
+
+ def __init__(self, **kwargs):
+ super(DatabricksNotebookActivity, self).__init__(**kwargs)
+ self.notebook_path = kwargs.get('notebook_path', None)
+ self.base_parameters = kwargs.get('base_parameters', None)
+ self.libraries = kwargs.get('libraries', None)
+ self.type = 'DatabricksNotebook'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity_py3.py
new file mode 100644
index 000000000000..7d2d464b7a1a
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity_py3.py
@@ -0,0 +1,76 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .execution_activity_py3 import ExecutionActivity
+
+
+class DatabricksNotebookActivity(ExecutionActivity):
+ """DatabricksNotebook activity.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param linked_service_name: Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param policy: Activity policy.
+ :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+ :param notebook_path: Required. The absolute path of the notebook to be
+ run in the Databricks Workspace. This path must begin with a slash. Type:
+ string (or Expression with resultType string).
+ :type notebook_path: object
+ :param base_parameters: Base parameters to be used for each run of this
+ job. If the notebook takes a parameter that is not specified, the default
+ value from the notebook will be used.
+ :type base_parameters: dict[str, object]
+ :param libraries: A list of libraries to be installed on the cluster that
+ will execute the job.
+ :type libraries: list[dict[str, object]]
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'notebook_path': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+ 'notebook_path': {'key': 'typeProperties.notebookPath', 'type': 'object'},
+ 'base_parameters': {'key': 'typeProperties.baseParameters', 'type': '{object}'},
+ 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'},
+ }
+
+ def __init__(self, *, name: str, notebook_path, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, base_parameters=None, libraries=None, **kwargs) -> None:
+ super(DatabricksNotebookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
+ self.notebook_path = notebook_path
+ self.base_parameters = base_parameters
+ self.libraries = libraries
+ self.type = 'DatabricksNotebook'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity.py
new file mode 100644
index 000000000000..51e7245d12fe
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity.py
@@ -0,0 +1,75 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright
(c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity import ExecutionActivity + + +class DatabricksSparkJarActivity(ExecutionActivity): + """DatabricksSparkJar activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param main_class_name: Required. The full name of the class containing + the main method to be executed. This class must be contained in a JAR + provided as a library. Type: string (or Expression with resultType + string). + :type main_class_name: object + :param parameters: Parameters that will be passed to the main method. + :type parameters: list[object] + :param libraries: A list of libraries to be installed on the cluster that + will execute the job. 
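A construction sketch for the DatabricksNotebookActivity added above; illustrative only: the workspace path, linked-service name, and library spec are placeholders.

    from azure.mgmt.datafactory.models import (
        DatabricksNotebookActivity, LinkedServiceReference)

    notebook = DatabricksNotebookActivity(
        name='ProcessSales',
        notebook_path='/Shared/process-sales',  # required; must begin with a slash
        linked_service_name=LinkedServiceReference(reference_name='DatabricksLS'),
        base_parameters={'run_date': '2019-06-07'},  # notebook defaults fill any gaps
        libraries=[{'pypi': {'package': 'simplejson'}}])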
+ :type libraries: list[dict[str, object]] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'main_class_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'main_class_name': {'key': 'typeProperties.mainClassName', 'type': 'object'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, + 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + } + + def __init__(self, **kwargs): + super(DatabricksSparkJarActivity, self).__init__(**kwargs) + self.main_class_name = kwargs.get('main_class_name', None) + self.parameters = kwargs.get('parameters', None) + self.libraries = kwargs.get('libraries', None) + self.type = 'DatabricksSparkJar' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity_py3.py new file mode 100644 index 000000000000..6c33f3b51d1e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity_py3.py @@ -0,0 +1,75 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class DatabricksSparkJarActivity(ExecutionActivity): + """DatabricksSparkJar activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param main_class_name: Required. The full name of the class containing + the main method to be executed. This class must be contained in a JAR + provided as a library. Type: string (or Expression with resultType + string). + :type main_class_name: object + :param parameters: Parameters that will be passed to the main method. 
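A construction sketch for the DatabricksSparkJarActivity above; illustrative only: the class, paths, and library names are placeholders.

    from azure.mgmt.datafactory.models import DatabricksSparkJarActivity

    spark_jar = DatabricksSparkJarActivity(
        name='RunMainJar',
        main_class_name='com.example.etl.Main',   # required; class must ship in a library
        parameters=['--input', 'dbfs:/data/in'],  # forwarded to the main method
        libraries=[{'jar': 'dbfs:/libs/etl-assembly.jar'}])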
+ :type parameters: list[object] + :param libraries: A list of libraries to be installed on the cluster that + will execute the job. + :type libraries: list[dict[str, object]] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'main_class_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'main_class_name': {'key': 'typeProperties.mainClassName', 'type': 'object'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, + 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + } + + def __init__(self, *, name: str, main_class_name, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, parameters=None, libraries=None, **kwargs) -> None: + super(DatabricksSparkJarActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.main_class_name = main_class_name + self.parameters = parameters + self.libraries = libraries + self.type = 'DatabricksSparkJar' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity.py new file mode 100644 index 000000000000..56178d3882c5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity.py @@ -0,0 +1,75 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity import ExecutionActivity + + +class DatabricksSparkPythonActivity(ExecutionActivity): + """DatabricksSparkPython activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param python_file: Required. The URI of the Python file to be executed. + DBFS paths are supported. Type: string (or Expression with resultType + string). + :type python_file: object + :param parameters: Command line parameters that will be passed to the + Python file. + :type parameters: list[object] + :param libraries: A list of libraries to be installed on the cluster that + will execute the job. + :type libraries: list[dict[str, object]] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'python_file': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'python_file': {'key': 'typeProperties.pythonFile', 'type': 'object'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, + 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + } + + def __init__(self, **kwargs): + super(DatabricksSparkPythonActivity, self).__init__(**kwargs) + self.python_file = kwargs.get('python_file', None) + self.parameters = kwargs.get('parameters', None) + self.libraries = kwargs.get('libraries', None) + self.type = 'DatabricksSparkPython' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity_py3.py new file mode 100644 index 000000000000..5b16d0d5e9ef --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity_py3.py @@ -0,0 +1,75 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class DatabricksSparkPythonActivity(ExecutionActivity): + """DatabricksSparkPython activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. 
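A construction sketch for the DatabricksSparkPythonActivity above; illustrative only: the DBFS path and arguments are placeholders.

    from azure.mgmt.datafactory.models import DatabricksSparkPythonActivity

    spark_py = DatabricksSparkPythonActivity(
        name='RunEtlScript',
        python_file='dbfs:/scripts/etl.py',   # required; DBFS paths are supported
        parameters=['--env', 'test'],         # command line arguments for the script
        libraries=[{'pypi': {'package': 'requests'}}])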
+ :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param python_file: Required. The URI of the Python file to be executed. + DBFS paths are supported. Type: string (or Expression with resultType + string). + :type python_file: object + :param parameters: Command line parameters that will be passed to the + Python file. + :type parameters: list[object] + :param libraries: A list of libraries to be installed on the cluster that + will execute the job. + :type libraries: list[dict[str, object]] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'python_file': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'python_file': {'key': 'typeProperties.pythonFile', 'type': 'object'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, + 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + } + + def __init__(self, *, name: str, python_file, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, parameters=None, libraries=None, **kwargs) -> None: + super(DatabricksSparkPythonActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.python_file = python_file + self.parameters = parameters + self.libraries = libraries + self.type = 'DatabricksSparkPython' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py new file mode 100644 index 000000000000..e8e2974b4481 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py @@ -0,0 +1,113 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class Dataset(Model): + """The Azure Data Factory nested object which identifies data within different + data stores, such as tables, files, folders, and documents. + + You probably want to use the sub-classes and not this class directly. 
Known + sub-classes are: GoogleAdWordsObjectDataset, AzureDataExplorerTableDataset, + OracleServiceCloudObjectDataset, DynamicsAXResourceDataset, + ResponsysObjectDataset, SalesforceMarketingCloudObjectDataset, + VerticaTableDataset, NetezzaTableDataset, ZohoObjectDataset, + XeroObjectDataset, SquareObjectDataset, SparkObjectDataset, + ShopifyObjectDataset, ServiceNowObjectDataset, QuickBooksObjectDataset, + PrestoObjectDataset, PhoenixObjectDataset, PaypalObjectDataset, + MarketoObjectDataset, AzureMariaDBTableDataset, MariaDBTableDataset, + MagentoObjectDataset, JiraObjectDataset, ImpalaObjectDataset, + HubspotObjectDataset, HiveObjectDataset, HBaseObjectDataset, + GreenplumTableDataset, GoogleBigQueryObjectDataset, EloquaObjectDataset, + DrillTableDataset, CouchbaseTableDataset, ConcurObjectDataset, + AzurePostgreSqlTableDataset, AmazonMWSObjectDataset, HttpDataset, + AzureSearchIndexDataset, WebTableDataset, SapTableResourceDataset, + RestResourceDataset, SqlServerTableDataset, SapOpenHubTableDataset, + SapHanaTableDataset, SapEccResourceDataset, + SapCloudForCustomerResourceDataset, SapBwCubeDataset, SybaseTableDataset, + SalesforceServiceCloudObjectDataset, SalesforceObjectDataset, + MicrosoftAccessTableDataset, PostgreSqlTableDataset, MySqlTableDataset, + OdbcTableDataset, InformixTableDataset, RelationalTableDataset, + Db2TableDataset, AmazonRedshiftTableDataset, AzureMySqlTableDataset, + TeradataTableDataset, OracleTableDataset, ODataResourceDataset, + CosmosDbMongoDbApiCollectionDataset, MongoDbV2CollectionDataset, + MongoDbCollectionDataset, FileShareDataset, Office365Dataset, + AzureBlobFSDataset, AzureDataLakeStoreDataset, + CommonDataServiceForAppsEntityDataset, DynamicsCrmEntityDataset, + DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, + CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, + AzureSqlTableDataset, AzureTableDataset, AzureBlobDataset, BinaryDataset, + JsonDataset, DelimitedTextDataset, ParquetDataset, AvroDataset, + AmazonS3Dataset + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 
'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'Json': 'JsonDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + } + + def __init__(self, **kwargs): + super(Dataset, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.description = kwargs.get('description', None) + self.structure = kwargs.get('structure', None) + self.schema = kwargs.get('schema', None) + self.linked_service_name = kwargs.get('linked_service_name', None) + self.parameters = kwargs.get('parameters', None) + self.annotations = kwargs.get('annotations', None) + self.folder = kwargs.get('folder', None) + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression.py new file mode 100644 index 000000000000..71b041c5eb5b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression.py @@ -0,0 +1,38 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_compression import DatasetCompression + + +class DatasetBZip2Compression(DatasetCompression): + """The BZip2 compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(DatasetBZip2Compression, self).__init__(**kwargs) + self.type = 'BZip2' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression_py3.py new file mode 100644 index 000000000000..f97af4588e0a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression_py3.py @@ -0,0 +1,38 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_compression_py3 import DatasetCompression + + +class DatasetBZip2Compression(DatasetCompression): + """The BZip2 compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(DatasetBZip2Compression, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'BZip2' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression.py new file mode 100644 index 000000000000..c0c4e3d52624 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DatasetCompression(Model): + """The compression method used on a dataset. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: DatasetZipDeflateCompression, DatasetDeflateCompression, + DatasetGZipCompression, DatasetBZip2Compression + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. 
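+ For example, a payload of ``{"type": "GZip", "level": "Optimal"}`` + deserializes as :class:`DatasetGZipCompression` via the + ``_subtype_map`` below ('Optimal' is an assumed level value, not + defined in this patch).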
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'ZipDeflate': 'DatasetZipDeflateCompression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'BZip2': 'DatasetBZip2Compression'} + } + + def __init__(self, **kwargs): + super(DatasetCompression, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression_py3.py new file mode 100644 index 000000000000..3b10abc69abf --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression_py3.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DatasetCompression(Model): + """The compression method used on a dataset. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: DatasetZipDeflateCompression, DatasetDeflateCompression, + DatasetGZipCompression, DatasetBZip2Compression + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'ZipDeflate': 'DatasetZipDeflateCompression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'BZip2': 'DatasetBZip2Compression'} + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(DatasetCompression, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py new file mode 100644 index 000000000000..9c97e2bfa5e3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .dataset_compression import DatasetCompression + + +class DatasetDeflateCompression(DatasetCompression): + """The Deflate compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param level: The Deflate compression level. + :type level: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DatasetDeflateCompression, self).__init__(**kwargs) + self.level = kwargs.get('level', None) + self.type = 'Deflate' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py new file mode 100644 index 000000000000..11d00081bc1c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_compression_py3 import DatasetCompression + + +class DatasetDeflateCompression(DatasetCompression): + """The Deflate compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param level: The Deflate compression level. + :type level: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None: + super(DatasetDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs) + self.level = level + self.type = 'Deflate' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder.py new file mode 100644 index 000000000000..882c84a1e84c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DatasetFolder(Model): + """The folder that this Dataset is in. If not specified, Dataset will appear + at the root level. + + :param name: The name of the folder that this Dataset is in. + :type name: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(DatasetFolder, self).__init__(**kwargs) + self.name = kwargs.get('name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder_py3.py new file mode 100644 index 000000000000..ea7fc313f967 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder_py3.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DatasetFolder(Model): + """The folder that this Dataset is in. If not specified, Dataset will appear + at the root level. + + :param name: The name of the folder that this Dataset is in. + :type name: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__(self, *, name: str=None, **kwargs) -> None: + super(DatasetFolder, self).__init__(**kwargs) + self.name = name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py new file mode 100644 index 000000000000..4925127c7f0f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_compression import DatasetCompression + + +class DatasetGZipCompression(DatasetCompression): + """The GZip compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param level: The GZip compression level. 
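+ Typically the string 'Optimal' or 'Fastest'; this patch types it only + as ``object``, so treat those values as an assumption from the service + documentation.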
+ :type level: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DatasetGZipCompression, self).__init__(**kwargs) + self.level = kwargs.get('level', None) + self.type = 'GZip' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py new file mode 100644 index 000000000000..97346e06366d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_compression_py3 import DatasetCompression + + +class DatasetGZipCompression(DatasetCompression): + """The GZip compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param level: The GZip compression level. + :type level: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None: + super(DatasetGZipCompression, self).__init__(additional_properties=additional_properties, **kwargs) + self.level = level + self.type = 'GZip' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location.py new file mode 100644 index 000000000000..2c318a91cccb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DatasetLocation(Model): + """Dataset location. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. 
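+ For example 'AzureBlobStorageLocation' (an assumed value; the patch + leaves the set of location types open as a plain string).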
+ :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DatasetLocation, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = kwargs.get('type', None) + self.folder_path = kwargs.get('folder_path', None) + self.file_name = kwargs.get('file_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location_py3.py new file mode 100644 index 000000000000..d4e32d753197 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location_py3.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DatasetLocation(Model): + """Dataset location. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(DatasetLocation, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type + self.folder_path = folder_path + self.file_name = file_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py new file mode 100644 index 000000000000..82550c2a0df8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py @@ -0,0 +1,113 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class Dataset(Model): + """The Azure Data Factory nested object which identifies data within different + data stores, such as tables, files, folders, and documents. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: GoogleAdWordsObjectDataset, AzureDataExplorerTableDataset, + OracleServiceCloudObjectDataset, DynamicsAXResourceDataset, + ResponsysObjectDataset, SalesforceMarketingCloudObjectDataset, + VerticaTableDataset, NetezzaTableDataset, ZohoObjectDataset, + XeroObjectDataset, SquareObjectDataset, SparkObjectDataset, + ShopifyObjectDataset, ServiceNowObjectDataset, QuickBooksObjectDataset, + PrestoObjectDataset, PhoenixObjectDataset, PaypalObjectDataset, + MarketoObjectDataset, AzureMariaDBTableDataset, MariaDBTableDataset, + MagentoObjectDataset, JiraObjectDataset, ImpalaObjectDataset, + HubspotObjectDataset, HiveObjectDataset, HBaseObjectDataset, + GreenplumTableDataset, GoogleBigQueryObjectDataset, EloquaObjectDataset, + DrillTableDataset, CouchbaseTableDataset, ConcurObjectDataset, + AzurePostgreSqlTableDataset, AmazonMWSObjectDataset, HttpDataset, + AzureSearchIndexDataset, WebTableDataset, SapTableResourceDataset, + RestResourceDataset, SqlServerTableDataset, SapOpenHubTableDataset, + SapHanaTableDataset, SapEccResourceDataset, + SapCloudForCustomerResourceDataset, SapBwCubeDataset, SybaseTableDataset, + SalesforceServiceCloudObjectDataset, SalesforceObjectDataset, + MicrosoftAccessTableDataset, PostgreSqlTableDataset, MySqlTableDataset, + OdbcTableDataset, InformixTableDataset, RelationalTableDataset, + Db2TableDataset, AmazonRedshiftTableDataset, AzureMySqlTableDataset, + TeradataTableDataset, OracleTableDataset, ODataResourceDataset, + CosmosDbMongoDbApiCollectionDataset, MongoDbV2CollectionDataset, + MongoDbCollectionDataset, FileShareDataset, Office365Dataset, + AzureBlobFSDataset, AzureDataLakeStoreDataset, + CommonDataServiceForAppsEntityDataset, DynamicsCrmEntityDataset, + DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, + CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, + AzureSqlTableDataset, AzureTableDataset, AzureBlobDataset, BinaryDataset, + JsonDataset, DelimitedTextDataset, ParquetDataset, AvroDataset, + AmazonS3Dataset + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
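+ For example, ``parameters={'outputFolder': + ParameterSpecification(type='String')}`` declares a parameter that a + referencing pipeline can supply at run time (the parameter name is an + assumption).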
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 
'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'Json': 'JsonDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(Dataset, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.description = description + self.structure = structure + self.schema = schema + self.linked_service_name = linked_service_name + self.parameters = parameters + self.annotations = annotations + self.folder = folder + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference.py new file mode 100644 index 000000000000..ca3d385f31ce --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DatasetReference(Model): + """Dataset reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Dataset reference type. Default value: + "DatasetReference" . + :vartype type: str + :param reference_name: Required. Reference dataset name. + :type reference_name: str + :param parameters: Arguments for dataset. 
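+ These supply values for the parameters declared on the referenced + dataset, e.g. ``DatasetReference(reference_name='MyDataset', + parameters={'outputFolder': 'out/2019'})`` (a sketch; both names are + assumptions).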
+ :type parameters: dict[str, object] + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + type = "DatasetReference" + + def __init__(self, **kwargs): + super(DatasetReference, self).__init__(**kwargs) + self.reference_name = kwargs.get('reference_name', None) + self.parameters = kwargs.get('parameters', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference_py3.py new file mode 100644 index 000000000000..80162fd77da1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference_py3.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DatasetReference(Model): + """Dataset reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Dataset reference type. Default value: + "DatasetReference" . + :vartype type: str + :param reference_name: Required. Reference dataset name. + :type reference_name: str + :param parameters: Arguments for dataset. + :type parameters: dict[str, object] + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + type = "DatasetReference" + + def __init__(self, *, reference_name: str, parameters=None, **kwargs) -> None: + super(DatasetReference, self).__init__(**kwargs) + self.reference_name = reference_name + self.parameters = parameters diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource.py new file mode 100644 index 000000000000..a68fb563e425 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .sub_resource import SubResource + + +class DatasetResource(SubResource): + """Dataset resource type. 
+ + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Dataset properties. + :type properties: ~azure.mgmt.datafactory.models.Dataset + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'Dataset'}, + } + + def __init__(self, **kwargs): + super(DatasetResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource_paged.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource_paged.py new file mode 100644 index 000000000000..9cedba8bbce9 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.paging import Paged + + +class DatasetResourcePaged(Paged): + """ + A paging container for iterating over a list of :class:`DatasetResource <azure.mgmt.datafactory.models.DatasetResource>` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[DatasetResource]'} + } + + def __init__(self, *args, **kwargs): + + super(DatasetResourcePaged, self).__init__(*args, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource_py3.py new file mode 100644 index 000000000000..6eb099dcb884 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource_py3.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .sub_resource_py3 import SubResource + + +class DatasetResource(SubResource): + """Dataset resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure.
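+ + A minimal construction sketch (the blob dataset and reference name are + assumptions, not taken from this patch):: + + from azure.mgmt.datafactory.models import ( + AzureBlobDataset, DatasetResource, LinkedServiceReference) + + blob = AzureBlobDataset( + linked_service_name=LinkedServiceReference( + reference_name='myStorage'), + folder_path='container/raw') + resource = DatasetResource(properties=blob)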
+ + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Dataset properties. + :type properties: ~azure.mgmt.datafactory.models.Dataset + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'Dataset'}, + } + + def __init__(self, *, properties, **kwargs) -> None: + super(DatasetResource, self).__init__(**kwargs) + self.properties = properties diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format.py new file mode 100644 index 000000000000..b3160565230d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DatasetStorageFormat(Model): + """The format definition of a storage. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ParquetFormat, OrcFormat, AvroFormat, JsonFormat, + TextFormat + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param serializer: Serializer. Type: string (or Expression with resultType + string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with + resultType string). + :type deserializer: object + :param type: Required. Constant filled by server. 
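+ For these formats the discriminator matches the class name, e.g. a + ``{"type": "JsonFormat"}`` payload deserializes as :class:`JsonFormat` + per the ``_subtype_map`` below.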
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'ParquetFormat': 'ParquetFormat', 'OrcFormat': 'OrcFormat', 'AvroFormat': 'AvroFormat', 'JsonFormat': 'JsonFormat', 'TextFormat': 'TextFormat'} + } + + def __init__(self, **kwargs): + super(DatasetStorageFormat, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.serializer = kwargs.get('serializer', None) + self.deserializer = kwargs.get('deserializer', None) + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format_py3.py new file mode 100644 index 000000000000..faf746642d9e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DatasetStorageFormat(Model): + """The format definition of a storage. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ParquetFormat, OrcFormat, AvroFormat, JsonFormat, + TextFormat + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param serializer: Serializer. Type: string (or Expression with resultType + string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with + resultType string). + :type deserializer: object + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'ParquetFormat': 'ParquetFormat', 'OrcFormat': 'OrcFormat', 'AvroFormat': 'AvroFormat', 'JsonFormat': 'JsonFormat', 'TextFormat': 'TextFormat'} + } + + def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, **kwargs) -> None: + super(DatasetStorageFormat, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.serializer = serializer + self.deserializer = deserializer + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py new file mode 100644 index 000000000000..ed80bf3cbcf2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_compression import DatasetCompression + + +class DatasetZipDeflateCompression(DatasetCompression): + """The ZipDeflate compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param level: The ZipDeflate compression level. + :type level: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DatasetZipDeflateCompression, self).__init__(**kwargs) + self.level = kwargs.get('level', None) + self.type = 'ZipDeflate' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py new file mode 100644 index 000000000000..20abd6fe1088 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .dataset_compression_py3 import DatasetCompression + + +class DatasetZipDeflateCompression(DatasetCompression): + """The ZipDeflate compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param level: The ZipDeflate compression level. + :type level: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None: + super(DatasetZipDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs) + self.level = level + self.type = 'ZipDeflate' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service.py new file mode 100644 index 000000000000..d163d2b93c18 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class Db2LinkedService(LinkedService): + """Linked service for DB2 data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. Server name for connection. Type: string (or + Expression with resultType string). + :type server: object + :param database: Required. Database name for connection. Type: string (or + Expression with resultType string). + :type database: object + :param authentication_type: AuthenticationType to be used for connection. + Possible values include: 'Basic' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.Db2AuthenticationType + :param username: Username for authentication. Type: string (or Expression + with resultType string). + :type username: object + :param password: Password for authentication. 
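+ Typically a :class:`SecureString` or an + :class:`AzureKeyVaultSecretReference`, e.g. + ``password=SecureString(value='...')`` (both SecretBase sub-classes + exist in this package; the usage here is a sketch).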
+ :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(Db2LinkedService, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.database = kwargs.get('database', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Db2' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service_py3.py new file mode 100644 index 000000000000..44d784fa9bde --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class Db2LinkedService(LinkedService): + """Linked service for DB2 data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. Server name for connection. 
Type: string (or + Expression with resultType string). + :type server: object + :param database: Required. Database name for connection. Type: string (or + Expression with resultType string). + :type database: object + :param authentication_type: AuthenticationType to be used for connection. + Possible values include: 'Basic' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.Db2AuthenticationType + :param username: Username for authentication. Type: string (or Expression + with resultType string). + :type username: object + :param password: Password for authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, server, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(Db2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.server = server + self.database = database + self.authentication_type = authentication_type + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'Db2' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source.py new file mode 100644 index 000000000000..a6e8c31ffa1f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class Db2Source(CopySource): + """A copy activity source for Db2 databases. 
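The two Db2LinkedService files above are the Python 2 (kwargs) and Python 3 (keyword-only) generations of the same model. A minimal construction sketch, assuming illustrative connection values and msrest's Model.serialize() behavior; note how the 'typeProperties.*' keys in _attribute_map flatten into a nested typeProperties object:

    from azure.mgmt.datafactory.models import Db2LinkedService, SecureString

    # All connection values below are hypothetical placeholders.
    ls = Db2LinkedService(
        server='db2.example.com',
        database='SAMPLEDB',
        authentication_type='Basic',
        username='db2admin',
        password=SecureString(value='<secret>'),
    )

    # The discriminator is fixed by the constructor (self.type = 'Db2'), and
    # server/database serialize under 'typeProperties'.
    print(ls.serialize())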
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(Db2Source, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'Db2Source' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source_py3.py new file mode 100644 index 000000000000..20b169699ae0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class Db2Source(CopySource): + """A copy activity source for Db2 databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
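Db2Source carries a single type-specific property, the query. A sketch of how it would typically be constructed before being assigned to a copy activity's source slot (the schema and table names are hypothetical):

    from azure.mgmt.datafactory.models import Db2Source

    # 'query' accepts a literal string or an ADF expression object.
    source = Db2Source(query='SELECT * FROM DB2ADMIN.ORDERS WHERE QUANTITY > 10')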
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'Db2Source' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_table_dataset.py new file mode 100644 index 000000000000..7092d5fc6cb3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_table_dataset.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class Db2TableDataset(Dataset): + """The Db2 table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param db2_table_dataset_schema: The Db2 schema name. Type: string (or + Expression with resultType string). + :type db2_table_dataset_schema: object + :param table: The Db2 table name. 
Type: string (or Expression with + resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'db2_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(Db2TableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.db2_table_dataset_schema = kwargs.get('db2_table_dataset_schema', None) + self.table = kwargs.get('table', None) + self.type = 'Db2Table' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_table_dataset_py3.py new file mode 100644 index 000000000000..3fa296454a69 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_table_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class Db2TableDataset(Dataset): + """The Db2 table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. 
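Because Dataset already exposes a top-level 'schema' property (the physical type schema), the Db2 schema name is surfaced as db2_table_dataset_schema and mapped to typeProperties.schema. A construction sketch with placeholder names:

    from azure.mgmt.datafactory.models import (
        Db2TableDataset, LinkedServiceReference)

    # 'Db2LinkedService1' refers to a previously registered linked service.
    ds = Db2TableDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='Db2LinkedService1'),
        db2_table_dataset_schema='DB2ADMIN',  # serializes to typeProperties.schema
        table='ORDERS',
    )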
+ :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param db2_table_dataset_schema: The Db2 schema name. Type: string (or + Expression with resultType string). + :type db2_table_dataset_schema: object + :param table: The Db2 table name. Type: string (or Expression with + resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'db2_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, db2_table_dataset_schema=None, table=None, **kwargs) -> None: + super(Db2TableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.db2_table_dataset_schema = db2_table_dataset_schema + self.table = table + self.type = 'Db2Table' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity.py new file mode 100644 index 000000000000..34ba33a414d5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity import ExecutionActivity + + +class DeleteActivity(ExecutionActivity): + """Delete activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. 
+    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param linked_service_name: Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param policy: Activity policy.
+    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+    :param recursive: If true, files or sub-folders under the current folder
+     path will be deleted recursively. Default is false. Type: boolean (or
+     Expression with resultType boolean).
+    :type recursive: object
+    :param max_concurrent_connections: The maximum number of concurrent
+     connections to the data source.
+    :type max_concurrent_connections: int
+    :param enable_logging: Whether to record detailed logs of delete-activity
+     execution. Default value is false. Type: boolean (or Expression with
+     resultType boolean).
+    :type enable_logging: object
+    :param log_storage_settings: Log storage settings the customer needs to
+     provide when enableLogging is true.
+    :type log_storage_settings:
+     ~azure.mgmt.datafactory.models.LogStorageSettings
+    :param dataset: Required. Delete activity dataset reference.
+    :type dataset: ~azure.mgmt.datafactory.models.DatasetReference
+    """
+
+    _validation = {
+        'name': {'required': True},
+        'type': {'required': True},
+        'max_concurrent_connections': {'minimum': 1},
+        'dataset': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+        'recursive': {'key': 'typeProperties.recursive', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'typeProperties.maxConcurrentConnections', 'type': 'int'},
+        'enable_logging': {'key': 'typeProperties.enableLogging', 'type': 'object'},
+        'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'},
+        'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'},
+    }
+
+    def __init__(self, **kwargs):
+        super(DeleteActivity, self).__init__(**kwargs)
+        self.recursive = kwargs.get('recursive', None)
+        self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None)
+        self.enable_logging = kwargs.get('enable_logging', None)
+        self.log_storage_settings = kwargs.get('log_storage_settings', None)
+        self.dataset = kwargs.get('dataset', None)
+        self.type = 'Delete'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity_py3.py
new file mode 100644
index 000000000000..5107d9a3381a
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity_py3.py
@@ -0,0 +1,87 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
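A sketch of wiring up the DeleteActivity defined above; the dataset and linked service names are placeholders, and LogStorageSettings is assumed to take a linked_service_name reference as in this SDK generation:

    from azure.mgmt.datafactory.models import (
        DatasetReference, DeleteActivity, LinkedServiceReference,
        LogStorageSettings)

    delete_step = DeleteActivity(
        name='CleanStaging',
        dataset=DatasetReference(reference_name='StagingFolder'),
        recursive=True,
        max_concurrent_connections=4,  # validated against the minimum of 1
        enable_logging=True,
        log_storage_settings=LogStorageSettings(
            linked_service_name=LinkedServiceReference(
                reference_name='LogStore')),
    )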
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .execution_activity_py3 import ExecutionActivity
+
+
+class DeleteActivity(ExecutionActivity):
+    """Delete activity.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param name: Required. Activity name.
+    :type name: str
+    :param description: Activity description.
+    :type description: str
+    :param depends_on: Activity depends on condition.
+    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+    :param user_properties: Activity user properties.
+    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param linked_service_name: Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param policy: Activity policy.
+    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+    :param recursive: If true, files or sub-folders under the current folder
+     path will be deleted recursively. Default is false. Type: boolean (or
+     Expression with resultType boolean).
+    :type recursive: object
+    :param max_concurrent_connections: The maximum number of concurrent
+     connections to the data source.
+    :type max_concurrent_connections: int
+    :param enable_logging: Whether to record detailed logs of delete-activity
+     execution. Default value is false. Type: boolean (or Expression with
+     resultType boolean).
+    :type enable_logging: object
+    :param log_storage_settings: Log storage settings the customer needs to
+     provide when enableLogging is true.
+    :type log_storage_settings:
+     ~azure.mgmt.datafactory.models.LogStorageSettings
+    :param dataset: Required. Delete activity dataset reference.
+ :type dataset: ~azure.mgmt.datafactory.models.DatasetReference + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'max_concurrent_connections': {'minimum': 1}, + 'dataset': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'recursive': {'key': 'typeProperties.recursive', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'typeProperties.maxConcurrentConnections', 'type': 'int'}, + 'enable_logging': {'key': 'typeProperties.enableLogging', 'type': 'object'}, + 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + } + + def __init__(self, *, name: str, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, recursive=None, max_concurrent_connections: int=None, enable_logging=None, log_storage_settings=None, **kwargs) -> None: + super(DeleteActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.recursive = recursive + self.max_concurrent_connections = max_concurrent_connections + self.enable_logging = enable_logging + self.log_storage_settings = log_storage_settings + self.dataset = dataset + self.type = 'Delete' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset.py new file mode 100644 index 000000000000..bfee26fcd12c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset.py @@ -0,0 +1,122 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class DelimitedTextDataset(Dataset): + """Delimited text dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. 
Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param location: Required. The location of the delimited text storage.
+    :type location: ~azure.mgmt.datafactory.models.DatasetLocation
+    :param column_delimiter: The column delimiter. Type: string (or Expression
+     with resultType string).
+    :type column_delimiter: object
+    :param row_delimiter: The row delimiter. Type: string (or Expression with
+     resultType string).
+    :type row_delimiter: object
+    :param encoding_name: The code page name of the preferred encoding. If
+     not specified, the default value is UTF-8, unless the BOM denotes another
+     Unicode encoding. Refer to the name column of the table in the following
+     link to set supported values:
+     https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string
+     (or Expression with resultType string).
+    :type encoding_name: object
+    :param compression_codec: The compression codec used.
+    :type compression_codec: object
+    :param compression_level: The data compression method used for
+     DelimitedText.
+    :type compression_level: object
+    :param quote_char: The quote character. Type: string (or Expression with
+     resultType string).
+    :type quote_char: object
+    :param escape_char: The escape character. Type: string (or Expression with
+     resultType string).
+    :type escape_char: object
+    :param first_row_as_header: When used as input, treat the first row of
+     data as headers. When used as output, write the headers into the output
+     as the first row of data. The default value is false. Type: boolean (or
+     Expression with resultType boolean).
+    :type first_row_as_header: object
+    :param null_value: The null value string. Type: string (or Expression with
+     resultType string).
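A construction sketch for the dataset above, assuming the generic DatasetLocation model with an explicit type string (mirroring the FormatReadSettings/FormatWriteSettings pattern elsewhere in this patch); all names and paths are placeholders:

    from azure.mgmt.datafactory.models import (
        DatasetLocation, DelimitedTextDataset, LinkedServiceReference)

    csv_ds = DelimitedTextDataset(
        linked_service_name=LinkedServiceReference(reference_name='BlobStore'),
        location=DatasetLocation(
            type='AzureBlobStorageLocation',  # assumed discriminator string
            folder_path='input',
            file_name='data.csv'),
        column_delimiter=',',
        first_row_as_header=True,
        null_value='NULL',
    )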
+ :type null_value: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'}, + 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, + 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, + 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'}, + 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, + 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, + 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, + 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DelimitedTextDataset, self).__init__(**kwargs) + self.location = kwargs.get('location', None) + self.column_delimiter = kwargs.get('column_delimiter', None) + self.row_delimiter = kwargs.get('row_delimiter', None) + self.encoding_name = kwargs.get('encoding_name', None) + self.compression_codec = kwargs.get('compression_codec', None) + self.compression_level = kwargs.get('compression_level', None) + self.quote_char = kwargs.get('quote_char', None) + self.escape_char = kwargs.get('escape_char', None) + self.first_row_as_header = kwargs.get('first_row_as_header', None) + self.null_value = kwargs.get('null_value', None) + self.type = 'DelimitedText' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset_py3.py new file mode 100644 index 000000000000..c2597e6a022b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset_py3.py @@ -0,0 +1,122 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class DelimitedTextDataset(Dataset): + """Delimited text dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. 
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param location: Required. The location of the delimited text storage.
+    :type location: ~azure.mgmt.datafactory.models.DatasetLocation
+    :param column_delimiter: The column delimiter. Type: string (or Expression
+     with resultType string).
+    :type column_delimiter: object
+    :param row_delimiter: The row delimiter. Type: string (or Expression with
+     resultType string).
+    :type row_delimiter: object
+    :param encoding_name: The code page name of the preferred encoding. If
+     not specified, the default value is UTF-8, unless the BOM denotes another
+     Unicode encoding. Refer to the name column of the table in the following
+     link to set supported values:
+     https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string
+     (or Expression with resultType string).
+    :type encoding_name: object
+    :param compression_codec: The compression codec used.
+    :type compression_codec: object
+    :param compression_level: The data compression method used for
+     DelimitedText.
+    :type compression_level: object
+    :param quote_char: The quote character. Type: string (or Expression with
+     resultType string).
+    :type quote_char: object
+    :param escape_char: The escape character. Type: string (or Expression with
+     resultType string).
+    :type escape_char: object
+    :param first_row_as_header: When used as input, treat the first row of
+     data as headers. When used as output, write the headers into the output
+     as the first row of data. The default value is false. Type: boolean (or
+     Expression with resultType boolean).
+    :type first_row_as_header: object
+    :param null_value: The null value string. Type: string (or Expression with
+     resultType string).
+ :type null_value: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'}, + 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, + 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, + 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'}, + 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, + 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, + 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, + 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, column_delimiter=None, row_delimiter=None, encoding_name=None, compression_codec=None, compression_level=None, quote_char=None, escape_char=None, first_row_as_header=None, null_value=None, **kwargs) -> None: + super(DelimitedTextDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.location = location + self.column_delimiter = column_delimiter + self.row_delimiter = row_delimiter + self.encoding_name = encoding_name + self.compression_codec = compression_codec + self.compression_level = compression_level + self.quote_char = quote_char + self.escape_char = escape_char + self.first_row_as_header = first_row_as_header + self.null_value = null_value + self.type = 'DelimitedText' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings.py new file mode 100644 index 000000000000..364b103c426a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
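Unlike the dataset and activity models, the FormatReadSettings subclasses in this generation take 'type' as an ordinary required string rather than a server-filled constant, so it is passed explicitly:

    from azure.mgmt.datafactory.models import DelimitedTextReadSettings

    # Skip two preamble rows when reading each input file.
    read_settings = DelimitedTextReadSettings(
        type='DelimitedTextReadSettings', skip_line_count=2)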
+# -------------------------------------------------------------------------- + +from .format_read_settings import FormatReadSettings + + +class DelimitedTextReadSettings(FormatReadSettings): + """Delimited text read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param skip_line_count: Indicates the number of non-empty rows to skip + when reading data from input files. Type: integer (or Expression with + resultType integer). + :type skip_line_count: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DelimitedTextReadSettings, self).__init__(**kwargs) + self.skip_line_count = kwargs.get('skip_line_count', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings_py3.py new file mode 100644 index 000000000000..62aa0327cfb9 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings_py3.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .format_read_settings_py3 import FormatReadSettings + + +class DelimitedTextReadSettings(FormatReadSettings): + """Delimited text read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param skip_line_count: Indicates the number of non-empty rows to skip + when reading data from input files. Type: integer (or Expression with + resultType integer). 
+ :type skip_line_count: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, skip_line_count=None, **kwargs) -> None: + super(DelimitedTextReadSettings, self).__init__(additional_properties=additional_properties, type=type, **kwargs) + self.skip_line_count = skip_line_count diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py new file mode 100644 index 000000000000..15e0e590b4ee --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class DelimitedTextSink(CopySink): + """A copy activity DelimitedText sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :param format_settings: DelimitedText format settings. 
+ :type format_settings: + ~azure.mgmt.datafactory.models.DelimitedTextWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'}, + } + + def __init__(self, **kwargs): + super(DelimitedTextSink, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) + self.type = 'DelimitedTextSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py new file mode 100644 index 000000000000..6481f8021527 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class DelimitedTextSink(CopySink): + """A copy activity DelimitedText sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :param format_settings: DelimitedText format settings. 
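A sketch of how the DelimitedText source and sink pair up inside a CopyActivity (CopyActivity and DatasetReference come from this same models package; the reference names are placeholders, and store settings are omitted for brevity):

    from azure.mgmt.datafactory.models import (
        CopyActivity, DatasetReference, DelimitedTextReadSettings,
        DelimitedTextSink, DelimitedTextSource, DelimitedTextWriteSettings)

    copy_step = CopyActivity(
        name='CopyCsv',
        inputs=[DatasetReference(reference_name='RawCsv')],
        outputs=[DatasetReference(reference_name='CuratedCsv')],
        source=DelimitedTextSource(
            format_settings=DelimitedTextReadSettings(
                type='DelimitedTextReadSettings', skip_line_count=1)),
        sink=DelimitedTextSink(
            format_settings=DelimitedTextWriteSettings(
                type='DelimitedTextWriteSettings', file_extension='.csv')),
    )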
+ :type format_settings: + ~azure.mgmt.datafactory.models.DelimitedTextWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: + super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.format_settings = format_settings + self.type = 'DelimitedTextSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py new file mode 100644 index 000000000000..10a842ca374a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py @@ -0,0 +1,61 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class DelimitedTextSource(CopySource): + """A copy activity DelimitedText source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :param format_settings: DelimitedText format settings. 
+ :type format_settings: + ~azure.mgmt.datafactory.models.DelimitedTextReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'}, + } + + def __init__(self, **kwargs): + super(DelimitedTextSource, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) + self.type = 'DelimitedTextSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py new file mode 100644 index 000000000000..e551e32c847e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py @@ -0,0 +1,61 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class DelimitedTextSource(CopySource): + """A copy activity DelimitedText source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :param format_settings: DelimitedText format settings. 
+ :type format_settings: + ~azure.mgmt.datafactory.models.DelimitedTextReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: + super(DelimitedTextSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.format_settings = format_settings + self.type = 'DelimitedTextSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings.py new file mode 100644 index 000000000000..5e0d8db319e5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .format_write_settings import FormatWriteSettings + + +class DelimitedTextWriteSettings(FormatWriteSettings): + """Delimited text write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param quote_all_text: Indicates whether string values should always be + enclosed with quotes. Type: boolean (or Expression with resultType + boolean). + :type quote_all_text: object + :param file_extension: Required. The file extension used to create the + files. Type: string (or Expression with resultType string). 
+ :type file_extension: object + """ + + _validation = { + 'type': {'required': True}, + 'file_extension': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, + 'file_extension': {'key': 'fileExtension', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DelimitedTextWriteSettings, self).__init__(**kwargs) + self.quote_all_text = kwargs.get('quote_all_text', None) + self.file_extension = kwargs.get('file_extension', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings_py3.py new file mode 100644 index 000000000000..2be019ab1e6a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings_py3.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .format_write_settings_py3 import FormatWriteSettings + + +class DelimitedTextWriteSettings(FormatWriteSettings): + """Delimited text write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param quote_all_text: Indicates whether string values should always be + enclosed with quotes. Type: boolean (or Expression with resultType + boolean). + :type quote_all_text: object + :param file_extension: Required. The file extension used to create the + files. Type: string (or Expression with resultType string). + :type file_extension: object + """ + + _validation = { + 'type': {'required': True}, + 'file_extension': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, + 'file_extension': {'key': 'fileExtension', 'type': 'object'}, + } + + def __init__(self, *, type: str, file_extension, additional_properties=None, quote_all_text=None, **kwargs) -> None: + super(DelimitedTextWriteSettings, self).__init__(additional_properties=additional_properties, type=type, **kwargs) + self.quote_all_text = quote_all_text + self.file_extension = file_extension diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference.py new file mode 100644 index 000000000000..89e750df8f0d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DependencyReference(Model): + """Referenced dependency. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SelfDependencyTumblingWindowTriggerReference, + TriggerDependencyReference + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SelfDependencyTumblingWindowTriggerReference': 'SelfDependencyTumblingWindowTriggerReference', 'TriggerDependencyReference': 'TriggerDependencyReference'} + } + + def __init__(self, **kwargs): + super(DependencyReference, self).__init__(**kwargs) + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference_py3.py new file mode 100644 index 000000000000..1b0647b74991 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference_py3.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DependencyReference(Model): + """Referenced dependency. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SelfDependencyTumblingWindowTriggerReference, + TriggerDependencyReference + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SelfDependencyTumblingWindowTriggerReference': 'SelfDependencyTumblingWindowTriggerReference', 'TriggerDependencyReference': 'TriggerDependencyReference'} + } + + def __init__(self, **kwargs) -> None: + super(DependencyReference, self).__init__(**kwargs) + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings.py new file mode 100644 index 000000000000..a8065ec3cc06 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
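
A minimal sketch of how the `_subtype_map` on the new DependencyReference base is resolved during deserialization, following the name-to-class map pattern used by the generated client. The payload keys (`offset` and the negative timespan value) are assumptions based on the SelfDependencyTumblingWindowTriggerReference model.

from msrest import Deserializer

import azure.mgmt.datafactory.models as models

# Build the name -> class map the generated client hands to msrest.
client_models = {k: v for k, v in vars(models).items() if isinstance(v, type)}
deserialize = Deserializer(client_models)

# The 'type' discriminator selects the concrete subclass via _subtype_map.
ref = deserialize('DependencyReference',
                  {'type': 'SelfDependencyTumblingWindowTriggerReference',
                   'offset': '-02:00:00'})  # assumed payload shape
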
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DistcpSettings(Model): + """Distcp settings. + + All required parameters must be populated in order to send to Azure. + + :param resource_manager_endpoint: Required. Specifies the Yarn + ResourceManager endpoint. Type: string (or Expression with resultType + string). + :type resource_manager_endpoint: object + :param temp_script_path: Required. Specifies an existing folder path which + will be used to store temp Distcp command script. The script file is + generated by ADF and will be removed after Copy job finished. Type: string + (or Expression with resultType string). + :type temp_script_path: object + :param distcp_options: Specifies the Distcp options. Type: string (or + Expression with resultType string). + :type distcp_options: object + """ + + _validation = { + 'resource_manager_endpoint': {'required': True}, + 'temp_script_path': {'required': True}, + } + + _attribute_map = { + 'resource_manager_endpoint': {'key': 'resourceManagerEndpoint', 'type': 'object'}, + 'temp_script_path': {'key': 'tempScriptPath', 'type': 'object'}, + 'distcp_options': {'key': 'distcpOptions', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DistcpSettings, self).__init__(**kwargs) + self.resource_manager_endpoint = kwargs.get('resource_manager_endpoint', None) + self.temp_script_path = kwargs.get('temp_script_path', None) + self.distcp_options = kwargs.get('distcp_options', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings_py3.py new file mode 100644 index 000000000000..628e2d207f8e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings_py3.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DistcpSettings(Model): + """Distcp settings. + + All required parameters must be populated in order to send to Azure. + + :param resource_manager_endpoint: Required. Specifies the Yarn + ResourceManager endpoint. Type: string (or Expression with resultType + string). + :type resource_manager_endpoint: object + :param temp_script_path: Required. Specifies an existing folder path which + will be used to store temp Distcp command script. The script file is + generated by ADF and will be removed after Copy job finished. Type: string + (or Expression with resultType string). + :type temp_script_path: object + :param distcp_options: Specifies the Distcp options. Type: string (or + Expression with resultType string). 
+ :type distcp_options: object + """ + + _validation = { + 'resource_manager_endpoint': {'required': True}, + 'temp_script_path': {'required': True}, + } + + _attribute_map = { + 'resource_manager_endpoint': {'key': 'resourceManagerEndpoint', 'type': 'object'}, + 'temp_script_path': {'key': 'tempScriptPath', 'type': 'object'}, + 'distcp_options': {'key': 'distcpOptions', 'type': 'object'}, + } + + def __init__(self, *, resource_manager_endpoint, temp_script_path, distcp_options=None, **kwargs) -> None: + super(DistcpSettings, self).__init__(**kwargs) + self.resource_manager_endpoint = resource_manager_endpoint + self.temp_script_path = temp_script_path + self.distcp_options = distcp_options diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset.py new file mode 100644 index 000000000000..fb2b8d46fa9c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class DocumentDbCollectionDataset(Dataset): + """Microsoft Azure Document Database Collection dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection_name: Required. Document Database collection name. Type: + string (or Expression with resultType string). 
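
A minimal sketch of constructing the new DistcpSettings; the endpoint, folder path, and DistCp flags are placeholder assumptions.

from azure.mgmt.datafactory.models import DistcpSettings

distcp = DistcpSettings(
    resource_manager_endpoint='http://headnode:8088',  # assumed YARN ResourceManager endpoint
    temp_script_path='/tmp/adf-distcp',                # assumed existing folder for the temp script
    distcp_options='-m 10 -strategy dynamic',          # optional DistCp flags
)
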
+ :type collection_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'collection_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DocumentDbCollectionDataset, self).__init__(**kwargs) + self.collection_name = kwargs.get('collection_name', None) + self.type = 'DocumentDbCollection' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset_py3.py new file mode 100644 index 000000000000..5eb4dbbf0997 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class DocumentDbCollectionDataset(Dataset): + """Microsoft Azure Document Database Collection dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection_name: Required. Document Database collection name. Type: + string (or Expression with resultType string). 
+ :type collection_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'collection_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, collection_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(DocumentDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.collection_name = collection_name + self.type = 'DocumentDbCollection' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py new file mode 100644 index 000000000000..c2908dc1dd05 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class DocumentDbCollectionSink(CopySink): + """A copy activity Document Database Collection sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). 
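
A minimal sketch of the new dataset model. The linked service name is a placeholder, and LinkedServiceReference is assumed to take it as `reference_name`.

from azure.mgmt.datafactory.models import (
    DocumentDbCollectionDataset,
    LinkedServiceReference,
)

dataset = DocumentDbCollectionDataset(
    linked_service_name=LinkedServiceReference(reference_name='MyCosmosDbLinkedService'),
    collection_name='orders',  # maps to typeProperties.collectionName
)
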
+ :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param nesting_separator: Nested properties separator. Default is . (dot). + Type: string (or Expression with resultType string). + :type nesting_separator: object + :param write_behavior: Describes how to write data to Azure Cosmos DB. + Allowed values: insert and upsert. + :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DocumentDbCollectionSink, self).__init__(**kwargs) + self.nesting_separator = kwargs.get('nesting_separator', None) + self.write_behavior = kwargs.get('write_behavior', None) + self.type = 'DocumentDbCollectionSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py new file mode 100644 index 000000000000..f1410cd211a4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class DocumentDbCollectionSink(CopySink): + """A copy activity Document Database Collection sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. 
Constant filled by server. + :type type: str + :param nesting_separator: Nested properties separator. Default is . (dot). + Type: string (or Expression with resultType string). + :type nesting_separator: object + :param write_behavior: Describes how to write data to Azure Cosmos DB. + Allowed values: insert and upsert. + :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, nesting_separator=None, write_behavior=None, **kwargs) -> None: + super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.nesting_separator = nesting_separator + self.write_behavior = write_behavior + self.type = 'DocumentDbCollectionSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source.py new file mode 100644 index 000000000000..9fdd23f2795f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source.py @@ -0,0 +1,62 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class DocumentDbCollectionSource(CopySource): + """A copy activity Document Database Collection source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). 
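
A minimal sketch of the sink model; per the docstring above, `write_behavior` accepts insert or upsert, and the remaining values are illustrative.

from azure.mgmt.datafactory.models import DocumentDbCollectionSink

sink = DocumentDbCollectionSink(
    write_behavior='upsert',   # allowed values: insert, upsert
    nesting_separator='.',     # the documented default, shown explicitly
    write_batch_size=1000,     # optional batching knob inherited from CopySink
)
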
+ :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Documents query. Type: string (or Expression with resultType + string). + :type query: object + :param nesting_separator: Nested properties separator. Type: string (or + Expression with resultType string). + :type nesting_separator: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DocumentDbCollectionSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.nesting_separator = kwargs.get('nesting_separator', None) + self.type = 'DocumentDbCollectionSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source_py3.py new file mode 100644 index 000000000000..9e0bf6382b04 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source_py3.py @@ -0,0 +1,62 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class DocumentDbCollectionSource(CopySource): + """A copy activity Document Database Collection source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Documents query. Type: string (or Expression with resultType + string). + :type query: object + :param nesting_separator: Nested properties separator. Type: string (or + Expression with resultType string). 
+ :type nesting_separator: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, nesting_separator=None, **kwargs) -> None: + super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.nesting_separator = nesting_separator + self.type = 'DocumentDbCollectionSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service.py new file mode 100644 index 000000000000..c5428ace02a2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class DrillLinkedService(LinkedService): + """Drill server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
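
And the matching source model, with an assumed documents query.

from azure.mgmt.datafactory.models import DocumentDbCollectionSource

source = DocumentDbCollectionSource(
    query='SELECT * FROM c WHERE c.category = "books"',  # assumed query text
    nesting_separator='.',
)
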
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DrillLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.pwd = kwargs.get('pwd', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Drill' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service_py3.py new file mode 100644 index 000000000000..5fb0cb25ecdb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class DrillLinkedService(LinkedService): + """Drill server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: + super(DrillLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.pwd = pwd + self.encrypted_credential = encrypted_credential + self.type = 'Drill' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source.py new file mode 100644 index 000000000000..9a3391f27786 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class DrillSource(CopySource): + """A copy activity Drill server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
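
A minimal sketch of the Drill linked service; the ODBC-style connection string is an illustrative assumption.

from azure.mgmt.datafactory.models import DrillLinkedService

drill_ls = DrillLinkedService(
    # Assumed ODBC connection string; alternatively the password part can be
    # supplied as an AzureKeyVaultSecretReference via the 'pwd' parameter.
    connection_string='ConnectionType=Direct;Host=drill-host;Port=31010',
)
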
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DrillSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'DrillSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source_py3.py new file mode 100644 index 000000000000..313183abab83 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class DrillSource(CopySource): + """A copy activity Drill server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'DrillSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset.py new file mode 100644 index 000000000000..3dfd5715deb9 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class DrillTableDataset(Dataset): + """Drill server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The table name of the Drill. Type: string (or Expression + with resultType string). + :type table: object + :param drill_table_dataset_schema: The schema name of the Drill. 
Type: + string (or Expression with resultType string). + :type drill_table_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'drill_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DrillTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.drill_table_dataset_schema = kwargs.get('drill_table_dataset_schema', None) + self.type = 'DrillTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset_py3.py new file mode 100644 index 000000000000..db46bdc4e0bd --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class DrillTableDataset(Dataset): + """Drill server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. 
+ :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The table name of the Drill. Type: string (or Expression + with resultType string). + :type table: object + :param drill_table_dataset_schema: The schema name of the Drill. Type: + string (or Expression with resultType string). + :type drill_table_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'drill_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, drill_table_dataset_schema=None, **kwargs) -> None: + super(DrillTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.table = table + self.drill_table_dataset_schema = drill_table_dataset_schema + self.type = 'DrillTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service.py new file mode 100644 index 000000000000..5ff0b150718b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class DynamicsAXLinkedService(LinkedService): + """Dynamics AX linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
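
A minimal sketch of the Drill dataset using the split schema/table properties, since `table_name` is flagged above as retiring; the schema and table values are placeholders.

from azure.mgmt.datafactory.models import DrillTableDataset, LinkedServiceReference

dataset = DrillTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='MyDrillLinkedService'),
    drill_table_dataset_schema='dfs',  # maps to typeProperties.schema
    table='orders',                    # maps to typeProperties.table
)
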
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The Dynamics AX (or Dynamics 365 Finance and + Operations) instance OData endpoint. + :type url: object + :param service_principal_id: Required. Specify the application's client + ID. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. Specify the application's key. + Mark this field as a SecureString to store it securely in Data Factory, or + reference a secret stored in Azure Key Vault. Type: string (or Expression + with resultType string). + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: Required. Specify the tenant information (domain name or + tenant ID) under which your application resides. Retrieve it by hovering + the mouse in the top-right corner of the Azure portal. Type: string (or + Expression with resultType string). + :type tenant: object + :param aad_resource_id: Required. Specify the resource you are requesting + authorization. Type: string (or Expression with resultType string). + :type aad_resource_id: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + 'tenant': {'required': True}, + 'aad_resource_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DynamicsAXLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.aad_resource_id = kwargs.get('aad_resource_id', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'DynamicsAX' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service_py3.py new file mode 100644 index 000000000000..79d3a34ba313 --- /dev/null +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service_py3.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class DynamicsAXLinkedService(LinkedService): + """Dynamics AX linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The Dynamics AX (or Dynamics 365 Finance and + Operations) instance OData endpoint. + :type url: object + :param service_principal_id: Required. Specify the application's client + ID. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. Specify the application's key. + Mark this field as a SecureString to store it securely in Data Factory, or + reference a secret stored in Azure Key Vault. Type: string (or Expression + with resultType string). + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: Required. Specify the tenant information (domain name or + tenant ID) under which your application resides. Retrieve it by hovering + the mouse in the top-right corner of the Azure portal. Type: string (or + Expression with resultType string). + :type tenant: object + :param aad_resource_id: Required. Specify the resource you are requesting + authorization. Type: string (or Expression with resultType string). + :type aad_resource_id: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + 'tenant': {'required': True}, + 'aad_resource_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, url, service_principal_id, service_principal_key, tenant, aad_resource_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, encrypted_credential=None, **kwargs) -> None: + super(DynamicsAXLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.aad_resource_id = aad_resource_id + self.encrypted_credential = encrypted_credential + self.type = 'DynamicsAX' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py new file mode 100644 index 000000000000..392b8ac7b971 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class DynamicsAXResourceDataset(Dataset): + """The path of the Dynamics AX OData entity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. 
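
A minimal sketch of the Dynamics AX linked service. All endpoint and credential values are placeholders, and SecureString is assumed to be available as a SecretBase implementation for `service_principal_key`.

from azure.mgmt.datafactory.models import DynamicsAXLinkedService, SecureString

dynamics_ls = DynamicsAXLinkedService(
    url='https://contoso.operations.dynamics.com/data',  # assumed OData endpoint
    service_principal_id='<application-client-id>',
    service_principal_key=SecureString(value='<application-key>'),
    tenant='contoso.onmicrosoft.com',
    aad_resource_id='https://contoso.operations.dynamics.com',
)
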
+ :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param path: Required. The path of the Dynamics AX OData entity. Type: + string (or Expression with resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DynamicsAXResourceDataset, self).__init__(**kwargs) + self.path = kwargs.get('path', None) + self.type = 'DynamicsAXResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py new file mode 100644 index 000000000000..6cade3e4aa59 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class DynamicsAXResourceDataset(Dataset): + """The path of the Dynamics AX OData entity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param path: Required. The path of the Dynamics AX OData entity. Type: + string (or Expression with resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, path, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(DynamicsAXResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.path = path + self.type = 'DynamicsAXResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source.py new file mode 100644 index 000000000000..619bad0f75c9 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class DynamicsAXSource(CopySource): + """A copy activity Dynamics AX source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
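A sketch of constructing the `DynamicsAXResourceDataset` just defined; per its `_validation` rules, `linked_service_name` and `path` are the only required arguments, and the names below are placeholders:

```python
from azure.mgmt.datafactory.models import (
    DynamicsAXResourceDataset, LinkedServiceReference)

dataset = DynamicsAXResourceDataset(
    # Both arguments are required by the model's _validation rules.
    linked_service_name=LinkedServiceReference(
        reference_name='DynamicsAXLinkedService'),
    path='CustomersV3',  # placeholder OData entity path
)
```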
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DynamicsAXSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'DynamicsAXSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source_py3.py new file mode 100644 index 000000000000..7679e68bae7b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class DynamicsAXSource(CopySource): + """A copy activity Dynamics AX source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(DynamicsAXSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'DynamicsAXSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset.py new file mode 100644 index 000000000000..ff4079761cf0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class DynamicsCrmEntityDataset(Dataset): + """The Dynamics CRM entity dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param entity_name: The logical name of the entity. Type: string (or + Expression with resultType string). 
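The `DynamicsAXSource` above only adds an optional `query` on top of the common `CopySource` fields; a minimal construction might look like this, with the OData filter being a placeholder rather than a documented example:

```python
from azure.mgmt.datafactory.models import DynamicsAXSource

source = DynamicsAXSource(query="$filter=dataAreaId eq 'usmf'")
assert source.type == 'DynamicsAXSource'  # constant set by __init__
```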
+ :type entity_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DynamicsCrmEntityDataset, self).__init__(**kwargs) + self.entity_name = kwargs.get('entity_name', None) + self.type = 'DynamicsCrmEntity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset_py3.py new file mode 100644 index 000000000000..4a1ef86b2dc6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class DynamicsCrmEntityDataset(Dataset): + """The Dynamics CRM entity dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param entity_name: The logical name of the entity. Type: string (or + Expression with resultType string). 
+ :type entity_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, entity_name=None, **kwargs) -> None: + super(DynamicsCrmEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.entity_name = entity_name + self.type = 'DynamicsCrmEntity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service.py new file mode 100644 index 000000000000..aad71042bb04 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service.py @@ -0,0 +1,112 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class DynamicsCrmLinkedService(LinkedService): + """Dynamics CRM linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param deployment_type: Required. The deployment type of the Dynamics CRM + instance. 'Online' for Dynamics CRM Online and 'OnPremisesWithIfd' for + Dynamics CRM on-premises with Ifd. Type: string (or Expression with + resultType string). 
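As with the Dynamics AX dataset, a `DynamicsCrmEntityDataset` needs only a linked service reference; `entity_name` is optional per `_validation`. A hypothetical construction, with placeholder names:

```python
from azure.mgmt.datafactory.models import (
    DynamicsCrmEntityDataset, LinkedServiceReference)

dataset = DynamicsCrmEntityDataset(
    linked_service_name=LinkedServiceReference(
        reference_name='DynamicsCrmLinkedService'),
    entity_name='account',  # logical entity name; optional
)
```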
Possible values include: 'Online', 'OnPremisesWithIfd'
+    :type deployment_type: str or
+     ~azure.mgmt.datafactory.models.DynamicsDeploymentType
+    :param host_name: The host name of the on-premises Dynamics CRM server.
+     The property is required for on-prem and not allowed for online. Type:
+     string (or Expression with resultType string).
+    :type host_name: object
+    :param port: The port of the on-premises Dynamics CRM server. The
+     property is required for on-prem and not allowed for online. Default is
+     443. Type: integer (or Expression with resultType integer), minimum: 0.
+    :type port: object
+    :param service_uri: The URL to the Microsoft Dynamics CRM server. The
+     property is required for online and not allowed for on-prem. Type:
+     string (or Expression with resultType string).
+    :type service_uri: object
+    :param organization_name: The organization name of the Dynamics CRM
+     instance. The property is required for on-prem, and is required for
+     online when more than one Dynamics CRM instance is associated with the
+     user. Type: string (or Expression with resultType string).
+    :type organization_name: object
+    :param authentication_type: Required. The authentication type to connect
+     to the Dynamics CRM server. 'Office365' for the online scenario, 'Ifd'
+     for the on-premises with Ifd scenario. Type: string (or Expression with
+     resultType string). Possible values include: 'Office365', 'Ifd'
+    :type authentication_type: str or
+     ~azure.mgmt.datafactory.models.DynamicsAuthenticationType
+    :param username: Required. User name to access the Dynamics CRM
+     instance. Type: string (or Expression with resultType string).
+    :type username: object
+    :param password: Password to access the Dynamics CRM instance.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DynamicsCrmLinkedService, self).__init__(**kwargs) + self.deployment_type = kwargs.get('deployment_type', None) + self.host_name = kwargs.get('host_name', None) + self.port = kwargs.get('port', None) + self.service_uri = kwargs.get('service_uri', None) + self.organization_name = kwargs.get('organization_name', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'DynamicsCrm' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service_py3.py new file mode 100644 index 000000000000..2286301fabef --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service_py3.py @@ -0,0 +1,112 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class DynamicsCrmLinkedService(LinkedService): + """Dynamics CRM linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
:type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param deployment_type: Required. The deployment type of the Dynamics
+     CRM instance. 'Online' for Dynamics CRM Online and 'OnPremisesWithIfd'
+     for Dynamics CRM on-premises with Ifd. Type: string (or Expression with
+     resultType string). Possible values include: 'Online',
+     'OnPremisesWithIfd'
+    :type deployment_type: str or
+     ~azure.mgmt.datafactory.models.DynamicsDeploymentType
+    :param host_name: The host name of the on-premises Dynamics CRM server.
+     The property is required for on-prem and not allowed for online. Type:
+     string (or Expression with resultType string).
+    :type host_name: object
+    :param port: The port of the on-premises Dynamics CRM server. The
+     property is required for on-prem and not allowed for online. Default is
+     443. Type: integer (or Expression with resultType integer), minimum: 0.
+    :type port: object
+    :param service_uri: The URL to the Microsoft Dynamics CRM server. The
+     property is required for online and not allowed for on-prem. Type:
+     string (or Expression with resultType string).
+    :type service_uri: object
+    :param organization_name: The organization name of the Dynamics CRM
+     instance. The property is required for on-prem, and is required for
+     online when more than one Dynamics CRM instance is associated with the
+     user. Type: string (or Expression with resultType string).
+    :type organization_name: object
+    :param authentication_type: Required. The authentication type to connect
+     to the Dynamics CRM server. 'Office365' for the online scenario, 'Ifd'
+     for the on-premises with Ifd scenario. Type: string (or Expression with
+     resultType string). Possible values include: 'Office365', 'Ifd'
+    :type authentication_type: str or
+     ~azure.mgmt.datafactory.models.DynamicsAuthenticationType
+    :param username: Required. User name to access the Dynamics CRM
+     instance. Type: string (or Expression with resultType string).
+    :type username: object
+    :param password: Password to access the Dynamics CRM instance.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, deployment_type, authentication_type, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(DynamicsCrmLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.deployment_type = deployment_type + self.host_name = host_name + self.port = port + self.service_uri = service_uri + self.organization_name = organization_name + self.authentication_type = authentication_type + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'DynamicsCrm' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink.py new file mode 100644 index 000000000000..2d0f462e0f59 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class DynamicsCrmSink(CopySink): + """A copy activity Dynamics CRM sink. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. 
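A hypothetical construction of the `DynamicsCrmLinkedService` for the online deployment: per the docstring above, `service_uri` is supplied and `host_name`/`port` are omitted, and the enum-typed fields also accept their string values. All names and secrets below are placeholders:

```python
from azure.mgmt.datafactory.models import (
    DynamicsCrmLinkedService, SecureString)

crm_ls = DynamicsCrmLinkedService(
    deployment_type='Online',          # DynamicsDeploymentType value
    authentication_type='Office365',   # DynamicsAuthenticationType value
    username='admin@contoso.onmicrosoft.com',
    service_uri='https://contoso.crm.dynamics.com',
    password=SecureString(value='<password>'),
)
```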
Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :ivar write_behavior: Required. The write behavior for the operation. + Default value: "Upsert" . + :vartype write_behavior: str + :param ignore_null_values: The flag indicating whether to ignore null + values from input dataset (except key fields) during write operation. + Default is false. Type: boolean (or Expression with resultType boolean). + :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + 'write_behavior': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + write_behavior = "Upsert" + + def __init__(self, **kwargs): + super(DynamicsCrmSink, self).__init__(**kwargs) + self.ignore_null_values = kwargs.get('ignore_null_values', None) + self.type = 'DynamicsCrmSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink_py3.py new file mode 100644 index 000000000000..d9f4fcf092c8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class DynamicsCrmSink(CopySink): + """A copy activity Dynamics CRM sink. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :ivar write_behavior: Required. The write behavior for the operation. + Default value: "Upsert" . + :vartype write_behavior: str + :param ignore_null_values: The flag indicating whether to ignore null + values from input dataset (except key fields) during write operation. + Default is false. Type: boolean (or Expression with resultType boolean). + :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + 'write_behavior': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + write_behavior = "Upsert" + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None: + super(DynamicsCrmSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.ignore_null_values = ignore_null_values + self.type = 'DynamicsCrmSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source.py new file mode 100644 index 000000000000..641fad43f437 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
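Note that in the `DynamicsCrmSink` just defined, `write_behavior` is a class-level constant rather than a constructor argument, so only the optional fields are passed in. A minimal sketch:

```python
from azure.mgmt.datafactory.models import DynamicsCrmSink

sink = DynamicsCrmSink(ignore_null_values=True)

# write_behavior is declared constant in _validation and pinned to "Upsert";
# it is serialized with the request but cannot be overridden at construction.
assert DynamicsCrmSink.write_behavior == 'Upsert'
```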
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class DynamicsCrmSource(CopySource): + """A copy activity Dynamics CRM source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: FetchXML is a proprietary query language that is used in + Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression + with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DynamicsCrmSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'DynamicsCrmSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source_py3.py new file mode 100644 index 000000000000..29c3e78609a5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source_py3.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class DynamicsCrmSource(CopySource): + """A copy activity Dynamics CRM source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. 
Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: FetchXML is a proprietary query language that is used in + Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression + with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(DynamicsCrmSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'DynamicsCrmSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset.py new file mode 100644 index 000000000000..435c6d153066 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class DynamicsEntityDataset(Dataset): + """The Dynamics entity dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
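Since the `DynamicsCrmSource.query` carries a FetchXML document, a sketch might pass one inline; the query below is a placeholder, not a documented example:

```python
from azure.mgmt.datafactory.models import DynamicsCrmSource

fetch_xml = (
    '<fetch top="10">'
    '  <entity name="account">'
    '    <attribute name="name" />'
    '  </entity>'
    '</fetch>'
)
source = DynamicsCrmSource(query=fetch_xml)
```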
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param entity_name: The logical name of the entity. Type: string (or + Expression with resultType string). + :type entity_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DynamicsEntityDataset, self).__init__(**kwargs) + self.entity_name = kwargs.get('entity_name', None) + self.type = 'DynamicsEntity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset_py3.py new file mode 100644 index 000000000000..7ee671890354 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class DynamicsEntityDataset(Dataset): + """The Dynamics entity dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. 
+ :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param entity_name: The logical name of the entity. Type: string (or + Expression with resultType string). + :type entity_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, entity_name=None, **kwargs) -> None: + super(DynamicsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.entity_name = entity_name + self.type = 'DynamicsEntity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service.py new file mode 100644 index 000000000000..c925033d1240 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service.py @@ -0,0 +1,109 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class DynamicsLinkedService(LinkedService): + """Dynamics linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param deployment_type: Required. 
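The `DynamicsEntityDataset` mirrors the CRM variant; the sketch below also exercises the optional `folder` field using the `DatasetFolder` model from this package. Names are placeholders:

```python
from azure.mgmt.datafactory.models import (
    DatasetFolder, DynamicsEntityDataset, LinkedServiceReference)

dataset = DynamicsEntityDataset(
    linked_service_name=LinkedServiceReference(
        reference_name='DynamicsLinkedService'),
    entity_name='contact',
    folder=DatasetFolder(name='Dynamics365'),  # optional grouping folder
)
```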
The deployment type of the Dynamics
+     instance. 'Online' for Dynamics Online and 'OnPremisesWithIfd' for
+     Dynamics on-premises with Ifd. Type: string (or Expression with
+     resultType string).
+    :type deployment_type: object
+    :param host_name: The host name of the on-premises Dynamics server. The
+     property is required for on-prem and not allowed for online. Type:
+     string (or Expression with resultType string).
+    :type host_name: object
+    :param port: The port of the on-premises Dynamics server. The property
+     is required for on-prem and not allowed for online. Default is 443.
+     Type: integer (or Expression with resultType integer), minimum: 0.
+    :type port: object
+    :param service_uri: The URL to the Microsoft Dynamics server. The
+     property is required for online and not allowed for on-prem. Type:
+     string (or Expression with resultType string).
+    :type service_uri: object
+    :param organization_name: The organization name of the Dynamics
+     instance. The property is required for on-prem, and is required for
+     online when more than one Dynamics instance is associated with the
+     user. Type: string (or Expression with resultType string).
+    :type organization_name: object
+    :param authentication_type: Required. The authentication type to connect
+     to the Dynamics server. 'Office365' for the online scenario, 'Ifd' for
+     the on-premises with Ifd scenario. Type: string (or Expression with
+     resultType string).
+    :type authentication_type: object
+    :param username: Required. User name to access the Dynamics instance.
+     Type: string (or Expression with resultType string).
+    :type username: object
+    :param password: Password to access the Dynamics instance.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DynamicsLinkedService, self).__init__(**kwargs) + self.deployment_type = kwargs.get('deployment_type', None) + self.host_name = kwargs.get('host_name', None) + self.port = kwargs.get('port', None) + self.service_uri = kwargs.get('service_uri', None) + self.organization_name = kwargs.get('organization_name', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Dynamics' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service_py3.py new file mode 100644 index 000000000000..07c028ff2477 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service_py3.py @@ -0,0 +1,109 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class DynamicsLinkedService(LinkedService): + """Dynamics linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param deployment_type: Required. The deployment type of the Dynamics
+ instance. 'Online' for Dynamics Online and 'OnPremisesWithIfd' for
+ Dynamics on-premises with Ifd. Type: string (or Expression with resultType
+ string).
+ :type deployment_type: object
+ :param host_name: The host name of the on-premises Dynamics server. The
+ property is required for on-prem and not allowed for online. Type: string
+ (or Expression with resultType string).
+ :type host_name: object
+ :param port: The port of the on-premises Dynamics server. The property is
+ required for on-prem and not allowed for online. Default is 443. Type:
+ integer (or Expression with resultType integer), minimum: 0.
+ :type port: object
+ :param service_uri: The URL to the Microsoft Dynamics server. The property
+ is required for online and not allowed for on-prem. Type: string (or
+ Expression with resultType string).
+ :type service_uri: object
+ :param organization_name: The organization name of the Dynamics instance.
+ The property is required for on-prem, and required for online when more
+ than one Dynamics instance is associated with the user. Type: string (or
+ Expression with resultType string).
+ :type organization_name: object
+ :param authentication_type: Required. The authentication type to connect
+ to the Dynamics server. 'Office365' for the online scenario, 'Ifd' for the
+ on-premises with Ifd scenario. Type: string (or Expression with resultType
+ string).
+ :type authentication_type: object
+ :param username: Required. User name to access the Dynamics instance.
+ Type: string (or Expression with resultType string).
+ :type username: object
+ :param password: Password to access the Dynamics instance.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, deployment_type, authentication_type, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(DynamicsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.deployment_type = deployment_type + self.host_name = host_name + self.port = port + self.service_uri = service_uri + self.organization_name = organization_name + self.authentication_type = authentication_type + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'Dynamics' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py new file mode 100644 index 000000000000..45bac7b52064 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class DynamicsSink(CopySink): + """A copy activity Dynamics sink. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. 
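+
+ A hedged example of constructing this sink (write_behavior is a class
+ constant fixed to "Upsert" and is not passed by callers)::
+
+     from azure.mgmt.datafactory.models import DynamicsSink
+
+     sink = DynamicsSink(ignore_null_values=True)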
+ :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :ivar write_behavior: Required. The write behavior for the operation. + Default value: "Upsert" . + :vartype write_behavior: str + :param ignore_null_values: The flag indicating whether ignore null values + from input dataset (except key fields) during write operation. Default is + false. Type: boolean (or Expression with resultType boolean). + :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + 'write_behavior': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + write_behavior = "Upsert" + + def __init__(self, **kwargs): + super(DynamicsSink, self).__init__(**kwargs) + self.ignore_null_values = kwargs.get('ignore_null_values', None) + self.type = 'DynamicsSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py new file mode 100644 index 000000000000..5f736f9cf658 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class DynamicsSink(CopySink): + """A copy activity Dynamics sink. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. 
Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :ivar write_behavior: Required. The write behavior for the operation. + Default value: "Upsert" . + :vartype write_behavior: str + :param ignore_null_values: The flag indicating whether ignore null values + from input dataset (except key fields) during write operation. Default is + false. Type: boolean (or Expression with resultType boolean). + :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + 'write_behavior': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + write_behavior = "Upsert" + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None: + super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.ignore_null_values = ignore_null_values + self.type = 'DynamicsSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source.py new file mode 100644 index 000000000000..d38f96fee911 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
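+#
+# Hedged usage sketch for the DynamicsSource model defined below (the
+# FetchXML string is a placeholder, not a tested query):
+#
+#     from azure.mgmt.datafactory.models import DynamicsSource
+#
+#     source = DynamicsSource(
+#         query='<fetch><entity name="account" /></fetch>')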
+# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class DynamicsSource(CopySource): + """A copy activity Dynamics source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: FetchXML is a proprietary query language that is used in + Microsoft Dynamics (online & on-premises). Type: string (or Expression + with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DynamicsSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'DynamicsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source_py3.py new file mode 100644 index 000000000000..12d83625bc6a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source_py3.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class DynamicsSource(CopySource): + """A copy activity Dynamics source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. 
Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: FetchXML is a proprietary query language that is used in + Microsoft Dynamics (online & on-premises). Type: string (or Expression + with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'DynamicsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service.py new file mode 100644 index 000000000000..6249c2e2334b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service.py @@ -0,0 +1,91 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class EloquaLinkedService(LinkedService): + """Eloqua server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of the Eloqua server. (i.e. + eloqua.example.com) + :type endpoint: object + :param username: Required. The site name and user name of your Eloqua + account in the form: sitename/username. (i.e. Eloqua/Alice) + :type username: object + :param password: The password corresponding to the user name. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. 
The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(EloquaLinkedService, self).__init__(**kwargs) + self.endpoint = kwargs.get('endpoint', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Eloqua' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service_py3.py new file mode 100644 index 000000000000..623d798036a3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service_py3.py @@ -0,0 +1,91 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class EloquaLinkedService(LinkedService): + """Eloqua server linked service. + + All required parameters must be populated in order to send to Azure. 
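+
+ A minimal construction sketch (the endpoint and account values below are
+ placeholders taken from the parameter descriptions)::
+
+     from azure.mgmt.datafactory.models import (
+         EloquaLinkedService, SecureString)
+
+     ls = EloquaLinkedService(
+         endpoint='eloqua.example.com',
+         username='Eloqua/Alice',
+         password=SecureString(value='<password>'))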
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of the Eloqua server. (i.e. + eloqua.example.com) + :type endpoint: object + :param username: Required. The site name and user name of your Eloqua + account in the form: sitename/username. (i.e. Eloqua/Alice) + :type username: object + :param password: The password corresponding to the user name. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, endpoint, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(EloquaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.endpoint = endpoint + self.username = username + self.password = password + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Eloqua' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset.py new file mode 100644 index 000000000000..56adc0ce47c4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class EloquaObjectDataset(Dataset): + """Eloqua server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. 
Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(EloquaObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'EloquaObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset_py3.py new file mode 100644 index 000000000000..705f43cd225c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class EloquaObjectDataset(Dataset): + """Eloqua server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
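+
+ Illustrative instantiation (the reference name and table name are
+ placeholders)::
+
+     from azure.mgmt.datafactory.models import (
+         EloquaObjectDataset, LinkedServiceReference)
+
+     ds = EloquaObjectDataset(
+         linked_service_name=LinkedServiceReference(
+             reference_name='EloquaLinkedService'),
+         table_name='Accounts')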
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(EloquaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'EloquaObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source.py new file mode 100644 index 000000000000..f016140189f1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class EloquaSource(CopySource): + """A copy activity Eloqua server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(EloquaSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'EloquaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source_py3.py new file mode 100644 index 000000000000..d200ff32fd9d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class EloquaSource(CopySource): + """A copy activity Eloqua server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
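+
+ For example (the query text is illustrative only)::
+
+     from azure.mgmt.datafactory.models import EloquaSource
+
+     source = EloquaSource(query='SELECT * FROM Accounts')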
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'EloquaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference.py new file mode 100644 index 000000000000..5db1448a5a55 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class EntityReference(Model): + """The entity reference. + + :param type: The type of this referenced entity. Possible values include: + 'IntegrationRuntimeReference', 'LinkedServiceReference' + :type type: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType + :param reference_name: The name of this referenced entity. + :type reference_name: str + """ + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(EntityReference, self).__init__(**kwargs) + self.type = kwargs.get('type', None) + self.reference_name = kwargs.get('reference_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference_py3.py new file mode 100644 index 000000000000..f87698b67a64 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference_py3.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class EntityReference(Model): + """The entity reference. + + :param type: The type of this referenced entity. 
Possible values include: + 'IntegrationRuntimeReference', 'LinkedServiceReference' + :type type: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType + :param reference_name: The name of this referenced entity. + :type reference_name: str + """ + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + def __init__(self, *, type=None, reference_name: str=None, **kwargs) -> None: + super(EntityReference, self).__init__(**kwargs) + self.type = type + self.reference_name = reference_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity.py new file mode 100644 index 000000000000..0008b5eee153 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity import ControlActivity + + +class ExecutePipelineActivity(ControlActivity): + """Execute pipeline activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param pipeline: Required. Pipeline reference. + :type pipeline: ~azure.mgmt.datafactory.models.PipelineReference + :param parameters: Pipeline parameters. + :type parameters: dict[str, object] + :param wait_on_completion: Defines whether activity execution will wait + for the dependent pipeline execution to finish. Default is false. 
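+
+ A hedged construction sketch (the pipeline and parameter names are
+ placeholders)::
+
+     from azure.mgmt.datafactory.models import (
+         ExecutePipelineActivity, PipelineReference)
+
+     activity = ExecutePipelineActivity(
+         name='RunChildPipeline',
+         pipeline=PipelineReference(reference_name='ChildPipeline'),
+         parameters={'WindowStart': '2019-06-01T00:00:00Z'},
+         wait_on_completion=True)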
+ :type wait_on_completion: bool + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'pipeline': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipeline': {'key': 'typeProperties.pipeline', 'type': 'PipelineReference'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, + 'wait_on_completion': {'key': 'typeProperties.waitOnCompletion', 'type': 'bool'}, + } + + def __init__(self, **kwargs): + super(ExecutePipelineActivity, self).__init__(**kwargs) + self.pipeline = kwargs.get('pipeline', None) + self.parameters = kwargs.get('parameters', None) + self.wait_on_completion = kwargs.get('wait_on_completion', None) + self.type = 'ExecutePipeline' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity_py3.py new file mode 100644 index 000000000000..addaafabe7b0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity_py3.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity_py3 import ControlActivity + + +class ExecutePipelineActivity(ControlActivity): + """Execute pipeline activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param pipeline: Required. Pipeline reference. + :type pipeline: ~azure.mgmt.datafactory.models.PipelineReference + :param parameters: Pipeline parameters. + :type parameters: dict[str, object] + :param wait_on_completion: Defines whether activity execution will wait + for the dependent pipeline execution to finish. Default is false. 
+ :type wait_on_completion: bool + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'pipeline': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipeline': {'key': 'typeProperties.pipeline', 'type': 'PipelineReference'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, + 'wait_on_completion': {'key': 'typeProperties.waitOnCompletion', 'type': 'bool'}, + } + + def __init__(self, *, name: str, pipeline, additional_properties=None, description: str=None, depends_on=None, user_properties=None, parameters=None, wait_on_completion: bool=None, **kwargs) -> None: + super(ExecutePipelineActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.pipeline = pipeline + self.parameters = parameters + self.wait_on_completion = wait_on_completion + self.type = 'ExecutePipeline' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity.py new file mode 100644 index 000000000000..9efa853dac86 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity.py @@ -0,0 +1,124 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity import ExecutionActivity + + +class ExecuteSSISPackageActivity(ExecutionActivity): + """Execute SSIS package activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param package_location: Required. SSIS package location. + :type package_location: ~azure.mgmt.datafactory.models.SSISPackageLocation + :param runtime: Specifies the runtime to execute SSIS package. The value + should be "x86" or "x64". 
Type: string (or Expression with resultType + string). + :type runtime: object + :param logging_level: The logging level of SSIS package execution. Type: + string (or Expression with resultType string). + :type logging_level: object + :param environment_path: The environment path to execute the SSIS package. + Type: string (or Expression with resultType string). + :type environment_path: object + :param execution_credential: The package execution credential. + :type execution_credential: + ~azure.mgmt.datafactory.models.SSISExecutionCredential + :param connect_via: Required. The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param project_parameters: The project level parameters to execute the + SSIS package. + :type project_parameters: dict[str, + ~azure.mgmt.datafactory.models.SSISExecutionParameter] + :param package_parameters: The package level parameters to execute the + SSIS package. + :type package_parameters: dict[str, + ~azure.mgmt.datafactory.models.SSISExecutionParameter] + :param project_connection_managers: The project level connection managers + to execute the SSIS package. + :type project_connection_managers: dict[str, dict[str, + ~azure.mgmt.datafactory.models.SSISExecutionParameter]] + :param package_connection_managers: The package level connection managers + to execute the SSIS package. + :type package_connection_managers: dict[str, dict[str, + ~azure.mgmt.datafactory.models.SSISExecutionParameter]] + :param property_overrides: The property overrides to execute the SSIS + package. + :type property_overrides: dict[str, + ~azure.mgmt.datafactory.models.SSISPropertyOverride] + :param log_location: SSIS package execution log location. + :type log_location: ~azure.mgmt.datafactory.models.SSISLogLocation + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'package_location': {'required': True}, + 'connect_via': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'package_location': {'key': 'typeProperties.packageLocation', 'type': 'SSISPackageLocation'}, + 'runtime': {'key': 'typeProperties.runtime', 'type': 'object'}, + 'logging_level': {'key': 'typeProperties.loggingLevel', 'type': 'object'}, + 'environment_path': {'key': 'typeProperties.environmentPath', 'type': 'object'}, + 'execution_credential': {'key': 'typeProperties.executionCredential', 'type': 'SSISExecutionCredential'}, + 'connect_via': {'key': 'typeProperties.connectVia', 'type': 'IntegrationRuntimeReference'}, + 'project_parameters': {'key': 'typeProperties.projectParameters', 'type': '{SSISExecutionParameter}'}, + 'package_parameters': {'key': 'typeProperties.packageParameters', 'type': '{SSISExecutionParameter}'}, + 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, + 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, + 'property_overrides': {'key': 'typeProperties.propertyOverrides', 
'type': '{SSISPropertyOverride}'}, + 'log_location': {'key': 'typeProperties.logLocation', 'type': 'SSISLogLocation'}, + } + + def __init__(self, **kwargs): + super(ExecuteSSISPackageActivity, self).__init__(**kwargs) + self.package_location = kwargs.get('package_location', None) + self.runtime = kwargs.get('runtime', None) + self.logging_level = kwargs.get('logging_level', None) + self.environment_path = kwargs.get('environment_path', None) + self.execution_credential = kwargs.get('execution_credential', None) + self.connect_via = kwargs.get('connect_via', None) + self.project_parameters = kwargs.get('project_parameters', None) + self.package_parameters = kwargs.get('package_parameters', None) + self.project_connection_managers = kwargs.get('project_connection_managers', None) + self.package_connection_managers = kwargs.get('package_connection_managers', None) + self.property_overrides = kwargs.get('property_overrides', None) + self.log_location = kwargs.get('log_location', None) + self.type = 'ExecuteSSISPackage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity_py3.py new file mode 100644 index 000000000000..64efa9cd63ac --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity_py3.py @@ -0,0 +1,124 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class ExecuteSSISPackageActivity(ExecutionActivity): + """Execute SSIS package activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param package_location: Required. SSIS package location. + :type package_location: ~azure.mgmt.datafactory.models.SSISPackageLocation + :param runtime: Specifies the runtime to execute SSIS package. The value + should be "x86" or "x64". Type: string (or Expression with resultType + string). + :type runtime: object + :param logging_level: The logging level of SSIS package execution. Type: + string (or Expression with resultType string). 
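+
+ An abbreviated sketch (the package path, integration runtime name and
+ logging level are placeholders)::
+
+     from azure.mgmt.datafactory.models import (
+         ExecuteSSISPackageActivity, IntegrationRuntimeReference,
+         SSISPackageLocation)
+
+     activity = ExecuteSSISPackageActivity(
+         name='RunSSISPackage',
+         package_location=SSISPackageLocation(
+             package_path='SSISDB/Folder/Project/Package.dtsx'),
+         connect_via=IntegrationRuntimeReference(
+             reference_name='AzureSsisIR'),
+         logging_level='Basic')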
+ :type logging_level: object + :param environment_path: The environment path to execute the SSIS package. + Type: string (or Expression with resultType string). + :type environment_path: object + :param execution_credential: The package execution credential. + :type execution_credential: + ~azure.mgmt.datafactory.models.SSISExecutionCredential + :param connect_via: Required. The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param project_parameters: The project level parameters to execute the + SSIS package. + :type project_parameters: dict[str, + ~azure.mgmt.datafactory.models.SSISExecutionParameter] + :param package_parameters: The package level parameters to execute the + SSIS package. + :type package_parameters: dict[str, + ~azure.mgmt.datafactory.models.SSISExecutionParameter] + :param project_connection_managers: The project level connection managers + to execute the SSIS package. + :type project_connection_managers: dict[str, dict[str, + ~azure.mgmt.datafactory.models.SSISExecutionParameter]] + :param package_connection_managers: The package level connection managers + to execute the SSIS package. + :type package_connection_managers: dict[str, dict[str, + ~azure.mgmt.datafactory.models.SSISExecutionParameter]] + :param property_overrides: The property overrides to execute the SSIS + package. + :type property_overrides: dict[str, + ~azure.mgmt.datafactory.models.SSISPropertyOverride] + :param log_location: SSIS package execution log location. + :type log_location: ~azure.mgmt.datafactory.models.SSISLogLocation + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'package_location': {'required': True}, + 'connect_via': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'package_location': {'key': 'typeProperties.packageLocation', 'type': 'SSISPackageLocation'}, + 'runtime': {'key': 'typeProperties.runtime', 'type': 'object'}, + 'logging_level': {'key': 'typeProperties.loggingLevel', 'type': 'object'}, + 'environment_path': {'key': 'typeProperties.environmentPath', 'type': 'object'}, + 'execution_credential': {'key': 'typeProperties.executionCredential', 'type': 'SSISExecutionCredential'}, + 'connect_via': {'key': 'typeProperties.connectVia', 'type': 'IntegrationRuntimeReference'}, + 'project_parameters': {'key': 'typeProperties.projectParameters', 'type': '{SSISExecutionParameter}'}, + 'package_parameters': {'key': 'typeProperties.packageParameters', 'type': '{SSISExecutionParameter}'}, + 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, + 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, + 'property_overrides': {'key': 'typeProperties.propertyOverrides', 'type': '{SSISPropertyOverride}'}, + 'log_location': {'key': 'typeProperties.logLocation', 'type': 'SSISLogLocation'}, + } + + def __init__(self, *, name: str, package_location, connect_via, 
additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, runtime=None, logging_level=None, environment_path=None, execution_credential=None, project_parameters=None, package_parameters=None, project_connection_managers=None, package_connection_managers=None, property_overrides=None, log_location=None, **kwargs) -> None: + super(ExecuteSSISPackageActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.package_location = package_location + self.runtime = runtime + self.logging_level = logging_level + self.environment_path = environment_path + self.execution_credential = execution_credential + self.connect_via = connect_via + self.project_parameters = project_parameters + self.package_parameters = package_parameters + self.project_connection_managers = project_connection_managers + self.package_connection_managers = package_connection_managers + self.property_overrides = property_overrides + self.log_location = log_location + self.type = 'ExecuteSSISPackage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py new file mode 100644 index 000000000000..8c16eff2c753 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py @@ -0,0 +1,75 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .activity import Activity + + +class ExecutionActivity(Activity): + """Base class for all execution activities. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureFunctionActivity, DatabricksSparkPythonActivity, + DatabricksSparkJarActivity, DatabricksNotebookActivity, + DataLakeAnalyticsUSQLActivity, AzureMLUpdateResourceActivity, + AzureMLBatchExecutionActivity, GetMetadataActivity, WebActivity, + LookupActivity, AzureDataExplorerCommandActivity, DeleteActivity, + SqlServerStoredProcedureActivity, CustomActivity, + ExecuteSSISPackageActivity, HDInsightSparkActivity, + HDInsightStreamingActivity, HDInsightMapReduceActivity, + HDInsightPigActivity, HDInsightHiveActivity, CopyActivity + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. 
+ :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + } + + _subtype_map = { + 'type': {'AzureFunctionActivity': 'AzureFunctionActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'GetMetadata': 'GetMetadataActivity', 'WebActivity': 'WebActivity', 'Lookup': 'LookupActivity', 'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'Delete': 'DeleteActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'Custom': 'CustomActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'Copy': 'CopyActivity'} + } + + def __init__(self, **kwargs): + super(ExecutionActivity, self).__init__(**kwargs) + self.linked_service_name = kwargs.get('linked_service_name', None) + self.policy = kwargs.get('policy', None) + self.type = 'Execution' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity_py3.py new file mode 100644 index 000000000000..5deb58db81a7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity_py3.py @@ -0,0 +1,75 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .activity_py3 import Activity + + +class ExecutionActivity(Activity): + """Base class for all execution activities. + + You probably want to use the sub-classes and not this class directly. 
Known + sub-classes are: AzureFunctionActivity, DatabricksSparkPythonActivity, + DatabricksSparkJarActivity, DatabricksNotebookActivity, + DataLakeAnalyticsUSQLActivity, AzureMLUpdateResourceActivity, + AzureMLBatchExecutionActivity, GetMetadataActivity, WebActivity, + LookupActivity, AzureDataExplorerCommandActivity, DeleteActivity, + SqlServerStoredProcedureActivity, CustomActivity, + ExecuteSSISPackageActivity, HDInsightSparkActivity, + HDInsightStreamingActivity, HDInsightMapReduceActivity, + HDInsightPigActivity, HDInsightHiveActivity, CopyActivity + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + } + + _subtype_map = { + 'type': {'AzureFunctionActivity': 'AzureFunctionActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'GetMetadata': 'GetMetadataActivity', 'WebActivity': 'WebActivity', 'Lookup': 'LookupActivity', 'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'Delete': 'DeleteActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'Custom': 'CustomActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'Copy': 'CopyActivity'} + } + + def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, **kwargs) -> None: + super(ExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.linked_service_name = linked_service_name + self.policy = policy + self.type = 
'Execution' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request.py new file mode 100644 index 000000000000..a6a2cc280b4d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ExposureControlRequest(Model): + """The exposure control request. + + :param feature_name: The feature name. + :type feature_name: str + :param feature_type: The feature type. + :type feature_type: str + """ + + _attribute_map = { + 'feature_name': {'key': 'featureName', 'type': 'str'}, + 'feature_type': {'key': 'featureType', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ExposureControlRequest, self).__init__(**kwargs) + self.feature_name = kwargs.get('feature_name', None) + self.feature_type = kwargs.get('feature_type', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request_py3.py new file mode 100644 index 000000000000..b3f4099fb972 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request_py3.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ExposureControlRequest(Model): + """The exposure control request. + + :param feature_name: The feature name. + :type feature_name: str + :param feature_type: The feature type. + :type feature_type: str + """ + + _attribute_map = { + 'feature_name': {'key': 'featureName', 'type': 'str'}, + 'feature_type': {'key': 'featureType', 'type': 'str'}, + } + + def __init__(self, *, feature_name: str=None, feature_type: str=None, **kwargs) -> None: + super(ExposureControlRequest, self).__init__(**kwargs) + self.feature_name = feature_name + self.feature_type = feature_type diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response.py new file mode 100644 index 000000000000..868647e3c5b3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ExposureControlResponse(Model): + """The exposure control response. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar feature_name: The feature name. + :vartype feature_name: str + :ivar value: The feature value. + :vartype value: str + """ + + _validation = { + 'feature_name': {'readonly': True}, + 'value': {'readonly': True}, + } + + _attribute_map = { + 'feature_name': {'key': 'featureName', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ExposureControlResponse, self).__init__(**kwargs) + self.feature_name = None + self.value = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response_py3.py new file mode 100644 index 000000000000..1ac7138e7984 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response_py3.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ExposureControlResponse(Model): + """The exposure control response. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar feature_name: The feature name. + :vartype feature_name: str + :ivar value: The feature value. + :vartype value: str + """ + + _validation = { + 'feature_name': {'readonly': True}, + 'value': {'readonly': True}, + } + + _attribute_map = { + 'feature_name': {'key': 'featureName', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(ExposureControlResponse, self).__init__(**kwargs) + self.feature_name = None + self.value = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression.py new file mode 100644 index 000000000000..4b16ceca2794 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
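The ExposureControlRequest/ExposureControlResponse pair above forms a simple feature-flag handshake: the caller names a feature, and the service answers with the feature name and its value, both of which are read-only on the response model. A minimal sketch of how the pair fits together follows; the exposure_control operations group and the exact get_feature_value signature are assumptions based on the operation these models were generated for, and the feature and location values are placeholders:

from azure.mgmt.datafactory.models import ExposureControlRequest

# Build the request body; both fields are optional strings.
request = ExposureControlRequest(
    feature_name='ADFSampleFeature',   # hypothetical feature name
    feature_type='Feature',            # hypothetical feature type
)

# Assumed call shape for the operations group that consumes this model:
# response = client.exposure_control.get_feature_value(
#     location_id='eastus',                  # placeholder location
#     feature_name=request.feature_name,
#     feature_type=request.feature_type,
# )
# response.feature_name and response.value are declared :ivar/readonly, so
# they deserialize from the service response but are never serialized out.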
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class Expression(Model): + """Azure Data Factory expression definition. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Expression type. Default value: "Expression" . + :vartype type: str + :param value: Required. Expression value. + :type value: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + type = "Expression" + + def __init__(self, **kwargs): + super(Expression, self).__init__(**kwargs) + self.value = kwargs.get('value', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression_py3.py new file mode 100644 index 000000000000..c6ad023a57ed --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression_py3.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class Expression(Model): + """Azure Data Factory expression definition. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Expression type. Default value: "Expression" . + :vartype type: str + :param value: Required. Expression value. + :type value: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + type = "Expression" + + def __init__(self, *, value: str, **kwargs) -> None: + super(Expression, self).__init__(**kwargs) + self.value = value diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory.py new file mode 100644 index 000000000000..614b3d7fc97a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory.py @@ -0,0 +1,81 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .resource import Resource + + +class Factory(Resource): + """Factory resource type. 
+ + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :param location: The resource location. + :type location: str + :param tags: The resource tags. + :type tags: dict[str, str] + :ivar e_tag: Etag identifies change in the resource. + :vartype e_tag: str + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param identity: Managed service identity of the factory. + :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity + :ivar provisioning_state: Factory provisioning state, example Succeeded. + :vartype provisioning_state: str + :ivar create_time: Time the factory was created in ISO8601 format. + :vartype create_time: datetime + :ivar version: Version of the factory. + :vartype version: str + :param repo_configuration: Git repo information of the factory. + :type repo_configuration: + ~azure.mgmt.datafactory.models.FactoryRepoConfiguration + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'e_tag': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'create_time': {'readonly': True}, + 'version': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'e_tag': {'key': 'eTag', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'create_time': {'key': 'properties.createTime', 'type': 'iso-8601'}, + 'version': {'key': 'properties.version', 'type': 'str'}, + 'repo_configuration': {'key': 'properties.repoConfiguration', 'type': 'FactoryRepoConfiguration'}, + } + + def __init__(self, **kwargs): + super(Factory, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.identity = kwargs.get('identity', None) + self.provisioning_state = None + self.create_time = None + self.version = None + self.repo_configuration = kwargs.get('repo_configuration', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration.py new file mode 100644 index 000000000000..02cec39d8313 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
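Before moving on to the repo configuration subclasses, it may help to see how the Factory model above is meant to be used: only location, tags, additional_properties, identity and repo_configuration are client-settable, while id, name, type, e_tag, provisioning_state, create_time and version are read-only and filled in by the service. A short sketch; the factories.create_or_update call follows the existing operations group, and the resource names are placeholders:

from azure.mgmt.datafactory.models import Factory, FactoryIdentity

# Pass only the writable properties; read-only fields stay None locally
# and are populated from the service response.
factory = Factory(
    location='eastus',                 # placeholder region
    tags={'team': 'data-platform'},    # placeholder tags
    identity=FactoryIdentity(),        # system-assigned identity (see below)
)

# created = client.factories.create_or_update('my-rg', 'my-factory', factory)
# created.provisioning_state  -> e.g. 'Succeeded'
# created.create_time         -> datetime parsed from the ISO-8601 payload
# created.version             -> factory version string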
+# -------------------------------------------------------------------------- + +from .factory_repo_configuration import FactoryRepoConfiguration + + +class FactoryGitHubConfiguration(FactoryRepoConfiguration): + """Factory's GitHub repo information. + + All required parameters must be populated in order to send to Azure. + + :param account_name: Required. Account name. + :type account_name: str + :param repository_name: Required. Repository name. + :type repository_name: str + :param collaboration_branch: Required. Collaboration branch. + :type collaboration_branch: str + :param root_folder: Required. Root folder. + :type root_folder: str + :param last_commit_id: Last commit id. + :type last_commit_id: str + :param type: Required. Constant filled by server. + :type type: str + :param host_name: GitHub Enterprise host name. For example: + https://github.mydomain.com + :type host_name: str + """ + + _validation = { + 'account_name': {'required': True}, + 'repository_name': {'required': True}, + 'collaboration_branch': {'required': True}, + 'root_folder': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'repository_name': {'key': 'repositoryName', 'type': 'str'}, + 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, + 'root_folder': {'key': 'rootFolder', 'type': 'str'}, + 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host_name': {'key': 'hostName', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(FactoryGitHubConfiguration, self).__init__(**kwargs) + self.host_name = kwargs.get('host_name', None) + self.type = 'FactoryGitHubConfiguration' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration_py3.py new file mode 100644 index 000000000000..23c5dbf21f0c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration_py3.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .factory_repo_configuration_py3 import FactoryRepoConfiguration + + +class FactoryGitHubConfiguration(FactoryRepoConfiguration): + """Factory's GitHub repo information. + + All required parameters must be populated in order to send to Azure. + + :param account_name: Required. Account name. + :type account_name: str + :param repository_name: Required. Repository name. + :type repository_name: str + :param collaboration_branch: Required. Collaboration branch. + :type collaboration_branch: str + :param root_folder: Required. Root folder. + :type root_folder: str + :param last_commit_id: Last commit id. + :type last_commit_id: str + :param type: Required. Constant filled by server. + :type type: str + :param host_name: GitHub Enterprise host name. 
For example: + https://github.mydomain.com + :type host_name: str + """ + + _validation = { + 'account_name': {'required': True}, + 'repository_name': {'required': True}, + 'collaboration_branch': {'required': True}, + 'root_folder': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'repository_name': {'key': 'repositoryName', 'type': 'str'}, + 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, + 'root_folder': {'key': 'rootFolder', 'type': 'str'}, + 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host_name': {'key': 'hostName', 'type': 'str'}, + } + + def __init__(self, *, account_name: str, repository_name: str, collaboration_branch: str, root_folder: str, last_commit_id: str=None, host_name: str=None, **kwargs) -> None: + super(FactoryGitHubConfiguration, self).__init__(account_name=account_name, repository_name=repository_name, collaboration_branch=collaboration_branch, root_folder=root_folder, last_commit_id=last_commit_id, **kwargs) + self.host_name = host_name + self.type = 'FactoryGitHubConfiguration' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity.py new file mode 100644 index 000000000000..dad745424af3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FactoryIdentity(Model): + """Identity properties of the factory resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. The identity type. Currently the only supported type + is 'SystemAssigned'. Default value: "SystemAssigned" . + :vartype type: str + :ivar principal_id: The principal id of the identity. + :vartype principal_id: str + :ivar tenant_id: The client tenant id of the identity. 
+ :vartype tenant_id: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + } + + type = "SystemAssigned" + + def __init__(self, **kwargs): + super(FactoryIdentity, self).__init__(**kwargs) + self.principal_id = None + self.tenant_id = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity_py3.py new file mode 100644 index 000000000000..567100d8c19e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity_py3.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FactoryIdentity(Model): + """Identity properties of the factory resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. The identity type. Currently the only supported type + is 'SystemAssigned'. Default value: "SystemAssigned" . + :vartype type: str + :ivar principal_id: The principal id of the identity. + :vartype principal_id: str + :ivar tenant_id: The client tenant id of the identity. + :vartype tenant_id: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + } + + type = "SystemAssigned" + + def __init__(self, **kwargs) -> None: + super(FactoryIdentity, self).__init__(**kwargs) + self.principal_id = None + self.tenant_id = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_paged.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_paged.py new file mode 100644 index 000000000000..589b44defc56 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
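One detail of FactoryIdentity worth calling out: type is declared as a class-level constant ('SystemAssigned' is the only supported value), so it is not a constructor parameter, and both ids are read-only until the service returns them. A local sketch, no service call required:

from azure.mgmt.datafactory.models import FactoryIdentity

identity = FactoryIdentity()     # no arguments: 'type' is fixed by the class
print(identity.type)             # 'SystemAssigned', a class attribute constant
print(identity.principal_id)    # None locally; populated by the server
print(identity.tenant_id)       # None locally; populated by the server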
+# -------------------------------------------------------------------------- + +from msrest.paging import Paged + + +class FactoryPaged(Paged): + """ + A paging container for iterating over a list of :class:`Factory <azure.mgmt.datafactory.models.Factory>` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[Factory]'} + } + + def __init__(self, *args, **kwargs): + + super(FactoryPaged, self).__init__(*args, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_py3.py new file mode 100644 index 000000000000..0682aa5f8852 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_py3.py @@ -0,0 +1,81 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .resource_py3 import Resource + + +class Factory(Resource): + """Factory resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :param location: The resource location. + :type location: str + :param tags: The resource tags. + :type tags: dict[str, str] + :ivar e_tag: Etag identifies change in the resource. + :vartype e_tag: str + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param identity: Managed service identity of the factory. + :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity + :ivar provisioning_state: Factory provisioning state, example Succeeded. + :vartype provisioning_state: str + :ivar create_time: Time the factory was created in ISO8601 format. + :vartype create_time: datetime + :ivar version: Version of the factory. + :vartype version: str + :param repo_configuration: Git repo information of the factory.
+ :type repo_configuration: + ~azure.mgmt.datafactory.models.FactoryRepoConfiguration + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'e_tag': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'create_time': {'readonly': True}, + 'version': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'e_tag': {'key': 'eTag', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'create_time': {'key': 'properties.createTime', 'type': 'iso-8601'}, + 'version': {'key': 'properties.version', 'type': 'str'}, + 'repo_configuration': {'key': 'properties.repoConfiguration', 'type': 'FactoryRepoConfiguration'}, + } + + def __init__(self, *, location: str=None, tags=None, additional_properties=None, identity=None, repo_configuration=None, **kwargs) -> None: + super(Factory, self).__init__(location=location, tags=tags, **kwargs) + self.additional_properties = additional_properties + self.identity = identity + self.provisioning_state = None + self.create_time = None + self.version = None + self.repo_configuration = repo_configuration diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration.py new file mode 100644 index 000000000000..7c20db016c71 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FactoryRepoConfiguration(Model): + """Factory's git repo information. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: FactoryVSTSConfiguration, FactoryGitHubConfiguration + + All required parameters must be populated in order to send to Azure. + + :param account_name: Required. Account name. + :type account_name: str + :param repository_name: Required. Repository name. + :type repository_name: str + :param collaboration_branch: Required. Collaboration branch. + :type collaboration_branch: str + :param root_folder: Required. Root folder. + :type root_folder: str + :param last_commit_id: Last commit id. + :type last_commit_id: str + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'account_name': {'required': True}, + 'repository_name': {'required': True}, + 'collaboration_branch': {'required': True}, + 'root_folder': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'repository_name': {'key': 'repositoryName', 'type': 'str'}, + 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, + 'root_folder': {'key': 'rootFolder', 'type': 'str'}, + 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'FactoryVSTSConfiguration': 'FactoryVSTSConfiguration', 'FactoryGitHubConfiguration': 'FactoryGitHubConfiguration'} + } + + def __init__(self, **kwargs): + super(FactoryRepoConfiguration, self).__init__(**kwargs) + self.account_name = kwargs.get('account_name', None) + self.repository_name = kwargs.get('repository_name', None) + self.collaboration_branch = kwargs.get('collaboration_branch', None) + self.root_folder = kwargs.get('root_folder', None) + self.last_commit_id = kwargs.get('last_commit_id', None) + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration_py3.py new file mode 100644 index 000000000000..eefed7978850 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration_py3.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FactoryRepoConfiguration(Model): + """Factory's git repo information. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: FactoryVSTSConfiguration, FactoryGitHubConfiguration + + All required parameters must be populated in order to send to Azure. + + :param account_name: Required. Account name. + :type account_name: str + :param repository_name: Required. Repository name. + :type repository_name: str + :param collaboration_branch: Required. Collaboration branch. + :type collaboration_branch: str + :param root_folder: Required. Root folder. + :type root_folder: str + :param last_commit_id: Last commit id. + :type last_commit_id: str + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'account_name': {'required': True}, + 'repository_name': {'required': True}, + 'collaboration_branch': {'required': True}, + 'root_folder': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'repository_name': {'key': 'repositoryName', 'type': 'str'}, + 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, + 'root_folder': {'key': 'rootFolder', 'type': 'str'}, + 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'FactoryVSTSConfiguration': 'FactoryVSTSConfiguration', 'FactoryGitHubConfiguration': 'FactoryGitHubConfiguration'} + } + + def __init__(self, *, account_name: str, repository_name: str, collaboration_branch: str, root_folder: str, last_commit_id: str=None, **kwargs) -> None: + super(FactoryRepoConfiguration, self).__init__(**kwargs) + self.account_name = account_name + self.repository_name = repository_name + self.collaboration_branch = collaboration_branch + self.root_folder = root_folder + self.last_commit_id = last_commit_id + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update.py new file mode 100644 index 000000000000..44eac9d287ce --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FactoryRepoUpdate(Model): + """Factory's git repo information. + + :param factory_resource_id: The factory resource id. + :type factory_resource_id: str + :param repo_configuration: Git repo information of the factory. + :type repo_configuration: + ~azure.mgmt.datafactory.models.FactoryRepoConfiguration + """ + + _attribute_map = { + 'factory_resource_id': {'key': 'factoryResourceId', 'type': 'str'}, + 'repo_configuration': {'key': 'repoConfiguration', 'type': 'FactoryRepoConfiguration'}, + } + + def __init__(self, **kwargs): + super(FactoryRepoUpdate, self).__init__(**kwargs) + self.factory_resource_id = kwargs.get('factory_resource_id', None) + self.repo_configuration = kwargs.get('repo_configuration', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update_py3.py new file mode 100644 index 000000000000..68aca7a48db8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update_py3.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FactoryRepoUpdate(Model): + """Factory's git repo information. + + :param factory_resource_id: The factory resource id. + :type factory_resource_id: str + :param repo_configuration: Git repo information of the factory. + :type repo_configuration: + ~azure.mgmt.datafactory.models.FactoryRepoConfiguration + """ + + _attribute_map = { + 'factory_resource_id': {'key': 'factoryResourceId', 'type': 'str'}, + 'repo_configuration': {'key': 'repoConfiguration', 'type': 'FactoryRepoConfiguration'}, + } + + def __init__(self, *, factory_resource_id: str=None, repo_configuration=None, **kwargs) -> None: + super(FactoryRepoUpdate, self).__init__(**kwargs) + self.factory_resource_id = factory_resource_id + self.repo_configuration = repo_configuration diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters.py new file mode 100644 index 000000000000..e9977fceff86 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FactoryUpdateParameters(Model): + """Parameters for updating a factory resource. + + :param tags: The resource tags. + :type tags: dict[str, str] + :param identity: Managed service identity of the factory. + :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity + """ + + _attribute_map = { + 'tags': {'key': 'tags', 'type': '{str}'}, + 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, + } + + def __init__(self, **kwargs): + super(FactoryUpdateParameters, self).__init__(**kwargs) + self.tags = kwargs.get('tags', None) + self.identity = kwargs.get('identity', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters_py3.py new file mode 100644 index 000000000000..5bd523fedf3d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters_py3.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FactoryUpdateParameters(Model): + """Parameters for updating a factory resource. + + :param tags: The resource tags. 
+ :type tags: dict[str, str] + :param identity: Managed service identity of the factory. + :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity + """ + + _attribute_map = { + 'tags': {'key': 'tags', 'type': '{str}'}, + 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, + } + + def __init__(self, *, tags=None, identity=None, **kwargs) -> None: + super(FactoryUpdateParameters, self).__init__(**kwargs) + self.tags = tags + self.identity = identity diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration.py new file mode 100644 index 000000000000..6d07c68d23e3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration.py @@ -0,0 +1,62 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .factory_repo_configuration import FactoryRepoConfiguration + + +class FactoryVSTSConfiguration(FactoryRepoConfiguration): + """Factory's VSTS repo information. + + All required parameters must be populated in order to send to Azure. + + :param account_name: Required. Account name. + :type account_name: str + :param repository_name: Required. Repository name. + :type repository_name: str + :param collaboration_branch: Required. Collaboration branch. + :type collaboration_branch: str + :param root_folder: Required. Root folder. + :type root_folder: str + :param last_commit_id: Last commit id. + :type last_commit_id: str + :param type: Required. Constant filled by server. + :type type: str + :param project_name: Required. VSTS project name. + :type project_name: str + :param tenant_id: VSTS tenant id. 
+ :type tenant_id: str + """ + + _validation = { + 'account_name': {'required': True}, + 'repository_name': {'required': True}, + 'collaboration_branch': {'required': True}, + 'root_folder': {'required': True}, + 'type': {'required': True}, + 'project_name': {'required': True}, + } + + _attribute_map = { + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'repository_name': {'key': 'repositoryName', 'type': 'str'}, + 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, + 'root_folder': {'key': 'rootFolder', 'type': 'str'}, + 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'project_name': {'key': 'projectName', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(FactoryVSTSConfiguration, self).__init__(**kwargs) + self.project_name = kwargs.get('project_name', None) + self.tenant_id = kwargs.get('tenant_id', None) + self.type = 'FactoryVSTSConfiguration' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration_py3.py new file mode 100644 index 000000000000..4f13c0959d63 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration_py3.py @@ -0,0 +1,62 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .factory_repo_configuration_py3 import FactoryRepoConfiguration + + +class FactoryVSTSConfiguration(FactoryRepoConfiguration): + """Factory's VSTS repo information. + + All required parameters must be populated in order to send to Azure. + + :param account_name: Required. Account name. + :type account_name: str + :param repository_name: Required. Repository name. + :type repository_name: str + :param collaboration_branch: Required. Collaboration branch. + :type collaboration_branch: str + :param root_folder: Required. Root folder. + :type root_folder: str + :param last_commit_id: Last commit id. + :type last_commit_id: str + :param type: Required. Constant filled by server. + :type type: str + :param project_name: Required. VSTS project name. + :type project_name: str + :param tenant_id: VSTS tenant id. 
+    :type tenant_id: str
+    """
+
+    _validation = {
+        'account_name': {'required': True},
+        'repository_name': {'required': True},
+        'collaboration_branch': {'required': True},
+        'root_folder': {'required': True},
+        'type': {'required': True},
+        'project_name': {'required': True},
+    }
+
+    _attribute_map = {
+        'account_name': {'key': 'accountName', 'type': 'str'},
+        'repository_name': {'key': 'repositoryName', 'type': 'str'},
+        'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'},
+        'root_folder': {'key': 'rootFolder', 'type': 'str'},
+        'last_commit_id': {'key': 'lastCommitId', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'project_name': {'key': 'projectName', 'type': 'str'},
+        'tenant_id': {'key': 'tenantId', 'type': 'str'},
+    }
+
+    def __init__(self, *, account_name: str, repository_name: str, collaboration_branch: str, root_folder: str, project_name: str, last_commit_id: str=None, tenant_id: str=None, **kwargs) -> None:
+        super(FactoryVSTSConfiguration, self).__init__(account_name=account_name, repository_name=repository_name, collaboration_branch=collaboration_branch, root_folder=root_folder, last_commit_id=last_commit_id, **kwargs)
+        self.project_name = project_name
+        self.tenant_id = tenant_id
+        self.type = 'FactoryVSTSConfiguration'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service.py
new file mode 100644
index 000000000000..ffced5c2e689
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service.py
@@ -0,0 +1,74 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class FileServerLinkedService(LinkedService):
+    """File system linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param host: Required. Host name of the server. Type: string (or
+     Expression with resultType string).
+    :type host: object
+    :param user_id: User ID used to log on to the server. Type: string (or
+     Expression with resultType string).
+    :type user_id: object
+    :param password: Password used to log on to the server.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'host': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'host': {'key': 'typeProperties.host', 'type': 'object'},
+        'user_id': {'key': 'typeProperties.userId', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(FileServerLinkedService, self).__init__(**kwargs)
+        self.host = kwargs.get('host', None)
+        self.user_id = kwargs.get('user_id', None)
+        self.password = kwargs.get('password', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
+        self.type = 'FileServer'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service_py3.py
new file mode 100644
index 000000000000..ec6fe58bb3a3
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service_py3.py
@@ -0,0 +1,74 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class FileServerLinkedService(LinkedService):
+    """File system linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param host: Required. Host name of the server. Type: string (or
+     Expression with resultType string).
+    :type host: object
+    :param user_id: User ID used to log on to the server. Type: string (or
+     Expression with resultType string).
+    :type user_id: object
+    :param password: Password used to log on to the server.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'host': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'host': {'key': 'typeProperties.host', 'type': 'object'},
+        'user_id': {'key': 'typeProperties.userId', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_id=None, password=None, encrypted_credential=None, **kwargs) -> None:
+        super(FileServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+        self.host = host
+        self.user_id = user_id
+        self.password = password
+        self.encrypted_credential = encrypted_credential
+        self.type = 'FileServer'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location.py
new file mode 100644
index 000000000000..edce5fe68a65
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location.py
@@ -0,0 +1,45 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_location import DatasetLocation
+
+
+class FileServerLocation(DatasetLocation):
+    """The location of a file server dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of dataset storage location.
+    :type type: str
+    :param folder_path: Specify the folder path of the dataset. Type: string
+     (or Expression with resultType string).
+    :type folder_path: object
+    :param file_name: Specify the file name of the dataset. Type: string (or
+     Expression with resultType string).
+    :type file_name: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'folder_path': {'key': 'folderPath', 'type': 'object'},
+        'file_name': {'key': 'fileName', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(FileServerLocation, self).__init__(**kwargs)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location_py3.py
new file mode 100644
index 000000000000..f7fb8354bcbc
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location_py3.py
@@ -0,0 +1,45 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_location_py3 import DatasetLocation
+
+
+class FileServerLocation(DatasetLocation):
+    """The location of a file server dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of dataset storage location.
+    :type type: str
+    :param folder_path: Specify the folder path of the dataset. Type: string
+     (or Expression with resultType string).
+    :type folder_path: object
+    :param file_name: Specify the file name of the dataset. Type: string (or
+     Expression with resultType string).
+    :type file_name: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'folder_path': {'key': 'folderPath', 'type': 'object'},
+        'file_name': {'key': 'fileName', 'type': 'object'},
+    }
+
+    def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None:
+        super(FileServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings.py
new file mode 100644
index 000000000000..da9d0809e03a
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings.py
@@ -0,0 +1,73 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# -------------------------------------------------------------------------- + +from .store_read_settings import StoreReadSettings + + +class FileServerReadSettings(StoreReadSettings): + """File server read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: FileServer wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: FileServer wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(FileServerReadSettings, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings_py3.py new file mode 100644 index 000000000000..1fadb49b1795 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings_py3 import StoreReadSettings + + +class FileServerReadSettings(StoreReadSettings): + """File server read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: FileServer wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: FileServer wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). 
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(FileServerReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings.py new file mode 100644 index 000000000000..e3bc7946d1ac --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_write_settings import StoreWriteSettings + + +class FileServerWriteSettings(StoreWriteSettings): + """File server write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(FileServerWriteSettings, self).__init__(**kwargs) + self.type = 'FileServerWriteSettings' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings_py3.py new file mode 100644 index 000000000000..b174cf537577 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_write_settings_py3 import StoreWriteSettings + + +class FileServerWriteSettings(StoreWriteSettings): + """File server write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(FileServerWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + self.type = 'FileServerWriteSettings' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset.py new file mode 100644 index 000000000000..6874f4c08929 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset.py @@ -0,0 +1,101 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class FileShareDataset(Dataset): + """An on-premises file system dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param folder_path: The path of the on-premises file system. Type: string + (or Expression with resultType string). + :type folder_path: object + :param file_name: The name of the on-premises file system. Type: string + (or Expression with resultType string). + :type file_name: object + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + :param format: The format of the files. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param file_filter: Specify a filter to be used to select a subset of + files in the folderPath rather than all files. Type: string (or Expression + with resultType string). + :type file_filter: object + :param compression: The data compression method used for the file system. 
+ :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'file_filter': {'key': 'typeProperties.fileFilter', 'type': 'object'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, **kwargs): + super(FileShareDataset, self).__init__(**kwargs) + self.folder_path = kwargs.get('folder_path', None) + self.file_name = kwargs.get('file_name', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + self.format = kwargs.get('format', None) + self.file_filter = kwargs.get('file_filter', None) + self.compression = kwargs.get('compression', None) + self.type = 'FileShare' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset_py3.py new file mode 100644 index 000000000000..19e88a264e12 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset_py3.py @@ -0,0 +1,101 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class FileShareDataset(Dataset): + """An on-premises file system dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param folder_path: The path of the on-premises file system. Type: string + (or Expression with resultType string). + :type folder_path: object + :param file_name: The name of the on-premises file system. Type: string + (or Expression with resultType string). + :type file_name: object + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + :param format: The format of the files. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param file_filter: Specify a filter to be used to select a subset of + files in the folderPath rather than all files. Type: string (or Expression + with resultType string). + :type file_filter: object + :param compression: The data compression method used for the file system. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'file_filter': {'key': 'typeProperties.fileFilter', 'type': 'object'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, modified_datetime_start=None, modified_datetime_end=None, format=None, file_filter=None, compression=None, **kwargs) -> None: + super(FileShareDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.folder_path = folder_path + self.file_name = file_name 
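+        # The two modified-datetime assignments below define an optional
+        # last-modified filter window for the files picked up from the share.
+        # A minimal usage sketch, assuming a linked service reference named
+        # 'MyFileServer' (illustrative only, not part of this model):
+        #
+        #     from azure.mgmt.datafactory.models import (
+        #         FileShareDataset, LinkedServiceReference)
+        #
+        #     ds = FileShareDataset(
+        #         linked_service_name=LinkedServiceReference(
+        #             reference_name='MyFileServer'),
+        #         folder_path='share/incoming',
+        #         modified_datetime_start='2019-06-01T00:00:00Z',
+        #         modified_datetime_end='2019-06-07T00:00:00Z')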
+ self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + self.format = format + self.file_filter = file_filter + self.compression = compression + self.type = 'FileShare' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py new file mode 100644 index 000000000000..8b8f238c9534 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class FileSystemSink(CopySink): + """A copy activity file system sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param copy_behavior: The type of copy behavior for copy sink. 
+ :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(FileSystemSink, self).__init__(**kwargs) + self.copy_behavior = kwargs.get('copy_behavior', None) + self.type = 'FileSystemSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py new file mode 100644 index 000000000000..24f8623cbb02 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class FileSystemSink(CopySink): + """A copy activity file system sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param copy_behavior: The type of copy behavior for copy sink. 
+ :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.copy_behavior = copy_behavior + self.type = 'FileSystemSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source.py new file mode 100644 index 000000000000..2986b1848153 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class FileSystemSource(CopySource): + """A copy activity file system source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). 
+ :type recursive: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(FileSystemSource, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.type = 'FileSystemSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source_py3.py new file mode 100644 index 000000000000..0598490ca51c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source_py3.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class FileSystemSource(CopySource): + """A copy activity file system source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). 
+ :type recursive: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, **kwargs) -> None: + super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.type = 'FileSystemSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity.py new file mode 100644 index 000000000000..1346bb234695 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity.py @@ -0,0 +1,61 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity import ControlActivity + + +class FilterActivity(ControlActivity): + """Filter and return results from input array based on the conditions. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param items: Required. Input array on which filter should be applied. + :type items: ~azure.mgmt.datafactory.models.Expression + :param condition: Required. Condition to be used for filtering the input. 
+ :type condition: ~azure.mgmt.datafactory.models.Expression + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'items': {'required': True}, + 'condition': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'items': {'key': 'typeProperties.items', 'type': 'Expression'}, + 'condition': {'key': 'typeProperties.condition', 'type': 'Expression'}, + } + + def __init__(self, **kwargs): + super(FilterActivity, self).__init__(**kwargs) + self.items = kwargs.get('items', None) + self.condition = kwargs.get('condition', None) + self.type = 'Filter' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity_py3.py new file mode 100644 index 000000000000..a07cf01d1dd5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity_py3.py @@ -0,0 +1,61 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity_py3 import ControlActivity + + +class FilterActivity(ControlActivity): + """Filter and return results from input array based on the conditions. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param items: Required. Input array on which filter should be applied. + :type items: ~azure.mgmt.datafactory.models.Expression + :param condition: Required. Condition to be used for filtering the input. 
+    :type condition: ~azure.mgmt.datafactory.models.Expression
+    """
+
+    _validation = {
+        'name': {'required': True},
+        'type': {'required': True},
+        'items': {'required': True},
+        'condition': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'items': {'key': 'typeProperties.items', 'type': 'Expression'},
+        'condition': {'key': 'typeProperties.condition', 'type': 'Expression'},
+    }
+
+    def __init__(self, *, name: str, items, condition, additional_properties=None, description: str=None, depends_on=None, user_properties=None, **kwargs) -> None:
+        super(FilterActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs)
+        self.items = items
+        self.condition = condition
+        self.type = 'Filter'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity.py
new file mode 100644
index 000000000000..5edfa2a8140e
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity.py
@@ -0,0 +1,73 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .control_activity import ControlActivity
+
+
+class ForEachActivity(ControlActivity):
+    """This activity is used for iterating over a collection and executing
+    the given activities.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param name: Required. Activity name.
+    :type name: str
+    :param description: Activity description.
+    :type description: str
+    :param depends_on: Activity depends on condition.
+    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+    :param user_properties: Activity user properties.
+    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param is_sequential: Should the loop be executed in sequence or in
+     parallel (max 50).
+    :type is_sequential: bool
+    :param batch_count: Batch count to be used for controlling the number of
+     parallel executions (when isSequential is set to false).
+    :type batch_count: int
+    :param items: Required. Collection to iterate.
+    :type items: ~azure.mgmt.datafactory.models.Expression
+    :param activities: Required. List of activities to execute.
+    :type activities: list[~azure.mgmt.datafactory.models.Activity]
+    """
+
+    _validation = {
+        'name': {'required': True},
+        'type': {'required': True},
+        'batch_count': {'maximum': 50},
+        'items': {'required': True},
+        'activities': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'is_sequential': {'key': 'typeProperties.isSequential', 'type': 'bool'},
+        'batch_count': {'key': 'typeProperties.batchCount', 'type': 'int'},
+        'items': {'key': 'typeProperties.items', 'type': 'Expression'},
+        'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'},
+    }
+
+    def __init__(self, **kwargs):
+        super(ForEachActivity, self).__init__(**kwargs)
+        self.is_sequential = kwargs.get('is_sequential', None)
+        self.batch_count = kwargs.get('batch_count', None)
+        self.items = kwargs.get('items', None)
+        self.activities = kwargs.get('activities', None)
+        self.type = 'ForEach'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity_py3.py
new file mode 100644
index 000000000000..7c5c887bb1d9
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity_py3.py
@@ -0,0 +1,73 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .control_activity_py3 import ControlActivity
+
+
+class ForEachActivity(ControlActivity):
+    """This activity is used for iterating over a collection and executing
+    the given activities.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param name: Required. Activity name.
+    :type name: str
+    :param description: Activity description.
+    :type description: str
+    :param depends_on: Activity depends on condition.
+    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+    :param user_properties: Activity user properties.
+    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param is_sequential: Should the loop be executed in sequence or in
+     parallel (max 50).
+    :type is_sequential: bool
+    :param batch_count: Batch count to be used for controlling the number of
+     parallel executions (when isSequential is set to false).
+    :type batch_count: int
+    :param items: Required. Collection to iterate.
+    :type items: ~azure.mgmt.datafactory.models.Expression
+    :param activities: Required. List of activities to execute.
+ :type activities: list[~azure.mgmt.datafactory.models.Activity] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'batch_count': {'maximum': 50}, + 'items': {'required': True}, + 'activities': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'is_sequential': {'key': 'typeProperties.isSequential', 'type': 'bool'}, + 'batch_count': {'key': 'typeProperties.batchCount', 'type': 'int'}, + 'items': {'key': 'typeProperties.items', 'type': 'Expression'}, + 'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'}, + } + + def __init__(self, *, name: str, items, activities, additional_properties=None, description: str=None, depends_on=None, user_properties=None, is_sequential: bool=None, batch_count: int=None, **kwargs) -> None: + super(ForEachActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.is_sequential = is_sequential + self.batch_count = batch_count + self.items = items + self.activities = activities + self.type = 'ForEach' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings.py new file mode 100644 index 000000000000..d5213138b96a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FormatReadSettings(Model): + """Format read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(FormatReadSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = kwargs.get('type', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings_py3.py new file mode 100644 index 000000000000..326da0277b89 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings_py3.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FormatReadSettings(Model): + """Format read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, type: str, additional_properties=None, **kwargs) -> None: + super(FormatReadSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings.py new file mode 100644 index 000000000000..2100c6055d0d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FormatWriteSettings(Model): + """Format write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(FormatWriteSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = kwargs.get('type', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings_py3.py new file mode 100644 index 000000000000..4150eceffc1c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings_py3.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FormatWriteSettings(Model): + """Format write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, type: str, additional_properties=None, **kwargs) -> None: + super(FormatWriteSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings.py new file mode 100644 index 000000000000..e023f9ae91f7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings import StoreReadSettings + + +class FtpReadSettings(StoreReadSettings): + """Ftp read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). 
+ :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Ftp wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Ftp wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param use_binary_transfer: Specify whether to use binary transfer mode + for FTP stores. + :type use_binary_transfer: bool + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'}, + } + + def __init__(self, **kwargs): + super(FtpReadSettings, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.use_binary_transfer = kwargs.get('use_binary_transfer', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings_py3.py new file mode 100644 index 000000000000..748d306307ac --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings_py3.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings_py3 import StoreReadSettings + + +class FtpReadSettings(StoreReadSettings): + """Ftp read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Ftp wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Ftp wildcardFileName. Type: string (or + Expression with resultType string). 
+    :type wildcard_file_name: object
+    :param use_binary_transfer: Specify whether to use binary transfer mode
+     for FTP stores.
+    :type use_binary_transfer: bool
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'recursive': {'key': 'recursive', 'type': 'object'},
+        'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
+        'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
+        'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'},
+    }
+
+    def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, use_binary_transfer: bool=None, **kwargs) -> None:
+        super(FtpReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.recursive = recursive
+        self.wildcard_folder_path = wildcard_folder_path
+        self.wildcard_file_name = wildcard_file_name
+        self.use_binary_transfer = use_binary_transfer
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service.py
new file mode 100644
index 000000000000..e649ca56e37c
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service.py
@@ -0,0 +1,98 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class FtpServerLinkedService(LinkedService):
+    """An FTP server linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param host: Required. Host name of the FTP server. Type: string (or
+     Expression with resultType string).
+    :type host: object
+    :param port: The TCP port number that the FTP server uses to listen for
+     client connections. Default value is 21. Type: integer (or Expression
+     with resultType integer), minimum: 0.
+    :type port: object
+    :param authentication_type: The authentication type to be used to connect
+     to the FTP server. Possible values include: 'Basic', 'Anonymous'
+    :type authentication_type: str or
+     ~azure.mgmt.datafactory.models.FtpAuthenticationType
+    :param user_name: Username to log on to the FTP server. Type: string (or
+     Expression with resultType string).
+    :type user_name: object
+    :param password: Password to log on to the FTP server.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    :param enable_ssl: If true, connect to the FTP server over an SSL/TLS
+     channel. Default value is true. Type: boolean (or Expression with
+     resultType boolean).
+    :type enable_ssl: object
+    :param enable_server_certificate_validation: If true, validate the FTP
+     server SSL certificate when connecting over an SSL/TLS channel. Default
+     value is true. Type: boolean (or Expression with resultType boolean).
+    :type enable_server_certificate_validation: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'host': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'host': {'key': 'typeProperties.host', 'type': 'object'},
+        'port': {'key': 'typeProperties.port', 'type': 'object'},
+        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+        'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+        'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'},
+        'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(FtpServerLinkedService, self).__init__(**kwargs)
+        self.host = kwargs.get('host', None)
+        self.port = kwargs.get('port', None)
+        self.authentication_type = kwargs.get('authentication_type', None)
+        self.user_name = kwargs.get('user_name', None)
+        self.password = kwargs.get('password', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
+        self.enable_ssl = kwargs.get('enable_ssl', None)
+        self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None)
+        self.type = 'FtpServer'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service_py3.py
new file mode 100644
index 000000000000..b38ad1c03f46
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service_py3.py
@@ -0,0 +1,98 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class FtpServerLinkedService(LinkedService):
+    """An FTP server linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param host: Required. Host name of the FTP server. Type: string (or
+     Expression with resultType string).
+    :type host: object
+    :param port: The TCP port number that the FTP server uses to listen for
+     client connections. Default value is 21. Type: integer (or Expression
+     with resultType integer), minimum: 0.
+    :type port: object
+    :param authentication_type: The authentication type to be used to connect
+     to the FTP server. Possible values include: 'Basic', 'Anonymous'
+    :type authentication_type: str or
+     ~azure.mgmt.datafactory.models.FtpAuthenticationType
+    :param user_name: Username to log on to the FTP server. Type: string (or
+     Expression with resultType string).
+    :type user_name: object
+    :param password: Password to log on to the FTP server.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    :param enable_ssl: If true, connect to the FTP server over an SSL/TLS
+     channel. Default value is true. Type: boolean (or Expression with
+     resultType boolean).
+    :type enable_ssl: object
+    :param enable_server_certificate_validation: If true, validate the FTP
+     server SSL certificate when connecting over an SSL/TLS channel. Default
+     value is true. Type: boolean (or Expression with resultType boolean).
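A minimal construction sketch for this linked service (not part of the generated sources; the host, credentials, and SecureString secret are placeholders):

    from azure.mgmt.datafactory.models import (
        FtpServerLinkedService,
        SecureString,
    )

    # Basic-authenticated FTP over an encrypted channel; enable_ssl mirrors
    # the documented default of true.
    ftp_ls = FtpServerLinkedService(
        host="ftp.example.com",  # required
        port=21,
        authentication_type="Basic",  # or 'Anonymous'
        user_name="deploy",
        password=SecureString(value="<password>"),  # placeholder secret
        enable_ssl=True,
    )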
+ :type enable_server_certificate_validation: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, + } + + def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, enable_ssl=None, enable_server_certificate_validation=None, **kwargs) -> None: + super(FtpServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.port = port + self.authentication_type = authentication_type + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.enable_ssl = enable_ssl + self.enable_server_certificate_validation = enable_server_certificate_validation + self.type = 'FtpServer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location.py new file mode 100644 index 000000000000..5d5e933036df --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class FtpServerLocation(DatasetLocation): + """The location of ftp server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. 
Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(FtpServerLocation, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location_py3.py new file mode 100644 index 000000000000..ac296bcfca31 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class FtpServerLocation(DatasetLocation): + """The location of ftp server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(FtpServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity.py new file mode 100644 index 000000000000..7941189f2dcd --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .execution_activity import ExecutionActivity + + +class GetMetadataActivity(ExecutionActivity): + """Activity to get metadata of dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param dataset: Required. GetMetadata activity dataset reference. + :type dataset: ~azure.mgmt.datafactory.models.DatasetReference + :param field_list: Fields of metadata to get from dataset. + :type field_list: list[object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'dataset': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + 'field_list': {'key': 'typeProperties.fieldList', 'type': '[object]'}, + } + + def __init__(self, **kwargs): + super(GetMetadataActivity, self).__init__(**kwargs) + self.dataset = kwargs.get('dataset', None) + self.field_list = kwargs.get('field_list', None) + self.type = 'GetMetadata' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity_py3.py new file mode 100644 index 000000000000..b4d8eb17cab1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity_py3.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class GetMetadataActivity(ExecutionActivity): + """Activity to get metadata of dataset. + + All required parameters must be populated in order to send to Azure. 
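A minimal construction sketch for this activity (not part of the generated sources; the dataset name and metadata field names are illustrative):

    from azure.mgmt.datafactory.models import (
        DatasetReference,
        GetMetadataActivity,
    )

    # Read child item names and the last-modified timestamp of an existing dataset.
    get_meta = GetMetadataActivity(
        name="GetFolderMetadata",  # required
        dataset=DatasetReference(reference_name="MyFolderDataset"),  # required
        field_list=["childItems", "lastModified"],  # optional metadata fields to fetch
    )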
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param dataset: Required. GetMetadata activity dataset reference. + :type dataset: ~azure.mgmt.datafactory.models.DatasetReference + :param field_list: Fields of metadata to get from dataset. + :type field_list: list[object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'dataset': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + 'field_list': {'key': 'typeProperties.fieldList', 'type': '[object]'}, + } + + def __init__(self, *, name: str, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, field_list=None, **kwargs) -> None: + super(GetMetadataActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.dataset = dataset + self.field_list = field_list + self.type = 'GetMetadata' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request.py new file mode 100644 index 000000000000..1be4a2afece0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request.py @@ -0,0 +1,28 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class GetSsisObjectMetadataRequest(Model): + """The request payload of get SSIS object metadata. + + :param metadata_path: Metadata path. 
+ :type metadata_path: str + """ + + _attribute_map = { + 'metadata_path': {'key': 'metadataPath', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(GetSsisObjectMetadataRequest, self).__init__(**kwargs) + self.metadata_path = kwargs.get('metadata_path', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request_py3.py new file mode 100644 index 000000000000..310cd9783d81 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request_py3.py @@ -0,0 +1,28 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class GetSsisObjectMetadataRequest(Model): + """The request payload of get SSIS object metadata. + + :param metadata_path: Metadata path. + :type metadata_path: str + """ + + _attribute_map = { + 'metadata_path': {'key': 'metadataPath', 'type': 'str'}, + } + + def __init__(self, *, metadata_path: str=None, **kwargs) -> None: + super(GetSsisObjectMetadataRequest, self).__init__(**kwargs) + self.metadata_path = metadata_path diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request.py new file mode 100644 index 000000000000..cadecdf70f44 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class GitHubAccessTokenRequest(Model): + """Get GitHub access token request definition. + + All required parameters must be populated in order to send to Azure. + + :param git_hub_access_code: Required. GitHub access code. + :type git_hub_access_code: str + :param git_hub_client_id: GitHub application client ID. + :type git_hub_client_id: str + :param git_hub_access_token_base_url: Required. GitHub access token base + URL. 
+ :type git_hub_access_token_base_url: str + """ + + _validation = { + 'git_hub_access_code': {'required': True}, + 'git_hub_access_token_base_url': {'required': True}, + } + + _attribute_map = { + 'git_hub_access_code': {'key': 'gitHubAccessCode', 'type': 'str'}, + 'git_hub_client_id': {'key': 'gitHubClientId', 'type': 'str'}, + 'git_hub_access_token_base_url': {'key': 'gitHubAccessTokenBaseUrl', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(GitHubAccessTokenRequest, self).__init__(**kwargs) + self.git_hub_access_code = kwargs.get('git_hub_access_code', None) + self.git_hub_client_id = kwargs.get('git_hub_client_id', None) + self.git_hub_access_token_base_url = kwargs.get('git_hub_access_token_base_url', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request_py3.py new file mode 100644 index 000000000000..7961e1bc33ed --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request_py3.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class GitHubAccessTokenRequest(Model): + """Get GitHub access token request definition. + + All required parameters must be populated in order to send to Azure. + + :param git_hub_access_code: Required. GitHub access code. + :type git_hub_access_code: str + :param git_hub_client_id: GitHub application client ID. + :type git_hub_client_id: str + :param git_hub_access_token_base_url: Required. GitHub access token base + URL. + :type git_hub_access_token_base_url: str + """ + + _validation = { + 'git_hub_access_code': {'required': True}, + 'git_hub_access_token_base_url': {'required': True}, + } + + _attribute_map = { + 'git_hub_access_code': {'key': 'gitHubAccessCode', 'type': 'str'}, + 'git_hub_client_id': {'key': 'gitHubClientId', 'type': 'str'}, + 'git_hub_access_token_base_url': {'key': 'gitHubAccessTokenBaseUrl', 'type': 'str'}, + } + + def __init__(self, *, git_hub_access_code: str, git_hub_access_token_base_url: str, git_hub_client_id: str=None, **kwargs) -> None: + super(GitHubAccessTokenRequest, self).__init__(**kwargs) + self.git_hub_access_code = git_hub_access_code + self.git_hub_client_id = git_hub_client_id + self.git_hub_access_token_base_url = git_hub_access_token_base_url diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response.py new file mode 100644 index 000000000000..4a4afce8f0f0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response.py @@ -0,0 +1,28 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class GitHubAccessTokenResponse(Model): + """Get GitHub access token response definition. + + :param git_hub_access_token: GitHub access token. + :type git_hub_access_token: str + """ + + _attribute_map = { + 'git_hub_access_token': {'key': 'gitHubAccessToken', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(GitHubAccessTokenResponse, self).__init__(**kwargs) + self.git_hub_access_token = kwargs.get('git_hub_access_token', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response_py3.py new file mode 100644 index 000000000000..4f28ade6e914 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response_py3.py @@ -0,0 +1,28 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class GitHubAccessTokenResponse(Model): + """Get GitHub access token response definition. + + :param git_hub_access_token: GitHub access token. + :type git_hub_access_token: str + """ + + _attribute_map = { + 'git_hub_access_token': {'key': 'gitHubAccessToken', 'type': 'str'}, + } + + def __init__(self, *, git_hub_access_token: str=None, **kwargs) -> None: + super(GitHubAccessTokenResponse, self).__init__(**kwargs) + self.git_hub_access_token = git_hub_access_token diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service.py new file mode 100644 index 000000000000..c460dd95c380 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service.py @@ -0,0 +1,119 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class GoogleAdWordsLinkedService(LinkedService): + """Google AdWords service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param client_customer_id: Required. The Client customer ID of the AdWords + account that you want to fetch report data for. + :type client_customer_id: object + :param developer_token: Required. The developer token associated with the + manager account that you use to grant access to the AdWords API. + :type developer_token: ~azure.mgmt.datafactory.models.SecretBase + :param authentication_type: Required. The OAuth 2.0 authentication + mechanism used for authentication. ServiceAuthentication can only be used + on self-hosted IR. Possible values include: 'ServiceAuthentication', + 'UserAuthentication' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.GoogleAdWordsAuthenticationType + :param refresh_token: The refresh token obtained from Google for + authorizing access to AdWords for UserAuthentication. + :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase + :param client_id: The client id of the google application used to acquire + the refresh token. + :type client_id: ~azure.mgmt.datafactory.models.SecretBase + :param client_secret: The client secret of the google application used to + acquire the refresh token. + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param email: The service account email ID that is used for + ServiceAuthentication and can only be used on self-hosted IR. + :type email: object + :param key_file_path: The full path to the .p12 key file that is used to + authenticate the service account email address and can only be used on + self-hosted IR. + :type key_file_path: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_customer_id': {'required': True}, + 'developer_token': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'client_customer_id': {'key': 'typeProperties.clientCustomerID', 'type': 'object'}, + 'developer_token': {'key': 'typeProperties.developerToken', 'type': 'SecretBase'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'SecretBase'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'email': {'key': 'typeProperties.email', 'type': 'object'}, + 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(GoogleAdWordsLinkedService, self).__init__(**kwargs) + self.client_customer_id = kwargs.get('client_customer_id', None) + self.developer_token = kwargs.get('developer_token', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.refresh_token = kwargs.get('refresh_token', None) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.email = kwargs.get('email', None) + self.key_file_path = kwargs.get('key_file_path', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'GoogleAdWords' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service_py3.py new file mode 100644 index 000000000000..dfb3bc07e69f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service_py3.py @@ -0,0 +1,119 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class GoogleAdWordsLinkedService(LinkedService): + """Google AdWords service linked service. + + All required parameters must be populated in order to send to Azure. 
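A minimal construction sketch for this linked service using service authentication (not part of the generated sources; all identifiers and secrets are placeholders):

    from azure.mgmt.datafactory.models import (
        GoogleAdWordsLinkedService,
        SecureString,
    )

    # ServiceAuthentication can only run on a self-hosted integration runtime.
    adwords_ls = GoogleAdWordsLinkedService(
        client_customer_id="123-456-7890",  # required
        developer_token=SecureString(value="<developer-token>"),  # required SecretBase
        authentication_type="ServiceAuthentication",  # required; or 'UserAuthentication'
        email="reporting@example.iam.gserviceaccount.com",  # service account email
        key_file_path="/keys/adwords.p12",  # .p12 key file on the self-hosted IR
    )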
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param client_customer_id: Required. The Client customer ID of the AdWords + account that you want to fetch report data for. + :type client_customer_id: object + :param developer_token: Required. The developer token associated with the + manager account that you use to grant access to the AdWords API. + :type developer_token: ~azure.mgmt.datafactory.models.SecretBase + :param authentication_type: Required. The OAuth 2.0 authentication + mechanism used for authentication. ServiceAuthentication can only be used + on self-hosted IR. Possible values include: 'ServiceAuthentication', + 'UserAuthentication' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.GoogleAdWordsAuthenticationType + :param refresh_token: The refresh token obtained from Google for + authorizing access to AdWords for UserAuthentication. + :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase + :param client_id: The client id of the google application used to acquire + the refresh token. + :type client_id: ~azure.mgmt.datafactory.models.SecretBase + :param client_secret: The client secret of the google application used to + acquire the refresh token. + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param email: The service account email ID that is used for + ServiceAuthentication and can only be used on self-hosted IR. + :type email: object + :param key_file_path: The full path to the .p12 key file that is used to + authenticate the service account email address and can only be used on + self-hosted IR. + :type key_file_path: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_customer_id': {'required': True}, + 'developer_token': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'client_customer_id': {'key': 'typeProperties.clientCustomerID', 'type': 'object'}, + 'developer_token': {'key': 'typeProperties.developerToken', 'type': 'SecretBase'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'SecretBase'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'email': {'key': 'typeProperties.email', 'type': 'object'}, + 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, client_customer_id, developer_token, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, refresh_token=None, client_id=None, client_secret=None, email=None, key_file_path=None, trusted_cert_path=None, use_system_trust_store=None, encrypted_credential=None, **kwargs) -> None: + super(GoogleAdWordsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.client_customer_id = client_customer_id + self.developer_token = developer_token + self.authentication_type = authentication_type + self.refresh_token = refresh_token + self.client_id = client_id + self.client_secret = client_secret + self.email = email + self.key_file_path = key_file_path + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.encrypted_credential = encrypted_credential + self.type = 'GoogleAdWords' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset.py new file mode 100644 index 000000000000..92b901b774ed --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset.py
new file mode 100644
index 000000000000..92b901b774ed
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset import Dataset
+
+
+class GoogleAdWordsObjectDataset(Dataset):
+ """Google AdWords service dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset. Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param table_name: The table name. Type: string (or Expression with
+ resultType string).
+ :type table_name: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(GoogleAdWordsObjectDataset, self).__init__(**kwargs)
+ self.table_name = kwargs.get('table_name', None)
+ self.type = 'GoogleAdWordsObject'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset_py3.py
new file mode 100644
index 000000000000..e1272f978b8e
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset_py3.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_py3 import Dataset
+
+
+class GoogleAdWordsObjectDataset(Dataset):
+ """Google AdWords service dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset. Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param table_name: The table name. Type: string (or Expression with
+ resultType string).
+ :type table_name: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+ }
+
+ def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None:
+ super(GoogleAdWordsObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+ self.table_name = table_name
+ self.type = 'GoogleAdWordsObject'
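Illustrative usage (not part of the generated patch): a sketch pairing the dataset above with the copy source defined in the next file; the linked service reference name and the AWQL-style report query are hypothetical.

    from azure.mgmt.datafactory.models import (
        GoogleAdWordsObjectDataset, GoogleAdWordsSource,
        LinkedServiceReference)

    ads_dataset = GoogleAdWordsObjectDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='GoogleAdWordsLinkedService'),
        table_name='CAMPAIGN_PERFORMANCE_REPORT')     # hypothetical report

    # The query is pushed down to the connector at copy time.
    ads_source = GoogleAdWordsSource(
        query='SELECT CampaignId, Impressions FROM CAMPAIGN_PERFORMANCE_REPORT')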
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source.py
new file mode 100644
index 000000000000..8699057abe09
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source import CopySource
+
+
+class GoogleAdWordsSource(CopySource):
+ """A copy activity Google AdWords service source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param query: A query to retrieve data from source. Type: string (or
+ Expression with resultType string).
+ :type query: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'query': {'key': 'query', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(GoogleAdWordsSource, self).__init__(**kwargs)
+ self.query = kwargs.get('query', None)
+ self.type = 'GoogleAdWordsSource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source_py3.py
new file mode 100644
index 000000000000..995d5324670b
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source_py3.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source_py3 import CopySource
+
+
+class GoogleAdWordsSource(CopySource):
+ """A copy activity Google AdWords service source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param query: A query to retrieve data from source. Type: string (or
+ Expression with resultType string).
+ :type query: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'query': {'key': 'query', 'type': 'object'},
+ }
+
+ def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
+ super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.query = query
+ self.type = 'GoogleAdWordsSource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py
new file mode 100644
index 000000000000..45a535b95d43
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py
@@ -0,0 +1,124 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class GoogleBigQueryLinkedService(LinkedService):
+ """Google BigQuery service linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param project: Required. The default BigQuery project to query against.
+ :type project: object
+ :param additional_projects: A comma-separated list of public BigQuery
+ projects to access.
+ :type additional_projects: object
+ :param request_google_drive_scope: Whether to request access to Google
+ Drive. Allowing Google Drive access enables support for federated tables
+ that combine BigQuery data with data from Google Drive. The default value
+ is false.
+ :type request_google_drive_scope: object
+ :param authentication_type: Required. The OAuth 2.0 authentication
+ mechanism used for authentication. ServiceAuthentication can only be used
+ on self-hosted IR. Possible values include: 'ServiceAuthentication',
+ 'UserAuthentication'
+ :type authentication_type: str or
+ ~azure.mgmt.datafactory.models.GoogleBigQueryAuthenticationType
+ :param refresh_token: The refresh token obtained from Google for
+ authorizing access to BigQuery for UserAuthentication.
+ :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase
+ :param client_id: The client ID of the Google application used to acquire
+ the refresh token.
+ :type client_id: ~azure.mgmt.datafactory.models.SecretBase
+ :param client_secret: The client secret of the Google application used to
+ acquire the refresh token.
+ :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
+ :param email: The service account email ID that is used for
+ ServiceAuthentication and can only be used on self-hosted IR.
+ :type email: object
+ :param key_file_path: The full path to the .p12 key file that is used to
+ authenticate the service account email address and can only be used on
+ self-hosted IR.
+ :type key_file_path: object
+ :param trusted_cert_path: The full path of the .pem file containing
+ trusted CA certificates for verifying the server when connecting over SSL.
+ This property can only be set when using SSL on self-hosted IR. The
+ default value is the cacerts.pem file installed with the IR.
+ :type trusted_cert_path: object
+ :param use_system_trust_store: Specifies whether to use a CA certificate
+ from the system trust store or from a specified PEM file. The default
+ value is false.
+ :type use_system_trust_store: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'project': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'project': {'key': 'typeProperties.project', 'type': 'object'}, + 'additional_projects': {'key': 'typeProperties.additionalProjects', 'type': 'object'}, + 'request_google_drive_scope': {'key': 'typeProperties.requestGoogleDriveScope', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'SecretBase'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'email': {'key': 'typeProperties.email', 'type': 'object'}, + 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(GoogleBigQueryLinkedService, self).__init__(**kwargs) + self.project = kwargs.get('project', None) + self.additional_projects = kwargs.get('additional_projects', None) + self.request_google_drive_scope = kwargs.get('request_google_drive_scope', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.refresh_token = kwargs.get('refresh_token', None) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.email = kwargs.get('email', None) + self.key_file_path = kwargs.get('key_file_path', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'GoogleBigQuery' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service_py3.py new file mode 100644 index 000000000000..146674a85531 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service_py3.py @@ -0,0 +1,124 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class GoogleBigQueryLinkedService(LinkedService): + """Google BigQuery service linked service. 
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param project: Required. The default BigQuery project to query against.
+ :type project: object
+ :param additional_projects: A comma-separated list of public BigQuery
+ projects to access.
+ :type additional_projects: object
+ :param request_google_drive_scope: Whether to request access to Google
+ Drive. Allowing Google Drive access enables support for federated tables
+ that combine BigQuery data with data from Google Drive. The default value
+ is false.
+ :type request_google_drive_scope: object
+ :param authentication_type: Required. The OAuth 2.0 authentication
+ mechanism used for authentication. ServiceAuthentication can only be used
+ on self-hosted IR. Possible values include: 'ServiceAuthentication',
+ 'UserAuthentication'
+ :type authentication_type: str or
+ ~azure.mgmt.datafactory.models.GoogleBigQueryAuthenticationType
+ :param refresh_token: The refresh token obtained from Google for
+ authorizing access to BigQuery for UserAuthentication.
+ :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase
+ :param client_id: The client ID of the Google application used to acquire
+ the refresh token.
+ :type client_id: ~azure.mgmt.datafactory.models.SecretBase
+ :param client_secret: The client secret of the Google application used to
+ acquire the refresh token.
+ :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
+ :param email: The service account email ID that is used for
+ ServiceAuthentication and can only be used on self-hosted IR.
+ :type email: object
+ :param key_file_path: The full path to the .p12 key file that is used to
+ authenticate the service account email address and can only be used on
+ self-hosted IR.
+ :type key_file_path: object
+ :param trusted_cert_path: The full path of the .pem file containing
+ trusted CA certificates for verifying the server when connecting over SSL.
+ This property can only be set when using SSL on self-hosted IR. The
+ default value is the cacerts.pem file installed with the IR.
+ :type trusted_cert_path: object
+ :param use_system_trust_store: Specifies whether to use a CA certificate
+ from the system trust store or from a specified PEM file. The default
+ value is false.
+ :type use_system_trust_store: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'project': {'required': True},
+ 'authentication_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'project': {'key': 'typeProperties.project', 'type': 'object'},
+ 'additional_projects': {'key': 'typeProperties.additionalProjects', 'type': 'object'},
+ 'request_google_drive_scope': {'key': 'typeProperties.requestGoogleDriveScope', 'type': 'object'},
+ 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+ 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'},
+ 'client_id': {'key': 'typeProperties.clientId', 'type': 'SecretBase'},
+ 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'},
+ 'email': {'key': 'typeProperties.email', 'type': 'object'},
+ 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'},
+ 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'},
+ 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, *, project, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, additional_projects=None, request_google_drive_scope=None, refresh_token=None, client_id=None, client_secret=None, email=None, key_file_path=None, trusted_cert_path=None, use_system_trust_store=None, encrypted_credential=None, **kwargs) -> None:
+ super(GoogleBigQueryLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+ self.project = project
+ self.additional_projects = additional_projects
+ self.request_google_drive_scope = request_google_drive_scope
+ self.authentication_type = authentication_type
+ self.refresh_token = refresh_token
+ self.client_id = client_id
+ self.client_secret = client_secret
+ self.email = email
+ self.key_file_path = key_file_path
+ self.trusted_cert_path = trusted_cert_path
+ self.use_system_trust_store = use_system_trust_store
+ self.encrypted_credential = encrypted_credential
+ self.type = 'GoogleBigQuery'
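Illustrative usage (not part of the generated patch): a sketch pairing the linked service above with the BigQuery dataset defined in the next file; the project, service account, key path, and reference name are hypothetical. The separate `dataset`/`table` properties are preferred over the retired `table_name`.

    from azure.mgmt.datafactory.models import (
        GoogleBigQueryLinkedService, GoogleBigQueryObjectDataset,
        LinkedServiceReference)

    bq_ls = GoogleBigQueryLinkedService(
        project='my-gcp-project',                     # hypothetical
        authentication_type='ServiceAuthentication',  # self-hosted IR only
        email='etl-svc@my-gcp-project.iam.gserviceaccount.com',
        key_file_path='C:\\credentials\\bigquery.p12')

    bq_dataset = GoogleBigQueryObjectDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='GoogleBigQueryLinkedService'),
        dataset='sales',
        table='daily_orders')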
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset.py
new file mode 100644
index 000000000000..920489742bbf
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset.py
@@ -0,0 +1,82 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset import Dataset
+
+
+class GoogleBigQueryObjectDataset(Dataset):
+ """Google BigQuery service dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset. Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param table_name: This property will be retired. Please consider using
+ database + table properties instead.
+ :type table_name: object
+ :param table: The table name in Google BigQuery. Type: string (or
+ Expression with resultType string).
+ :type table: object
+ :param dataset: The dataset name in Google BigQuery. Type: string (or
+ Expression with resultType string).
+ :type dataset: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+ 'table': {'key': 'typeProperties.table', 'type': 'object'},
+ 'dataset': {'key': 'typeProperties.dataset', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(GoogleBigQueryObjectDataset, self).__init__(**kwargs)
+ self.table_name = kwargs.get('table_name', None)
+ self.table = kwargs.get('table', None)
+ self.dataset = kwargs.get('dataset', None)
+ self.type = 'GoogleBigQueryObject'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset_py3.py
new file mode 100644
index 000000000000..205819f8eeef
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset_py3.py
@@ -0,0 +1,82 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_py3 import Dataset
+
+
+class GoogleBigQueryObjectDataset(Dataset):
+ """Google BigQuery service dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset. Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param table_name: This property will be retired. Please consider using
+ database + table properties instead.
+ :type table_name: object
+ :param table: The table name in Google BigQuery. Type: string (or
+ Expression with resultType string).
+ :type table: object
+ :param dataset: The dataset name in Google BigQuery. Type: string (or
+ Expression with resultType string).
+ :type dataset: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+ 'table': {'key': 'typeProperties.table', 'type': 'object'},
+ 'dataset': {'key': 'typeProperties.dataset', 'type': 'object'},
+ }
+
+ def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, dataset=None, **kwargs) -> None:
+ super(GoogleBigQueryObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+ self.table_name = table_name
+ self.table = table
+ self.dataset = dataset
+ self.type = 'GoogleBigQueryObject'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source.py
new file mode 100644
index 000000000000..3a28d2563a8b
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source import CopySource
+
+
+class GoogleBigQuerySource(CopySource):
+ """A copy activity Google BigQuery service source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param query: A query to retrieve data from source. Type: string (or
+ Expression with resultType string).
+ :type query: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'query': {'key': 'query', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(GoogleBigQuerySource, self).__init__(**kwargs)
+ self.query = kwargs.get('query', None)
+ self.type = 'GoogleBigQuerySource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source_py3.py
new file mode 100644
index 000000000000..49364b4d0e3f
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source_py3.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source_py3 import CopySource
+
+
+class GoogleBigQuerySource(CopySource):
+ """A copy activity Google BigQuery service source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param query: A query to retrieve data from source. Type: string (or
+ Expression with resultType string).
+ :type query: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'query': {'key': 'query', 'type': 'object'},
+ }
+
+ def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
+ super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.query = query
+ self.type = 'GoogleBigQuerySource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service.py
new file mode 100644
index 000000000000..57913f779ca1
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service.py
@@ -0,0 +1,69 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class GreenplumLinkedService(LinkedService):
+ """Greenplum Database linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param connection_string: An ODBC connection string. Type: string,
+ SecureString or AzureKeyVaultSecretReference.
+ :type connection_string: object
+ :param pwd: The Azure Key Vault secret reference of the password in the
+ connection string.
+ :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+ 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(GreenplumLinkedService, self).__init__(**kwargs)
+ self.connection_string = kwargs.get('connection_string', None)
+ self.pwd = kwargs.get('pwd', None)
+ self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.type = 'Greenplum'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service_py3.py
new file mode 100644
index 000000000000..bd707a5e85c9
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service_py3.py
@@ -0,0 +1,69 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class GreenplumLinkedService(LinkedService):
+ """Greenplum Database linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param connection_string: An ODBC connection string. Type: string,
+ SecureString or AzureKeyVaultSecretReference.
+ :type connection_string: object
+ :param pwd: The Azure Key Vault secret reference of the password in the
+ connection string.
+ :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+ 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None:
+ super(GreenplumLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+ self.connection_string = connection_string
+ self.pwd = pwd
+ self.encrypted_credential = encrypted_credential
+ self.type = 'Greenplum'
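Illustrative usage (not part of the generated patch): a sketch of the linked service above; the host, database, and secret names are hypothetical, and the ODBC-style connection string format is an assumption. The `pwd` property carries an Azure Key Vault secret reference so the password stays out of the connection string.

    from azure.mgmt.datafactory.models import (
        GreenplumLinkedService, AzureKeyVaultSecretReference,
        LinkedServiceReference)

    gp_ls = GreenplumLinkedService(
        connection_string='HOST=gp.example.com;PORT=5432;DB=dw;UID=loader',
        pwd=AzureKeyVaultSecretReference(
            store=LinkedServiceReference(reference_name='AzureKeyVaultLS'),
            secret_name='greenplum-password'))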
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source.py
new file mode 100644
index 000000000000..086f12419f4a
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source import CopySource
+
+
+class GreenplumSource(CopySource):
+ """A copy activity Greenplum Database source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param query: A query to retrieve data from source. Type: string (or
+ Expression with resultType string).
+ :type query: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'query': {'key': 'query', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(GreenplumSource, self).__init__(**kwargs)
+ self.query = kwargs.get('query', None)
+ self.type = 'GreenplumSource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source_py3.py
new file mode 100644
index 000000000000..8b789deb43da
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source_py3.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source_py3 import CopySource
+
+
+class GreenplumSource(CopySource):
+ """A copy activity Greenplum Database source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param query: A query to retrieve data from source. Type: string (or
+ Expression with resultType string).
+ :type query: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'query': {'key': 'query', 'type': 'object'},
+ }
+
+ def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
+ super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.query = query
+ self.type = 'GreenplumSource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset.py
new file mode 100644
index 000000000000..eb0ea08ee544
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset.py
@@ -0,0 +1,82 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset import Dataset
+
+
+class GreenplumTableDataset(Dataset):
+ """Greenplum Database dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset. Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param table_name: This property will be retired. Please consider using
+ schema + table properties instead.
+ :type table_name: object
+ :param table: The table name in Greenplum. Type: string (or Expression
+ with resultType string).
+ :type table: object
+ :param greenplum_table_dataset_schema: The schema name in Greenplum. Type:
+ string (or Expression with resultType string).
+ :type greenplum_table_dataset_schema: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+ 'table': {'key': 'typeProperties.table', 'type': 'object'},
+ 'greenplum_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(GreenplumTableDataset, self).__init__(**kwargs)
+ self.table_name = kwargs.get('table_name', None)
+ self.table = kwargs.get('table', None)
+ self.greenplum_table_dataset_schema = kwargs.get('greenplum_table_dataset_schema', None)
+ self.type = 'GreenplumTable'
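Illustrative usage (not part of the generated patch): a sketch of the dataset above; the reference name, schema, and table are hypothetical. `greenplum_table_dataset_schema` is the Python-side name for typeProperties.schema, which avoids a clash with the base class's `schema` property.

    from azure.mgmt.datafactory.models import (
        GreenplumTableDataset, LinkedServiceReference)

    gp_dataset = GreenplumTableDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='GreenplumLinkedService'),
        greenplum_table_dataset_schema='public',
        table='orders')            # preferred over the retired table_name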
If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The table name of Greenplum. Type: string (or Expression + with resultType string). + :type table: object + :param greenplum_table_dataset_schema: The schema name of Greenplum. Type: + string (or Expression with resultType string). + :type greenplum_table_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'greenplum_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, greenplum_table_dataset_schema=None, **kwargs) -> None: + super(GreenplumTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.table = table + self.greenplum_table_dataset_schema = greenplum_table_dataset_schema + self.type = 'GreenplumTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service.py new file mode 100644 index 000000000000..b6affd5caa0d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service.py @@ -0,0 +1,114 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class HBaseLinkedService(LinkedService): + """HBase server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The IP address or host name of the HBase server. + (i.e. 192.168.222.160) + :type host: object + :param port: The TCP port that the HBase instance uses to listen for + client connections. The default value is 9090. + :type port: object + :param http_path: The partial URL corresponding to the HBase server. (i.e. + /gateway/sandbox/hbase/version) + :type http_path: object + :param authentication_type: Required. The authentication mechanism to use + to connect to the HBase server. Possible values include: 'Anonymous', + 'Basic' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.HBaseAuthenticationType + :param username: The user name used to connect to the HBase instance. + :type username: object + :param password: The password corresponding to the user name. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param allow_host_name_cn_mismatch: Specifies whether to require a + CA-issued SSL certificate name to match the host name of the server when + connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HBaseLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.port = kwargs.get('port', None) + self.http_path = kwargs.get('http_path', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) + self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'HBase' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service_py3.py new file mode 100644 index 000000000000..a8823e2e8937 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service_py3.py @@ -0,0 +1,114 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class HBaseLinkedService(LinkedService): + """HBase server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
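# ---------------------------------------------------------------------------
# Illustrative usage sketch (not from this patch): a minimal HBaseLinkedService
# with Basic authentication over SSL. SecureString is the simplest SecretBase
# implementation; host, port, and credentials are assumptions for the example.
from azure.mgmt.datafactory.models import HBaseLinkedService, SecureString

hbase_ls = HBaseLinkedService(
    host='192.168.222.160',       # required
    authentication_type='Basic',  # required: 'Anonymous' or 'Basic'
    port=9090,
    http_path='/gateway/sandbox/hbase/version',
    username='hbase_user',
    password=SecureString(value='<placeholder>'),
    enable_ssl=True)
# ---------------------------------------------------------------------------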
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The IP address or host name of the HBase server. + (i.e. 192.168.222.160) + :type host: object + :param port: The TCP port that the HBase instance uses to listen for + client connections. The default value is 9090. + :type port: object + :param http_path: The partial URL corresponding to the HBase server. (i.e. + /gateway/sandbox/hbase/version) + :type http_path: object + :param authentication_type: Required. The authentication mechanism to use + to connect to the HBase server. Possible values include: 'Anonymous', + 'Basic' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.HBaseAuthenticationType + :param username: The user name used to connect to the HBase instance. + :type username: object + :param password: The password corresponding to the user name. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param allow_host_name_cn_mismatch: Specifies whether to require a + CA-issued SSL certificate name to match the host name of the server when + connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, http_path=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: + super(HBaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.port = port + self.http_path = http_path + self.authentication_type = authentication_type + self.username = username + self.password = password + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.encrypted_credential = encrypted_credential + self.type = 'HBase' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset.py new file mode 100644 index 000000000000..5de32bcb6871 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class HBaseObjectDataset(Dataset): + """HBase server dataset. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HBaseObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'HBaseObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset_py3.py new file mode 100644 index 000000000000..27fc0d1514ea --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class HBaseObjectDataset(Dataset): + """HBase server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. 
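# ---------------------------------------------------------------------------
# Illustrative usage sketch (not from this patch): pointing the
# HBaseObjectDataset above at a table; the linked-service reference name and
# table name are assumptions for the example.
from azure.mgmt.datafactory.models import (
    HBaseObjectDataset, LinkedServiceReference)

hbase_ds = HBaseObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='HBaseLS'),
    table_name='customer_events')  # serialized to typeProperties.tableName
# ---------------------------------------------------------------------------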
+ :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(HBaseObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'HBaseObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source.py new file mode 100644 index 000000000000..eb6e3f1789bb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class HBaseSource(CopySource): + """A copy activity HBase server source. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HBaseSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'HBaseSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source_py3.py new file mode 100644 index 000000000000..b2680e95c212 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class HBaseSource(CopySource): + """A copy activity HBase server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
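# ---------------------------------------------------------------------------
# Illustrative usage sketch (not from this patch): HBaseSource inside a copy
# activity, with retry settings. The query text, dataset names, and the blob
# sink are assumptions; source_retry_wait uses the timespan pattern documented
# above.
from azure.mgmt.datafactory.models import (
    BlobSink, CopyActivity, DatasetReference, HBaseSource)

copy_hbase = CopyActivity(
    name='HBaseToBlob',
    inputs=[DatasetReference(reference_name='HBaseDataset')],
    outputs=[DatasetReference(reference_name='BlobDataset')],
    source=HBaseSource(
        query='SELECT * FROM customer_events',
        source_retry_count=3,
        source_retry_wait='00:00:30'),
    sink=BlobSink())
# ---------------------------------------------------------------------------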
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'HBaseSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity.py new file mode 100644 index 000000000000..4110b0f8b7de --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity.py @@ -0,0 +1,96 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity import ExecutionActivity + + +class HDInsightHiveActivity(ExecutionActivity): + """HDInsight Hive activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. + :type storage_linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: 'None', + 'Always', 'Failure' + :type get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :param script_path: Script path. Type: string (or Expression with + resultType string). + :type script_path: object + :param script_linked_service: Script linked service reference. 
+ :type script_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param defines: Allows user to specify defines for Hive job request. + :type defines: dict[str, object] + :param variables: User specified arguments under hivevar namespace. + :type variables: list[object] + :param query_timeout: Query timeout value (in minutes). Effective when + the HDInsight cluster is with ESP (Enterprise Security Package) + :type query_timeout: int + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, + 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + 'variables': {'key': 'typeProperties.variables', 'type': '[object]'}, + 'query_timeout': {'key': 'typeProperties.queryTimeout', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(HDInsightHiveActivity, self).__init__(**kwargs) + self.storage_linked_services = kwargs.get('storage_linked_services', None) + self.arguments = kwargs.get('arguments', None) + self.get_debug_info = kwargs.get('get_debug_info', None) + self.script_path = kwargs.get('script_path', None) + self.script_linked_service = kwargs.get('script_linked_service', None) + self.defines = kwargs.get('defines', None) + self.variables = kwargs.get('variables', None) + self.query_timeout = kwargs.get('query_timeout', None) + self.type = 'HDInsightHive' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity_py3.py new file mode 100644 index 000000000000..f8a5441fe767 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity_py3.py @@ -0,0 +1,96 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class HDInsightHiveActivity(ExecutionActivity): + """HDInsight Hive activity type. + + All required parameters must be populated in order to send to Azure. 
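# ---------------------------------------------------------------------------
# Illustrative usage sketch (not from this patch): an HDInsightHiveActivity
# using the new query_timeout property (minutes), which only takes effect on
# ESP (Enterprise Security Package) clusters. Script path and reference names
# are assumptions for the example.
from azure.mgmt.datafactory.models import (
    HDInsightHiveActivity, LinkedServiceReference)

hive_activity = HDInsightHiveActivity(
    name='RunHiveScript',
    linked_service_name=LinkedServiceReference(reference_name='HDInsightLS'),
    script_path='scripts/transform.hql',
    script_linked_service=LinkedServiceReference(reference_name='StorageLS'),
    defines={'inputDir': 'raw/2019/06'},
    query_timeout=60)  # minutes; effective on ESP clusters only
# ---------------------------------------------------------------------------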
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. + :type storage_linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: 'None', + 'Always', 'Failure' + :type get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :param script_path: Script path. Type: string (or Expression with + resultType string). + :type script_path: object + :param script_linked_service: Script linked service reference. + :type script_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param defines: Allows user to specify defines for Hive job request. + :type defines: dict[str, object] + :param variables: User specified arguments under hivevar namespace. + :type variables: list[object] + :param query_timeout: Query timeout value (in minutes). 
Effective when + the HDInsight cluster is with ESP (Enterprise Security Package) + :type query_timeout: int + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, + 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + 'variables': {'key': 'typeProperties.variables', 'type': '[object]'}, + 'query_timeout': {'key': 'typeProperties.queryTimeout', 'type': 'int'}, + } + + def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, script_path=None, script_linked_service=None, defines=None, variables=None, query_timeout: int=None, **kwargs) -> None: + super(HDInsightHiveActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.storage_linked_services = storage_linked_services + self.arguments = arguments + self.get_debug_info = get_debug_info + self.script_path = script_path + self.script_linked_service = script_linked_service + self.defines = defines + self.variables = variables + self.query_timeout = query_timeout + self.type = 'HDInsightHive' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py new file mode 100644 index 000000000000..810525342d82 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py @@ -0,0 +1,96 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class HDInsightLinkedService(LinkedService): + """HDInsight linked service. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param cluster_uri: Required. HDInsight cluster URI. Type: string (or + Expression with resultType string). + :type cluster_uri: object + :param user_name: HDInsight cluster user name. Type: string (or Expression + with resultType string). + :type user_name: object + :param password: HDInsight cluster password. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param linked_service_name: The Azure Storage linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param hcatalog_linked_service_name: A reference to the Azure SQL linked + service that points to the HCatalog database. + :type hcatalog_linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + :param is_esp_enabled: Specify if the HDInsight is created with ESP + (Enterprise Security Package). Type: Boolean. + :type is_esp_enabled: object + :param file_system: Specify the FileSystem if the main storage for the + HDInsight is ADLS Gen2. Type: string (or Expression with resultType + string). 
+ :type file_system: object + """ + + _validation = { + 'type': {'required': True}, + 'cluster_uri': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'cluster_uri': {'key': 'typeProperties.clusterUri', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, + 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'is_esp_enabled': {'key': 'typeProperties.isEspEnabled', 'type': 'object'}, + 'file_system': {'key': 'typeProperties.fileSystem', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HDInsightLinkedService, self).__init__(**kwargs) + self.cluster_uri = kwargs.get('cluster_uri', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.linked_service_name = kwargs.get('linked_service_name', None) + self.hcatalog_linked_service_name = kwargs.get('hcatalog_linked_service_name', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.is_esp_enabled = kwargs.get('is_esp_enabled', None) + self.file_system = kwargs.get('file_system', None) + self.type = 'HDInsight' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py new file mode 100644 index 000000000000..5c384f7d6288 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py @@ -0,0 +1,96 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class HDInsightLinkedService(LinkedService): + """HDInsight linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. 
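# ---------------------------------------------------------------------------
# Illustrative usage sketch (not from this patch): a BYOC HDInsight linked
# service whose primary storage is ADLS Gen2, using the file_system property
# this change introduces, together with is_esp_enabled. The cluster URI,
# credentials, and reference names are assumptions for the example.
from azure.mgmt.datafactory.models import (
    HDInsightLinkedService, LinkedServiceReference, SecureString)

hdi_ls = HDInsightLinkedService(
    cluster_uri='https://mycluster.azurehdinsight.net',  # required
    user_name='admin',
    password=SecureString(value='<placeholder>'),
    linked_service_name=LinkedServiceReference(reference_name='AdlsGen2LS'),
    is_esp_enabled=True,
    file_system='myfilesystem')  # ADLS Gen2 filesystem of the main storage
# ---------------------------------------------------------------------------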
+ :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param cluster_uri: Required. HDInsight cluster URI. Type: string (or + Expression with resultType string). + :type cluster_uri: object + :param user_name: HDInsight cluster user name. Type: string (or Expression + with resultType string). + :type user_name: object + :param password: HDInsight cluster password. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param linked_service_name: The Azure Storage linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param hcatalog_linked_service_name: A reference to the Azure SQL linked + service that points to the HCatalog database. + :type hcatalog_linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + :param is_esp_enabled: Specify if the HDInsight is created with ESP + (Enterprise Security Package). Type: Boolean. + :type is_esp_enabled: object + :param file_system: Specify the FileSystem if the main storage for the + HDInsight is ADLS Gen2. Type: string (or Expression with resultType + string). + :type file_system: object + """ + + _validation = { + 'type': {'required': True}, + 'cluster_uri': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'cluster_uri': {'key': 'typeProperties.clusterUri', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, + 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'is_esp_enabled': {'key': 'typeProperties.isEspEnabled', 'type': 'object'}, + 'file_system': {'key': 'typeProperties.fileSystem', 'type': 'object'}, + } + + def __init__(self, *, cluster_uri, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_name=None, password=None, linked_service_name=None, hcatalog_linked_service_name=None, encrypted_credential=None, is_esp_enabled=None, file_system=None, **kwargs) -> None: + super(HDInsightLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.cluster_uri = cluster_uri + self.user_name = user_name + self.password = password + self.linked_service_name = linked_service_name + self.hcatalog_linked_service_name = hcatalog_linked_service_name + self.encrypted_credential = encrypted_credential + self.is_esp_enabled = is_esp_enabled + self.file_system = file_system + self.type = 'HDInsight' diff --git 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity.py new file mode 100644 index 000000000000..20655843e1db --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity import ExecutionActivity + + +class HDInsightMapReduceActivity(ExecutionActivity): + """HDInsight MapReduce activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. + :type storage_linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: 'None', + 'Always', 'Failure' + :type get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :param class_name: Required. Class name. Type: string (or Expression with + resultType string). + :type class_name: object + :param jar_file_path: Required. Jar path. Type: string (or Expression with + resultType string). + :type jar_file_path: object + :param jar_linked_service: Jar linked service reference. + :type jar_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param jar_libs: Jar libs. + :type jar_libs: list[object] + :param defines: Allows user to specify defines for the MapReduce job + request. 
+ :type defines: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'class_name': {'required': True}, + 'jar_file_path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'class_name': {'key': 'typeProperties.className', 'type': 'object'}, + 'jar_file_path': {'key': 'typeProperties.jarFilePath', 'type': 'object'}, + 'jar_linked_service': {'key': 'typeProperties.jarLinkedService', 'type': 'LinkedServiceReference'}, + 'jar_libs': {'key': 'typeProperties.jarLibs', 'type': '[object]'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + } + + def __init__(self, **kwargs): + super(HDInsightMapReduceActivity, self).__init__(**kwargs) + self.storage_linked_services = kwargs.get('storage_linked_services', None) + self.arguments = kwargs.get('arguments', None) + self.get_debug_info = kwargs.get('get_debug_info', None) + self.class_name = kwargs.get('class_name', None) + self.jar_file_path = kwargs.get('jar_file_path', None) + self.jar_linked_service = kwargs.get('jar_linked_service', None) + self.jar_libs = kwargs.get('jar_libs', None) + self.defines = kwargs.get('defines', None) + self.type = 'HDInsightMapReduce' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity_py3.py new file mode 100644 index 000000000000..dffa9f119069 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity_py3.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class HDInsightMapReduceActivity(ExecutionActivity): + """HDInsight MapReduce activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. 
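# ---------------------------------------------------------------------------
# Illustrative usage sketch (not from this patch): an HDInsightMapReduceActivity
# with its two required type properties, class_name and jar_file_path. The jar
# location and reference names are assumptions for the example.
from azure.mgmt.datafactory.models import (
    HDInsightMapReduceActivity, LinkedServiceReference)

mr_activity = HDInsightMapReduceActivity(
    name='RunWordCount',
    class_name='org.example.WordCount',  # required
    jar_file_path='jars/wordcount.jar',  # required
    jar_linked_service=LinkedServiceReference(reference_name='StorageLS'),
    arguments=['wasb:///input', 'wasb:///output'],
    get_debug_info='Failure')
# ---------------------------------------------------------------------------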
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. + :type storage_linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: 'None', + 'Always', 'Failure' + :type get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :param class_name: Required. Class name. Type: string (or Expression with + resultType string). + :type class_name: object + :param jar_file_path: Required. Jar path. Type: string (or Expression with + resultType string). + :type jar_file_path: object + :param jar_linked_service: Jar linked service reference. + :type jar_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param jar_libs: Jar libs. + :type jar_libs: list[object] + :param defines: Allows user to specify defines for the MapReduce job + request. + :type defines: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'class_name': {'required': True}, + 'jar_file_path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'class_name': {'key': 'typeProperties.className', 'type': 'object'}, + 'jar_file_path': {'key': 'typeProperties.jarFilePath', 'type': 'object'}, + 'jar_linked_service': {'key': 'typeProperties.jarLinkedService', 'type': 'LinkedServiceReference'}, + 'jar_libs': {'key': 'typeProperties.jarLibs', 'type': '[object]'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + } + + def __init__(self, *, name: str, class_name, jar_file_path, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, jar_linked_service=None, jar_libs=None, defines=None, **kwargs) -> None: + super(HDInsightMapReduceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.storage_linked_services = storage_linked_services + self.arguments = arguments + self.get_debug_info = get_debug_info + self.class_name = class_name + self.jar_file_path = jar_file_path + self.jar_linked_service = 
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py
new file mode 100644
index 000000000000..d386aac9d9aa
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py
@@ -0,0 +1,237 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class HDInsightOnDemandLinkedService(LinkedService):
+    """HDInsight on-demand linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param cluster_size: Required. Number of worker/data nodes in the
+     cluster. Suggested value: 4. Type: string (or Expression with
+     resultType string).
+    :type cluster_size: object
+    :param time_to_live: Required. The allowed idle time for the on-demand
+     HDInsight cluster. Specifies how long the on-demand HDInsight cluster
+     stays alive after completion of an activity run if there are no other
+     active jobs in the cluster. The minimum value is 5 minutes. Type:
+     string (or Expression with resultType string).
+    :type time_to_live: object
+    :param version: Required. Version of the HDInsight cluster. Type: string
+     (or Expression with resultType string).
+    :type version: object
+    :param linked_service_name: Required. Azure Storage linked service to be
+     used by the on-demand cluster for storing and processing data.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param host_subscription_id: Required. The customer’s subscription to
+     host the cluster. Type: string (or Expression with resultType string).
+    :type host_subscription_id: object
+    :param service_principal_id: The service principal id for the
+     hostSubscriptionId. Type: string (or Expression with resultType
+     string).
+    :type service_principal_id: object
+    :param service_principal_key: The key for the service principal id.
+    :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
+    :param tenant: Required. The tenant id/name to which the service
+     principal belongs. Type: string (or Expression with resultType string).
+    :type tenant: object
+    :param cluster_resource_group: Required.
The resource group where the + cluster belongs. Type: string (or Expression with resultType string). + :type cluster_resource_group: object + :param cluster_name_prefix: The prefix of cluster name, postfix will be + distinct with timestamp. Type: string (or Expression with resultType + string). + :type cluster_name_prefix: object + :param cluster_user_name: The username to access the cluster. Type: string + (or Expression with resultType string). + :type cluster_user_name: object + :param cluster_password: The password to access the cluster. + :type cluster_password: ~azure.mgmt.datafactory.models.SecretBase + :param cluster_ssh_user_name: The username to SSH remotely connect to + cluster’s node (for Linux). Type: string (or Expression with resultType + string). + :type cluster_ssh_user_name: object + :param cluster_ssh_password: The password to SSH remotely connect + cluster’s node (for Linux). + :type cluster_ssh_password: ~azure.mgmt.datafactory.models.SecretBase + :param additional_linked_service_names: Specifies additional storage + accounts for the HDInsight linked service so that the Data Factory service + can register them on your behalf. + :type additional_linked_service_names: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :param hcatalog_linked_service_name: The name of Azure SQL linked service + that point to the HCatalog database. The on-demand HDInsight cluster is + created by using the Azure SQL database as the metastore. + :type hcatalog_linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param cluster_type: The cluster type. Type: string (or Expression with + resultType string). + :type cluster_type: object + :param spark_version: The version of spark if the cluster type is 'spark'. + Type: string (or Expression with resultType string). + :type spark_version: object + :param core_configuration: Specifies the core configuration parameters (as + in core-site.xml) for the HDInsight cluster to be created. + :type core_configuration: object + :param h_base_configuration: Specifies the HBase configuration parameters + (hbase-site.xml) for the HDInsight cluster. + :type h_base_configuration: object + :param hdfs_configuration: Specifies the HDFS configuration parameters + (hdfs-site.xml) for the HDInsight cluster. + :type hdfs_configuration: object + :param hive_configuration: Specifies the hive configuration parameters + (hive-site.xml) for the HDInsight cluster. + :type hive_configuration: object + :param map_reduce_configuration: Specifies the MapReduce configuration + parameters (mapred-site.xml) for the HDInsight cluster. + :type map_reduce_configuration: object + :param oozie_configuration: Specifies the Oozie configuration parameters + (oozie-site.xml) for the HDInsight cluster. + :type oozie_configuration: object + :param storm_configuration: Specifies the Storm configuration parameters + (storm-site.xml) for the HDInsight cluster. + :type storm_configuration: object + :param yarn_configuration: Specifies the Yarn configuration parameters + (yarn-site.xml) for the HDInsight cluster. + :type yarn_configuration: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + :param head_node_size: Specifies the size of the head node for the + HDInsight cluster. 
+ :type head_node_size: object + :param data_node_size: Specifies the size of the data node for the + HDInsight cluster. + :type data_node_size: object + :param zookeeper_node_size: Specifies the size of the Zoo Keeper node for + the HDInsight cluster. + :type zookeeper_node_size: object + :param script_actions: Custom script actions to run on HDI ondemand + cluster once it's up. Please refer to + https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. + :type script_actions: list[~azure.mgmt.datafactory.models.ScriptAction] + :param virtual_network_id: The ARM resource ID for the vNet to which the + cluster should be joined after creation. Type: string (or Expression with + resultType string). + :type virtual_network_id: object + :param subnet_name: The ARM resource ID for the subnet in the vNet. If + virtualNetworkId was specified, then this property is required. Type: + string (or Expression with resultType string). + :type subnet_name: object + """ + + _validation = { + 'type': {'required': True}, + 'cluster_size': {'required': True}, + 'time_to_live': {'required': True}, + 'version': {'required': True}, + 'linked_service_name': {'required': True}, + 'host_subscription_id': {'required': True}, + 'tenant': {'required': True}, + 'cluster_resource_group': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'cluster_size': {'key': 'typeProperties.clusterSize', 'type': 'object'}, + 'time_to_live': {'key': 'typeProperties.timeToLive', 'type': 'object'}, + 'version': {'key': 'typeProperties.version', 'type': 'object'}, + 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, + 'host_subscription_id': {'key': 'typeProperties.hostSubscriptionId', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'cluster_resource_group': {'key': 'typeProperties.clusterResourceGroup', 'type': 'object'}, + 'cluster_name_prefix': {'key': 'typeProperties.clusterNamePrefix', 'type': 'object'}, + 'cluster_user_name': {'key': 'typeProperties.clusterUserName', 'type': 'object'}, + 'cluster_password': {'key': 'typeProperties.clusterPassword', 'type': 'SecretBase'}, + 'cluster_ssh_user_name': {'key': 'typeProperties.clusterSshUserName', 'type': 'object'}, + 'cluster_ssh_password': {'key': 'typeProperties.clusterSshPassword', 'type': 'SecretBase'}, + 'additional_linked_service_names': {'key': 'typeProperties.additionalLinkedServiceNames', 'type': '[LinkedServiceReference]'}, + 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, + 'cluster_type': {'key': 'typeProperties.clusterType', 'type': 'object'}, + 'spark_version': {'key': 'typeProperties.sparkVersion', 'type': 'object'}, + 'core_configuration': {'key': 'typeProperties.coreConfiguration', 'type': 'object'}, + 
'h_base_configuration': {'key': 'typeProperties.hBaseConfiguration', 'type': 'object'},
+        'hdfs_configuration': {'key': 'typeProperties.hdfsConfiguration', 'type': 'object'},
+        'hive_configuration': {'key': 'typeProperties.hiveConfiguration', 'type': 'object'},
+        'map_reduce_configuration': {'key': 'typeProperties.mapReduceConfiguration', 'type': 'object'},
+        'oozie_configuration': {'key': 'typeProperties.oozieConfiguration', 'type': 'object'},
+        'storm_configuration': {'key': 'typeProperties.stormConfiguration', 'type': 'object'},
+        'yarn_configuration': {'key': 'typeProperties.yarnConfiguration', 'type': 'object'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+        'head_node_size': {'key': 'typeProperties.headNodeSize', 'type': 'object'},
+        'data_node_size': {'key': 'typeProperties.dataNodeSize', 'type': 'object'},
+        'zookeeper_node_size': {'key': 'typeProperties.zookeeperNodeSize', 'type': 'object'},
+        'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'},
+        'virtual_network_id': {'key': 'typeProperties.virtualNetworkId', 'type': 'object'},
+        'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(HDInsightOnDemandLinkedService, self).__init__(**kwargs)
+        self.cluster_size = kwargs.get('cluster_size', None)
+        self.time_to_live = kwargs.get('time_to_live', None)
+        self.version = kwargs.get('version', None)
+        self.linked_service_name = kwargs.get('linked_service_name', None)
+        self.host_subscription_id = kwargs.get('host_subscription_id', None)
+        self.service_principal_id = kwargs.get('service_principal_id', None)
+        self.service_principal_key = kwargs.get('service_principal_key', None)
+        self.tenant = kwargs.get('tenant', None)
+        self.cluster_resource_group = kwargs.get('cluster_resource_group', None)
+        self.cluster_name_prefix = kwargs.get('cluster_name_prefix', None)
+        self.cluster_user_name = kwargs.get('cluster_user_name', None)
+        self.cluster_password = kwargs.get('cluster_password', None)
+        self.cluster_ssh_user_name = kwargs.get('cluster_ssh_user_name', None)
+        self.cluster_ssh_password = kwargs.get('cluster_ssh_password', None)
+        self.additional_linked_service_names = kwargs.get('additional_linked_service_names', None)
+        self.hcatalog_linked_service_name = kwargs.get('hcatalog_linked_service_name', None)
+        self.cluster_type = kwargs.get('cluster_type', None)
+        self.spark_version = kwargs.get('spark_version', None)
+        self.core_configuration = kwargs.get('core_configuration', None)
+        self.h_base_configuration = kwargs.get('h_base_configuration', None)
+        self.hdfs_configuration = kwargs.get('hdfs_configuration', None)
+        self.hive_configuration = kwargs.get('hive_configuration', None)
+        self.map_reduce_configuration = kwargs.get('map_reduce_configuration', None)
+        self.oozie_configuration = kwargs.get('oozie_configuration', None)
+        self.storm_configuration = kwargs.get('storm_configuration', None)
+        self.yarn_configuration = kwargs.get('yarn_configuration', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
+        self.head_node_size = kwargs.get('head_node_size', None)
+        self.data_node_size = kwargs.get('data_node_size', None)
+        self.zookeeper_node_size = kwargs.get('zookeeper_node_size', None)
+        self.script_actions = kwargs.get('script_actions', None)
+        self.virtual_network_id = kwargs.get('virtual_network_id', None)
+        self.subnet_name = kwargs.get('subnet_name', None)
+        self.type = 'HDInsightOnDemand'
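[Editor's note] The vNet support this PR advertises lands here as the new virtualNetworkId and subnetName type properties (and subnetName is required whenever virtualNetworkId is set, per the docstring). A minimal sketch of joining an on-demand cluster to an existing vNet; the subscription, tenant, and resource names are placeholders, and a real deployment would typically pull the service principal key from Key Vault rather than an inline SecureString:

    from azure.mgmt.datafactory.models import (
        HDInsightOnDemandLinkedService,
        LinkedServiceReference,
        SecureString,
    )

    on_demand_hdi = HDInsightOnDemandLinkedService(
        cluster_size=4,
        time_to_live='00:15:00',
        version='3.6',
        linked_service_name=LinkedServiceReference(
            reference_name='AzureStorageLinkedService'),
        host_subscription_id='<subscription-id>',
        service_principal_id='<sp-app-id>',
        service_principal_key=SecureString(value='<sp-key>'),
        tenant='<tenant-id>',
        cluster_resource_group='<resource-group>',
        # New in this change: join the cluster to a vNet after creation.
        virtual_network_id='/subscriptions/<subscription-id>/resourceGroups/'
                           '<resource-group>/providers/Microsoft.Network/'
                           'virtualNetworks/<vnet-name>',
        subnet_name='<subnet-name>',  # required once virtual_network_id is set
    )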
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service_py3.py
new file mode 100644
index 000000000000..178585c9b51d
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service_py3.py
@@ -0,0 +1,237 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class HDInsightOnDemandLinkedService(LinkedService):
+    """HDInsight on-demand linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param cluster_size: Required. Number of worker/data nodes in the
+     cluster. Suggested value: 4. Type: string (or Expression with
+     resultType string).
+    :type cluster_size: object
+    :param time_to_live: Required. The allowed idle time for the on-demand
+     HDInsight cluster. Specifies how long the on-demand HDInsight cluster
+     stays alive after completion of an activity run if there are no other
+     active jobs in the cluster. The minimum value is 5 minutes. Type:
+     string (or Expression with resultType string).
+    :type time_to_live: object
+    :param version: Required. Version of the HDInsight cluster. Type: string
+     (or Expression with resultType string).
+    :type version: object
+    :param linked_service_name: Required. Azure Storage linked service to be
+     used by the on-demand cluster for storing and processing data.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param host_subscription_id: Required. The customer’s subscription to
+     host the cluster. Type: string (or Expression with resultType string).
+    :type host_subscription_id: object
+    :param service_principal_id: The service principal id for the
+     hostSubscriptionId. Type: string (or Expression with resultType
+     string).
+    :type service_principal_id: object
+    :param service_principal_key: The key for the service principal id.
+    :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
+    :param tenant: Required. The tenant id/name to which the service
+     principal belongs. Type: string (or Expression with resultType string).
+    :type tenant: object
+    :param cluster_resource_group: Required. The resource group where the
+     cluster belongs. Type: string (or Expression with resultType string).
+ :type cluster_resource_group: object + :param cluster_name_prefix: The prefix of cluster name, postfix will be + distinct with timestamp. Type: string (or Expression with resultType + string). + :type cluster_name_prefix: object + :param cluster_user_name: The username to access the cluster. Type: string + (or Expression with resultType string). + :type cluster_user_name: object + :param cluster_password: The password to access the cluster. + :type cluster_password: ~azure.mgmt.datafactory.models.SecretBase + :param cluster_ssh_user_name: The username to SSH remotely connect to + cluster’s node (for Linux). Type: string (or Expression with resultType + string). + :type cluster_ssh_user_name: object + :param cluster_ssh_password: The password to SSH remotely connect + cluster’s node (for Linux). + :type cluster_ssh_password: ~azure.mgmt.datafactory.models.SecretBase + :param additional_linked_service_names: Specifies additional storage + accounts for the HDInsight linked service so that the Data Factory service + can register them on your behalf. + :type additional_linked_service_names: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :param hcatalog_linked_service_name: The name of Azure SQL linked service + that point to the HCatalog database. The on-demand HDInsight cluster is + created by using the Azure SQL database as the metastore. + :type hcatalog_linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param cluster_type: The cluster type. Type: string (or Expression with + resultType string). + :type cluster_type: object + :param spark_version: The version of spark if the cluster type is 'spark'. + Type: string (or Expression with resultType string). + :type spark_version: object + :param core_configuration: Specifies the core configuration parameters (as + in core-site.xml) for the HDInsight cluster to be created. + :type core_configuration: object + :param h_base_configuration: Specifies the HBase configuration parameters + (hbase-site.xml) for the HDInsight cluster. + :type h_base_configuration: object + :param hdfs_configuration: Specifies the HDFS configuration parameters + (hdfs-site.xml) for the HDInsight cluster. + :type hdfs_configuration: object + :param hive_configuration: Specifies the hive configuration parameters + (hive-site.xml) for the HDInsight cluster. + :type hive_configuration: object + :param map_reduce_configuration: Specifies the MapReduce configuration + parameters (mapred-site.xml) for the HDInsight cluster. + :type map_reduce_configuration: object + :param oozie_configuration: Specifies the Oozie configuration parameters + (oozie-site.xml) for the HDInsight cluster. + :type oozie_configuration: object + :param storm_configuration: Specifies the Storm configuration parameters + (storm-site.xml) for the HDInsight cluster. + :type storm_configuration: object + :param yarn_configuration: Specifies the Yarn configuration parameters + (yarn-site.xml) for the HDInsight cluster. + :type yarn_configuration: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + :param head_node_size: Specifies the size of the head node for the + HDInsight cluster. + :type head_node_size: object + :param data_node_size: Specifies the size of the data node for the + HDInsight cluster. 
+ :type data_node_size: object + :param zookeeper_node_size: Specifies the size of the Zoo Keeper node for + the HDInsight cluster. + :type zookeeper_node_size: object + :param script_actions: Custom script actions to run on HDI ondemand + cluster once it's up. Please refer to + https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. + :type script_actions: list[~azure.mgmt.datafactory.models.ScriptAction] + :param virtual_network_id: The ARM resource ID for the vNet to which the + cluster should be joined after creation. Type: string (or Expression with + resultType string). + :type virtual_network_id: object + :param subnet_name: The ARM resource ID for the subnet in the vNet. If + virtualNetworkId was specified, then this property is required. Type: + string (or Expression with resultType string). + :type subnet_name: object + """ + + _validation = { + 'type': {'required': True}, + 'cluster_size': {'required': True}, + 'time_to_live': {'required': True}, + 'version': {'required': True}, + 'linked_service_name': {'required': True}, + 'host_subscription_id': {'required': True}, + 'tenant': {'required': True}, + 'cluster_resource_group': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'cluster_size': {'key': 'typeProperties.clusterSize', 'type': 'object'}, + 'time_to_live': {'key': 'typeProperties.timeToLive', 'type': 'object'}, + 'version': {'key': 'typeProperties.version', 'type': 'object'}, + 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, + 'host_subscription_id': {'key': 'typeProperties.hostSubscriptionId', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'cluster_resource_group': {'key': 'typeProperties.clusterResourceGroup', 'type': 'object'}, + 'cluster_name_prefix': {'key': 'typeProperties.clusterNamePrefix', 'type': 'object'}, + 'cluster_user_name': {'key': 'typeProperties.clusterUserName', 'type': 'object'}, + 'cluster_password': {'key': 'typeProperties.clusterPassword', 'type': 'SecretBase'}, + 'cluster_ssh_user_name': {'key': 'typeProperties.clusterSshUserName', 'type': 'object'}, + 'cluster_ssh_password': {'key': 'typeProperties.clusterSshPassword', 'type': 'SecretBase'}, + 'additional_linked_service_names': {'key': 'typeProperties.additionalLinkedServiceNames', 'type': '[LinkedServiceReference]'}, + 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, + 'cluster_type': {'key': 'typeProperties.clusterType', 'type': 'object'}, + 'spark_version': {'key': 'typeProperties.sparkVersion', 'type': 'object'}, + 'core_configuration': {'key': 'typeProperties.coreConfiguration', 'type': 'object'}, + 'h_base_configuration': {'key': 'typeProperties.hBaseConfiguration', 'type': 'object'}, + 'hdfs_configuration': {'key': 
'typeProperties.hdfsConfiguration', 'type': 'object'}, + 'hive_configuration': {'key': 'typeProperties.hiveConfiguration', 'type': 'object'}, + 'map_reduce_configuration': {'key': 'typeProperties.mapReduceConfiguration', 'type': 'object'}, + 'oozie_configuration': {'key': 'typeProperties.oozieConfiguration', 'type': 'object'}, + 'storm_configuration': {'key': 'typeProperties.stormConfiguration', 'type': 'object'}, + 'yarn_configuration': {'key': 'typeProperties.yarnConfiguration', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'head_node_size': {'key': 'typeProperties.headNodeSize', 'type': 'object'}, + 'data_node_size': {'key': 'typeProperties.dataNodeSize', 'type': 'object'}, + 'zookeeper_node_size': {'key': 'typeProperties.zookeeperNodeSize', 'type': 'object'}, + 'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'}, + 'virtual_network_id': {'key': 'typeProperties.virtualNetworkId', 'type': 'object'}, + 'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'}, + } + + def __init__(self, *, cluster_size, time_to_live, version, linked_service_name, host_subscription_id, tenant, cluster_resource_group, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, cluster_name_prefix=None, cluster_user_name=None, cluster_password=None, cluster_ssh_user_name=None, cluster_ssh_password=None, additional_linked_service_names=None, hcatalog_linked_service_name=None, cluster_type=None, spark_version=None, core_configuration=None, h_base_configuration=None, hdfs_configuration=None, hive_configuration=None, map_reduce_configuration=None, oozie_configuration=None, storm_configuration=None, yarn_configuration=None, encrypted_credential=None, head_node_size=None, data_node_size=None, zookeeper_node_size=None, script_actions=None, virtual_network_id=None, subnet_name=None, **kwargs) -> None: + super(HDInsightOnDemandLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.cluster_size = cluster_size + self.time_to_live = time_to_live + self.version = version + self.linked_service_name = linked_service_name + self.host_subscription_id = host_subscription_id + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.cluster_resource_group = cluster_resource_group + self.cluster_name_prefix = cluster_name_prefix + self.cluster_user_name = cluster_user_name + self.cluster_password = cluster_password + self.cluster_ssh_user_name = cluster_ssh_user_name + self.cluster_ssh_password = cluster_ssh_password + self.additional_linked_service_names = additional_linked_service_names + self.hcatalog_linked_service_name = hcatalog_linked_service_name + self.cluster_type = cluster_type + self.spark_version = spark_version + self.core_configuration = core_configuration + self.h_base_configuration = h_base_configuration + self.hdfs_configuration = hdfs_configuration + self.hive_configuration = hive_configuration + self.map_reduce_configuration = map_reduce_configuration + self.oozie_configuration = oozie_configuration + self.storm_configuration = storm_configuration + self.yarn_configuration = yarn_configuration + self.encrypted_credential = encrypted_credential + self.head_node_size = head_node_size + 
self.data_node_size = data_node_size + self.zookeeper_node_size = zookeeper_node_size + self.script_actions = script_actions + self.virtual_network_id = virtual_network_id + self.subnet_name = subnet_name + self.type = 'HDInsightOnDemand' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity.py new file mode 100644 index 000000000000..61b939076db6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity import ExecutionActivity + + +class HDInsightPigActivity(ExecutionActivity): + """HDInsight Pig activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. + :type storage_linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: 'None', + 'Always', 'Failure' + :type get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :param script_path: Script path. Type: string (or Expression with + resultType string). + :type script_path: object + :param script_linked_service: Script linked service reference. + :type script_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param defines: Allows user to specify defines for Pig job request. 
+ :type defines: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, + 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + } + + def __init__(self, **kwargs): + super(HDInsightPigActivity, self).__init__(**kwargs) + self.storage_linked_services = kwargs.get('storage_linked_services', None) + self.arguments = kwargs.get('arguments', None) + self.get_debug_info = kwargs.get('get_debug_info', None) + self.script_path = kwargs.get('script_path', None) + self.script_linked_service = kwargs.get('script_linked_service', None) + self.defines = kwargs.get('defines', None) + self.type = 'HDInsightPig' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity_py3.py new file mode 100644 index 000000000000..fb149df91f39 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity_py3.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class HDInsightPigActivity(ExecutionActivity): + """HDInsight Pig activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. 
+ :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. + :type storage_linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: 'None', + 'Always', 'Failure' + :type get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :param script_path: Script path. Type: string (or Expression with + resultType string). + :type script_path: object + :param script_linked_service: Script linked service reference. + :type script_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param defines: Allows user to specify defines for Pig job request. + :type defines: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, + 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + } + + def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, script_path=None, script_linked_service=None, defines=None, **kwargs) -> None: + super(HDInsightPigActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.storage_linked_services = storage_linked_services + self.arguments = arguments + self.get_debug_info = get_debug_info + self.script_path = script_path + self.script_linked_service = script_linked_service + self.defines = defines + self.type = 'HDInsightPig' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity.py new file mode 100644 index 000000000000..7822344f012f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity.py @@ -0,0 +1,100 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity import ExecutionActivity + + +class HDInsightSparkActivity(ExecutionActivity): + """HDInsight Spark activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param root_path: Required. The root path in 'sparkJobLinkedService' for + all the job’s files. Type: string (or Expression with resultType string). + :type root_path: object + :param entry_file_path: Required. The relative path to the root folder of + the code/package to be executed. Type: string (or Expression with + resultType string). + :type entry_file_path: object + :param arguments: The user-specified arguments to HDInsightSparkActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: 'None', + 'Always', 'Failure' + :type get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :param spark_job_linked_service: The storage linked service for uploading + the entry file and dependencies, and for receiving logs. + :type spark_job_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param class_name: The application's Java/Spark main class. + :type class_name: str + :param proxy_user: The user to impersonate that will execute the job. + Type: string (or Expression with resultType string). + :type proxy_user: object + :param spark_config: Spark configuration property. 
+ :type spark_config: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'root_path': {'required': True}, + 'entry_file_path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'root_path': {'key': 'typeProperties.rootPath', 'type': 'object'}, + 'entry_file_path': {'key': 'typeProperties.entryFilePath', 'type': 'object'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'spark_job_linked_service': {'key': 'typeProperties.sparkJobLinkedService', 'type': 'LinkedServiceReference'}, + 'class_name': {'key': 'typeProperties.className', 'type': 'str'}, + 'proxy_user': {'key': 'typeProperties.proxyUser', 'type': 'object'}, + 'spark_config': {'key': 'typeProperties.sparkConfig', 'type': '{object}'}, + } + + def __init__(self, **kwargs): + super(HDInsightSparkActivity, self).__init__(**kwargs) + self.root_path = kwargs.get('root_path', None) + self.entry_file_path = kwargs.get('entry_file_path', None) + self.arguments = kwargs.get('arguments', None) + self.get_debug_info = kwargs.get('get_debug_info', None) + self.spark_job_linked_service = kwargs.get('spark_job_linked_service', None) + self.class_name = kwargs.get('class_name', None) + self.proxy_user = kwargs.get('proxy_user', None) + self.spark_config = kwargs.get('spark_config', None) + self.type = 'HDInsightSpark' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity_py3.py new file mode 100644 index 000000000000..3f305901abb7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity_py3.py @@ -0,0 +1,100 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class HDInsightSparkActivity(ExecutionActivity): + """HDInsight Spark activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. 
+    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param linked_service_name: Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param policy: Activity policy.
+    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+    :param root_path: Required. The root path in 'sparkJobLinkedService' for
+     all the job’s files. Type: string (or Expression with resultType
+     string).
+    :type root_path: object
+    :param entry_file_path: Required. The relative path to the root folder
+     of the code/package to be executed. Type: string (or Expression with
+     resultType string).
+    :type entry_file_path: object
+    :param arguments: The user-specified arguments to
+     HDInsightSparkActivity.
+    :type arguments: list[object]
+    :param get_debug_info: Debug info option. Possible values include:
+     'None', 'Always', 'Failure'
+    :type get_debug_info: str or
+     ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption
+    :param spark_job_linked_service: The storage linked service for
+     uploading the entry file and dependencies, and for receiving logs.
+    :type spark_job_linked_service:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param class_name: The application's Java/Spark main class.
+    :type class_name: str
+    :param proxy_user: The user to impersonate that will execute the job.
+     Type: string (or Expression with resultType string).
+    :type proxy_user: object
+    :param spark_config: Spark configuration property.
+    :type spark_config: dict[str, object]
+    """
+
+    _validation = {
+        'name': {'required': True},
+        'type': {'required': True},
+        'root_path': {'required': True},
+        'entry_file_path': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+        'root_path': {'key': 'typeProperties.rootPath', 'type': 'object'},
+        'entry_file_path': {'key': 'typeProperties.entryFilePath', 'type': 'object'},
+        'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'},
+        'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'},
+        'spark_job_linked_service': {'key': 'typeProperties.sparkJobLinkedService', 'type': 'LinkedServiceReference'},
+        'class_name': {'key': 'typeProperties.className', 'type': 'str'},
+        'proxy_user': {'key': 'typeProperties.proxyUser', 'type': 'object'},
+        'spark_config': {'key': 'typeProperties.sparkConfig', 'type': '{object}'},
+    }
+
+    def __init__(self, *, name: str, root_path, entry_file_path, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, arguments=None, get_debug_info=None, spark_job_linked_service=None, class_name: str=None, proxy_user=None, spark_config=None, **kwargs) -> None:
+        super(HDInsightSparkActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
+        self.root_path = root_path
+        self.entry_file_path = entry_file_path
+        self.arguments = arguments
+        self.get_debug_info = get_debug_info
+        self.spark_job_linked_service = spark_job_linked_service
+        self.class_name = class_name
+        self.proxy_user = proxy_user
+        self.spark_config = spark_config
+        self.type = 'HDInsightSpark'
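[Editor's note] A minimal construction of the Spark activity above. The container name, script path, and linked service reference names are illustrative placeholders; root_path and entry_file_path follow the relationship the docstring describes (entry file resolved relative to the root path inside sparkJobLinkedService):

    from azure.mgmt.datafactory.models import (
        HDInsightSparkActivity,
        LinkedServiceReference,
    )

    spark_activity = HDInsightSparkActivity(
        name='SparkWordCount',
        root_path='adfspark',                    # root in spark_job_linked_service
        entry_file_path='scripts/wordcount.py',  # relative to root_path
        linked_service_name=LinkedServiceReference(
            reference_name='HDInsightLinkedService'),
        spark_job_linked_service=LinkedServiceReference(
            reference_name='AzureBlobStorageLinkedService'),
        get_debug_info='Failure',                # fetch logs only on failure
        arguments=['<input-path>'],
    )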
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity.py
new file mode 100644
index 000000000000..42146a5d6cc6
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity.py
@@ -0,0 +1,122 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .execution_activity import ExecutionActivity
+
+
+class HDInsightStreamingActivity(ExecutionActivity):
+    """HDInsight streaming activity type.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param name: Required. Activity name.
+    :type name: str
+    :param description: Activity description.
+    :type description: str
+    :param depends_on: Activity depends on condition.
+    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+    :param user_properties: Activity user properties.
+    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param linked_service_name: Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param policy: Activity policy.
+    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+    :param storage_linked_services: Storage linked service references.
+    :type storage_linked_services:
+     list[~azure.mgmt.datafactory.models.LinkedServiceReference]
+    :param arguments: User specified arguments to HDInsightActivity.
+    :type arguments: list[object]
+    :param get_debug_info: Debug info option. Possible values include:
+     'None', 'Always', 'Failure'
+    :type get_debug_info: str or
+     ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption
+    :param mapper: Required. Mapper executable name. Type: string (or
+     Expression with resultType string).
+    :type mapper: object
+    :param reducer: Required. Reducer executable name. Type: string (or
+     Expression with resultType string).
+    :type reducer: object
+    :param input: Required. Input blob path. Type: string (or Expression
+     with resultType string).
+    :type input: object
+    :param output: Required. Output blob path. Type: string (or Expression
+     with resultType string).
+    :type output: object
+    :param file_paths: Required. Paths to streaming job files. Can be
+     directories.
+    :type file_paths: list[object]
+    :param file_linked_service: Linked service reference where the files are
+     located.
+    :type file_linked_service:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param combiner: Combiner executable name.
Type: string (or Expression + with resultType string). + :type combiner: object + :param command_environment: Command line environment values. + :type command_environment: list[object] + :param defines: Allows user to specify defines for streaming job request. + :type defines: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'mapper': {'required': True}, + 'reducer': {'required': True}, + 'input': {'required': True}, + 'output': {'required': True}, + 'file_paths': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'mapper': {'key': 'typeProperties.mapper', 'type': 'object'}, + 'reducer': {'key': 'typeProperties.reducer', 'type': 'object'}, + 'input': {'key': 'typeProperties.input', 'type': 'object'}, + 'output': {'key': 'typeProperties.output', 'type': 'object'}, + 'file_paths': {'key': 'typeProperties.filePaths', 'type': '[object]'}, + 'file_linked_service': {'key': 'typeProperties.fileLinkedService', 'type': 'LinkedServiceReference'}, + 'combiner': {'key': 'typeProperties.combiner', 'type': 'object'}, + 'command_environment': {'key': 'typeProperties.commandEnvironment', 'type': '[object]'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + } + + def __init__(self, **kwargs): + super(HDInsightStreamingActivity, self).__init__(**kwargs) + self.storage_linked_services = kwargs.get('storage_linked_services', None) + self.arguments = kwargs.get('arguments', None) + self.get_debug_info = kwargs.get('get_debug_info', None) + self.mapper = kwargs.get('mapper', None) + self.reducer = kwargs.get('reducer', None) + self.input = kwargs.get('input', None) + self.output = kwargs.get('output', None) + self.file_paths = kwargs.get('file_paths', None) + self.file_linked_service = kwargs.get('file_linked_service', None) + self.combiner = kwargs.get('combiner', None) + self.command_environment = kwargs.get('command_environment', None) + self.defines = kwargs.get('defines', None) + self.type = 'HDInsightStreaming' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity_py3.py new file mode 100644 index 000000000000..2f5a301ff880 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity_py3.py @@ -0,0 +1,122 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class HDInsightStreamingActivity(ExecutionActivity): + """HDInsight streaming activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. + :type storage_linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: 'None', + 'Always', 'Failure' + :type get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :param mapper: Required. Mapper executable name. Type: string (or + Expression with resultType string). + :type mapper: object + :param reducer: Required. Reducer executable name. Type: string (or + Expression with resultType string). + :type reducer: object + :param input: Required. Input blob path. Type: string (or Expression with + resultType string). + :type input: object + :param output: Required. Output blob path. Type: string (or Expression + with resultType string). + :type output: object + :param file_paths: Required. Paths to streaming job files. Can be + directories. + :type file_paths: list[object] + :param file_linked_service: Linked service reference where the files are + located. + :type file_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param combiner: Combiner executable name. Type: string (or Expression + with resultType string). + :type combiner: object + :param command_environment: Command line environment values. + :type command_environment: list[object] + :param defines: Allows user to specify defines for streaming job request. 
+ :type defines: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'mapper': {'required': True}, + 'reducer': {'required': True}, + 'input': {'required': True}, + 'output': {'required': True}, + 'file_paths': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'mapper': {'key': 'typeProperties.mapper', 'type': 'object'}, + 'reducer': {'key': 'typeProperties.reducer', 'type': 'object'}, + 'input': {'key': 'typeProperties.input', 'type': 'object'}, + 'output': {'key': 'typeProperties.output', 'type': 'object'}, + 'file_paths': {'key': 'typeProperties.filePaths', 'type': '[object]'}, + 'file_linked_service': {'key': 'typeProperties.fileLinkedService', 'type': 'LinkedServiceReference'}, + 'combiner': {'key': 'typeProperties.combiner', 'type': 'object'}, + 'command_environment': {'key': 'typeProperties.commandEnvironment', 'type': '[object]'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + } + + def __init__(self, *, name: str, mapper, reducer, input, output, file_paths, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, file_linked_service=None, combiner=None, command_environment=None, defines=None, **kwargs) -> None: + super(HDInsightStreamingActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.storage_linked_services = storage_linked_services + self.arguments = arguments + self.get_debug_info = get_debug_info + self.mapper = mapper + self.reducer = reducer + self.input = input + self.output = output + self.file_paths = file_paths + self.file_linked_service = file_linked_service + self.combiner = combiner + self.command_environment = command_environment + self.defines = defines + self.type = 'HDInsightStreaming' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service.py new file mode 100644 index 000000000000..b527f05a7e2f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service.py @@ -0,0 +1,81 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
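A minimal construction sketch for the HDInsightStreamingActivity model defined in the hunk above, using the keyword-only py3 signature; the activity name, executables, storage paths and the 'MyStorageLinkedService' reference are hypothetical placeholders:

from azure.mgmt.datafactory.models import (
    HDInsightStreamingActivity, LinkedServiceReference)

streaming = HDInsightStreamingActivity(
    name='StreamingJob',          # required
    mapper='cat.exe',             # required; Type: string (or Expression)
    reducer='wc.exe',             # required
    input='wasb://adfsample@account.blob.core.windows.net/example/davinci.txt',
    output='wasb://adfsample@account.blob.core.windows.net/example/wc.txt',
    file_paths=['adfsample/example/apps/wc.exe',
                'adfsample/example/apps/cat.exe'],
    file_linked_service=LinkedServiceReference(
        reference_name='MyStorageLinkedService'),
    defines={'mapred.reduce.tasks': '1'})
# The constructor pins the discriminator: streaming.type == 'HDInsightStreaming'.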
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class HdfsLinkedService(LinkedService): + """Hadoop Distributed File System (HDFS) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The URL of the HDFS service endpoint, e.g. + http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with + resultType string). + :type url: object + :param authentication_type: Type of authentication used to connect to the + HDFS. Possible values are: Anonymous and Windows. Type: string (or + Expression with resultType string). + :type authentication_type: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + :param user_name: User name for Windows authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Windows authentication. 
+ :type password: ~azure.mgmt.datafactory.models.SecretBase + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + } + + def __init__(self, **kwargs): + super(HdfsLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.type = 'Hdfs' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service_py3.py new file mode 100644 index 000000000000..e004701e1da0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service_py3.py @@ -0,0 +1,81 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class HdfsLinkedService(LinkedService): + """Hadoop Distributed File System (HDFS) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The URL of the HDFS service endpoint, e.g. + http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with + resultType string). + :type url: object + :param authentication_type: Type of authentication used to connect to the + HDFS. Possible values are: Anonymous and Windows. Type: string (or + Expression with resultType string). 
+ :type authentication_type: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + :param user_name: User name for Windows authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Windows authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + } + + def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, encrypted_credential=None, user_name=None, password=None, **kwargs) -> None: + super(HdfsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.authentication_type = authentication_type + self.encrypted_credential = encrypted_credential + self.user_name = user_name + self.password = password + self.type = 'Hdfs' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location.py new file mode 100644 index 000000000000..a8f5d1ba332c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class HdfsLocation(DatasetLocation): + """The location of HDFS. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. 
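A usage sketch for the HdfsLinkedService model above, assuming Windows authentication; the endpoint and credentials are placeholders, and SecureString is used because the password field accepts any SecretBase:

from azure.mgmt.datafactory.models import HdfsLinkedService, SecureString

hdfs_ls = HdfsLinkedService(
    url='http://myhostname:50070/webhdfs/v1',  # required WebHDFS endpoint
    authentication_type='Windows',             # 'Anonymous' or 'Windows'
    user_name='exampleuser',
    password=SecureString(value='<placeholder>'))
# hdfs_ls.type is set to 'Hdfs' by the constructor.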
Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HdfsLocation, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location_py3.py new file mode 100644 index 000000000000..2e07575bef0f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class HdfsLocation(DatasetLocation): + """The location of HDFS. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(HdfsLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings.py new file mode 100644 index 000000000000..ec4b98c50385 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
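A sketch for the HdfsLocation model above; because DatasetLocation's 'type' is a required plain string in this generation, the discriminator value is passed explicitly (the string 'HdfsLocation' is an assumption about the service-side name), and the folder and file values are placeholders:

from azure.mgmt.datafactory.models import HdfsLocation

location = HdfsLocation(
    type='HdfsLocation',          # required; assumed discriminator value
    folder_path='input/logs',     # placeholder folder
    file_name='2019-06-07.log')   # placeholder file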
+# -------------------------------------------------------------------------- + +from .store_read_settings import StoreReadSettings + + +class HdfsReadSettings(StoreReadSettings): + """HDFS read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: HDFS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + :param distcp_settings: Specifies Distcp-related settings. + :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, + } + + def __init__(self, **kwargs): + super(HdfsReadSettings, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + self.distcp_settings = kwargs.get('distcp_settings', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings_py3.py new file mode 100644 index 000000000000..c37a045ec93c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings_py3 import StoreReadSettings + + +class HdfsReadSettings(StoreReadSettings): + """HDFS read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: HDFS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + :param distcp_settings: Specifies Distcp-related settings. 
+ :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, distcp_settings=None, **kwargs) -> None: + super(HdfsReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + self.distcp_settings = distcp_settings diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source.py new file mode 100644 index 000000000000..be50590f6c32 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source.py @@ -0,0 +1,62 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class HdfsSource(CopySource): + """A copy activity HDFS source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
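A sketch for the HdfsReadSettings model above, combining wildcard matching with DistCp staging; the DistcpSettings signature (resource_manager_endpoint and temp_script_path as required keywords) is assumed from the referenced model, and all values are placeholders:

from azure.mgmt.datafactory.models import DistcpSettings, HdfsReadSettings

read_settings = HdfsReadSettings(
    type='HdfsReadSettings',      # required; assumed discriminator value
    recursive=True,
    wildcard_file_name='*.csv',
    enable_partition_discovery=False,
    distcp_settings=DistcpSettings(
        resource_manager_endpoint='http://myhostname:8088',  # placeholder
        temp_script_path='/tmp/distcp-scripts'))             # placeholder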
+ :type type: str + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param distcp_settings: Specifies Distcp-related settings. + :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, + } + + def __init__(self, **kwargs): + super(HdfsSource, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.distcp_settings = kwargs.get('distcp_settings', None) + self.type = 'HdfsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source_py3.py new file mode 100644 index 000000000000..3c60cab46289 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source_py3.py @@ -0,0 +1,62 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class HdfsSource(CopySource): + """A copy activity HDFS source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param distcp_settings: Specifies Distcp-related settings. 
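A sketch for the HdfsSource model above; only optional knobs are passed, and the discriminator is fixed by the class itself:

from azure.mgmt.datafactory.models import HdfsSource

source = HdfsSource(
    recursive=True,                 # read the folder tree recursively
    max_concurrent_connections=4)   # Type: integer (or Expression)
# source.type == 'HdfsSource'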
+ :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, distcp_settings=None, **kwargs) -> None: + super(HdfsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.distcp_settings = distcp_settings + self.type = 'HdfsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service.py new file mode 100644 index 000000000000..c54c1393d56e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service.py @@ -0,0 +1,147 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class HiveLinkedService(LinkedService): + """Hive Server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. IP address or host name of the Hive server, + separated by ';' for multiple hosts (only when serviceDiscoveryMode is + enable). + :type host: object + :param port: The TCP port that the Hive server uses to listen for client + connections. + :type port: object + :param server_type: The type of Hive server. Possible values include: + 'HiveServer1', 'HiveServer2', 'HiveThriftServer' + :type server_type: str or ~azure.mgmt.datafactory.models.HiveServerType + :param thrift_transport_protocol: The transport protocol to use in the + Thrift layer. 
Possible values include: 'Binary', 'SASL', 'HTTP ' + :type thrift_transport_protocol: str or + ~azure.mgmt.datafactory.models.HiveThriftTransportProtocol + :param authentication_type: Required. The authentication method used to + access the Hive server. Possible values include: 'Anonymous', 'Username', + 'UsernameAndPassword', 'WindowsAzureHDInsightService' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.HiveAuthenticationType + :param service_discovery_mode: true to indicate using the ZooKeeper + service, false if not. + :type service_discovery_mode: object + :param zoo_keeper_name_space: The namespace on ZooKeeper under which Hive + Server 2 nodes are added. + :type zoo_keeper_name_space: object + :param use_native_query: Specifies whether the driver uses native HiveQL + queries, or converts them into an equivalent form in HiveQL. + :type use_native_query: object + :param username: The user name that you use to access Hive Server. + :type username: object + :param password: The password corresponding to the user name that you + provided in the Username field. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param http_path: The partial URL corresponding to the Hive server. + :type http_path: object + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a + CA-issued SSL certificate name to match the host name of the server when + connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, + 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'service_discovery_mode': {'key': 'typeProperties.serviceDiscoveryMode', 'type': 'object'}, + 'zoo_keeper_name_space': {'key': 'typeProperties.zooKeeperNameSpace', 'type': 'object'}, + 'use_native_query': {'key': 'typeProperties.useNativeQuery', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HiveLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.port = kwargs.get('port', None) + self.server_type = kwargs.get('server_type', None) + self.thrift_transport_protocol = kwargs.get('thrift_transport_protocol', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.service_discovery_mode = kwargs.get('service_discovery_mode', None) + self.zoo_keeper_name_space = kwargs.get('zoo_keeper_name_space', None) + self.use_native_query = kwargs.get('use_native_query', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.http_path = kwargs.get('http_path', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) + self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Hive' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service_py3.py new file mode 100644 index 000000000000..611d30ecb781 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service_py3.py @@ -0,0 +1,147 @@ +# 
coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class HiveLinkedService(LinkedService): + """Hive Server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. IP address or host name of the Hive server, + separated by ';' for multiple hosts (only when serviceDiscoveryMode is + enable). + :type host: object + :param port: The TCP port that the Hive server uses to listen for client + connections. + :type port: object + :param server_type: The type of Hive server. Possible values include: + 'HiveServer1', 'HiveServer2', 'HiveThriftServer' + :type server_type: str or ~azure.mgmt.datafactory.models.HiveServerType + :param thrift_transport_protocol: The transport protocol to use in the + Thrift layer. Possible values include: 'Binary', 'SASL', 'HTTP ' + :type thrift_transport_protocol: str or + ~azure.mgmt.datafactory.models.HiveThriftTransportProtocol + :param authentication_type: Required. The authentication method used to + access the Hive server. Possible values include: 'Anonymous', 'Username', + 'UsernameAndPassword', 'WindowsAzureHDInsightService' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.HiveAuthenticationType + :param service_discovery_mode: true to indicate using the ZooKeeper + service, false if not. + :type service_discovery_mode: object + :param zoo_keeper_name_space: The namespace on ZooKeeper under which Hive + Server 2 nodes are added. + :type zoo_keeper_name_space: object + :param use_native_query: Specifies whether the driver uses native HiveQL + queries, or converts them into an equivalent form in HiveQL. + :type use_native_query: object + :param username: The user name that you use to access Hive Server. + :type username: object + :param password: The password corresponding to the user name that you + provided in the Username field. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param http_path: The partial URL corresponding to the Hive server. + :type http_path: object + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL.
+ This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a + CA-issued SSL certificate name to match the host name of the server when + connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, + 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'service_discovery_mode': {'key': 'typeProperties.serviceDiscoveryMode', 'type': 'object'}, + 'zoo_keeper_name_space': {'key': 'typeProperties.zooKeeperNameSpace', 'type': 'object'}, + 'use_native_query': {'key': 'typeProperties.useNativeQuery', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, server_type=None, thrift_transport_protocol=None, service_discovery_mode=None, zoo_keeper_name_space=None, use_native_query=None, username=None, password=None, http_path=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: + super(HiveLinkedService, 
self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.port = port + self.server_type = server_type + self.thrift_transport_protocol = thrift_transport_protocol + self.authentication_type = authentication_type + self.service_discovery_mode = service_discovery_mode + self.zoo_keeper_name_space = zoo_keeper_name_space + self.use_native_query = use_native_query + self.username = username + self.password = password + self.http_path = http_path + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.encrypted_credential = encrypted_credential + self.type = 'Hive' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset.py new file mode 100644 index 000000000000..07b6f2b54901 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class HiveObjectDataset(Dataset): + """Hive Server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The table name of the Hive. Type: string (or Expression with + resultType string). + :type table: object + :param hive_object_dataset_schema: The schema name of the Hive. 
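A sketch for the HiveLinkedService model above, using username/password authentication over SSL via the py3 keyword-only signature; host, port and credentials are placeholders:

from azure.mgmt.datafactory.models import HiveLinkedService, SecureString

hive_ls = HiveLinkedService(
    host='hiveserver.example.com',               # required
    authentication_type='UsernameAndPassword',   # required
    port=10000,
    username='exampleuser',
    password=SecureString(value='<placeholder>'),
    enable_ssl=True)
# hive_ls.type is set to 'Hive' by the constructor.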
Type: + string (or Expression with resultType string). + :type hive_object_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'hive_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HiveObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.hive_object_dataset_schema = kwargs.get('hive_object_dataset_schema', None) + self.type = 'HiveObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset_py3.py new file mode 100644 index 000000000000..69384bdfa99a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class HiveObjectDataset(Dataset): + """Hive Server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. 
+ :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The table name of the Hive. Type: string (or Expression with + resultType string). + :type table: object + :param hive_object_dataset_schema: The schema name of the Hive. Type: + string (or Expression with resultType string). + :type hive_object_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'hive_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, hive_object_dataset_schema=None, **kwargs) -> None: + super(HiveObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.table = table + self.hive_object_dataset_schema = hive_object_dataset_schema + self.type = 'HiveObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source.py new file mode 100644 index 000000000000..3af88c3280e3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class HiveSource(CopySource): + """A copy activity Hive Server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
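A sketch for the HiveObjectDataset model above, addressing a table through the newer schema + table properties instead of the retiring table_name; the linked-service name and table identifiers are placeholders:

from azure.mgmt.datafactory.models import (
    HiveObjectDataset, LinkedServiceReference)

hive_ds = HiveObjectDataset(
    linked_service_name=LinkedServiceReference(
        reference_name='MyHiveLinkedService'),   # required
    hive_object_dataset_schema='default',        # maps to typeProperties.schema
    table='events')                              # maps to typeProperties.table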
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HiveSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'HiveSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source_py3.py new file mode 100644 index 000000000000..6c09191b8c1b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class HiveSource(CopySource): + """A copy activity Hive Server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'HiveSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset.py new file mode 100644 index 000000000000..f2184dea151f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class HttpDataset(Dataset): + """A file in an HTTP web server. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param relative_url: The relative URL based on the URL in the + HttpLinkedService refers to an HTTP file Type: string (or Expression with + resultType string). + :type relative_url: object + :param request_method: The HTTP method for the HTTP request. Type: string + (or Expression with resultType string). 
+ :type request_method: object + :param request_body: The body for the HTTP request. Type: string (or + Expression with resultType string). + :type request_body: object + :param additional_headers: The headers for the HTTP Request. e.g. + request-header-name-1:request-header-value-1 + ... + request-header-name-n:request-header-value-n Type: string (or Expression + with resultType string). + :type additional_headers: object + :param format: The format of files. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param compression: The data compression method used on files. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, + 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, + 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, **kwargs): + super(HttpDataset, self).__init__(**kwargs) + self.relative_url = kwargs.get('relative_url', None) + self.request_method = kwargs.get('request_method', None) + self.request_body = kwargs.get('request_body', None) + self.additional_headers = kwargs.get('additional_headers', None) + self.format = kwargs.get('format', None) + self.compression = kwargs.get('compression', None) + self.type = 'HttpFile' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset_py3.py new file mode 100644 index 000000000000..09f97a03a95d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset_py3.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class HttpDataset(Dataset): + """A file in an HTTP web server. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. 
+ :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param relative_url: The relative URL based on the URL in the + HttpLinkedService refers to an HTTP file Type: string (or Expression with + resultType string). + :type relative_url: object + :param request_method: The HTTP method for the HTTP request. Type: string + (or Expression with resultType string). + :type request_method: object + :param request_body: The body for the HTTP request. Type: string (or + Expression with resultType string). + :type request_body: object + :param additional_headers: The headers for the HTTP Request. e.g. + request-header-name-1:request-header-value-1 + ... + request-header-name-n:request-header-value-n Type: string (or Expression + with resultType string). + :type additional_headers: object + :param format: The format of files. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param compression: The data compression method used on files. 
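A minimal HttpDataset sketch; the linked service name, relative URL, and header value are illustrative:

    from azure.mgmt.datafactory.models import HttpDataset, LinkedServiceReference

    http_dataset = HttpDataset(
        linked_service_name=LinkedServiceReference(reference_name='MyHttpLinkedService'),
        relative_url='data/2019/report.csv',
        request_method='GET',
        additional_headers='Accept:text/csv')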
+ :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, + 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, + 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, relative_url=None, request_method=None, request_body=None, additional_headers=None, format=None, compression=None, **kwargs) -> None: + super(HttpDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.relative_url = relative_url + self.request_method = request_method + self.request_body = request_body + self.additional_headers = additional_headers + self.format = format + self.compression = compression + self.type = 'HttpFile' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service.py new file mode 100644 index 000000000000..6232bc45fee4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service.py @@ -0,0 +1,105 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class HttpLinkedService(LinkedService): + """Linked service for an HTTP source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
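The typeProperties.* keys in the _attribute_map above drive msrest's property flattening on the wire. Continuing the HttpDataset sketch with the generic msrest serialize() helper; the payload shape in the comment is an assumption for illustration:

    body = http_dataset.serialize()
    # Roughly:
    # {'type': 'HttpFile',
    #  'linkedServiceName': {'referenceName': 'MyHttpLinkedService',
    #                        'type': 'LinkedServiceReference'},
    #  'typeProperties': {'relativeUrl': 'data/2019/report.csv',
    #                     'requestMethod': 'GET',
    #                     'additionalHeaders': 'Accept:text/csv'}}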
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The base URL of the HTTP endpoint, e.g. + http://www.microsoft.com. Type: string (or Expression with resultType + string). + :type url: object + :param authentication_type: The authentication type to be used to connect + to the HTTP server. Possible values include: 'Basic', 'Anonymous', + 'Digest', 'Windows', 'ClientCertificate' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.HttpAuthenticationType + :param user_name: User name for Basic, Digest, or Windows authentication. + Type: string (or Expression with resultType string). + :type user_name: object + :param password: Password for Basic, Digest, Windows, or ClientCertificate + with EmbeddedCertData authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param embedded_cert_data: Base64 encoded certificate data for + ClientCertificate authentication. For on-premises copy with + ClientCertificate authentication, either CertThumbprint or + EmbeddedCertData/Password should be specified. Type: string (or Expression + with resultType string). + :type embedded_cert_data: object + :param cert_thumbprint: Thumbprint of certificate for ClientCertificate + authentication. Only valid for on-premises copy. For on-premises copy with + ClientCertificate authentication, either CertThumbprint or + EmbeddedCertData/Password should be specified. Type: string (or Expression + with resultType string). + :type cert_thumbprint: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + :param enable_server_certificate_validation: If true, validate the HTTPS + server SSL certificate. Default value is true. Type: boolean (or + Expression with resultType boolean). 
+ :type enable_server_certificate_validation: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'embedded_cert_data': {'key': 'typeProperties.embeddedCertData', 'type': 'object'}, + 'cert_thumbprint': {'key': 'typeProperties.certThumbprint', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HttpLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.embedded_cert_data = kwargs.get('embedded_cert_data', None) + self.cert_thumbprint = kwargs.get('cert_thumbprint', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None) + self.type = 'HttpServer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service_py3.py new file mode 100644 index 000000000000..7f70adb08425 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service_py3.py @@ -0,0 +1,105 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class HttpLinkedService(LinkedService): + """Linked service for an HTTP source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. 
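A minimal HttpLinkedService sketch; the endpoint and credentials are placeholders, and SecureString is the inline SecretBase implementation from this package:

    from azure.mgmt.datafactory.models import HttpLinkedService, SecureString

    http_ls = HttpLinkedService(
        url='https://downloads.example.com',
        authentication_type='Basic',
        user_name='svc-reader',
        password=SecureString(value='<placeholder>'),
        enable_server_certificate_validation=True)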
+ :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The base URL of the HTTP endpoint, e.g. + http://www.microsoft.com. Type: string (or Expression with resultType + string). + :type url: object + :param authentication_type: The authentication type to be used to connect + to the HTTP server. Possible values include: 'Basic', 'Anonymous', + 'Digest', 'Windows', 'ClientCertificate' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.HttpAuthenticationType + :param user_name: User name for Basic, Digest, or Windows authentication. + Type: string (or Expression with resultType string). + :type user_name: object + :param password: Password for Basic, Digest, Windows, or ClientCertificate + with EmbeddedCertData authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param embedded_cert_data: Base64 encoded certificate data for + ClientCertificate authentication. For on-premises copy with + ClientCertificate authentication, either CertThumbprint or + EmbeddedCertData/Password should be specified. Type: string (or Expression + with resultType string). + :type embedded_cert_data: object + :param cert_thumbprint: Thumbprint of certificate for ClientCertificate + authentication. Only valid for on-premises copy. For on-premises copy with + ClientCertificate authentication, either CertThumbprint or + EmbeddedCertData/Password should be specified. Type: string (or Expression + with resultType string). + :type cert_thumbprint: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + :param enable_server_certificate_validation: If true, validate the HTTPS + server SSL certificate. Default value is true. Type: boolean (or + Expression with resultType boolean). 
+ :type enable_server_certificate_validation: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'url': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'url': {'key': 'typeProperties.url', 'type': 'object'},
+ 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+ 'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
+ 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+ 'embedded_cert_data': {'key': 'typeProperties.embeddedCertData', 'type': 'object'},
+ 'cert_thumbprint': {'key': 'typeProperties.certThumbprint', 'type': 'object'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'},
+ }
+
+ def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, user_name=None, password=None, embedded_cert_data=None, cert_thumbprint=None, encrypted_credential=None, enable_server_certificate_validation=None, **kwargs) -> None:
+ super(HttpLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+ self.url = url
+ self.authentication_type = authentication_type
+ self.user_name = user_name
+ self.password = password
+ self.embedded_cert_data = embedded_cert_data
+ self.cert_thumbprint = cert_thumbprint
+ self.encrypted_credential = encrypted_credential
+ self.enable_server_certificate_validation = enable_server_certificate_validation
+ self.type = 'HttpServer'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings.py
new file mode 100644
index 000000000000..a7c175da3489
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings.py
@@ -0,0 +1,63 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .store_read_settings import StoreReadSettings
+
+
+class HttpReadSettings(StoreReadSettings):
+ """Http read settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. The read setting type.
+ :type type: str
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param request_method: The HTTP method used to call the RESTful API. The
+ default is GET. Type: string (or Expression with resultType string).
+ :type request_method: object
+ :param request_body: The HTTP request body to the RESTful API if
+ requestMethod is POST. Type: string (or Expression with resultType
+ string).
+ :type request_body: object
+ :param additional_headers: The additional HTTP headers in the request to
+ the RESTful API. Type: string (or Expression with resultType string).
+ :type additional_headers: object
+ :param request_timeout: Specifies the timeout for an HTTP client to get
+ an HTTP response from the HTTP server.
+ :type request_timeout: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'request_method': {'key': 'requestMethod', 'type': 'object'},
+ 'request_body': {'key': 'requestBody', 'type': 'object'},
+ 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'},
+ 'request_timeout': {'key': 'requestTimeout', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(HttpReadSettings, self).__init__(**kwargs)
+ self.request_method = kwargs.get('request_method', None)
+ self.request_body = kwargs.get('request_body', None)
+ self.additional_headers = kwargs.get('additional_headers', None)
+ self.request_timeout = kwargs.get('request_timeout', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings_py3.py
new file mode 100644
index 000000000000..7cea9207c996
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings_py3.py
@@ -0,0 +1,63 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .store_read_settings_py3 import StoreReadSettings
+
+
+class HttpReadSettings(StoreReadSettings):
+ """Http read settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. The read setting type.
+ :type type: str
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param request_method: The HTTP method used to call the RESTful API. The
+ default is GET. Type: string (or Expression with resultType string).
+ :type request_method: object
+ :param request_body: The HTTP request body to the RESTful API if
+ requestMethod is POST. Type: string (or Expression with resultType
+ string).
+ :type request_body: object
+ :param additional_headers: The additional HTTP headers in the request to
+ the RESTful API. Type: string (or Expression with resultType string).
+ :type additional_headers: object
+ :param request_timeout: Specifies the timeout for an HTTP client to get
+ an HTTP response from the HTTP server.
+ :type request_timeout: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'request_method': {'key': 'requestMethod', 'type': 'object'},
+ 'request_body': {'key': 'requestBody', 'type': 'object'},
+ 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'},
+ 'request_timeout': {'key': 'requestTimeout', 'type': 'object'},
+ }
+
+ def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, request_method=None, request_body=None, additional_headers=None, request_timeout=None, **kwargs) -> None:
+ super(HttpReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.request_method = request_method
+ self.request_body = request_body
+ self.additional_headers = additional_headers
+ self.request_timeout = request_timeout
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location.py
new file mode 100644
index 000000000000..94106fae9d15
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location.py
@@ -0,0 +1,50 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_location import DatasetLocation
+
+
+class HttpServerLocation(DatasetLocation):
+ """The location of http server.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. Type of dataset storage location.
+ :type type: str
+ :param folder_path: Specify the folder path of dataset. Type: string (or
+ Expression with resultType string)
+ :type folder_path: object
+ :param file_name: Specify the file name of dataset. Type: string (or
+ Expression with resultType string).
+ :type file_name: object
+ :param relative_url: Specify the relativeUrl of http server.
Type: string + (or Expression with resultType string) + :type relative_url: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'relative_url': {'key': 'relativeUrl', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HttpServerLocation, self).__init__(**kwargs) + self.relative_url = kwargs.get('relative_url', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location_py3.py new file mode 100644 index 000000000000..c52c53dcf357 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location_py3.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class HttpServerLocation(DatasetLocation): + """The location of http server. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param relative_url: Specify the relativeUrl of http server. Type: string + (or Expression with resultType string) + :type relative_url: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'relative_url': {'key': 'relativeUrl', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, relative_url=None, **kwargs) -> None: + super(HttpServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) + self.relative_url = relative_url diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source.py new file mode 100644 index 000000000000..ae131aa16c8c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class HttpSource(CopySource): + """A copy activity source for an HTTP file. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param http_request_timeout: Specifies the timeout for a HTTP client to + get HTTP response from HTTP server. The default value is equivalent to + System.Net.HttpWebRequest.Timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HttpSource, self).__init__(**kwargs) + self.http_request_timeout = kwargs.get('http_request_timeout', None) + self.type = 'HttpSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source_py3.py new file mode 100644 index 000000000000..df339fc3aef7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source_py3.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class HttpSource(CopySource): + """A copy activity source for an HTTP file. + + All required parameters must be populated in order to send to Azure. 
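The HTTP read path composes a server location, read settings, and a copy source. A sketch with illustrative values; note that HttpServerLocation and HttpReadSettings take an explicit type string rather than a server-filled constant, and the values used for it here are assumptions:

    from azure.mgmt.datafactory.models import (
        HttpReadSettings, HttpServerLocation, HttpSource)

    location = HttpServerLocation(
        type='HttpServerLocation',  # assumed type string
        relative_url='api/v1/export')
    read_settings = HttpReadSettings(
        type='HttpReadSettings',    # assumed type string
        request_method='POST',
        request_body='{"from": "2019-01-01"}',
        request_timeout='00:02:00')
    http_source = HttpSource(http_request_timeout='00:05:00')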
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param http_request_timeout: Specifies the timeout for a HTTP client to + get HTTP response from HTTP server. The default value is equivalent to + System.Net.HttpWebRequest.Timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, http_request_timeout=None, **kwargs) -> None: + super(HttpSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.http_request_timeout = http_request_timeout + self.type = 'HttpSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service.py new file mode 100644 index 000000000000..3d0d6cb3a6f4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service.py @@ -0,0 +1,96 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class HubspotLinkedService(LinkedService): + """Hubspot Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. 
+ :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param client_id: Required. The client ID associated with your Hubspot + application. + :type client_id: object + :param client_secret: The client secret associated with your Hubspot + application. + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param access_token: The access token obtained when initially + authenticating your OAuth integration. + :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param refresh_token: The refresh token obtained when initially + authenticating your OAuth integration. + :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HubspotLinkedService, self).__init__(**kwargs) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.access_token = kwargs.get('access_token', None) + self.refresh_token = kwargs.get('refresh_token', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = 
kwargs.get('encrypted_credential', None) + self.type = 'Hubspot' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service_py3.py new file mode 100644 index 000000000000..272d613e9cd1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service_py3.py @@ -0,0 +1,96 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class HubspotLinkedService(LinkedService): + """Hubspot Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param client_id: Required. The client ID associated with your Hubspot + application. + :type client_id: object + :param client_secret: The client secret associated with your Hubspot + application. + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param access_token: The access token obtained when initially + authenticating your OAuth integration. + :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param refresh_token: The refresh token obtained when initially + authenticating your OAuth integration. + :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
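A minimal HubspotLinkedService sketch; every credential value is a placeholder wrapped in SecureString:

    from azure.mgmt.datafactory.models import HubspotLinkedService, SecureString

    hubspot_ls = HubspotLinkedService(
        client_id='<app-client-id>',
        client_secret=SecureString(value='<app-client-secret>'),
        access_token=SecureString(value='<oauth-access-token>'),
        refresh_token=SecureString(value='<oauth-refresh-token>'),
        use_encrypted_endpoints=True)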
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, access_token=None, refresh_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(HubspotLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.client_id = client_id + self.client_secret = client_secret + self.access_token = access_token + self.refresh_token = refresh_token + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Hubspot' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset.py new file mode 100644 index 000000000000..ce8994b4db4a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class HubspotObjectDataset(Dataset): + """Hubspot Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. 
+ :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HubspotObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'HubspotObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset_py3.py new file mode 100644 index 000000000000..bd2309101f72 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class HubspotObjectDataset(Dataset): + """Hubspot Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. 
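A minimal HubspotObjectDataset sketch; the linked service name and table are illustrative:

    from azure.mgmt.datafactory.models import HubspotObjectDataset, LinkedServiceReference

    hubspot_dataset = HubspotObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name='MyHubspotLinkedService'),
        table_name='Deals')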
+ :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(HubspotObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'HubspotObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source.py new file mode 100644 index 000000000000..b4b4c618c33e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class HubspotSource(CopySource): + """A copy activity Hubspot Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. 
Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HubspotSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'HubspotSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source_py3.py new file mode 100644 index 000000000000..a29811342ce0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class HubspotSource(CopySource): + """A copy activity Hubspot Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
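
HubspotSource adds a single optional `query` on top of the shared CopySource fields. A minimal sketch (the query text is illustrative):

.. code-block:: python

    from azure.mgmt.datafactory.models import HubspotSource

    # Per the attribute map, `query` is serialized under the top-level
    # 'query' key rather than under typeProperties.
    source = HubspotSource(
        query='SELECT * FROM Companies',
        max_concurrent_connections=4,
    )
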
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'HubspotSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity.py new file mode 100644 index 000000000000..a8cb1da690e1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity import ControlActivity + + +class IfConditionActivity(ControlActivity): + """This activity evaluates a boolean expression and executes either the + activities under the ifTrueActivities property or the ifFalseActivities + property depending on the result of the expression. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param expression: Required. An expression that would evaluate to Boolean. + This is used to determine the block of activities (ifTrueActivities or + ifFalseActivities) that will be executed. + :type expression: ~azure.mgmt.datafactory.models.Expression + :param if_true_activities: List of activities to execute if expression is + evaluated to true. This is an optional property and if not provided, the + activity will exit without any action. + :type if_true_activities: list[~azure.mgmt.datafactory.models.Activity] + :param if_false_activities: List of activities to execute if expression is + evaluated to false. This is an optional property and if not provided, the + activity will exit without any action. 
+ :type if_false_activities: list[~azure.mgmt.datafactory.models.Activity] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'expression': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'expression': {'key': 'typeProperties.expression', 'type': 'Expression'}, + 'if_true_activities': {'key': 'typeProperties.ifTrueActivities', 'type': '[Activity]'}, + 'if_false_activities': {'key': 'typeProperties.ifFalseActivities', 'type': '[Activity]'}, + } + + def __init__(self, **kwargs): + super(IfConditionActivity, self).__init__(**kwargs) + self.expression = kwargs.get('expression', None) + self.if_true_activities = kwargs.get('if_true_activities', None) + self.if_false_activities = kwargs.get('if_false_activities', None) + self.type = 'IfCondition' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity_py3.py new file mode 100644 index 000000000000..7921a2602807 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity_py3 import ControlActivity + + +class IfConditionActivity(ControlActivity): + """This activity evaluates a boolean expression and executes either the + activities under the ifTrueActivities property or the ifFalseActivities + property depending on the result of the expression. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param expression: Required. An expression that would evaluate to Boolean. + This is used to determine the block of activities (ifTrueActivities or + ifFalseActivities) that will be executed. + :type expression: ~azure.mgmt.datafactory.models.Expression + :param if_true_activities: List of activities to execute if expression is + evaluated to true. This is an optional property and if not provided, the + activity will exit without any action. 
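
The shape of IfConditionActivity mirrors the ADF JSON: the expression and both activity lists live under typeProperties. A minimal sketch, assuming `WaitActivity` (defined elsewhere in this package) as a stand-in branch activity:

.. code-block:: python

    from azure.mgmt.datafactory.models import (
        Expression,
        IfConditionActivity,
        WaitActivity,
    )

    activity = IfConditionActivity(
        name='CheckRunCopyFlag',
        expression=Expression(value='@bool(pipeline().parameters.runCopy)'),
        if_true_activities=[WaitActivity(name='Proceed', wait_time_in_seconds=1)],
        # if_false_activities is omitted: per the docstring, the activity
        # exits without any action when the expression evaluates to false.
    )
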
+ :type if_true_activities: list[~azure.mgmt.datafactory.models.Activity] + :param if_false_activities: List of activities to execute if expression is + evaluated to false. This is an optional property and if not provided, the + activity will exit without any action. + :type if_false_activities: list[~azure.mgmt.datafactory.models.Activity] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'expression': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'expression': {'key': 'typeProperties.expression', 'type': 'Expression'}, + 'if_true_activities': {'key': 'typeProperties.ifTrueActivities', 'type': '[Activity]'}, + 'if_false_activities': {'key': 'typeProperties.ifFalseActivities', 'type': '[Activity]'}, + } + + def __init__(self, *, name: str, expression, additional_properties=None, description: str=None, depends_on=None, user_properties=None, if_true_activities=None, if_false_activities=None, **kwargs) -> None: + super(IfConditionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.expression = expression + self.if_true_activities = if_true_activities + self.if_false_activities = if_false_activities + self.type = 'IfCondition' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service.py new file mode 100644 index 000000000000..a704852652db --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service.py @@ -0,0 +1,117 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class ImpalaLinkedService(LinkedService): + """Impala server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The IP address or host name of the Impala server. + (i.e. 
192.168.222.160) + :type host: object + :param port: The TCP port that the Impala server uses to listen for client + connections. The default value is 21050. + :type port: object + :param authentication_type: Required. The authentication type to use. + Possible values include: 'Anonymous', 'SASLUsername', + 'UsernameAndPassword' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.ImpalaAuthenticationType + :param username: The user name used to access the Impala server. The + default value is anonymous when using SASLUsername. + :type username: object + :param password: The password corresponding to the user name when using + UsernameAndPassword. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a + CA-issued SSL certificate name to match the host name of the server when + connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ImpalaLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.port = kwargs.get('port', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) + self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Impala' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service_py3.py new file mode 100644 index 000000000000..55b2e0c861d7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service_py3.py @@ -0,0 +1,117 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class ImpalaLinkedService(LinkedService): + """Impala server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
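
For ImpalaLinkedService only `host` and `authentication_type` are required; everything else falls back to the documented defaults (port 21050, SSL off). A minimal sketch for username/password authentication, assuming `SecureString` (a SecretBase implementation defined elsewhere in this package) for the secret:

.. code-block:: python

    from azure.mgmt.datafactory.models import ImpalaLinkedService, SecureString

    linked_service = ImpalaLinkedService(
        host='192.168.222.160',  # example address from the docstring
        authentication_type='UsernameAndPassword',
        username='impala_user',
        password=SecureString(value='<password>'),
        enable_ssl=True,
    )
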
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The IP address or host name of the Impala server. + (i.e. 192.168.222.160) + :type host: object + :param port: The TCP port that the Impala server uses to listen for client + connections. The default value is 21050. + :type port: object + :param authentication_type: Required. The authentication type to use. + Possible values include: 'Anonymous', 'SASLUsername', + 'UsernameAndPassword' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.ImpalaAuthenticationType + :param username: The user name used to access the Impala server. The + default value is anonymous when using SASLUsername. + :type username: object + :param password: The password corresponding to the user name when using + UsernameAndPassword. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a + CA-issued SSL certificate name to match the host name of the server when + connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: + super(ImpalaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.port = port + self.authentication_type = authentication_type + self.username = username + self.password = password + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.encrypted_credential = encrypted_credential + self.type = 'Impala' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset.py new file mode 100644 index 000000000000..8faee4f09240 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class ImpalaObjectDataset(Dataset): + """Impala server dataset. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The table name of the Impala. Type: string (or Expression + with resultType string). + :type table: object + :param impala_object_dataset_schema: The schema name of the Impala. Type: + string (or Expression with resultType string). + :type impala_object_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'impala_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ImpalaObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.impala_object_dataset_schema = kwargs.get('impala_object_dataset_schema', None) + self.type = 'ImpalaObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset_py3.py new file mode 100644 index 000000000000..5652b5c9e4b0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class ImpalaObjectDataset(Dataset): + """Impala server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The table name of the Impala. Type: string (or Expression + with resultType string). + :type table: object + :param impala_object_dataset_schema: The schema name of the Impala. Type: + string (or Expression with resultType string). 
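
Note the `impala_object_dataset_schema` name: the Dataset base class already claims `schema` for the physical-type schema, so the Impala schema name gets a disambiguated attribute while still serializing as `typeProperties.schema`. A minimal sketch (names illustrative):

.. code-block:: python

    from azure.mgmt.datafactory.models import (
        ImpalaObjectDataset,
        LinkedServiceReference,
    )

    dataset = ImpalaObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name='ImpalaServer'),
        table='events',                             # typeProperties.table
        impala_object_dataset_schema='analytics',  # typeProperties.schema
    )
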
+ :type impala_object_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'impala_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, impala_object_dataset_schema=None, **kwargs) -> None: + super(ImpalaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.table = table + self.impala_object_dataset_schema = impala_object_dataset_schema + self.type = 'ImpalaObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source.py new file mode 100644 index 000000000000..9e27dbdb6266 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class ImpalaSource(CopySource): + """A copy activity Impala server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ImpalaSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'ImpalaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source_py3.py new file mode 100644 index 000000000000..f7dc4016d020 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class ImpalaSource(CopySource): + """A copy activity Impala server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
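
ImpalaSource is typically used as the `source` of a copy activity rather than on its own. A rough composition sketch, assuming the CopyActivity, DatasetReference, and BlobSink models defined elsewhere in this package (all names illustrative):

.. code-block:: python

    from azure.mgmt.datafactory.models import (
        BlobSink,
        CopyActivity,
        DatasetReference,
        ImpalaSource,
    )

    copy = CopyActivity(
        name='CopyFromImpala',
        source=ImpalaSource(query='SELECT id, name FROM analytics.events'),
        sink=BlobSink(),
        inputs=[DatasetReference(reference_name='ImpalaEvents')],
        outputs=[DatasetReference(reference_name='BlobLanding')],
    )
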
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'ImpalaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service.py new file mode 100644 index 000000000000..2a58e7a0f7d3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class InformixLinkedService(LinkedService): + """Informix linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The non-access credential portion of + the connection string as well as an optional encrypted credential. Type: + string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param authentication_type: Type of authentication used to connect to the + Informix as ODBC data store. Possible values are: Anonymous and Basic. + Type: string (or Expression with resultType string). + :type authentication_type: object + :param credential: The access credential portion of the connection string + specified in driver-specific property-value format. + :type credential: ~azure.mgmt.datafactory.models.SecretBase + :param user_name: User name for Basic authentication. Type: string (or + Expression with resultType string). 
+ :type user_name: object + :param password: Password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(InformixLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.credential = kwargs.get('credential', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Informix' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service_py3.py new file mode 100644 index 000000000000..03aadada664d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class InformixLinkedService(LinkedService): + """Informix linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. 
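
InformixLinkedService takes only the non-secret part of the ODBC connection string; the secret portion goes into `credential` or `password` so it can be carried as a SecretBase. A minimal Basic-authentication sketch (connection string and names illustrative; `SecureString` and `IntegrationRuntimeReference` are defined elsewhere in this package):

.. code-block:: python

    from azure.mgmt.datafactory.models import (
        InformixLinkedService,
        IntegrationRuntimeReference,
        SecureString,
    )

    linked_service = InformixLinkedService(
        connection_string='Driver={IBM INFORMIX ODBC DRIVER};Host=ifx01;Service=9088',
        authentication_type='Basic',
        user_name='informix',
        password=SecureString(value='<password>'),
        # ODBC-style stores are typically reached through a self-hosted runtime
        connect_via=IntegrationRuntimeReference(reference_name='SelfHostedIR'),
    )
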
+ :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The non-access credential portion of + the connection string as well as an optional encrypted credential. Type: + string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param authentication_type: Type of authentication used to connect to the + Informix as ODBC data store. Possible values are: Anonymous and Basic. + Type: string (or Expression with resultType string). + :type authentication_type: object + :param credential: The access credential portion of the connection string + specified in driver-specific property-value format. + :type credential: ~azure.mgmt.datafactory.models.SecretBase + :param user_name: User name for Basic authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, credential=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(InformixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.authentication_type = authentication_type + self.credential = credential + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'Informix' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink.py new file mode 100644 index 000000000000..c511f4ecc174 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class InformixSink(CopySink): + """A copy activity Informix sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). + :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(InformixSink, self).__init__(**kwargs) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.type = 'InformixSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink_py3.py new file mode 100644 index 000000000000..b0681ec0d423 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink_py3.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class InformixSink(CopySink): + """A copy activity Informix sink. 
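
InformixSink adds a single knob, `pre_copy_script`, on top of the shared CopySink fields. A minimal sketch (script text illustrative):

.. code-block:: python

    from azure.mgmt.datafactory.models import InformixSink

    sink = InformixSink(
        pre_copy_script='DELETE FROM customers_staging',
        write_batch_size=1000,
        write_batch_timeout='00:05:00',  # satisfies the documented timespan pattern
    )
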
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). + :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(InformixSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.pre_copy_script = pre_copy_script + self.type = 'InformixSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source.py new file mode 100644 index 000000000000..6cab908c7014 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class InformixSource(CopySource): + """A copy activity source for Informix. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(InformixSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'InformixSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source_py3.py new file mode 100644 index 000000000000..ed8fb0221239 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class InformixSource(CopySource): + """A copy activity source for Informix. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
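
A minimal InformixSource sketch; note that the retry wait must match the timespan pattern given in the docstring:

.. code-block:: python

    from azure.mgmt.datafactory.models import InformixSource

    source = InformixSource(
        query='SELECT * FROM customers',
        source_retry_count=3,
        source_retry_wait='00:00:30',  # hh:mm:ss form accepted by the pattern
    )
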
+    :type query: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
+        super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.query = query
+        self.type = 'InformixSource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset.py
new file mode 100644
index 000000000000..8b7364bff652
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset import Dataset
+
+
+class InformixTableDataset(Dataset):
+    """The Informix table dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param table_name: The Informix table name. Type: string (or Expression
+     with resultType string).
+    :type table_name: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(InformixTableDataset, self).__init__(**kwargs)
+        self.table_name = kwargs.get('table_name', None)
+        self.type = 'InformixTable'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset_py3.py
new file mode 100644
index 000000000000..05c458e797b1
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset_py3.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_py3 import Dataset
+
+
+class InformixTableDataset(Dataset):
+    """The Informix table dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param table_name: The Informix table name. Type: string (or Expression
+     with resultType string).
+    :type table_name: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+    }
+
+    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None:
+        super(InformixTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+        self.table_name = table_name
+        self.type = 'InformixTable'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime.py
new file mode 100644
index 000000000000..5dd45d16f76e
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime.py
@@ -0,0 +1,51 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class IntegrationRuntime(Model):
+    """Azure Data Factory nested object which serves as a compute resource for
+    activities.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: SelfHostedIntegrationRuntime, ManagedIntegrationRuntime
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Integration runtime description.
+    :type description: str
+    :param type: Required. Constant filled by server.
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SelfHosted': 'SelfHostedIntegrationRuntime', 'Managed': 'ManagedIntegrationRuntime'} + } + + def __init__(self, **kwargs): + super(IntegrationRuntime, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.description = kwargs.get('description', None) + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys.py new file mode 100644 index 000000000000..12ed6925585e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeAuthKeys(Model): + """The integration runtime authentication keys. + + :param auth_key1: The primary integration runtime authentication key. + :type auth_key1: str + :param auth_key2: The secondary integration runtime authentication key. + :type auth_key2: str + """ + + _attribute_map = { + 'auth_key1': {'key': 'authKey1', 'type': 'str'}, + 'auth_key2': {'key': 'authKey2', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeAuthKeys, self).__init__(**kwargs) + self.auth_key1 = kwargs.get('auth_key1', None) + self.auth_key2 = kwargs.get('auth_key2', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys_py3.py new file mode 100644 index 000000000000..b807d4cd5b55 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys_py3.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeAuthKeys(Model): + """The integration runtime authentication keys. + + :param auth_key1: The primary integration runtime authentication key. + :type auth_key1: str + :param auth_key2: The secondary integration runtime authentication key. 
+    :type auth_key2: str
+    """
+
+    _attribute_map = {
+        'auth_key1': {'key': 'authKey1', 'type': 'str'},
+        'auth_key2': {'key': 'authKey2', 'type': 'str'},
+    }
+
+    def __init__(self, *, auth_key1: str=None, auth_key2: str=None, **kwargs) -> None:
+        super(IntegrationRuntimeAuthKeys, self).__init__(**kwargs)
+        self.auth_key1 = auth_key1
+        self.auth_key2 = auth_key2
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties.py
new file mode 100644
index 000000000000..e387ef4077f2
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties.py
@@ -0,0 +1,60 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class IntegrationRuntimeComputeProperties(Model):
+    """The compute resource properties for managed integration runtime.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param location: The location for managed integration runtime. The
+     supported regions can be found at
+     https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement-activities
+    :type location: str
+    :param node_size: The node size requirement for the managed integration
+     runtime.
+    :type node_size: str
+    :param number_of_nodes: The required number of nodes for managed
+     integration runtime.
+    :type number_of_nodes: int
+    :param max_parallel_executions_per_node: The maximum parallel execution
+     count per node for the managed integration runtime.
+    :type max_parallel_executions_per_node: int
+    :param v_net_properties: VNet properties for managed integration runtime.
+    :type v_net_properties:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeVNetProperties
+    """
+
+    _validation = {
+        'number_of_nodes': {'minimum': 1},
+        'max_parallel_executions_per_node': {'minimum': 1},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'location': {'key': 'location', 'type': 'str'},
+        'node_size': {'key': 'nodeSize', 'type': 'str'},
+        'number_of_nodes': {'key': 'numberOfNodes', 'type': 'int'},
+        'max_parallel_executions_per_node': {'key': 'maxParallelExecutionsPerNode', 'type': 'int'},
+        'v_net_properties': {'key': 'vNetProperties', 'type': 'IntegrationRuntimeVNetProperties'},
+    }
+
+    def __init__(self, **kwargs):
+        super(IntegrationRuntimeComputeProperties, self).__init__(**kwargs)
+        self.additional_properties = kwargs.get('additional_properties', None)
+        self.location = kwargs.get('location', None)
+        self.node_size = kwargs.get('node_size', None)
+        self.number_of_nodes = kwargs.get('number_of_nodes', None)
+        self.max_parallel_executions_per_node = kwargs.get('max_parallel_executions_per_node', None)
+        self.v_net_properties = kwargs.get('v_net_properties', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties_py3.py
new file mode 100644
index 000000000000..f47f339dd067
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties_py3.py
@@ -0,0 +1,60 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class IntegrationRuntimeComputeProperties(Model):
+    """The compute resource properties for managed integration runtime.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param location: The location for managed integration runtime. The
+     supported regions can be found at
+     https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement-activities
+    :type location: str
+    :param node_size: The node size requirement for the managed integration
+     runtime.
+    :type node_size: str
+    :param number_of_nodes: The required number of nodes for managed
+     integration runtime.
+    :type number_of_nodes: int
+    :param max_parallel_executions_per_node: The maximum parallel execution
+     count per node for the managed integration runtime.
+    :type max_parallel_executions_per_node: int
+    :param v_net_properties: VNet properties for managed integration runtime.
+    :type v_net_properties:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeVNetProperties
+    """
+
+    _validation = {
+        'number_of_nodes': {'minimum': 1},
+        'max_parallel_executions_per_node': {'minimum': 1},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'location': {'key': 'location', 'type': 'str'},
+        'node_size': {'key': 'nodeSize', 'type': 'str'},
+        'number_of_nodes': {'key': 'numberOfNodes', 'type': 'int'},
+        'max_parallel_executions_per_node': {'key': 'maxParallelExecutionsPerNode', 'type': 'int'},
+        'v_net_properties': {'key': 'vNetProperties', 'type': 'IntegrationRuntimeVNetProperties'},
+    }
+
+    def __init__(self, *, additional_properties=None, location: str=None, node_size: str=None, number_of_nodes: int=None, max_parallel_executions_per_node: int=None, v_net_properties=None, **kwargs) -> None:
+        super(IntegrationRuntimeComputeProperties, self).__init__(**kwargs)
+        self.additional_properties = additional_properties
+        self.location = location
+        self.node_size = node_size
+        self.number_of_nodes = number_of_nodes
+        self.max_parallel_executions_per_node = max_parallel_executions_per_node
+        self.v_net_properties = v_net_properties
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info.py
new file mode 100644
index 000000000000..c185f916e8e5
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info.py
@@ -0,0 +1,70 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class IntegrationRuntimeConnectionInfo(Model):
+    """Connection information for encrypting the on-premises data source
+    credentials.
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :ivar service_token: The token generated in service. Callers use this
+     token to authenticate to integration runtime.
+    :vartype service_token: str
+    :ivar identity_cert_thumbprint: The integration runtime SSL certificate
+     thumbprint. Click-Once application uses it to do server validation.
+    :vartype identity_cert_thumbprint: str
+    :ivar host_service_uri: The on-premises integration runtime host URL.
+    :vartype host_service_uri: str
+    :ivar version: The integration runtime version.
+    :vartype version: str
+    :ivar public_key: The public key for encrypting a credential when
+     transferring the credential to the integration runtime.
+    :vartype public_key: str
+    :ivar is_identity_cert_exprired: Whether the identity certificate is
+     expired.
+    :vartype is_identity_cert_exprired: bool
+    """
+
+    _validation = {
+        'service_token': {'readonly': True},
+        'identity_cert_thumbprint': {'readonly': True},
+        'host_service_uri': {'readonly': True},
+        'version': {'readonly': True},
+        'public_key': {'readonly': True},
+        'is_identity_cert_exprired': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'service_token': {'key': 'serviceToken', 'type': 'str'},
+        'identity_cert_thumbprint': {'key': 'identityCertThumbprint', 'type': 'str'},
+        'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'},
+        'version': {'key': 'version', 'type': 'str'},
+        'public_key': {'key': 'publicKey', 'type': 'str'},
+        'is_identity_cert_exprired': {'key': 'isIdentityCertExprired', 'type': 'bool'},
+    }
+
+    def __init__(self, **kwargs):
+        super(IntegrationRuntimeConnectionInfo, self).__init__(**kwargs)
+        self.additional_properties = kwargs.get('additional_properties', None)
+        self.service_token = None
+        self.identity_cert_thumbprint = None
+        self.host_service_uri = None
+        self.version = None
+        self.public_key = None
+        self.is_identity_cert_exprired = None
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info_py3.py
new file mode 100644
index 000000000000..8cc5aceb16d7
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info_py3.py
@@ -0,0 +1,70 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class IntegrationRuntimeConnectionInfo(Model):
+    """Connection information for encrypting the on-premises data source
+    credentials.
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :ivar service_token: The token generated in service. Callers use this
+     token to authenticate to integration runtime.
+    :vartype service_token: str
+    :ivar identity_cert_thumbprint: The integration runtime SSL certificate
+     thumbprint. Click-Once application uses it to do server validation.
+    :vartype identity_cert_thumbprint: str
+    :ivar host_service_uri: The on-premises integration runtime host URL.
+    :vartype host_service_uri: str
+    :ivar version: The integration runtime version.
+    :vartype version: str
+    :ivar public_key: The public key for encrypting a credential when
+     transferring the credential to the integration runtime.
+    :vartype public_key: str
+    :ivar is_identity_cert_exprired: Whether the identity certificate is
+     expired.
+ :vartype is_identity_cert_exprired: bool + """ + + _validation = { + 'service_token': {'readonly': True}, + 'identity_cert_thumbprint': {'readonly': True}, + 'host_service_uri': {'readonly': True}, + 'version': {'readonly': True}, + 'public_key': {'readonly': True}, + 'is_identity_cert_exprired': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'service_token': {'key': 'serviceToken', 'type': 'str'}, + 'identity_cert_thumbprint': {'key': 'identityCertThumbprint', 'type': 'str'}, + 'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + 'public_key': {'key': 'publicKey', 'type': 'str'}, + 'is_identity_cert_exprired': {'key': 'isIdentityCertExprired', 'type': 'bool'}, + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(IntegrationRuntimeConnectionInfo, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.service_token = None + self.identity_cert_thumbprint = None + self.host_service_uri = None + self.version = None + self.public_key = None + self.is_identity_cert_exprired = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties.py new file mode 100644 index 000000000000..44cd5fe5979b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeCustomSetupScriptProperties(Model): + """Custom setup script properties for a managed dedicated integration runtime. + + :param blob_container_uri: The URI of the Azure blob container that + contains the custom setup script. + :type blob_container_uri: str + :param sas_token: The SAS token of the Azure blob container. 
+ :type sas_token: ~azure.mgmt.datafactory.models.SecureString + """ + + _attribute_map = { + 'blob_container_uri': {'key': 'blobContainerUri', 'type': 'str'}, + 'sas_token': {'key': 'sasToken', 'type': 'SecureString'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeCustomSetupScriptProperties, self).__init__(**kwargs) + self.blob_container_uri = kwargs.get('blob_container_uri', None) + self.sas_token = kwargs.get('sas_token', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties_py3.py new file mode 100644 index 000000000000..7f3c08c0b339 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties_py3.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeCustomSetupScriptProperties(Model): + """Custom setup script properties for a managed dedicated integration runtime. + + :param blob_container_uri: The URI of the Azure blob container that + contains the custom setup script. + :type blob_container_uri: str + :param sas_token: The SAS token of the Azure blob container. + :type sas_token: ~azure.mgmt.datafactory.models.SecureString + """ + + _attribute_map = { + 'blob_container_uri': {'key': 'blobContainerUri', 'type': 'str'}, + 'sas_token': {'key': 'sasToken', 'type': 'SecureString'}, + } + + def __init__(self, *, blob_container_uri: str=None, sas_token=None, **kwargs) -> None: + super(IntegrationRuntimeCustomSetupScriptProperties, self).__init__(**kwargs) + self.blob_container_uri = blob_container_uri + self.sas_token = sas_token diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties.py new file mode 100644 index 000000000000..ebc0e9b38d6f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties.py @@ -0,0 +1,37 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeDataProxyProperties(Model): + """Data proxy properties for a managed dedicated integration runtime. + + :param connect_via: The self-hosted integration runtime reference. 
+ :type connect_via: ~azure.mgmt.datafactory.models.EntityReference + :param staging_linked_service: The staging linked service reference. + :type staging_linked_service: + ~azure.mgmt.datafactory.models.EntityReference + :param path: The path to contain the staged data in the Blob storage. + :type path: str + """ + + _attribute_map = { + 'connect_via': {'key': 'connectVia', 'type': 'EntityReference'}, + 'staging_linked_service': {'key': 'stagingLinkedService', 'type': 'EntityReference'}, + 'path': {'key': 'path', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeDataProxyProperties, self).__init__(**kwargs) + self.connect_via = kwargs.get('connect_via', None) + self.staging_linked_service = kwargs.get('staging_linked_service', None) + self.path = kwargs.get('path', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties_py3.py new file mode 100644 index 000000000000..532b774cad3d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties_py3.py @@ -0,0 +1,37 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeDataProxyProperties(Model): + """Data proxy properties for a managed dedicated integration runtime. + + :param connect_via: The self-hosted integration runtime reference. + :type connect_via: ~azure.mgmt.datafactory.models.EntityReference + :param staging_linked_service: The staging linked service reference. + :type staging_linked_service: + ~azure.mgmt.datafactory.models.EntityReference + :param path: The path to contain the staged data in the Blob storage. + :type path: str + """ + + _attribute_map = { + 'connect_via': {'key': 'connectVia', 'type': 'EntityReference'}, + 'staging_linked_service': {'key': 'stagingLinkedService', 'type': 'EntityReference'}, + 'path': {'key': 'path', 'type': 'str'}, + } + + def __init__(self, *, connect_via=None, staging_linked_service=None, path: str=None, **kwargs) -> None: + super(IntegrationRuntimeDataProxyProperties, self).__init__(**kwargs) + self.connect_via = connect_via + self.staging_linked_service = staging_linked_service + self.path = path diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data.py new file mode 100644 index 000000000000..f7b695729403 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeMonitoringData(Model): + """Get monitoring data response. + + :param name: Integration runtime name. + :type name: str + :param nodes: Integration runtime node monitoring data. + :type nodes: + list[~azure.mgmt.datafactory.models.IntegrationRuntimeNodeMonitoringData] + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'nodes': {'key': 'nodes', 'type': '[IntegrationRuntimeNodeMonitoringData]'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeMonitoringData, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.nodes = kwargs.get('nodes', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data_py3.py new file mode 100644 index 000000000000..16f3b656c9cc --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data_py3.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeMonitoringData(Model): + """Get monitoring data response. + + :param name: Integration runtime name. + :type name: str + :param nodes: Integration runtime node monitoring data. + :type nodes: + list[~azure.mgmt.datafactory.models.IntegrationRuntimeNodeMonitoringData] + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'nodes': {'key': 'nodes', 'type': '[IntegrationRuntimeNodeMonitoringData]'}, + } + + def __init__(self, *, name: str=None, nodes=None, **kwargs) -> None: + super(IntegrationRuntimeMonitoringData, self).__init__(**kwargs) + self.name = name + self.nodes = nodes diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address.py new file mode 100644 index 000000000000..2edabd3e2472 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeNodeIpAddress(Model): + """The IP address of self-hosted integration runtime node. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar ip_address: The IP address of self-hosted integration runtime node. + :vartype ip_address: str + """ + + _validation = { + 'ip_address': {'readonly': True}, + } + + _attribute_map = { + 'ip_address': {'key': 'ipAddress', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeNodeIpAddress, self).__init__(**kwargs) + self.ip_address = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address_py3.py new file mode 100644 index 000000000000..476be9815984 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address_py3.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeNodeIpAddress(Model): + """The IP address of self-hosted integration runtime node. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar ip_address: The IP address of self-hosted integration runtime node. + :vartype ip_address: str + """ + + _validation = { + 'ip_address': {'readonly': True}, + } + + _attribute_map = { + 'ip_address': {'key': 'ipAddress', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(IntegrationRuntimeNodeIpAddress, self).__init__(**kwargs) + self.ip_address = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data.py new file mode 100644 index 000000000000..9d27bedf70aa --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data.py @@ -0,0 +1,79 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeNodeMonitoringData(Model): + """Monitoring data for integration runtime node. + + Variables are only populated by the server, and will be ignored when + sending a request. 
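+
+    A reading sketch (illustrative; assumes a DataFactoryManagementClient
+    named ``client`` and existing resource names)::
+
+        data = client.integration_runtimes.get_monitoring_data(
+            'my-resource-group', 'my-factory', 'my-integration-runtime')
+        for node in data.nodes:
+            # All metrics below are read-only, populated by the service.
+            print(node.node_name, node.cpu_utilization)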
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :ivar node_name: Name of the integration runtime node.
+    :vartype node_name: str
+    :ivar available_memory_in_mb: Available memory (MB) on the integration
+     runtime node.
+    :vartype available_memory_in_mb: int
+    :ivar cpu_utilization: CPU percentage on the integration runtime node.
+    :vartype cpu_utilization: int
+    :ivar concurrent_jobs_limit: Maximum concurrent jobs on the integration
+     runtime node.
+    :vartype concurrent_jobs_limit: int
+    :ivar concurrent_jobs_running: The number of jobs currently running on the
+     integration runtime node.
+    :vartype concurrent_jobs_running: int
+    :ivar max_concurrent_jobs: The maximum concurrent jobs in this integration
+     runtime.
+    :vartype max_concurrent_jobs: int
+    :ivar sent_bytes: Sent bytes on the integration runtime node.
+    :vartype sent_bytes: float
+    :ivar received_bytes: Received bytes on the integration runtime node.
+    :vartype received_bytes: float
+    """
+
+    _validation = {
+        'node_name': {'readonly': True},
+        'available_memory_in_mb': {'readonly': True},
+        'cpu_utilization': {'readonly': True},
+        'concurrent_jobs_limit': {'readonly': True},
+        'concurrent_jobs_running': {'readonly': True},
+        'max_concurrent_jobs': {'readonly': True},
+        'sent_bytes': {'readonly': True},
+        'received_bytes': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'node_name': {'key': 'nodeName', 'type': 'str'},
+        'available_memory_in_mb': {'key': 'availableMemoryInMB', 'type': 'int'},
+        'cpu_utilization': {'key': 'cpuUtilization', 'type': 'int'},
+        'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'},
+        'concurrent_jobs_running': {'key': 'concurrentJobsRunning', 'type': 'int'},
+        'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'},
+        'sent_bytes': {'key': 'sentBytes', 'type': 'float'},
+        'received_bytes': {'key': 'receivedBytes', 'type': 'float'},
+    }
+
+    def __init__(self, **kwargs):
+        super(IntegrationRuntimeNodeMonitoringData, self).__init__(**kwargs)
+        self.additional_properties = kwargs.get('additional_properties', None)
+        self.node_name = None
+        self.available_memory_in_mb = None
+        self.cpu_utilization = None
+        self.concurrent_jobs_limit = None
+        self.concurrent_jobs_running = None
+        self.max_concurrent_jobs = None
+        self.sent_bytes = None
+        self.received_bytes = None
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data_py3.py
new file mode 100644
index 000000000000..35c7e664b2ff
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data_py3.py
@@ -0,0 +1,79 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class IntegrationRuntimeNodeMonitoringData(Model):
+    """Monitoring data for integration runtime node.
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :ivar node_name: Name of the integration runtime node.
+    :vartype node_name: str
+    :ivar available_memory_in_mb: Available memory (MB) on the integration
+     runtime node.
+    :vartype available_memory_in_mb: int
+    :ivar cpu_utilization: CPU percentage on the integration runtime node.
+    :vartype cpu_utilization: int
+    :ivar concurrent_jobs_limit: Maximum concurrent jobs on the integration
+     runtime node.
+    :vartype concurrent_jobs_limit: int
+    :ivar concurrent_jobs_running: The number of jobs currently running on the
+     integration runtime node.
+    :vartype concurrent_jobs_running: int
+    :ivar max_concurrent_jobs: The maximum concurrent jobs in this integration
+     runtime.
+    :vartype max_concurrent_jobs: int
+    :ivar sent_bytes: Sent bytes on the integration runtime node.
+    :vartype sent_bytes: float
+    :ivar received_bytes: Received bytes on the integration runtime node.
+    :vartype received_bytes: float
+    """
+
+    _validation = {
+        'node_name': {'readonly': True},
+        'available_memory_in_mb': {'readonly': True},
+        'cpu_utilization': {'readonly': True},
+        'concurrent_jobs_limit': {'readonly': True},
+        'concurrent_jobs_running': {'readonly': True},
+        'max_concurrent_jobs': {'readonly': True},
+        'sent_bytes': {'readonly': True},
+        'received_bytes': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'node_name': {'key': 'nodeName', 'type': 'str'},
+        'available_memory_in_mb': {'key': 'availableMemoryInMB', 'type': 'int'},
+        'cpu_utilization': {'key': 'cpuUtilization', 'type': 'int'},
+        'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'},
+        'concurrent_jobs_running': {'key': 'concurrentJobsRunning', 'type': 'int'},
+        'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'},
+        'sent_bytes': {'key': 'sentBytes', 'type': 'float'},
+        'received_bytes': {'key': 'receivedBytes', 'type': 'float'},
+    }
+
+    def __init__(self, *, additional_properties=None, **kwargs) -> None:
+        super(IntegrationRuntimeNodeMonitoringData, self).__init__(**kwargs)
+        self.additional_properties = additional_properties
+        self.node_name = None
+        self.available_memory_in_mb = None
+        self.cpu_utilization = None
+        self.concurrent_jobs_limit = None
+        self.concurrent_jobs_running = None
+        self.max_concurrent_jobs = None
+        self.sent_bytes = None
+        self.received_bytes = None
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_py3.py
new file mode 100644
index 000000000000..b4056a07591b
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_py3.py
@@ -0,0 +1,51 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class IntegrationRuntime(Model):
+    """Azure Data Factory nested object which serves as a compute resource for
+    activities.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: SelfHostedIntegrationRuntime, ManagedIntegrationRuntime
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Integration runtime description.
+    :type description: str
+    :param type: Required. Constant filled by server.
+    :type type: str
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+    }
+
+    _subtype_map = {
+        'type': {'SelfHosted': 'SelfHostedIntegrationRuntime', 'Managed': 'ManagedIntegrationRuntime'}
+    }
+
+    def __init__(self, *, additional_properties=None, description: str=None, **kwargs) -> None:
+        super(IntegrationRuntime, self).__init__(**kwargs)
+        self.additional_properties = additional_properties
+        self.description = description
+        self.type = None
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference.py
new file mode 100644
index 000000000000..7461d29de284
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference.py
@@ -0,0 +1,48 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class IntegrationRuntimeReference(Model):
+    """Integration runtime reference type.
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar type: Required. Type of integration runtime. Default value:
+     "IntegrationRuntimeReference".
+    :vartype type: str
+    :param reference_name: Required. Reference integration runtime name.
+    :type reference_name: str
+    :param parameters: Arguments for integration runtime.
+    :type parameters: dict[str, object]
+    """
+
+    _validation = {
+        'type': {'required': True, 'constant': True},
+        'reference_name': {'required': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'reference_name': {'key': 'referenceName', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{object}'},
+    }
+
+    type = "IntegrationRuntimeReference"
+
+    def __init__(self, **kwargs):
+        super(IntegrationRuntimeReference, self).__init__(**kwargs)
+        self.reference_name = kwargs.get('reference_name', None)
+        self.parameters = kwargs.get('parameters', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference_py3.py
new file mode 100644
index 000000000000..56fd3608ba61
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference_py3.py
@@ -0,0 +1,48 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class IntegrationRuntimeReference(Model):
+    """Integration runtime reference type.
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar type: Required. Type of integration runtime. Default value:
+     "IntegrationRuntimeReference".
+    :vartype type: str
+    :param reference_name: Required. Reference integration runtime name.
+    :type reference_name: str
+    :param parameters: Arguments for integration runtime.
+    :type parameters: dict[str, object]
+    """
+
+    _validation = {
+        'type': {'required': True, 'constant': True},
+        'reference_name': {'required': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'reference_name': {'key': 'referenceName', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{object}'},
+    }
+
+    type = "IntegrationRuntimeReference"
+
+    def __init__(self, *, reference_name: str, parameters=None, **kwargs) -> None:
+        super(IntegrationRuntimeReference, self).__init__(**kwargs)
+        self.reference_name = reference_name
+        self.parameters = parameters
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters.py
new file mode 100644
index 000000000000..3cd91195af1b
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters.py
@@ -0,0 +1,30 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeRegenerateKeyParameters(Model): + """Parameters to regenerate the authentication key. + + :param key_name: The name of the authentication key to regenerate. + Possible values include: 'authKey1', 'authKey2' + :type key_name: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeyName + """ + + _attribute_map = { + 'key_name': {'key': 'keyName', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeRegenerateKeyParameters, self).__init__(**kwargs) + self.key_name = kwargs.get('key_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters_py3.py new file mode 100644 index 000000000000..f3846cf8ec55 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters_py3.py @@ -0,0 +1,30 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeRegenerateKeyParameters(Model): + """Parameters to regenerate the authentication key. + + :param key_name: The name of the authentication key to regenerate. + Possible values include: 'authKey1', 'authKey2' + :type key_name: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeyName + """ + + _attribute_map = { + 'key_name': {'key': 'keyName', 'type': 'str'}, + } + + def __init__(self, *, key_name=None, **kwargs) -> None: + super(IntegrationRuntimeRegenerateKeyParameters, self).__init__(**kwargs) + self.key_name = key_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource.py new file mode 100644 index 000000000000..b18f376d3698 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .sub_resource import SubResource + + +class IntegrationRuntimeResource(SubResource): + """Integration runtime resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. 
+    :vartype name: str
+    :ivar type: The resource type.
+    :vartype type: str
+    :ivar etag: Etag identifies a change in the resource.
+    :vartype etag: str
+    :param properties: Required. Integration runtime properties.
+    :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntime
+    """
+
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'etag': {'readonly': True},
+        'properties': {'required': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'etag': {'key': 'etag', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': 'IntegrationRuntime'},
+    }
+
+    def __init__(self, **kwargs):
+        super(IntegrationRuntimeResource, self).__init__(**kwargs)
+        self.properties = kwargs.get('properties', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource_paged.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource_paged.py
new file mode 100644
index 000000000000..cef89866884e
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource_paged.py
@@ -0,0 +1,27 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.paging import Paged
+
+
+class IntegrationRuntimeResourcePaged(Paged):
+    """
+    A paging container for iterating over a list of
+    :class:`IntegrationRuntimeResource
+    <azure.mgmt.datafactory.models.IntegrationRuntimeResource>` objects.
+    """
+
+    _attribute_map = {
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+        'current_page': {'key': 'value', 'type': '[IntegrationRuntimeResource]'}
+    }
+
+    def __init__(self, *args, **kwargs):
+
+        super(IntegrationRuntimeResourcePaged, self).__init__(*args, **kwargs)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource_py3.py
new file mode 100644
index 000000000000..9239f54166f9
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource_py3.py
@@ -0,0 +1,53 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .sub_resource_py3 import SubResource
+
+
+class IntegrationRuntimeResource(SubResource):
+    """Integration runtime resource type.
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar id: The resource identifier.
+    :vartype id: str
+    :ivar name: The resource name.
+    :vartype name: str
+    :ivar type: The resource type.
+    :vartype type: str
+    :ivar etag: Etag identifies a change in the resource.
+    :vartype etag: str
+    :param properties: Required. Integration runtime properties.
+    :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntime
+    """
+
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'etag': {'readonly': True},
+        'properties': {'required': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'etag': {'key': 'etag', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': 'IntegrationRuntime'},
+    }
+
+    def __init__(self, *, properties, **kwargs) -> None:
+        super(IntegrationRuntimeResource, self).__init__(**kwargs)
+        self.properties = properties
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info.py
new file mode 100644
index 000000000000..3399f8f38300
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info.py
@@ -0,0 +1,55 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class IntegrationRuntimeSsisCatalogInfo(Model):
+    """Catalog information for managed dedicated integration runtime.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param catalog_server_endpoint: The catalog database server URL.
+    :type catalog_server_endpoint: str
+    :param catalog_admin_user_name: The administrator user name of the
+     catalog database.
+    :type catalog_admin_user_name: str
+    :param catalog_admin_password: The password of the administrator user
+     account of the catalog database.
+    :type catalog_admin_password: ~azure.mgmt.datafactory.models.SecureString
+    :param catalog_pricing_tier: The pricing tier for the catalog database.
+     The valid values can be found at
+     https://azure.microsoft.com/en-us/pricing/details/sql-database/.
+     Possible values include: 'Basic', 'Standard', 'Premium', 'PremiumRS'
+    :type catalog_pricing_tier: str or
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogPricingTier
+    """
+
+    _validation = {
+        'catalog_admin_user_name': {'max_length': 128, 'min_length': 1},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'catalog_server_endpoint': {'key': 'catalogServerEndpoint', 'type': 'str'},
+        'catalog_admin_user_name': {'key': 'catalogAdminUserName', 'type': 'str'},
+        'catalog_admin_password': {'key': 'catalogAdminPassword', 'type': 'SecureString'},
+        'catalog_pricing_tier': {'key': 'catalogPricingTier', 'type': 'str'},
+    }
+
+    def __init__(self, **kwargs):
+        super(IntegrationRuntimeSsisCatalogInfo, self).__init__(**kwargs)
+        self.additional_properties = kwargs.get('additional_properties', None)
+        self.catalog_server_endpoint = kwargs.get('catalog_server_endpoint', None)
+        self.catalog_admin_user_name = kwargs.get('catalog_admin_user_name', None)
+        self.catalog_admin_password = kwargs.get('catalog_admin_password', None)
+        self.catalog_pricing_tier = kwargs.get('catalog_pricing_tier', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info_py3.py
new file mode 100644
index 000000000000..27996bb4aeb5
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info_py3.py
@@ -0,0 +1,55 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class IntegrationRuntimeSsisCatalogInfo(Model):
+    """Catalog information for managed dedicated integration runtime.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param catalog_server_endpoint: The catalog database server URL.
+    :type catalog_server_endpoint: str
+    :param catalog_admin_user_name: The administrator user name of the
+     catalog database.
+    :type catalog_admin_user_name: str
+    :param catalog_admin_password: The password of the administrator user
+     account of the catalog database.
+    :type catalog_admin_password: ~azure.mgmt.datafactory.models.SecureString
+    :param catalog_pricing_tier: The pricing tier for the catalog database.
+     The valid values can be found at
+     https://azure.microsoft.com/en-us/pricing/details/sql-database/.
+     Possible values include: 'Basic', 'Standard', 'Premium', 'PremiumRS'
+    :type catalog_pricing_tier: str or
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogPricingTier
+    """
+
+    _validation = {
+        'catalog_admin_user_name': {'max_length': 128, 'min_length': 1},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'catalog_server_endpoint': {'key': 'catalogServerEndpoint', 'type': 'str'},
+        'catalog_admin_user_name': {'key': 'catalogAdminUserName', 'type': 'str'},
+        'catalog_admin_password': {'key': 'catalogAdminPassword', 'type': 'SecureString'},
+        'catalog_pricing_tier': {'key': 'catalogPricingTier', 'type': 'str'},
+    }
+
+    def __init__(self, *, additional_properties=None, catalog_server_endpoint: str=None, catalog_admin_user_name: str=None, catalog_admin_password=None, catalog_pricing_tier=None, **kwargs) -> None:
+        super(IntegrationRuntimeSsisCatalogInfo, self).__init__(**kwargs)
+        self.additional_properties = additional_properties
+        self.catalog_server_endpoint = catalog_server_endpoint
+        self.catalog_admin_user_name = catalog_admin_user_name
+        self.catalog_admin_password = catalog_admin_password
+        self.catalog_pricing_tier = catalog_pricing_tier
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py
new file mode 100644
index 000000000000..293f071aa0b3
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py
@@ -0,0 +1,59 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class IntegrationRuntimeSsisProperties(Model):
+    """SSIS properties for managed integration runtime.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param catalog_info: Catalog information for managed dedicated
+     integration runtime.
+    :type catalog_info:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogInfo
+    :param license_type: License type for the bring your own license
+     scenario. Possible values include: 'BasePrice', 'LicenseIncluded'
+    :type license_type: str or
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeLicenseType
+    :param custom_setup_script_properties: Custom setup script properties
+     for a managed dedicated integration runtime.
+    :type custom_setup_script_properties:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeCustomSetupScriptProperties
+    :param data_proxy_properties: Data proxy properties for a managed
+     dedicated integration runtime.
+    :type data_proxy_properties:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeDataProxyProperties
+    :param edition: The edition for the SSIS Integration Runtime. Possible
+     values include: 'Standard', 'Enterprise'
+    :type edition: str or
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeEdition
+    """
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'catalog_info': {'key': 'catalogInfo', 'type': 'IntegrationRuntimeSsisCatalogInfo'},
+        'license_type': {'key': 'licenseType', 'type': 'str'},
+        'custom_setup_script_properties': {'key': 'customSetupScriptProperties', 'type': 'IntegrationRuntimeCustomSetupScriptProperties'},
+        'data_proxy_properties': {'key': 'dataProxyProperties', 'type': 'IntegrationRuntimeDataProxyProperties'},
+        'edition': {'key': 'edition', 'type': 'str'},
+    }
+
+    def __init__(self, **kwargs):
+        super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs)
+        self.additional_properties = kwargs.get('additional_properties', None)
+        self.catalog_info = kwargs.get('catalog_info', None)
+        self.license_type = kwargs.get('license_type', None)
+        self.custom_setup_script_properties = kwargs.get('custom_setup_script_properties', None)
+        self.data_proxy_properties = kwargs.get('data_proxy_properties', None)
+        self.edition = kwargs.get('edition', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py
new file mode 100644
index 000000000000..f75775e29a7f
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py
@@ -0,0 +1,59 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class IntegrationRuntimeSsisProperties(Model):
+    """SSIS properties for managed integration runtime.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param catalog_info: Catalog information for managed dedicated
+     integration runtime.
+    :type catalog_info:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogInfo
+    :param license_type: License type for the bring your own license
+     scenario. Possible values include: 'BasePrice', 'LicenseIncluded'
+    :type license_type: str or
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeLicenseType
+    :param custom_setup_script_properties: Custom setup script properties
+     for a managed dedicated integration runtime.
+    :type custom_setup_script_properties:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeCustomSetupScriptProperties
+    :param data_proxy_properties: Data proxy properties for a managed
+     dedicated integration runtime.
+    :type data_proxy_properties:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeDataProxyProperties
+    :param edition: The edition for the SSIS Integration Runtime. Possible
+     values include: 'Standard', 'Enterprise'
+    :type edition: str or
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeEdition
+    """
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'catalog_info': {'key': 'catalogInfo', 'type': 'IntegrationRuntimeSsisCatalogInfo'},
+        'license_type': {'key': 'licenseType', 'type': 'str'},
+        'custom_setup_script_properties': {'key': 'customSetupScriptProperties', 'type': 'IntegrationRuntimeCustomSetupScriptProperties'},
+        'data_proxy_properties': {'key': 'dataProxyProperties', 'type': 'IntegrationRuntimeDataProxyProperties'},
+        'edition': {'key': 'edition', 'type': 'str'},
+    }
+
+    def __init__(self, *, additional_properties=None, catalog_info=None, license_type=None, custom_setup_script_properties=None, data_proxy_properties=None, edition=None, **kwargs) -> None:
+        super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs)
+        self.additional_properties = additional_properties
+        self.catalog_info = catalog_info
+        self.license_type = license_type
+        self.custom_setup_script_properties = custom_setup_script_properties
+        self.data_proxy_properties = data_proxy_properties
+        self.edition = edition
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status.py
new file mode 100644
index 000000000000..64da6347f9ed
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status.py
@@ -0,0 +1,64 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class IntegrationRuntimeStatus(Model):
+    """Integration runtime status.
+
+    You probably want to use the sub-classes and not this class directly.
+    Known sub-classes are: SelfHostedIntegrationRuntimeStatus,
+    ManagedIntegrationRuntimeStatus
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :ivar data_factory_name: The data factory name which the integration
+     runtime belongs to.
+    :vartype data_factory_name: str
+    :ivar state: The state of the integration runtime. Possible values
+     include: 'Initial', 'Stopped', 'Started', 'Starting', 'Stopping',
+     'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied'
+    :vartype state: str or
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeState
+    :param type: Required. Constant filled by server.
+ :type type: str + """ + + _validation = { + 'data_factory_name': {'readonly': True}, + 'state': {'readonly': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, + 'state': {'key': 'state', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SelfHosted': 'SelfHostedIntegrationRuntimeStatus', 'Managed': 'ManagedIntegrationRuntimeStatus'} + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeStatus, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.data_factory_name = None + self.state = None + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response.py new file mode 100644 index 000000000000..9382b4b08fde --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeStatusListResponse(Model): + """A list of integration runtime status. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of integration runtime status. + :type value: + list[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse] + :param next_link: The link to the next page of results, if any remaining + results exist. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[IntegrationRuntimeStatusResponse]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeStatusListResponse, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.next_link = kwargs.get('next_link', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response_py3.py new file mode 100644 index 000000000000..bed71f74ffc6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response_py3.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
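Since 'value' is the only required field on IntegrationRuntimeStatusListResponse, constructing one by hand is straightforward. A hedged sketch, assuming the package from this patch (the empty page is illustrative):

    from azure.mgmt.datafactory.models import (
        IntegrationRuntimeStatusListResponse,
    )

    # An empty page is still a valid body; next_link is None on the last page.
    page = IntegrationRuntimeStatusListResponse(value=[], next_link=None)
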
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class IntegrationRuntimeStatusListResponse(Model):
+    """A list of integration runtime status.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param value: Required. List of integration runtime status.
+    :type value:
+     list[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse]
+    :param next_link: The link to the next page of results, if any remaining
+     results exist.
+    :type next_link: str
+    """
+
+    _validation = {
+        'value': {'required': True},
+    }
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[IntegrationRuntimeStatusResponse]'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(self, *, value, next_link: str=None, **kwargs) -> None:
+        super(IntegrationRuntimeStatusListResponse, self).__init__(**kwargs)
+        self.value = value
+        self.next_link = next_link
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_py3.py
new file mode 100644
index 000000000000..8541e04dc679
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_py3.py
@@ -0,0 +1,64 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class IntegrationRuntimeStatus(Model):
+    """Integration runtime status.
+
+    You probably want to use the sub-classes and not this class directly.
+    Known sub-classes are: SelfHostedIntegrationRuntimeStatus,
+    ManagedIntegrationRuntimeStatus
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :ivar data_factory_name: The data factory name which the integration
+     runtime belongs to.
+    :vartype data_factory_name: str
+    :ivar state: The state of the integration runtime. Possible values
+     include: 'Initial', 'Stopped', 'Started', 'Starting', 'Stopping',
+     'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied'
+    :vartype state: str or
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeState
+    :param type: Required. Constant filled by server.
+ :type type: str + """ + + _validation = { + 'data_factory_name': {'readonly': True}, + 'state': {'readonly': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, + 'state': {'key': 'state', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SelfHosted': 'SelfHostedIntegrationRuntimeStatus', 'Managed': 'ManagedIntegrationRuntimeStatus'} + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(IntegrationRuntimeStatus, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.data_factory_name = None + self.state = None + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response.py new file mode 100644 index 000000000000..901b4d8b7442 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeStatusResponse(Model): + """Integration runtime status response. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar name: The integration runtime name. + :vartype name: str + :param properties: Required. Integration runtime properties. + :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatus + """ + + _validation = { + 'name': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'IntegrationRuntimeStatus'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeStatusResponse, self).__init__(**kwargs) + self.name = None + self.properties = kwargs.get('properties', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response_py3.py new file mode 100644 index 000000000000..64d84a1e4f19 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response_py3.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
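The _subtype_map above is what lets msrest route a payload to SelfHostedIntegrationRuntimeStatus or ManagedIntegrationRuntimeStatus based on the 'type' discriminator. A sketch of that mechanism, assuming msrest's Deserializer accepts an already-parsed dict, as other generated clients rely on (the payload is invented):

    from msrest import Deserializer

    from azure.mgmt.datafactory import models

    # Collect the model classes so the deserializer can resolve subtypes.
    client_models = {k: v for k, v in vars(models).items() if isinstance(v, type)}
    deserialize = Deserializer(client_models)

    # 'SelfHosted' is mapped to SelfHostedIntegrationRuntimeStatus.
    status = deserialize('IntegrationRuntimeStatus', {'type': 'SelfHosted'})
    print(type(status).__name__)
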
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class IntegrationRuntimeStatusResponse(Model):
+    """Integration runtime status response.
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar name: The integration runtime name.
+    :vartype name: str
+    :param properties: Required. Integration runtime properties.
+    :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatus
+    """
+
+    _validation = {
+        'name': {'readonly': True},
+        'properties': {'required': True},
+    }
+
+    _attribute_map = {
+        'name': {'key': 'name', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': 'IntegrationRuntimeStatus'},
+    }
+
+    def __init__(self, *, properties, **kwargs) -> None:
+        super(IntegrationRuntimeStatusResponse, self).__init__(**kwargs)
+        self.name = None
+        self.properties = properties
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties.py
new file mode 100644
index 000000000000..752b5b99eb60
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties.py
@@ -0,0 +1,38 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class IntegrationRuntimeVNetProperties(Model):
+    """VNet properties for managed integration runtime.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param v_net_id: The ID of the VNet that this integration runtime will
+     join.
+    :type v_net_id: str
+    :param subnet: The name of the subnet this integration runtime will
+     join.
+    :type subnet: str
+    """
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'v_net_id': {'key': 'vNetId', 'type': 'str'},
+        'subnet': {'key': 'subnet', 'type': 'str'},
+    }
+
+    def __init__(self, **kwargs):
+        super(IntegrationRuntimeVNetProperties, self).__init__(**kwargs)
+        self.additional_properties = kwargs.get('additional_properties', None)
+        self.v_net_id = kwargs.get('v_net_id', None)
+        self.subnet = kwargs.get('subnet', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties_py3.py
new file mode 100644
index 000000000000..32e8beb31ea1
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties_py3.py
@@ -0,0 +1,38 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class IntegrationRuntimeVNetProperties(Model):
+    """VNet properties for managed integration runtime.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param v_net_id: The ID of the VNet that this integration runtime will
+     join.
+    :type v_net_id: str
+    :param subnet: The name of the subnet this integration runtime will
+     join.
+    :type subnet: str
+    """
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'v_net_id': {'key': 'vNetId', 'type': 'str'},
+        'subnet': {'key': 'subnet', 'type': 'str'},
+    }
+
+    def __init__(self, *, additional_properties=None, v_net_id: str=None, subnet: str=None, **kwargs) -> None:
+        super(IntegrationRuntimeVNetProperties, self).__init__(**kwargs)
+        self.additional_properties = additional_properties
+        self.v_net_id = v_net_id
+        self.subnet = subnet
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service.py
new file mode 100644
index 000000000000..517cdd63caa5
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service.py
@@ -0,0 +1,98 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class JiraLinkedService(LinkedService):
+    """Jira Service linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param host: Required. The IP address or host name of the Jira service.
+     (e.g. jira.example.com)
+    :type host: object
+    :param port: The TCP port that the Jira server uses to listen for client
+     connections. The default value is 443 if connecting through HTTPS, or
+     8080 if connecting through HTTP.
+    :type port: object
+    :param username: Required. The user name that you use to access Jira
+     Service.
+ :type username: object + :param password: The password corresponding to the user name that you + provided in the username field. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(JiraLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.port = kwargs.get('port', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Jira' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service_py3.py new file mode 100644 index 000000000000..82dc8d578da3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service_py3.py @@ -0,0 +1,98 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
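In the JiraLinkedService definition above, host and username are the only required type properties. A hedged construction sketch (endpoint and credentials are placeholders, not values from this patch):

    from azure.mgmt.datafactory.models import JiraLinkedService, SecureString

    jira_ls = JiraLinkedService(
        host='jira.example.com',  # required
        username='build-user',    # required
        port=443,                 # optional; 443 is already the HTTPS default
        password=SecureString(value='<placeholder>'),
    )
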
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class JiraLinkedService(LinkedService):
+    """Jira Service linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param host: Required. The IP address or host name of the Jira service.
+     (e.g. jira.example.com)
+    :type host: object
+    :param port: The TCP port that the Jira server uses to listen for client
+     connections. The default value is 443 if connecting through HTTPS, or
+     8080 if connecting through HTTP.
+    :type port: object
+    :param username: Required. The user name that you use to access Jira
+     Service.
+    :type username: object
+    :param password: The password corresponding to the user name that you
+     provided in the username field.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param use_encrypted_endpoints: Specifies whether the data source
+     endpoints are encrypted using HTTPS. The default value is true.
+    :type use_encrypted_endpoints: object
+    :param use_host_verification: Specifies whether to require the host name
+     in the server's certificate to match the host name of the server when
+     connecting over SSL. The default value is true.
+    :type use_host_verification: object
+    :param use_peer_verification: Specifies whether to verify the identity
+     of the server when connecting over SSL. The default value is true.
+    :type use_peer_verification: object
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'host': {'required': True},
+        'username': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'host': {'key': 'typeProperties.host', 'type': 'object'},
+        'port': {'key': 'typeProperties.port', 'type': 'object'},
+        'username': {'key': 'typeProperties.username', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+        'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
+        'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
+        'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, *, host, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, password=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None:
+        super(JiraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+        self.host = host
+        self.port = port
+        self.username = username
+        self.password = password
+        self.use_encrypted_endpoints = use_encrypted_endpoints
+        self.use_host_verification = use_host_verification
+        self.use_peer_verification = use_peer_verification
+        self.encrypted_credential = encrypted_credential
+        self.type = 'Jira'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset.py
new file mode 100644
index 000000000000..1c2b12c18e15
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset import Dataset
+
+
+class JiraObjectDataset(Dataset):
+    """Jira Service dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset.
+     Type: array (or Expression with resultType array), itemType:
+     DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param table_name: The table name. Type: string (or Expression with
+     resultType string).
+    :type table_name: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(JiraObjectDataset, self).__init__(**kwargs)
+        self.table_name = kwargs.get('table_name', None)
+        self.type = 'JiraObject'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset_py3.py
new file mode 100644
index 000000000000..3c061b238cde
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset_py3.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_py3 import Dataset
+
+
+class JiraObjectDataset(Dataset):
+    """Jira Service dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset.
+     Type: array (or Expression with resultType array), itemType:
+     DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param table_name: The table name. Type: string (or Expression with
+     resultType string).
+    :type table_name: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+    }
+
+    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None:
+        super(JiraObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+        self.table_name = table_name
+        self.type = 'JiraObject'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source.py
new file mode 100644
index 000000000000..709da0ce1205
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source import CopySource
+
+
+class JiraSource(CopySource):
+    """A copy activity Jira Service source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection
+     count for the source data store. Type: integer (or Expression with
+     resultType integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param query: A query to retrieve data from source. Type: string (or
+     Expression with resultType string).
+    :type query: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(JiraSource, self).__init__(**kwargs)
+        self.query = kwargs.get('query', None)
+        self.type = 'JiraSource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source_py3.py
new file mode 100644
index 000000000000..c958c8351bb3
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source_py3.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source_py3 import CopySource
+
+
+class JiraSource(CopySource):
+    """A copy activity Jira Service source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection
+     count for the source data store. Type: integer (or Expression with
+     resultType integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param query: A query to retrieve data from source. Type: string (or
+     Expression with resultType string).
+    :type query: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
+        super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.query = query
+        self.type = 'JiraSource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_dataset.py
new file mode 100644
index 000000000000..c1cee8f00b8d
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_dataset.py
@@ -0,0 +1,85 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset import Dataset
+
+
+class JsonDataset(Dataset):
+    """Json dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset.
+     Type: array (or Expression with resultType array), itemType:
+     DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param location: Required. The location of the json data storage.
+    :type location: ~azure.mgmt.datafactory.models.DatasetLocation
+    :param encoding_name: The code page name of the preferred encoding. If
+     not specified, the default value is UTF-8, unless the BOM denotes
+     another Unicode encoding. Refer to the name column of the table in the
+     following link to set supported values:
+     https://msdn.microsoft.com/library/system.text.encoding.aspx. Type:
+     string (or Expression with resultType string).
+    :type encoding_name: object
+    :param compression: The data compression method used for the json
+     dataset.
+    :type compression: ~azure.mgmt.datafactory.models.DatasetCompression
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+        'location': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'},
+        'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'},
+        'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'},
+    }
+
+    def __init__(self, **kwargs):
+        super(JsonDataset, self).__init__(**kwargs)
+        self.location = kwargs.get('location', None)
+        self.encoding_name = kwargs.get('encoding_name', None)
+        self.compression = kwargs.get('compression', None)
+        self.type = 'Json'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_dataset_py3.py
new file mode 100644
index 000000000000..564fe3bebf6b
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_dataset_py3.py
@@ -0,0 +1,85 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_py3 import Dataset
+
+
+class JsonDataset(Dataset):
+    """Json dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset.
+     Type: array (or Expression with resultType array), itemType:
+     DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the json data storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param encoding_name: The code page name of the preferred encoding. If not + specified, the default value is UTF-8, unless BOM denotes another Unicode + encoding. Refer to the name column of the table in the following link to + set supported values: + https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string + (or Expression with resultType string). + :type encoding_name: object + :param compression: The data compression method used for the json dataset. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, encoding_name=None, compression=None, **kwargs) -> None: + super(JsonDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.location = location + self.encoding_name = encoding_name + self.compression = compression + self.type = 'Json' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py new file mode 100644 index 000000000000..80f4ff0aaf8b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .dataset_storage_format import DatasetStorageFormat + + +class JsonFormat(DatasetStorageFormat): + """The data stored in JSON format. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param serializer: Serializer. Type: string (or Expression with resultType + string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with + resultType string). + :type deserializer: object + :param type: Required. Constant filled by server. + :type type: str + :param file_pattern: File pattern of JSON. To be more specific, the way of + separating a collection of JSON objects. The default value is + 'setOfObjects'. It is case-sensitive. + :type file_pattern: object + :param nesting_separator: The character used to separate nesting levels. + Default value is '.' (dot). Type: string (or Expression with resultType + string). + :type nesting_separator: object + :param encoding_name: The code page name of the preferred encoding. If not + provided, the default value is 'utf-8', unless the byte order mark (BOM) + denotes another Unicode encoding. The full list of supported values can be + found in the 'Name' column of the table of encodings in the following + reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string + (or Expression with resultType string). + :type encoding_name: object + :param json_node_reference: The JSONPath of the JSON array element to be + flattened. Example: "$.ArrayPath". Type: string (or Expression with + resultType string). + :type json_node_reference: object + :param json_path_definition: The JSONPath definition for each column + mapping with a customized column name to extract data from JSON file. For + fields under root object, start with "$"; for fields inside the array + chosen by jsonNodeReference property, start from the array element. + Example: {"Column1": "$.Column1Path", "Column2": "Column2PathInArray"}. + Type: object (or Expression with resultType object). 
+ :type json_path_definition: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'file_pattern': {'key': 'filePattern', 'type': 'object'}, + 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + 'encoding_name': {'key': 'encodingName', 'type': 'object'}, + 'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'}, + 'json_path_definition': {'key': 'jsonPathDefinition', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(JsonFormat, self).__init__(**kwargs) + self.file_pattern = kwargs.get('file_pattern', None) + self.nesting_separator = kwargs.get('nesting_separator', None) + self.encoding_name = kwargs.get('encoding_name', None) + self.json_node_reference = kwargs.get('json_node_reference', None) + self.json_path_definition = kwargs.get('json_path_definition', None) + self.type = 'JsonFormat' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py new file mode 100644 index 000000000000..2fdb44cc3b7f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_storage_format_py3 import DatasetStorageFormat + + +class JsonFormat(DatasetStorageFormat): + """The data stored in JSON format. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param serializer: Serializer. Type: string (or Expression with resultType + string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with + resultType string). + :type deserializer: object + :param type: Required. Constant filled by server. + :type type: str + :param file_pattern: File pattern of JSON. To be more specific, the way of + separating a collection of JSON objects. The default value is + 'setOfObjects'. It is case-sensitive. + :type file_pattern: object + :param nesting_separator: The character used to separate nesting levels. + Default value is '.' (dot). Type: string (or Expression with resultType + string). + :type nesting_separator: object + :param encoding_name: The code page name of the preferred encoding. If not + provided, the default value is 'utf-8', unless the byte order mark (BOM) + denotes another Unicode encoding. The full list of supported values can be + found in the 'Name' column of the table of encodings in the following + reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string + (or Expression with resultType string). 
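+     (Illustrative, non-generated note: a code page name such as ``"gb2312"``
+     could be supplied here.)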
+    :type encoding_name: object
+    :param json_node_reference: The JSONPath of the JSON array element to be
+     flattened. Example: "$.ArrayPath". Type: string (or Expression with
+     resultType string).
+    :type json_node_reference: object
+    :param json_path_definition: The JSONPath definition for each column
+     mapping with a customized column name to extract data from JSON file. For
+     fields under root object, start with "$"; for fields inside the array
+     chosen by jsonNodeReference property, start from the array element.
+     Example: {"Column1": "$.Column1Path", "Column2": "Column2PathInArray"}.
+     Type: object (or Expression with resultType object).
+    :type json_path_definition: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'serializer': {'key': 'serializer', 'type': 'object'},
+        'deserializer': {'key': 'deserializer', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'file_pattern': {'key': 'filePattern', 'type': 'object'},
+        'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'},
+        'encoding_name': {'key': 'encodingName', 'type': 'object'},
+        'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'},
+        'json_path_definition': {'key': 'jsonPathDefinition', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, file_pattern=None, nesting_separator=None, encoding_name=None, json_node_reference=None, json_path_definition=None, **kwargs) -> None:
+        super(JsonFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs)
+        self.file_pattern = file_pattern
+        self.nesting_separator = nesting_separator
+        self.encoding_name = encoding_name
+        self.json_node_reference = json_node_reference
+        self.json_path_definition = json_path_definition
+        self.type = 'JsonFormat'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_sink.py
new file mode 100644
index 000000000000..829344338672
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_sink.py
@@ -0,0 +1,69 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_sink import CopySink
+
+
+class JsonSink(CopySink):
+    """A copy activity Json sink.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param write_batch_size: Write batch size. Type: integer (or Expression
+     with resultType integer), minimum: 0.
+    :type write_batch_size: object
+    :param write_batch_timeout: Write batch timeout. Type: string (or
+     Expression with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type write_batch_timeout: object
+    :param sink_retry_count: Sink retry count. Type: integer (or Expression
+     with resultType integer).
+    :type sink_retry_count: object
+    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+     resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type sink_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the sink data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param store_settings: Json store settings.
+    :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings
+    :param format_settings: Json format settings.
+    :type format_settings: ~azure.mgmt.datafactory.models.JsonWriteSettings
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
+        'format_settings': {'key': 'formatSettings', 'type': 'JsonWriteSettings'},
+    }
+
+    def __init__(self, **kwargs):
+        super(JsonSink, self).__init__(**kwargs)
+        self.store_settings = kwargs.get('store_settings', None)
+        self.format_settings = kwargs.get('format_settings', None)
+        self.type = 'JsonSink'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_sink_py3.py
new file mode 100644
index 000000000000..3212bb4784d8
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_sink_py3.py
@@ -0,0 +1,69 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_sink_py3 import CopySink
+
+
+class JsonSink(CopySink):
+    """A copy activity Json sink.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param write_batch_size: Write batch size. Type: integer (or Expression
+     with resultType integer), minimum: 0.
+    :type write_batch_size: object
+    :param write_batch_timeout: Write batch timeout. Type: string (or
+     Expression with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type write_batch_timeout: object
+    :param sink_retry_count: Sink retry count. Type: integer (or Expression
+     with resultType integer).
+    :type sink_retry_count: object
+    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+     resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type sink_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the sink data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param store_settings: Json store settings.
+    :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings
+    :param format_settings: Json format settings.
+    :type format_settings: ~azure.mgmt.datafactory.models.JsonWriteSettings
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
+        'format_settings': {'key': 'formatSettings', 'type': 'JsonWriteSettings'},
+    }
+
+    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None:
+        super(JsonSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.store_settings = store_settings
+        self.format_settings = format_settings
+        self.type = 'JsonSink'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_source.py
new file mode 100644
index 000000000000..a3349ee1d39e
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_source.py
@@ -0,0 +1,56 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source import CopySource
+
+
+class JsonSource(CopySource):
+    """A copy activity Json source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
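+     (Illustrative, non-generated note: per this pattern, ``"00:05:00"``
+     would request a five-minute retry wait.)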
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param store_settings: Json store settings.
+    :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
+    }
+
+    def __init__(self, **kwargs):
+        super(JsonSource, self).__init__(**kwargs)
+        self.store_settings = kwargs.get('store_settings', None)
+        self.type = 'JsonSource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_source_py3.py
new file mode 100644
index 000000000000..7e5b73662801
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_source_py3.py
@@ -0,0 +1,56 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source_py3 import CopySource
+
+
+class JsonSource(CopySource):
+    """A copy activity Json source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param store_settings: Json store settings.
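+     (Illustrative, non-generated note: in practice this is a concrete
+     subclass of ``StoreReadSettings`` matching the source data store.)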
+    :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
+    }
+
+    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None:
+        super(JsonSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.store_settings = store_settings
+        self.type = 'JsonSource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_write_settings.py
new file mode 100644
index 000000000000..287da9805170
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_write_settings.py
@@ -0,0 +1,45 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .format_write_settings import FormatWriteSettings
+
+
+class JsonWriteSettings(FormatWriteSettings):
+    """Json write settings.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param type: Required. The write setting type.
+    :type type: str
+    :param file_pattern: File pattern of JSON. This setting controls the way a
+     collection of JSON objects will be treated. The default value is
+     'setOfObjects'. It is case-sensitive.
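+     (Descriptive, non-generated note: 'setOfObjects' writes each object as a
+     separate JSON document, while 'arrayOfObjects' wraps the collection in a
+     single JSON array.)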
+     Possible values include: 'setOfObjects', 'arrayOfObjects'
+    :type file_pattern: str or
+     ~azure.mgmt.datafactory.models.JsonWriteFilePattern
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'file_pattern': {'key': 'filePattern', 'type': 'str'},
+    }
+
+    def __init__(self, **kwargs):
+        super(JsonWriteSettings, self).__init__(**kwargs)
+        self.file_pattern = kwargs.get('file_pattern', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_write_settings_py3.py
new file mode 100644
index 000000000000..f78f57eb1187
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_write_settings_py3.py
@@ -0,0 +1,45 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .format_write_settings_py3 import FormatWriteSettings
+
+
+class JsonWriteSettings(FormatWriteSettings):
+    """Json write settings.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param type: Required. The write setting type.
+    :type type: str
+    :param file_pattern: File pattern of JSON. This setting controls the way a
+     collection of JSON objects will be treated. The default value is
+     'setOfObjects'. It is case-sensitive. Possible values include:
+     'setOfObjects', 'arrayOfObjects'
+    :type file_pattern: str or
+     ~azure.mgmt.datafactory.models.JsonWriteFilePattern
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'file_pattern': {'key': 'filePattern', 'type': 'str'},
+    }
+
+    def __init__(self, *, type: str, additional_properties=None, file_pattern=None, **kwargs) -> None:
+        super(JsonWriteSettings, self).__init__(additional_properties=additional_properties, type=type, **kwargs)
+        self.file_pattern = file_pattern
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime.py
new file mode 100644
index 000000000000..f4a4e7eb8bf0
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime.py
@@ -0,0 +1,58 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class LinkedIntegrationRuntime(Model):
+    """The linked integration runtime information.
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    :ivar name: The name of the linked integration runtime.
+    :vartype name: str
+    :ivar subscription_id: The subscription ID to which the linked
+     integration runtime belongs.
+    :vartype subscription_id: str
+    :ivar data_factory_name: The name of the data factory to which the
+     linked integration runtime belongs.
+    :vartype data_factory_name: str
+    :ivar data_factory_location: The location of the data factory to which
+     the linked integration runtime belongs.
+    :vartype data_factory_location: str
+    :ivar create_time: The creation time of the linked integration runtime.
+    :vartype create_time: datetime
+    """
+
+    _validation = {
+        'name': {'readonly': True},
+        'subscription_id': {'readonly': True},
+        'data_factory_name': {'readonly': True},
+        'data_factory_location': {'readonly': True},
+        'create_time': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'name': {'key': 'name', 'type': 'str'},
+        'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
+        'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'},
+        'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'},
+        'create_time': {'key': 'createTime', 'type': 'iso-8601'},
+    }
+
+    def __init__(self, **kwargs):
+        super(LinkedIntegrationRuntime, self).__init__(**kwargs)
+        self.name = None
+        self.subscription_id = None
+        self.data_factory_name = None
+        self.data_factory_location = None
+        self.create_time = None
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization.py
new file mode 100644
index 000000000000..b7be47e8f096
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization.py
@@ -0,0 +1,39 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_integration_runtime_type import LinkedIntegrationRuntimeType
+
+
+class LinkedIntegrationRuntimeKeyAuthorization(LinkedIntegrationRuntimeType):
+    """Key authorization for a linked integration runtime.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param authorization_type: Required. Constant filled by server.
+    :type authorization_type: str
+    :param key: Required. The key used for authorization.
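+     (Illustrative, non-generated example:
+     ``key=SecureString(value='<authentication key>')``, with the placeholder
+     standing in for a real integration runtime key.)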
+    :type key: ~azure.mgmt.datafactory.models.SecureString
+    """
+
+    _validation = {
+        'authorization_type': {'required': True},
+        'key': {'required': True},
+    }
+
+    _attribute_map = {
+        'authorization_type': {'key': 'authorizationType', 'type': 'str'},
+        'key': {'key': 'key', 'type': 'SecureString'},
+    }
+
+    def __init__(self, **kwargs):
+        super(LinkedIntegrationRuntimeKeyAuthorization, self).__init__(**kwargs)
+        self.key = kwargs.get('key', None)
+        self.authorization_type = 'Key'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization_py3.py
new file mode 100644
index 000000000000..4a2ebd8d1003
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization_py3.py
@@ -0,0 +1,39 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_integration_runtime_type_py3 import LinkedIntegrationRuntimeType
+
+
+class LinkedIntegrationRuntimeKeyAuthorization(LinkedIntegrationRuntimeType):
+    """Key authorization for a linked integration runtime.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param authorization_type: Required. Constant filled by server.
+    :type authorization_type: str
+    :param key: Required. The key used for authorization.
+    :type key: ~azure.mgmt.datafactory.models.SecureString
+    """
+
+    _validation = {
+        'authorization_type': {'required': True},
+        'key': {'required': True},
+    }
+
+    _attribute_map = {
+        'authorization_type': {'key': 'authorizationType', 'type': 'str'},
+        'key': {'key': 'key', 'type': 'SecureString'},
+    }
+
+    def __init__(self, *, key, **kwargs) -> None:
+        super(LinkedIntegrationRuntimeKeyAuthorization, self).__init__(**kwargs)
+        self.key = key
+        self.authorization_type = 'Key'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_py3.py
new file mode 100644
index 000000000000..6c831ab5f511
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_py3.py
@@ -0,0 +1,58 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class LinkedIntegrationRuntime(Model):
+    """The linked integration runtime information.
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    :ivar name: The name of the linked integration runtime.
+    :vartype name: str
+    :ivar subscription_id: The subscription ID to which the linked
+     integration runtime belongs.
+    :vartype subscription_id: str
+    :ivar data_factory_name: The name of the data factory to which the
+     linked integration runtime belongs.
+    :vartype data_factory_name: str
+    :ivar data_factory_location: The location of the data factory to which
+     the linked integration runtime belongs.
+    :vartype data_factory_location: str
+    :ivar create_time: The creation time of the linked integration runtime.
+    :vartype create_time: datetime
+    """
+
+    _validation = {
+        'name': {'readonly': True},
+        'subscription_id': {'readonly': True},
+        'data_factory_name': {'readonly': True},
+        'data_factory_location': {'readonly': True},
+        'create_time': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'name': {'key': 'name', 'type': 'str'},
+        'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
+        'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'},
+        'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'},
+        'create_time': {'key': 'createTime', 'type': 'iso-8601'},
+    }
+
+    def __init__(self, **kwargs) -> None:
+        super(LinkedIntegrationRuntime, self).__init__(**kwargs)
+        self.name = None
+        self.subscription_id = None
+        self.data_factory_name = None
+        self.data_factory_location = None
+        self.create_time = None
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization.py
new file mode 100644
index 000000000000..3fbc8dd9cac2
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization.py
@@ -0,0 +1,41 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_integration_runtime_type import LinkedIntegrationRuntimeType
+
+
+class LinkedIntegrationRuntimeRbacAuthorization(LinkedIntegrationRuntimeType):
+    """Role-based access control (RBAC) authorization for a linked
+    integration runtime.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param authorization_type: Required. Constant filled by server.
+    :type authorization_type: str
+    :param resource_id: Required. The resource identifier of the integration
+     runtime to be shared.
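+     (Illustrative, non-generated format:
+     ``/subscriptions/<id>/resourceGroups/<group>/providers/Microsoft.DataFactory/factories/<factory>/integrationRuntimes/<name>``.)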
+    :type resource_id: str
+    """
+
+    _validation = {
+        'authorization_type': {'required': True},
+        'resource_id': {'required': True},
+    }
+
+    _attribute_map = {
+        'authorization_type': {'key': 'authorizationType', 'type': 'str'},
+        'resource_id': {'key': 'resourceId', 'type': 'str'},
+    }
+
+    def __init__(self, **kwargs):
+        super(LinkedIntegrationRuntimeRbacAuthorization, self).__init__(**kwargs)
+        self.resource_id = kwargs.get('resource_id', None)
+        self.authorization_type = 'RBAC'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization_py3.py
new file mode 100644
index 000000000000..055b64809e18
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization_py3.py
@@ -0,0 +1,41 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_integration_runtime_type_py3 import LinkedIntegrationRuntimeType
+
+
+class LinkedIntegrationRuntimeRbacAuthorization(LinkedIntegrationRuntimeType):
+    """Role-based access control (RBAC) authorization for a linked
+    integration runtime.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param authorization_type: Required. Constant filled by server.
+    :type authorization_type: str
+    :param resource_id: Required. The resource identifier of the integration
+     runtime to be shared.
+    :type resource_id: str
+    """
+
+    _validation = {
+        'authorization_type': {'required': True},
+        'resource_id': {'required': True},
+    }
+
+    _attribute_map = {
+        'authorization_type': {'key': 'authorizationType', 'type': 'str'},
+        'resource_id': {'key': 'resourceId', 'type': 'str'},
+    }
+
+    def __init__(self, *, resource_id: str, **kwargs) -> None:
+        super(LinkedIntegrationRuntimeRbacAuthorization, self).__init__(**kwargs)
+        self.resource_id = resource_id
+        self.authorization_type = 'RBAC'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request.py
new file mode 100644
index 000000000000..807757332b3e
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request.py
@@ -0,0 +1,35 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class LinkedIntegrationRuntimeRequest(Model):
+    """Data factory name for linked integration runtime request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param linked_factory_name: Required. The data factory name for linked
+     integration runtime.
+    :type linked_factory_name: str
+    """
+
+    _validation = {
+        'linked_factory_name': {'required': True},
+    }
+
+    _attribute_map = {
+        'linked_factory_name': {'key': 'factoryName', 'type': 'str'},
+    }
+
+    def __init__(self, **kwargs):
+        super(LinkedIntegrationRuntimeRequest, self).__init__(**kwargs)
+        self.linked_factory_name = kwargs.get('linked_factory_name', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request_py3.py
new file mode 100644
index 000000000000..45362ab63ba3
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request_py3.py
@@ -0,0 +1,35 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class LinkedIntegrationRuntimeRequest(Model):
+    """Data factory name for linked integration runtime request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param linked_factory_name: Required. The data factory name for linked
+     integration runtime.
+    :type linked_factory_name: str
+    """
+
+    _validation = {
+        'linked_factory_name': {'required': True},
+    }
+
+    _attribute_map = {
+        'linked_factory_name': {'key': 'factoryName', 'type': 'str'},
+    }
+
+    def __init__(self, *, linked_factory_name: str, **kwargs) -> None:
+        super(LinkedIntegrationRuntimeRequest, self).__init__(**kwargs)
+        self.linked_factory_name = linked_factory_name
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type.py
new file mode 100644
index 000000000000..446395bb9cbf
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type.py
@@ -0,0 +1,42 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class LinkedIntegrationRuntimeType(Model):
+    """The base definition of a linked integration runtime.
+
+    You probably want to use the sub-classes and not this class directly.
+    Known sub-classes are: LinkedIntegrationRuntimeRbacAuthorization,
+    LinkedIntegrationRuntimeKeyAuthorization
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param authorization_type: Required. Constant filled by server.
+    :type authorization_type: str
+    """
+
+    _validation = {
+        'authorization_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'authorization_type': {'key': 'authorizationType', 'type': 'str'},
+    }
+
+    _subtype_map = {
+        'authorization_type': {'RBAC': 'LinkedIntegrationRuntimeRbacAuthorization', 'Key': 'LinkedIntegrationRuntimeKeyAuthorization'}
+    }
+
+    def __init__(self, **kwargs):
+        super(LinkedIntegrationRuntimeType, self).__init__(**kwargs)
+        self.authorization_type = None
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type_py3.py
new file mode 100644
index 000000000000..79468dc450d2
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type_py3.py
@@ -0,0 +1,42 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class LinkedIntegrationRuntimeType(Model):
+    """The base definition of a linked integration runtime.
+
+    You probably want to use the sub-classes and not this class directly.
+    Known sub-classes are: LinkedIntegrationRuntimeRbacAuthorization,
+    LinkedIntegrationRuntimeKeyAuthorization
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param authorization_type: Required. Constant filled by server.
+    :type authorization_type: str
+    """
+
+    _validation = {
+        'authorization_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'authorization_type': {'key': 'authorizationType', 'type': 'str'},
+    }
+
+    _subtype_map = {
+        'authorization_type': {'RBAC': 'LinkedIntegrationRuntimeRbacAuthorization', 'Key': 'LinkedIntegrationRuntimeKeyAuthorization'}
+    }
+
+    def __init__(self, **kwargs) -> None:
+        super(LinkedIntegrationRuntimeType, self).__init__(**kwargs)
+        self.authorization_type = None
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py
new file mode 100644
index 000000000000..2778a33fbb5a
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py
@@ -0,0 +1,102 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class LinkedService(Model):
+    """The Azure Data Factory nested object which contains the information
+    and credentials used to connect to a related store or compute resource.
+
+    You probably want to use the sub-classes and not this class directly.
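+    (Editorial note: in practice a concrete subclass such as
+    AzureStorageLinkedService is instantiated; the ``type`` discriminator
+    below is filled in by each subclass.)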
+    Known sub-classes are: AzureFunctionLinkedService,
+    AzureDataExplorerLinkedService, SapTableLinkedService,
+    GoogleAdWordsLinkedService, OracleServiceCloudLinkedService,
+    DynamicsAXLinkedService, ResponsysLinkedService,
+    AzureDatabricksLinkedService, AzureDataLakeAnalyticsLinkedService,
+    HDInsightOnDemandLinkedService, SalesforceMarketingCloudLinkedService,
+    NetezzaLinkedService, VerticaLinkedService, ZohoLinkedService,
+    XeroLinkedService, SquareLinkedService, SparkLinkedService,
+    ShopifyLinkedService, ServiceNowLinkedService, QuickBooksLinkedService,
+    PrestoLinkedService, PhoenixLinkedService, PaypalLinkedService,
+    MarketoLinkedService, AzureMariaDBLinkedService, MariaDBLinkedService,
+    MagentoLinkedService, JiraLinkedService, ImpalaLinkedService,
+    HubspotLinkedService, HiveLinkedService, HBaseLinkedService,
+    GreenplumLinkedService, GoogleBigQueryLinkedService, EloquaLinkedService,
+    DrillLinkedService, CouchbaseLinkedService, ConcurLinkedService,
+    AzurePostgreSqlLinkedService, AmazonMWSLinkedService,
+    SapHanaLinkedService, SapBWLinkedService, SftpServerLinkedService,
+    FtpServerLinkedService, HttpLinkedService, AzureSearchLinkedService,
+    CustomDataSourceLinkedService, AmazonRedshiftLinkedService,
+    AmazonS3LinkedService, RestServiceLinkedService, SapOpenHubLinkedService,
+    SapEccLinkedService, SapCloudForCustomerLinkedService,
+    SalesforceServiceCloudLinkedService, SalesforceLinkedService,
+    Office365LinkedService, AzureBlobFSLinkedService,
+    AzureDataLakeStoreLinkedService, CosmosDbMongoDbApiLinkedService,
+    MongoDbV2LinkedService, MongoDbLinkedService, CassandraLinkedService,
+    WebLinkedService, ODataLinkedService, HdfsLinkedService,
+    MicrosoftAccessLinkedService, InformixLinkedService, OdbcLinkedService,
+    AzureMLLinkedService, TeradataLinkedService, Db2LinkedService,
+    SybaseLinkedService, PostgreSqlLinkedService, MySqlLinkedService,
+    AzureMySqlLinkedService, OracleLinkedService, FileServerLinkedService,
+    HDInsightLinkedService, CommonDataServiceForAppsLinkedService,
+    DynamicsCrmLinkedService, DynamicsLinkedService, CosmosDbLinkedService,
+    AzureKeyVaultLinkedService, AzureBatchLinkedService,
+    AzureSqlMILinkedService, AzureSqlDatabaseLinkedService,
+    SqlServerLinkedService, AzureSqlDWLinkedService,
+    AzureTableStorageLinkedService, AzureBlobStorageLinkedService,
+    AzureStorageLinkedService
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
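+     (Set by the concrete subclass per the ``_subtype_map`` below, e.g.
+     'AzureStorage' for AzureStorageLinkedService; clients do not supply it
+     directly.)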
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 
'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + } + + def __init__(self, **kwargs): + super(LinkedService, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.connect_via = kwargs.get('connect_via', None) + self.description = kwargs.get('description', None) + self.parameters = kwargs.get('parameters', None) + self.annotations = kwargs.get('annotations', None) + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py new file mode 100644 index 000000000000..2b3e475c3075 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py @@ -0,0 +1,102 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class LinkedService(Model): + """The Azure Data Factory nested object which contains the information and + credential which can be used to connect with related store or compute + resource. + + You probably want to use the sub-classes and not this class directly. 
Known + sub-classes are: AzureFunctionLinkedService, + AzureDataExplorerLinkedService, SapTableLinkedService, + GoogleAdWordsLinkedService, OracleServiceCloudLinkedService, + DynamicsAXLinkedService, ResponsysLinkedService, + AzureDatabricksLinkedService, AzureDataLakeAnalyticsLinkedService, + HDInsightOnDemandLinkedService, SalesforceMarketingCloudLinkedService, + NetezzaLinkedService, VerticaLinkedService, ZohoLinkedService, + XeroLinkedService, SquareLinkedService, SparkLinkedService, + ShopifyLinkedService, ServiceNowLinkedService, QuickBooksLinkedService, + PrestoLinkedService, PhoenixLinkedService, PaypalLinkedService, + MarketoLinkedService, AzureMariaDBLinkedService, MariaDBLinkedService, + MagentoLinkedService, JiraLinkedService, ImpalaLinkedService, + HubspotLinkedService, HiveLinkedService, HBaseLinkedService, + GreenplumLinkedService, GoogleBigQueryLinkedService, EloquaLinkedService, + DrillLinkedService, CouchbaseLinkedService, ConcurLinkedService, + AzurePostgreSqlLinkedService, AmazonMWSLinkedService, SapHanaLinkedService, + SapBWLinkedService, SftpServerLinkedService, FtpServerLinkedService, + HttpLinkedService, AzureSearchLinkedService, CustomDataSourceLinkedService, + AmazonRedshiftLinkedService, AmazonS3LinkedService, + RestServiceLinkedService, SapOpenHubLinkedService, SapEccLinkedService, + SapCloudForCustomerLinkedService, SalesforceServiceCloudLinkedService, + SalesforceLinkedService, Office365LinkedService, AzureBlobFSLinkedService, + AzureDataLakeStoreLinkedService, CosmosDbMongoDbApiLinkedService, + MongoDbV2LinkedService, MongoDbLinkedService, CassandraLinkedService, + WebLinkedService, ODataLinkedService, HdfsLinkedService, + MicrosoftAccessLinkedService, InformixLinkedService, OdbcLinkedService, + AzureMLLinkedService, TeradataLinkedService, Db2LinkedService, + SybaseLinkedService, PostgreSqlLinkedService, MySqlLinkedService, + AzureMySqlLinkedService, OracleLinkedService, FileServerLinkedService, + HDInsightLinkedService, CommonDataServiceForAppsLinkedService, + DynamicsCrmLinkedService, DynamicsLinkedService, CosmosDbLinkedService, + AzureKeyVaultLinkedService, AzureBatchLinkedService, + AzureSqlMILinkedService, AzureSqlDatabaseLinkedService, + SqlServerLinkedService, AzureSqlDWLinkedService, + AzureTableStorageLinkedService, AzureBlobStorageLinkedService, + AzureStorageLinkedService + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server.
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 
'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: + super(LinkedService, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.connect_via = connect_via + self.description = description + self.parameters = parameters + self.annotations = annotations + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference.py new file mode 100644 index 000000000000..28ffeda7d01a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class LinkedServiceReference(Model): + """Linked service reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Linked service reference type. Default value: + "LinkedServiceReference" . + :vartype type: str + :param reference_name: Required. Reference LinkedService name. + :type reference_name: str + :param parameters: Arguments for LinkedService. 
+ :type parameters: dict[str, object] + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + type = "LinkedServiceReference" + + def __init__(self, **kwargs): + super(LinkedServiceReference, self).__init__(**kwargs) + self.reference_name = kwargs.get('reference_name', None) + self.parameters = kwargs.get('parameters', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference_py3.py new file mode 100644 index 000000000000..b6238130bdb6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference_py3.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class LinkedServiceReference(Model): + """Linked service reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Linked service reference type. Default value: + "LinkedServiceReference" . + :vartype type: str + :param reference_name: Required. Reference LinkedService name. + :type reference_name: str + :param parameters: Arguments for LinkedService. + :type parameters: dict[str, object] + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + type = "LinkedServiceReference" + + def __init__(self, *, reference_name: str, parameters=None, **kwargs) -> None: + super(LinkedServiceReference, self).__init__(**kwargs) + self.reference_name = reference_name + self.parameters = parameters diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource.py new file mode 100644 index 000000000000..75828718f589 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
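A quick orientation before the next file: msrest resolves the polymorphic `type` discriminator through the `_subtype_map` defined above, and `LinkedServiceReference` is the by-name pointer that other resources use. A minimal sketch, not part of this diff; the payload and the reference name are made up:

    from azure.mgmt.datafactory.models import LinkedService, LinkedServiceReference

    # Deserializing through the base class consults _subtype_map, so the
    # 'type' discriminator selects the concrete subclass.
    ls = LinkedService.deserialize(
        {'type': 'Magento', 'typeProperties': {'host': 'my.magento.host'}})
    assert type(ls).__name__ == 'MagentoLinkedService'

    # Point at an existing linked service by name; the constant 'type'
    # ("LinkedServiceReference") is filled in by the model itself.
    ref = LinkedServiceReference(reference_name='MyStorageLinkedService')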
+# -------------------------------------------------------------------------- + +from .sub_resource import SubResource + + +class LinkedServiceResource(SubResource): + """Linked service resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Properties of linked service. + :type properties: ~azure.mgmt.datafactory.models.LinkedService + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'LinkedService'}, + } + + def __init__(self, **kwargs): + super(LinkedServiceResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource_paged.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource_paged.py new file mode 100644 index 000000000000..af0a57170e56 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.paging import Paged + + +class LinkedServiceResourcePaged(Paged): + """ + A paging container for iterating over a list of :class:`LinkedServiceResource <azure.mgmt.datafactory.models.LinkedServiceResource>` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[LinkedServiceResource]'} + } + + def __init__(self, *args, **kwargs): + + super(LinkedServiceResourcePaged, self).__init__(*args, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource_py3.py new file mode 100644 index 000000000000..1fa964b51f57 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource_py3.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated.
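For context on how this container surfaces in practice: iterating a Paged subclass follows `nextLink` lazily, one page at a time. A sketch against the operations layer; credentials, resource names, and the subscription id below are placeholders, and the client setup is not part of this patch:

    from azure.common.credentials import ServicePrincipalCredentials
    from azure.mgmt.datafactory import DataFactoryManagementClient

    creds = ServicePrincipalCredentials(client_id='...', secret='...', tenant='...')
    client = DataFactoryManagementClient(creds, '<subscription-id>')

    # list_by_factory returns a LinkedServiceResourcePaged; each item is a
    # LinkedServiceResource whose .properties is a LinkedService subclass.
    for res in client.linked_services.list_by_factory('my-rg', 'my-factory'):
        print(res.name, res.properties.type)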
+# -------------------------------------------------------------------------- + +from .sub_resource_py3 import SubResource + + +class LinkedServiceResource(SubResource): + """Linked service resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Properties of linked service. + :type properties: ~azure.mgmt.datafactory.models.LinkedService + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'LinkedService'}, + } + + def __init__(self, *, properties, **kwargs) -> None: + super(LinkedServiceResource, self).__init__(**kwargs) + self.properties = properties diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings.py new file mode 100644 index 000000000000..81b4e7ca619e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class LogStorageSettings(Model): + """Log storage settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param linked_service_name: Required. Log storage linked service + reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param path: The path to storage for storing detailed logs of activity + execution. Type: string (or Expression with resultType string). 
+ :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'path': {'key': 'path', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(LogStorageSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.linked_service_name = kwargs.get('linked_service_name', None) + self.path = kwargs.get('path', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings_py3.py new file mode 100644 index 000000000000..4850b7adacdf --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class LogStorageSettings(Model): + """Log storage settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param linked_service_name: Required. Log storage linked service + reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param path: The path to storage for storing detailed logs of activity + execution. Type: string (or Expression with resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'path': {'key': 'path', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, path=None, **kwargs) -> None: + super(LogStorageSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.linked_service_name = linked_service_name + self.path = path diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity.py new file mode 100644 index 000000000000..62584b2f704a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity.py @@ -0,0 +1,74 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
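The two files above define LogStorageSettings symmetrically for Python 2 and 3. Construction is straightforward; `path` is typed `object` so it can carry either a literal string or a Data Factory expression. A minimal sketch; the reference name and path are placeholders:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, LogStorageSettings)

    log_settings = LogStorageSettings(
        linked_service_name=LinkedServiceReference(reference_name='MyBlobStorage'),
        # A plain string, or an expression object such as
        # {'value': '@pipeline().RunId', 'type': 'Expression'}.
        path='logs/copy-activity')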
+# -------------------------------------------------------------------------- + +from .execution_activity import ExecutionActivity + + +class LookupActivity(ExecutionActivity): + """Lookup activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param source: Required. Dataset-specific source properties, same as copy + activity source. + :type source: ~azure.mgmt.datafactory.models.CopySource + :param dataset: Required. Lookup activity dataset reference. + :type dataset: ~azure.mgmt.datafactory.models.DatasetReference + :param first_row_only: Whether to return first row or all rows. Default + value is true. Type: boolean (or Expression with resultType boolean). + :type first_row_only: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'source': {'required': True}, + 'dataset': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + 'first_row_only': {'key': 'typeProperties.firstRowOnly', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(LookupActivity, self).__init__(**kwargs) + self.source = kwargs.get('source', None) + self.dataset = kwargs.get('dataset', None) + self.first_row_only = kwargs.get('first_row_only', None) + self.type = 'Lookup' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity_py3.py new file mode 100644 index 000000000000..41061675ebbe --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity_py3.py @@ -0,0 +1,74 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
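LookupActivity, defined above and mirrored for Python 3 next, pairs a copy-activity-style source with a dataset reference and returns either the first row or the whole row set. A sketch using another source model from this same patch; all names are placeholders:

    from azure.mgmt.datafactory.models import (
        AzureMySqlSource, DatasetReference, LookupActivity)

    lookup = LookupActivity(
        name='LookupWatermark',
        source=AzureMySqlSource(
            query='SELECT MAX(updated_at) AS watermark FROM orders'),
        dataset=DatasetReference(reference_name='MyMySqlDataset'),
        # Defaults to true; set False to return every row.
        first_row_only=True)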
+# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class LookupActivity(ExecutionActivity): + """Lookup activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param source: Required. Dataset-specific source properties, same as copy + activity source. + :type source: ~azure.mgmt.datafactory.models.CopySource + :param dataset: Required. Lookup activity dataset reference. + :type dataset: ~azure.mgmt.datafactory.models.DatasetReference + :param first_row_only: Whether to return first row or all rows. Default + value is true. Type: boolean (or Expression with resultType boolean). + :type first_row_only: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'source': {'required': True}, + 'dataset': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + 'first_row_only': {'key': 'typeProperties.firstRowOnly', 'type': 'object'}, + } + + def __init__(self, *, name: str, source, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, first_row_only=None, **kwargs) -> None: + super(LookupActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.source = source + self.dataset = dataset + self.first_row_only = first_row_only + self.type = 'Lookup' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service.py new file mode 100644 index 000000000000..9d65437b5daa --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) 
Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class MagentoLinkedService(LinkedService): + """Magento server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The URL of the Magento instance. (i.e. + 192.168.222.110/magento3) + :type host: object + :param access_token: The access token from Magento. + :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MagentoLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.access_token = kwargs.get('access_token', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Magento' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service_py3.py new file mode 100644 index 000000000000..74de1573118b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service_py3.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class MagentoLinkedService(LinkedService): + """Magento server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The URL of the Magento instance. (i.e. + 192.168.222.110/magento3) + :type host: object + :param access_token: The access token from Magento. 
+ :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(MagentoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.access_token = access_token + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Magento' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset.py new file mode 100644 index 000000000000..ad540093ca55 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
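A construction sketch for the Magento linked service defined above; the host and token values are placeholders, and SecureString is one of the SecretBase implementations in this package:

    from azure.mgmt.datafactory.models import MagentoLinkedService, SecureString

    magento_ls = MagentoLinkedService(
        host='192.168.222.110/magento3',             # required
        access_token=SecureString(value='<token>'),  # or AzureKeyVaultSecretReference
        use_encrypted_endpoints=True)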
+# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class MagentoObjectDataset(Dataset): + """Magento server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MagentoObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'MagentoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset_py3.py new file mode 100644 index 000000000000..481732bb688a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class MagentoObjectDataset(Dataset): + """Magento server dataset. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(MagentoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'MagentoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source.py new file mode 100644 index 000000000000..df49fe63a544 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
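And the matching dataset, binding a table name to that linked service. Sketch only; the reference and table names are placeholders:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, MagentoObjectDataset)

    magento_ds = MagentoObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name='MyMagento'),
        table_name='sales_order')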
+# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class MagentoSource(CopySource): + """A copy activity Magento server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MagentoSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'MagentoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source_py3.py new file mode 100644 index 000000000000..15efcc12a054 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class MagentoSource(CopySource): + """A copy activity Magento server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). 
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param query: A query to retrieve data from source. Type: string (or
+ Expression with resultType string).
+ :type query: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'query': {'key': 'query', 'type': 'object'},
+ }
+
+ def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
+ super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.query = query
+ self.type = 'MagentoSource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime.py
new file mode 100644
index 000000000000..9cbc9e94e7c3
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime.py
@@ -0,0 +1,65 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .integration_runtime import IntegrationRuntime
+
+
+class ManagedIntegrationRuntime(IntegrationRuntime):
+ """Managed integration runtime, including managed elastic and managed
+ dedicated integration runtimes.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param description: Integration runtime description.
+ :type description: str
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :ivar state: Integration runtime state, only valid for managed dedicated
+ integration runtime. Possible values include: 'Initial', 'Stopped',
+ 'Started', 'Starting', 'Stopping', 'NeedRegistration', 'Online',
+ 'Limited', 'Offline', 'AccessDenied'
+ :vartype state: str or
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeState
+ :param compute_properties: The compute resource for managed integration
+ runtime.
+ :type compute_properties:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeComputeProperties
+ :param ssis_properties: SSIS properties for managed integration runtime.
+ :type ssis_properties:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisProperties
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'state': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'state': {'key': 'state', 'type': 'str'},
+ 'compute_properties': {'key': 'typeProperties.computeProperties', 'type': 'IntegrationRuntimeComputeProperties'},
+ 'ssis_properties': {'key': 'typeProperties.ssisProperties', 'type': 'IntegrationRuntimeSsisProperties'},
+ }
+
+ def __init__(self, **kwargs):
+ super(ManagedIntegrationRuntime, self).__init__(**kwargs)
+ self.state = None
+ self.compute_properties = kwargs.get('compute_properties', None)
+ self.ssis_properties = kwargs.get('ssis_properties', None)
+ self.type = 'Managed'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error.py
new file mode 100644
index 000000000000..c70323697fdf
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error.py
@@ -0,0 +1,55 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class ManagedIntegrationRuntimeError(Model):
+ """Error definition for managed integration runtime.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :ivar time: The time when the error occurred.
+ :vartype time: datetime
+ :ivar code: Error code.
+ :vartype code: str
+ :ivar parameters: Managed integration runtime error parameters.
+ :vartype parameters: list[str]
+ :ivar message: Error message.
+ :vartype message: str
+ """
+
+ _validation = {
+ 'time': {'readonly': True},
+ 'code': {'readonly': True},
+ 'parameters': {'readonly': True},
+ 'message': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'time': {'key': 'time', 'type': 'iso-8601'},
+ 'code': {'key': 'code', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '[str]'},
+ 'message': {'key': 'message', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(ManagedIntegrationRuntimeError, self).__init__(**kwargs)
+ self.additional_properties = kwargs.get('additional_properties', None)
+ self.time = None
+ self.code = None
+ self.parameters = None
+ self.message = None
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error_py3.py
new file mode 100644
index 000000000000..1668c5196537
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error_py3.py
@@ -0,0 +1,55 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class ManagedIntegrationRuntimeError(Model):
+ """Error definition for managed integration runtime.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :ivar time: The time when the error occurred.
+ :vartype time: datetime
+ :ivar code: Error code.
+ :vartype code: str
+ :ivar parameters: Managed integration runtime error parameters.
+ :vartype parameters: list[str]
+ :ivar message: Error message.
+ :vartype message: str
+ """
+
+ _validation = {
+ 'time': {'readonly': True},
+ 'code': {'readonly': True},
+ 'parameters': {'readonly': True},
+ 'message': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'time': {'key': 'time', 'type': 'iso-8601'},
+ 'code': {'key': 'code', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '[str]'},
+ 'message': {'key': 'message', 'type': 'str'},
+ }
+
+ def __init__(self, *, additional_properties=None, **kwargs) -> None:
+ super(ManagedIntegrationRuntimeError, self).__init__(**kwargs)
+ self.additional_properties = additional_properties
+ self.time = None
+ self.code = None
+ self.parameters = None
+ self.message = None
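Both flavors of ManagedIntegrationRuntimeError are read-only on the wire: every field except additional_properties is populated by the service during deserialization. A minimal sketch of how such an object is hydrated through msrest; the payload below is invented for illustration:

    from msrest import Deserializer

    from azure.mgmt.datafactory import models

    client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
    deserialize = Deserializer(client_models)

    payload = {
        'time': '2019-06-07T08:58:31Z',   # invented sample values
        'code': 'NodeUnavailable',
        'parameters': ['node-1'],
        'message': 'The node went offline.',
    }
    error = deserialize('ManagedIntegrationRuntimeError', payload)
    print(error.code, error.message)  # read-only attributes filled by the server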
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node.py
new file mode 100644
index 000000000000..e9c0169cf6c5
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node.py
@@ -0,0 +1,52 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class ManagedIntegrationRuntimeNode(Model):
+ """Properties of integration runtime node.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :ivar node_id: The managed integration runtime node id.
+ :vartype node_id: str
+ :ivar status: The managed integration runtime node status. Possible values
+ include: 'Starting', 'Available', 'Recycling', 'Unavailable'
+ :vartype status: str or
+ ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNodeStatus
+ :param errors: The errors that occurred on this integration runtime node.
+ :type errors:
+ list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError]
+ """
+
+ _validation = {
+ 'node_id': {'readonly': True},
+ 'status': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'node_id': {'key': 'nodeId', 'type': 'str'},
+ 'status': {'key': 'status', 'type': 'str'},
+ 'errors': {'key': 'errors', 'type': '[ManagedIntegrationRuntimeError]'},
+ }
+
+ def __init__(self, **kwargs):
+ super(ManagedIntegrationRuntimeNode, self).__init__(**kwargs)
+ self.additional_properties = kwargs.get('additional_properties', None)
+ self.node_id = None
+ self.status = None
+ self.errors = kwargs.get('errors', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node_py3.py
new file mode 100644
index 000000000000..0e8104d0de05
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node_py3.py
@@ -0,0 +1,52 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class ManagedIntegrationRuntimeNode(Model):
+ """Properties of integration runtime node.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :ivar node_id: The managed integration runtime node id.
+ :vartype node_id: str
+ :ivar status: The managed integration runtime node status. Possible values
+ include: 'Starting', 'Available', 'Recycling', 'Unavailable'
+ :vartype status: str or
+ ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNodeStatus
+ :param errors: The errors that occurred on this integration runtime node.
+ :type errors:
+ list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError]
+ """
+
+ _validation = {
+ 'node_id': {'readonly': True},
+ 'status': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'node_id': {'key': 'nodeId', 'type': 'str'},
+ 'status': {'key': 'status', 'type': 'str'},
+ 'errors': {'key': 'errors', 'type': '[ManagedIntegrationRuntimeError]'},
+ }
+
+ def __init__(self, *, additional_properties=None, errors=None, **kwargs) -> None:
+ super(ManagedIntegrationRuntimeNode, self).__init__(**kwargs)
+ self.additional_properties = additional_properties
+ self.node_id = None
+ self.status = None
+ self.errors = errors
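ManagedIntegrationRuntimeNode is likewise server-populated, and in practice the nodes arrive embedded in a status response rather than being constructed by the caller. A sketch of walking them, assuming `status` is a ManagedIntegrationRuntimeStatus fetched as in the polling example further below:

    # `status.nodes` may be None until the runtime has provisioned nodes.
    for node in status.nodes or []:
        print(node.node_id, node.status)
        for error in node.errors or []:
            print('  {}: {}'.format(error.code, error.message))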
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result.py
new file mode 100644
index 000000000000..2329f7a2ba36
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result.py
@@ -0,0 +1,65 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class ManagedIntegrationRuntimeOperationResult(Model):
+ """Properties of managed integration runtime operation result.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :ivar type: The operation type. Could be start or stop.
+ :vartype type: str
+ :ivar start_time: The start time of the operation.
+ :vartype start_time: datetime
+ :ivar result: The operation result.
+ :vartype result: str
+ :ivar error_code: The error code.
+ :vartype error_code: str
+ :ivar parameters: Managed integration runtime error parameters.
+ :vartype parameters: list[str]
+ :ivar activity_id: The activity id for the operation request.
+ :vartype activity_id: str
+ """
+
+ _validation = {
+ 'type': {'readonly': True},
+ 'start_time': {'readonly': True},
+ 'result': {'readonly': True},
+ 'error_code': {'readonly': True},
+ 'parameters': {'readonly': True},
+ 'activity_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'start_time': {'key': 'startTime', 'type': 'iso-8601'},
+ 'result': {'key': 'result', 'type': 'str'},
+ 'error_code': {'key': 'errorCode', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '[str]'},
+ 'activity_id': {'key': 'activityId', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(ManagedIntegrationRuntimeOperationResult, self).__init__(**kwargs)
+ self.additional_properties = kwargs.get('additional_properties', None)
+ self.type = None
+ self.start_time = None
+ self.result = None
+ self.error_code = None
+ self.parameters = None
+ self.activity_id = None
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result_py3.py
new file mode 100644
index 000000000000..58a80c0e600e
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result_py3.py
@@ -0,0 +1,65 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class ManagedIntegrationRuntimeOperationResult(Model):
+ """Properties of managed integration runtime operation result.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :ivar type: The operation type. Could be start or stop.
+ :vartype type: str
+ :ivar start_time: The start time of the operation.
+ :vartype start_time: datetime
+ :ivar result: The operation result.
+ :vartype result: str
+ :ivar error_code: The error code.
+ :vartype error_code: str
+ :ivar parameters: Managed integration runtime error parameters.
+ :vartype parameters: list[str]
+ :ivar activity_id: The activity id for the operation request.
+ :vartype activity_id: str
+ """
+
+ _validation = {
+ 'type': {'readonly': True},
+ 'start_time': {'readonly': True},
+ 'result': {'readonly': True},
+ 'error_code': {'readonly': True},
+ 'parameters': {'readonly': True},
+ 'activity_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'start_time': {'key': 'startTime', 'type': 'iso-8601'},
+ 'result': {'key': 'result', 'type': 'str'},
+ 'error_code': {'key': 'errorCode', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '[str]'},
+ 'activity_id': {'key': 'activityId', 'type': 'str'},
+ }
+
+ def __init__(self, *, additional_properties=None, **kwargs) -> None:
+ super(ManagedIntegrationRuntimeOperationResult, self).__init__(**kwargs)
+ self.additional_properties = additional_properties
+ self.type = None
+ self.start_time = None
+ self.result = None
+ self.error_code = None
+ self.parameters = None
+ self.activity_id = None
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_py3.py
new file mode 100644
index 000000000000..0e71d8b09f4e
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_py3.py
@@ -0,0 +1,65 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .integration_runtime_py3 import IntegrationRuntime
+
+
+class ManagedIntegrationRuntime(IntegrationRuntime):
+ """Managed integration runtime, including managed elastic and managed
+ dedicated integration runtimes.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param description: Integration runtime description.
+ :type description: str
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :ivar state: Integration runtime state, only valid for managed dedicated
+ integration runtime. Possible values include: 'Initial', 'Stopped',
+ 'Started', 'Starting', 'Stopping', 'NeedRegistration', 'Online',
+ 'Limited', 'Offline', 'AccessDenied'
+ :vartype state: str or
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeState
+ :param compute_properties: The compute resource for managed integration
+ runtime.
+ :type compute_properties:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeComputeProperties
+ :param ssis_properties: SSIS properties for managed integration runtime.
+ :type ssis_properties:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisProperties
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'state': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'state': {'key': 'state', 'type': 'str'},
+ 'compute_properties': {'key': 'typeProperties.computeProperties', 'type': 'IntegrationRuntimeComputeProperties'},
+ 'ssis_properties': {'key': 'typeProperties.ssisProperties', 'type': 'IntegrationRuntimeSsisProperties'},
+ }
+
+ def __init__(self, *, additional_properties=None, description: str=None, compute_properties=None, ssis_properties=None, **kwargs) -> None:
+ super(ManagedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description, **kwargs)
+ self.state = None
+ self.compute_properties = compute_properties
+ self.ssis_properties = ssis_properties
+ self.type = 'Managed'
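With both the Python 2 and Python 3 variants in place, a managed runtime is built client-side by populating only the writable properties; `state` stays None until the service reports it. A sketch using the compute and vNet models from this package (the property names follow the generated models but are not verified here, and the resource IDs are placeholders); the resulting object is then pushed with client.integration_runtimes.create_or_update:

    from azure.mgmt.datafactory.models import (
        IntegrationRuntimeComputeProperties,
        IntegrationRuntimeVNetProperties,
        ManagedIntegrationRuntime,
    )

    compute = IntegrationRuntimeComputeProperties(
        location='East US',
        node_size='Standard_D2_v3',
        number_of_nodes=1,
        v_net_properties=IntegrationRuntimeVNetProperties(
            v_net_id='<vnet-resource-id>',   # placeholder
            subnet='<subnet-name>'),         # placeholder
    )
    runtime = ManagedIntegrationRuntime(compute_properties=compute)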
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status.py
new file mode 100644
index 000000000000..17d21775f09f
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status.py
@@ -0,0 +1,78 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .integration_runtime_status import IntegrationRuntimeStatus
+
+
+class ManagedIntegrationRuntimeStatus(IntegrationRuntimeStatus):
+ """Managed integration runtime status.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :ivar data_factory_name: The data factory name which the integration
+ runtime belongs to.
+ :vartype data_factory_name: str
+ :ivar state: The state of integration runtime. Possible values include:
+ 'Initial', 'Stopped', 'Started', 'Starting', 'Stopping',
+ 'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied'
+ :vartype state: str or
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeState
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :ivar create_time: The time at which the integration runtime was created,
+ in ISO8601 format.
+ :vartype create_time: datetime
+ :ivar nodes: The list of nodes for managed integration runtime.
+ :vartype nodes:
+ list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNode]
+ :ivar other_errors: The errors that occurred on this integration runtime.
+ :vartype other_errors:
+ list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError]
+ :ivar last_operation: The last operation result that occurred on this
+ integration runtime.
+ :vartype last_operation:
+ ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeOperationResult
+ """
+
+ _validation = {
+ 'data_factory_name': {'readonly': True},
+ 'state': {'readonly': True},
+ 'type': {'required': True},
+ 'create_time': {'readonly': True},
+ 'nodes': {'readonly': True},
+ 'other_errors': {'readonly': True},
+ 'last_operation': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'},
+ 'state': {'key': 'state', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'create_time': {'key': 'typeProperties.createTime', 'type': 'iso-8601'},
+ 'nodes': {'key': 'typeProperties.nodes', 'type': '[ManagedIntegrationRuntimeNode]'},
+ 'other_errors': {'key': 'typeProperties.otherErrors', 'type': '[ManagedIntegrationRuntimeError]'},
+ 'last_operation': {'key': 'typeProperties.lastOperation', 'type': 'ManagedIntegrationRuntimeOperationResult'},
+ }
+
+ def __init__(self, **kwargs):
+ super(ManagedIntegrationRuntimeStatus, self).__init__(**kwargs)
+ self.create_time = None
+ self.nodes = None
+ self.other_errors = None
+ self.last_operation = None
+ self.type = 'Managed'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status_py3.py
new file mode 100644
index 000000000000..03d9451045bd
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status_py3.py
@@ -0,0 +1,78 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .integration_runtime_status_py3 import IntegrationRuntimeStatus
+
+
+class ManagedIntegrationRuntimeStatus(IntegrationRuntimeStatus):
+ """Managed integration runtime status.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :ivar data_factory_name: The data factory name which the integration
+ runtime belongs to.
+ :vartype data_factory_name: str
+ :ivar state: The state of integration runtime. Possible values include:
+ 'Initial', 'Stopped', 'Started', 'Starting', 'Stopping',
+ 'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied'
+ :vartype state: str or
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeState
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :ivar create_time: The time at which the integration runtime was created,
+ in ISO8601 format.
+ :vartype create_time: datetime
+ :ivar nodes: The list of nodes for managed integration runtime.
+ :vartype nodes:
+ list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNode]
+ :ivar other_errors: The errors that occurred on this integration runtime.
+ :vartype other_errors:
+ list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError]
+ :ivar last_operation: The last operation result that occurred on this
+ integration runtime.
+ :vartype last_operation:
+ ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeOperationResult
+ """
+
+ _validation = {
+ 'data_factory_name': {'readonly': True},
+ 'state': {'readonly': True},
+ 'type': {'required': True},
+ 'create_time': {'readonly': True},
+ 'nodes': {'readonly': True},
+ 'other_errors': {'readonly': True},
+ 'last_operation': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'},
+ 'state': {'key': 'state', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'create_time': {'key': 'typeProperties.createTime', 'type': 'iso-8601'},
+ 'nodes': {'key': 'typeProperties.nodes', 'type': '[ManagedIntegrationRuntimeNode]'},
+ 'other_errors': {'key': 'typeProperties.otherErrors', 'type': '[ManagedIntegrationRuntimeError]'},
+ 'last_operation': {'key': 'typeProperties.lastOperation', 'type': 'ManagedIntegrationRuntimeOperationResult'},
+ }
+
+ def __init__(self, *, additional_properties=None, **kwargs) -> None:
+ super(ManagedIntegrationRuntimeStatus, self).__init__(additional_properties=additional_properties, **kwargs)
+ self.create_time = None
+ self.nodes = None
+ self.other_errors = None
+ self.last_operation = None
+ self.type = 'Managed'
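Everything in the status pair is read-only, so the natural consumer flow is poll-and-inspect. A sketch against the generated client; credentials and resource names are placeholders, and `get_status` is the generated operation that returns the wrapped status:

    from azure.common.credentials import ServicePrincipalCredentials

    from azure.mgmt.datafactory import DataFactoryManagementClient

    credentials = ServicePrincipalCredentials(
        client_id='<app-id>', secret='<app-secret>', tenant='<tenant-id>')
    client = DataFactoryManagementClient(credentials, '<subscription-id>')

    response = client.integration_runtimes.get_status(
        'my-resource-group', 'my-factory', 'my-managed-ir')
    status = response.properties  # ManagedIntegrationRuntimeStatus when type == 'Managed'
    print(status.state, status.create_time)
    if status.last_operation:
        print(status.last_operation.type, status.last_operation.result)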
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service.py
new file mode 100644
index 000000000000..3bbe048d4877
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service.py
@@ -0,0 +1,69 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class MariaDBLinkedService(LinkedService):
+ """MariaDB server linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param connection_string: An ODBC connection string. Type: string,
+ SecureString or AzureKeyVaultSecretReference.
+ :type connection_string: object
+ :param pwd: The Azure key vault secret reference of password in connection
+ string.
+ :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+ 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(MariaDBLinkedService, self).__init__(**kwargs)
+ self.connection_string = kwargs.get('connection_string', None)
+ self.pwd = kwargs.get('pwd', None)
+ self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.type = 'MariaDB'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service_py3.py
new file mode 100644
index 000000000000..475284d56038
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service_py3.py
@@ -0,0 +1,69 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class MariaDBLinkedService(LinkedService):
+ """MariaDB server linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param connection_string: An ODBC connection string. Type: string,
+ SecureString or AzureKeyVaultSecretReference.
+ :type connection_string: object
+ :param pwd: The Azure key vault secret reference of password in connection
+ string.
+ :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+ 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None:
+ super(MariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+ self.connection_string = connection_string
+ self.pwd = pwd
+ self.encrypted_credential = encrypted_credential
+ self.type = 'MariaDB'
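The MariaDB linked service keeps the password out of the connection string via a Key Vault reference. A sketch of registering one, reusing `client` from the status example; the names are placeholders, and following the quickstart pattern the linked-service model is passed to create_or_update directly:

    from azure.mgmt.datafactory.models import (
        AzureKeyVaultSecretReference,
        LinkedServiceReference,
        MariaDBLinkedService,
    )

    mariadb_ls = MariaDBLinkedService(
        connection_string='Server=mydb;Port=3306;Database=orders;UID=loader;',
        pwd=AzureKeyVaultSecretReference(
            store=LinkedServiceReference(reference_name='MyKeyVaultLS'),
            secret_name='mariadb-password'),  # secret resolved at runtime
    )
    client.linked_services.create_or_update(
        'my-resource-group', 'my-factory', 'MariaDbOrders', mariadb_ls)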
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source.py
new file mode 100644
index 000000000000..a744c1c5ff8f
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source import CopySource
+
+
+class MariaDBSource(CopySource):
+ """A copy activity MariaDB server source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param query: A query to retrieve data from source. Type: string (or
+ Expression with resultType string).
+ :type query: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'query': {'key': 'query', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(MariaDBSource, self).__init__(**kwargs)
+ self.query = kwargs.get('query', None)
+ self.type = 'MariaDBSource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source_py3.py
new file mode 100644
index 000000000000..472877b8f0bb
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source_py3.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source_py3 import CopySource
+
+
+class MariaDBSource(CopySource):
+ """A copy activity MariaDB server source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param query: A query to retrieve data from source. Type: string (or
+ Expression with resultType string).
+ :type query: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'query': {'key': 'query', 'type': 'object'},
+ }
+
+ def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
+ super(MariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.query = query
+ self.type = 'MariaDBSource'
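MariaDBSource plugs into a copy activity as the `source` half, with the query pushed down to the server. A sketch wiring it to a blob sink; the dataset names are placeholders that must already exist in the factory:

    from azure.mgmt.datafactory.models import (
        BlobSink,
        CopyActivity,
        DatasetReference,
        MariaDBSource,
        PipelineResource,
    )

    copy_orders = CopyActivity(
        name='CopyOrdersToBlob',
        inputs=[DatasetReference(reference_name='MariaDbOrdersTable')],
        outputs=[DatasetReference(reference_name='OrdersBlob')],
        source=MariaDBSource(query='SELECT * FROM orders'),
        sink=BlobSink(),
    )
    client.pipelines.create_or_update(
        'my-resource-group', 'my-factory', 'OrdersPipeline',
        PipelineResource(activities=[copy_orders]))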
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset.py
new file mode 100644
index 000000000000..66dc9c8ea9b7
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset import Dataset
+
+
+class MariaDBTableDataset(Dataset):
+ """MariaDB server dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset. Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param table_name: The table name. Type: string (or Expression with
+ resultType string).
+ :type table_name: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(MariaDBTableDataset, self).__init__(**kwargs)
+ self.table_name = kwargs.get('table_name', None)
+ self.type = 'MariaDBTable'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset_py3.py
new file mode 100644
index 000000000000..ac3c8cf2ea72
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset_py3.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_py3 import Dataset
+
+
+class MariaDBTableDataset(Dataset):
+ """MariaDB server dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset. Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param table_name: The table name. Type: string (or Expression with
+ resultType string).
+ :type table_name: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+ }
+
+ def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None:
+ super(MariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+ self.table_name = table_name
+ self.type = 'MariaDBTable'
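This dataset pair is what a DatasetReference such as 'MariaDbOrdersTable' resolves to. A sketch of creating it; the linked-service name matches the earlier example, and `table_name` may also be an ADF expression object rather than a literal string:

    from azure.mgmt.datafactory.models import LinkedServiceReference, MariaDBTableDataset

    orders_table = MariaDBTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='MariaDbOrders'),
        table_name='orders',
    )
    client.datasets.create_or_update(
        'my-resource-group', 'my-factory', 'MariaDbOrdersTable', orders_table)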
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service.py
new file mode 100644
index 000000000000..2a9e76446122
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service.py
@@ -0,0 +1,90 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class MarketoLinkedService(LinkedService):
+ """Marketo server linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param endpoint: Required. The endpoint of the Marketo server. (e.g.
+ 123-ABC-321.mktorest.com)
+ :type endpoint: object
+ :param client_id: Required. The client Id of your Marketo service.
+ :type client_id: object
+ :param client_secret: The client secret of your Marketo service.
+ :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
+ :param use_encrypted_endpoints: Specifies whether the data source
+ endpoints are encrypted using HTTPS. The default value is true.
+ :type use_encrypted_endpoints: object
+ :param use_host_verification: Specifies whether to require the host name
+ in the server's certificate to match the host name of the server when
+ connecting over SSL. The default value is true.
+ :type use_host_verification: object
+ :param use_peer_verification: Specifies whether to verify the identity of
+ the server when connecting over SSL. The default value is true.
+ :type use_peer_verification: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'endpoint': {'required': True},
+ 'client_id': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'},
+ 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'},
+ 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'},
+ 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
+ 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
+ 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(MarketoLinkedService, self).__init__(**kwargs)
+ self.endpoint = kwargs.get('endpoint', None)
+ self.client_id = kwargs.get('client_id', None)
+ self.client_secret = kwargs.get('client_secret', None)
+ self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None)
+ self.use_host_verification = kwargs.get('use_host_verification', None)
+ self.use_peer_verification = kwargs.get('use_peer_verification', None)
+ self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.type = 'Marketo'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service_py3.py
new file mode 100644
index 000000000000..dc326f24acd5
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service_py3.py
@@ -0,0 +1,90 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class MarketoLinkedService(LinkedService):
+ """Marketo server linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param endpoint: Required. The endpoint of the Marketo server. (e.g.
+ 123-ABC-321.mktorest.com)
+ :type endpoint: object
+ :param client_id: Required. The client Id of your Marketo service.
+ :type client_id: object
+ :param client_secret: The client secret of your Marketo service.
+ :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
+ :param use_encrypted_endpoints: Specifies whether the data source
+ endpoints are encrypted using HTTPS. The default value is true.
+ :type use_encrypted_endpoints: object
+ :param use_host_verification: Specifies whether to require the host name
+ in the server's certificate to match the host name of the server when
+ connecting over SSL. The default value is true.
+ :type use_host_verification: object
+ :param use_peer_verification: Specifies whether to verify the identity of
+ the server when connecting over SSL. The default value is true.
+ :type use_peer_verification: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'endpoint': {'required': True},
+ 'client_id': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'},
+ 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'},
+ 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'},
+ 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
+ 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
+ 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, *, endpoint, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None:
+ super(MarketoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+ self.endpoint = endpoint
+ self.client_id = client_id
+ self.client_secret = client_secret
+ self.use_encrypted_endpoints = use_encrypted_endpoints
+ self.use_host_verification = use_host_verification
+ self.use_peer_verification = use_peer_verification
+ self.encrypted_credential = encrypted_credential
+ self.type = 'Marketo'
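Marketo follows the same linked-service pattern, with the secret wrapped in SecureString so it is write-only on the wire. A sketch; the endpoint and credentials are placeholders:

    from azure.mgmt.datafactory.models import MarketoLinkedService, SecureString

    marketo_ls = MarketoLinkedService(
        endpoint='123-ABC-321.mktorest.com',   # placeholder endpoint
        client_id='<marketo-client-id>',
        client_secret=SecureString(value='<marketo-client-secret>'),
    )
    client.linked_services.create_or_update(
        'my-resource-group', 'my-factory', 'MarketoCrm', marketo_ls)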
Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MarketoObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'MarketoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset_py3.py new file mode 100644 index 000000000000..7179d5af53dd --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class MarketoObjectDataset(Dataset): + """Marketo server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(MarketoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'MarketoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source.py new file mode 100644 index 000000000000..6d2061ef0dee --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class MarketoSource(CopySource): + """A copy activity Marketo server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
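A usage sketch for the MarketoObjectDataset above: only linked_service_name is required, supplied as a LinkedServiceReference by name. Reference and table names here are placeholders:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, MarketoObjectDataset)

    marketo_ds = MarketoObjectDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='MarketoLinkedService'),  # placeholder name
        table_name='Activity_Types',                 # placeholder table
    )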
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MarketoSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'MarketoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source_py3.py new file mode 100644 index 000000000000..573dc0439754 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class MarketoSource(CopySource): + """A copy activity Marketo server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'MarketoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service.py new file mode 100644 index 000000000000..b53164f6266b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class MicrosoftAccessLinkedService(LinkedService): + """Microsoft Access linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The non-access credential portion of + the connection string as well as an optional encrypted credential. Type: + string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param authentication_type: Type of authentication used to connect to the + Microsoft Access as ODBC data store. Possible values are: Anonymous and + Basic. Type: string (or Expression with resultType string). + :type authentication_type: object + :param credential: The access credential portion of the connection string + specified in driver-specific property-value format. + :type credential: ~azure.mgmt.datafactory.models.SecretBase + :param user_name: User name for Basic authentication. Type: string (or + Expression with resultType string). 
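A sketch of the MarketoSource above as it would be configured for a copy activity's source slot; the query text and retry settings are placeholder values:

    from azure.mgmt.datafactory.models import MarketoSource

    marketo_source = MarketoSource(
        query='SELECT * FROM Activity_Types',  # placeholder query
        source_retry_count=3,
        source_retry_wait='00:00:30',  # matches the documented timespan pattern
    )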
+ :type user_name: object + :param password: Password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MicrosoftAccessLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.credential = kwargs.get('credential', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'MicrosoftAccess' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service_py3.py new file mode 100644 index 000000000000..c9f79c24adf3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class MicrosoftAccessLinkedService(LinkedService): + """Microsoft Access linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
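A usage sketch for MicrosoftAccessLinkedService: per the docstring, the connection string carries the non-access-credential portion, while the credential itself can be supplied via credential or user_name/password. All values below are placeholders:

    from azure.mgmt.datafactory.models import (
        MicrosoftAccessLinkedService, SecureString)

    access_ls = MicrosoftAccessLinkedService(
        connection_string=(
            'Driver={Microsoft Access Driver (*.mdb, *.accdb)};'
            'Dbq=C:\\data\\sales.accdb;'),          # placeholder DSN-less string
        authentication_type='Basic',
        user_name='<user>',
        password=SecureString(value='<password>'),
    )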
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The non-access credential portion of + the connection string as well as an optional encrypted credential. Type: + string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param authentication_type: Type of authentication used to connect to the + Microsoft Access as ODBC data store. Possible values are: Anonymous and + Basic. Type: string (or Expression with resultType string). + :type authentication_type: object + :param credential: The access credential portion of the connection string + specified in driver-specific property-value format. + :type credential: ~azure.mgmt.datafactory.models.SecretBase + :param user_name: User name for Basic authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, credential=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(MicrosoftAccessLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.authentication_type = authentication_type + self.credential = credential + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'MicrosoftAccess' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink.py new file mode 100644 index 000000000000..53406fa25022 --- /dev/null +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class MicrosoftAccessSink(CopySink): + """A copy activity Microsoft Access sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). + :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MicrosoftAccessSink, self).__init__(**kwargs) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.type = 'MicrosoftAccessSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink_py3.py new file mode 100644 index 000000000000..700db840c03d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink_py3.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class MicrosoftAccessSink(CopySink): + """A copy activity Microsoft Access sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). + :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.pre_copy_script = pre_copy_script + self.type = 'MicrosoftAccessSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source.py new file mode 100644 index 000000000000..73cd3a64184c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class MicrosoftAccessSource(CopySource): + """A copy activity source for Microsoft Access. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MicrosoftAccessSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'MicrosoftAccessSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source_py3.py new file mode 100644 index 000000000000..1cccd82c8b19 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class MicrosoftAccessSource(CopySource): + """A copy activity source for Microsoft Access. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. 
Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MicrosoftAccessSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'MicrosoftAccessSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset.py new file mode 100644 index 000000000000..f312dae024f5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class MicrosoftAccessTableDataset(Dataset): + """The Microsoft Access table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. 
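Putting the two Microsoft Access copy classes together, a hedged sketch of a copy activity that reads with MicrosoftAccessSource and writes with MicrosoftAccessSink; the activity name, dataset reference names, and SQL text are placeholders:

    from azure.mgmt.datafactory.models import (
        CopyActivity, DatasetReference, MicrosoftAccessSink,
        MicrosoftAccessSource)

    copy_activity = CopyActivity(
        name='CopyAccessToAccess',
        inputs=[DatasetReference(reference_name='AccessInput')],
        outputs=[DatasetReference(reference_name='AccessOutput')],
        source=MicrosoftAccessSource(query='SELECT * FROM Orders'),
        sink=MicrosoftAccessSink(pre_copy_script='DELETE FROM Orders'),
    )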
+ :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Microsoft Access table name. Type: string (or + Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MicrosoftAccessTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'MicrosoftAccessTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset_py3.py new file mode 100644 index 000000000000..3fad904ef58b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class MicrosoftAccessTableDataset(Dataset): + """The Microsoft Access table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. 
If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Microsoft Access table name. Type: string (or + Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(MicrosoftAccessTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'MicrosoftAccessTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset.py new file mode 100644 index 000000000000..796c5e14eaca --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class MongoDbCollectionDataset(Dataset): + """The MongoDB database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
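A sketch for the MicrosoftAccessTableDataset above, pairing the table name with a reference to the Access linked service; both names are placeholders:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, MicrosoftAccessTableDataset)

    access_ds = MicrosoftAccessTableDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='AccessLinkedService'),  # placeholder
        table_name='Orders',                        # placeholder
    )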
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection_name: Required. The table name of the MongoDB database. + Type: string (or Expression with resultType string). + :type collection_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'collection_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MongoDbCollectionDataset, self).__init__(**kwargs) + self.collection_name = kwargs.get('collection_name', None) + self.type = 'MongoDbCollection' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset_py3.py new file mode 100644 index 000000000000..68fe2affb0e4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class MongoDbCollectionDataset(Dataset): + """The MongoDB database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
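A sketch for the MongoDbCollectionDataset above; note that collection_name is required in addition to the linked service reference (placeholder names throughout):

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, MongoDbCollectionDataset)

    mongo_ds = MongoDbCollectionDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='MongoDbLinkedService'),  # placeholder
        collection_name='orders',                    # placeholder
    )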
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection_name: Required. The table name of the MongoDB database. + Type: string (or Expression with resultType string). + :type collection_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'collection_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, collection_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(MongoDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.collection_name = collection_name + self.type = 'MongoDbCollection' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties.py new file mode 100644 index 000000000000..a2d2127d1397 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class MongoDbCursorMethodsProperties(Model): + """Cursor methods for Mongodb query. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param project: Specifies the fields to return in the documents that match + the query filter. To return all fields in the matching documents, omit + this parameter. Type: string (or Expression with resultType string). + :type project: object + :param sort: Specifies the order in which the query returns matching + documents. Type: string (or Expression with resultType string). 
:type sort: object + :param skip: Specifies how many documents are skipped and where MongoDB + begins returning results. This approach may be useful in implementing + paginated results. Type: integer (or Expression with resultType integer). + :type skip: object + :param limit: Specifies the maximum number of documents the server + returns. limit() is analogous to the LIMIT statement in a SQL database. + Type: integer (or Expression with resultType integer). + :type limit: object + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'project': {'key': 'project', 'type': 'object'}, + 'sort': {'key': 'sort', 'type': 'object'}, + 'skip': {'key': 'skip', 'type': 'object'}, + 'limit': {'key': 'limit', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MongoDbCursorMethodsProperties, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.project = kwargs.get('project', None) + self.sort = kwargs.get('sort', None) + self.skip = kwargs.get('skip', None) + self.limit = kwargs.get('limit', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties_py3.py new file mode 100644 index 000000000000..e1e3f50d1539 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties_py3.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class MongoDbCursorMethodsProperties(Model): + """Cursor methods for Mongodb query. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param project: Specifies the fields to return in the documents that match + the query filter. To return all fields in the matching documents, omit + this parameter. Type: string (or Expression with resultType string). + :type project: object + :param sort: Specifies the order in which the query returns matching + documents. Type: string (or Expression with resultType string). + :type sort: object + :param skip: Specifies how many documents are skipped and where MongoDB + begins returning results. This approach may be useful in implementing + paginated results. Type: integer (or Expression with resultType integer). + :type skip: object + :param limit: Specifies the maximum number of documents the server + returns. limit() is analogous to the LIMIT statement in a SQL database. + Type: integer (or Expression with resultType integer).
+ :type limit: object + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'project': {'key': 'project', 'type': 'object'}, + 'sort': {'key': 'sort', 'type': 'object'}, + 'skip': {'key': 'skip', 'type': 'object'}, + 'limit': {'key': 'limit', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, project=None, sort=None, skip=None, limit=None, **kwargs) -> None: + super(MongoDbCursorMethodsProperties, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.project = project + self.sort = sort + self.skip = skip + self.limit = limit diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service.py new file mode 100644 index 000000000000..76d162b0ff70 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service.py @@ -0,0 +1,109 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class MongoDbLinkedService(LinkedService): + """Linked service for MongoDb data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. The IP address or server name of the MongoDB + server. Type: string (or Expression with resultType string). + :type server: object + :param authentication_type: The authentication type to be used to connect + to the MongoDB database. Possible values include: 'Basic', 'Anonymous' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.MongoDbAuthenticationType + :param database_name: Required. The name of the MongoDB database that you + want to access. Type: string (or Expression with resultType string). + :type database_name: object + :param username: Username for authentication. Type: string (or Expression + with resultType string). + :type username: object + :param password: Password for authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param auth_source: Database to verify the username and password. Type: + string (or Expression with resultType string). + :type auth_source: object + :param port: The TCP port number that the MongoDB server uses to listen + for client connections. The default value is 27017. 
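All four cursor-method properties on the MongoDbCursorMethodsProperties model just defined are optional; a sketch with placeholder JSON expressions, as they would be passed to a Mongo-flavored copy source that exposes a cursor_methods property:

    from azure.mgmt.datafactory.models import MongoDbCursorMethodsProperties

    cursor_methods = MongoDbCursorMethodsProperties(
        project='{"name": 1, "total": 1}',  # return only these fields
        sort='{"total": -1}',               # sort descending by total
        skip=100,                           # skip the first 100 documents
        limit=50,                           # return at most 50 documents
    )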
Type: integer (or + Expression with resultType integer), minimum: 0. + :type port: object + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. Type: boolean (or + Expression with resultType boolean). + :type enable_ssl: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + Type: boolean (or Expression with resultType boolean). + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'database_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'database_name': {'key': 'typeProperties.databaseName', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'auth_source': {'key': 'typeProperties.authSource', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MongoDbLinkedService, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.database_name = kwargs.get('database_name', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.auth_source = kwargs.get('auth_source', None) + self.port = kwargs.get('port', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'MongoDb' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service_py3.py new file mode 100644 index 000000000000..95308b6ea8f0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service_py3.py @@ -0,0 +1,109 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class MongoDbLinkedService(LinkedService): + """Linked service for MongoDb data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. The IP address or server name of the MongoDB + server. Type: string (or Expression with resultType string). + :type server: object + :param authentication_type: The authentication type to be used to connect + to the MongoDB database. Possible values include: 'Basic', 'Anonymous' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.MongoDbAuthenticationType + :param database_name: Required. The name of the MongoDB database that you + want to access. Type: string (or Expression with resultType string). + :type database_name: object + :param username: Username for authentication. Type: string (or Expression + with resultType string). + :type username: object + :param password: Password for authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param auth_source: Database to verify the username and password. Type: + string (or Expression with resultType string). + :type auth_source: object + :param port: The TCP port number that the MongoDB server uses to listen + for client connections. The default value is 27017. Type: integer (or + Expression with resultType integer), minimum: 0. + :type port: object + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. Type: boolean (or + Expression with resultType boolean). + :type enable_ssl: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + Type: boolean (or Expression with resultType boolean). + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'database_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'database_name': {'key': 'typeProperties.databaseName', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'auth_source': {'key': 'typeProperties.authSource', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, server, database_name, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, username=None, password=None, auth_source=None, port=None, enable_ssl=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: + super(MongoDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.server = server + self.authentication_type = authentication_type + self.database_name = database_name + self.username = username + self.password = password + self.auth_source = auth_source + self.port = port + self.enable_ssl = enable_ssl + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.encrypted_credential = encrypted_credential + self.type = 'MongoDb' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source.py new file mode 100644 index 000000000000..3da4b931f5e5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class MongoDbSource(CopySource): + """A copy activity source for a MongoDB database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. 
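# A minimal construction sketch for the MongoDbLinkedService defined above,
# using the keyword-only py3 signature. The host, database, and credential
# values are illustrative assumptions, not part of the generated code.
from azure.mgmt.datafactory.models import MongoDbLinkedService, SecureString

mongo_ls = MongoDbLinkedService(
    server='mongo01.contoso.example',         # required
    database_name='ordersdb',                 # required
    authentication_type='Basic',
    username='adf_reader',
    password=SecureString(value='<secret>'),
    port=27017,                               # MongoDB default, per the docstring
    enable_ssl=True,
)
# __init__ pins self.type to 'MongoDb', the service-side discriminator.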
Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Should be a SQL-92 query expression. Type: + string (or Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MongoDbSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'MongoDbSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source_py3.py new file mode 100644 index 000000000000..ab3e5b6e0cc9 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class MongoDbSource(CopySource): + """A copy activity source for a MongoDB database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Should be a SQL-92 query expression. Type: + string (or Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'MongoDbSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset.py new file mode 100644 index 000000000000..17089373d4c5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class MongoDbV2CollectionDataset(Dataset): + """The MongoDB database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection: Required. The collection name of the MongoDB database. + Type: string (or Expression with resultType string). 
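# A short sketch of the MongoDbSource defined above feeding a copy activity;
# the query text is an illustrative assumption. Per the docstring, the query
# should be a SQL-92 expression rather than a native Mongo query document.
from azure.mgmt.datafactory.models import MongoDbSource

source = MongoDbSource(
    query='select CustomerName, OrderTotal from Orders',
    source_retry_count=3,
    max_concurrent_connections=2,
)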
+ :type collection: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'collection': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MongoDbV2CollectionDataset, self).__init__(**kwargs) + self.collection = kwargs.get('collection', None) + self.type = 'MongoDbV2Collection' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset_py3.py new file mode 100644 index 000000000000..ad1e5c538645 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class MongoDbV2CollectionDataset(Dataset): + """The MongoDB database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection: Required. The collection name of the MongoDB database. + Type: string (or Expression with resultType string). 
+ :type collection: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ 'collection': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'collection': {'key': 'typeProperties.collection', 'type': 'object'},
+ }
+
+ def __init__(self, *, linked_service_name, collection, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None:
+ super(MongoDbV2CollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+ self.collection = collection
+ self.type = 'MongoDbV2Collection'
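# A construction sketch for the MongoDbV2CollectionDataset just defined; the
# linked service name and collection value are illustrative assumptions.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, MongoDbV2CollectionDataset)

dataset = MongoDbV2CollectionDataset(
    linked_service_name=LinkedServiceReference(
        reference_name='MongoDbV2LinkedService'),
    collection='orders',  # required, per _validation above
)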
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service.py new file mode 100644 index 000000000000..bb29fc767420 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +
+from .linked_service import LinkedService
+
+
+class MongoDbV2LinkedService(LinkedService):
+ """Linked service for MongoDB data source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param connection_string: Required. The MongoDB connection string. Type:
+ string, SecureString or AzureKeyVaultSecretReference.
+ :type connection_string: object
+ :param database: Required. The name of the MongoDB database that you want
+ to access. Type: string (or Expression with resultType string).
+ :type database: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'connection_string': {'required': True},
+ 'database': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+ 'database': {'key': 'typeProperties.database', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(MongoDbV2LinkedService, self).__init__(**kwargs)
+ self.connection_string = kwargs.get('connection_string', None)
+ self.database = kwargs.get('database', None)
+ self.type = 'MongoDbV2' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service_py3.py new file mode 100644 index 000000000000..d1388ce797a5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service_py3.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +
+from .linked_service_py3 import LinkedService
+
+
+class MongoDbV2LinkedService(LinkedService):
+ """Linked service for MongoDB data source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param connection_string: Required. The MongoDB connection string. Type:
+ string, SecureString or AzureKeyVaultSecretReference.
+ :type connection_string: object
+ :param database: Required. The name of the MongoDB database that you want
+ to access. Type: string (or Expression with resultType string).
+ :type database: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'connection_string': {'required': True},
+ 'database': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+ 'database': {'key': 'typeProperties.database', 'type': 'object'},
+ }
+
+ def __init__(self, *, connection_string, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None:
+ super(MongoDbV2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+ self.connection_string = connection_string
+ self.database = database
+ self.type = 'MongoDbV2'
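# A sketch of the MongoDbV2LinkedService just defined. The connection string
# is shown as a plain string for brevity; per the docstring it may instead be
# a SecureString or AzureKeyVaultSecretReference. Values are illustrative.
from azure.mgmt.datafactory.models import MongoDbV2LinkedService

mongo_v2_ls = MongoDbV2LinkedService(
    connection_string='mongodb://mongo01.contoso.example:27017',
    database='ordersdb',
)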
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source.py new file mode 100644 index 000000000000..e951674a8e22 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +
+from .copy_source import CopySource
+
+
+class MongoDbV2Source(CopySource):
+ """A copy activity source for a MongoDB database.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param filter: Specifies a selection filter using query operators. To
+ return all documents in a collection, omit this parameter or pass an empty
+ document ({}). Type: string (or Expression with resultType string).
+ :type filter: object
+ :param cursor_methods: Cursor methods for MongoDB query.
+ :type cursor_methods:
+ ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties
+ :param batch_size: Specifies the number of documents to return in each
+ batch of the response from the MongoDB instance. In most cases, modifying
+ the batch size will not affect the user or the application. This
+ property's main purpose is to avoid hitting the limitation on response
+ size. Type: integer (or Expression with resultType integer).
+ :type batch_size: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'filter': {'key': 'filter', 'type': 'object'},
+ 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'},
+ 'batch_size': {'key': 'batchSize', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(MongoDbV2Source, self).__init__(**kwargs)
+ self.filter = kwargs.get('filter', None)
+ self.cursor_methods = kwargs.get('cursor_methods', None)
+ self.batch_size = kwargs.get('batch_size', None)
+ self.type = 'MongoDbV2Source' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py new file mode 100644 index 000000000000..9b8eec114a06 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +
+from .copy_source_py3 import CopySource
+
+
+class MongoDbV2Source(CopySource):
+ """A copy activity source for a MongoDB database.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param filter: Specifies a selection filter using query operators. To
+ return all documents in a collection, omit this parameter or pass an empty
+ document ({}). Type: string (or Expression with resultType string).
+ :type filter: object
+ :param cursor_methods: Cursor methods for MongoDB query.
+ :type cursor_methods:
+ ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties
+ :param batch_size: Specifies the number of documents to return in each
+ batch of the response from the MongoDB instance.
In most cases, modifying the
+ batch size will not affect the user or the application. This property's
+ main purpose is to avoid hitting the limitation on response size. Type:
+ integer (or Expression with resultType integer).
+ :type batch_size: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'filter': {'key': 'filter', 'type': 'object'},
+ 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'},
+ 'batch_size': {'key': 'batchSize', 'type': 'object'},
+ }
+
+ def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None, **kwargs) -> None:
+ super(MongoDbV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.filter = filter
+ self.cursor_methods = cursor_methods
+ self.batch_size = batch_size
+ self.type = 'MongoDbV2Source'
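# A sketch of the MongoDbV2Source just defined, combining a filter document
# with cursor methods and a batch size. The filter text and the 'limit'
# field used on MongoDbCursorMethodsProperties are illustrative assumptions.
from azure.mgmt.datafactory.models import (
    MongoDbCursorMethodsProperties, MongoDbV2Source)

source = MongoDbV2Source(
    filter='{"status": "active"}',                       # query-operator filter
    cursor_methods=MongoDbCursorMethodsProperties(limit=1000),
    batch_size=100,                                      # documents per batch
)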
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger.py new file mode 100644 index 000000000000..1be28aa1b6ab --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +
+from .trigger import Trigger
+
+
+class MultiplePipelineTrigger(Trigger):
+ """Base class for all triggers that support a one-to-many model from
+ trigger to pipeline.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: BlobEventsTrigger, BlobTrigger, ScheduleTrigger
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param description: Trigger description.
+ :type description: str
+ :ivar runtime_state: Indicates if trigger is running or not. Updated when
+ Start/Stop APIs are called on the Trigger. Possible values include:
+ 'Started', 'Stopped', 'Disabled'
+ :vartype runtime_state: str or
+ ~azure.mgmt.datafactory.models.TriggerRuntimeState
+ :param annotations: List of tags that can be used for describing the
+ trigger.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param pipelines: Pipelines that need to be started.
+ :type pipelines:
+ list[~azure.mgmt.datafactory.models.TriggerPipelineReference]
+ """
+
+ _validation = {
+ 'runtime_state': {'readonly': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'runtime_state': {'key': 'runtimeState', 'type': 'str'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'},
+ }
+
+ _subtype_map = {
+ 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'ScheduleTrigger': 'ScheduleTrigger'}
+ }
+
+ def __init__(self, **kwargs):
+ super(MultiplePipelineTrigger, self).__init__(**kwargs)
+ self.pipelines = kwargs.get('pipelines', None)
+ self.type = 'MultiplePipelineTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger_py3.py new file mode 100644 index 000000000000..206ab74ef419 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger_py3.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +
+from .trigger_py3 import Trigger
+
+
+class MultiplePipelineTrigger(Trigger):
+ """Base class for all triggers that support a one-to-many model from
+ trigger to pipeline.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: BlobEventsTrigger, BlobTrigger, ScheduleTrigger
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param description: Trigger description.
+ :type description: str
+ :ivar runtime_state: Indicates if trigger is running or not. Updated when
+ Start/Stop APIs are called on the Trigger. Possible values include:
+ 'Started', 'Stopped', 'Disabled'
+ :vartype runtime_state: str or
+ ~azure.mgmt.datafactory.models.TriggerRuntimeState
+ :param annotations: List of tags that can be used for describing the
+ trigger.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param pipelines: Pipelines that need to be started.
+ :type pipelines: + list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + } + + _subtype_map = { + 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} + } + + def __init__(self, *, additional_properties=None, description: str=None, annotations=None, pipelines=None, **kwargs) -> None: + super(MultiplePipelineTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) + self.pipelines = pipelines + self.type = 'MultiplePipelineTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service.py new file mode 100644 index 000000000000..ec85b0136714 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class MySqlLinkedService(LinkedService): + """Linked service for MySQL data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. + :type connection_string: ~azure.mgmt.datafactory.models.SecretBase + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
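# The _subtype_map in the trigger classes above is how the polymorphic 'type'
# discriminator resolves to a concrete trigger class during deserialization.
# A rough illustration of the lookup; the payload fragment is made up:
payload = {'type': 'ScheduleTrigger'}
subclass_name = MultiplePipelineTrigger._subtype_map['type'][payload['type']]
# subclass_name == 'ScheduleTrigger'; msrest then builds that model class.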
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'SecretBase'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MySqlLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'MySql' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service_py3.py new file mode 100644 index 000000000000..b8038df22fd6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class MySqlLinkedService(LinkedService): + """Linked service for MySQL data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. + :type connection_string: ~azure.mgmt.datafactory.models.SecretBase + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'SecretBase'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(MySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'MySql' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source.py new file mode 100644 index 000000000000..c2b0b66eabb1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class MySqlSource(CopySource): + """A copy activity source for MySQL databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
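# A construction sketch for the MySqlLinkedService above. Unlike most linked
# services in this patch, connection_string here is typed as SecretBase, so a
# SecureString (or AzureKeyVaultSecretReference) is expected; the connection
# string value itself is an illustrative assumption.
from azure.mgmt.datafactory.models import MySqlLinkedService, SecureString

mysql_ls = MySqlLinkedService(
    connection_string=SecureString(
        value='Server=mysql01.contoso.example;Database=sales;UID=adf_user;'),
)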
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MySqlSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'MySqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source_py3.py new file mode 100644 index 000000000000..3a0315d83979 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class MySqlSource(CopySource): + """A copy activity source for MySQL databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'MySqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset.py new file mode 100644 index 000000000000..3bb1584975d5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class MySqlTableDataset(Dataset): + """The MySQL table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The MySQL table name. Type: string (or Expression with + resultType string). 
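# A sketch of the MySqlSource just defined; the query and table name are
# illustrative assumptions.
from azure.mgmt.datafactory.models import MySqlSource

source = MySqlSource(query='SELECT id, amount FROM sales')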
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MySqlTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'MySqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset_py3.py new file mode 100644 index 000000000000..33263561dfde --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class MySqlTableDataset(Dataset): + """The MySQL table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The MySQL table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(MySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'MySqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service.py new file mode 100644 index 000000000000..5d94bdecaf62 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class NetezzaLinkedService(LinkedService): + """Netezza linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. 
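# A sketch of the MySqlTableDataset just defined; the linked service and
# table names are illustrative assumptions.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, MySqlTableDataset)

dataset = MySqlTableDataset(
    linked_service_name=LinkedServiceReference(
        reference_name='MySqlLinkedService'),
    table_name='sales',
)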
Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(NetezzaLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.pwd = kwargs.get('pwd', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Netezza' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service_py3.py new file mode 100644 index 000000000000..2fcc288fd5b7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class NetezzaLinkedService(LinkedService): + """Netezza linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+ 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None:
+ super(NetezzaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+ self.connection_string = connection_string
+ self.pwd = pwd
+ self.encrypted_credential = encrypted_credential
+ self.type = 'Netezza'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings.py
new file mode 100644
index 000000000000..b6c1ca9ba5da
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings.py
@@ -0,0 +1,42 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class NetezzaPartitionSettings(Model):
+ """The settings that will be leveraged for Netezza source partitioning.
+
+ :param partition_column_name: The name of the integer-type column that
+ will be used for range partitioning. Type: string (or
+ Expression with resultType string).
+ :type partition_column_name: object
+ :param partition_upper_bound: The maximum value of the column specified in
+ partitionColumnName that will be used for range partitioning.
+ Type: string (or Expression with resultType string).
+ :type partition_upper_bound: object
+ :param partition_lower_bound: The minimum value of the column specified in
+ partitionColumnName that will be used for range partitioning.
+ Type: string (or Expression with resultType string).
+ :type partition_lower_bound: object
+ """
+
+ _attribute_map = {
+ 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'},
+ 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'},
+ 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(NetezzaPartitionSettings, self).__init__(**kwargs)
+ self.partition_column_name = kwargs.get('partition_column_name', None)
+ self.partition_upper_bound = kwargs.get('partition_upper_bound', None)
+ self.partition_lower_bound = kwargs.get('partition_lower_bound', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings_py3.py
new file mode 100644
index 000000000000..9f071eae60ff
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings_py3.py
@@ -0,0 +1,42 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class NetezzaPartitionSettings(Model):
+ """The settings that will be leveraged for Netezza source partitioning.
+
+ :param partition_column_name: The name of the integer-type column that
+ will be used for range partitioning. Type: string (or
+ Expression with resultType string).
+ :type partition_column_name: object
+ :param partition_upper_bound: The maximum value of the column specified in
+ partitionColumnName that will be used for range partitioning.
+ Type: string (or Expression with resultType string).
+ :type partition_upper_bound: object
+ :param partition_lower_bound: The minimum value of the column specified in
+ partitionColumnName that will be used for range partitioning.
+ Type: string (or Expression with resultType string).
+ :type partition_lower_bound: object
+ """
+
+ _attribute_map = {
+ 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'},
+ 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'},
+ 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'},
+ }
+
+ def __init__(self, *, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None:
+ super(NetezzaPartitionSettings, self).__init__(**kwargs)
+ self.partition_column_name = partition_column_name
+ self.partition_upper_bound = partition_upper_bound
+ self.partition_lower_bound = partition_lower_bound
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py
new file mode 100644
index 000000000000..3c66032bf48d
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py
@@ -0,0 +1,70 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class NetezzaSource(CopySource): + """A copy activity Netezza source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + :param partition_option: The partition mechanism that will be used for + Netezza read in parallel. Possible values include: 'None', 'DataSlice', + 'DynamicRange' + :type partition_option: str or + ~azure.mgmt.datafactory.models.NetezzaPartitionOption + :param partition_settings: The settings that will be leveraged for Netezza + source partitioning. + :type partition_settings: + ~azure.mgmt.datafactory.models.NetezzaPartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'}, + } + + def __init__(self, **kwargs): + super(NetezzaSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) + self.type = 'NetezzaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py new file mode 100644 index 000000000000..f5dcc07e63d8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
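The NetezzaLinkedService model defined above keeps the password out of the connection string via the pwd secret reference. A minimal usage sketch, not part of the generated patch; the driver string, server, database, vault linked service name, and secret name are all illustrative placeholders:

from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference,
    LinkedServiceReference,
    NetezzaLinkedService,
)

# 'KeyVaultLS' names a pre-existing Azure Key Vault linked service in the
# factory; the Netezza password is resolved from that vault at runtime.
netezza_ls = NetezzaLinkedService(
    connection_string='Driver={NetezzaSQL};servername=<server>;database=<db>;',
    pwd=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='KeyVaultLS'),
        secret_name='netezza-password',
    ),
)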
+# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class NetezzaSource(CopySource): + """A copy activity Netezza source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + :param partition_option: The partition mechanism that will be used for + Netezza read in parallel. Possible values include: 'None', 'DataSlice', + 'DynamicRange' + :type partition_option: str or + ~azure.mgmt.datafactory.models.NetezzaPartitionOption + :param partition_settings: The settings that will be leveraged for Netezza + source partitioning. + :type partition_settings: + ~azure.mgmt.datafactory.models.NetezzaPartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, partition_option=None, partition_settings=None, **kwargs) -> None: + super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.partition_option = partition_option + self.partition_settings = partition_settings + self.type = 'NetezzaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset.py new file mode 100644 index 000000000000..b7807273262b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
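With NetezzaPartitionSettings and NetezzaSource both in place, a minimal sketch of a dynamic-range parallel read follows; the column name, bounds, and query are illustrative placeholders rather than values from this patch:

from azure.mgmt.datafactory.models import NetezzaPartitionSettings, NetezzaSource

# Partition the read on a hypothetical integer column 'id'; bounds are passed
# as strings, matching the 'Type: string' contract in the docstrings above.
partition_settings = NetezzaPartitionSettings(
    partition_column_name='id',
    partition_lower_bound='1',
    partition_upper_bound='1000000',
)

# 'DynamicRange' is one of the NetezzaPartitionOption values listed above.
source = NetezzaSource(
    query='SELECT * FROM sales',
    partition_option='DynamicRange',
    partition_settings=partition_settings,
)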
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset import Dataset
+
+
+class NetezzaTableDataset(Dataset):
+ """Netezza dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset. Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param table_name: This property will be retired. Please consider using
+ schema + table properties instead.
+ :type table_name: object
+ :param table: The table name of the Netezza database. Type: string (or
+ Expression with resultType string).
+ :type table: object
+ :param netezza_table_dataset_schema: The schema name of the Netezza
+ database. Type: string (or Expression with resultType string).
+ :type netezza_table_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'netezza_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(NetezzaTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.netezza_table_dataset_schema = kwargs.get('netezza_table_dataset_schema', None) + self.type = 'NetezzaTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset_py3.py new file mode 100644 index 000000000000..29dd448ada75 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class NetezzaTableDataset(Dataset): + """Netezza dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. 
+ :type type: str
+ :param table_name: This property will be retired. Please consider using
+ schema + table properties instead.
+ :type table_name: object
+ :param table: The table name of the Netezza database. Type: string (or
+ Expression with resultType string).
+ :type table: object
+ :param netezza_table_dataset_schema: The schema name of the Netezza
+ database. Type: string (or Expression with resultType string).
+ :type netezza_table_dataset_schema: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+ 'table': {'key': 'typeProperties.table', 'type': 'object'},
+ 'netezza_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
+ }
+
+ def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, netezza_table_dataset_schema=None, **kwargs) -> None:
+ super(NetezzaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+ self.table_name = table_name
+ self.table = table
+ self.netezza_table_dataset_schema = netezza_table_dataset_schema
+ self.type = 'NetezzaTable'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service.py
new file mode 100644
index 000000000000..01db8d71e924
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service.py
@@ -0,0 +1,127 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class ODataLinkedService(LinkedService):
+ """Open Data Protocol (OData) linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param url: Required. The URL of the OData service endpoint. Type: string
+ (or Expression with resultType string).
+ :type url: object
+ :param authentication_type: Type of authentication used to connect to the
+ OData service. Possible values include: 'Basic', 'Anonymous', 'Windows',
+ 'AadServicePrincipal', 'ManagedServiceIdentity'
+ :type authentication_type: str or
+ ~azure.mgmt.datafactory.models.ODataAuthenticationType
+ :param user_name: User name of the OData service. Type: string (or
+ Expression with resultType string).
+ :type user_name: object
+ :param password: Password of the OData service.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param tenant: Specify the tenant information (domain name or tenant ID)
+ under which your application resides. Type: string (or Expression with
+ resultType string).
+ :type tenant: object
+ :param service_principal_id: Specify the application id of your
+ application registered in Azure Active Directory. Type: string (or
+ Expression with resultType string).
+ :type service_principal_id: object
+ :param aad_resource_id: Specify the resource you are requesting
+ authorization to use. Type: string (or Expression with
+ resultType string).
+ :type aad_resource_id: object
+ :param aad_service_principal_credential_type: Specify the credential type
+ (key or cert) that is used for the service principal. Possible values
+ include: 'ServicePrincipalKey', 'ServicePrincipalCert'
+ :type aad_service_principal_credential_type: str or
+ ~azure.mgmt.datafactory.models.ODataAadServicePrincipalCredentialType
+ :param service_principal_key: Specify the secret of your application
+ registered in Azure Active Directory. Type: string (or Expression with
+ resultType string).
+ :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
+ :param service_principal_embedded_cert: Specify the base64-encoded
+ certificate of your application registered in Azure Active Directory.
+ Type: string (or Expression with resultType string).
+ :type service_principal_embedded_cert:
+ ~azure.mgmt.datafactory.models.SecretBase
+ :param service_principal_embedded_cert_password: Specify the password of
+ your certificate if your certificate has a password and you are using
+ AadServicePrincipal authentication. Type: string (or Expression with
+ resultType string).
+ :type service_principal_embedded_cert_password:
+ ~azure.mgmt.datafactory.models.SecretBase
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'aad_service_principal_credential_type': {'key': 'typeProperties.aadServicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'service_principal_embedded_cert': {'key': 'typeProperties.servicePrincipalEmbeddedCert', 'type': 'SecretBase'}, + 'service_principal_embedded_cert_password': {'key': 'typeProperties.servicePrincipalEmbeddedCertPassword', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ODataLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.tenant = kwargs.get('tenant', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.aad_resource_id = kwargs.get('aad_resource_id', None) + self.aad_service_principal_credential_type = kwargs.get('aad_service_principal_credential_type', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.service_principal_embedded_cert = kwargs.get('service_principal_embedded_cert', None) + self.service_principal_embedded_cert_password = kwargs.get('service_principal_embedded_cert_password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'OData' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service_py3.py new file mode 100644 index 000000000000..fcf2d8bb9819 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service_py3.py @@ -0,0 +1,127 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class ODataLinkedService(LinkedService):
+ """Open Data Protocol (OData) linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param url: Required. The URL of the OData service endpoint. Type: string
+ (or Expression with resultType string).
+ :type url: object
+ :param authentication_type: Type of authentication used to connect to the
+ OData service. Possible values include: 'Basic', 'Anonymous', 'Windows',
+ 'AadServicePrincipal', 'ManagedServiceIdentity'
+ :type authentication_type: str or
+ ~azure.mgmt.datafactory.models.ODataAuthenticationType
+ :param user_name: User name of the OData service. Type: string (or
+ Expression with resultType string).
+ :type user_name: object
+ :param password: Password of the OData service.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param tenant: Specify the tenant information (domain name or tenant ID)
+ under which your application resides. Type: string (or Expression with
+ resultType string).
+ :type tenant: object
+ :param service_principal_id: Specify the application id of your
+ application registered in Azure Active Directory. Type: string (or
+ Expression with resultType string).
+ :type service_principal_id: object
+ :param aad_resource_id: Specify the resource you are requesting
+ authorization to use. Type: string (or Expression with
+ resultType string).
+ :type aad_resource_id: object
+ :param aad_service_principal_credential_type: Specify the credential type
+ (key or cert) that is used for the service principal. Possible values
+ include: 'ServicePrincipalKey', 'ServicePrincipalCert'
+ :type aad_service_principal_credential_type: str or
+ ~azure.mgmt.datafactory.models.ODataAadServicePrincipalCredentialType
+ :param service_principal_key: Specify the secret of your application
+ registered in Azure Active Directory. Type: string (or Expression with
+ resultType string).
+ :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
+ :param service_principal_embedded_cert: Specify the base64-encoded
+ certificate of your application registered in Azure Active Directory.
+ Type: string (or Expression with resultType string).
+ :type service_principal_embedded_cert:
+ ~azure.mgmt.datafactory.models.SecretBase
+ :param service_principal_embedded_cert_password: Specify the password of
+ your certificate if your certificate has a password and you are using
+ AadServicePrincipal authentication. Type: string (or Expression with
+ resultType string).
+ :type service_principal_embedded_cert_password:
+ ~azure.mgmt.datafactory.models.SecretBase
+ :param encrypted_credential: The encrypted credential used for
+ authentication.
Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'aad_service_principal_credential_type': {'key': 'typeProperties.aadServicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'service_principal_embedded_cert': {'key': 'typeProperties.servicePrincipalEmbeddedCert', 'type': 'SecretBase'}, + 'service_principal_embedded_cert_password': {'key': 'typeProperties.servicePrincipalEmbeddedCertPassword', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, user_name=None, password=None, tenant=None, service_principal_id=None, aad_resource_id=None, aad_service_principal_credential_type=None, service_principal_key=None, service_principal_embedded_cert=None, service_principal_embedded_cert_password=None, encrypted_credential=None, **kwargs) -> None: + super(ODataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.authentication_type = authentication_type + self.user_name = user_name + self.password = password + self.tenant = tenant + self.service_principal_id = service_principal_id + self.aad_resource_id = aad_resource_id + self.aad_service_principal_credential_type = aad_service_principal_credential_type + self.service_principal_key = service_principal_key + self.service_principal_embedded_cert = service_principal_embedded_cert + self.service_principal_embedded_cert_password = service_principal_embedded_cert_password + self.encrypted_credential = encrypted_credential + self.type = 'OData' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset.py new file mode 100644 index 000000000000..658cf40c8d2b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
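As a rough usage sketch of the OData linked service defined above, configured for key-based service principal authentication; SecureString is the inline SecretBase implementation defined elsewhere in this models package, and every identifier below is a placeholder:

from azure.mgmt.datafactory.models import ODataLinkedService, SecureString

# Key-based AAD service principal auth against a public sample endpoint.
odata_ls = ODataLinkedService(
    url='https://services.odata.org/V4/Northwind/Northwind.svc',
    authentication_type='AadServicePrincipal',
    aad_service_principal_credential_type='ServicePrincipalKey',
    service_principal_id='<application-id>',
    service_principal_key=SecureString(value='<application-secret>'),
    tenant='<tenant-id>',
)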
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class ODataResourceDataset(Dataset): + """The Open Data Protocol (OData) resource dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param path: The OData resource path. Type: string (or Expression with + resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ODataResourceDataset, self).__init__(**kwargs) + self.path = kwargs.get('path', None) + self.type = 'ODataResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset_py3.py new file mode 100644 index 000000000000..5951a2cf6d80 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
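A short sketch of the resource dataset above pointing at an entity set; 'ODataLS' is a placeholder for a linked service registered earlier in the factory, and the path value is illustrative:

from azure.mgmt.datafactory.models import LinkedServiceReference, ODataResourceDataset

# 'path' is the OData resource path, typically an entity set name.
dataset = ODataResourceDataset(
    linked_service_name=LinkedServiceReference(reference_name='ODataLS'),
    path='Customers',
)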
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class ODataResourceDataset(Dataset): + """The Open Data Protocol (OData) resource dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param path: The OData resource path. Type: string (or Expression with + resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, path=None, **kwargs) -> None: + super(ODataResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.path = path + self.type = 'ODataResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source.py new file mode 100644 index 000000000000..c70f440ff6cb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class ODataSource(CopySource): + """A copy activity source for OData source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: OData query. For example, "$top=1". Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ODataSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'ODataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source_py3.py new file mode 100644 index 000000000000..83ba9bd7f2af --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class ODataSource(CopySource): + """A copy activity source for OData source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: OData query. For example, "$top=1". Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ODataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'ODataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service.py new file mode 100644 index 000000000000..53d21dee2def --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class OdbcLinkedService(LinkedService): + """Open Database Connectivity (ODBC) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The non-access credential portion of + the connection string as well as an optional encrypted credential. Type: + string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param authentication_type: Type of authentication used to connect to the + ODBC data store. Possible values are: Anonymous and Basic. 
Type: string + (or Expression with resultType string). + :type authentication_type: object + :param credential: The access credential portion of the connection string + specified in driver-specific property-value format. + :type credential: ~azure.mgmt.datafactory.models.SecretBase + :param user_name: User name for Basic authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OdbcLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.credential = kwargs.get('credential', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Odbc' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service_py3.py new file mode 100644 index 000000000000..2e376d23c67a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class OdbcLinkedService(LinkedService): + """Open Database Connectivity (ODBC) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The non-access credential portion of + the connection string as well as an optional encrypted credential. Type: + string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param authentication_type: Type of authentication used to connect to the + ODBC data store. Possible values are: Anonymous and Basic. Type: string + (or Expression with resultType string). + :type authentication_type: object + :param credential: The access credential portion of the connection string + specified in driver-specific property-value format. + :type credential: ~azure.mgmt.datafactory.models.SecretBase + :param user_name: User name for Basic authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, credential=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(OdbcLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.authentication_type = authentication_type + self.credential = credential + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'Odbc' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink.py new file mode 100644 
index 000000000000..ced7e1dbd9e4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class OdbcSink(CopySink): + """A copy activity ODBC sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). + :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OdbcSink, self).__init__(**kwargs) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.type = 'OdbcSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink_py3.py new file mode 100644 index 000000000000..9a181f8df7e9 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink_py3.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class OdbcSink(CopySink): + """A copy activity ODBC sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). + :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.pre_copy_script = pre_copy_script + self.type = 'OdbcSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source.py new file mode 100644 index 000000000000..9761d0c0aeb5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
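# Usage sketch: a hypothetical OdbcSink showing `pre_copy_script`, which runs
# once against the target store before rows are written. The table name is a
# placeholder; the timeout string follows the pattern documented above.
from azure.mgmt.datafactory.models import OdbcSink

odbc_sink = OdbcSink(
    pre_copy_script='TRUNCATE TABLE staging_orders',  # placeholder pre-copy SQL
    write_batch_size=1000,                            # rows per batch, minimum 0
    write_batch_timeout='00:30:00',                   # hh:mm:ss, per the pattern
)
# (end of usage sketch)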
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class OdbcSource(CopySource): + """A copy activity source for ODBC databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OdbcSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'OdbcSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source_py3.py new file mode 100644 index 000000000000..52b059a8ad91 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class OdbcSource(CopySource): + """A copy activity source for ODBC databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. 
Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'OdbcSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset.py new file mode 100644 index 000000000000..2f4f4261f4fc --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class OdbcTableDataset(Dataset): + """The ODBC table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The ODBC table name. Type: string (or Expression with + resultType string). 
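# Usage sketch: a hypothetical OdbcSource. When `query` is set it overrides
# the table configured on the dataset; the SQL text and retry settings below
# are placeholders. A copy activity would pair this source with the OdbcSink
# sketched above.
from azure.mgmt.datafactory.models import OdbcSource

odbc_source = OdbcSource(
    query='SELECT id, amount FROM orders',  # placeholder query
    source_retry_count=3,
    source_retry_wait='00:00:30',           # hh:mm:ss, per the pattern
)
# (end of usage sketch)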
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OdbcTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'OdbcTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset_py3.py new file mode 100644 index 000000000000..070ddccd180d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class OdbcTableDataset(Dataset): + """The ODBC table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The ODBC table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(OdbcTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'OdbcTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset.py new file mode 100644 index 000000000000..baa90666d669 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset.py @@ -0,0 +1,79 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class Office365Dataset(Dataset): + """The Office365 account. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. 
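# Usage sketch: a hypothetical OdbcTableDataset pointing at the linked
# service sketched earlier; the reference name and table are placeholders.
from azure.mgmt.datafactory.models import LinkedServiceReference, OdbcTableDataset

odbc_dataset = OdbcTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='OdbcWarehouse'),
    table_name='dbo.orders',  # placeholder table name
)
# (end of usage sketch)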
+ :type type: str + :param table_name: Required. Name of the dataset to extract from Office + 365. Type: string (or Expression with resultType string). + :type table_name: object + :param predicate: A predicate expression that can be used to filter the + specific rows to extract from Office 365. Type: string (or Expression with + resultType string). + :type predicate: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'table_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'predicate': {'key': 'typeProperties.predicate', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(Office365Dataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.predicate = kwargs.get('predicate', None) + self.type = 'Office365Table' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset_py3.py new file mode 100644 index 000000000000..5517f7daf9e3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset_py3.py @@ -0,0 +1,79 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class Office365Dataset(Dataset): + """The Office365 account. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. 
If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: Required. Name of the dataset to extract from Office + 365. Type: string (or Expression with resultType string). + :type table_name: object + :param predicate: A predicate expression that can be used to filter the + specific rows to extract from Office 365. Type: string (or Expression with + resultType string). + :type predicate: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'table_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'predicate': {'key': 'typeProperties.predicate', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, predicate=None, **kwargs) -> None: + super(Office365Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.predicate = predicate + self.type = 'Office365Table' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service.py new file mode 100644 index 000000000000..2dc98897482a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service.py @@ -0,0 +1,83 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class Office365LinkedService(LinkedService): + """Office365 linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
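# Usage sketch: a hypothetical Office365Dataset. The table name below follows
# the Microsoft Graph data connect naming convention for the message dataset,
# but both values are illustrative placeholders, not values from this patch.
from azure.mgmt.datafactory.models import LinkedServiceReference, Office365Dataset

o365_dataset = Office365Dataset(
    linked_service_name=LinkedServiceReference(reference_name='Office365LS'),
    table_name='BasicDataSet_v0.Message_v0',  # required; placeholder dataset name
)
# (end of usage sketch)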
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param office365_tenant_id: Required. Azure tenant ID to which the Office + 365 account belongs. Type: string (or Expression with resultType string). + :type office365_tenant_id: object + :param service_principal_tenant_id: Required. Specify the tenant + information under which your Azure AD web application resides. Type: + string (or Expression with resultType string). + :type service_principal_tenant_id: object + :param service_principal_id: Required. Specify the application's client + ID. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. Specify the application's key. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'office365_tenant_id': {'required': True}, + 'service_principal_tenant_id': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'office365_tenant_id': {'key': 'typeProperties.office365TenantId', 'type': 'object'}, + 'service_principal_tenant_id': {'key': 'typeProperties.servicePrincipalTenantId', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(Office365LinkedService, self).__init__(**kwargs) + self.office365_tenant_id = kwargs.get('office365_tenant_id', None) + self.service_principal_tenant_id = kwargs.get('service_principal_tenant_id', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Office365' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service_py3.py new file mode 100644 index 000000000000..5a69c0d895fa --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service_py3.py @@ -0,0 +1,83 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class Office365LinkedService(LinkedService): + """Office365 linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param office365_tenant_id: Required. Azure tenant ID to which the Office + 365 account belongs. Type: string (or Expression with resultType string). + :type office365_tenant_id: object + :param service_principal_tenant_id: Required. Specify the tenant + information under which your Azure AD web application resides. Type: + string (or Expression with resultType string). + :type service_principal_tenant_id: object + :param service_principal_id: Required. Specify the application's client + ID. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. Specify the application's key. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'office365_tenant_id': {'required': True}, + 'service_principal_tenant_id': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'office365_tenant_id': {'key': 'typeProperties.office365TenantId', 'type': 'object'}, + 'service_principal_tenant_id': {'key': 'typeProperties.servicePrincipalTenantId', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, office365_tenant_id, service_principal_tenant_id, service_principal_id, service_principal_key, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, encrypted_credential=None, **kwargs) -> None: + super(Office365LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.office365_tenant_id = office365_tenant_id + self.service_principal_tenant_id = service_principal_tenant_id + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.encrypted_credential = encrypted_credential + self.type = 'Office365' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source.py new file mode 100644 index 000000000000..de19818aaa7f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class Office365Source(CopySource): + """A copy activity source for an Office365 service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
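# Usage sketch: a hypothetical Office365LinkedService. All four required
# properties come from an Azure AD application registration; every value
# below is a placeholder.
from azure.mgmt.datafactory.models import Office365LinkedService, SecureString

o365_ls = Office365LinkedService(
    office365_tenant_id='<office365-tenant-guid>',
    service_principal_tenant_id='<aad-tenant-guid>',
    service_principal_id='<application-client-id>',
    service_principal_key=SecureString(value='<application-key>'),
)
# (end of usage sketch)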
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param allowed_groups: The groups containing all the users. Type: array of + strings (or Expression with resultType array of strings). + :type allowed_groups: object + :param user_scope_filter_uri: The user scope URI. Type: string (or + Expression with resultType string). + :type user_scope_filter_uri: object + :param date_filter_column: The column on which to apply the start time and + end time filter. Type: string (or Expression with resultType string). + :type date_filter_column: object + :param start_time: Start time of the requested range for this dataset. + Type: string (or Expression with resultType string). + :type start_time: object + :param end_time: End time of the requested range for this dataset. Type: + string (or Expression with resultType string). + :type end_time: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'allowed_groups': {'key': 'allowedGroups', 'type': 'object'}, + 'user_scope_filter_uri': {'key': 'userScopeFilterUri', 'type': 'object'}, + 'date_filter_column': {'key': 'dateFilterColumn', 'type': 'object'}, + 'start_time': {'key': 'startTime', 'type': 'object'}, + 'end_time': {'key': 'endTime', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(Office365Source, self).__init__(**kwargs) + self.allowed_groups = kwargs.get('allowed_groups', None) + self.user_scope_filter_uri = kwargs.get('user_scope_filter_uri', None) + self.date_filter_column = kwargs.get('date_filter_column', None) + self.start_time = kwargs.get('start_time', None) + self.end_time = kwargs.get('end_time', None) + self.type = 'Office365Source' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source_py3.py new file mode 100644 index 000000000000..fc2c4b095904 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source_py3.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class Office365Source(CopySource): + """A copy activity source for an Office365 service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count.
Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param allowed_groups: The groups containing all the users. Type: array of + strings (or Expression with resultType array of strings). + :type allowed_groups: object + :param user_scope_filter_uri: The user scope URI. Type: string (or + Expression with resultType string). + :type user_scope_filter_uri: object + :param date_filter_column: The column on which to apply the start time and + end time filter. Type: string (or Expression with resultType string). + :type date_filter_column: object + :param start_time: Start time of the requested range for this dataset. + Type: string (or Expression with resultType string). + :type start_time: object + :param end_time: End time of the requested range for this dataset. Type: + string (or Expression with resultType string). + :type end_time: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'allowed_groups': {'key': 'allowedGroups', 'type': 'object'}, + 'user_scope_filter_uri': {'key': 'userScopeFilterUri', 'type': 'object'}, + 'date_filter_column': {'key': 'dateFilterColumn', 'type': 'object'}, + 'start_time': {'key': 'startTime', 'type': 'object'}, + 'end_time': {'key': 'endTime', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, allowed_groups=None, user_scope_filter_uri=None, date_filter_column=None, start_time=None, end_time=None, **kwargs) -> None: + super(Office365Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.allowed_groups = allowed_groups + self.user_scope_filter_uri = user_scope_filter_uri + self.date_filter_column = date_filter_column + self.start_time = start_time + self.end_time = end_time + self.type = 'Office365Source' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation.py new file mode 100644 index 000000000000..db8cde8db784 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated.
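# Usage sketch: a hypothetical Office365Source restricting extraction to one
# group and one date window; the group id, filter column, and timestamps are
# placeholders.
from azure.mgmt.datafactory.models import Office365Source

o365_source = Office365Source(
    allowed_groups=['<group-object-id>'],
    date_filter_column='CreatedDateTime',  # placeholder datetime column
    start_time='2019-04-01T00:00:00Z',
    end_time='2019-05-01T00:00:00Z',
)
# (end of usage sketch)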
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class Operation(Model): + """Azure Data Factory API operation definition. + + :param name: Operation name: {provider}/{resource}/{operation} + :type name: str + :param origin: The intended executor of the operation. + :type origin: str + :param display: Metadata associated with the operation. + :type display: ~azure.mgmt.datafactory.models.OperationDisplay + :param service_specification: Details about a service operation. + :type service_specification: + ~azure.mgmt.datafactory.models.OperationServiceSpecification + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'origin': {'key': 'origin', 'type': 'str'}, + 'display': {'key': 'display', 'type': 'OperationDisplay'}, + 'service_specification': {'key': 'properties.serviceSpecification', 'type': 'OperationServiceSpecification'}, + } + + def __init__(self, **kwargs): + super(Operation, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.origin = kwargs.get('origin', None) + self.display = kwargs.get('display', None) + self.service_specification = kwargs.get('service_specification', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display.py new file mode 100644 index 000000000000..1d96541c0581 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OperationDisplay(Model): + """Metadata associated with the operation. + + :param description: The description of the operation. + :type description: str + :param provider: The name of the provider. + :type provider: str + :param resource: The name of the resource type on which the operation is + performed. + :type resource: str + :param operation: The type of operation: get, read, delete, etc. 
+ :type operation: str + """ + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'provider': {'key': 'provider', 'type': 'str'}, + 'resource': {'key': 'resource', 'type': 'str'}, + 'operation': {'key': 'operation', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(OperationDisplay, self).__init__(**kwargs) + self.description = kwargs.get('description', None) + self.provider = kwargs.get('provider', None) + self.resource = kwargs.get('resource', None) + self.operation = kwargs.get('operation', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display_py3.py new file mode 100644 index 000000000000..dfbb782627f4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display_py3.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OperationDisplay(Model): + """Metadata associated with the operation. + + :param description: The description of the operation. + :type description: str + :param provider: The name of the provider. + :type provider: str + :param resource: The name of the resource type on which the operation is + performed. + :type resource: str + :param operation: The type of operation: get, read, delete, etc. + :type operation: str + """ + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'provider': {'key': 'provider', 'type': 'str'}, + 'resource': {'key': 'resource', 'type': 'str'}, + 'operation': {'key': 'operation', 'type': 'str'}, + } + + def __init__(self, *, description: str=None, provider: str=None, resource: str=None, operation: str=None, **kwargs) -> None: + super(OperationDisplay, self).__init__(**kwargs) + self.description = description + self.provider = provider + self.resource = resource + self.operation = operation diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification.py new file mode 100644 index 000000000000..93bfaf4ed0de --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification.py @@ -0,0 +1,37 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OperationLogSpecification(Model): + """Details about an operation related to logs. + + :param name: The name of the log category. 
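# Usage sketch: Operation and OperationDisplay are returned by the service
# rather than constructed by callers. Assuming a DataFactoryManagementClient
# built from pre-existing `credentials` and `subscription_id` objects, its
# operations group pages through these models:
from azure.mgmt.datafactory import DataFactoryManagementClient

client = DataFactoryManagementClient(credentials, subscription_id)  # assumed inputs
for op in client.operations.list():
    display = op.display  # an OperationDisplay, possibly None
    print(op.name, display.operation if display else '')
# (end of usage sketch)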
+ :type name: str + :param display_name: Localized display name. + :type display_name: str + :param blob_duration: Blobs created in the customer storage account, per + hour. + :type blob_duration: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'blob_duration': {'key': 'blobDuration', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(OperationLogSpecification, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.display_name = kwargs.get('display_name', None) + self.blob_duration = kwargs.get('blob_duration', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification_py3.py new file mode 100644 index 000000000000..2cdd941fab7b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification_py3.py @@ -0,0 +1,37 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OperationLogSpecification(Model): + """Details about an operation related to logs. + + :param name: The name of the log category. + :type name: str + :param display_name: Localized display name. + :type display_name: str + :param blob_duration: Blobs created in the customer storage account, per + hour. + :type blob_duration: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'blob_duration': {'key': 'blobDuration', 'type': 'str'}, + } + + def __init__(self, *, name: str=None, display_name: str=None, blob_duration: str=None, **kwargs) -> None: + super(OperationLogSpecification, self).__init__(**kwargs) + self.name = name + self.display_name = display_name + self.blob_duration = blob_duration diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability.py new file mode 100644 index 000000000000..974e0cbf4b0b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OperationMetricAvailability(Model): + """Defines how often data for a metric becomes available. + + :param time_grain: The granularity for the metric. 
+ :type time_grain: str + :param blob_duration: Blob created in the customer storage account, per + hour. + :type blob_duration: str + """ + + _attribute_map = { + 'time_grain': {'key': 'timeGrain', 'type': 'str'}, + 'blob_duration': {'key': 'blobDuration', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(OperationMetricAvailability, self).__init__(**kwargs) + self.time_grain = kwargs.get('time_grain', None) + self.blob_duration = kwargs.get('blob_duration', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability_py3.py new file mode 100644 index 000000000000..312b83a23701 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability_py3.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OperationMetricAvailability(Model): + """Defines how often data for a metric becomes available. + + :param time_grain: The granularity for the metric. + :type time_grain: str + :param blob_duration: Blob created in the customer storage account, per + hour. + :type blob_duration: str + """ + + _attribute_map = { + 'time_grain': {'key': 'timeGrain', 'type': 'str'}, + 'blob_duration': {'key': 'blobDuration', 'type': 'str'}, + } + + def __init__(self, *, time_grain: str=None, blob_duration: str=None, **kwargs) -> None: + super(OperationMetricAvailability, self).__init__(**kwargs) + self.time_grain = time_grain + self.blob_duration = blob_duration diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension.py new file mode 100644 index 000000000000..24232e7b5470 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension.py @@ -0,0 +1,37 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OperationMetricDimension(Model): + """Defines the metric dimension. + + :param name: The name of the dimension for the metric. + :type name: str + :param display_name: The display name of the metric dimension. + :type display_name: str + :param to_be_exported_for_shoebox: Whether the dimension should be + exported to Azure Monitor. 
+ :type to_be_exported_for_shoebox: bool + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'to_be_exported_for_shoebox': {'key': 'toBeExportedForShoebox', 'type': 'bool'}, + } + + def __init__(self, **kwargs): + super(OperationMetricDimension, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.display_name = kwargs.get('display_name', None) + self.to_be_exported_for_shoebox = kwargs.get('to_be_exported_for_shoebox', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension_py3.py new file mode 100644 index 000000000000..1d8610b7fab8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension_py3.py @@ -0,0 +1,37 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OperationMetricDimension(Model): + """Defines the metric dimension. + + :param name: The name of the dimension for the metric. + :type name: str + :param display_name: The display name of the metric dimension. + :type display_name: str + :param to_be_exported_for_shoebox: Whether the dimension should be + exported to Azure Monitor. + :type to_be_exported_for_shoebox: bool + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'to_be_exported_for_shoebox': {'key': 'toBeExportedForShoebox', 'type': 'bool'}, + } + + def __init__(self, *, name: str=None, display_name: str=None, to_be_exported_for_shoebox: bool=None, **kwargs) -> None: + super(OperationMetricDimension, self).__init__(**kwargs) + self.name = name + self.display_name = display_name + self.to_be_exported_for_shoebox = to_be_exported_for_shoebox diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification.py new file mode 100644 index 000000000000..77f533fdcebf --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OperationMetricSpecification(Model): + """Details about an operation related to metrics. + + :param name: The name of the metric. + :type name: str + :param display_name: Localized display name of the metric. 
+ :type display_name: str + :param display_description: The description of the metric. + :type display_description: str + :param unit: The unit that the metric is measured in. + :type unit: str + :param aggregation_type: The type of metric aggregation. + :type aggregation_type: str + :param enable_regional_mdm_account: Whether or not the service is using + regional MDM accounts. + :type enable_regional_mdm_account: str + :param source_mdm_account: The name of the MDM account. + :type source_mdm_account: str + :param source_mdm_namespace: The name of the MDM namespace. + :type source_mdm_namespace: str + :param availabilities: Defines how often data for metrics becomes + available. + :type availabilities: + list[~azure.mgmt.datafactory.models.OperationMetricAvailability] + :param dimensions: Defines the metric dimension. + :type dimensions: + list[~azure.mgmt.datafactory.models.OperationMetricDimension] + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'display_description': {'key': 'displayDescription', 'type': 'str'}, + 'unit': {'key': 'unit', 'type': 'str'}, + 'aggregation_type': {'key': 'aggregationType', 'type': 'str'}, + 'enable_regional_mdm_account': {'key': 'enableRegionalMdmAccount', 'type': 'str'}, + 'source_mdm_account': {'key': 'sourceMdmAccount', 'type': 'str'}, + 'source_mdm_namespace': {'key': 'sourceMdmNamespace', 'type': 'str'}, + 'availabilities': {'key': 'availabilities', 'type': '[OperationMetricAvailability]'}, + 'dimensions': {'key': 'dimensions', 'type': '[OperationMetricDimension]'}, + } + + def __init__(self, **kwargs): + super(OperationMetricSpecification, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.display_name = kwargs.get('display_name', None) + self.display_description = kwargs.get('display_description', None) + self.unit = kwargs.get('unit', None) + self.aggregation_type = kwargs.get('aggregation_type', None) + self.enable_regional_mdm_account = kwargs.get('enable_regional_mdm_account', None) + self.source_mdm_account = kwargs.get('source_mdm_account', None) + self.source_mdm_namespace = kwargs.get('source_mdm_namespace', None) + self.availabilities = kwargs.get('availabilities', None) + self.dimensions = kwargs.get('dimensions', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification_py3.py new file mode 100644 index 000000000000..c1cc4ad39e72 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification_py3.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OperationMetricSpecification(Model): + """Details about an operation related to metrics. + + :param name: The name of the metric. + :type name: str + :param display_name: Localized display name of the metric. 
+ :type display_name: str
+ :param display_description: The description of the metric.
+ :type display_description: str
+ :param unit: The unit that the metric is measured in.
+ :type unit: str
+ :param aggregation_type: The type of metric aggregation.
+ :type aggregation_type: str
+ :param enable_regional_mdm_account: Whether or not the service is using
+ regional MDM accounts.
+ :type enable_regional_mdm_account: str
+ :param source_mdm_account: The name of the MDM account.
+ :type source_mdm_account: str
+ :param source_mdm_namespace: The name of the MDM namespace.
+ :type source_mdm_namespace: str
+ :param availabilities: Defines how often data for metrics becomes
+ available.
+ :type availabilities:
+ list[~azure.mgmt.datafactory.models.OperationMetricAvailability]
+ :param dimensions: Defines the metric dimension.
+ :type dimensions:
+ list[~azure.mgmt.datafactory.models.OperationMetricDimension]
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'display_name': {'key': 'displayName', 'type': 'str'},
+ 'display_description': {'key': 'displayDescription', 'type': 'str'},
+ 'unit': {'key': 'unit', 'type': 'str'},
+ 'aggregation_type': {'key': 'aggregationType', 'type': 'str'},
+ 'enable_regional_mdm_account': {'key': 'enableRegionalMdmAccount', 'type': 'str'},
+ 'source_mdm_account': {'key': 'sourceMdmAccount', 'type': 'str'},
+ 'source_mdm_namespace': {'key': 'sourceMdmNamespace', 'type': 'str'},
+ 'availabilities': {'key': 'availabilities', 'type': '[OperationMetricAvailability]'},
+ 'dimensions': {'key': 'dimensions', 'type': '[OperationMetricDimension]'},
+ }
+
+ def __init__(self, *, name: str=None, display_name: str=None, display_description: str=None, unit: str=None, aggregation_type: str=None, enable_regional_mdm_account: str=None, source_mdm_account: str=None, source_mdm_namespace: str=None, availabilities=None, dimensions=None, **kwargs) -> None:
+ super(OperationMetricSpecification, self).__init__(**kwargs)
+ self.name = name
+ self.display_name = display_name
+ self.display_description = display_description
+ self.unit = unit
+ self.aggregation_type = aggregation_type
+ self.enable_regional_mdm_account = enable_regional_mdm_account
+ self.source_mdm_account = source_mdm_account
+ self.source_mdm_namespace = source_mdm_namespace
+ self.availabilities = availabilities
+ self.dimensions = dimensions
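For reviewers, a quick sketch of how the metric-metadata models above nest; the metric name, grain, and dimension values here are purely illustrative, not taken from any service response:

    from azure.mgmt.datafactory.models import (
        OperationMetricAvailability,
        OperationMetricDimension,
        OperationMetricSpecification,
    )

    # One availability record: the metric is emitted at a one-minute grain.
    availability = OperationMetricAvailability(time_grain='PT1M', blob_duration='PT1H')

    # One dimension flagged for export to Azure Monitor.
    dimension = OperationMetricDimension(
        name='pipelineName',
        display_name='Pipeline name',
        to_be_exported_for_shoebox=True,
    )

    spec = OperationMetricSpecification(
        name='PipelineSucceededRuns',
        display_name='Succeeded pipeline runs',
        unit='Count',
        aggregation_type='Total',
        availabilities=[availability],
        dimensions=[dimension],
    )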
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_paged.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_paged.py
new file mode 100644
index 000000000000..d6eea01bbdb9
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_paged.py
@@ -0,0 +1,27 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.paging import Paged
+
+
+class OperationPaged(Paged):
+ """
+ A paging container for iterating over a list of :class:`Operation <azure.mgmt.datafactory.models.Operation>` objects
+ """
+
+ _attribute_map = {
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ 'current_page': {'key': 'value', 'type': '[Operation]'}
+ }
+
+ def __init__(self, *args, **kwargs):
+
+ super(OperationPaged, self).__init__(*args, **kwargs)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_py3.py
new file mode 100644
index 000000000000..23305038a090
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_py3.py
@@ -0,0 +1,41 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class Operation(Model):
+ """Azure Data Factory API operation definition.
+
+ :param name: Operation name: {provider}/{resource}/{operation}
+ :type name: str
+ :param origin: The intended executor of the operation.
+ :type origin: str
+ :param display: Metadata associated with the operation.
+ :type display: ~azure.mgmt.datafactory.models.OperationDisplay
+ :param service_specification: Details about a service operation.
+ :type service_specification:
+ ~azure.mgmt.datafactory.models.OperationServiceSpecification
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'origin': {'key': 'origin', 'type': 'str'},
+ 'display': {'key': 'display', 'type': 'OperationDisplay'},
+ 'service_specification': {'key': 'properties.serviceSpecification', 'type': 'OperationServiceSpecification'},
+ }
+
+ def __init__(self, *, name: str=None, origin: str=None, display=None, service_specification=None, **kwargs) -> None:
+ super(Operation, self).__init__(**kwargs)
+ self.name = name
+ self.origin = origin
+ self.display = display
+ self.service_specification = service_specification
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification.py
new file mode 100644
index 000000000000..82622a44af5a
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification.py
@@ -0,0 +1,34 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class OperationServiceSpecification(Model):
+ """Details about a service operation.
+
+ :param log_specifications: Details about operations related to logs.
+ :type log_specifications:
+ list[~azure.mgmt.datafactory.models.OperationLogSpecification]
+ :param metric_specifications: Details about operations related to metrics.
+ :type metric_specifications:
+ list[~azure.mgmt.datafactory.models.OperationMetricSpecification]
+ """
+
+ _attribute_map = {
+ 'log_specifications': {'key': 'logSpecifications', 'type': '[OperationLogSpecification]'},
+ 'metric_specifications': {'key': 'metricSpecifications', 'type': '[OperationMetricSpecification]'},
+ }
+
+ def __init__(self, **kwargs):
+ super(OperationServiceSpecification, self).__init__(**kwargs)
+ self.log_specifications = kwargs.get('log_specifications', None)
+ self.metric_specifications = kwargs.get('metric_specifications', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification_py3.py
new file mode 100644
index 000000000000..4215dac6eb7f
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification_py3.py
@@ -0,0 +1,34 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class OperationServiceSpecification(Model):
+ """Details about a service operation.
+
+ :param log_specifications: Details about operations related to logs.
+ :type log_specifications:
+ list[~azure.mgmt.datafactory.models.OperationLogSpecification]
+ :param metric_specifications: Details about operations related to metrics.
+ :type metric_specifications:
+ list[~azure.mgmt.datafactory.models.OperationMetricSpecification]
+ """
+
+ _attribute_map = {
+ 'log_specifications': {'key': 'logSpecifications', 'type': '[OperationLogSpecification]'},
+ 'metric_specifications': {'key': 'metricSpecifications', 'type': '[OperationMetricSpecification]'},
+ }
+
+ def __init__(self, *, log_specifications=None, metric_specifications=None, **kwargs) -> None:
+ super(OperationServiceSpecification, self).__init__(**kwargs)
+ self.log_specifications = log_specifications
+ self.metric_specifications = metric_specifications
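These operation models surface through the factory client's operations listing; a rough sketch of walking the result, assuming an already-configured DataFactoryManagementClient named `client` (client construction and credentials are outside this patch):

    # `operations.list()` pages through Operation objects via OperationPaged.
    for op in client.operations.list():
        print(op.name, op.origin)
        spec = op.service_specification
        if spec and spec.metric_specifications:
            for metric in spec.metric_specifications:
                print('  metric:', metric.name, metric.unit)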
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service.py
new file mode 100644
index 000000000000..19f715dfd9e2
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service.py
@@ -0,0 +1,71 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class OracleLinkedService(LinkedService):
+ """Oracle database.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param connection_string: Required. The connection string. Type: string,
+ SecureString or AzureKeyVaultSecretReference.
+ :type connection_string: object
+ :param password: The Azure key vault secret reference of the password in
+ the connection string.
+ :type password:
+ ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'connection_string': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+ 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(OracleLinkedService, self).__init__(**kwargs)
+ self.connection_string = kwargs.get('connection_string', None)
+ self.password = kwargs.get('password', None)
+ self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.type = 'Oracle'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service_py3.py
new file mode 100644
index 000000000000..a46f0463afb5
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service_py3.py
@@ -0,0 +1,71 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class OracleLinkedService(LinkedService):
+ """Oracle database.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param connection_string: Required. The connection string. Type: string,
+ SecureString or AzureKeyVaultSecretReference.
+ :type connection_string: object
+ :param password: The Azure key vault secret reference of the password in
+ the connection string.
+ :type password:
+ ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'connection_string': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+ 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, encrypted_credential=None, **kwargs) -> None:
+ super(OracleLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+ self.connection_string = connection_string
+ self.password = password
+ self.encrypted_credential = encrypted_credential
+ self.type = 'Oracle'
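A minimal construction sketch for the linked service above; the connection string is a placeholder, and the Key Vault reference reuses the AzureKeyVaultSecretReference and LinkedServiceReference models named in its docstring:

    from azure.mgmt.datafactory.models import (
        AzureKeyVaultSecretReference,
        LinkedServiceReference,
        OracleLinkedService,
    )

    # Keep the password out of the connection string by resolving it from
    # Key Vault at runtime; 'MyAzureKeyVault' is a hypothetical linked service.
    kv_password = AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='MyAzureKeyVault'),
        secret_name='oracle-password',
    )

    oracle_ls = OracleLinkedService(
        connection_string='host=myserver;port=1521;sid=orcl;user id=scott',
        password=kv_password,
    )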
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py
new file mode 100644
index 000000000000..b4e9aa1b92f3
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py
@@ -0,0 +1,46 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class OraclePartitionSettings(Model):
+ """The settings that will be leveraged for Oracle source partitioning.
+
+ :param partition_names: Names of the physical partitions of the Oracle
+ table.
+ :type partition_names: object
+ :param partition_column_name: The name of the column in integer type that
+ will be used for proceeding range partitioning. Type: string (or
+ Expression with resultType string).
+ :type partition_column_name: object
+ :param partition_upper_bound: The maximum value of the column specified in
+ partitionColumnName that will be used for proceeding range partitioning.
+ Type: string (or Expression with resultType string).
+ :type partition_upper_bound: object
+ :param partition_lower_bound: The minimum value of the column specified in
+ partitionColumnName that will be used for proceeding range partitioning.
+ Type: string (or Expression with resultType string).
+ :type partition_lower_bound: object
+ """
+
+ _attribute_map = {
+ 'partition_names': {'key': 'partitionNames', 'type': 'object'},
+ 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'},
+ 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'},
+ 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(OraclePartitionSettings, self).__init__(**kwargs)
+ self.partition_names = kwargs.get('partition_names', None)
+ self.partition_column_name = kwargs.get('partition_column_name', None)
+ self.partition_upper_bound = kwargs.get('partition_upper_bound', None)
+ self.partition_lower_bound = kwargs.get('partition_lower_bound', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py
new file mode 100644
index 000000000000..10641aab7f9f
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py
@@ -0,0 +1,46 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class OraclePartitionSettings(Model):
+ """The settings that will be leveraged for Oracle source partitioning.
+
+ :param partition_names: Names of the physical partitions of the Oracle
+ table.
+ :type partition_names: object
+ :param partition_column_name: The name of the column in integer type that
+ will be used for proceeding range partitioning. Type: string (or
+ Expression with resultType string).
+ :type partition_column_name: object
+ :param partition_upper_bound: The maximum value of the column specified in
+ partitionColumnName that will be used for proceeding range partitioning.
+ Type: string (or Expression with resultType string).
+ :type partition_upper_bound: object
+ :param partition_lower_bound: The minimum value of the column specified in
+ partitionColumnName that will be used for proceeding range partitioning.
+ Type: string (or Expression with resultType string).
+ :type partition_lower_bound: object
+ """
+
+ _attribute_map = {
+ 'partition_names': {'key': 'partitionNames', 'type': 'object'},
+ 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'},
+ 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'},
+ 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'},
+ }
+
+ def __init__(self, *, partition_names=None, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None:
+ super(OraclePartitionSettings, self).__init__(**kwargs)
+ self.partition_names = partition_names
+ self.partition_column_name = partition_column_name
+ self.partition_upper_bound = partition_upper_bound
+ self.partition_lower_bound = partition_lower_bound
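Before moving on to the source that consumes them, a sketch of these partition settings for a dynamic-range read; the column name and bounds are placeholders:

    from azure.mgmt.datafactory.models import OraclePartitionSettings

    # Split the copy into parallel ranges over an integer column; the bounds
    # are string-typed because every field accepts an ADF expression.
    partition_settings = OraclePartitionSettings(
        partition_column_name='ORDER_ID',
        partition_lower_bound='1',
        partition_upper_bound='1000000',
    )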
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service.py
new file mode 100644
index 000000000000..44ce000868b7
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service.py
@@ -0,0 +1,95 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class OracleServiceCloudLinkedService(LinkedService):
+ """Oracle Service Cloud linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param host: Required. The URL of the Oracle Service Cloud instance.
+ :type host: object
+ :param username: Required. The user name that you use to access the
+ Oracle Service Cloud server.
+ :type username: object
+ :param password: Required. The password corresponding to the user name
+ that you provided in the username key.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param use_encrypted_endpoints: Specifies whether the data source
+ endpoints are encrypted using HTTPS. The default value is true. Type:
+ boolean (or Expression with resultType boolean).
+ :type use_encrypted_endpoints: object
+ :param use_host_verification: Specifies whether to require the host name
+ in the server's certificate to match the host name of the server when
+ connecting over SSL. The default value is true. Type: boolean (or
+ Expression with resultType boolean).
+ :type use_host_verification: object
+ :param use_peer_verification: Specifies whether to verify the identity of
+ the server when connecting over SSL. The default value is true. Type:
+ boolean (or Expression with resultType boolean).
+ :type use_peer_verification: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'host': {'required': True},
+ 'username': {'required': True},
+ 'password': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'host': {'key': 'typeProperties.host', 'type': 'object'},
+ 'username': {'key': 'typeProperties.username', 'type': 'object'},
+ 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+ 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
+ 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
+ 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(OracleServiceCloudLinkedService, self).__init__(**kwargs)
+ self.host = kwargs.get('host', None)
+ self.username = kwargs.get('username', None)
+ self.password = kwargs.get('password', None)
+ self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None)
+ self.use_host_verification = kwargs.get('use_host_verification', None)
+ self.use_peer_verification = kwargs.get('use_peer_verification', None)
+ self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.type = 'OracleServiceCloud'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service_py3.py
new file mode 100644
index 000000000000..8732e2e82ca0
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service_py3.py
@@ -0,0 +1,95 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class OracleServiceCloudLinkedService(LinkedService):
+ """Oracle Service Cloud linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param host: Required. The URL of the Oracle Service Cloud instance.
+ :type host: object
+ :param username: Required. The user name that you use to access the
+ Oracle Service Cloud server.
+ :type username: object
+ :param password: Required. The password corresponding to the user name
+ that you provided in the username key.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param use_encrypted_endpoints: Specifies whether the data source
+ endpoints are encrypted using HTTPS. The default value is true. Type:
+ boolean (or Expression with resultType boolean).
+ :type use_encrypted_endpoints: object
+ :param use_host_verification: Specifies whether to require the host name
+ in the server's certificate to match the host name of the server when
+ connecting over SSL. The default value is true. Type: boolean (or
+ Expression with resultType boolean).
+ :type use_host_verification: object
+ :param use_peer_verification: Specifies whether to verify the identity of
+ the server when connecting over SSL. The default value is true. Type:
+ boolean (or Expression with resultType boolean).
+ :type use_peer_verification: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'host': {'required': True},
+ 'username': {'required': True},
+ 'password': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'host': {'key': 'typeProperties.host', 'type': 'object'},
+ 'username': {'key': 'typeProperties.username', 'type': 'object'},
+ 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+ 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
+ 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
+ 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, *, host, username, password, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None:
+ super(OracleServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+ self.host = host
+ self.username = username
+ self.password = password
+ self.use_encrypted_endpoints = use_encrypted_endpoints
+ self.use_host_verification = use_host_verification
+ self.use_peer_verification = use_peer_verification
+ self.encrypted_credential = encrypted_credential
+ self.type = 'OracleServiceCloud'
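A construction sketch for the Oracle Service Cloud linked service; the host and user are placeholders, and SecureString is the inline SecretBase variant mentioned elsewhere in this patch:

    from azure.mgmt.datafactory.models import (
        OracleServiceCloudLinkedService,
        SecureString,
    )

    osc_ls = OracleServiceCloudLinkedService(
        host='https://mycompany.rightnowdemo.com',
        username='integration-user',
        password=SecureString(value='example-password'),
        # These three switches already default to true on the service side;
        # they are spelled out here only to make the knobs visible.
        use_encrypted_endpoints=True,
        use_host_verification=True,
        use_peer_verification=True,
    )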
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset.py
new file mode 100644
index 000000000000..35ce3439d8a0
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset import Dataset
+
+
+class OracleServiceCloudObjectDataset(Dataset):
+ """Oracle Service Cloud dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset. Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param table_name: The table name. Type: string (or Expression with
+ resultType string).
+ :type table_name: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(OracleServiceCloudObjectDataset, self).__init__(**kwargs)
+ self.table_name = kwargs.get('table_name', None)
+ self.type = 'OracleServiceCloudObject'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset_py3.py
new file mode 100644
index 000000000000..a478e1abc828
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset_py3.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_py3 import Dataset
+
+
+class OracleServiceCloudObjectDataset(Dataset):
+ """Oracle Service Cloud dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset. Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param table_name: The table name. Type: string (or Expression with
+ resultType string).
+ :type table_name: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+ }
+
+ def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None:
+ super(OracleServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+ self.table_name = table_name
+ self.type = 'OracleServiceCloudObject'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source.py
new file mode 100644
index 000000000000..f42291941393
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source import CopySource
+
+
+class OracleServiceCloudSource(CopySource):
+ """A copy activity Oracle Service Cloud source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param query: A query to retrieve data from source. Type: string (or
+ Expression with resultType string).
+ :type query: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'query': {'key': 'query', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(OracleServiceCloudSource, self).__init__(**kwargs)
+ self.query = kwargs.get('query', None)
+ self.type = 'OracleServiceCloudSource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source_py3.py
new file mode 100644
index 000000000000..1fa5d6eb3748
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source_py3.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source_py3 import CopySource
+
+
+class OracleServiceCloudSource(CopySource):
+ """A copy activity Oracle Service Cloud source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param query: A query to retrieve data from source. Type: string (or
+ Expression with resultType string).
+ :type query: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'query': {'key': 'query', 'type': 'object'},
+ }
+
+ def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
+ super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.query = query
+ self.type = 'OracleServiceCloudSource'
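A sketch of the matching copy-activity source; the query text is a placeholder and the retry values are arbitrary:

    from azure.mgmt.datafactory.models import OracleServiceCloudSource

    source = OracleServiceCloudSource(
        query='SELECT * FROM ServiceProducts',
        source_retry_count=3,
        # Must match the documented duration pattern, here 30 seconds.
        source_retry_wait='00:00:30',
    )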
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink.py
new file mode 100644
index 000000000000..1f6c747c49db
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink.py
@@ -0,0 +1,66 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_sink import CopySink
+
+
+class OracleSink(CopySink):
+ """A copy activity Oracle sink.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param write_batch_size: Write batch size. Type: integer (or Expression
+ with resultType integer), minimum: 0.
+ :type write_batch_size: object
+ :param write_batch_timeout: Write batch timeout. Type: string (or
+ Expression with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type write_batch_timeout: object
+ :param sink_retry_count: Sink retry count. Type: integer (or Expression
+ with resultType integer).
+ :type sink_retry_count: object
+ :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+ resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type sink_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the sink data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param pre_copy_script: SQL pre-copy script. Type: string (or Expression
+ with resultType string).
+ :type pre_copy_script: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+ 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+ 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(OracleSink, self).__init__(**kwargs)
+ self.pre_copy_script = kwargs.get('pre_copy_script', None)
+ self.type = 'OracleSink'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink_py3.py
new file mode 100644
index 000000000000..3a571c66732a
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink_py3.py
@@ -0,0 +1,66 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_sink_py3 import CopySink
+
+
+class OracleSink(CopySink):
+ """A copy activity Oracle sink.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param write_batch_size: Write batch size. Type: integer (or Expression
+ with resultType integer), minimum: 0.
+ :type write_batch_size: object
+ :param write_batch_timeout: Write batch timeout. Type: string (or
+ Expression with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type write_batch_timeout: object
+ :param sink_retry_count: Sink retry count. Type: integer (or Expression
+ with resultType integer).
+ :type sink_retry_count: object
+ :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+ resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type sink_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the sink data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param pre_copy_script: SQL pre-copy script. Type: string (or Expression
+ with resultType string).
+ :type pre_copy_script: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+ 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+ 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
+ }
+
+ def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None:
+ super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.pre_copy_script = pre_copy_script
+ self.type = 'OracleSink'
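A sketch of the sink side; the pre-copy script and batch values are placeholders:

    from azure.mgmt.datafactory.models import OracleSink

    sink = OracleSink(
        # Runs against the target table before the copy starts.
        pre_copy_script='TRUNCATE TABLE STAGING_ORDERS',
        write_batch_size=10000,
        write_batch_timeout='00:05:00',
    )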
Possible values include: 'None', + 'PhysicalPartitionsOfTable', 'DynamicRange' + :type partition_option: str or + ~azure.mgmt.datafactory.models.OraclePartitionOption + :param partition_settings: The settings that will be leveraged for Oracle + source partitioning. + :type partition_settings: + ~azure.mgmt.datafactory.models.OraclePartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, + } + + def __init__(self, **kwargs): + super(OracleSource, self).__init__(**kwargs) + self.oracle_reader_query = kwargs.get('oracle_reader_query', None) + self.query_timeout = kwargs.get('query_timeout', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) + self.type = 'OracleSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py new file mode 100644 index 000000000000..0a871809896e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py @@ -0,0 +1,76 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class OracleSource(CopySource): + """A copy activity Oracle source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param oracle_reader_query: Oracle reader query. Type: string (or + Expression with resultType string). + :type oracle_reader_query: object + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
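+ For example, ``'02:00:00'`` requests a two-hour timeout in the
+ optional-days ``hh:mm:ss`` pattern above.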
+ :type query_timeout: object + :param partition_option: The partition mechanism that will be used for + Oracle read in parallel. Possible values include: 'None', + 'PhysicalPartitionsOfTable', 'DynamicRange' + :type partition_option: str or + ~azure.mgmt.datafactory.models.OraclePartitionOption + :param partition_settings: The settings that will be leveraged for Oracle + source partitioning. + :type partition_settings: + ~azure.mgmt.datafactory.models.OraclePartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, oracle_reader_query=None, query_timeout=None, partition_option=None, partition_settings=None, **kwargs) -> None: + super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.oracle_reader_query = oracle_reader_query + self.query_timeout = query_timeout + self.partition_option = partition_option + self.partition_settings = partition_settings + self.type = 'OracleSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py new file mode 100644 index 000000000000..c76b5ced3e5c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class OracleTableDataset(Dataset): + """The on-premises Oracle database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. 
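+ In practice this is usually a list of mappings with ``name`` and
+ ``type`` keys, mirroring DatasetSchemaDataElement.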
+ :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param oracle_table_dataset_schema: The schema name of the on-premises + Oracle database. Type: string (or Expression with resultType string). + :type oracle_table_dataset_schema: object + :param table: The table name of the on-premises Oracle database. Type: + string (or Expression with resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'oracle_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OracleTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.oracle_table_dataset_schema = kwargs.get('oracle_table_dataset_schema', None) + self.table = kwargs.get('table', None) + self.type = 'OracleTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py new file mode 100644 index 000000000000..b588fbac5244 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class OracleTableDataset(Dataset): + """The on-premises Oracle database dataset. + + All required parameters must be populated in order to send to Azure. 
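+
+     A minimal construction sketch (illustrative only; the reference
+     name, schema and table below are hypothetical, not defaults)::
+
+         from azure.mgmt.datafactory.models import (
+             LinkedServiceReference, OracleTableDataset)
+
+         dataset = OracleTableDataset(
+             linked_service_name=LinkedServiceReference(
+                 reference_name='MyOracleLinkedService'),
+             oracle_table_dataset_schema='HR',
+             table='EMPLOYEES')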
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param oracle_table_dataset_schema: The schema name of the on-premises + Oracle database. Type: string (or Expression with resultType string). + :type oracle_table_dataset_schema: object + :param table: The table name of the on-premises Oracle database. Type: + string (or Expression with resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'oracle_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, oracle_table_dataset_schema=None, table=None, **kwargs) -> None: + super(OracleTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.oracle_table_dataset_schema = oracle_table_dataset_schema + self.table = table + self.type = 'OracleTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format.py new file mode 100644 index 000000000000..8f0a0322062c --- /dev/null +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_storage_format import DatasetStorageFormat + + +class OrcFormat(DatasetStorageFormat): + """The data stored in Optimized Row Columnar (ORC) format. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param serializer: Serializer. Type: string (or Expression with resultType + string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with + resultType string). + :type deserializer: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(OrcFormat, self).__init__(**kwargs) + self.type = 'OrcFormat' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format_py3.py new file mode 100644 index 000000000000..40a0e389ccc3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_storage_format_py3 import DatasetStorageFormat + + +class OrcFormat(DatasetStorageFormat): + """The data stored in Optimized Row Columnar (ORC) format. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param serializer: Serializer. Type: string (or Expression with resultType + string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with + resultType string). + :type deserializer: object + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, **kwargs) -> None: + super(OrcFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) + self.type = 'OrcFormat' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification.py new file mode 100644 index 000000000000..aef855d955f0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ParameterSpecification(Model): + """Definition of a single parameter for an entity. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Parameter type. Possible values include: 'Object', + 'String', 'Int', 'Float', 'Bool', 'Array', 'SecureString' + :type type: str or ~azure.mgmt.datafactory.models.ParameterType + :param default_value: Default value of parameter. + :type default_value: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'default_value': {'key': 'defaultValue', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ParameterSpecification, self).__init__(**kwargs) + self.type = kwargs.get('type', None) + self.default_value = kwargs.get('default_value', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification_py3.py new file mode 100644 index 000000000000..d5b6f981d365 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification_py3.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ParameterSpecification(Model): + """Definition of a single parameter for an entity. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Parameter type. 
Possible values include: 'Object', + 'String', 'Int', 'Float', 'Bool', 'Array', 'SecureString' + :type type: str or ~azure.mgmt.datafactory.models.ParameterType + :param default_value: Default value of parameter. + :type default_value: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'default_value': {'key': 'defaultValue', 'type': 'object'}, + } + + def __init__(self, *, type, default_value=None, **kwargs) -> None: + super(ParameterSpecification, self).__init__(**kwargs) + self.type = type + self.default_value = default_value diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset.py new file mode 100644 index 000000000000..ffaf8e1f6d93 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset.py @@ -0,0 +1,76 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class ParquetDataset(Dataset): + """Parquet dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the parquet storage. 
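+ Any ``DatasetLocation`` subtype (such as a blob storage location) can
+ be supplied here.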
+ :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param compression_codec: + :type compression_codec: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ParquetDataset, self).__init__(**kwargs) + self.location = kwargs.get('location', None) + self.compression_codec = kwargs.get('compression_codec', None) + self.type = 'Parquet' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset_py3.py new file mode 100644 index 000000000000..4d754450ce15 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset_py3.py @@ -0,0 +1,76 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class ParquetDataset(Dataset): + """Parquet dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the parquet storage. 
+ :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param compression_codec: + :type compression_codec: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, compression_codec=None, **kwargs) -> None: + super(ParquetDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.location = location + self.compression_codec = compression_codec + self.type = 'Parquet' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format.py new file mode 100644 index 000000000000..d742ff24b522 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_storage_format import DatasetStorageFormat + + +class ParquetFormat(DatasetStorageFormat): + """The data stored in Parquet format. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param serializer: Serializer. Type: string (or Expression with resultType + string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with + resultType string). + :type deserializer: object + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ParquetFormat, self).__init__(**kwargs) + self.type = 'ParquetFormat' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format_py3.py new file mode 100644 index 000000000000..36a6f5c88c4d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_storage_format_py3 import DatasetStorageFormat + + +class ParquetFormat(DatasetStorageFormat): + """The data stored in Parquet format. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param serializer: Serializer. Type: string (or Expression with resultType + string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with + resultType string). + :type deserializer: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, **kwargs) -> None: + super(ParquetFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) + self.type = 'ParquetFormat' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py new file mode 100644 index 000000000000..dea3e0f8fc52 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class ParquetSink(CopySink): + """A copy activity Parquet sink. 
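+
+     A construction sketch (illustrative; the timeout and connection
+     values are examples, not defaults)::
+
+         from azure.mgmt.datafactory.models import ParquetSink
+
+         sink = ParquetSink(
+             write_batch_timeout='00:05:00',
+             max_concurrent_connections=4)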
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Parquet store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + } + + def __init__(self, **kwargs): + super(ParquetSink, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.type = 'ParquetSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py new file mode 100644 index 000000000000..463044fef83f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class ParquetSink(CopySink): + """A copy activity Parquet sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. 
Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Parquet store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.type = 'ParquetSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py new file mode 100644 index 000000000000..ab888c7361a2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class ParquetSource(CopySource): + """A copy activity Parquet source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. 
Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Parquet store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__(self, **kwargs): + super(ParquetSource, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.type = 'ParquetSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py new file mode 100644 index 000000000000..332a7b9b8c5e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class ParquetSource(CopySource): + """A copy activity Parquet source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Parquet store settings. 
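+ Any ``StoreReadSettings`` subtype appropriate to the underlying file
+ store may be supplied.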
+ :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + super(ParquetSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.type = 'ParquetSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service.py new file mode 100644 index 000000000000..d7ae0bc075e7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service.py @@ -0,0 +1,92 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class PaypalLinkedService(LinkedService): + """Paypal Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The URL of the PayPal instance. (i.e. + api.sandbox.paypal.com) + :type host: object + :param client_id: Required. The client ID associated with your PayPal + application. + :type client_id: object + :param client_secret: The client secret associated with your PayPal + application. + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. 
+ :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PaypalLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Paypal' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service_py3.py new file mode 100644 index 000000000000..c11cda7a52f3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service_py3.py @@ -0,0 +1,92 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class PaypalLinkedService(LinkedService): + """Paypal Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The URL of the PayPal instance. (i.e. + api.sandbox.paypal.com) + :type host: object + :param client_id: Required. The client ID associated with your PayPal + application. + :type client_id: object + :param client_secret: The client secret associated with your PayPal + application. + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(PaypalLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.client_id = client_id + self.client_secret = client_secret + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + 
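# ``encrypted_credential`` is normally produced by the integration
+ # runtime credential manager (see the docstring above), rather than
+ # hand-written.
+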
self.encrypted_credential = encrypted_credential + self.type = 'Paypal' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset.py new file mode 100644 index 000000000000..d0fdc678841b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class PaypalObjectDataset(Dataset): + """Paypal Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PaypalObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'PaypalObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset_py3.py new file mode 100644 index 000000000000..55df7c97166d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class PaypalObjectDataset(Dataset): + """Paypal Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(PaypalObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'PaypalObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source.py new file mode 100644 index 000000000000..94cdbccae6ee --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class PaypalSource(CopySource): + """A copy activity Paypal Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
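For reference while reviewing, a minimal sketch of constructing the new dataset model above (Python 3 form); the linked-service name "PaypalLS" and the table name are illustrative placeholders, not part of this change:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, PaypalObjectDataset)

    # "PaypalLS" is a hypothetical, pre-existing linked service;
    # table_name accepts a plain string or an ADF Expression.
    dataset = PaypalObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name='PaypalLS'),
        table_name='Payments')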
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PaypalSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'PaypalSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source_py3.py new file mode 100644 index 000000000000..05730d0ae067 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class PaypalSource(CopySource): + """A copy activity Paypal Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'PaypalSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service.py new file mode 100644 index 000000000000..308a8e4cf592 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service.py @@ -0,0 +1,121 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class PhoenixLinkedService(LinkedService): + """Phoenix server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The IP address or host name of the Phoenix server. + (i.e. 192.168.222.160) + :type host: object + :param port: The TCP port that the Phoenix server uses to listen for + client connections. The default value is 8765. + :type port: object + :param http_path: The partial URL corresponding to the Phoenix server. + (i.e. /gateway/sandbox/phoenix/version). The default value is hbasephoenix + if using WindowsAzureHDInsightService. + :type http_path: object + :param authentication_type: Required. The authentication mechanism used to + connect to the Phoenix server. Possible values include: 'Anonymous', + 'UsernameAndPassword', 'WindowsAzureHDInsightService' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.PhoenixAuthenticationType + :param username: The user name used to connect to the Phoenix server. 
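A matching sketch for the new copy source above; the query text and retry count are illustrative:

    from azure.mgmt.datafactory.models import PaypalSource

    # query accepts a string or an ADF Expression; the retry knobs
    # inherited from CopySource are optional.
    source = PaypalSource(query='SELECT * FROM Payments',
                          source_retry_count=2)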
+ :type username: object + :param password: The password corresponding to the user name. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a + CA-issued SSL certificate name to match the host name of the server when + connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PhoenixLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.port = kwargs.get('port', None) + self.http_path = kwargs.get('http_path', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', 
None) + self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) + self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Phoenix' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service_py3.py new file mode 100644 index 000000000000..de8210c2cc89 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service_py3.py @@ -0,0 +1,121 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class PhoenixLinkedService(LinkedService): + """Phoenix server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The IP address or host name of the Phoenix server. + (i.e. 192.168.222.160) + :type host: object + :param port: The TCP port that the Phoenix server uses to listen for + client connections. The default value is 8765. + :type port: object + :param http_path: The partial URL corresponding to the Phoenix server. + (i.e. /gateway/sandbox/phoenix/version). The default value is hbasephoenix + if using WindowsAzureHDInsightService. + :type http_path: object + :param authentication_type: Required. The authentication mechanism used to + connect to the Phoenix server. Possible values include: 'Anonymous', + 'UsernameAndPassword', 'WindowsAzureHDInsightService' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.PhoenixAuthenticationType + :param username: The user name used to connect to the Phoenix server. + :type username: object + :param password: The password corresponding to the user name. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. 
+ :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a + CA-issued SSL certificate name to match the host name of the server when + connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, http_path=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: + super(PhoenixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.port = port + self.http_path = http_path + self.authentication_type = authentication_type + self.username = username + self.password = password + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.encrypted_credential = encrypted_credential + self.type = 'Phoenix' diff --git 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset.py new file mode 100644 index 000000000000..ccaa2eb49abd --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class PhoenixObjectDataset(Dataset): + """Phoenix server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The table name of the Phoenix. Type: string (or Expression + with resultType string). + :type table: object + :param phoenix_object_dataset_schema: The schema name of the Phoenix. + Type: string (or Expression with resultType string). 
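A hedged sketch of the Phoenix linked service introduced above; the host echoes the docstring's example, and the user name and password are placeholders:

    from azure.mgmt.datafactory.models import (
        PhoenixLinkedService, SecureString)

    phoenix_ls = PhoenixLinkedService(
        host='192.168.222.160',                     # example host
        authentication_type='UsernameAndPassword',
        username='phoenix_user',                    # placeholder
        password=SecureString(value='<password>'),  # placeholder secret
        enable_ssl=True)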
+ :type phoenix_object_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'phoenix_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PhoenixObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.phoenix_object_dataset_schema = kwargs.get('phoenix_object_dataset_schema', None) + self.type = 'PhoenixObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset_py3.py new file mode 100644 index 000000000000..cda4dc41dc22 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class PhoenixObjectDataset(Dataset): + """Phoenix server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. 
+ :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The table name of the Phoenix. Type: string (or Expression + with resultType string). + :type table: object + :param phoenix_object_dataset_schema: The schema name of the Phoenix. + Type: string (or Expression with resultType string). + :type phoenix_object_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'phoenix_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, phoenix_object_dataset_schema=None, **kwargs) -> None: + super(PhoenixObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.table = table + self.phoenix_object_dataset_schema = phoenix_object_dataset_schema + self.type = 'PhoenixObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source.py new file mode 100644 index 000000000000..30171c6177ff --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class PhoenixSource(CopySource): + """A copy activity Phoenix server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
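Because table_name is being retired in favor of the schema/table pair, a short sketch using the newer properties ("PhoenixLS" and the object names are placeholders):

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, PhoenixObjectDataset)

    dataset = PhoenixObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name='PhoenixLS'),
        phoenix_object_dataset_schema='default',  # serialized as typeProperties.schema
        table='WEB_STAT')                         # serialized as typeProperties.table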
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PhoenixSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'PhoenixSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source_py3.py new file mode 100644 index 000000000000..1384f59e1aa4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class PhoenixSource(CopySource): + """A copy activity Phoenix server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'PhoenixSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder.py new file mode 100644 index 000000000000..bebc05cb1824 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PipelineFolder(Model): + """The folder that this Pipeline is in. If not specified, Pipeline will appear + at the root level. + + :param name: The name of the folder that this Pipeline is in. + :type name: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(PipelineFolder, self).__init__(**kwargs) + self.name = kwargs.get('name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder_py3.py new file mode 100644 index 000000000000..02c9b8dbbff1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder_py3.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PipelineFolder(Model): + """The folder that this Pipeline is in. If not specified, Pipeline will appear + at the root level. + + :param name: The name of the folder that this Pipeline is in. 
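The corresponding source, with the optional retry settings shown (values illustrative; source_retry_wait must match the timespan pattern documented above):

    from azure.mgmt.datafactory.models import PhoenixSource

    source = PhoenixSource(
        query='SELECT HOST, DOMAIN FROM WEB_STAT',
        source_retry_count=3,
        source_retry_wait='00:00:30')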
+ :type name: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__(self, *, name: str=None, **kwargs) -> None: + super(PipelineFolder, self).__init__(**kwargs) + self.name = name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference.py new file mode 100644 index 000000000000..aa8b23e62932 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PipelineReference(Model): + """Pipeline reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Pipeline reference type. Default value: + "PipelineReference" . + :vartype type: str + :param reference_name: Required. Reference pipeline name. + :type reference_name: str + :param name: Reference name. + :type name: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + } + + type = "PipelineReference" + + def __init__(self, **kwargs): + super(PipelineReference, self).__init__(**kwargs) + self.reference_name = kwargs.get('reference_name', None) + self.name = kwargs.get('name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference_py3.py new file mode 100644 index 000000000000..ce63f06092d1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference_py3.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PipelineReference(Model): + """Pipeline reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Pipeline reference type. Default value: + "PipelineReference" . + :vartype type: str + :param reference_name: Required. Reference pipeline name. + :type reference_name: str + :param name: Reference name. 
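PipelineFolder is a thin wrapper around a folder name; a one-line sketch with a placeholder name:

    from azure.mgmt.datafactory.models import PipelineFolder

    folder = PipelineFolder(name='Ingestion')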
+ :type name: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + } + + type = "PipelineReference" + + def __init__(self, *, reference_name: str, name: str=None, **kwargs) -> None: + super(PipelineReference, self).__init__(**kwargs) + self.reference_name = reference_name + self.name = name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource.py new file mode 100644 index 000000000000..a39deaccc87b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource.py @@ -0,0 +1,84 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .sub_resource import SubResource + + +class PipelineResource(SubResource): + """Pipeline resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: The description of the pipeline. + :type description: str + :param activities: List of activities in pipeline. + :type activities: list[~azure.mgmt.datafactory.models.Activity] + :param parameters: List of parameters for pipeline. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param variables: List of variables for pipeline. + :type variables: dict[str, + ~azure.mgmt.datafactory.models.VariableSpecification] + :param concurrency: The max number of concurrent runs for the pipeline. + :type concurrency: int + :param annotations: List of tags that can be used for describing the + Pipeline. + :type annotations: list[object] + :param folder: The folder that this Pipeline is in. If not specified, + Pipeline will appear at the root level. 
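Only reference_name is required when building the reference, since the class pins the constant type; the pipeline name below is a placeholder:

    from azure.mgmt.datafactory.models import PipelineReference

    ref = PipelineReference(reference_name='DailyLoad')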
+ :type folder: ~azure.mgmt.datafactory.models.PipelineFolder + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'concurrency': {'minimum': 1}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'properties.description', 'type': 'str'}, + 'activities': {'key': 'properties.activities', 'type': '[Activity]'}, + 'parameters': {'key': 'properties.parameters', 'type': '{ParameterSpecification}'}, + 'variables': {'key': 'properties.variables', 'type': '{VariableSpecification}'}, + 'concurrency': {'key': 'properties.concurrency', 'type': 'int'}, + 'annotations': {'key': 'properties.annotations', 'type': '[object]'}, + 'folder': {'key': 'properties.folder', 'type': 'PipelineFolder'}, + } + + def __init__(self, **kwargs): + super(PipelineResource, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.description = kwargs.get('description', None) + self.activities = kwargs.get('activities', None) + self.parameters = kwargs.get('parameters', None) + self.variables = kwargs.get('variables', None) + self.concurrency = kwargs.get('concurrency', None) + self.annotations = kwargs.get('annotations', None) + self.folder = kwargs.get('folder', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource_paged.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource_paged.py new file mode 100644 index 000000000000..a7c7ed553c07 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.paging import Paged + + +class PipelineResourcePaged(Paged): + """ + A paging container for iterating over a list of :class:`PipelineResource <azure.mgmt.datafactory.models.PipelineResource>` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[PipelineResource]'} + } + + def __init__(self, *args, **kwargs): + + super(PipelineResourcePaged, self).__init__(*args, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource_py3.py new file mode 100644 index 000000000000..8299cdb73887 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource_py3.py @@ -0,0 +1,84 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator.
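The paged container above is what the client's pipeline list operation yields, and iterating it follows nextLink transparently; a sketch assuming an already-constructed DataFactoryManagementClient and placeholder resource-group/factory names:

    # client: azure.mgmt.datafactory.DataFactoryManagementClient
    for pipeline in client.pipelines.list_by_factory('my-rg', 'my-factory'):
        print(pipeline.name)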
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .sub_resource_py3 import SubResource + + +class PipelineResource(SubResource): + """Pipeline resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: The description of the pipeline. + :type description: str + :param activities: List of activities in pipeline. + :type activities: list[~azure.mgmt.datafactory.models.Activity] + :param parameters: List of parameters for pipeline. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param variables: List of variables for pipeline. + :type variables: dict[str, + ~azure.mgmt.datafactory.models.VariableSpecification] + :param concurrency: The max number of concurrent runs for the pipeline. + :type concurrency: int + :param annotations: List of tags that can be used for describing the + Pipeline. + :type annotations: list[object] + :param folder: The folder that this Pipeline is in. If not specified, + Pipeline will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.PipelineFolder + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'concurrency': {'minimum': 1}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'properties.description', 'type': 'str'}, + 'activities': {'key': 'properties.activities', 'type': '[Activity]'}, + 'parameters': {'key': 'properties.parameters', 'type': '{ParameterSpecification}'}, + 'variables': {'key': 'properties.variables', 'type': '{VariableSpecification}'}, + 'concurrency': {'key': 'properties.concurrency', 'type': 'int'}, + 'annotations': {'key': 'properties.annotations', 'type': '[object]'}, + 'folder': {'key': 'properties.folder', 'type': 'PipelineFolder'}, + } + + def __init__(self, *, additional_properties=None, description: str=None, activities=None, parameters=None, variables=None, concurrency: int=None, annotations=None, folder=None, **kwargs) -> None: + super(PipelineResource, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.description = description + self.activities = activities + self.parameters = parameters + self.variables = variables + self.concurrency = concurrency + self.annotations = annotations + self.folder = folder diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run.py new file mode 100644 index 000000000000..a2407bd9835f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PipelineRun(Model): + """Information about a pipeline run. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar run_id: Identifier of a run. + :vartype run_id: str + :ivar run_group_id: Identifier that correlates all the recovery runs of a + pipeline run. + :vartype run_group_id: str + :ivar is_latest: Indicates if the recovered pipeline run is the latest in + its group. + :vartype is_latest: bool + :ivar pipeline_name: The pipeline name. + :vartype pipeline_name: str + :ivar parameters: The full or partial list of parameter name, value pair + used in the pipeline run. + :vartype parameters: dict[str, str] + :ivar invoked_by: Entity that started the pipeline run. + :vartype invoked_by: ~azure.mgmt.datafactory.models.PipelineRunInvokedBy + :ivar last_updated: The last updated timestamp for the pipeline run event + in ISO8601 format. + :vartype last_updated: datetime + :ivar run_start: The start time of a pipeline run in ISO8601 format. + :vartype run_start: datetime + :ivar run_end: The end time of a pipeline run in ISO8601 format. + :vartype run_end: datetime + :ivar duration_in_ms: The duration of a pipeline run. + :vartype duration_in_ms: int + :ivar status: The status of a pipeline run. + :vartype status: str + :ivar message: The message from a pipeline run. 
+ :vartype message: str + """ + + _validation = { + 'run_id': {'readonly': True}, + 'run_group_id': {'readonly': True}, + 'is_latest': {'readonly': True}, + 'pipeline_name': {'readonly': True}, + 'parameters': {'readonly': True}, + 'invoked_by': {'readonly': True}, + 'last_updated': {'readonly': True}, + 'run_start': {'readonly': True}, + 'run_end': {'readonly': True}, + 'duration_in_ms': {'readonly': True}, + 'status': {'readonly': True}, + 'message': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'run_id': {'key': 'runId', 'type': 'str'}, + 'run_group_id': {'key': 'runGroupId', 'type': 'str'}, + 'is_latest': {'key': 'isLatest', 'type': 'bool'}, + 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{str}'}, + 'invoked_by': {'key': 'invokedBy', 'type': 'PipelineRunInvokedBy'}, + 'last_updated': {'key': 'lastUpdated', 'type': 'iso-8601'}, + 'run_start': {'key': 'runStart', 'type': 'iso-8601'}, + 'run_end': {'key': 'runEnd', 'type': 'iso-8601'}, + 'duration_in_ms': {'key': 'durationInMs', 'type': 'int'}, + 'status': {'key': 'status', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(PipelineRun, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.run_id = None + self.run_group_id = None + self.is_latest = None + self.pipeline_name = None + self.parameters = None + self.invoked_by = None + self.last_updated = None + self.run_start = None + self.run_end = None + self.duration_in_ms = None + self.status = None + self.message = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by.py new file mode 100644 index 000000000000..acefb80fd078 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PipelineRunInvokedBy(Model): + """Provides entity name and id that started the pipeline run. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar name: Name of the entity that started the pipeline run. + :vartype name: str + :ivar id: The ID of the entity that started the run. + :vartype id: str + :ivar invoked_by_type: The type of the entity that started the run. 
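Every field on PipelineRun is server-populated, so instances are fetched rather than constructed; a sketch with placeholder names, where run_id would come from pipelines.create_run:

    run = client.pipeline_runs.get('my-rg', 'my-factory', run_id)
    print(run.status, run.duration_in_ms)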
+ :vartype invoked_by_type: str + """ + + _validation = { + 'name': {'readonly': True}, + 'id': {'readonly': True}, + 'invoked_by_type': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'str'}, + 'invoked_by_type': {'key': 'invokedByType', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(PipelineRunInvokedBy, self).__init__(**kwargs) + self.name = None + self.id = None + self.invoked_by_type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by_py3.py new file mode 100644 index 000000000000..c954a18b8a67 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PipelineRunInvokedBy(Model): + """Provides entity name and id that started the pipeline run. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar name: Name of the entity that started the pipeline run. + :vartype name: str + :ivar id: The ID of the entity that started the run. + :vartype id: str + :ivar invoked_by_type: The type of the entity that started the run. + :vartype invoked_by_type: str + """ + + _validation = { + 'name': {'readonly': True}, + 'id': {'readonly': True}, + 'invoked_by_type': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'str'}, + 'invoked_by_type': {'key': 'invokedByType', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(PipelineRunInvokedBy, self).__init__(**kwargs) + self.name = None + self.id = None + self.invoked_by_type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_py3.py new file mode 100644 index 000000000000..33e0f23f24ac --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_py3.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PipelineRun(Model): + """Information about a pipeline run. + + Variables are only populated by the server, and will be ignored when + sending a request. 
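PipelineRunInvokedBy is likewise read-only and is reached through a fetched run, continuing the sketch above:

    print(run.invoked_by.name, run.invoked_by.invoked_by_type)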
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar run_id: Identifier of a run. + :vartype run_id: str + :ivar run_group_id: Identifier that correlates all the recovery runs of a + pipeline run. + :vartype run_group_id: str + :ivar is_latest: Indicates if the recovered pipeline run is the latest in + its group. + :vartype is_latest: bool + :ivar pipeline_name: The pipeline name. + :vartype pipeline_name: str + :ivar parameters: The full or partial list of parameter name, value pair + used in the pipeline run. + :vartype parameters: dict[str, str] + :ivar invoked_by: Entity that started the pipeline run. + :vartype invoked_by: ~azure.mgmt.datafactory.models.PipelineRunInvokedBy + :ivar last_updated: The last updated timestamp for the pipeline run event + in ISO8601 format. + :vartype last_updated: datetime + :ivar run_start: The start time of a pipeline run in ISO8601 format. + :vartype run_start: datetime + :ivar run_end: The end time of a pipeline run in ISO8601 format. + :vartype run_end: datetime + :ivar duration_in_ms: The duration of a pipeline run. + :vartype duration_in_ms: int + :ivar status: The status of a pipeline run. + :vartype status: str + :ivar message: The message from a pipeline run. + :vartype message: str + """ + + _validation = { + 'run_id': {'readonly': True}, + 'run_group_id': {'readonly': True}, + 'is_latest': {'readonly': True}, + 'pipeline_name': {'readonly': True}, + 'parameters': {'readonly': True}, + 'invoked_by': {'readonly': True}, + 'last_updated': {'readonly': True}, + 'run_start': {'readonly': True}, + 'run_end': {'readonly': True}, + 'duration_in_ms': {'readonly': True}, + 'status': {'readonly': True}, + 'message': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'run_id': {'key': 'runId', 'type': 'str'}, + 'run_group_id': {'key': 'runGroupId', 'type': 'str'}, + 'is_latest': {'key': 'isLatest', 'type': 'bool'}, + 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{str}'}, + 'invoked_by': {'key': 'invokedBy', 'type': 'PipelineRunInvokedBy'}, + 'last_updated': {'key': 'lastUpdated', 'type': 'iso-8601'}, + 'run_start': {'key': 'runStart', 'type': 'iso-8601'}, + 'run_end': {'key': 'runEnd', 'type': 'iso-8601'}, + 'duration_in_ms': {'key': 'durationInMs', 'type': 'int'}, + 'status': {'key': 'status', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(PipelineRun, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.run_id = None + self.run_group_id = None + self.is_latest = None + self.pipeline_name = None + self.parameters = None + self.invoked_by = None + self.last_updated = None + self.run_start = None + self.run_end = None + self.duration_in_ms = None + self.status = None + self.message = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response.py new file mode 100644 index 000000000000..c4591c5467ba --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) 
Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PipelineRunsQueryResponse(Model): + """A list of pipeline runs. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of pipeline runs. + :type value: list[~azure.mgmt.datafactory.models.PipelineRun] + :param continuation_token: The continuation token for getting the next + page of results, if any remaining results exist, null otherwise. + :type continuation_token: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[PipelineRun]'}, + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(PipelineRunsQueryResponse, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.continuation_token = kwargs.get('continuation_token', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response_py3.py new file mode 100644 index 000000000000..fbc689ec1632 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response_py3.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PipelineRunsQueryResponse(Model): + """A list of pipeline runs. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of pipeline runs. + :type value: list[~azure.mgmt.datafactory.models.PipelineRun] + :param continuation_token: The continuation token for getting the next + page of results, if any remaining results exist, null otherwise.
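The continuation_token is the paging handle for the run-query operation: pass the token from one response into the next request until it comes back empty. A sketch of draining every page, assuming the query_by_factory operation and the RunFilterParameters model from this package, and reusing the client from the earlier sketch:

from datetime import datetime, timedelta
from azure.mgmt.datafactory.models import RunFilterParameters

filters = RunFilterParameters(
    last_updated_after=datetime.utcnow() - timedelta(days=1),
    last_updated_before=datetime.utcnow())

all_runs = []
while True:
    page = client.pipeline_runs.query_by_factory(
        'example-rg', 'example-factory', filters)
    all_runs.extend(page.value)
    if not page.continuation_token:  # None once no results remain
        break
    filters.continuation_token = page.continuation_token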
+ :type continuation_token: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[PipelineRun]'}, + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + } + + def __init__(self, *, value, continuation_token: str=None, **kwargs) -> None: + super(PipelineRunsQueryResponse, self).__init__(**kwargs) + self.value = value + self.continuation_token = continuation_token diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings.py new file mode 100644 index 000000000000..5a261d8fea84 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PolybaseSettings(Model): + """PolyBase settings. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param reject_type: Reject type. Possible values include: 'value', + 'percentage' + :type reject_type: str or + ~azure.mgmt.datafactory.models.PolybaseSettingsRejectType + :param reject_value: Specifies the value or the percentage of rows that + can be rejected before the query fails. Type: number (or Expression with + resultType number), minimum: 0. + :type reject_value: object + :param reject_sample_value: Determines the number of rows to attempt to + retrieve before the PolyBase recalculates the percentage of rejected rows. + Type: integer (or Expression with resultType integer), minimum: 0. + :type reject_sample_value: object + :param use_type_default: Specifies how to handle missing values in + delimited text files when PolyBase retrieves data from the text file. + Type: boolean (or Expression with resultType boolean). 
+ :type use_type_default: object + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'reject_type': {'key': 'rejectType', 'type': 'str'}, + 'reject_value': {'key': 'rejectValue', 'type': 'object'}, + 'reject_sample_value': {'key': 'rejectSampleValue', 'type': 'object'}, + 'use_type_default': {'key': 'useTypeDefault', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PolybaseSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.reject_type = kwargs.get('reject_type', None) + self.reject_value = kwargs.get('reject_value', None) + self.reject_sample_value = kwargs.get('reject_sample_value', None) + self.use_type_default = kwargs.get('use_type_default', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings_py3.py new file mode 100644 index 000000000000..baae78b14c5f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings_py3.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PolybaseSettings(Model): + """PolyBase settings. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param reject_type: Reject type. Possible values include: 'value', + 'percentage' + :type reject_type: str or + ~azure.mgmt.datafactory.models.PolybaseSettingsRejectType + :param reject_value: Specifies the value or the percentage of rows that + can be rejected before the query fails. Type: number (or Expression with + resultType number), minimum: 0. + :type reject_value: object + :param reject_sample_value: Determines the number of rows to attempt to + retrieve before the PolyBase recalculates the percentage of rejected rows. + Type: integer (or Expression with resultType integer), minimum: 0. + :type reject_sample_value: object + :param use_type_default: Specifies how to handle missing values in + delimited text files when PolyBase retrieves data from the text file. + Type: boolean (or Expression with resultType boolean). 
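PolyBase settings are typically attached to a SQL Data Warehouse copy sink. A sketch under the assumption that the SqlDWSink model in this package exposes allow_poly_base and poly_base_settings; the thresholds are illustrative only:

from azure.mgmt.datafactory.models import PolybaseSettings, SqlDWSink

# Tolerate up to 5% rejected rows, re-evaluating the percentage after
# every 1000 rows, and substitute type defaults for missing values.
polybase = PolybaseSettings(
    reject_type='percentage',
    reject_value=5.0,
    reject_sample_value=1000,
    use_type_default=True)

sink = SqlDWSink(allow_poly_base=True, poly_base_settings=polybase)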
+ :type use_type_default: object + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'reject_type': {'key': 'rejectType', 'type': 'str'}, + 'reject_value': {'key': 'rejectValue', 'type': 'object'}, + 'reject_sample_value': {'key': 'rejectSampleValue', 'type': 'object'}, + 'use_type_default': {'key': 'useTypeDefault', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, reject_type=None, reject_value=None, reject_sample_value=None, use_type_default=None, **kwargs) -> None: + super(PolybaseSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.reject_type = reject_type + self.reject_value = reject_value + self.reject_sample_value = reject_sample_value + self.use_type_default = use_type_default diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service.py new file mode 100644 index 000000000000..f8ce5bd0803e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class PostgreSqlLinkedService(LinkedService): + """Linked service for PostgreSQL data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. + :type connection_string: ~azure.mgmt.datafactory.models.SecretBase + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'SecretBase'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PostgreSqlLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'PostgreSql' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service_py3.py new file mode 100644 index 000000000000..0221aa620064 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class PostgreSqlLinkedService(LinkedService): + """Linked service for PostgreSQL data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. + :type connection_string: ~azure.mgmt.datafactory.models.SecretBase + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
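A sketch of registering this linked service, with the password factored out into Key Vault; all resource and secret names are placeholders, and client is an authenticated DataFactoryManagementClient as in the earlier sketch:

from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference, LinkedServiceReference,
    LinkedServiceResource, PostgreSqlLinkedService, SecureString)

pg_ls = PostgreSqlLinkedService(
    connection_string=SecureString(
        value='host=example-pg;port=5432;database=mydb;uid=pguser'),
    # Password resolved at runtime from a Key Vault linked service.
    password=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='ExampleKeyVaultLS'),
        secret_name='pg-password'))

client.linked_services.create_or_update(
    'example-rg', 'example-factory', 'PostgreSqlLS',
    LinkedServiceResource(properties=pg_ls))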
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'SecretBase'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(PostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'PostgreSql' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source.py new file mode 100644 index 000000000000..51dd25b25c60 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class PostgreSqlSource(CopySource): + """A copy activity source for PostgreSQL databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PostgreSqlSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'PostgreSqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source_py3.py new file mode 100644 index 000000000000..8aa12e4bdf8d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class PostgreSqlSource(CopySource): + """A copy activity source for PostgreSQL databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
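The source plugs into a copy activity next to dataset references. A sketch assuming input and output datasets already exist under the hypothetical names shown:

from azure.mgmt.datafactory.models import (
    BlobSink, CopyActivity, DatasetReference, PostgreSqlSource)

copy = CopyActivity(
    name='CopyFromPostgres',
    inputs=[DatasetReference(reference_name='PostgreSqlInputDS')],
    outputs=[DatasetReference(reference_name='BlobOutputDS')],
    source=PostgreSqlSource(
        query='SELECT id, amount FROM public.orders WHERE amount > 0'),
    sink=BlobSink())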
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'PostgreSqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset.py new file mode 100644 index 000000000000..031a2479815b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class PostgreSqlTableDataset(Dataset): + """The PostgreSQL table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The PostgreSQL table name. Type: string (or Expression with + resultType string). 
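Note that the service-side schema property surfaces in Python as postgre_sql_table_dataset_schema, since schema is already taken by the base Dataset. A construction sketch that prefers the split schema/table properties over the retired table_name; names are placeholders and client is as above:

from azure.mgmt.datafactory.models import (
    DatasetResource, LinkedServiceReference, PostgreSqlTableDataset)

pg_ds = PostgreSqlTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='PostgreSqlLS'),
    postgre_sql_table_dataset_schema='public',  # instead of table_name
    table='orders')

client.datasets.create_or_update(
    'example-rg', 'example-factory', 'PostgreSqlInputDS',
    DatasetResource(properties=pg_ds))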
+ :type table: object + :param postgre_sql_table_dataset_schema: The PostgreSQL schema name. Type: + string (or Expression with resultType string). + :type postgre_sql_table_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'postgre_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PostgreSqlTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.postgre_sql_table_dataset_schema = kwargs.get('postgre_sql_table_dataset_schema', None) + self.type = 'PostgreSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset_py3.py new file mode 100644 index 000000000000..8adb7bd409ea --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class PostgreSqlTableDataset(Dataset): + """The PostgreSQL table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. 
If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The PostgreSQL table name. Type: string (or Expression with + resultType string). + :type table: object + :param postgre_sql_table_dataset_schema: The PostgreSQL schema name. Type: + string (or Expression with resultType string). + :type postgre_sql_table_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'postgre_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, postgre_sql_table_dataset_schema=None, **kwargs) -> None: + super(PostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.table = table + self.postgre_sql_table_dataset_schema = postgre_sql_table_dataset_schema + self.type = 'PostgreSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service.py new file mode 100644 index 000000000000..21f18f07b262 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service.py @@ -0,0 +1,132 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class PrestoLinkedService(LinkedService): + """Presto server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The IP address or host name of the Presto server. + (i.e. 192.168.222.160) + :type host: object + :param server_version: Required. The version of the Presto server. (i.e. + 0.148-t) + :type server_version: object + :param catalog: Required. The catalog context for all request against the + server. + :type catalog: object + :param port: The TCP port that the Presto server uses to listen for client + connections. The default value is 8080. + :type port: object + :param authentication_type: Required. The authentication mechanism used to + connect to the Presto server. Possible values include: 'Anonymous', 'LDAP' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.PrestoAuthenticationType + :param username: The user name used to connect to the Presto server. + :type username: object + :param password: The password corresponding to the user name. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a + CA-issued SSL certificate name to match the host name of the server when + connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + :type allow_self_signed_server_cert: object + :param time_zone_id: The local time zone used by the connection. Valid + values for this option are specified in the IANA Time Zone Database. The + default value is the system time zone. + :type time_zone_id: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'server_version': {'required': True}, + 'catalog': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'server_version': {'key': 'typeProperties.serverVersion', 'type': 'object'}, + 'catalog': {'key': 'typeProperties.catalog', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'time_zone_id': {'key': 'typeProperties.timeZoneID', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PrestoLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.server_version = kwargs.get('server_version', None) + self.catalog = kwargs.get('catalog', None) + self.port = kwargs.get('port', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) + self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + self.time_zone_id = kwargs.get('time_zone_id', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Presto' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service_py3.py new file mode 100644 index 000000000000..75ab99d5a58f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service_py3.py @@ -0,0 +1,132 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class PrestoLinkedService(LinkedService): + """Presto server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The IP address or host name of the Presto server. + (i.e. 192.168.222.160) + :type host: object + :param server_version: Required. The version of the Presto server. (i.e. + 0.148-t) + :type server_version: object + :param catalog: Required. The catalog context for all request against the + server. + :type catalog: object + :param port: The TCP port that the Presto server uses to listen for client + connections. The default value is 8080. + :type port: object + :param authentication_type: Required. The authentication mechanism used to + connect to the Presto server. Possible values include: 'Anonymous', 'LDAP' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.PrestoAuthenticationType + :param username: The user name used to connect to the Presto server. + :type username: object + :param password: The password corresponding to the user name. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a + CA-issued SSL certificate name to match the host name of the server when + connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + :type allow_self_signed_server_cert: object + :param time_zone_id: The local time zone used by the connection. Valid + values for this option are specified in the IANA Time Zone Database. The + default value is the system time zone. + :type time_zone_id: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
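A construction sketch covering the required Presto properties, reusing the illustrative values from the docstring above; the credentials are placeholders:

from azure.mgmt.datafactory.models import PrestoLinkedService, SecureString

presto_ls = PrestoLinkedService(
    host='192.168.222.160',      # IP address or host name of the server
    server_version='0.148-t',
    catalog='hive',              # catalog context for all requests
    authentication_type='LDAP',
    username='presto_user',
    password=SecureString(value='<password>'),
    enable_ssl=True)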
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'server_version': {'required': True}, + 'catalog': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'server_version': {'key': 'typeProperties.serverVersion', 'type': 'object'}, + 'catalog': {'key': 'typeProperties.catalog', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'time_zone_id': {'key': 'typeProperties.timeZoneID', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, server_version, catalog, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, time_zone_id=None, encrypted_credential=None, **kwargs) -> None: + super(PrestoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.server_version = server_version + self.catalog = catalog + self.port = port + self.authentication_type = authentication_type + self.username = username + self.password = password + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.time_zone_id = time_zone_id + self.encrypted_credential = encrypted_credential + self.type = 'Presto' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset.py new file mode 100644 index 000000000000..eb80e1a97750 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class PrestoObjectDataset(Dataset): + """Presto server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The table name of the Presto. Type: string (or Expression + with resultType string). + :type table: object + :param presto_object_dataset_schema: The schema name of the Presto. Type: + string (or Expression with resultType string). 
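As with the PostgreSQL dataset, the service-side schema property maps to presto_object_dataset_schema in Python. A sketch with a hypothetical linked service name:

from azure.mgmt.datafactory.models import (
    LinkedServiceReference, PrestoObjectDataset)

presto_ds = PrestoObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='PrestoLS'),
    presto_object_dataset_schema='default',
    table='page_views')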
+ :type presto_object_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'presto_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PrestoObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.presto_object_dataset_schema = kwargs.get('presto_object_dataset_schema', None) + self.type = 'PrestoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset_py3.py new file mode 100644 index 000000000000..e3bd2f7e36aa --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class PrestoObjectDataset(Dataset): + """Presto server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. 
+ :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The table name of the Presto. Type: string (or Expression + with resultType string). + :type table: object + :param presto_object_dataset_schema: The schema name of the Presto. Type: + string (or Expression with resultType string). + :type presto_object_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'presto_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, presto_object_dataset_schema=None, **kwargs) -> None: + super(PrestoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.table = table + self.presto_object_dataset_schema = presto_object_dataset_schema + self.type = 'PrestoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source.py new file mode 100644 index 000000000000..9b7274011265 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class PrestoSource(CopySource): + """A copy activity Presto server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PrestoSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'PrestoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source_py3.py new file mode 100644 index 000000000000..47fe3eb5f790 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class PrestoSource(CopySource): + """A copy activity Presto server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
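# --- Illustrative usage sketch for the PrestoSource model defined above
# (editorial example, not part of the generated patch); the query and retry
# values are hypothetical.
from azure.mgmt.datafactory.models import PrestoSource

presto_source = PrestoSource(
    query="SELECT id, amount FROM sales.orders WHERE amount > 100",
    source_retry_count=3,          # retry the read up to three times
    source_retry_wait='00:00:30',  # 30-second wait, matching the hh:mm:ss pattern
)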
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'PrestoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service.py new file mode 100644 index 000000000000..6353c1cda96a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service.py @@ -0,0 +1,100 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class QuickBooksLinkedService(LinkedService): + """QuickBooks server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of the QuickBooks server. (i.e. + quickbooks.api.intuit.com) + :type endpoint: object + :param company_id: Required. The company ID of the QuickBooks company to + authorize. + :type company_id: object + :param consumer_key: Required. The consumer key for OAuth 1.0 + authentication. + :type consumer_key: object + :param consumer_secret: Required. The consumer secret for OAuth 1.0 + authentication. + :type consumer_secret: ~azure.mgmt.datafactory.models.SecretBase + :param access_token: Required. The access token for OAuth 1.0 + authentication. + :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param access_token_secret: Required. The access token secret for OAuth + 1.0 authentication. 
+ :type access_token_secret: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'company_id': {'required': True}, + 'consumer_key': {'required': True}, + 'consumer_secret': {'required': True}, + 'access_token': {'required': True}, + 'access_token_secret': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'company_id': {'key': 'typeProperties.companyId', 'type': 'object'}, + 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'object'}, + 'consumer_secret': {'key': 'typeProperties.consumerSecret', 'type': 'SecretBase'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'access_token_secret': {'key': 'typeProperties.accessTokenSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(QuickBooksLinkedService, self).__init__(**kwargs) + self.endpoint = kwargs.get('endpoint', None) + self.company_id = kwargs.get('company_id', None) + self.consumer_key = kwargs.get('consumer_key', None) + self.consumer_secret = kwargs.get('consumer_secret', None) + self.access_token = kwargs.get('access_token', None) + self.access_token_secret = kwargs.get('access_token_secret', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'QuickBooks' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service_py3.py new file mode 100644 index 000000000000..be12fc5cfba5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service_py3.py @@ -0,0 +1,100 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class QuickBooksLinkedService(LinkedService): + """QuickBooks server linked service. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of the QuickBooks server. (i.e. + quickbooks.api.intuit.com) + :type endpoint: object + :param company_id: Required. The company ID of the QuickBooks company to + authorize. + :type company_id: object + :param consumer_key: Required. The consumer key for OAuth 1.0 + authentication. + :type consumer_key: object + :param consumer_secret: Required. The consumer secret for OAuth 1.0 + authentication. + :type consumer_secret: ~azure.mgmt.datafactory.models.SecretBase + :param access_token: Required. The access token for OAuth 1.0 + authentication. + :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param access_token_secret: Required. The access token secret for OAuth + 1.0 authentication. + :type access_token_secret: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'company_id': {'required': True}, + 'consumer_key': {'required': True}, + 'consumer_secret': {'required': True}, + 'access_token': {'required': True}, + 'access_token_secret': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'company_id': {'key': 'typeProperties.companyId', 'type': 'object'}, + 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'object'}, + 'consumer_secret': {'key': 'typeProperties.consumerSecret', 'type': 'SecretBase'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'access_token_secret': {'key': 'typeProperties.accessTokenSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, endpoint, company_id, consumer_key, consumer_secret, access_token, access_token_secret, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, use_encrypted_endpoints=None, encrypted_credential=None, **kwargs) -> None: + super(QuickBooksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.endpoint = endpoint + self.company_id = company_id + self.consumer_key = consumer_key + self.consumer_secret = consumer_secret + self.access_token = access_token + self.access_token_secret = access_token_secret + self.use_encrypted_endpoints = use_encrypted_endpoints + self.encrypted_credential = encrypted_credential + self.type = 'QuickBooks' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset.py new file mode 100644 index 000000000000..73446d0ed938 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class QuickBooksObjectDataset(Dataset): + """QuickBooks server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. 
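# --- Illustrative usage sketch for the QuickBooksLinkedService model defined
# above (editorial example, not part of the generated patch). All OAuth 1.0
# values are placeholders; SecureString wraps an in-line secret, and a Key
# Vault secret reference could be substituted for any SecretBase parameter.
from azure.mgmt.datafactory.models import QuickBooksLinkedService, SecureString

quickbooks_ls = QuickBooksLinkedService(
    endpoint='quickbooks.api.intuit.com',
    company_id='example-company-id',
    consumer_key='example-consumer-key',
    consumer_secret=SecureString(value='example-consumer-secret'),
    access_token=SecureString(value='example-access-token'),
    access_token_secret=SecureString(value='example-access-token-secret'),
    use_encrypted_endpoints=True,
)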
+ :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(QuickBooksObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'QuickBooksObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset_py3.py new file mode 100644 index 000000000000..65f67d2b20af --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class QuickBooksObjectDataset(Dataset): + """QuickBooks server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. 
+ :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(QuickBooksObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'QuickBooksObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source.py new file mode 100644 index 000000000000..cce0a026ae5a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class QuickBooksSource(CopySource): + """A copy activity QuickBooks server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. 
Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(QuickBooksSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'QuickBooksSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source_py3.py new file mode 100644 index 000000000000..a00f35d4e1c1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class QuickBooksSource(CopySource): + """A copy activity QuickBooks server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'QuickBooksSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule.py new file mode 100644 index 000000000000..f23d452392b0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RecurrenceSchedule(Model): + """The recurrence schedule. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param minutes: The minutes. + :type minutes: list[int] + :param hours: The hours. + :type hours: list[int] + :param week_days: The days of the week. + :type week_days: list[str or ~azure.mgmt.datafactory.models.DaysOfWeek] + :param month_days: The month days. + :type month_days: list[int] + :param monthly_occurrences: The monthly occurrences. 
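# --- Illustrative usage sketch for the QuickBooksSource model defined above
# (editorial example, not part of the generated patch); the query is
# hypothetical.
from azure.mgmt.datafactory.models import QuickBooksSource

qb_source = QuickBooksSource(
    query="SELECT * FROM Invoice",
    max_concurrent_connections=2,  # cap parallel connections to the source
)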
+ :type monthly_occurrences: + list[~azure.mgmt.datafactory.models.RecurrenceScheduleOccurrence] + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'minutes': {'key': 'minutes', 'type': '[int]'}, + 'hours': {'key': 'hours', 'type': '[int]'}, + 'week_days': {'key': 'weekDays', 'type': '[DaysOfWeek]'}, + 'month_days': {'key': 'monthDays', 'type': '[int]'}, + 'monthly_occurrences': {'key': 'monthlyOccurrences', 'type': '[RecurrenceScheduleOccurrence]'}, + } + + def __init__(self, **kwargs): + super(RecurrenceSchedule, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.minutes = kwargs.get('minutes', None) + self.hours = kwargs.get('hours', None) + self.week_days = kwargs.get('week_days', None) + self.month_days = kwargs.get('month_days', None) + self.monthly_occurrences = kwargs.get('monthly_occurrences', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence.py new file mode 100644 index 000000000000..bbbe1fa28f17 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence.py @@ -0,0 +1,38 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RecurrenceScheduleOccurrence(Model): + """The recurrence schedule occurrence. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param day: The day of the week. Possible values include: 'Sunday', + 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday' + :type day: str or ~azure.mgmt.datafactory.models.DayOfWeek + :param occurrence: The occurrence. + :type occurrence: int + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'day': {'key': 'day', 'type': 'DayOfWeek'}, + 'occurrence': {'key': 'occurrence', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(RecurrenceScheduleOccurrence, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.day = kwargs.get('day', None) + self.occurrence = kwargs.get('occurrence', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence_py3.py new file mode 100644 index 000000000000..10aea1f00163 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence_py3.py @@ -0,0 +1,38 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
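# --- Illustrative usage sketch for the RecurrenceSchedule and
# RecurrenceScheduleOccurrence models defined above (editorial example, not
# part of the generated patch). The schedule below fires at 06:00 on the
# first Monday of each month; all values are examples.
from azure.mgmt.datafactory.models import (
    RecurrenceSchedule,
    RecurrenceScheduleOccurrence,
)

schedule = RecurrenceSchedule(
    minutes=[0],
    hours=[6],
    monthly_occurrences=[RecurrenceScheduleOccurrence(day='Monday', occurrence=1)],
)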
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RecurrenceScheduleOccurrence(Model): + """The recurrence schedule occurrence. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param day: The day of the week. Possible values include: 'Sunday', + 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday' + :type day: str or ~azure.mgmt.datafactory.models.DayOfWeek + :param occurrence: The occurrence. + :type occurrence: int + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'day': {'key': 'day', 'type': 'DayOfWeek'}, + 'occurrence': {'key': 'occurrence', 'type': 'int'}, + } + + def __init__(self, *, additional_properties=None, day=None, occurrence: int=None, **kwargs) -> None: + super(RecurrenceScheduleOccurrence, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.day = day + self.occurrence = occurrence diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_py3.py new file mode 100644 index 000000000000..fbe44fa3f021 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_py3.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RecurrenceSchedule(Model): + """The recurrence schedule. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param minutes: The minutes. + :type minutes: list[int] + :param hours: The hours. + :type hours: list[int] + :param week_days: The days of the week. + :type week_days: list[str or ~azure.mgmt.datafactory.models.DaysOfWeek] + :param month_days: The month days. + :type month_days: list[int] + :param monthly_occurrences: The monthly occurrences. 
+ :type monthly_occurrences: + list[~azure.mgmt.datafactory.models.RecurrenceScheduleOccurrence] + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'minutes': {'key': 'minutes', 'type': '[int]'}, + 'hours': {'key': 'hours', 'type': '[int]'}, + 'week_days': {'key': 'weekDays', 'type': '[DaysOfWeek]'}, + 'month_days': {'key': 'monthDays', 'type': '[int]'}, + 'monthly_occurrences': {'key': 'monthlyOccurrences', 'type': '[RecurrenceScheduleOccurrence]'}, + } + + def __init__(self, *, additional_properties=None, minutes=None, hours=None, week_days=None, month_days=None, monthly_occurrences=None, **kwargs) -> None: + super(RecurrenceSchedule, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.minutes = minutes + self.hours = hours + self.week_days = week_days + self.month_days = month_days + self.monthly_occurrences = monthly_occurrences diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings.py new file mode 100644 index 000000000000..a2e3bddb9425 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RedirectIncompatibleRowSettings(Model): + """Redirect incompatible row settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param linked_service_name: Required. Name of the Azure Storage, Storage + SAS, or Azure Data Lake Store linked service used for redirecting + incompatible row. Must be specified if redirectIncompatibleRowSettings is + specified. Type: string (or Expression with resultType string). + :type linked_service_name: object + :param path: The path for storing the redirect incompatible row data. + Type: string (or Expression with resultType string). 
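# --- Illustrative usage sketch for the RedirectIncompatibleRowSettings model
# documented here (editorial example, not part of the generated patch). Note
# that linked_service_name is typed as object (a name or expression), not a
# LinkedServiceReference; the storage name and path are hypothetical.
from azure.mgmt.datafactory.models import RedirectIncompatibleRowSettings

redirect_settings = RedirectIncompatibleRowSettings(
    linked_service_name='ExampleStagingStorage',
    path='rejected-rows/copy1',  # where redirected incompatible rows are stored
)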
+ :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'object'}, + 'path': {'key': 'path', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(RedirectIncompatibleRowSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.linked_service_name = kwargs.get('linked_service_name', None) + self.path = kwargs.get('path', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings_py3.py new file mode 100644 index 000000000000..b47878ef4354 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings_py3.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RedirectIncompatibleRowSettings(Model): + """Redirect incompatible row settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param linked_service_name: Required. Name of the Azure Storage, Storage + SAS, or Azure Data Lake Store linked service used for redirecting + incompatible row. Must be specified if redirectIncompatibleRowSettings is + specified. Type: string (or Expression with resultType string). + :type linked_service_name: object + :param path: The path for storing the redirect incompatible row data. + Type: string (or Expression with resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'object'}, + 'path': {'key': 'path', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, path=None, **kwargs) -> None: + super(RedirectIncompatibleRowSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.linked_service_name = linked_service_name + self.path = path diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings.py new file mode 100644 index 000000000000..7114b85e10db --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RedshiftUnloadSettings(Model): + """The Amazon S3 settings needed for the interim Amazon S3 when copying from + Amazon Redshift with unload. With this, data from Amazon Redshift source + will be unloaded into S3 first and then copied into the targeted sink from + the interim S3. + + All required parameters must be populated in order to send to Azure. + + :param s3_linked_service_name: Required. The name of the Amazon S3 linked + service which will be used for the unload operation when copying from the + Amazon Redshift source. + :type s3_linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param bucket_name: Required. The bucket of the interim Amazon S3 which + will be used to store the unloaded data from Amazon Redshift source. The + bucket must be in the same region as the Amazon Redshift source. Type: + string (or Expression with resultType string). + :type bucket_name: object + """ + + _validation = { + 's3_linked_service_name': {'required': True}, + 'bucket_name': {'required': True}, + } + + _attribute_map = { + 's3_linked_service_name': {'key': 's3LinkedServiceName', 'type': 'LinkedServiceReference'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(RedshiftUnloadSettings, self).__init__(**kwargs) + self.s3_linked_service_name = kwargs.get('s3_linked_service_name', None) + self.bucket_name = kwargs.get('bucket_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings_py3.py new file mode 100644 index 000000000000..a40d014a32f9 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings_py3.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RedshiftUnloadSettings(Model): + """The Amazon S3 settings needed for the interim Amazon S3 when copying from + Amazon Redshift with unload. With this, data from Amazon Redshift source + will be unloaded into S3 first and then copied into the targeted sink from + the interim S3. + + All required parameters must be populated in order to send to Azure. + + :param s3_linked_service_name: Required. The name of the Amazon S3 linked + service which will be used for the unload operation when copying from the + Amazon Redshift source. + :type s3_linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param bucket_name: Required. The bucket of the interim Amazon S3 which + will be used to store the unloaded data from Amazon Redshift source. 
The + bucket must be in the same region as the Amazon Redshift source. Type: + string (or Expression with resultType string). + :type bucket_name: object + """ + + _validation = { + 's3_linked_service_name': {'required': True}, + 'bucket_name': {'required': True}, + } + + _attribute_map = { + 's3_linked_service_name': {'key': 's3LinkedServiceName', 'type': 'LinkedServiceReference'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + } + + def __init__(self, *, s3_linked_service_name, bucket_name, **kwargs) -> None: + super(RedshiftUnloadSettings, self).__init__(**kwargs) + self.s3_linked_service_name = s3_linked_service_name + self.bucket_name = bucket_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source.py new file mode 100644 index 000000000000..2450f31222df --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class RelationalSource(CopySource): + """A copy activity source for various relational databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
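# --- Illustrative usage sketch for the RedshiftUnloadSettings model defined
# above (editorial example, not part of the generated patch). The S3 linked
# service name and bucket are placeholders; the bucket must share a region
# with the Redshift source.
from azure.mgmt.datafactory.models import LinkedServiceReference, RedshiftUnloadSettings

unload_settings = RedshiftUnloadSettings(
    s3_linked_service_name=LinkedServiceReference(reference_name='ExampleS3LinkedService'),
    bucket_name='example-interim-bucket',
)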
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(RelationalSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'RelationalSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source_py3.py new file mode 100644 index 000000000000..f88383cbd729 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class RelationalSource(CopySource): + """A copy activity source for various relational databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
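# --- Illustrative usage sketch for the RelationalSource model defined above
# (editorial example, not part of the generated patch); the query text is
# hypothetical.
from azure.mgmt.datafactory.models import RelationalSource

relational_source = RelationalSource(
    query="SELECT * FROM dbo.Customers WHERE Region = 'West'",
)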
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'RelationalSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset.py new file mode 100644 index 000000000000..e5dd2e0786c8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class RelationalTableDataset(Dataset): + """The relational table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The relational table name. Type: string (or Expression + with resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(RelationalTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'RelationalTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset_py3.py new file mode 100644 index 000000000000..3c85d95f8033 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class RelationalTableDataset(Dataset): + """The relational table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The relational table name. Type: string (or Expression + with resultType string). 
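# --- Illustrative usage sketch for the RelationalTableDataset model defined
# above (editorial example, not part of the generated patch); the linked
# service and table names are hypothetical.
from azure.mgmt.datafactory.models import LinkedServiceReference, RelationalTableDataset

relational_dataset = RelationalTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='ExampleOdbcLinkedService'),
    table_name='dbo.Customers',  # serialized as typeProperties.tableName
)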
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(RelationalTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'RelationalTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource.py new file mode 100644 index 000000000000..8de6a70ecc99 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource.py @@ -0,0 +1,54 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .sub_resource import SubResource + + +class RerunTriggerResource(SubResource): + """RerunTrigger resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Properties of the rerun trigger. 
+ :type properties: + ~azure.mgmt.datafactory.models.RerunTumblingWindowTrigger + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'RerunTumblingWindowTrigger'}, + } + + def __init__(self, **kwargs): + super(RerunTriggerResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_paged.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_paged.py new file mode 100644 index 000000000000..23d971c1082e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.paging import Paged + + +class RerunTriggerResourcePaged(Paged): + """ + A paging container for iterating over a list of :class:`RerunTriggerResource <azure.mgmt.datafactory.models.RerunTriggerResource>` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[RerunTriggerResource]'} + } + + def __init__(self, *args, **kwargs): + + super(RerunTriggerResourcePaged, self).__init__(*args, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_py3.py new file mode 100644 index 000000000000..19814ad0d76f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_py3.py @@ -0,0 +1,54 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .sub_resource_py3 import SubResource + + +class RerunTriggerResource(SubResource): + """RerunTrigger resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Properties of the rerun trigger.
+ :type properties: + ~azure.mgmt.datafactory.models.RerunTumblingWindowTrigger + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'RerunTumblingWindowTrigger'}, + } + + def __init__(self, *, properties, **kwargs) -> None: + super(RerunTriggerResource, self).__init__(**kwargs) + self.properties = properties diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger.py new file mode 100644 index 000000000000..8c5ca2d67f3c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .trigger import Trigger + + +class RerunTumblingWindowTrigger(Trigger): + """Trigger that schedules pipeline reruns for all fixed time interval windows + from a requested start time to requested end time. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param parent_trigger: The parent trigger reference. + :type parent_trigger: object + :param requested_start_time: Required. The start time for the time period + for which restatement is initiated. Only UTC time is currently supported. + :type requested_start_time: datetime + :param requested_end_time: Required. The end time for the time period for + which restatement is initiated. Only UTC time is currently supported. + :type requested_end_time: datetime + :param max_concurrency: Required. The max number of parallel time windows + (ready for execution) for which a rerun is triggered. 
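
The RerunTriggerResourcePaged container added above follows the standard msrest paging protocol: a lazy iterator that walks nextLink pages and yields RerunTriggerResource items. A hedged consumption sketch; the rerun_triggers operations group and list_by_factory method are assumptions about the accompanying client, and the resource names are placeholders:

    from azure.mgmt.datafactory import DataFactoryManagementClient

    # credentials: any msrestazure credentials object (placeholder).
    client = DataFactoryManagementClient(credentials, 'subscription-id')
    pages = client.rerun_triggers.list_by_factory(
        'my-resource-group', 'my-factory', 'my-trigger')
    for rerun_trigger in pages:  # iteration transparently follows nextLink
        print(rerun_trigger.name, rerun_trigger.properties.max_concurrency)
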
+ :type max_concurrency: int + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'requested_start_time': {'required': True}, + 'requested_end_time': {'required': True}, + 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, + 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, + 'requested_end_time': {'key': 'typeProperties.requestedEndTime', 'type': 'iso-8601'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(RerunTumblingWindowTrigger, self).__init__(**kwargs) + self.parent_trigger = kwargs.get('parent_trigger', None) + self.requested_start_time = kwargs.get('requested_start_time', None) + self.requested_end_time = kwargs.get('requested_end_time', None) + self.max_concurrency = kwargs.get('max_concurrency', None) + self.type = 'RerunTumblingWindowTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters.py new file mode 100644 index 000000000000..4b87f070b6be --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RerunTumblingWindowTriggerActionParameters(Model): + """Rerun tumbling window trigger Parameters. + + All required parameters must be populated in order to send to Azure. + + :param start_time: Required. The start time for the time period for which + restatement is initiated. Only UTC time is currently supported. + :type start_time: datetime + :param end_time: Required. The end time for the time period for which + restatement is initiated. Only UTC time is currently supported. + :type end_time: datetime + :param max_concurrency: Required. The max number of parallel time windows + (ready for execution) for which a rerun is triggered. 
+ :type max_concurrency: int + """ + + _validation = { + 'start_time': {'required': True}, + 'end_time': {'required': True}, + 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + } + + _attribute_map = { + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, + 'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(RerunTumblingWindowTriggerActionParameters, self).__init__(**kwargs) + self.start_time = kwargs.get('start_time', None) + self.end_time = kwargs.get('end_time', None) + self.max_concurrency = kwargs.get('max_concurrency', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters_py3.py new file mode 100644 index 000000000000..6fadecca588b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters_py3.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RerunTumblingWindowTriggerActionParameters(Model): + """Rerun tumbling window trigger Parameters. + + All required parameters must be populated in order to send to Azure. + + :param start_time: Required. The start time for the time period for which + restatement is initiated. Only UTC time is currently supported. + :type start_time: datetime + :param end_time: Required. The end time for the time period for which + restatement is initiated. Only UTC time is currently supported. + :type end_time: datetime + :param max_concurrency: Required. The max number of parallel time windows + (ready for execution) for which a rerun is triggered. + :type max_concurrency: int + """ + + _validation = { + 'start_time': {'required': True}, + 'end_time': {'required': True}, + 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + } + + _attribute_map = { + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, + 'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'}, + } + + def __init__(self, *, start_time, end_time, max_concurrency: int, **kwargs) -> None: + super(RerunTumblingWindowTriggerActionParameters, self).__init__(**kwargs) + self.start_time = start_time + self.end_time = end_time + self.max_concurrency = max_concurrency diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_py3.py new file mode 100644 index 000000000000..4a7a20759c1b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_py3.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .trigger_py3 import Trigger + + +class RerunTumblingWindowTrigger(Trigger): + """Trigger that schedules pipeline reruns for all fixed time interval windows + from a requested start time to requested end time. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param parent_trigger: The parent trigger reference. + :type parent_trigger: object + :param requested_start_time: Required. The start time for the time period + for which restatement is initiated. Only UTC time is currently supported. + :type requested_start_time: datetime + :param requested_end_time: Required. The end time for the time period for + which restatement is initiated. Only UTC time is currently supported. + :type requested_end_time: datetime + :param max_concurrency: Required. The max number of parallel time windows + (ready for execution) for which a rerun is triggered. 
+ :type max_concurrency: int + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'requested_start_time': {'required': True}, + 'requested_end_time': {'required': True}, + 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, + 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, + 'requested_end_time': {'key': 'typeProperties.requestedEndTime', 'type': 'iso-8601'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + } + + def __init__(self, *, requested_start_time, requested_end_time, max_concurrency: int, additional_properties=None, description: str=None, annotations=None, parent_trigger=None, **kwargs) -> None: + super(RerunTumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) + self.parent_trigger = parent_trigger + self.requested_start_time = requested_start_time + self.requested_end_time = requested_end_time + self.max_concurrency = max_concurrency + self.type = 'RerunTumblingWindowTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource.py new file mode 100644 index 000000000000..f6b2d7d3b512 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class Resource(Model): + """Azure Data Factory top-level resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :param location: The resource location. + :type location: str + :param tags: The resource tags. + :type tags: dict[str, str] + :ivar e_tag: Etag identifies change in the resource. 
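
Since the rerun models above enforce their bounds client-side (_validation caps max_concurrency at the range 1 to 50), a small sketch of building the action parameters may help; the datetimes below are placeholders:

    from datetime import datetime

    from azure.mgmt.datafactory.models import (
        RerunTumblingWindowTriggerActionParameters,
    )

    params = RerunTumblingWindowTriggerActionParameters(
        start_time=datetime(2019, 6, 1),  # only UTC is supported, per the docstring
        end_time=datetime(2019, 6, 7),
        max_concurrency=10,               # must fall within [1, 50]
    )
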
+ :vartype e_tag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'e_tag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'e_tag': {'key': 'eTag', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(Resource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + self.location = kwargs.get('location', None) + self.tags = kwargs.get('tags', None) + self.e_tag = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource_py3.py new file mode 100644 index 000000000000..cfc0e4b09aa5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource_py3.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class Resource(Model): + """Azure Data Factory top-level resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :param location: The resource location. + :type location: str + :param tags: The resource tags. + :type tags: dict[str, str] + :ivar e_tag: Etag identifies change in the resource. + :vartype e_tag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'e_tag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'e_tag': {'key': 'eTag', 'type': 'str'}, + } + + def __init__(self, *, location: str=None, tags=None, **kwargs) -> None: + super(Resource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + self.location = location + self.tags = tags + self.e_tag = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service.py new file mode 100644 index 000000000000..16d1af502787 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service.py @@ -0,0 +1,94 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class ResponsysLinkedService(LinkedService): + """Responsys linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of the Responsys server. + :type endpoint: object + :param client_id: Required. The client ID associated with the Responsys + application. Type: string (or Expression with resultType string). + :type client_id: object + :param client_secret: The client secret associated with the Responsys + application. Type: string (or Expression with resultType string). + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. Type: boolean (or + Expression with resultType boolean). + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ResponsysLinkedService, self).__init__(**kwargs) + self.endpoint = kwargs.get('endpoint', None) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Responsys' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service_py3.py new file mode 100644 index 000000000000..6d8a74a0a34b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service_py3.py @@ -0,0 +1,94 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class ResponsysLinkedService(LinkedService): + """Responsys linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. 
The endpoint of the Responsys server. + :type endpoint: object + :param client_id: Required. The client ID associated with the Responsys + application. Type: string (or Expression with resultType string). + :type client_id: object + :param client_secret: The client secret associated with the Responsys + application. Type: string (or Expression with resultType string). + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. Type: boolean (or + Expression with resultType boolean). + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, endpoint, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(ResponsysLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.endpoint = endpoint + self.client_id = client_id + self.client_secret = client_secret + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Responsys' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset.py new file mode 100644 index 000000000000..f459e69113a1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class ResponsysObjectDataset(Dataset): + """Responsys dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
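
For the Responsys linked service defined above, a minimal construction sketch; the endpoint and client values are placeholders, and SecureString is the package's inline SecretBase implementation, used here on the assumption that the secret is supplied directly rather than via Key Vault:

    from azure.mgmt.datafactory.models import (
        ResponsysLinkedService,
        SecureString,
    )

    responsys_ls = ResponsysLinkedService(
        endpoint='https://example.responsys.net',  # placeholder endpoint
        client_id='example-client-id',             # placeholder ID
        client_secret=SecureString(value='example-secret'),
        use_encrypted_endpoints=True,
    )
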
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ResponsysObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'ResponsysObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset_py3.py new file mode 100644 index 000000000000..c5f375910aaf --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class ResponsysObjectDataset(Dataset): + """Responsys dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(ResponsysObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'ResponsysObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source.py new file mode 100644 index 000000000000..fd25b8e71377 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class ResponsysSource(CopySource): + """A copy activity Responsys source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ResponsysSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'ResponsysSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source_py3.py new file mode 100644 index 000000000000..8d5e4ac091f7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class ResponsysSource(CopySource): + """A copy activity Responsys source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'ResponsysSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset.py new file mode 100644 index 000000000000..9a5d41858e54 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class RestResourceDataset(Dataset): + """A Rest service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param relative_url: The relative URL to the resource that the RESTful API + provides. Type: string (or Expression with resultType string). + :type relative_url: object + :param request_method: The HTTP method used to call the RESTful API. The + default is GET. Type: string (or Expression with resultType string). 
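
A copy source such as ResponsysSource is not used on its own; it is attached to a copy activity alongside a sink. A sketch under that assumption (CopyActivity, DatasetReference, and BlobSink are sibling models in this package; all names and the query are placeholders):

    from azure.mgmt.datafactory.models import (
        BlobSink,
        CopyActivity,
        DatasetReference,
        ResponsysSource,
    )

    copy = CopyActivity(
        name='CopyFromResponsys',
        source=ResponsysSource(query='SELECT * FROM CONTACT_LIST'),  # placeholder query
        sink=BlobSink(),
        inputs=[DatasetReference(reference_name='ResponsysDataset')],
        outputs=[DatasetReference(reference_name='BlobStagingDataset')],
    )
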
+ :type request_method: object + :param request_body: The HTTP request body to the RESTful API if + requestMethod is POST. Type: string (or Expression with resultType + string). + :type request_body: object + :param additional_headers: The additional HTTP headers in the request to + the RESTful API. Type: string (or Expression with resultType string). + :type additional_headers: object + :param pagination_rules: The pagination rules to compose next page + requests. Type: string (or Expression with resultType string). + :type pagination_rules: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, + 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, + 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, + 'pagination_rules': {'key': 'typeProperties.paginationRules', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(RestResourceDataset, self).__init__(**kwargs) + self.relative_url = kwargs.get('relative_url', None) + self.request_method = kwargs.get('request_method', None) + self.request_body = kwargs.get('request_body', None) + self.additional_headers = kwargs.get('additional_headers', None) + self.pagination_rules = kwargs.get('pagination_rules', None) + self.type = 'RestResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset_py3.py new file mode 100644 index 000000000000..99f39c97f373 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset_py3.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class RestResourceDataset(Dataset): + """A Rest service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. 
+ :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param relative_url: The relative URL to the resource that the RESTful API + provides. Type: string (or Expression with resultType string). + :type relative_url: object + :param request_method: The HTTP method used to call the RESTful API. The + default is GET. Type: string (or Expression with resultType string). + :type request_method: object + :param request_body: The HTTP request body to the RESTful API if + requestMethod is POST. Type: string (or Expression with resultType + string). + :type request_body: object + :param additional_headers: The additional HTTP headers in the request to + the RESTful API. Type: string (or Expression with resultType string). + :type additional_headers: object + :param pagination_rules: The pagination rules to compose next page + requests. Type: string (or Expression with resultType string). + :type pagination_rules: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, + 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, + 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, + 'pagination_rules': {'key': 'typeProperties.paginationRules', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, relative_url=None, request_method=None, request_body=None, additional_headers=None, pagination_rules=None, **kwargs) -> None: + super(RestResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.relative_url = relative_url + self.request_method = request_method + self.request_body = request_body + self.additional_headers = additional_headers + self.pagination_rules = 
pagination_rules + self.type = 'RestResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service.py new file mode 100644 index 000000000000..0fbb15654438 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service.py @@ -0,0 +1,107 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class RestServiceLinkedService(LinkedService): + """Rest Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The base URL of the REST service. + :type url: object + :param enable_server_certificate_validation: Whether to validate server + side SSL certificate when connecting to the endpoint.The default value is + true. Type: boolean (or Expression with resultType boolean). + :type enable_server_certificate_validation: object + :param authentication_type: Required. Type of authentication used to + connect to the REST service. Possible values include: 'Anonymous', + 'Basic', 'AadServicePrincipal', 'ManagedServiceIdentity' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.RestServiceAuthenticationType + :param user_name: The user name used in Basic authentication type. + :type user_name: object + :param password: The password used in Basic authentication type. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param service_principal_id: The application's client ID used in + AadServicePrincipal authentication type. + :type service_principal_id: object + :param service_principal_key: The application's key used in + AadServicePrincipal authentication type. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The tenant information (domain name or tenant ID) used in + AadServicePrincipal authentication type under which your application + resides. + :type tenant: object + :param aad_resource_id: The resource you are requesting authorization to + use. + :type aad_resource_id: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
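
Tying the RestResourceDataset properties together, a minimal sketch; the URL, header, and pagination rule below are illustrative values (Data Factory expresses pagination rules as a mapping, for example an AbsoluteUrl rule pointing at a JSONPath in the response body), not values taken from this patch:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        RestResourceDataset,
    )

    rest_ds = RestResourceDataset(
        linked_service_name=LinkedServiceReference(reference_name='MyRestService'),
        relative_url='orders',                           # placeholder path
        request_method='GET',
        additional_headers='Accept: application/json',
        pagination_rules={'AbsoluteUrl': '$.nextLink'},  # illustrative rule
    )
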
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(RestServiceLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.aad_resource_id = kwargs.get('aad_resource_id', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'RestService' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service_py3.py new file mode 100644 index 000000000000..9af9f609e52b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service_py3.py @@ -0,0 +1,107 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class RestServiceLinkedService(LinkedService): + """Rest Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The base URL of the REST service. + :type url: object + :param enable_server_certificate_validation: Whether to validate server + side SSL certificate when connecting to the endpoint.The default value is + true. Type: boolean (or Expression with resultType boolean). + :type enable_server_certificate_validation: object + :param authentication_type: Required. Type of authentication used to + connect to the REST service. Possible values include: 'Anonymous', + 'Basic', 'AadServicePrincipal', 'ManagedServiceIdentity' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.RestServiceAuthenticationType + :param user_name: The user name used in Basic authentication type. + :type user_name: object + :param password: The password used in Basic authentication type. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param service_principal_id: The application's client ID used in + AadServicePrincipal authentication type. + :type service_principal_id: object + :param service_principal_key: The application's key used in + AadServicePrincipal authentication type. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The tenant information (domain name or tenant ID) used in + AadServicePrincipal authentication type under which your application + resides. + :type tenant: object + :param aad_resource_id: The resource you are requesting authorization to + use. + :type aad_resource_id: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, url, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, enable_server_certificate_validation=None, user_name=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, aad_resource_id=None, encrypted_credential=None, **kwargs) -> None: + super(RestServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.enable_server_certificate_validation = enable_server_certificate_validation + self.authentication_type = authentication_type + self.user_name = user_name + self.password = password + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.aad_resource_id = aad_resource_id + self.encrypted_credential = encrypted_credential + self.type = 'RestService' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source.py new file mode 100644 index 000000000000..f32d4d67e427 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class RestSource(CopySource): + """A copy activity Rest service source. + + All required parameters must be populated in order to send to Azure. 
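# Example usage (illustrative sketch; all endpoint values and secrets are
# placeholders): constructing the RestServiceLinkedService defined above
# with AAD service-principal authentication. SecureString is the in-line
# SecretBase implementation from this models package.
from azure.mgmt.datafactory.models import RestServiceLinkedService, SecureString

rest_ls = RestServiceLinkedService(
    url='https://example.contoso.com/api',
    authentication_type='AadServicePrincipal',
    service_principal_id='<application-client-id>',
    service_principal_key=SecureString(value='<application-key>'),
    tenant='contoso.onmicrosoft.com',
    aad_resource_id='https://example.contoso.com')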
+ + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param request_method: The HTTP method used to call the RESTful API. The + default is GET. Type: string (or Expression with resultType string). + :type request_method: object + :param request_body: The HTTP request body to the RESTful API if + requestMethod is POST. Type: string (or Expression with resultType + string). + :type request_body: object + :param additional_headers: The additional HTTP headers in the request to + the RESTful API. Type: string (or Expression with resultType string). + :type additional_headers: object + :param pagination_rules: The pagination rules to compose next page + requests. Type: string (or Expression with resultType string). + :type pagination_rules: object + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP + response. It is the timeout to get a response, not the timeout to read + response data. Default value: 00:01:40. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object + :param request_interval: The time to wait before sending the next page + request.
+ :type request_interval: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'request_method': {'key': 'requestMethod', 'type': 'object'}, + 'request_body': {'key': 'requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, + 'pagination_rules': {'key': 'paginationRules', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + 'request_interval': {'key': 'requestInterval', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(RestSource, self).__init__(**kwargs) + self.request_method = kwargs.get('request_method', None) + self.request_body = kwargs.get('request_body', None) + self.additional_headers = kwargs.get('additional_headers', None) + self.pagination_rules = kwargs.get('pagination_rules', None) + self.http_request_timeout = kwargs.get('http_request_timeout', None) + self.request_interval = kwargs.get('request_interval', None) + self.type = 'RestSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source_py3.py new file mode 100644 index 000000000000..5fcbb2f7a76d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class RestSource(CopySource): + """A copy activity Rest service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param request_method: The HTTP method used to call the RESTful API. The + default is GET. Type: string (or Expression with resultType string). + :type request_method: object + :param request_body: The HTTP request body to the RESTful API if + requestMethod is POST. Type: string (or Expression with resultType + string). 
+ :type request_body: object + :param additional_headers: The additional HTTP headers in the request to + the RESTful API. Type: string (or Expression with resultType string). + :type additional_headers: object + :param pagination_rules: The pagination rules to compose next page + requests. Type: string (or Expression with resultType string). + :type pagination_rules: object + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP + response. It is the timeout to get a response, not the timeout to read + response data. Default value: 00:01:40. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object + :param request_interval: The time to await before sending next page + request. + :type request_interval: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'request_method': {'key': 'requestMethod', 'type': 'object'}, + 'request_body': {'key': 'requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, + 'pagination_rules': {'key': 'paginationRules', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + 'request_interval': {'key': 'requestInterval', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, request_method=None, request_body=None, additional_headers=None, pagination_rules=None, http_request_timeout=None, request_interval=None, **kwargs) -> None: + super(RestSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.request_method = request_method + self.request_body = request_body + self.additional_headers = additional_headers + self.pagination_rules = pagination_rules + self.http_request_timeout = http_request_timeout + self.request_interval = request_interval + self.type = 'RestSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy.py new file mode 100644 index 000000000000..e6f5b1876259 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy.py @@ -0,0 +1,38 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RetryPolicy(Model): + """Execution policy for an activity. + + :param count: Maximum ordinary retry attempts. Default is 0. Type: integer + (or Expression with resultType integer), minimum: 0. 
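# Example usage (illustrative; the header, pagination rule and interval are
# placeholder values): a RestSource that pages through a GET endpoint using
# the RestSource model defined above. The interval is assumed to be in
# milliseconds.
from azure.mgmt.datafactory.models import RestSource

rest_source = RestSource(
    request_method='GET',
    additional_headers='Accept: application/json',
    pagination_rules='AbsoluteUrl:$.paging.next',  # placeholder rule
    http_request_timeout='00:01:40',
    request_interval=1000)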
+ :type count: object + :param interval_in_seconds: Interval between retries in seconds. Default + is 30. + :type interval_in_seconds: int + """ + + _validation = { + 'interval_in_seconds': {'maximum': 86400, 'minimum': 30}, + } + + _attribute_map = { + 'count': {'key': 'count', 'type': 'object'}, + 'interval_in_seconds': {'key': 'intervalInSeconds', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(RetryPolicy, self).__init__(**kwargs) + self.count = kwargs.get('count', None) + self.interval_in_seconds = kwargs.get('interval_in_seconds', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy_py3.py new file mode 100644 index 000000000000..b51b87a49938 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy_py3.py @@ -0,0 +1,38 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RetryPolicy(Model): + """Execution policy for an activity. + + :param count: Maximum ordinary retry attempts. Default is 0. Type: integer + (or Expression with resultType integer), minimum: 0. + :type count: object + :param interval_in_seconds: Interval between retries in seconds. Default + is 30. + :type interval_in_seconds: int + """ + + _validation = { + 'interval_in_seconds': {'maximum': 86400, 'minimum': 30}, + } + + _attribute_map = { + 'count': {'key': 'count', 'type': 'object'}, + 'interval_in_seconds': {'key': 'intervalInSeconds', 'type': 'int'}, + } + + def __init__(self, *, count=None, interval_in_seconds: int=None, **kwargs) -> None: + super(RetryPolicy, self).__init__(**kwargs) + self.count = count + self.interval_in_seconds = interval_in_seconds diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters.py new file mode 100644 index 000000000000..9271f7adf029 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters.py @@ -0,0 +1,54 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RunFilterParameters(Model): + """Query parameters for listing runs. + + All required parameters must be populated in order to send to Azure. + + :param continuation_token: The continuation token for getting the next + page of results. Null for first page. + :type continuation_token: str + :param last_updated_after: Required. 
The time at or after which the run + event was updated in 'ISO 8601' format. + :type last_updated_after: datetime + :param last_updated_before: Required. The time at or before which the run + event was updated in 'ISO 8601' format. + :type last_updated_before: datetime + :param filters: List of filters. + :type filters: list[~azure.mgmt.datafactory.models.RunQueryFilter] + :param order_by: List of OrderBy options. + :type order_by: list[~azure.mgmt.datafactory.models.RunQueryOrderBy] + """ + + _validation = { + 'last_updated_after': {'required': True}, + 'last_updated_before': {'required': True}, + } + + _attribute_map = { + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + 'last_updated_after': {'key': 'lastUpdatedAfter', 'type': 'iso-8601'}, + 'last_updated_before': {'key': 'lastUpdatedBefore', 'type': 'iso-8601'}, + 'filters': {'key': 'filters', 'type': '[RunQueryFilter]'}, + 'order_by': {'key': 'orderBy', 'type': '[RunQueryOrderBy]'}, + } + + def __init__(self, **kwargs): + super(RunFilterParameters, self).__init__(**kwargs) + self.continuation_token = kwargs.get('continuation_token', None) + self.last_updated_after = kwargs.get('last_updated_after', None) + self.last_updated_before = kwargs.get('last_updated_before', None) + self.filters = kwargs.get('filters', None) + self.order_by = kwargs.get('order_by', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters_py3.py new file mode 100644 index 000000000000..c96e64eb63b3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters_py3.py @@ -0,0 +1,54 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RunFilterParameters(Model): + """Query parameters for listing runs. + + All required parameters must be populated in order to send to Azure. + + :param continuation_token: The continuation token for getting the next + page of results. Null for first page. + :type continuation_token: str + :param last_updated_after: Required. The time at or after which the run + event was updated in 'ISO 8601' format. + :type last_updated_after: datetime + :param last_updated_before: Required. The time at or before which the run + event was updated in 'ISO 8601' format. + :type last_updated_before: datetime + :param filters: List of filters. + :type filters: list[~azure.mgmt.datafactory.models.RunQueryFilter] + :param order_by: List of OrderBy options.
+ :type order_by: list[~azure.mgmt.datafactory.models.RunQueryOrderBy] + """ + + _validation = { + 'last_updated_after': {'required': True}, + 'last_updated_before': {'required': True}, + } + + _attribute_map = { + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + 'last_updated_after': {'key': 'lastUpdatedAfter', 'type': 'iso-8601'}, + 'last_updated_before': {'key': 'lastUpdatedBefore', 'type': 'iso-8601'}, + 'filters': {'key': 'filters', 'type': '[RunQueryFilter]'}, + 'order_by': {'key': 'orderBy', 'type': '[RunQueryOrderBy]'}, + } + + def __init__(self, *, last_updated_after, last_updated_before, continuation_token: str=None, filters=None, order_by=None, **kwargs) -> None: + super(RunFilterParameters, self).__init__(**kwargs) + self.continuation_token = continuation_token + self.last_updated_after = last_updated_after + self.last_updated_before = last_updated_before + self.filters = filters + self.order_by = order_by diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter.py new file mode 100644 index 000000000000..7d54150a6815 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RunQueryFilter(Model): + """Query filter option for listing runs. + + All required parameters must be populated in order to send to Azure. + + :param operand: Required. Parameter name to be used for filter. The + allowed operands to query pipeline runs are PipelineName, RunStart, RunEnd + and Status; to query activity runs are ActivityName, ActivityRunStart, + ActivityRunEnd, ActivityType and Status, and to query trigger runs are + TriggerName, TriggerRunTimestamp and Status. Possible values include: + 'PipelineName', 'Status', 'RunStart', 'RunEnd', 'ActivityName', + 'ActivityRunStart', 'ActivityRunEnd', 'ActivityType', 'TriggerName', + 'TriggerRunTimestamp', 'RunGroupId', 'LatestOnly' + :type operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand + :param operator: Required. Operator to be used for filter. Possible values + include: 'Equals', 'NotEquals', 'In', 'NotIn' + :type operator: str or + ~azure.mgmt.datafactory.models.RunQueryFilterOperator + :param values: Required. List of filter values. 
+ :type values: list[str] + """ + + _validation = { + 'operand': {'required': True}, + 'operator': {'required': True}, + 'values': {'required': True}, + } + + _attribute_map = { + 'operand': {'key': 'operand', 'type': 'str'}, + 'operator': {'key': 'operator', 'type': 'str'}, + 'values': {'key': 'values', 'type': '[str]'}, + } + + def __init__(self, **kwargs): + super(RunQueryFilter, self).__init__(**kwargs) + self.operand = kwargs.get('operand', None) + self.operator = kwargs.get('operator', None) + self.values = kwargs.get('values', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter_py3.py new file mode 100644 index 000000000000..814e7a4b499b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter_py3.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RunQueryFilter(Model): + """Query filter option for listing runs. + + All required parameters must be populated in order to send to Azure. + + :param operand: Required. Parameter name to be used for filter. The + allowed operands to query pipeline runs are PipelineName, RunStart, RunEnd + and Status; to query activity runs are ActivityName, ActivityRunStart, + ActivityRunEnd, ActivityType and Status, and to query trigger runs are + TriggerName, TriggerRunTimestamp and Status. Possible values include: + 'PipelineName', 'Status', 'RunStart', 'RunEnd', 'ActivityName', + 'ActivityRunStart', 'ActivityRunEnd', 'ActivityType', 'TriggerName', + 'TriggerRunTimestamp', 'RunGroupId', 'LatestOnly' + :type operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand + :param operator: Required. Operator to be used for filter. Possible values + include: 'Equals', 'NotEquals', 'In', 'NotIn' + :type operator: str or + ~azure.mgmt.datafactory.models.RunQueryFilterOperator + :param values: Required. List of filter values. + :type values: list[str] + """ + + _validation = { + 'operand': {'required': True}, + 'operator': {'required': True}, + 'values': {'required': True}, + } + + _attribute_map = { + 'operand': {'key': 'operand', 'type': 'str'}, + 'operator': {'key': 'operator', 'type': 'str'}, + 'values': {'key': 'values', 'type': '[str]'}, + } + + def __init__(self, *, operand, operator, values, **kwargs) -> None: + super(RunQueryFilter, self).__init__(**kwargs) + self.operand = operand + self.operator = operator + self.values = values diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by.py new file mode 100644 index 000000000000..21afabcf215f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RunQueryOrderBy(Model): + """An object to provide order by options for listing runs. + + All required parameters must be populated in order to send to Azure. + + :param order_by: Required. Parameter name to be used for order by. The + allowed parameters to order by for pipeline runs are PipelineName, + RunStart, RunEnd and Status; for activity runs are ActivityName, + ActivityRunStart, ActivityRunEnd and Status; for trigger runs are + TriggerName, TriggerRunTimestamp and Status. Possible values include: + 'RunStart', 'RunEnd', 'PipelineName', 'Status', 'ActivityName', + 'ActivityRunStart', 'ActivityRunEnd', 'TriggerName', 'TriggerRunTimestamp' + :type order_by: str or ~azure.mgmt.datafactory.models.RunQueryOrderByField + :param order: Required. Sorting order of the parameter. Possible values + include: 'ASC', 'DESC' + :type order: str or ~azure.mgmt.datafactory.models.RunQueryOrder + """ + + _validation = { + 'order_by': {'required': True}, + 'order': {'required': True}, + } + + _attribute_map = { + 'order_by': {'key': 'orderBy', 'type': 'str'}, + 'order': {'key': 'order', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(RunQueryOrderBy, self).__init__(**kwargs) + self.order_by = kwargs.get('order_by', None) + self.order = kwargs.get('order', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by_py3.py new file mode 100644 index 000000000000..a3ddc8854d47 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RunQueryOrderBy(Model): + """An object to provide order by options for listing runs. + + All required parameters must be populated in order to send to Azure. + + :param order_by: Required. Parameter name to be used for order by. The + allowed parameters to order by for pipeline runs are PipelineName, + RunStart, RunEnd and Status; for activity runs are ActivityName, + ActivityRunStart, ActivityRunEnd and Status; for trigger runs are + TriggerName, TriggerRunTimestamp and Status. Possible values include: + 'RunStart', 'RunEnd', 'PipelineName', 'Status', 'ActivityName', + 'ActivityRunStart', 'ActivityRunEnd', 'TriggerName', 'TriggerRunTimestamp' + :type order_by: str or ~azure.mgmt.datafactory.models.RunQueryOrderByField + :param order: Required. Sorting order of the parameter. 
Possible values + include: 'ASC', 'DESC' + :type order: str or ~azure.mgmt.datafactory.models.RunQueryOrder + """ + + _validation = { + 'order_by': {'required': True}, + 'order': {'required': True}, + } + + _attribute_map = { + 'order_by': {'key': 'orderBy', 'type': 'str'}, + 'order': {'key': 'order', 'type': 'str'}, + } + + def __init__(self, *, order_by, order, **kwargs) -> None: + super(RunQueryOrderBy, self).__init__(**kwargs) + self.order_by = order_by + self.order = order diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service.py new file mode 100644 index 000000000000..c644ac664831 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class SalesforceLinkedService(LinkedService): + """Linked service for Salesforce. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param environment_url: The URL of the Salesforce instance. Default is + 'https://login.salesforce.com'. To copy data from a sandbox, specify + 'https://test.salesforce.com'. To copy data from a custom domain, specify, + for example, 'https://[domain].my.salesforce.com'. Type: string (or + Expression with resultType string). + :type environment_url: object + :param username: The username for Basic authentication of the Salesforce + instance. Type: string (or Expression with resultType string). + :type username: object + :param password: The password for Basic authentication of the Salesforce + instance. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param security_token: The security token required to remotely access the + Salesforce instance. + :type security_token: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string).
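# Example usage (illustrative): combining the three run-query models defined
# above to ask for the last day's failed runs, newest first. The result is
# typically passed to a query operation such as
# client.pipeline_runs.query_by_factory(resource_group_name, factory_name,
# run_filter).
from datetime import datetime, timedelta

from azure.mgmt.datafactory.models import (
    RunFilterParameters, RunQueryFilter, RunQueryOrderBy)

run_filter = RunFilterParameters(
    last_updated_after=datetime.utcnow() - timedelta(days=1),
    last_updated_before=datetime.utcnow(),
    filters=[RunQueryFilter(
        operand='Status', operator='Equals', values=['Failed'])],
    order_by=[RunQueryOrderBy(order_by='RunStart', order='DESC')])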
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SalesforceLinkedService, self).__init__(**kwargs) + self.environment_url = kwargs.get('environment_url', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.security_token = kwargs.get('security_token', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Salesforce' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service_py3.py new file mode 100644 index 000000000000..05fcea7a3990 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SalesforceLinkedService(LinkedService): + """Linked service for Salesforce. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param environment_url: The URL of Salesforce instance. Default is + 'https://login.salesforce.com'. To copy data from sandbox, specify + 'https://test.salesforce.com'. To copy data from custom domain, specify, + for example, 'https://[domain].my.salesforce.com'. Type: string (or + Expression with resultType string). + :type environment_url: object + :param username: The username for Basic authentication of the Salesforce + instance. 
Type: string (or Expression with resultType string). + :type username: object + :param password: The password for Basic authentication of the Salesforce + instance. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param security_token: The security token is required to remotely access + Salesforce instance. + :type security_token: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, environment_url=None, username=None, password=None, security_token=None, encrypted_credential=None, **kwargs) -> None: + super(SalesforceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.environment_url = environment_url + self.username = username + self.password = password + self.security_token = security_token + self.encrypted_credential = encrypted_credential + self.type = 'Salesforce' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service.py new file mode 100644 index 000000000000..93b4fcdb3d1f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service.py @@ -0,0 +1,91 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class SalesforceMarketingCloudLinkedService(LinkedService): + """Salesforce Marketing Cloud linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
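# Example usage (illustrative; credentials are placeholders): a
# SalesforceLinkedService pointing at a sandbox environment, with the
# password and security token wrapped as SecureString secrets.
from azure.mgmt.datafactory.models import SalesforceLinkedService, SecureString

sf_ls = SalesforceLinkedService(
    environment_url='https://test.salesforce.com',
    username='user@example.com',
    password=SecureString(value='<password>'),
    security_token=SecureString(value='<security-token>'))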
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param client_id: Required. The client ID associated with the Salesforce + Marketing Cloud application. Type: string (or Expression with resultType + string). + :type client_id: object + :param client_secret: The client secret associated with the Salesforce + Marketing Cloud application. Type: string (or Expression with resultType + string). + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. Type: boolean (or + Expression with resultType boolean). + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SalesforceMarketingCloudLinkedService, self).__init__(**kwargs) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'SalesforceMarketingCloud' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service_py3.py new file mode 100644 index 000000000000..d7e09e27a43f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service_py3.py @@ -0,0 +1,91 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SalesforceMarketingCloudLinkedService(LinkedService): + """Salesforce Marketing Cloud linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param client_id: Required. 
The client ID associated with the Salesforce + Marketing Cloud application. Type: string (or Expression with resultType + string). + :type client_id: object + :param client_secret: The client secret associated with the Salesforce + Marketing Cloud application. Type: string (or Expression with resultType + string). + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. Type: boolean (or + Expression with resultType boolean). + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(SalesforceMarketingCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.client_id = client_id + self.client_secret = client_secret + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'SalesforceMarketingCloud' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset.py new file mode 100644 index 000000000000..20f581ce1c50 --- /dev/null +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class SalesforceMarketingCloudObjectDataset(Dataset): + """Salesforce Marketing Cloud dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
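# Example usage (illustrative; the client credentials are placeholders):
# constructing the SalesforceMarketingCloudLinkedService defined above.
from azure.mgmt.datafactory.models import (
    SalesforceMarketingCloudLinkedService, SecureString)

sfmc_ls = SalesforceMarketingCloudLinkedService(
    client_id='<client-id>',
    client_secret=SecureString(value='<client-secret>'),
    use_encrypted_endpoints=True)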
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SalesforceMarketingCloudObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'SalesforceMarketingCloudObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset_py3.py new file mode 100644 index 000000000000..526ac806649f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SalesforceMarketingCloudObjectDataset(Dataset): + """Salesforce Marketing Cloud dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(SalesforceMarketingCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'SalesforceMarketingCloudObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source.py new file mode 100644 index 000000000000..09a0eca1758e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SalesforceMarketingCloudSource(CopySource): + """A copy activity Salesforce Marketing Cloud source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source.py
new file mode 100644
index 000000000000..09a0eca1758e
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source import CopySource
+
+
+class SalesforceMarketingCloudSource(CopySource):
+    """A copy activity Salesforce Marketing Cloud source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param query: A query to retrieve data from source. Type: string (or
+     Expression with resultType string).
+    :type query: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(SalesforceMarketingCloudSource, self).__init__(**kwargs)
+        self.query = kwargs.get('query', None)
+        self.type = 'SalesforceMarketingCloudSource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source_py3.py
new file mode 100644
index 000000000000..9b898af0c3a1
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source_py3.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source_py3 import CopySource
+
+
+class SalesforceMarketingCloudSource(CopySource):
+    """A copy activity Salesforce Marketing Cloud source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param query: A query to retrieve data from source. Type: string (or
+     Expression with resultType string).
+    :type query: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
+        super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.query = query
+        self.type = 'SalesforceMarketingCloudSource'
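A source model like the one above is normally embedded in a CopyActivity rather than used on its own. The following is a minimal sketch under that assumption; the activity, dataset, and sink names are hypothetical placeholders, and BlobSink is used here only as an arbitrary destination.

    from azure.mgmt.datafactory.models import (
        BlobSink,
        CopyActivity,
        DatasetReference,
        SalesforceMarketingCloudSource,
    )

    # query is optional; when omitted, the whole object is read.
    source = SalesforceMarketingCloudSource(query='SELECT * FROM Campaign')

    copy = CopyActivity(
        name='CopyFromMarketingCloud',  # placeholder activity name
        source=source,
        sink=BlobSink(),
        inputs=[DatasetReference(reference_name='MarketingCloudDataset')],
        outputs=[DatasetReference(reference_name='BlobDataset')],
    )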
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset.py
new file mode 100644
index 000000000000..10cfce97fe0f
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset import Dataset
+
+
+class SalesforceObjectDataset(Dataset):
+    """The Salesforce object dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param object_api_name: The Salesforce object API name. Type: string (or
+     Expression with resultType string).
+    :type object_api_name: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(SalesforceObjectDataset, self).__init__(**kwargs)
+        self.object_api_name = kwargs.get('object_api_name', None)
+        self.type = 'SalesforceObject'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset_py3.py
new file mode 100644
index 000000000000..3c3f75d6059e
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset_py3.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_py3 import Dataset
+
+
+class SalesforceObjectDataset(Dataset):
+    """The Salesforce object dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param object_api_name: The Salesforce object API name. Type: string (or
+     Expression with resultType string).
+    :type object_api_name: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'},
+    }
+
+    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, object_api_name=None, **kwargs) -> None:
+        super(SalesforceObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+        self.object_api_name = object_api_name
+        self.type = 'SalesforceObject'
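The Salesforce object dataset differs from the Marketing Cloud one only in that it addresses objects by API name. A minimal sketch, with placeholder names for the linked service and object:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        SalesforceObjectDataset,
    )

    dataset = SalesforceObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name='SalesforceLS'),
        object_api_name='Account',  # the Salesforce object API name
    )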
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service.py
new file mode 100644
index 000000000000..fb6476ac9a30
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service.py
@@ -0,0 +1,87 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class SalesforceServiceCloudLinkedService(LinkedService):
+    """Linked service for Salesforce Service Cloud.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param environment_url: The URL of the Salesforce Service Cloud instance.
+     Default is 'https://login.salesforce.com'. To copy data from a sandbox,
+     specify 'https://test.salesforce.com'. To copy data from a custom domain,
+     specify, for example, 'https://[domain].my.salesforce.com'. Type: string
+     (or Expression with resultType string).
+    :type environment_url: object
+    :param username: The username for Basic authentication of the Salesforce
+     instance. Type: string (or Expression with resultType string).
+    :type username: object
+    :param password: The password for Basic authentication of the Salesforce
+     instance.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param security_token: The security token is required to remotely access
+     the Salesforce instance.
+    :type security_token: ~azure.mgmt.datafactory.models.SecretBase
+    :param extended_properties: Extended properties appended to the connection
+     string. Type: string (or Expression with resultType string).
+    :type extended_properties: object
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'},
+        'username': {'key': 'typeProperties.username', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+        'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'},
+        'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': 'object'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(SalesforceServiceCloudLinkedService, self).__init__(**kwargs)
+        self.environment_url = kwargs.get('environment_url', None)
+        self.username = kwargs.get('username', None)
+        self.password = kwargs.get('password', None)
+        self.security_token = kwargs.get('security_token', None)
+        self.extended_properties = kwargs.get('extended_properties', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
+        self.type = 'SalesforceServiceCloud'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service_py3.py
new file mode 100644
index 000000000000..3f0b3cc64d91
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service_py3.py
@@ -0,0 +1,87 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class SalesforceServiceCloudLinkedService(LinkedService):
+    """Linked service for Salesforce Service Cloud.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param environment_url: The URL of the Salesforce Service Cloud instance.
+     Default is 'https://login.salesforce.com'. To copy data from a sandbox,
+     specify 'https://test.salesforce.com'. To copy data from a custom domain,
+     specify, for example, 'https://[domain].my.salesforce.com'. Type: string
+     (or Expression with resultType string).
+    :type environment_url: object
+    :param username: The username for Basic authentication of the Salesforce
+     instance. Type: string (or Expression with resultType string).
+    :type username: object
+    :param password: The password for Basic authentication of the Salesforce
+     instance.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param security_token: The security token is required to remotely access
+     the Salesforce instance.
+    :type security_token: ~azure.mgmt.datafactory.models.SecretBase
+    :param extended_properties: Extended properties appended to the connection
+     string. Type: string (or Expression with resultType string).
+    :type extended_properties: object
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'},
+        'username': {'key': 'typeProperties.username', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+        'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'},
+        'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': 'object'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, environment_url=None, username=None, password=None, security_token=None, extended_properties=None, encrypted_credential=None, **kwargs) -> None:
+        super(SalesforceServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+        self.environment_url = environment_url
+        self.username = username
+        self.password = password
+        self.security_token = security_token
+        self.extended_properties = extended_properties
+        self.encrypted_credential = encrypted_credential
+        self.type = 'SalesforceServiceCloud'
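A minimal sketch of constructing this linked service, assuming the classes are exported from azure.mgmt.datafactory.models; all credential values are placeholders. SecureString keeps the secrets typed as SecretBase, though in production an AzureKeyVaultSecretReference would more commonly be used.

    from azure.mgmt.datafactory.models import (
        SalesforceServiceCloudLinkedService,
        SecureString,
    )

    linked_service = SalesforceServiceCloudLinkedService(
        environment_url='https://login.salesforce.com',
        username='user@contoso.com',                       # placeholder
        password=SecureString(value='<password>'),         # placeholder
        security_token=SecureString(value='<sec-token>'),  # placeholder
    )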
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset.py
new file mode 100644
index 000000000000..1f5cb3bb5bf1
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset import Dataset
+
+
+class SalesforceServiceCloudObjectDataset(Dataset):
+    """The Salesforce Service Cloud object dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param object_api_name: The Salesforce Service Cloud object API name.
+     Type: string (or Expression with resultType string).
+    :type object_api_name: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(SalesforceServiceCloudObjectDataset, self).__init__(**kwargs)
+        self.object_api_name = kwargs.get('object_api_name', None)
+        self.type = 'SalesforceServiceCloudObject'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset_py3.py
new file mode 100644
index 000000000000..d215f5f0084d
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset_py3.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_py3 import Dataset
+
+
+class SalesforceServiceCloudObjectDataset(Dataset):
+    """The Salesforce Service Cloud object dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param object_api_name: The Salesforce Service Cloud object API name.
+     Type: string (or Expression with resultType string).
+    :type object_api_name: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'},
+    }
+
+    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, object_api_name=None, **kwargs) -> None:
+        super(SalesforceServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+        self.object_api_name = object_api_name
+        self.type = 'SalesforceServiceCloudObject'
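Construction mirrors the plain Salesforce object dataset; a minimal sketch with placeholder names, assuming a Service Cloud linked service like the one defined earlier in this patch:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        SalesforceServiceCloudObjectDataset,
    )

    dataset = SalesforceServiceCloudObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name='ServiceCloudLS'),
        object_api_name='Case',  # e.g. the standard Case object
    )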
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink.py
new file mode 100644
index 000000000000..99e2b1a2c924
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink.py
@@ -0,0 +1,84 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_sink import CopySink
+
+
+class SalesforceServiceCloudSink(CopySink):
+    """A copy activity Salesforce Service Cloud sink.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param write_batch_size: Write batch size. Type: integer (or Expression
+     with resultType integer), minimum: 0.
+    :type write_batch_size: object
+    :param write_batch_timeout: Write batch timeout. Type: string (or
+     Expression with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type write_batch_timeout: object
+    :param sink_retry_count: Sink retry count. Type: integer (or Expression
+     with resultType integer).
+    :type sink_retry_count: object
+    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+     resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type sink_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the sink data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param write_behavior: The write behavior for the operation. Default is
+     Insert. Possible values include: 'Insert', 'Upsert'
+    :type write_behavior: str or
+     ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior
+    :param external_id_field_name: The name of the external ID field for the
+     upsert operation. Default value is the 'Id' column. Type: string (or
+     Expression with resultType string).
+    :type external_id_field_name: object
+    :param ignore_null_values: The flag indicating whether or not to ignore
+     null values from the input dataset (except key fields) during a write
+     operation. Default value is false. If set to true, ADF leaves the data
+     in the destination object unchanged on upsert/update and inserts the
+     defined default value on insert; if set to false, ADF sets the data in
+     the destination object to NULL on upsert/update and inserts NULL on
+     insert. Type: boolean (or Expression with resultType boolean).
+    :type ignore_null_values: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
+        'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'},
+        'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(SalesforceServiceCloudSink, self).__init__(**kwargs)
+        self.write_behavior = kwargs.get('write_behavior', None)
+        self.external_id_field_name = kwargs.get('external_id_field_name', None)
+        self.ignore_null_values = kwargs.get('ignore_null_values', None)
+        self.type = 'SalesforceServiceCloudSink'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink_py3.py
new file mode 100644
index 000000000000..2abfaa12d0e7
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink_py3.py
@@ -0,0 +1,84 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_sink_py3 import CopySink
+
+
+class SalesforceServiceCloudSink(CopySink):
+    """A copy activity Salesforce Service Cloud sink.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param write_batch_size: Write batch size. Type: integer (or Expression
+     with resultType integer), minimum: 0.
+    :type write_batch_size: object
+    :param write_batch_timeout: Write batch timeout. Type: string (or
+     Expression with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type write_batch_timeout: object
+    :param sink_retry_count: Sink retry count. Type: integer (or Expression
+     with resultType integer).
+    :type sink_retry_count: object
+    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+     resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type sink_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the sink data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param write_behavior: The write behavior for the operation. Default is
+     Insert. Possible values include: 'Insert', 'Upsert'
+    :type write_behavior: str or
+     ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior
+    :param external_id_field_name: The name of the external ID field for the
+     upsert operation. Default value is the 'Id' column. Type: string (or
+     Expression with resultType string).
+    :type external_id_field_name: object
+    :param ignore_null_values: The flag indicating whether or not to ignore
+     null values from the input dataset (except key fields) during a write
+     operation. Default value is false. If set to true, ADF leaves the data
+     in the destination object unchanged on upsert/update and inserts the
+     defined default value on insert; if set to false, ADF sets the data in
+     the destination object to NULL on upsert/update and inserts NULL on
+     insert. Type: boolean (or Expression with resultType boolean).
+    :type ignore_null_values: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
+        'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'},
+        'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None:
+        super(SalesforceServiceCloudSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.write_behavior = write_behavior
+        self.external_id_field_name = external_id_field_name
+        self.ignore_null_values = ignore_null_values
+        self.type = 'SalesforceServiceCloudSink'
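A minimal sketch of configuring this sink for an upsert against a custom external ID field; the field name 'External_Id__c' is a hypothetical placeholder:

    from azure.mgmt.datafactory.models import SalesforceServiceCloudSink

    # Upsert using a custom external ID field instead of the default 'Id';
    # ignore_null_values=True leaves existing destination values untouched
    # when the corresponding input value is null.
    sink = SalesforceServiceCloudSink(
        write_behavior='Upsert',
        external_id_field_name='External_Id__c',
        ignore_null_values=True,
    )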
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source.py
new file mode 100644
index 000000000000..255bfab477bc
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source.py
@@ -0,0 +1,63 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source import CopySource
+
+
+class SalesforceServiceCloudSource(CopySource):
+    """A copy activity Salesforce Service Cloud source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param query: Database query. Type: string (or Expression with resultType
+     string).
+    :type query: object
+    :param read_behavior: The read behavior for the operation. Default is
+     Query. Possible values include: 'Query', 'QueryAll'
+    :type read_behavior: str or
+     ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+        'read_behavior': {'key': 'readBehavior', 'type': 'str'},
+    }
+
+    def __init__(self, **kwargs):
+        super(SalesforceServiceCloudSource, self).__init__(**kwargs)
+        self.query = kwargs.get('query', None)
+        self.read_behavior = kwargs.get('read_behavior', None)
+        self.type = 'SalesforceServiceCloudSource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source_py3.py
new file mode 100644
index 000000000000..77bb267f5a47
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source_py3.py
@@ -0,0 +1,63 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source_py3 import CopySource
+
+
+class SalesforceServiceCloudSource(CopySource):
+    """A copy activity Salesforce Service Cloud source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param query: Database query. Type: string (or Expression with resultType
+     string).
+    :type query: object
+    :param read_behavior: The read behavior for the operation. Default is
+     Query. Possible values include: 'Query', 'QueryAll'
+    :type read_behavior: str or
+     ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+        'read_behavior': {'key': 'readBehavior', 'type': 'str'},
+    }
+
+    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, read_behavior=None, **kwargs) -> None:
+        super(SalesforceServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.query = query
+        self.read_behavior = read_behavior
+        self.type = 'SalesforceServiceCloudSource'
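A minimal sketch of this source with an explicit SOQL query; the query text is a placeholder:

    from azure.mgmt.datafactory.models import SalesforceServiceCloudSource

    # 'QueryAll' also returns soft-deleted and archived records,
    # whereas the default 'Query' behavior returns only active ones.
    source = SalesforceServiceCloudSource(
        query='SELECT Id, Subject FROM Case',
        read_behavior='QueryAll',
    )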
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py
new file mode 100644
index 000000000000..9a1291bd4bfe
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py
@@ -0,0 +1,84 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_sink import CopySink
+
+
+class SalesforceSink(CopySink):
+    """A copy activity Salesforce sink.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param write_batch_size: Write batch size. Type: integer (or Expression
+     with resultType integer), minimum: 0.
+    :type write_batch_size: object
+    :param write_batch_timeout: Write batch timeout. Type: string (or
+     Expression with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type write_batch_timeout: object
+    :param sink_retry_count: Sink retry count. Type: integer (or Expression
+     with resultType integer).
+    :type sink_retry_count: object
+    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+     resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type sink_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the sink data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param write_behavior: The write behavior for the operation. Default is
+     Insert. Possible values include: 'Insert', 'Upsert'
+    :type write_behavior: str or
+     ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior
+    :param external_id_field_name: The name of the external ID field for the
+     upsert operation. Default value is the 'Id' column. Type: string (or
+     Expression with resultType string).
+    :type external_id_field_name: object
+    :param ignore_null_values: The flag indicating whether or not to ignore
+     null values from the input dataset (except key fields) during a write
+     operation. Default value is false. If set to true, ADF leaves the data
+     in the destination object unchanged on upsert/update and inserts the
+     defined default value on insert; if set to false, ADF sets the data in
+     the destination object to NULL on upsert/update and inserts NULL on
+     insert. Type: boolean (or Expression with resultType boolean).
+    :type ignore_null_values: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
+        'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'},
+        'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(SalesforceSink, self).__init__(**kwargs)
+        self.write_behavior = kwargs.get('write_behavior', None)
+        self.external_id_field_name = kwargs.get('external_id_field_name', None)
+        self.ignore_null_values = kwargs.get('ignore_null_values', None)
+        self.type = 'SalesforceSink'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py
new file mode 100644
index 000000000000..54a56618d01e
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py
@@ -0,0 +1,84 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_sink_py3 import CopySink
+
+
+class SalesforceSink(CopySink):
+    """A copy activity Salesforce sink.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param write_batch_size: Write batch size. Type: integer (or Expression
+     with resultType integer), minimum: 0.
+    :type write_batch_size: object
+    :param write_batch_timeout: Write batch timeout. Type: string (or
+     Expression with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type write_batch_timeout: object
+    :param sink_retry_count: Sink retry count. Type: integer (or Expression
+     with resultType integer).
+    :type sink_retry_count: object
+    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+     resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type sink_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the sink data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param write_behavior: The write behavior for the operation. Default is
+     Insert. Possible values include: 'Insert', 'Upsert'
+    :type write_behavior: str or
+     ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior
+    :param external_id_field_name: The name of the external ID field for the
+     upsert operation. Default value is the 'Id' column. Type: string (or
+     Expression with resultType string).
+    :type external_id_field_name: object
+    :param ignore_null_values: The flag indicating whether or not to ignore
+     null values from the input dataset (except key fields) during a write
+     operation. Default value is false. If set to true, ADF leaves the data
+     in the destination object unchanged on upsert/update and inserts the
+     defined default value on insert; if set to false, ADF sets the data in
+     the destination object to NULL on upsert/update and inserts NULL on
+     insert. Type: boolean (or Expression with resultType boolean).
+    :type ignore_null_values: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
+        'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'},
+        'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None:
+        super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.write_behavior = write_behavior
+        self.external_id_field_name = external_id_field_name
+        self.ignore_null_values = ignore_null_values
+        self.type = 'SalesforceSink'
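The plain Salesforce sink takes the same write options as the Service Cloud variant. A minimal sketch showing the defaults spelled out explicitly:

    from azure.mgmt.datafactory.models import SalesforceSink

    sink = SalesforceSink(
        write_behavior='Insert',   # the default behavior, shown explicitly
        ignore_null_values=False,  # null input values are written as NULL
    )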
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py
new file mode 100644
index 000000000000..4f2590c3ab9d
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py
@@ -0,0 +1,63 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source import CopySource
+
+
+class SalesforceSource(CopySource):
+    """A copy activity Salesforce source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param query: Database query. Type: string (or Expression with resultType
+     string).
+    :type query: object
+    :param read_behavior: The read behavior for the operation. Default is
+     Query. Possible values include: 'Query', 'QueryAll'
+    :type read_behavior: str or
+     ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+        'read_behavior': {'key': 'readBehavior', 'type': 'str'},
+    }
+
+    def __init__(self, **kwargs):
+        super(SalesforceSource, self).__init__(**kwargs)
+        self.query = kwargs.get('query', None)
+        self.read_behavior = kwargs.get('read_behavior', None)
+        self.type = 'SalesforceSource'
Possible values include: 'Query', 'QueryAll' + :type read_behavior: str or + ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, read_behavior=None, **kwargs) -> None: + super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.read_behavior = read_behavior + self.type = 'SalesforceSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset.py new file mode 100644 index 000000000000..048d26f85696 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class SapBwCubeDataset(Dataset): + """The SAP BW cube dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. 
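+
+    A minimal construction sketch, assuming only the required linked
+    service reference; the reference name is a placeholder, not a value
+    from this patch::
+
+        from azure.mgmt.datafactory.models import (
+            LinkedServiceReference, SapBwCubeDataset)
+
+        cube_dataset = SapBwCubeDataset(
+            linked_service_name=LinkedServiceReference(
+                reference_name='SapBWLinkedService1'))
+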
+ :type type: str + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SapBwCubeDataset, self).__init__(**kwargs) + self.type = 'SapBwCube' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset_py3.py new file mode 100644 index 000000000000..08334a824ba4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset_py3.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SapBwCubeDataset(Dataset): + """The SAP BW cube dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(SapBwCubeDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'SapBwCube' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service.py new file mode 100644 index 000000000000..a57164c7215d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service.py @@ -0,0 +1,88 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class SapBWLinkedService(LinkedService): + """SAP Business Warehouse Linked Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. Host name of the SAP BW instance. Type: string + (or Expression with resultType string). + :type server: object + :param system_number: Required. System number of the BW system. (Usually a + two-digit decimal number represented as a string.) Type: string (or + Expression with resultType string). + :type system_number: object + :param client_id: Required. Client ID of the client on the BW system. + (Usually a three-digit decimal number represented as a string) Type: + string (or Expression with resultType string). 
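+
+    A minimal construction sketch; the host, system number, client, and
+    credential values are placeholders, not values from this patch::
+
+        from azure.mgmt.datafactory.models import (
+            SapBWLinkedService, SecureString)
+
+        bw_service = SapBWLinkedService(
+            server='sapbw.contoso.example',
+            system_number='00',
+            client_id='100',
+            user_name='bw_reader',
+            password=SecureString(value='<password>'))
+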
+ :type client_id: object + :param user_name: Username to access the SAP BW server. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password to access the SAP BW server. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'system_number': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapBWLinkedService, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.system_number = kwargs.get('system_number', None) + self.client_id = kwargs.get('client_id', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'SapBW' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service_py3.py new file mode 100644 index 000000000000..92aef25dc215 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service_py3.py @@ -0,0 +1,88 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SapBWLinkedService(LinkedService): + """SAP Business Warehouse Linked Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. Host name of the SAP BW instance. Type: string + (or Expression with resultType string). + :type server: object + :param system_number: Required. System number of the BW system. (Usually a + two-digit decimal number represented as a string.) Type: string (or + Expression with resultType string). + :type system_number: object + :param client_id: Required. Client ID of the client on the BW system. + (Usually a three-digit decimal number represented as a string) Type: + string (or Expression with resultType string). + :type client_id: object + :param user_name: Username to access the SAP BW server. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password to access the SAP BW server. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'system_number': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, server, system_number, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(SapBWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.server = server + self.system_number = system_number + self.client_id = client_id + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'SapBW' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source.py new file mode 100644 index 000000000000..e3762d8e694e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SapBwSource(CopySource): + """A copy activity source for SapBW server via MDX. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: MDX query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapBwSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'SapBwSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source_py3.py new file mode 100644 index 000000000000..ed6ff734742d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SapBwSource(CopySource): + """A copy activity source for SapBW server via MDX. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. 
Type: string (or Expression
+    with resultType string), pattern:
+    ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+    for the source data store. Type: integer (or Expression with resultType
+    integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param query: MDX query. Type: string (or Expression with resultType
+    string).
+    :type query: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
+        super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.query = query
+        self.type = 'SapBwSource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service.py
new file mode 100644
index 000000000000..53d47ab8ae41
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service.py
@@ -0,0 +1,76 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class SapCloudForCustomerLinkedService(LinkedService):
+    """Linked service for SAP Cloud for Customer.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+    deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+    ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+    ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+    linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param url: Required. The URL of SAP Cloud for Customer OData API. For
+    example, 'https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1'. Type:
+    string (or Expression with resultType string).
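+
+    A minimal construction sketch; the tenant URL and credentials are
+    placeholders::
+
+        from azure.mgmt.datafactory.models import (
+            SapCloudForCustomerLinkedService, SecureString)
+
+        c4c_service = SapCloudForCustomerLinkedService(
+            url='https://mytenant.crm.ondemand.com/sap/c4c/odata/v1',
+            username='ODATA_USER',
+            password=SecureString(value='<password>'))
+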
+ :type url: object + :param username: The username for Basic authentication. Type: string (or + Expression with resultType string). + :type username: object + :param password: The password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Either encryptedCredential or username/password must + be provided. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapCloudForCustomerLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'SapCloudForCustomer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service_py3.py new file mode 100644 index 000000000000..9e47fd696503 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service_py3.py @@ -0,0 +1,76 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SapCloudForCustomerLinkedService(LinkedService): + """Linked service for SAP Cloud for Customer. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. 
Constant filled by server. + :type type: str + :param url: Required. The URL of SAP Cloud for Customer OData API. For + example, '[https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1]'. Type: + string (or Expression with resultType string). + :type url: object + :param username: The username for Basic authentication. Type: string (or + Expression with resultType string). + :type username: object + :param password: The password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Either encryptedCredential or username/password must + be provided. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(SapCloudForCustomerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'SapCloudForCustomer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset.py new file mode 100644 index 000000000000..436b251207a4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class SapCloudForCustomerResourceDataset(Dataset): + """The path of the SAP Cloud for Customer OData entity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. 
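+
+    A minimal construction sketch; the entity path and linked service
+    name are placeholders::
+
+        from azure.mgmt.datafactory.models import (
+            LinkedServiceReference, SapCloudForCustomerResourceDataset)
+
+        c4c_dataset = SapCloudForCustomerResourceDataset(
+            linked_service_name=LinkedServiceReference(
+                reference_name='SapC4CLinkedService1'),
+            path='LeadCollection')
+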
+ :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param path: Required. The path of the SAP Cloud for Customer OData + entity. Type: string (or Expression with resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapCloudForCustomerResourceDataset, self).__init__(**kwargs) + self.path = kwargs.get('path', None) + self.type = 'SapCloudForCustomerResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset_py3.py new file mode 100644 index 000000000000..455bad7c9095 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SapCloudForCustomerResourceDataset(Dataset): + """The path of the SAP Cloud for Customer OData entity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. 
+ :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param path: Required. The path of the SAP Cloud for Customer OData + entity. Type: string (or Expression with resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, path, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(SapCloudForCustomerResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.path = path + self.type = 'SapCloudForCustomerResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py new file mode 100644 index 000000000000..e5a37858abb5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class SapCloudForCustomerSink(CopySink): + """A copy activity SAP Cloud for Customer sink. 
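+
+    A minimal usage sketch; 'Update' is one of the documented write
+    behavior values and is chosen here only for illustration::
+
+        from azure.mgmt.datafactory.models import SapCloudForCustomerSink
+
+        sink = SapCloudForCustomerSink(write_behavior='Update')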
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param write_behavior: The write behavior for the operation. Default is + 'Insert'. Possible values include: 'Insert', 'Update' + :type write_behavior: str or + ~azure.mgmt.datafactory.models.SapCloudForCustomerSinkWriteBehavior + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SapCloudForCustomerSink, self).__init__(**kwargs) + self.write_behavior = kwargs.get('write_behavior', None) + self.type = 'SapCloudForCustomerSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py new file mode 100644 index 000000000000..29f01fdd6891 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class SapCloudForCustomerSink(CopySink): + """A copy activity SAP Cloud for Customer sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. 
Type: integer (or Expression
+    with resultType integer), minimum: 0.
+    :type write_batch_size: object
+    :param write_batch_timeout: Write batch timeout. Type: string (or
+    Expression with resultType string), pattern:
+    ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type write_batch_timeout: object
+    :param sink_retry_count: Sink retry count. Type: integer (or Expression
+    with resultType integer).
+    :type sink_retry_count: object
+    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+    resultType string), pattern:
+    ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type sink_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+    for the sink data store. Type: integer (or Expression with resultType
+    integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param write_behavior: The write behavior for the operation. Default is
+    'Insert'. Possible values include: 'Insert', 'Update'
+    :type write_behavior: str or
+    ~azure.mgmt.datafactory.models.SapCloudForCustomerSinkWriteBehavior
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
+    }
+
+    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None:
+        super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.write_behavior = write_behavior
+        self.type = 'SapCloudForCustomerSink'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source.py
new file mode 100644
index 000000000000..561c1b342f93
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source import CopySource
+
+
+class SapCloudForCustomerSource(CopySource):
+    """A copy activity source for SAP Cloud for Customer.
+
+    All required parameters must be populated in order to send to Azure.
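+
+    A minimal usage sketch; the OData query string is a placeholder::
+
+        from azure.mgmt.datafactory.models import SapCloudForCustomerSource
+
+        source = SapCloudForCustomerSource(query='$top=10')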
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: SAP Cloud for Customer OData query. For example, "$top=1". + Type: string (or Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapCloudForCustomerSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'SapCloudForCustomerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source_py3.py new file mode 100644 index 000000000000..e9dab6ad1899 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SapCloudForCustomerSource(CopySource): + """A copy activity source for SAP Cloud for Customer source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: SAP Cloud for Customer OData query. 
For example, "$top=1". + Type: string (or Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'SapCloudForCustomerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service.py new file mode 100644 index 000000000000..0ca69242055f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service.py @@ -0,0 +1,76 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class SapEccLinkedService(LinkedService): + """Linked service for SAP ERP Central Component(SAP ECC). + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The URL of SAP ECC OData API. For example, + '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: string (or + Expression with resultType string). + :type url: str + :param username: The username for Basic authentication. Type: string (or + Expression with resultType string). + :type username: str + :param password: The password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Either encryptedCredential or username/password must + be provided. 
Type: string (or Expression with resultType string). + :type encrypted_credential: str + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'str'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SapEccLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'SapEcc' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service_py3.py new file mode 100644 index 000000000000..7afd76b8fe09 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service_py3.py @@ -0,0 +1,76 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SapEccLinkedService(LinkedService): + """Linked service for SAP ERP Central Component(SAP ECC). + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The URL of SAP ECC OData API. For example, + '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: string (or + Expression with resultType string). + :type url: str + :param username: The username for Basic authentication. Type: string (or + Expression with resultType string). + :type username: str + :param password: The password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. 
Credentials are encrypted using the integration runtime + credential manager. Either encryptedCredential or username/password must + be provided. Type: string (or Expression with resultType string). + :type encrypted_credential: str + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'str'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + } + + def __init__(self, *, url: str, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, username: str=None, password=None, encrypted_credential: str=None, **kwargs) -> None: + super(SapEccLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'SapEcc' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py new file mode 100644 index 000000000000..f79367f49b3d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class SapEccResourceDataset(Dataset): + """The path of the SAP ECC OData entity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
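+
+    Analogous to the SAP Cloud for Customer dataset above, a minimal
+    construction sketch; the entity path and reference name are
+    placeholders::
+
+        from azure.mgmt.datafactory.models import (
+            LinkedServiceReference, SapEccResourceDataset)
+
+        ecc_dataset = SapEccResourceDataset(
+            linked_service_name=LinkedServiceReference(
+                reference_name='SapEccLinkedService1'),
+            path='SalesOrderCollection')
+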
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param path: Required. The path of the SAP ECC OData entity. Type: string + (or Expression with resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapEccResourceDataset, self).__init__(**kwargs) + self.path = kwargs.get('path', None) + self.type = 'SapEccResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py new file mode 100644 index 000000000000..76aaeb9bb9f2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SapEccResourceDataset(Dataset): + """The path of the SAP ECC OData entity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param path: Required. The path of the SAP ECC OData entity. Type: string + (or Expression with resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, path, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(SapEccResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.path = path + self.type = 'SapEccResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py new file mode 100644 index 000000000000..6379c33713d4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SapEccSource(CopySource): + """A copy activity source for SAP ECC source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. 
Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: SAP ECC OData query. For example, "$top=1". Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapEccSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'SapEccSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py new file mode 100644 index 000000000000..4412cac39960 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SapEccSource(CopySource): + """A copy activity source for SAP ECC source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: SAP ECC OData query. For example, "$top=1". Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'SapEccSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py new file mode 100644 index 000000000000..14eda87b7be6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class SapHanaLinkedService(LinkedService): + """SAP HANA Linked Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: SAP HANA ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param server: Required. Host name of the SAP HANA server. Type: string + (or Expression with resultType string). + :type server: object + :param authentication_type: The authentication type to be used to connect + to the SAP HANA server. Possible values include: 'Basic', 'Windows' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.SapHanaAuthenticationType + :param user_name: Username to access the SAP HANA server. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password to access the SAP HANA server. 
+ :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapHanaLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.server = kwargs.get('server', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'SapHana' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py new file mode 100644 index 000000000000..de378a5b2bf3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SapHanaLinkedService(LinkedService): + """SAP HANA Linked Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. 
+ :type type: str + :param connection_string: SAP HANA ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param server: Required. Host name of the SAP HANA server. Type: string + (or Expression with resultType string). + :type server: object + :param authentication_type: The authentication type to be used to connect + to the SAP HANA server. Possible values include: 'Basic', 'Windows' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.SapHanaAuthenticationType + :param user_name: Username to access the SAP HANA server. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password to access the SAP HANA server. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, server, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(SapHanaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.server = server + self.authentication_type = authentication_type + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'SapHana' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source.py new file mode 100644 index 000000000000..e946dbcd9a50 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source.py @@ -0,0 +1,62 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
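+# Usage sketch (illustrative, not generator output): constructing the
+# SapHanaLinkedService defined above. Host, user, and password values are
+# hypothetical; SecureString is the inline SecretBase type from this package.
+#
+#     from azure.mgmt.datafactory.models import (
+#         SapHanaLinkedService, SecureString)
+#
+#     hana_ls = SapHanaLinkedService(
+#         server='myhana.contoso.com:30015',  # required
+#         authentication_type='Basic',        # or 'Windows'
+#         user_name='hana_user',
+#         password=SecureString(value='<password>'))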
+# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SapHanaSource(CopySource): + """A copy activity source for SAP HANA source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: SAP HANA Sql query. Type: string (or Expression with + resultType string). + :type query: object + :param packet_size: The packet size of data read from SAP HANA. Type: + integer(or Expression with resultType integer). + :type packet_size: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'packet_size': {'key': 'packetSize', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapHanaSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.packet_size = kwargs.get('packet_size', None) + self.type = 'SapHanaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source_py3.py new file mode 100644 index 000000000000..730326c19183 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source_py3.py @@ -0,0 +1,62 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SapHanaSource(CopySource): + """A copy activity source for SAP HANA source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. 
Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: SAP HANA Sql query. Type: string (or Expression with + resultType string). + :type query: object + :param packet_size: The packet size of data read from SAP HANA. Type: + integer(or Expression with resultType integer). + :type packet_size: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'packet_size': {'key': 'packetSize', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, packet_size=None, **kwargs) -> None: + super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.packet_size = packet_size + self.type = 'SapHanaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset.py new file mode 100644 index 000000000000..6ff1ae31cd22 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class SapHanaTableDataset(Dataset): + """SAP HANA Table properties. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param sap_hana_table_dataset_schema: The schema name of SAP HANA. Type: + string (or Expression with resultType string). + :type sap_hana_table_dataset_schema: object + :param table: The table name of SAP HANA. Type: string (or Expression with + resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sap_hana_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapHanaTableDataset, self).__init__(**kwargs) + self.sap_hana_table_dataset_schema = kwargs.get('sap_hana_table_dataset_schema', None) + self.table = kwargs.get('table', None) + self.type = 'SapHanaTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset_py3.py new file mode 100644 index 000000000000..6dc5c48ba21d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SapHanaTableDataset(Dataset): + """SAP HANA Table properties. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param sap_hana_table_dataset_schema: The schema name of SAP HANA. Type: + string (or Expression with resultType string). + :type sap_hana_table_dataset_schema: object + :param table: The table name of SAP HANA. Type: string (or Expression with + resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sap_hana_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, sap_hana_table_dataset_schema=None, table=None, **kwargs) -> None: + super(SapHanaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.sap_hana_table_dataset_schema = sap_hana_table_dataset_schema + self.table = table + self.type = 'SapHanaTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service.py new file mode 100644 index 000000000000..bfe9c323d302 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class SapOpenHubLinkedService(LinkedService): + """SAP Business Warehouse Open Hub Destination Linked Service. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. Host name of the SAP BW instance where the open + hub destination is located. Type: string (or Expression with resultType + string). + :type server: object + :param system_number: Required. System number of the BW system where the + open hub destination is located. (Usually a two-digit decimal number + represented as a string.) Type: string (or Expression with resultType + string). + :type system_number: object + :param client_id: Required. Client ID of the client on the BW system where + the open hub destination is located. (Usually a three-digit decimal number + represented as a string) Type: string (or Expression with resultType + string). + :type client_id: object + :param language: Language of the BW system where the open hub destination + is located. The default value is EN. Type: string (or Expression with + resultType string). + :type language: object + :param user_name: Username to access the SAP BW server where the open hub + destination is located. Type: string (or Expression with resultType + string). + :type user_name: object + :param password: Password to access the SAP BW server where the open hub + destination is located. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'system_number': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'language': {'key': 'typeProperties.language', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapOpenHubLinkedService, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.system_number = kwargs.get('system_number', None) + self.client_id = kwargs.get('client_id', None) + self.language = kwargs.get('language', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'SapOpenHub' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service_py3.py new file mode 100644 index 000000000000..eddc50b0f1c5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service_py3.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SapOpenHubLinkedService(LinkedService): + """SAP Business Warehouse Open Hub Destination Linked Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. Host name of the SAP BW instance where the open + hub destination is located. 
Type: string (or Expression with resultType + string). + :type server: object + :param system_number: Required. System number of the BW system where the + open hub destination is located. (Usually a two-digit decimal number + represented as a string.) Type: string (or Expression with resultType + string). + :type system_number: object + :param client_id: Required. Client ID of the client on the BW system where + the open hub destination is located. (Usually a three-digit decimal number + represented as a string) Type: string (or Expression with resultType + string). + :type client_id: object + :param language: Language of the BW system where the open hub destination + is located. The default value is EN. Type: string (or Expression with + resultType string). + :type language: object + :param user_name: Username to access the SAP BW server where the open hub + destination is located. Type: string (or Expression with resultType + string). + :type user_name: object + :param password: Password to access the SAP BW server where the open hub + destination is located. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'system_number': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'language': {'key': 'typeProperties.language', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, server, system_number, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, language=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(SapOpenHubLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.server = server + self.system_number = system_number + self.client_id = client_id + self.language = language + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'SapOpenHub' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source.py new file mode 100644 index 000000000000..d6dcbda60e36 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source.py @@ -0,0 +1,66 @@ 
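+# Usage sketch (illustrative, not generator output): the
+# SapOpenHubLinkedService defined above has three required connection
+# properties; all values below are hypothetical.
+#
+#     from azure.mgmt.datafactory.models import (
+#         SapOpenHubLinkedService, SecureString)
+#
+#     open_hub_ls = SapOpenHubLinkedService(
+#         server='sapbw.contoso.com',  # SAP BW host
+#         system_number='00',          # two-digit string
+#         client_id='100',             # three-digit string
+#         user_name='bw_user',
+#         password=SecureString(value='<password>'))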
+# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SapOpenHubSource(CopySource): + """A copy activity source for SAP Business Warehouse Open Hub Destination + source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param exclude_last_request: Whether to exclude the records of the last + request. The default value is true. Type: boolean (or Expression with + resultType boolean). + :type exclude_last_request: object + :param base_request_id: The ID of request for delta loading. Once it is + set, only data with requestId larger than the value of this property will + be retrieved. The default value is 0. Type: integer (or Expression with + resultType integer ). + :type base_request_id: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, + 'base_request_id': {'key': 'baseRequestId', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapOpenHubSource, self).__init__(**kwargs) + self.exclude_last_request = kwargs.get('exclude_last_request', None) + self.base_request_id = kwargs.get('base_request_id', None) + self.type = 'SapOpenHubSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source_py3.py new file mode 100644 index 000000000000..752ffd8554b0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source_py3.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
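+# Usage sketch (illustrative, not generator output): delta loading with the
+# SapOpenHubSource defined above. Setting base_request_id skips requests that
+# earlier runs already copied; the ID below is hypothetical.
+#
+#     from azure.mgmt.datafactory.models import SapOpenHubSource
+#
+#     source = SapOpenHubSource(
+#         exclude_last_request=True,  # default; skip the still-open request
+#         base_request_id=40)         # only read rows with requestId > 40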
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SapOpenHubSource(CopySource): + """A copy activity source for SAP Business Warehouse Open Hub Destination + source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param exclude_last_request: Whether to exclude the records of the last + request. The default value is true. Type: boolean (or Expression with + resultType boolean). + :type exclude_last_request: object + :param base_request_id: The ID of request for delta loading. Once it is + set, only data with requestId larger than the value of this property will + be retrieved. The default value is 0. Type: integer (or Expression with + resultType integer ). + :type base_request_id: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, + 'base_request_id': {'key': 'baseRequestId', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, exclude_last_request=None, base_request_id=None, **kwargs) -> None: + super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.exclude_last_request = exclude_last_request + self.base_request_id = base_request_id + self.type = 'SapOpenHubSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset.py new file mode 100644 index 000000000000..2682969c5016 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
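+# Usage sketch (illustrative, not generator output): wiring the
+# SapOpenHubSource defined above into a copy activity. The dataset names and
+# the blob sink pairing are hypothetical.
+#
+#     from azure.mgmt.datafactory.models import (
+#         BlobSink, CopyActivity, DatasetReference, SapOpenHubSource)
+#
+#     copy = CopyActivity(
+#         name='CopyFromOpenHub',
+#         inputs=[DatasetReference(reference_name='OpenHubTableDataset')],
+#         outputs=[DatasetReference(reference_name='BlobOutputDataset')],
+#         source=SapOpenHubSource(),
+#         sink=BlobSink())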
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class SapOpenHubTableDataset(Dataset): + """Sap Business Warehouse Open Hub Destination Table properties. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param open_hub_destination_name: Required. The name of the Open Hub + Destination with destination type as Database Table. Type: string (or + Expression with resultType string). + :type open_hub_destination_name: object + :param exclude_last_request: Whether to exclude the records of the last + request. The default value is true. Type: boolean (or Expression with + resultType boolean). + :type exclude_last_request: object + :param base_request_id: The ID of request for delta loading. Once it is + set, only data with requestId larger than the value of this property will + be retrieved. The default value is 0. Type: integer (or Expression with + resultType integer ). 
+ :type base_request_id: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'open_hub_destination_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'open_hub_destination_name': {'key': 'typeProperties.openHubDestinationName', 'type': 'object'}, + 'exclude_last_request': {'key': 'typeProperties.excludeLastRequest', 'type': 'object'}, + 'base_request_id': {'key': 'typeProperties.baseRequestId', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapOpenHubTableDataset, self).__init__(**kwargs) + self.open_hub_destination_name = kwargs.get('open_hub_destination_name', None) + self.exclude_last_request = kwargs.get('exclude_last_request', None) + self.base_request_id = kwargs.get('base_request_id', None) + self.type = 'SapOpenHubTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset_py3.py new file mode 100644 index 000000000000..b06a53c10db3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset_py3.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SapOpenHubTableDataset(Dataset): + """Sap Business Warehouse Open Hub Destination Table properties. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. 
If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param open_hub_destination_name: Required. The name of the Open Hub + Destination with destination type as Database Table. Type: string (or + Expression with resultType string). + :type open_hub_destination_name: object + :param exclude_last_request: Whether to exclude the records of the last + request. The default value is true. Type: boolean (or Expression with + resultType boolean). + :type exclude_last_request: object + :param base_request_id: The ID of request for delta loading. Once it is + set, only data with requestId larger than the value of this property will + be retrieved. The default value is 0. Type: integer (or Expression with + resultType integer ). + :type base_request_id: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'open_hub_destination_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'open_hub_destination_name': {'key': 'typeProperties.openHubDestinationName', 'type': 'object'}, + 'exclude_last_request': {'key': 'typeProperties.excludeLastRequest', 'type': 'object'}, + 'base_request_id': {'key': 'typeProperties.baseRequestId', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, open_hub_destination_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, exclude_last_request=None, base_request_id=None, **kwargs) -> None: + super(SapOpenHubTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.open_hub_destination_name = open_hub_destination_name + self.exclude_last_request = exclude_last_request + self.base_request_id = base_request_id + self.type = 'SapOpenHubTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service.py new file mode 100644 index 000000000000..83b76d0a4fdd --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service.py @@ -0,0 +1,140 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
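+# Usage sketch (illustrative, not generator output): constructing the
+# SapOpenHubTableDataset defined above. The linked service and destination
+# names are hypothetical.
+#
+#     from azure.mgmt.datafactory.models import (
+#         LinkedServiceReference, SapOpenHubTableDataset)
+#
+#     ds = SapOpenHubTableDataset(
+#         linked_service_name=LinkedServiceReference(
+#             reference_name='SapOpenHubLinkedService'),
+#         open_hub_destination_name='MyOpenHubDestination')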
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class SapTableLinkedService(LinkedService):
+    """SAP Table Linked Service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param server: Host name of the SAP instance where the table is located.
+     Type: string (or Expression with resultType string).
+    :type server: object
+    :param system_number: System number of the SAP system where the table is
+     located. (Usually a two-digit decimal number represented as a string.)
+     Type: string (or Expression with resultType string).
+    :type system_number: object
+    :param client_id: Client ID of the client on the SAP system where the
+     table is located. (Usually a three-digit decimal number represented as a
+     string) Type: string (or Expression with resultType string).
+    :type client_id: object
+    :param language: Language of the SAP system where the table is located.
+     The default value is EN. Type: string (or Expression with resultType
+     string).
+    :type language: object
+    :param system_id: SystemID of the SAP system where the table is located.
+     Type: string (or Expression with resultType string).
+    :type system_id: object
+    :param user_name: Username to access the SAP server where the table is
+     located. Type: string (or Expression with resultType string).
+    :type user_name: object
+    :param password: Password to access the SAP server where the table is
+     located.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param message_server: The hostname of the SAP Message Server. Type:
+     string (or Expression with resultType string).
+    :type message_server: object
+    :param message_server_service: The service name or port number of the
+     Message Server. Type: string (or Expression with resultType string).
+    :type message_server_service: object
+    :param snc_mode: SNC activation indicator to access the SAP server where
+     the table is located. Must be either 0 (off) or 1 (on). Type: string (or
+     Expression with resultType string).
+    :type snc_mode: object
+    :param snc_my_name: Initiator's SNC name to access the SAP server where
+     the table is located. Type: string (or Expression with resultType string).
+    :type snc_my_name: object
+    :param snc_partner_name: Communication partner's SNC name to access the
+     SAP server where the table is located. Type: string (or Expression with
+     resultType string).
+    :type snc_partner_name: object
+    :param snc_library_path: External security product's library to access the
+     SAP server where the table is located. Type: string (or Expression with
+     resultType string).
+    :type snc_library_path: object
+    :param snc_qop: SNC Quality of Protection. Allowed values include: 1, 2, 3,
+     8, 9. Type: string (or Expression with resultType string).
+ :type snc_qop: object + :param logon_group: The Logon Group for the SAP System. Type: string (or + Expression with resultType string). + :type logon_group: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'language': {'key': 'typeProperties.language', 'type': 'object'}, + 'system_id': {'key': 'typeProperties.systemId', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'message_server': {'key': 'typeProperties.messageServer', 'type': 'object'}, + 'message_server_service': {'key': 'typeProperties.messageServerService', 'type': 'object'}, + 'snc_mode': {'key': 'typeProperties.sncMode', 'type': 'object'}, + 'snc_my_name': {'key': 'typeProperties.sncMyName', 'type': 'object'}, + 'snc_partner_name': {'key': 'typeProperties.sncPartnerName', 'type': 'object'}, + 'snc_library_path': {'key': 'typeProperties.sncLibraryPath', 'type': 'object'}, + 'snc_qop': {'key': 'typeProperties.sncQop', 'type': 'object'}, + 'logon_group': {'key': 'typeProperties.logonGroup', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapTableLinkedService, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.system_number = kwargs.get('system_number', None) + self.client_id = kwargs.get('client_id', None) + self.language = kwargs.get('language', None) + self.system_id = kwargs.get('system_id', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.message_server = kwargs.get('message_server', None) + self.message_server_service = kwargs.get('message_server_service', None) + self.snc_mode = kwargs.get('snc_mode', None) + self.snc_my_name = kwargs.get('snc_my_name', None) + self.snc_partner_name = kwargs.get('snc_partner_name', None) + self.snc_library_path = kwargs.get('snc_library_path', None) + self.snc_qop = kwargs.get('snc_qop', None) + self.logon_group = kwargs.get('logon_group', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'SapTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service_py3.py new file mode 100644 index 000000000000..d098acc1bbda --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service_py3.py @@ -0,0 +1,140 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SapTableLinkedService(LinkedService): + """SAP Table Linked Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Host name of the SAP instance where the table is located. + Type: string (or Expression with resultType string). + :type server: object + :param system_number: System number of the SAP system where the table is + located. (Usually a two-digit decimal number represented as a string.) + Type: string (or Expression with resultType string). + :type system_number: object + :param client_id: Client ID of the client on the SAP system where the + table is located. (Usually a three-digit decimal number represented as a + string) Type: string (or Expression with resultType string). + :type client_id: object + :param language: Language of the SAP system where the table is located. + The default value is EN. Type: string (or Expression with resultType + string). + :type language: object + :param system_id: SystemID of the SAP system where the table is located. + Type: string (or Expression with resultType string). + :type system_id: object + :param user_name: Username to access the SAP server where the table is + located. Type: string (or Expression with resultType string). + :type user_name: object + :param password: Password to access the SAP server where the table is + located. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param message_server: The hostname of the SAP Message Server. Type: + string (or Expression with resultType string). + :type message_server: object + :param message_server_service: The service name or port number of the + Message Server. Type: string (or Expression with resultType string). + :type message_server_service: object + :param snc_mode: SNC activation indicator to access the SAP server where + the table is located. Must be either 0 (off) or 1 (on). Type: string (or + Expression with resultType string). + :type snc_mode: object + :param snc_my_name: Initiator's SNC name to access the SAP server where + the table is located. Type: string (or Expression with resultType string). + :type snc_my_name: object + :param snc_partner_name: Communication partner's SNC name to access the + SAP server where the table is located. Type: string (or Expression with + resultType string). 
+    :type snc_partner_name: object
+    :param snc_library_path: External security product's library to access the
+     SAP server where the table is located. Type: string (or Expression with
+     resultType string).
+    :type snc_library_path: object
+    :param snc_qop: SNC Quality of Protection. Allowed values include: 1, 2, 3,
+     8, 9. Type: string (or Expression with resultType string).
+    :type snc_qop: object
+    :param logon_group: The Logon Group for the SAP System. Type: string (or
+     Expression with resultType string).
+    :type logon_group: object
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'server': {'key': 'typeProperties.server', 'type': 'object'},
+        'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'},
+        'client_id': {'key': 'typeProperties.clientId', 'type': 'object'},
+        'language': {'key': 'typeProperties.language', 'type': 'object'},
+        'system_id': {'key': 'typeProperties.systemId', 'type': 'object'},
+        'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+        'message_server': {'key': 'typeProperties.messageServer', 'type': 'object'},
+        'message_server_service': {'key': 'typeProperties.messageServerService', 'type': 'object'},
+        'snc_mode': {'key': 'typeProperties.sncMode', 'type': 'object'},
+        'snc_my_name': {'key': 'typeProperties.sncMyName', 'type': 'object'},
+        'snc_partner_name': {'key': 'typeProperties.sncPartnerName', 'type': 'object'},
+        'snc_library_path': {'key': 'typeProperties.sncLibraryPath', 'type': 'object'},
+        'snc_qop': {'key': 'typeProperties.sncQop', 'type': 'object'},
+        'logon_group': {'key': 'typeProperties.logonGroup', 'type': 'object'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, server=None, system_number=None, client_id=None, language=None, system_id=None, user_name=None, password=None, message_server=None, message_server_service=None, snc_mode=None, snc_my_name=None, snc_partner_name=None, snc_library_path=None, snc_qop=None, logon_group=None, encrypted_credential=None, **kwargs) -> None:
+        super(SapTableLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+        self.server = server
+        self.system_number = system_number
+        self.client_id = client_id
+        self.language = language
+        self.system_id = system_id
+        self.user_name = user_name
+        self.password = password
+        self.message_server = message_server
+        self.message_server_service = message_server_service
+        self.snc_mode = snc_mode
+        self.snc_my_name = snc_my_name
+        self.snc_partner_name = snc_partner_name
+        self.snc_library_path = snc_library_path
+        self.snc_qop =
snc_qop + self.logon_group = logon_group + self.encrypted_credential = encrypted_credential + self.type = 'SapTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings.py new file mode 100644 index 000000000000..b688fe16683b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SapTablePartitionSettings(Model): + """The settings that will be leveraged for SAP table source partitioning. + + :param partition_column_name: The name of the column that will be used for + proceeding range partitioning. Type: string (or Expression with resultType + string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_lower_bound: object + :param max_partitions_number: The maximum value of partitions the table + will be split into. Type: integer (or Expression with resultType string). + :type max_partitions_number: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + 'max_partitions_number': {'key': 'maxPartitionsNumber', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapTablePartitionSettings, self).__init__(**kwargs) + self.partition_column_name = kwargs.get('partition_column_name', None) + self.partition_upper_bound = kwargs.get('partition_upper_bound', None) + self.partition_lower_bound = kwargs.get('partition_lower_bound', None) + self.max_partitions_number = kwargs.get('max_partitions_number', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings_py3.py new file mode 100644 index 000000000000..37bdf610ab35 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings_py3.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
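A sketch of the SapTableLinkedService above combined with the SecureString secret model added later in this patch; host, system, and credential values are hypothetical:

    from azure.mgmt.datafactory.models import (
        SapTableLinkedService, SecureString)

    # Application-server style connection; the message-server and SNC
    # properties are alternatives and are omitted here.
    linked_service = SapTableLinkedService(
        server='sap.example.com',
        system_number='00',
        client_id='100',
        language='EN',
        user_name='sap_user',
        password=SecureString(value='<secret>'),
    )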
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SapTablePartitionSettings(Model): + """The settings that will be leveraged for SAP table source partitioning. + + :param partition_column_name: The name of the column that will be used for + proceeding range partitioning. Type: string (or Expression with resultType + string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_lower_bound: object + :param max_partitions_number: The maximum value of partitions the table + will be split into. Type: integer (or Expression with resultType string). + :type max_partitions_number: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + 'max_partitions_number': {'key': 'maxPartitionsNumber', 'type': 'object'}, + } + + def __init__(self, *, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, max_partitions_number=None, **kwargs) -> None: + super(SapTablePartitionSettings, self).__init__(**kwargs) + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound + self.max_partitions_number = max_partitions_number diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset.py new file mode 100644 index 000000000000..24601ba6b793 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class SapTableResourceDataset(Dataset): + """SAP Table Resource properties. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. 
Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: Required. The name of the SAP Table. Type: string (or + Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'table_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapTableResourceDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'SapTableResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset_py3.py new file mode 100644 index 000000000000..7b034ccd3a91 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SapTableResourceDataset(Dataset): + """SAP Table Resource properties. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. 
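The dataset model itself carries only a table name on top of the common Dataset properties; a sketch, with a placeholder linked service reference and an arbitrary SAP table name:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, SapTableResourceDataset)

    dataset = SapTableResourceDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='SapTableLinkedService'),  # placeholder name
        table_name='MARA',  # example SAP table
    )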
+ :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: Required. The name of the SAP Table. Type: string (or + Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'table_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(SapTableResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'SapTableResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source.py new file mode 100644 index 000000000000..35799515440e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source.py @@ -0,0 +1,100 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SapTableSource(CopySource): + """A copy activity source for SAP Table source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. 
Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param row_count: The number of rows to be retrieved. Type: integer (or
+     Expression with resultType integer).
+    :type row_count: object
+    :param row_skips: The number of rows that will be skipped. Type: integer
+     (or Expression with resultType integer).
+    :type row_skips: object
+    :param rfc_table_fields: The fields of the SAP table that will be
+     retrieved. For example, column0, column1. Type: string (or Expression with
+     resultType string).
+    :type rfc_table_fields: object
+    :param rfc_table_options: The options for the filtering of the SAP Table.
+     For example, COLUMN0 EQ SOME VALUE. Type: string (or Expression with
+     resultType string).
+    :type rfc_table_options: object
+    :param batch_size: Specifies the maximum number of rows that will be
+     retrieved at a time when retrieving data from SAP Table. Type: integer (or
+     Expression with resultType integer).
+    :type batch_size: object
+    :param custom_rfc_read_table_function_module: Specifies the custom RFC
+     function module that will be used to read data from SAP Table. Type:
+     string (or Expression with resultType string).
+    :type custom_rfc_read_table_function_module: object
+    :param partition_option: The partition mechanism that will be used for SAP
+     table read in parallel. Possible values include: 'None', 'PartitionOnInt',
+     'PartitionOnCalendarYear', 'PartitionOnCalendarMonth',
+     'PartitionOnCalendarDate', 'PartitionOnTime'
+    :type partition_option: str or
+     ~azure.mgmt.datafactory.models.SapTablePartitionOption
+    :param partition_settings: The settings that will be leveraged for SAP
+     table source partitioning.
+ :type partition_settings: + ~azure.mgmt.datafactory.models.SapTablePartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'row_count': {'key': 'rowCount', 'type': 'object'}, + 'row_skips': {'key': 'rowSkips', 'type': 'object'}, + 'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'}, + 'rfc_table_options': {'key': 'rfcTableOptions', 'type': 'object'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, + } + + def __init__(self, **kwargs): + super(SapTableSource, self).__init__(**kwargs) + self.row_count = kwargs.get('row_count', None) + self.row_skips = kwargs.get('row_skips', None) + self.rfc_table_fields = kwargs.get('rfc_table_fields', None) + self.rfc_table_options = kwargs.get('rfc_table_options', None) + self.batch_size = kwargs.get('batch_size', None) + self.custom_rfc_read_table_function_module = kwargs.get('custom_rfc_read_table_function_module', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) + self.type = 'SapTableSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source_py3.py new file mode 100644 index 000000000000..bed7bbb93932 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source_py3.py @@ -0,0 +1,100 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SapTableSource(CopySource): + """A copy activity source for SAP Table source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
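A sketch pairing SapTableSource with the SapTablePartitionSettings model above, so a copy activity could read ranges of an integer column in parallel; the column name, bounds, and filter are hypothetical:

    from azure.mgmt.datafactory.models import (
        SapTablePartitionSettings, SapTableSource)

    partition_settings = SapTablePartitionSettings(
        partition_column_name='MATNR',   # hypothetical integer column
        partition_lower_bound='1',
        partition_upper_bound='100000',
        max_partitions_number=10,
    )

    source = SapTableSource(
        row_count=10000,
        rfc_table_options="MTART EQ 'FERT'",  # example filter only
        partition_option='PartitionOnInt',
        partition_settings=partition_settings,
    )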
+    :type type: str
+    :param row_count: The number of rows to be retrieved. Type: integer (or
+     Expression with resultType integer).
+    :type row_count: object
+    :param row_skips: The number of rows that will be skipped. Type: integer
+     (or Expression with resultType integer).
+    :type row_skips: object
+    :param rfc_table_fields: The fields of the SAP table that will be
+     retrieved. For example, column0, column1. Type: string (or Expression with
+     resultType string).
+    :type rfc_table_fields: object
+    :param rfc_table_options: The options for the filtering of the SAP Table.
+     For example, COLUMN0 EQ SOME VALUE. Type: string (or Expression with
+     resultType string).
+    :type rfc_table_options: object
+    :param batch_size: Specifies the maximum number of rows that will be
+     retrieved at a time when retrieving data from SAP Table. Type: integer (or
+     Expression with resultType integer).
+    :type batch_size: object
+    :param custom_rfc_read_table_function_module: Specifies the custom RFC
+     function module that will be used to read data from SAP Table. Type:
+     string (or Expression with resultType string).
+    :type custom_rfc_read_table_function_module: object
+    :param partition_option: The partition mechanism that will be used for SAP
+     table read in parallel. Possible values include: 'None', 'PartitionOnInt',
+     'PartitionOnCalendarYear', 'PartitionOnCalendarMonth',
+     'PartitionOnCalendarDate', 'PartitionOnTime'
+    :type partition_option: str or
+     ~azure.mgmt.datafactory.models.SapTablePartitionOption
+    :param partition_settings: The settings that will be leveraged for SAP
+     table source partitioning.
+    :type partition_settings:
+     ~azure.mgmt.datafactory.models.SapTablePartitionSettings
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'row_count': {'key': 'rowCount', 'type': 'object'},
+        'row_skips': {'key': 'rowSkips', 'type': 'object'},
+        'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'},
+        'rfc_table_options': {'key': 'rfcTableOptions', 'type': 'object'},
+        'batch_size': {'key': 'batchSize', 'type': 'object'},
+        'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'},
+        'partition_option': {'key': 'partitionOption', 'type': 'str'},
+        'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'},
+    }
+
+    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, row_count=None, row_skips=None, rfc_table_fields=None, rfc_table_options=None, batch_size=None, custom_rfc_read_table_function_module=None, partition_option=None, partition_settings=None, **kwargs) -> None:
+        super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.row_count = row_count
+        self.row_skips = row_skips
+        self.rfc_table_fields = rfc_table_fields
+        self.rfc_table_options = rfc_table_options
+        self.batch_size = batch_size
+        self.custom_rfc_read_table_function_module = custom_rfc_read_table_function_module
+        self.partition_option = partition_option
+
self.partition_settings = partition_settings + self.type = 'SapTableSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger.py new file mode 100644 index 000000000000..b9ea331b8c6e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger.py @@ -0,0 +1,64 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .multiple_pipeline_trigger import MultiplePipelineTrigger + + +class ScheduleTrigger(MultiplePipelineTrigger): + """Trigger that creates pipeline runs periodically, on schedule. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param pipelines: Pipelines that need to be started. + :type pipelines: + list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :param recurrence: Required. Recurrence schedule configuration. + :type recurrence: ~azure.mgmt.datafactory.models.ScheduleTriggerRecurrence + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'recurrence': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'recurrence': {'key': 'typeProperties.recurrence', 'type': 'ScheduleTriggerRecurrence'}, + } + + def __init__(self, **kwargs): + super(ScheduleTrigger, self).__init__(**kwargs) + self.recurrence = kwargs.get('recurrence', None) + self.type = 'ScheduleTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_py3.py new file mode 100644 index 000000000000..f13f01c7fa13 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_py3.py @@ -0,0 +1,64 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .multiple_pipeline_trigger_py3 import MultiplePipelineTrigger + + +class ScheduleTrigger(MultiplePipelineTrigger): + """Trigger that creates pipeline runs periodically, on schedule. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param pipelines: Pipelines that need to be started. + :type pipelines: + list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :param recurrence: Required. Recurrence schedule configuration. + :type recurrence: ~azure.mgmt.datafactory.models.ScheduleTriggerRecurrence + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'recurrence': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'recurrence': {'key': 'typeProperties.recurrence', 'type': 'ScheduleTriggerRecurrence'}, + } + + def __init__(self, *, recurrence, additional_properties=None, description: str=None, annotations=None, pipelines=None, **kwargs) -> None: + super(ScheduleTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) + self.recurrence = recurrence + self.type = 'ScheduleTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence.py new file mode 100644 index 000000000000..85408c45547b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence.py @@ -0,0 +1,54 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ScheduleTriggerRecurrence(Model): + """The workflow trigger recurrence. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param frequency: The frequency. Possible values include: 'NotSpecified', + 'Minute', 'Hour', 'Day', 'Week', 'Month', 'Year' + :type frequency: str or ~azure.mgmt.datafactory.models.RecurrenceFrequency + :param interval: The interval. + :type interval: int + :param start_time: The start time. + :type start_time: datetime + :param end_time: The end time. + :type end_time: datetime + :param time_zone: The time zone. + :type time_zone: str + :param schedule: The recurrence schedule. + :type schedule: ~azure.mgmt.datafactory.models.RecurrenceSchedule + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'frequency': {'key': 'frequency', 'type': 'str'}, + 'interval': {'key': 'interval', 'type': 'int'}, + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, + 'time_zone': {'key': 'timeZone', 'type': 'str'}, + 'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'}, + } + + def __init__(self, **kwargs): + super(ScheduleTriggerRecurrence, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.frequency = kwargs.get('frequency', None) + self.interval = kwargs.get('interval', None) + self.start_time = kwargs.get('start_time', None) + self.end_time = kwargs.get('end_time', None) + self.time_zone = kwargs.get('time_zone', None) + self.schedule = kwargs.get('schedule', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence_py3.py new file mode 100644 index 000000000000..a9b6eded7b96 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence_py3.py @@ -0,0 +1,54 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ScheduleTriggerRecurrence(Model): + """The workflow trigger recurrence. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param frequency: The frequency. Possible values include: 'NotSpecified', + 'Minute', 'Hour', 'Day', 'Week', 'Month', 'Year' + :type frequency: str or ~azure.mgmt.datafactory.models.RecurrenceFrequency + :param interval: The interval. + :type interval: int + :param start_time: The start time. + :type start_time: datetime + :param end_time: The end time. + :type end_time: datetime + :param time_zone: The time zone. + :type time_zone: str + :param schedule: The recurrence schedule. 
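A sketch of a trigger that would run a placeholder pipeline every four hours; PipelineReference and TriggerPipelineReference are assumed to come from the same models package:

    from datetime import datetime
    from azure.mgmt.datafactory.models import (
        PipelineReference, ScheduleTrigger, ScheduleTriggerRecurrence,
        TriggerPipelineReference)

    recurrence = ScheduleTriggerRecurrence(
        frequency='Hour',
        interval=4,
        start_time=datetime(2019, 6, 7),
        time_zone='UTC',
    )

    trigger = ScheduleTrigger(
        recurrence=recurrence,
        pipelines=[TriggerPipelineReference(
            pipeline_reference=PipelineReference(
                reference_name='MyPipeline'))],  # placeholder pipeline name
    )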
+ :type schedule: ~azure.mgmt.datafactory.models.RecurrenceSchedule + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'frequency': {'key': 'frequency', 'type': 'str'}, + 'interval': {'key': 'interval', 'type': 'int'}, + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, + 'time_zone': {'key': 'timeZone', 'type': 'str'}, + 'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'}, + } + + def __init__(self, *, additional_properties=None, frequency=None, interval: int=None, start_time=None, end_time=None, time_zone: str=None, schedule=None, **kwargs) -> None: + super(ScheduleTriggerRecurrence, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.frequency = frequency + self.interval = interval + self.start_time = start_time + self.end_time = end_time + self.time_zone = time_zone + self.schedule = schedule diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action.py new file mode 100644 index 000000000000..50bc0131a5cf --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ScriptAction(Model): + """Custom script action to run on HDI ondemand cluster once it's up. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. The user provided name of the script action. + :type name: str + :param uri: Required. The URI for the script action. + :type uri: str + :param roles: Required. The node types on which the script action should + be executed. + :type roles: object + :param parameters: The parameters for the script action. + :type parameters: str + """ + + _validation = { + 'name': {'required': True}, + 'uri': {'required': True}, + 'roles': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'uri': {'key': 'uri', 'type': 'str'}, + 'roles': {'key': 'roles', 'type': 'object'}, + 'parameters': {'key': 'parameters', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ScriptAction, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.uri = kwargs.get('uri', None) + self.roles = kwargs.get('roles', None) + self.parameters = kwargs.get('parameters', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action_py3.py new file mode 100644 index 000000000000..c0e278073219 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action_py3.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ScriptAction(Model): + """Custom script action to run on HDI ondemand cluster once it's up. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. The user provided name of the script action. + :type name: str + :param uri: Required. The URI for the script action. + :type uri: str + :param roles: Required. The node types on which the script action should + be executed. + :type roles: object + :param parameters: The parameters for the script action. + :type parameters: str + """ + + _validation = { + 'name': {'required': True}, + 'uri': {'required': True}, + 'roles': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'uri': {'key': 'uri', 'type': 'str'}, + 'roles': {'key': 'roles', 'type': 'object'}, + 'parameters': {'key': 'parameters', 'type': 'str'}, + } + + def __init__(self, *, name: str, uri: str, roles, parameters: str=None, **kwargs) -> None: + super(ScriptAction, self).__init__(**kwargs) + self.name = name + self.uri = uri + self.roles = roles + self.parameters = parameters diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base.py new file mode 100644 index 000000000000..3d9475dd4382 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SecretBase(Model): + """The base definition of a secret type. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SecureString, AzureKeyVaultSecretReference + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SecureString': 'SecureString', 'AzureKeyVaultSecret': 'AzureKeyVaultSecretReference'} + } + + def __init__(self, **kwargs): + super(SecretBase, self).__init__(**kwargs) + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base_py3.py new file mode 100644 index 000000000000..29403e61b245 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base_py3.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
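A sketch of a ScriptAction; because roles is typed as object, a plain string such as 'headnode' passes through, and the URI and names below are placeholders:

    from azure.mgmt.datafactory.models import ScriptAction

    script_action = ScriptAction(
        name='install-deps',
        uri='https://example.blob.core.windows.net/scripts/install.sh',
        roles='headnode',       # node types the script runs on
        parameters='--quiet',   # optional arguments for the script
    )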
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SecretBase(Model): + """The base definition of a secret type. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SecureString, AzureKeyVaultSecretReference + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SecureString': 'SecureString', 'AzureKeyVaultSecret': 'AzureKeyVaultSecretReference'} + } + + def __init__(self, **kwargs) -> None: + super(SecretBase, self).__init__(**kwargs) + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string.py new file mode 100644 index 000000000000..bec430fdf8a4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .secret_base import SecretBase + + +class SecureString(SecretBase): + """Azure Data Factory secure string definition. The string value will be + masked with asterisks '*' during Get or List API calls. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param value: Required. Value of secure string. + :type value: str + """ + + _validation = { + 'type': {'required': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SecureString, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.type = 'SecureString' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string_py3.py new file mode 100644 index 000000000000..d7ebd5e13e78 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string_py3.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
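How the 'type' discriminator behaves in practice, sketched with msrest's Model.serialize(); the expected payload shape follows from the attribute map above:

    from azure.mgmt.datafactory.models import SecureString

    secret = SecureString(value='p@ssw0rd')
    # The constant 'type' set in __init__ is what _subtype_map keys on
    # when a payload is deserialized back into the right subclass.
    print(secret.serialize())
    # expected: {'type': 'SecureString', 'value': 'p@ssw0rd'}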
+# -------------------------------------------------------------------------- + +from .secret_base_py3 import SecretBase + + +class SecureString(SecretBase): + """Azure Data Factory secure string definition. The string value will be + masked with asterisks '*' during Get or List API calls. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param value: Required. Value of secure string. + :type value: str + """ + + _validation = { + 'type': {'required': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__(self, *, value: str, **kwargs) -> None: + super(SecureString, self).__init__(**kwargs) + self.value = value + self.type = 'SecureString' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference.py new file mode 100644 index 000000000000..fc56f8e8a799 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dependency_reference import DependencyReference + + +class SelfDependencyTumblingWindowTriggerReference(DependencyReference): + """Self referenced tumbling window trigger dependency. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param offset: Required. Timespan applied to the start time of a tumbling + window when evaluating dependency. + :type offset: str + :param size: The size of the window when evaluating the dependency. If + undefined the frequency of the tumbling window will be used. 
+ :type size: str + """ + + _validation = { + 'type': {'required': True}, + 'offset': {'required': True, 'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'offset': {'key': 'offset', 'type': 'str'}, + 'size': {'key': 'size', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SelfDependencyTumblingWindowTriggerReference, self).__init__(**kwargs) + self.offset = kwargs.get('offset', None) + self.size = kwargs.get('size', None) + self.type = 'SelfDependencyTumblingWindowTriggerReference' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference_py3.py new file mode 100644 index 000000000000..1dd1e575c2e8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dependency_reference_py3 import DependencyReference + + +class SelfDependencyTumblingWindowTriggerReference(DependencyReference): + """Self referenced tumbling window trigger dependency. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param offset: Required. Timespan applied to the start time of a tumbling + window when evaluating dependency. + :type offset: str + :param size: The size of the window when evaluating the dependency. If + undefined the frequency of the tumbling window will be used. 
+ :type size: str + """ + + _validation = { + 'type': {'required': True}, + 'offset': {'required': True, 'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'offset': {'key': 'offset', 'type': 'str'}, + 'size': {'key': 'size', 'type': 'str'}, + } + + def __init__(self, *, offset: str, size: str=None, **kwargs) -> None: + super(SelfDependencyTumblingWindowTriggerReference, self).__init__(**kwargs) + self.offset = offset + self.size = size + self.type = 'SelfDependencyTumblingWindowTriggerReference' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime.py new file mode 100644 index 000000000000..20744f02306d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .integration_runtime import IntegrationRuntime + + +class SelfHostedIntegrationRuntime(IntegrationRuntime): + """Self-hosted integration runtime. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Integration runtime description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param linked_info: + :type linked_info: + ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeType + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_info': {'key': 'typeProperties.linkedInfo', 'type': 'LinkedIntegrationRuntimeType'}, + } + + def __init__(self, **kwargs): + super(SelfHostedIntegrationRuntime, self).__init__(**kwargs) + self.linked_info = kwargs.get('linked_info', None) + self.type = 'SelfHosted' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node.py new file mode 100644 index 000000000000..1491a80dc19a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node.py @@ -0,0 +1,139 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
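+#
+# Read sketch (operation name per this SDK's IntegrationRuntimeNodesOperations;
+# resource names are illustrative). Every field below except
+# additional_properties is read-only and populated by the service:
+#
+#     node = adf_client.integration_runtime_nodes.get(
+#         'my-rg', 'my-factory', 'mySelfHostedIR', 'Node_1')
+#     print(node.status, node.version, node.last_connect_time)
+#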
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SelfHostedIntegrationRuntimeNode(Model): + """Properties of Self-hosted integration runtime node. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar node_name: Name of the integration runtime node. + :vartype node_name: str + :ivar machine_name: Machine name of the integration runtime node. + :vartype machine_name: str + :ivar host_service_uri: URI for the host machine of the integration + runtime. + :vartype host_service_uri: str + :ivar status: Status of the integration runtime node. Possible values + include: 'NeedRegistration', 'Online', 'Limited', 'Offline', 'Upgrading', + 'Initializing', 'InitializeFailed' + :vartype status: str or + ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNodeStatus + :ivar capabilities: The integration runtime capabilities dictionary + :vartype capabilities: dict[str, str] + :ivar version_status: Status of the integration runtime node version. + :vartype version_status: str + :ivar version: Version of the integration runtime node. + :vartype version: str + :ivar register_time: The time at which the integration runtime node was + registered in ISO8601 format. + :vartype register_time: datetime + :ivar last_connect_time: The most recent time at which the integration + runtime was connected in ISO8601 format. + :vartype last_connect_time: datetime + :ivar expiry_time: The time at which the integration runtime will expire + in ISO8601 format. + :vartype expiry_time: datetime + :ivar last_start_time: The time the node last started up. + :vartype last_start_time: datetime + :ivar last_stop_time: The integration runtime node last stop time. + :vartype last_stop_time: datetime + :ivar last_update_result: The result of the last integration runtime node + update. Possible values include: 'None', 'Succeed', 'Fail' + :vartype last_update_result: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeUpdateResult + :ivar last_start_update_time: The last time for the integration runtime + node update start. + :vartype last_start_update_time: datetime + :ivar last_end_update_time: The last time for the integration runtime node + update end. + :vartype last_end_update_time: datetime + :ivar is_active_dispatcher: Indicates whether this node is the active + dispatcher for integration runtime requests. + :vartype is_active_dispatcher: bool + :ivar concurrent_jobs_limit: Maximum concurrent jobs on the integration + runtime node. + :vartype concurrent_jobs_limit: int + :ivar max_concurrent_jobs: The maximum concurrent jobs in this integration + runtime. 
+ :vartype max_concurrent_jobs: int + """ + + _validation = { + 'node_name': {'readonly': True}, + 'machine_name': {'readonly': True}, + 'host_service_uri': {'readonly': True}, + 'status': {'readonly': True}, + 'capabilities': {'readonly': True}, + 'version_status': {'readonly': True}, + 'version': {'readonly': True}, + 'register_time': {'readonly': True}, + 'last_connect_time': {'readonly': True}, + 'expiry_time': {'readonly': True}, + 'last_start_time': {'readonly': True}, + 'last_stop_time': {'readonly': True}, + 'last_update_result': {'readonly': True}, + 'last_start_update_time': {'readonly': True}, + 'last_end_update_time': {'readonly': True}, + 'is_active_dispatcher': {'readonly': True}, + 'concurrent_jobs_limit': {'readonly': True}, + 'max_concurrent_jobs': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'node_name': {'key': 'nodeName', 'type': 'str'}, + 'machine_name': {'key': 'machineName', 'type': 'str'}, + 'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'capabilities': {'key': 'capabilities', 'type': '{str}'}, + 'version_status': {'key': 'versionStatus', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + 'register_time': {'key': 'registerTime', 'type': 'iso-8601'}, + 'last_connect_time': {'key': 'lastConnectTime', 'type': 'iso-8601'}, + 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, + 'last_start_time': {'key': 'lastStartTime', 'type': 'iso-8601'}, + 'last_stop_time': {'key': 'lastStopTime', 'type': 'iso-8601'}, + 'last_update_result': {'key': 'lastUpdateResult', 'type': 'str'}, + 'last_start_update_time': {'key': 'lastStartUpdateTime', 'type': 'iso-8601'}, + 'last_end_update_time': {'key': 'lastEndUpdateTime', 'type': 'iso-8601'}, + 'is_active_dispatcher': {'key': 'isActiveDispatcher', 'type': 'bool'}, + 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, + 'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(SelfHostedIntegrationRuntimeNode, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.node_name = None + self.machine_name = None + self.host_service_uri = None + self.status = None + self.capabilities = None + self.version_status = None + self.version = None + self.register_time = None + self.last_connect_time = None + self.expiry_time = None + self.last_start_time = None + self.last_stop_time = None + self.last_update_result = None + self.last_start_update_time = None + self.last_end_update_time = None + self.is_active_dispatcher = None + self.concurrent_jobs_limit = None + self.max_concurrent_jobs = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node_py3.py new file mode 100644 index 000000000000..59b703737a5d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node_py3.py @@ -0,0 +1,139 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SelfHostedIntegrationRuntimeNode(Model): + """Properties of Self-hosted integration runtime node. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar node_name: Name of the integration runtime node. + :vartype node_name: str + :ivar machine_name: Machine name of the integration runtime node. + :vartype machine_name: str + :ivar host_service_uri: URI for the host machine of the integration + runtime. + :vartype host_service_uri: str + :ivar status: Status of the integration runtime node. Possible values + include: 'NeedRegistration', 'Online', 'Limited', 'Offline', 'Upgrading', + 'Initializing', 'InitializeFailed' + :vartype status: str or + ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNodeStatus + :ivar capabilities: The integration runtime capabilities dictionary + :vartype capabilities: dict[str, str] + :ivar version_status: Status of the integration runtime node version. + :vartype version_status: str + :ivar version: Version of the integration runtime node. + :vartype version: str + :ivar register_time: The time at which the integration runtime node was + registered in ISO8601 format. + :vartype register_time: datetime + :ivar last_connect_time: The most recent time at which the integration + runtime was connected in ISO8601 format. + :vartype last_connect_time: datetime + :ivar expiry_time: The time at which the integration runtime will expire + in ISO8601 format. + :vartype expiry_time: datetime + :ivar last_start_time: The time the node last started up. + :vartype last_start_time: datetime + :ivar last_stop_time: The integration runtime node last stop time. + :vartype last_stop_time: datetime + :ivar last_update_result: The result of the last integration runtime node + update. Possible values include: 'None', 'Succeed', 'Fail' + :vartype last_update_result: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeUpdateResult + :ivar last_start_update_time: The last time for the integration runtime + node update start. + :vartype last_start_update_time: datetime + :ivar last_end_update_time: The last time for the integration runtime node + update end. + :vartype last_end_update_time: datetime + :ivar is_active_dispatcher: Indicates whether this node is the active + dispatcher for integration runtime requests. + :vartype is_active_dispatcher: bool + :ivar concurrent_jobs_limit: Maximum concurrent jobs on the integration + runtime node. + :vartype concurrent_jobs_limit: int + :ivar max_concurrent_jobs: The maximum concurrent jobs in this integration + runtime. 
+ :vartype max_concurrent_jobs: int + """ + + _validation = { + 'node_name': {'readonly': True}, + 'machine_name': {'readonly': True}, + 'host_service_uri': {'readonly': True}, + 'status': {'readonly': True}, + 'capabilities': {'readonly': True}, + 'version_status': {'readonly': True}, + 'version': {'readonly': True}, + 'register_time': {'readonly': True}, + 'last_connect_time': {'readonly': True}, + 'expiry_time': {'readonly': True}, + 'last_start_time': {'readonly': True}, + 'last_stop_time': {'readonly': True}, + 'last_update_result': {'readonly': True}, + 'last_start_update_time': {'readonly': True}, + 'last_end_update_time': {'readonly': True}, + 'is_active_dispatcher': {'readonly': True}, + 'concurrent_jobs_limit': {'readonly': True}, + 'max_concurrent_jobs': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'node_name': {'key': 'nodeName', 'type': 'str'}, + 'machine_name': {'key': 'machineName', 'type': 'str'}, + 'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'capabilities': {'key': 'capabilities', 'type': '{str}'}, + 'version_status': {'key': 'versionStatus', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + 'register_time': {'key': 'registerTime', 'type': 'iso-8601'}, + 'last_connect_time': {'key': 'lastConnectTime', 'type': 'iso-8601'}, + 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, + 'last_start_time': {'key': 'lastStartTime', 'type': 'iso-8601'}, + 'last_stop_time': {'key': 'lastStopTime', 'type': 'iso-8601'}, + 'last_update_result': {'key': 'lastUpdateResult', 'type': 'str'}, + 'last_start_update_time': {'key': 'lastStartUpdateTime', 'type': 'iso-8601'}, + 'last_end_update_time': {'key': 'lastEndUpdateTime', 'type': 'iso-8601'}, + 'is_active_dispatcher': {'key': 'isActiveDispatcher', 'type': 'bool'}, + 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, + 'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'}, + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(SelfHostedIntegrationRuntimeNode, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.node_name = None + self.machine_name = None + self.host_service_uri = None + self.status = None + self.capabilities = None + self.version_status = None + self.version = None + self.register_time = None + self.last_connect_time = None + self.expiry_time = None + self.last_start_time = None + self.last_stop_time = None + self.last_update_result = None + self.last_start_update_time = None + self.last_end_update_time = None + self.is_active_dispatcher = None + self.concurrent_jobs_limit = None + self.max_concurrent_jobs = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_py3.py new file mode 100644 index 000000000000..a25d04373849 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
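+#
+# Creation sketch (names illustrative): the self-hosted runtime is created
+# empty, then on-premises nodes are registered against it using the
+# authentication keys issued by the service:
+#
+#     ir = adf_client.integration_runtimes.create_or_update(
+#         'my-rg', 'my-factory', 'mySelfHostedIR',
+#         SelfHostedIntegrationRuntime(description='on-premises runtime'))
+#     keys = adf_client.integration_runtimes.list_auth_keys(
+#         'my-rg', 'my-factory', 'mySelfHostedIR')
+#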
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .integration_runtime_py3 import IntegrationRuntime + + +class SelfHostedIntegrationRuntime(IntegrationRuntime): + """Self-hosted integration runtime. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Integration runtime description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param linked_info: + :type linked_info: + ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeType + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_info': {'key': 'typeProperties.linkedInfo', 'type': 'LinkedIntegrationRuntimeType'}, + } + + def __init__(self, *, additional_properties=None, description: str=None, linked_info=None, **kwargs) -> None: + super(SelfHostedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description, **kwargs) + self.linked_info = linked_info + self.type = 'SelfHosted' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status.py new file mode 100644 index 000000000000..5dd9995987d9 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status.py @@ -0,0 +1,146 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .integration_runtime_status import IntegrationRuntimeStatus + + +class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): + """Self-hosted integration runtime status. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar data_factory_name: The data factory name which the integration + runtime belong to. + :vartype data_factory_name: str + :ivar state: The state of integration runtime. Possible values include: + 'Initial', 'Stopped', 'Started', 'Starting', 'Stopping', + 'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied' + :vartype state: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeState + :param type: Required. Constant filled by server. + :type type: str + :ivar create_time: The time at which the integration runtime was created, + in ISO8601 format. 
+ :vartype create_time: datetime + :ivar task_queue_id: The task queue id of the integration runtime. + :vartype task_queue_id: str + :ivar internal_channel_encryption: It is used to set the encryption mode + for node-node communication channel (when more than 2 self-hosted + integration runtime nodes exist). Possible values include: 'NotSet', + 'SslEncrypted', 'NotEncrypted' + :vartype internal_channel_encryption: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeInternalChannelEncryptionMode + :ivar version: Version of the integration runtime. + :vartype version: str + :param nodes: The list of nodes for this integration runtime. + :type nodes: + list[~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode] + :ivar scheduled_update_date: The date at which the integration runtime + will be scheduled to update, in ISO8601 format. + :vartype scheduled_update_date: datetime + :ivar update_delay_offset: The time in the date scheduled by service to + update the integration runtime, e.g., PT03H is 3 hours + :vartype update_delay_offset: str + :ivar local_time_zone_offset: The local time zone offset in hours. + :vartype local_time_zone_offset: str + :ivar capabilities: Object with additional information about integration + runtime capabilities. + :vartype capabilities: dict[str, str] + :ivar service_urls: The URLs for the services used in integration runtime + backend service. + :vartype service_urls: list[str] + :ivar auto_update: Whether Self-hosted integration runtime auto update has + been turned on. Possible values include: 'On', 'Off' + :vartype auto_update: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate + :ivar version_status: Status of the integration runtime version. + :vartype version_status: str + :param links: The list of linked integration runtimes that are created to + share with this integration runtime. + :type links: list[~azure.mgmt.datafactory.models.LinkedIntegrationRuntime] + :ivar pushed_version: The version that the integration runtime is going to + update to. + :vartype pushed_version: str + :ivar latest_version: The latest version on download center. + :vartype latest_version: str + :ivar auto_update_eta: The estimated time when the self-hosted integration + runtime will be updated. 
+ :vartype auto_update_eta: datetime + """ + + _validation = { + 'data_factory_name': {'readonly': True}, + 'state': {'readonly': True}, + 'type': {'required': True}, + 'create_time': {'readonly': True}, + 'task_queue_id': {'readonly': True}, + 'internal_channel_encryption': {'readonly': True}, + 'version': {'readonly': True}, + 'scheduled_update_date': {'readonly': True}, + 'update_delay_offset': {'readonly': True}, + 'local_time_zone_offset': {'readonly': True}, + 'capabilities': {'readonly': True}, + 'service_urls': {'readonly': True}, + 'auto_update': {'readonly': True}, + 'version_status': {'readonly': True}, + 'pushed_version': {'readonly': True}, + 'latest_version': {'readonly': True}, + 'auto_update_eta': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, + 'state': {'key': 'state', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'create_time': {'key': 'typeProperties.createTime', 'type': 'iso-8601'}, + 'task_queue_id': {'key': 'typeProperties.taskQueueId', 'type': 'str'}, + 'internal_channel_encryption': {'key': 'typeProperties.internalChannelEncryption', 'type': 'str'}, + 'version': {'key': 'typeProperties.version', 'type': 'str'}, + 'nodes': {'key': 'typeProperties.nodes', 'type': '[SelfHostedIntegrationRuntimeNode]'}, + 'scheduled_update_date': {'key': 'typeProperties.scheduledUpdateDate', 'type': 'iso-8601'}, + 'update_delay_offset': {'key': 'typeProperties.updateDelayOffset', 'type': 'str'}, + 'local_time_zone_offset': {'key': 'typeProperties.localTimeZoneOffset', 'type': 'str'}, + 'capabilities': {'key': 'typeProperties.capabilities', 'type': '{str}'}, + 'service_urls': {'key': 'typeProperties.serviceUrls', 'type': '[str]'}, + 'auto_update': {'key': 'typeProperties.autoUpdate', 'type': 'str'}, + 'version_status': {'key': 'typeProperties.versionStatus', 'type': 'str'}, + 'links': {'key': 'typeProperties.links', 'type': '[LinkedIntegrationRuntime]'}, + 'pushed_version': {'key': 'typeProperties.pushedVersion', 'type': 'str'}, + 'latest_version': {'key': 'typeProperties.latestVersion', 'type': 'str'}, + 'auto_update_eta': {'key': 'typeProperties.autoUpdateETA', 'type': 'iso-8601'}, + } + + def __init__(self, **kwargs): + super(SelfHostedIntegrationRuntimeStatus, self).__init__(**kwargs) + self.create_time = None + self.task_queue_id = None + self.internal_channel_encryption = None + self.version = None + self.nodes = kwargs.get('nodes', None) + self.scheduled_update_date = None + self.update_delay_offset = None + self.local_time_zone_offset = None + self.capabilities = None + self.service_urls = None + self.auto_update = None + self.version_status = None + self.links = kwargs.get('links', None) + self.pushed_version = None + self.latest_version = None + self.auto_update_eta = None + self.type = 'SelfHosted' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status_py3.py new file mode 100644 index 000000000000..acad7661fc15 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status_py3.py @@ -0,0 +1,146 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .integration_runtime_status_py3 import IntegrationRuntimeStatus + + +class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): + """Self-hosted integration runtime status. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar data_factory_name: The data factory name which the integration + runtime belong to. + :vartype data_factory_name: str + :ivar state: The state of integration runtime. Possible values include: + 'Initial', 'Stopped', 'Started', 'Starting', 'Stopping', + 'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied' + :vartype state: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeState + :param type: Required. Constant filled by server. + :type type: str + :ivar create_time: The time at which the integration runtime was created, + in ISO8601 format. + :vartype create_time: datetime + :ivar task_queue_id: The task queue id of the integration runtime. + :vartype task_queue_id: str + :ivar internal_channel_encryption: It is used to set the encryption mode + for node-node communication channel (when more than 2 self-hosted + integration runtime nodes exist). Possible values include: 'NotSet', + 'SslEncrypted', 'NotEncrypted' + :vartype internal_channel_encryption: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeInternalChannelEncryptionMode + :ivar version: Version of the integration runtime. + :vartype version: str + :param nodes: The list of nodes for this integration runtime. + :type nodes: + list[~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode] + :ivar scheduled_update_date: The date at which the integration runtime + will be scheduled to update, in ISO8601 format. + :vartype scheduled_update_date: datetime + :ivar update_delay_offset: The time in the date scheduled by service to + update the integration runtime, e.g., PT03H is 3 hours + :vartype update_delay_offset: str + :ivar local_time_zone_offset: The local time zone offset in hours. + :vartype local_time_zone_offset: str + :ivar capabilities: Object with additional information about integration + runtime capabilities. + :vartype capabilities: dict[str, str] + :ivar service_urls: The URLs for the services used in integration runtime + backend service. + :vartype service_urls: list[str] + :ivar auto_update: Whether Self-hosted integration runtime auto update has + been turned on. Possible values include: 'On', 'Off' + :vartype auto_update: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate + :ivar version_status: Status of the integration runtime version. + :vartype version_status: str + :param links: The list of linked integration runtimes that are created to + share with this integration runtime. + :type links: list[~azure.mgmt.datafactory.models.LinkedIntegrationRuntime] + :ivar pushed_version: The version that the integration runtime is going to + update to. + :vartype pushed_version: str + :ivar latest_version: The latest version on download center. 
+ :vartype latest_version: str + :ivar auto_update_eta: The estimated time when the self-hosted integration + runtime will be updated. + :vartype auto_update_eta: datetime + """ + + _validation = { + 'data_factory_name': {'readonly': True}, + 'state': {'readonly': True}, + 'type': {'required': True}, + 'create_time': {'readonly': True}, + 'task_queue_id': {'readonly': True}, + 'internal_channel_encryption': {'readonly': True}, + 'version': {'readonly': True}, + 'scheduled_update_date': {'readonly': True}, + 'update_delay_offset': {'readonly': True}, + 'local_time_zone_offset': {'readonly': True}, + 'capabilities': {'readonly': True}, + 'service_urls': {'readonly': True}, + 'auto_update': {'readonly': True}, + 'version_status': {'readonly': True}, + 'pushed_version': {'readonly': True}, + 'latest_version': {'readonly': True}, + 'auto_update_eta': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, + 'state': {'key': 'state', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'create_time': {'key': 'typeProperties.createTime', 'type': 'iso-8601'}, + 'task_queue_id': {'key': 'typeProperties.taskQueueId', 'type': 'str'}, + 'internal_channel_encryption': {'key': 'typeProperties.internalChannelEncryption', 'type': 'str'}, + 'version': {'key': 'typeProperties.version', 'type': 'str'}, + 'nodes': {'key': 'typeProperties.nodes', 'type': '[SelfHostedIntegrationRuntimeNode]'}, + 'scheduled_update_date': {'key': 'typeProperties.scheduledUpdateDate', 'type': 'iso-8601'}, + 'update_delay_offset': {'key': 'typeProperties.updateDelayOffset', 'type': 'str'}, + 'local_time_zone_offset': {'key': 'typeProperties.localTimeZoneOffset', 'type': 'str'}, + 'capabilities': {'key': 'typeProperties.capabilities', 'type': '{str}'}, + 'service_urls': {'key': 'typeProperties.serviceUrls', 'type': '[str]'}, + 'auto_update': {'key': 'typeProperties.autoUpdate', 'type': 'str'}, + 'version_status': {'key': 'typeProperties.versionStatus', 'type': 'str'}, + 'links': {'key': 'typeProperties.links', 'type': '[LinkedIntegrationRuntime]'}, + 'pushed_version': {'key': 'typeProperties.pushedVersion', 'type': 'str'}, + 'latest_version': {'key': 'typeProperties.latestVersion', 'type': 'str'}, + 'auto_update_eta': {'key': 'typeProperties.autoUpdateETA', 'type': 'iso-8601'}, + } + + def __init__(self, *, additional_properties=None, nodes=None, links=None, **kwargs) -> None: + super(SelfHostedIntegrationRuntimeStatus, self).__init__(additional_properties=additional_properties, **kwargs) + self.create_time = None + self.task_queue_id = None + self.internal_channel_encryption = None + self.version = None + self.nodes = nodes + self.scheduled_update_date = None + self.update_delay_offset = None + self.local_time_zone_offset = None + self.capabilities = None + self.service_urls = None + self.auto_update = None + self.version_status = None + self.links = links + self.pushed_version = None + self.latest_version = None + self.auto_update_eta = None + self.type = 'SelfHosted' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service.py new file mode 100644 index 000000000000..4d42f575e769 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service.py @@ -0,0 +1,106 @@ +# coding=utf-8 +# 
--------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class ServiceNowLinkedService(LinkedService):
+    """ServiceNow server linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param endpoint: Required. The endpoint of the ServiceNow server. (i.e.
+     <instance>.service-now.com)
+    :type endpoint: object
+    :param authentication_type: Required. The authentication type to use.
+     Possible values include: 'Basic', 'OAuth2'
+    :type authentication_type: str or
+     ~azure.mgmt.datafactory.models.ServiceNowAuthenticationType
+    :param username: The user name used to connect to the ServiceNow server
+     for Basic and OAuth2 authentication.
+    :type username: object
+    :param password: The password corresponding to the user name for Basic
+     and OAuth2 authentication.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param client_id: The client id for OAuth2 authentication.
+    :type client_id: object
+    :param client_secret: The client secret for OAuth2 authentication.
+    :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
+    :param use_encrypted_endpoints: Specifies whether the data source
+     endpoints are encrypted using HTTPS. The default value is true.
+    :type use_encrypted_endpoints: object
+    :param use_host_verification: Specifies whether to require the host name
+     in the server's certificate to match the host name of the server when
+     connecting over SSL. The default value is true.
+    :type use_host_verification: object
+    :param use_peer_verification: Specifies whether to verify the identity of
+     the server when connecting over SSL. The default value is true.
+    :type use_peer_verification: object
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ServiceNowLinkedService, self).__init__(**kwargs) + self.endpoint = kwargs.get('endpoint', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'ServiceNow' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service_py3.py new file mode 100644 index 000000000000..b9d166f241d6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service_py3.py @@ -0,0 +1,106 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class ServiceNowLinkedService(LinkedService): + """ServiceNow server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param endpoint: Required. The endpoint of the ServiceNow server. (i.e.
+     <instance>.service-now.com)
+    :type endpoint: object
+    :param authentication_type: Required. The authentication type to use.
+     Possible values include: 'Basic', 'OAuth2'
+    :type authentication_type: str or
+     ~azure.mgmt.datafactory.models.ServiceNowAuthenticationType
+    :param username: The user name used to connect to the ServiceNow server
+     for Basic and OAuth2 authentication.
+    :type username: object
+    :param password: The password corresponding to the user name for Basic
+     and OAuth2 authentication.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param client_id: The client id for OAuth2 authentication.
+    :type client_id: object
+    :param client_secret: The client secret for OAuth2 authentication.
+    :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
+    :param use_encrypted_endpoints: Specifies whether the data source
+     endpoints are encrypted using HTTPS. The default value is true.
+    :type use_encrypted_endpoints: object
+    :param use_host_verification: Specifies whether to require the host name
+     in the server's certificate to match the host name of the server when
+     connecting over SSL. The default value is true.
+    :type use_host_verification: object
+    :param use_peer_verification: Specifies whether to verify the identity of
+     the server when connecting over SSL. The default value is true.
+    :type use_peer_verification: object
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, endpoint, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, username=None, password=None, client_id=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(ServiceNowLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.endpoint = endpoint + self.authentication_type = authentication_type + self.username = username + self.password = password + self.client_id = client_id + self.client_secret = client_secret + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'ServiceNow' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset.py new file mode 100644 index 000000000000..a9821ba0fd10 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class ServiceNowObjectDataset(Dataset): + """ServiceNow server dataset. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ServiceNowObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'ServiceNowObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset_py3.py new file mode 100644 index 000000000000..fcd2fd537a31 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class ServiceNowObjectDataset(Dataset): + """ServiceNow server dataset. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(ServiceNowObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'ServiceNowObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source.py new file mode 100644 index 000000000000..16b10bb8de5e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
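+#
+# Pipeline sketch (dataset names and query are illustrative): the source
+# pairs with any supported sink inside a copy activity:
+#
+#     act = CopyActivity(
+#         name='CopyFromServiceNow',
+#         inputs=[DatasetReference(reference_name='serviceNowDataset')],
+#         outputs=[DatasetReference(reference_name='blobDataset')],
+#         source=ServiceNowSource(query='SELECT * FROM incident'),
+#         sink=BlobSink())
+#     adf_client.pipelines.create_or_update(
+#         'my-rg', 'my-factory', 'serviceNowCopyPipeline',
+#         PipelineResource(activities=[act]))
+#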
+# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class ServiceNowSource(CopySource): + """A copy activity ServiceNow server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ServiceNowSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'ServiceNowSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source_py3.py new file mode 100644 index 000000000000..20d1a64d04d3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class ServiceNowSource(CopySource): + """A copy activity ServiceNow server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). 
+ :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'ServiceNowSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity.py new file mode 100644 index 000000000000..e8dd1690862d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity.py @@ -0,0 +1,59 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity import ControlActivity + + +class SetVariableActivity(ControlActivity): + """Set value for a Variable. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param variable_name: Name of the variable whose value needs to be set. + :type variable_name: str + :param value: Value to be set. 
Could be a static value or Expression + :type value: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, + 'value': {'key': 'typeProperties.value', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SetVariableActivity, self).__init__(**kwargs) + self.variable_name = kwargs.get('variable_name', None) + self.value = kwargs.get('value', None) + self.type = 'SetVariable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity_py3.py new file mode 100644 index 000000000000..e045abee3dfb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity_py3.py @@ -0,0 +1,59 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity_py3 import ControlActivity + + +class SetVariableActivity(ControlActivity): + """Set value for a Variable. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param variable_name: Name of the variable whose value needs to be set. + :type variable_name: str + :param value: Value to be set. 
Could be a static value or Expression + :type value: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, + 'value': {'key': 'typeProperties.value', 'type': 'object'}, + } + + def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, variable_name: str=None, value=None, **kwargs) -> None: + super(SetVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.variable_name = variable_name + self.value = value + self.type = 'SetVariable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location.py new file mode 100644 index 000000000000..5b8fd4e42ba2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class SftpLocation(DatasetLocation): + """The location of SFTP dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). 
+ :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SftpLocation, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location_py3.py new file mode 100644 index 000000000000..c5e2feafa971 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class SftpLocation(DatasetLocation): + """The location of SFTP dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(SftpLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings.py new file mode 100644 index 000000000000..5e7b4faf77ad --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings import StoreReadSettings + + +class SftpReadSettings(StoreReadSettings): + """Sftp read settings. 
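A minimal sketch of the SftpLocation model defined above (again an aside, with hypothetical folder and file values); the type string is supplied by the caller, matching the "Type of dataset storage location" field:

    from azure.mgmt.datafactory.models import SftpLocation

    location = SftpLocation(
        type='SftpLocation',             # caller-supplied storage location type
        folder_path='outbound/reports',  # hypothetical folder on the SFTP server
        file_name='daily.csv',           # hypothetical file name
    )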
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Sftp wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SftpReadSettings, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings_py3.py new file mode 100644 index 000000000000..e6c27e3ad08a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings_py3.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings_py3 import StoreReadSettings + + +class SftpReadSettings(StoreReadSettings): + """Sftp read settings. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Sftp wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(SftpReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service.py new file mode 100644 index 000000000000..aa4c535fc514 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service.py @@ -0,0 +1,119 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
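A hedged illustration of the SftpReadSettings model just defined (not part of the patch); the wildcard and datetime window are placeholders, and the type string follows the same caller-supplied pattern as the location models:

    from azure.mgmt.datafactory.models import SftpReadSettings

    read_settings = SftpReadSettings(
        type='SftpReadSettings',
        recursive=True,                  # also read files under sub-folders
        wildcard_file_name='*.csv',      # hypothetical file filter
        modified_datetime_start='2019-06-01T00:00:00Z',
        modified_datetime_end='2019-06-07T00:00:00Z',
    )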
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class SftpServerLinkedService(LinkedService):
+ """A linked service for an SSH File Transfer Protocol (SFTP) server.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param host: Required. The SFTP server host name. Type: string (or
+ Expression with resultType string).
+ :type host: object
+ :param port: The TCP port number that the SFTP server uses to listen for
+ client connections. Default value is 22. Type: integer (or Expression with
+ resultType integer), minimum: 0.
+ :type port: object
+ :param authentication_type: The authentication type to be used to connect
+ to the SFTP server. Possible values include: 'Basic', 'SshPublicKey'
+ :type authentication_type: str or
+ ~azure.mgmt.datafactory.models.SftpAuthenticationType
+ :param user_name: The username used to log on to the SFTP server. Type:
+ string (or Expression with resultType string).
+ :type user_name: object
+ :param password: Password to log on to the SFTP server for Basic
+ authentication.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ :param private_key_path: The SSH private key file path for SshPublicKey
+ authentication. Only valid for on-premises copy. For on-premises copy with
+ SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent
+ should be specified. SSH private key should be OpenSSH format. Type:
+ string (or Expression with resultType string).
+ :type private_key_path: object
+ :param private_key_content: Base64 encoded SSH private key content for
+ SshPublicKey authentication. For on-premises copy with SshPublicKey
+ authentication, either PrivateKeyPath or PrivateKeyContent should be
+ specified. SSH private key should be OpenSSH format.
+ :type private_key_content: ~azure.mgmt.datafactory.models.SecretBase
+ :param pass_phrase: The password to decrypt the SSH private key if the SSH
+ private key is encrypted.
+ :type pass_phrase: ~azure.mgmt.datafactory.models.SecretBase
+ :param skip_host_key_validation: If true, skip the SSH host key
+ validation. Default value is false. Type: boolean (or Expression with
+ resultType boolean).
+ :type skip_host_key_validation: object
+ :param host_key_fingerprint: The host key fingerprint of the SFTP server.
+ When SkipHostKeyValidation is false, HostKeyFingerprint should be
+ specified. Type: string (or Expression with resultType string).
+ :type host_key_fingerprint: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'host': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'host': {'key': 'typeProperties.host', 'type': 'object'},
+ 'port': {'key': 'typeProperties.port', 'type': 'object'},
+ 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+ 'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
+ 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'private_key_path': {'key': 'typeProperties.privateKeyPath', 'type': 'object'},
+ 'private_key_content': {'key': 'typeProperties.privateKeyContent', 'type': 'SecretBase'},
+ 'pass_phrase': {'key': 'typeProperties.passPhrase', 'type': 'SecretBase'},
+ 'skip_host_key_validation': {'key': 'typeProperties.skipHostKeyValidation', 'type': 'object'},
+ 'host_key_fingerprint': {'key': 'typeProperties.hostKeyFingerprint', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(SftpServerLinkedService, self).__init__(**kwargs)
+ self.host = kwargs.get('host', None)
+ self.port = kwargs.get('port', None)
+ self.authentication_type = kwargs.get('authentication_type', None)
+ self.user_name = kwargs.get('user_name', None)
+ self.password = kwargs.get('password', None)
+ self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.private_key_path = kwargs.get('private_key_path', None)
+ self.private_key_content = kwargs.get('private_key_content', None)
+ self.pass_phrase = kwargs.get('pass_phrase', None)
+ self.skip_host_key_validation = kwargs.get('skip_host_key_validation', None)
+ self.host_key_fingerprint = kwargs.get('host_key_fingerprint', None)
+ self.type = 'Sftp'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service_py3.py
new file mode 100644
index 000000000000..7decd7781348
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service_py3.py
@@ -0,0 +1,119 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class SftpServerLinkedService(LinkedService):
+ """A linked service for an SSH File Transfer Protocol (SFTP) server.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param host: Required. The SFTP server host name. Type: string (or
+ Expression with resultType string).
+ :type host: object
+ :param port: The TCP port number that the SFTP server uses to listen for
+ client connections. Default value is 22. Type: integer (or Expression with
+ resultType integer), minimum: 0.
+ :type port: object
+ :param authentication_type: The authentication type to be used to connect
+ to the SFTP server. Possible values include: 'Basic', 'SshPublicKey'
+ :type authentication_type: str or
+ ~azure.mgmt.datafactory.models.SftpAuthenticationType
+ :param user_name: The username used to log on to the SFTP server. Type:
+ string (or Expression with resultType string).
+ :type user_name: object
+ :param password: Password to log on to the SFTP server for Basic
+ authentication.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ :param private_key_path: The SSH private key file path for SshPublicKey
+ authentication. Only valid for on-premises copy. For on-premises copy with
+ SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent
+ should be specified. SSH private key should be OpenSSH format. Type:
+ string (or Expression with resultType string).
+ :type private_key_path: object
+ :param private_key_content: Base64 encoded SSH private key content for
+ SshPublicKey authentication. For on-premises copy with SshPublicKey
+ authentication, either PrivateKeyPath or PrivateKeyContent should be
+ specified. SSH private key should be OpenSSH format.
+ :type private_key_content: ~azure.mgmt.datafactory.models.SecretBase
+ :param pass_phrase: The password to decrypt the SSH private key if the SSH
+ private key is encrypted.
+ :type pass_phrase: ~azure.mgmt.datafactory.models.SecretBase
+ :param skip_host_key_validation: If true, skip the SSH host key
+ validation. Default value is false. Type: boolean (or Expression with
+ resultType boolean).
+ :type skip_host_key_validation: object
+ :param host_key_fingerprint: The host key fingerprint of the SFTP server.
+ When SkipHostKeyValidation is false, HostKeyFingerprint should be
+ specified. Type: string (or Expression with resultType string).
+ :type host_key_fingerprint: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'private_key_path': {'key': 'typeProperties.privateKeyPath', 'type': 'object'}, + 'private_key_content': {'key': 'typeProperties.privateKeyContent', 'type': 'SecretBase'}, + 'pass_phrase': {'key': 'typeProperties.passPhrase', 'type': 'SecretBase'}, + 'skip_host_key_validation': {'key': 'typeProperties.skipHostKeyValidation', 'type': 'object'}, + 'host_key_fingerprint': {'key': 'typeProperties.hostKeyFingerprint', 'type': 'object'}, + } + + def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, private_key_path=None, private_key_content=None, pass_phrase=None, skip_host_key_validation=None, host_key_fingerprint=None, **kwargs) -> None: + super(SftpServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.port = port + self.authentication_type = authentication_type + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.private_key_path = private_key_path + self.private_key_content = private_key_content + self.pass_phrase = pass_phrase + self.skip_host_key_validation = skip_host_key_validation + self.host_key_fingerprint = host_key_fingerprint + self.type = 'Sftp' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service.py new file mode 100644 index 000000000000..ee5311dceb7a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class ShopifyLinkedService(LinkedService): + """Shopify Service linked service. + + All required parameters must be populated in order to send to Azure. 
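A construction sketch for the SftpServerLinkedService defined above, assuming SshPublicKey authentication; the host, user, key, and fingerprint values are placeholders, and SecureString is the in-line SecretBase implementation from this package:

    from azure.mgmt.datafactory.models import (
        SecureString,
        SftpServerLinkedService,
    )

    sftp_service = SftpServerLinkedService(
        host='sftp.example.com',         # placeholder host name
        port=22,
        authentication_type='SshPublicKey',
        user_name='deploy',
        private_key_content=SecureString(value='<base64 OpenSSH key>'),
        skip_host_key_validation=False,
        host_key_fingerprint='ssh-rsa 2048 <fingerprint>',
    )

Because skip_host_key_validation is left false here, the fingerprint must be supplied, as the docstring requires.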
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param host: Required. The endpoint of the Shopify server. (e.g.
+ mystore.myshopify.com)
+ :type host: object
+ :param access_token: The API access token that can be used to access
+ Shopify’s data. The token won't expire if it is in offline mode.
+ :type access_token: ~azure.mgmt.datafactory.models.SecretBase
+ :param use_encrypted_endpoints: Specifies whether the data source
+ endpoints are encrypted using HTTPS. The default value is true.
+ :type use_encrypted_endpoints: object
+ :param use_host_verification: Specifies whether to require the host name
+ in the server's certificate to match the host name of the server when
+ connecting over SSL. The default value is true.
+ :type use_host_verification: object
+ :param use_peer_verification: Specifies whether to verify the identity of
+ the server when connecting over SSL. The default value is true.
+ :type use_peer_verification: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'host': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'host': {'key': 'typeProperties.host', 'type': 'object'},
+ 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'},
+ 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
+ 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
+ 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(ShopifyLinkedService, self).__init__(**kwargs)
+ self.host = kwargs.get('host', None)
+ self.access_token = kwargs.get('access_token', None)
+ self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None)
+ self.use_host_verification = kwargs.get('use_host_verification', None)
+ self.use_peer_verification = kwargs.get('use_peer_verification', None)
+ self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.type = 'Shopify'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service_py3.py
new file mode 100644
index 000000000000..ea6189277552
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service_py3.py
@@ -0,0 +1,86 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class ShopifyLinkedService(LinkedService):
+ """Shopify Service linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param host: Required. The endpoint of the Shopify server. (e.g.
+ mystore.myshopify.com)
+ :type host: object
+ :param access_token: The API access token that can be used to access
+ Shopify’s data. The token won't expire if it is in offline mode.
+ :type access_token: ~azure.mgmt.datafactory.models.SecretBase
+ :param use_encrypted_endpoints: Specifies whether the data source
+ endpoints are encrypted using HTTPS. The default value is true.
+ :type use_encrypted_endpoints: object
+ :param use_host_verification: Specifies whether to require the host name
+ in the server's certificate to match the host name of the server when
+ connecting over SSL. The default value is true.
+ :type use_host_verification: object
+ :param use_peer_verification: Specifies whether to verify the identity of
+ the server when connecting over SSL. The default value is true.
+ :type use_peer_verification: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'host': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'host': {'key': 'typeProperties.host', 'type': 'object'},
+ 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'},
+ 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
+ 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
+ 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None:
+ super(ShopifyLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+ self.host = host
+ self.access_token = access_token
+ self.use_encrypted_endpoints = use_encrypted_endpoints
+ self.use_host_verification = use_host_verification
+ self.use_peer_verification = use_peer_verification
+ self.encrypted_credential = encrypted_credential
+ self.type = 'Shopify'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset.py
new file mode 100644
index 000000000000..ab3e475b9c97
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
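A minimal sketch of the ShopifyLinkedService defined above (the store endpoint and token are placeholders):

    from azure.mgmt.datafactory.models import SecureString, ShopifyLinkedService

    shopify_service = ShopifyLinkedService(
        host='mystore.myshopify.com',    # placeholder store endpoint
        access_token=SecureString(value='<API access token>'),
        use_encrypted_endpoints=True,    # the documented default
    )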
+# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class ShopifyObjectDataset(Dataset): + """Shopify Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ShopifyObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'ShopifyObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset_py3.py new file mode 100644 index 000000000000..98b9c43c21e8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class ShopifyObjectDataset(Dataset): + """Shopify Service dataset. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(ShopifyObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'ShopifyObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source.py new file mode 100644 index 000000000000..d4596976d459 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
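A matching dataset sketch for the ShopifyObjectDataset above (names are hypothetical):

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        ShopifyObjectDataset,
    )

    products = ShopifyObjectDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='ShopifyLinkedService'),
        table_name='Products',           # hypothetical Shopify object
    )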
+# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class ShopifySource(CopySource): + """A copy activity Shopify Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ShopifySource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'ShopifySource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source_py3.py new file mode 100644 index 000000000000..6b56edd62904 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class ShopifySource(CopySource): + """A copy activity Shopify Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). 
+ :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'ShopifySource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service.py new file mode 100644 index 000000000000..4f9ab49a7bba --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service.py @@ -0,0 +1,131 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class SparkLinkedService(LinkedService): + """Spark Server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. IP address or host name of the Spark server + :type host: object + :param port: Required. The TCP port that the Spark server uses to listen + for client connections. + :type port: object + :param server_type: The type of Spark server. Possible values include: + 'SharkServer', 'SharkServer2', 'SparkThriftServer' + :type server_type: str or ~azure.mgmt.datafactory.models.SparkServerType + :param thrift_transport_protocol: The transport protocol to use in the + Thrift layer. 
Possible values include: 'Binary', 'SASL', 'HTTP ' + :type thrift_transport_protocol: str or + ~azure.mgmt.datafactory.models.SparkThriftTransportProtocol + :param authentication_type: Required. The authentication method used to + access the Spark server. Possible values include: 'Anonymous', 'Username', + 'UsernameAndPassword', 'WindowsAzureHDInsightService' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.SparkAuthenticationType + :param username: The user name that you use to access Spark Server. + :type username: object + :param password: The password corresponding to the user name that you + provided in the Username field + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param http_path: The partial URL corresponding to the Spark server. + :type http_path: object + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a + CA-issued SSL certificate name to match the host name of the server when + connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'port': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, + 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SparkLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.port = kwargs.get('port', None) + self.server_type = kwargs.get('server_type', None) + self.thrift_transport_protocol = kwargs.get('thrift_transport_protocol', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.http_path = kwargs.get('http_path', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) + self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Spark' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service_py3.py new file mode 100644 index 000000000000..f6433b6ab187 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service_py3.py @@ -0,0 +1,131 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SparkLinkedService(LinkedService): + """Spark Server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. IP address or host name of the Spark server + :type host: object + :param port: Required. The TCP port that the Spark server uses to listen + for client connections. + :type port: object + :param server_type: The type of Spark server. Possible values include: + 'SharkServer', 'SharkServer2', 'SparkThriftServer' + :type server_type: str or ~azure.mgmt.datafactory.models.SparkServerType + :param thrift_transport_protocol: The transport protocol to use in the + Thrift layer. Possible values include: 'Binary', 'SASL', 'HTTP ' + :type thrift_transport_protocol: str or + ~azure.mgmt.datafactory.models.SparkThriftTransportProtocol + :param authentication_type: Required. The authentication method used to + access the Spark server. Possible values include: 'Anonymous', 'Username', + 'UsernameAndPassword', 'WindowsAzureHDInsightService' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.SparkAuthenticationType + :param username: The user name that you use to access Spark Server. + :type username: object + :param password: The password corresponding to the user name that you + provided in the Username field + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param http_path: The partial URL corresponding to the Spark server. + :type http_path: object + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a + CA-issued SSL certificate name to match the host name of the server when + connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. 
Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'port': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, + 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, port, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, server_type=None, thrift_transport_protocol=None, username=None, password=None, http_path=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: + super(SparkLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.port = port + self.server_type = server_type + self.thrift_transport_protocol = thrift_transport_protocol + self.authentication_type = authentication_type + self.username = username + self.password = password + self.http_path = http_path + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.encrypted_credential = encrypted_credential + self.type = 'Spark' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset.py new file mode 100644 index 000000000000..bdbdf067e1ea --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class SparkObjectDataset(Dataset): + """Spark Server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The table name of the Spark. Type: string (or Expression + with resultType string). + :type table: object + :param spark_object_dataset_schema: The schema name of the Spark. Type: + string (or Expression with resultType string). 
+ :type spark_object_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'spark_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SparkObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.spark_object_dataset_schema = kwargs.get('spark_object_dataset_schema', None) + self.type = 'SparkObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset_py3.py new file mode 100644 index 000000000000..afe383951f1c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SparkObjectDataset(Dataset): + """Spark Server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. 
+ :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The table name of the Spark. Type: string (or Expression + with resultType string). + :type table: object + :param spark_object_dataset_schema: The schema name of the Spark. Type: + string (or Expression with resultType string). + :type spark_object_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'spark_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, spark_object_dataset_schema=None, **kwargs) -> None: + super(SparkObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.table = table + self.spark_object_dataset_schema = spark_object_dataset_schema + self.type = 'SparkObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source.py new file mode 100644 index 000000000000..6d670c1c6b2a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SparkSource(CopySource): + """A copy activity Spark Server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
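A sketch of the new SparkObjectDataset, assuming a linked service registered under the hypothetical name 'SparkLS'. Note that table_name is being retired in favor of the table and schema pair; the Python attribute is spark_object_dataset_schema because the base Dataset class already exposes schema for column metadata, but it still serializes to typeProperties.schema.

```python
from azure.mgmt.datafactory.models import LinkedServiceReference, SparkObjectDataset

dataset = SparkObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='SparkLS'),
    table='trips',                          # replaces the retired table_name
    spark_object_dataset_schema='default',  # wire key: typeProperties.schema
)
```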
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SparkSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'SparkSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source_py3.py new file mode 100644 index 000000000000..8da01b0cd823 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SparkSource(CopySource): + """A copy activity Spark Server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'SparkSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink.py new file mode 100644 index 000000000000..8fe57eaa3595 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink.py @@ -0,0 +1,83 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class SqlDWSink(CopySink): + """A copy activity SQL Data Warehouse sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param allow_poly_base: Indicates to use PolyBase to copy data into SQL + Data Warehouse when applicable. Type: boolean (or Expression with + resultType boolean). + :type allow_poly_base: object + :param poly_base_settings: Specifies PolyBase-related settings when + allowPolyBase is true. 
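A minimal sketch of the SparkSource defined above, the object a copy activity's source property would carry. The query text is a hypothetical example; per the docstring it could equally be an ADF Expression rather than a literal string.

```python
from azure.mgmt.datafactory.models import SparkSource

source = SparkSource(query='SELECT * FROM default.trips WHERE year = 2019')
assert source.type == 'SparkSource'
```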
+ :type poly_base_settings: ~azure.mgmt.datafactory.models.PolybaseSettings + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, + 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlDWSink, self).__init__(**kwargs) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.allow_poly_base = kwargs.get('allow_poly_base', None) + self.poly_base_settings = kwargs.get('poly_base_settings', None) + self.table_option = kwargs.get('table_option', None) + self.type = 'SqlDWSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink_py3.py new file mode 100644 index 000000000000..6f9241560e59 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink_py3.py @@ -0,0 +1,83 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class SqlDWSink(CopySink): + """A copy activity SQL Data Warehouse sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). 
+ :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param allow_poly_base: Indicates to use PolyBase to copy data into SQL + Data Warehouse when applicable. Type: boolean (or Expression with + resultType boolean). + :type allow_poly_base: object + :param poly_base_settings: Specifies PolyBase-related settings when + allowPolyBase is true. + :type poly_base_settings: ~azure.mgmt.datafactory.models.PolybaseSettings + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, + 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, allow_poly_base=None, poly_base_settings=None, table_option=None, **kwargs) -> None: + super(SqlDWSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.pre_copy_script = pre_copy_script + self.allow_poly_base = allow_poly_base + self.poly_base_settings = poly_base_settings + self.table_option = table_option + self.type = 'SqlDWSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source.py new file mode 100644 index 000000000000..1a020672f7c2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SqlDWSource(CopySource): + """A copy activity SQL Data Warehouse source. + + All required parameters must be populated in order to send to Azure. 
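A sketch of SqlDWSink configured for a PolyBase load. The staging table and script are placeholders, and the PolybaseSettings field names are assumed from the existing model of that name in this SDK rather than from this patch.

```python
from azure.mgmt.datafactory.models import PolybaseSettings, SqlDWSink

sink = SqlDWSink(
    pre_copy_script='TRUNCATE TABLE staging.trips',  # hypothetical script
    allow_poly_base=True,
    poly_base_settings=PolybaseSettings(
        reject_type='percentage',    # tolerate up to 10% rejected rows
        reject_value=10.0,
        reject_sample_value=100,
        use_type_default=True,
    ),
    table_option='autoCreate',       # the only value supported today
)
```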
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or + Expression with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Data Warehouse source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + Type: object (or Expression with resultType object), itemType: + StoredProcedureParameter. + :type stored_procedure_parameters: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlDWSource, self).__init__(**kwargs) + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.type = 'SqlDWSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source_py3.py new file mode 100644 index 000000000000..ae8fe605024f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SqlDWSource(CopySource): + """A copy activity SQL Data Warehouse source. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or + Expression with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Data Warehouse source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + Type: object (or Expression with resultType object), itemType: + StoredProcedureParameter. + :type stored_procedure_parameters: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: + super(SqlDWSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.type = 'SqlDWSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink.py new file mode 100644 index 000000000000..6a11990fc720 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
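A sketch of the stored-procedure form of SqlDWSource. Unlike SqlMISource later in this patch, storedProcedureParameters is typed here as a raw object, so a plain dict mirroring the docstring's example shape is sufficient; the procedure and parameter names are hypothetical.

```python
from azure.mgmt.datafactory.models import SqlDWSource

source = SqlDWSource(
    sql_reader_stored_procedure_name='usp_get_trips',  # hypothetical proc
    stored_procedure_parameters={'Year': {'value': '2019', 'type': 'Int'}},
)
```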
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class SqlMISink(CopySink): + """A copy activity Azure SQL Managed Instance sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). 
+ :type table_option: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlMISink, self).__init__(**kwargs) + self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) + self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.table_option = kwargs.get('table_option', None) + self.type = 'SqlMISink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink_py3.py new file mode 100644 index 000000000000..16fe41cf47f7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink_py3.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class SqlMISink(CopySink): + """A copy activity Azure SQL Managed Instance sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None, **kwargs) -> None: + super(SqlMISink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name + self.sql_writer_table_type = sql_writer_table_type + self.pre_copy_script = pre_copy_script + self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.table_option = table_option + self.type = 'SqlMISink' diff --git 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source.py
new file mode 100644
index 000000000000..4d4db9b09281
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source.py
@@ -0,0 +1,73 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source import CopySource
+
+
+class SqlMISource(CopySource):
+    """A copy activity Azure SQL Managed Instance source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param sql_reader_query: SQL reader query. Type: string (or Expression
+     with resultType string).
+    :type sql_reader_query: object
+    :param sql_reader_stored_procedure_name: Name of the stored procedure for
+     an Azure SQL Managed Instance source. This cannot be used at the same time
+     as SqlReaderQuery. Type: string (or Expression with resultType string).
+    :type sql_reader_stored_procedure_name: object
+    :param stored_procedure_parameters: Value and type setting for stored
+     procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}".
+    :type stored_procedure_parameters: dict[str,
+     ~azure.mgmt.datafactory.models.StoredProcedureParameter]
+    :param produce_additional_types: Which additional types to produce.
+    :type produce_additional_types: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
+        'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'},
+        'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
+        'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(SqlMISource, self).__init__(**kwargs)
+        self.sql_reader_query = kwargs.get('sql_reader_query', None)
+        self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None)
+        self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None)
+        self.produce_additional_types = kwargs.get('produce_additional_types', None)
+        self.type = 'SqlMISource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source_py3.py
new file mode 100644
index 000000000000..952bc7b4da4f
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source_py3.py
@@ -0,0 +1,73 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source_py3 import CopySource
+
+
+class SqlMISource(CopySource):
+    """A copy activity Azure SQL Managed Instance source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param sql_reader_query: SQL reader query. Type: string (or Expression
+     with resultType string).
+    :type sql_reader_query: object
+    :param sql_reader_stored_procedure_name: Name of the stored procedure for
+     an Azure SQL Managed Instance source. This cannot be used at the same time
+     as SqlReaderQuery. Type: string (or Expression with resultType string).
+ :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. + :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: + super(SqlMISource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.produce_additional_types = produce_additional_types + self.type = 'SqlMISource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service.py new file mode 100644 index 000000000000..45d342212ea4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service.py @@ -0,0 +1,74 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class SqlServerLinkedService(LinkedService): + """SQL Server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
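A paired sketch for the new Managed Instance source and sink. Here storedProcedureParameters is a typed map, so each value is a StoredProcedureParameter model rather than a plain dict; procedure, table, and parameter names are placeholders, and 'Int' is assumed to be a valid StoredProcedureParameterType value.

```python
from azure.mgmt.datafactory.models import (
    SqlMISink,
    SqlMISource,
    StoredProcedureParameter,
)

source = SqlMISource(
    sql_reader_stored_procedure_name='usp_get_trips',   # hypothetical proc
    stored_procedure_parameters={
        'Year': StoredProcedureParameter(value='2019', type='Int'),
    },
)

sink = SqlMISink(
    table_option='autoCreate',
    pre_copy_script='DELETE FROM dbo.trips_staging',    # hypothetical script
)
```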
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param user_name: The on-premises Windows authentication user name. Type: + string (or Expression with resultType string). + :type user_name: object + :param password: The on-premises Windows authentication password. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlServerLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'SqlServer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service_py3.py new file mode 100644 index 000000000000..3eb8c5063dc1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service_py3.py @@ -0,0 +1,74 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SqlServerLinkedService(LinkedService): + """SQL Server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
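A minimal construction sketch for the SqlServerLinkedService defined above, assuming SecureString from this package as the secret wrapper; all connection values are placeholders:

from azure.mgmt.datafactory.models import SecureString, SqlServerLinkedService

linked_service = SqlServerLinkedService(
    # Required; a plain string is accepted since the field is typed as object.
    connection_string="Server=myserver;Database=mydb;Integrated Security=True;",
    user_name="CONTOSO\\adf_user",              # placeholder Windows account
    password=SecureString(value="<password>"),  # placeholder secret
)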
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param user_name: The on-premises Windows authentication user name. Type: + string (or Expression with resultType string). + :type user_name: object + :param password: The on-premises Windows authentication password. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(SqlServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'SqlServer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink.py new file mode 100644 index 000000000000..b3cbe492bbf2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class SqlServerSink(CopySink): + """A copy activity SQL server sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). 
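A brief sketch of constructing the SqlServerSink being defined here, using the table_option behavior described in the docstring (the script text is a placeholder):

from azure.mgmt.datafactory.models import SqlServerSink

sink = SqlServerSink(
    pre_copy_script="TRUNCATE TABLE dbo.Staging",  # placeholder pre-copy script
    table_option="autoCreate",  # the only supported value per the docstring
)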
+ :type table_option: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlServerSink, self).__init__(**kwargs) + self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) + self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.table_option = kwargs.get('table_option', None) + self.type = 'SqlServerSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink_py3.py new file mode 100644 index 000000000000..dd5daf2c5660 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink_py3.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class SqlServerSink(CopySink): + """A copy activity SQL server sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. 
Type: string (or Expression with
+     resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type sink_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the sink data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param sql_writer_stored_procedure_name: SQL writer stored procedure name.
+     Type: string (or Expression with resultType string).
+    :type sql_writer_stored_procedure_name: object
+    :param sql_writer_table_type: SQL writer table type. Type: string (or
+     Expression with resultType string).
+    :type sql_writer_table_type: object
+    :param pre_copy_script: SQL pre-copy script. Type: string (or Expression
+     with resultType string).
+    :type pre_copy_script: object
+    :param stored_procedure_parameters: SQL stored procedure parameters.
+    :type stored_procedure_parameters: dict[str,
+     ~azure.mgmt.datafactory.models.StoredProcedureParameter]
+    :param stored_procedure_table_type_parameter_name: The stored procedure
+     parameter name of the table type. Type: string (or Expression with
+     resultType string).
+    :type stored_procedure_table_type_parameter_name: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
+        'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
+        'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
+        'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
+        'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None, **kwargs) -> None:
+        super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name
+        self.sql_writer_table_type = sql_writer_table_type
+        self.pre_copy_script = pre_copy_script
+        self.stored_procedure_parameters = stored_procedure_parameters
+        self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name
+        self.table_option = table_option
+        self.type = 'SqlServerSink'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source.py
new file mode 100644
index 000000000000..f9aa011047ea
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source.py
@@ -0,0 +1,73 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source import CopySource
+
+
+class SqlServerSource(CopySource):
+    """A copy activity SQL Server source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param sql_reader_query: SQL reader query. Type: string (or Expression
+     with resultType string).
+    :type sql_reader_query: object
+    :param sql_reader_stored_procedure_name: Name of the stored procedure for
+     a SQL Server source. This cannot be used at the same time as
+     SqlReaderQuery. Type: string (or Expression with resultType string).
+    :type sql_reader_stored_procedure_name: object
+    :param stored_procedure_parameters: Value and type setting for stored
+     procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}".
+    :type stored_procedure_parameters: dict[str,
+     ~azure.mgmt.datafactory.models.StoredProcedureParameter]
+    :param produce_additional_types: Which additional types to produce.
+ :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlServerSource, self).__init__(**kwargs) + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.produce_additional_types = kwargs.get('produce_additional_types', None) + self.type = 'SqlServerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source_py3.py new file mode 100644 index 000000000000..27d12985e595 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SqlServerSource(CopySource): + """A copy activity SQL server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Database source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). 
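For illustration, the simplest SqlServerSource is a plain reader query (placeholder SQL; the stored-procedure alternative mirrors the SqlMISource sketch above):

from azure.mgmt.datafactory.models import SqlServerSource

source = SqlServerSource(
    sql_reader_query="SELECT OrderId, Amount FROM dbo.Orders",  # placeholder query
)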
+ :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. + :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: + super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.produce_additional_types = produce_additional_types + self.type = 'SqlServerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity.py new file mode 100644 index 000000000000..6f31002f32d1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity import ExecutionActivity + + +class SqlServerStoredProcedureActivity(ExecutionActivity): + """SQL stored procedure activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. 
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param stored_procedure_name: Required. Stored procedure name. Type: + string (or Expression with resultType string). + :type stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'stored_procedure_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'stored_procedure_name': {'key': 'typeProperties.storedProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'typeProperties.storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + } + + def __init__(self, **kwargs): + super(SqlServerStoredProcedureActivity, self).__init__(**kwargs) + self.stored_procedure_name = kwargs.get('stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.type = 'SqlServerStoredProcedure' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity_py3.py new file mode 100644 index 000000000000..477f0c6c775c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class SqlServerStoredProcedureActivity(ExecutionActivity): + """SQL stored procedure activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. 
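A construction sketch for the stored procedure activity defined above, assuming LinkedServiceReference and StoredProcedureParameter from this package; all names are placeholders:

from azure.mgmt.datafactory.models import (
    LinkedServiceReference,
    SqlServerStoredProcedureActivity,
    StoredProcedureParameter,
)

activity = SqlServerStoredProcedureActivity(
    name="RefreshAggregates",                       # placeholder activity name
    stored_procedure_name="usp_RefreshAggregates",  # placeholder procedure
    linked_service_name=LinkedServiceReference(reference_name="SqlServerLS"),
    stored_procedure_parameters={
        "RunDate": StoredProcedureParameter(value="2019-06-07", type="Date"),
    },
)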
+ :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param stored_procedure_name: Required. Stored procedure name. Type: + string (or Expression with resultType string). + :type stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'stored_procedure_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'stored_procedure_name': {'key': 'typeProperties.storedProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'typeProperties.storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + } + + def __init__(self, *, name: str, stored_procedure_name, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, stored_procedure_parameters=None, **kwargs) -> None: + super(SqlServerStoredProcedureActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.stored_procedure_name = stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.type = 'SqlServerStoredProcedure' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py new file mode 100644 index 000000000000..3998671ee8ae --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class SqlServerTableDataset(Dataset): + """The on-premises SQL Server dataset. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param sql_server_table_dataset_schema: The schema name of the SQL Server + dataset. Type: string (or Expression with resultType string). + :type sql_server_table_dataset_schema: object + :param table: The table name of the SQL Server dataset. Type: string (or + Expression with resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'sql_server_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlServerTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.sql_server_table_dataset_schema = kwargs.get('sql_server_table_dataset_schema', None) + self.table = kwargs.get('table', None) + self.type = 'SqlServerTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py new file mode 100644 index 000000000000..989780c9bfda --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SqlServerTableDataset(Dataset): + """The on-premises SQL Server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param sql_server_table_dataset_schema: The schema name of the SQL Server + dataset. Type: string (or Expression with resultType string). + :type sql_server_table_dataset_schema: object + :param table: The table name of the SQL Server dataset. Type: string (or + Expression with resultType string). 
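A sketch of the new schema/table split on this dataset, which replaces the retiring table_name property; LinkedServiceReference is assumed from this package and the names are placeholders:

from azure.mgmt.datafactory.models import LinkedServiceReference, SqlServerTableDataset

dataset = SqlServerTableDataset(
    linked_service_name=LinkedServiceReference(reference_name="SqlServerLS"),
    sql_server_table_dataset_schema="dbo",  # serialized as typeProperties.schema
    table="Orders",                         # preferred over the retiring table_name
)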
+ :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'sql_server_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, sql_server_table_dataset_schema=None, table=None, **kwargs) -> None: + super(SqlServerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.sql_server_table_dataset_schema = sql_server_table_dataset_schema + self.table = table + self.type = 'SqlServerTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py new file mode 100644 index 000000000000..3a81c5f7ea2f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class SqlSink(CopySink): + """A copy activity SQL sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. 
Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlSink, self).__init__(**kwargs) + self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) + self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.table_option = kwargs.get('table_option', None) + self.type = 'SqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py new file mode 100644 index 000000000000..d33810d9abef --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class SqlSink(CopySink): + """A copy activity SQL sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). 
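A sketch of the table-type stored procedure pattern this sink supports, with placeholder names throughout:

from azure.mgmt.datafactory.models import SqlSink

sink = SqlSink(
    sql_writer_stored_procedure_name="usp_UpsertOrders",  # placeholder procedure
    sql_writer_table_type="OrdersTableType",              # placeholder table type
    stored_procedure_table_type_parameter_name="Orders",  # placeholder parameter name
)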
+ :type table_option: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None, **kwargs) -> None: + super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name + self.sql_writer_table_type = sql_writer_table_type + self.pre_copy_script = pre_copy_script + self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.table_option = table_option + self.type = 'SqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source.py new file mode 100644 index 000000000000..bb31474b1f7c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SqlSource(CopySource): + """A copy activity SQL source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. 
Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Database source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + } + + def __init__(self, **kwargs): + super(SqlSource, self).__init__(**kwargs) + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.type = 'SqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source_py3.py new file mode 100644 index 000000000000..dcad458fd4a6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SqlSource(CopySource): + """A copy activity SQL source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. 
Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Database source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: + super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.type = 'SqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service.py new file mode 100644 index 000000000000..4edfc8b211f7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service.py @@ -0,0 +1,98 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class SquareLinkedService(LinkedService): + """Square Service linked service. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The URL of the Square instance. (e.g. + mystore.mysquare.com) + :type host: object + :param client_id: Required. The client ID associated with your Square + application. + :type client_id: object + :param client_secret: The client secret associated with your Square + application. + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param redirect_uri: Required. The redirect URL assigned in the Square + application dashboard. (e.g. http://localhost:2500) + :type redirect_uri: object + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'client_id': {'required': True}, + 'redirect_uri': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'redirect_uri': {'key': 'typeProperties.redirectUri', 'type': 'object'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SquareLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.redirect_uri = kwargs.get('redirect_uri', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Square' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service_py3.py new file mode 100644 index 000000000000..40719f600a18 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service_py3.py @@ -0,0 +1,98 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SquareLinkedService(LinkedService): + """Square Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. 
+ :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The URL of the Square instance. (e.g. + mystore.mysquare.com) + :type host: object + :param client_id: Required. The client ID associated with your Square + application. + :type client_id: object + :param client_secret: The client secret associated with your Square + application. + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param redirect_uri: Required. The redirect URL assigned in the Square + application dashboard. (e.g. http://localhost:2500) + :type redirect_uri: object + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'client_id': {'required': True}, + 'redirect_uri': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'redirect_uri': {'key': 'typeProperties.redirectUri', 'type': 'object'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, client_id, redirect_uri, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(SquareLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.client_id = client_id + self.client_secret = client_secret + self.redirect_uri = redirect_uri + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type =
'Square' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset.py new file mode 100644 index 000000000000..3903382d2e3a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class SquareObjectDataset(Dataset): + """Square Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
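+# --- Editor's usage sketch (not part of the generated patch): instantiating
+# the SquareLinkedService defined above. host and redirect_uri reuse the
+# docstring's example values; the client id and secret are hypothetical
+# placeholders, passed through a SecureString rather than hard-coded.
+# from azure.mgmt.datafactory.models import SquareLinkedService, SecureString
+#
+# square_ls = SquareLinkedService(
+#     host='mystore.mysquare.com',
+#     client_id='my-square-client-id',                # hypothetical
+#     client_secret=SecureString(value='<secret>'),   # placeholder value
+#     redirect_uri='http://localhost:2500',
+#     use_encrypted_endpoints=True,  # defaults to true per the docstring
+# )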
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SquareObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'SquareObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset_py3.py new file mode 100644 index 000000000000..6d624dc6feef --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SquareObjectDataset(Dataset): + """Square Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(SquareObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'SquareObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source.py new file mode 100644 index 000000000000..f083df43f13a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SquareSource(CopySource): + """A copy activity Square Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
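+# --- Editor's usage sketch (not part of the generated patch): a
+# SquareObjectDataset needs only the required linked service reference plus
+# the optional table name. 'SquareLS' and 'Customers' are hypothetical names.
+# from azure.mgmt.datafactory.models import (
+#     LinkedServiceReference, SquareObjectDataset)
+#
+# dataset = SquareObjectDataset(
+#     linked_service_name=LinkedServiceReference(reference_name='SquareLS'),
+#     table_name='Customers',
+# )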
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SquareSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'SquareSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source_py3.py new file mode 100644 index 000000000000..ec8a741d564c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SquareSource(CopySource): + """A copy activity Square Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
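+# --- Editor's usage sketch (not part of the generated patch): the source
+# half of a copy activity reading from Square; the query text is hypothetical.
+# from azure.mgmt.datafactory.models import SquareSource
+#
+# square_source = SquareSource(query='SELECT * FROM Customers')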
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'SquareSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential.py new file mode 100644 index 000000000000..63512fdec4d8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SSISAccessCredential(Model): + """SSIS access credential. + + All required parameters must be populated in order to send to Azure. + + :param domain: Required. Domain for Windows authentication. + :type domain: object + :param user_name: Required. UserName for Windows authentication. + :type user_name: object + :param password: Required. Password for Windows authentication. + :type password: ~azure.mgmt.datafactory.models.SecureString + """ + + _validation = { + 'domain': {'required': True}, + 'user_name': {'required': True}, + 'password': {'required': True}, + } + + _attribute_map = { + 'domain': {'key': 'domain', 'type': 'object'}, + 'user_name': {'key': 'userName', 'type': 'object'}, + 'password': {'key': 'password', 'type': 'SecureString'}, + } + + def __init__(self, **kwargs): + super(SSISAccessCredential, self).__init__(**kwargs) + self.domain = kwargs.get('domain', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential_py3.py new file mode 100644 index 000000000000..5df0fc8941da --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential_py3.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information.
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SSISAccessCredential(Model): + """SSIS access credential. + + All required parameters must be populated in order to send to Azure. + + :param domain: Required. Domain for Windows authentication. + :type domain: object + :param user_name: Required. UserName for Windows authentication. + :type user_name: object + :param password: Required. Password for Windows authentication. + :type password: ~azure.mgmt.datafactory.models.SecureString + """ + + _validation = { + 'domain': {'required': True}, + 'user_name': {'required': True}, + 'password': {'required': True}, + } + + _attribute_map = { + 'domain': {'key': 'domain', 'type': 'object'}, + 'user_name': {'key': 'userName', 'type': 'object'}, + 'password': {'key': 'password', 'type': 'SecureString'}, + } + + def __init__(self, *, domain, user_name, password, **kwargs) -> None: + super(SSISAccessCredential, self).__init__(**kwargs) + self.domain = domain + self.user_name = user_name + self.password = password diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment.py new file mode 100644 index 000000000000..5dff9764e2a2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata import SsisObjectMetadata + + +class SsisEnvironment(SsisObjectMetadata): + """Ssis environment. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains environment.
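+# --- Editor's usage sketch (not part of the generated patch): building the
+# Windows credential used to read package and log locations. The domain and
+# account names are hypothetical; the password rides in a SecureString.
+# from azure.mgmt.datafactory.models import SSISAccessCredential, SecureString
+#
+# access_credential = SSISAccessCredential(
+#     domain='CORP',                          # hypothetical domain
+#     user_name='ssis-file-reader',           # hypothetical account
+#     password=SecureString(value='<secret>'),
+# )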
+ :type folder_id: long + :param variables: Variables in the environment. + :type variables: list[~azure.mgmt.datafactory.models.SsisVariable] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'variables': {'key': 'variables', 'type': '[SsisVariable]'}, + } + + def __init__(self, **kwargs): + super(SsisEnvironment, self).__init__(**kwargs) + self.folder_id = kwargs.get('folder_id', None) + self.variables = kwargs.get('variables', None) + self.type = 'Environment' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_py3.py new file mode 100644 index 000000000000..43697ba62146 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_py3.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata_py3 import SsisObjectMetadata + + +class SsisEnvironment(SsisObjectMetadata): + """Ssis environment. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains environment. + :type folder_id: long + :param variables: Variables in the environment. + :type variables: list[~azure.mgmt.datafactory.models.SsisVariable] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'variables': {'key': 'variables', 'type': '[SsisVariable]'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, folder_id: int=None, variables=None, **kwargs) -> None: + super(SsisEnvironment, self).__init__(id=id, name=name, description=description, **kwargs) + self.folder_id = folder_id + self.variables = variables + self.type = 'Environment' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference.py new file mode 100644 index 000000000000..e7d31d369392 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License.
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisEnvironmentReference(Model): + """Ssis environment reference. + + :param id: Environment reference id. + :type id: long + :param environment_folder_name: Environment folder name. + :type environment_folder_name: str + :param environment_name: Environment name. + :type environment_name: str + :param reference_type: Reference type. + :type reference_type: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'environment_folder_name': {'key': 'environmentFolderName', 'type': 'str'}, + 'environment_name': {'key': 'environmentName', 'type': 'str'}, + 'reference_type': {'key': 'referenceType', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SsisEnvironmentReference, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.environment_folder_name = kwargs.get('environment_folder_name', None) + self.environment_name = kwargs.get('environment_name', None) + self.reference_type = kwargs.get('reference_type', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference_py3.py new file mode 100644 index 000000000000..14cbfca99d4f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference_py3.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisEnvironmentReference(Model): + """Ssis environment reference. + + :param id: Environment reference id. + :type id: long + :param environment_folder_name: Environment folder name. + :type environment_folder_name: str + :param environment_name: Environment name.
+ :type environment_name: str + :param reference_type: Reference type. + :type reference_type: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'environment_folder_name': {'key': 'environmentFolderName', 'type': 'str'}, + 'environment_name': {'key': 'environmentName', 'type': 'str'}, + 'reference_type': {'key': 'referenceType', 'type': 'str'}, + } + + def __init__(self, *, id: int=None, environment_folder_name: str=None, environment_name: str=None, reference_type: str=None, **kwargs) -> None: + super(SsisEnvironmentReference, self).__init__(**kwargs) + self.id = id + self.environment_folder_name = environment_folder_name + self.environment_name = environment_name + self.reference_type = reference_type diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential.py new file mode 100644 index 000000000000..c090694416a9 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SSISExecutionCredential(Model): + """SSIS package execution credential. + + All required parameters must be populated in order to send to Azure. + + :param domain: Required. Domain for Windows authentication. + :type domain: object + :param user_name: Required. UserName for Windows authentication. + :type user_name: object + :param password: Required. Password for Windows authentication. + :type password: ~azure.mgmt.datafactory.models.SecureString + """ + + _validation = { + 'domain': {'required': True}, + 'user_name': {'required': True}, + 'password': {'required': True}, + } + + _attribute_map = { + 'domain': {'key': 'domain', 'type': 'object'}, + 'user_name': {'key': 'userName', 'type': 'object'}, + 'password': {'key': 'password', 'type': 'SecureString'}, + } + + def __init__(self, **kwargs): + super(SSISExecutionCredential, self).__init__(**kwargs) + self.domain = kwargs.get('domain', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential_py3.py new file mode 100644 index 000000000000..051eaffa2bf2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential_py3.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated.
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SSISExecutionCredential(Model): + """SSIS package execution credential. + + All required parameters must be populated in order to send to Azure. + + :param domain: Required. Domain for Windows authentication. + :type domain: object + :param user_name: Required. UserName for Windows authentication. + :type user_name: object + :param password: Required. Password for Windows authentication. + :type password: ~azure.mgmt.datafactory.models.SecureString + """ + + _validation = { + 'domain': {'required': True}, + 'user_name': {'required': True}, + 'password': {'required': True}, + } + + _attribute_map = { + 'domain': {'key': 'domain', 'type': 'object'}, + 'user_name': {'key': 'userName', 'type': 'object'}, + 'password': {'key': 'password', 'type': 'SecureString'}, + } + + def __init__(self, *, domain, user_name, password, **kwargs) -> None: + super(SSISExecutionCredential, self).__init__(**kwargs) + self.domain = domain + self.user_name = user_name + self.password = password diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter.py new file mode 100644 index 000000000000..36f295c5a4aa --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SSISExecutionParameter(Model): + """SSIS execution parameter. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. SSIS package execution parameter value. Type: + string (or Expression with resultType string). + :type value: object + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SSISExecutionParameter, self).__init__(**kwargs) + self.value = kwargs.get('value', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter_py3.py new file mode 100644 index 000000000000..cd10dd457a42 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter_py3.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated.
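+# --- Editor's usage sketch (not part of the generated patch): execution
+# parameters are passed as a name -> SSISExecutionParameter mapping, for
+# example on an Execute SSIS Package activity. The parameter names and
+# values below are hypothetical.
+# from azure.mgmt.datafactory.models import SSISExecutionParameter
+#
+# project_parameters = {
+#     'SourceServer': SSISExecutionParameter(value='sql-prod-01'),
+#     'BatchSize': SSISExecutionParameter(value='500'),
+# }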
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SSISExecutionParameter(Model): + """SSIS execution parameter. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. SSIS package execution parameter value. Type: + string (or Expression with resultType string). + :type value: object + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': 'object'}, + } + + def __init__(self, *, value, **kwargs) -> None: + super(SSISExecutionParameter, self).__init__(**kwargs) + self.value = value diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder.py new file mode 100644 index 000000000000..350b0d92852b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata import SsisObjectMetadata + + +class SsisFolder(SsisObjectMetadata): + """Ssis folder. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SsisFolder, self).__init__(**kwargs) + self.type = 'Folder' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder_py3.py new file mode 100644 index 000000000000..d6483fda2c08 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder_py3.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata_py3 import SsisObjectMetadata + + +class SsisFolder(SsisObjectMetadata): + """Ssis folder. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. 
Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, **kwargs) -> None: + super(SsisFolder, self).__init__(id=id, name=name, description=description, **kwargs) + self.type = 'Folder' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location.py new file mode 100644 index 000000000000..cfdebe717541 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SSISLogLocation(Model): + """SSIS package execution log location. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param log_path: Required. The SSIS package execution log path. Type: + string (or Expression with resultType string). + :type log_path: object + :ivar type: Required. The type of SSIS log location. Default value: + "File". + :vartype type: str + :param access_credential: The package execution log access credential. + :type access_credential: + ~azure.mgmt.datafactory.models.SSISAccessCredential + :param log_refresh_interval: Specifies the interval to refresh the log. + The default interval is 5 minutes. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type log_refresh_interval: object + """ + + _validation = { + 'log_path': {'required': True}, + 'type': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'log_path': {'key': 'logPath', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, + 'log_refresh_interval': {'key': 'typeProperties.logRefreshInterval', 'type': 'object'}, + } + + type = "File" + + def __init__(self, **kwargs): + super(SSISLogLocation, self).__init__(**kwargs) + self.log_path = kwargs.get('log_path', None) + self.access_credential = kwargs.get('access_credential', None) + self.log_refresh_interval = kwargs.get('log_refresh_interval', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location_py3.py new file mode 100644 index 000000000000..de4fbe35dcb3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SSISLogLocation(Model): + """SSIS package execution log location. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param log_path: Required. The SSIS package execution log path. Type: + string (or Expression with resultType string). + :type log_path: object + :ivar type: Required. The type of SSIS log location. Default value: + "File". + :vartype type: str + :param access_credential: The package execution log access credential. + :type access_credential: + ~azure.mgmt.datafactory.models.SSISAccessCredential + :param log_refresh_interval: Specifies the interval to refresh the log. + The default interval is 5 minutes. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
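+# --- Editor's usage sketch (not part of the generated patch): a file-based
+# log location. Note that `type` is a class constant ("File") and is not
+# passed to the constructor. The UNC path, interval, and credential values
+# are hypothetical; the interval matches the documented timespan pattern.
+# from azure.mgmt.datafactory.models import (
+#     SecureString, SSISAccessCredential, SSISLogLocation)
+#
+# log_location = SSISLogLocation(
+#     log_path='\\\\fileshare\\ssis\\logs',
+#     log_refresh_interval='00:05:00',
+#     access_credential=SSISAccessCredential(
+#         domain='CORP',
+#         user_name='ssis-log-writer',
+#         password=SecureString(value='<secret>'),
+#     ),
+# )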
+ :type log_refresh_interval: object + """ + + _validation = { + 'log_path': {'required': True}, + 'type': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'log_path': {'key': 'logPath', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, + 'log_refresh_interval': {'key': 'typeProperties.logRefreshInterval', 'type': 'object'}, + } + + type = "File" + + def __init__(self, *, log_path, access_credential=None, log_refresh_interval=None, **kwargs) -> None: + super(SSISLogLocation, self).__init__(**kwargs) + self.log_path = log_path + self.access_credential = access_credential + self.log_refresh_interval = log_refresh_interval diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata.py new file mode 100644 index 000000000000..811075137f41 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisObjectMetadata(Model): + """SSIS object metadata. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SsisEnvironment, SsisPackage, SsisProject, SsisFolder + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Environment': 'SsisEnvironment', 'Package': 'SsisPackage', 'Project': 'SsisProject', 'Folder': 'SsisFolder'} + } + + def __init__(self, **kwargs): + super(SsisObjectMetadata, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.name = kwargs.get('name', None) + self.description = kwargs.get('description', None) + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response.py new file mode 100644 index 000000000000..a029c9f7ebc4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
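+# --- Editor's note (not part of the generated patch): the _subtype_map above
+# is what lets msrest deserialize a polymorphic payload into the concrete
+# SsisFolder/SsisProject/SsisPackage/SsisEnvironment class based on the
+# server-filled 'type' discriminator, so callers can simply branch on
+# isinstance. A minimal sketch:
+# from azure.mgmt.datafactory.models import SsisFolder, SsisPackage
+#
+# def describe(item):
+#     # item: any SsisObjectMetadata subclass returned by the service
+#     if isinstance(item, SsisPackage):
+#         return 'package %s in folder %s' % (item.name, item.folder_id)
+#     if isinstance(item, SsisFolder):
+#         return 'folder %s' % item.name
+#     return '%s %s' % (item.type, item.name)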
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisObjectMetadataListResponse(Model): + """A list of SSIS object metadata. + + :param value: List of SSIS object metadata. + :type value: list[~azure.mgmt.datafactory.models.SsisObjectMetadata] + :param next_link: The link to the next page of results, if any remaining + results exist. + :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[SsisObjectMetadata]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SsisObjectMetadataListResponse, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.next_link = kwargs.get('next_link', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response_py3.py new file mode 100644 index 000000000000..79931e1ceaf7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response_py3.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisObjectMetadataListResponse(Model): + """A list of SSIS object metadata. + + :param value: List of SSIS object metadata. + :type value: list[~azure.mgmt.datafactory.models.SsisObjectMetadata] + :param next_link: The link to the next page of results, if any remaining + results exist. + :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[SsisObjectMetadata]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__(self, *, value=None, next_link: str=None, **kwargs) -> None: + super(SsisObjectMetadataListResponse, self).__init__(**kwargs) + self.value = value + self.next_link = next_link diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_py3.py new file mode 100644 index 000000000000..45f7e15af4fa --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_py3.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisObjectMetadata(Model): + """SSIS object metadata. 
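+# --- Editor's usage sketch (not part of the generated patch): consuming a
+# SsisObjectMetadataListResponse page by page. The operation group name
+# (integration_runtime_object_metadata) is an assumption about the client
+# surface and is not shown in this diff; client is an already-authenticated
+# DataFactoryManagementClient.
+# def list_ssis_metadata(client, resource_group, factory, integration_runtime):
+#     resp = client.integration_runtime_object_metadata.get(  # assumed op
+#         resource_group, factory, integration_runtime)
+#     for item in resp.value or []:
+#         print(item.type, item.name)
+#     return resp.next_link  # non-None when more pages remain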
+ + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SsisEnvironment, SsisPackage, SsisProject, SsisFolder + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Environment': 'SsisEnvironment', 'Package': 'SsisPackage', 'Project': 'SsisProject', 'Folder': 'SsisFolder'} + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, **kwargs) -> None: + super(SsisObjectMetadata, self).__init__(**kwargs) + self.id = id + self.name = name + self.description = description + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response.py new file mode 100644 index 000000000000..9b782613ee08 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisObjectMetadataStatusResponse(Model): + """The status of the operation. + + :param status: The status of the operation. + :type status: str + :param name: The operation name. + :type name: str + :param properties: The operation properties. + :type properties: str + :param error: The operation error message. + :type error: str + """ + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'str'}, + 'error': {'key': 'error', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SsisObjectMetadataStatusResponse, self).__init__(**kwargs) + self.status = kwargs.get('status', None) + self.name = kwargs.get('name', None) + self.properties = kwargs.get('properties', None) + self.error = kwargs.get('error', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response_py3.py new file mode 100644 index 000000000000..a4b82b8f6bcd --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response_py3.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisObjectMetadataStatusResponse(Model): + """The status of the operation. + + :param status: The status of the operation. + :type status: str + :param name: The operation name. + :type name: str + :param properties: The operation properties. + :type properties: str + :param error: The operation error message. + :type error: str + """ + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'str'}, + 'error': {'key': 'error', 'type': 'str'}, + } + + def __init__(self, *, status: str=None, name: str=None, properties: str=None, error: str=None, **kwargs) -> None: + super(SsisObjectMetadataStatusResponse, self).__init__(**kwargs) + self.status = status + self.name = name + self.properties = properties + self.error = error diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package.py new file mode 100644 index 000000000000..b04fc1138797 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package.py @@ -0,0 +1,59 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata import SsisObjectMetadata + + +class SsisPackage(SsisObjectMetadata): + """Ssis Package. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains package. + :type folder_id: long + :param project_version: Project version which contains package. + :type project_version: long + :param project_id: Project id which contains package. 
+ :type project_id: long + :param parameters: Parameters in package + :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'project_version': {'key': 'projectVersion', 'type': 'long'}, + 'project_id': {'key': 'projectId', 'type': 'long'}, + 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, + } + + def __init__(self, **kwargs): + super(SsisPackage, self).__init__(**kwargs) + self.folder_id = kwargs.get('folder_id', None) + self.project_version = kwargs.get('project_version', None) + self.project_id = kwargs.get('project_id', None) + self.parameters = kwargs.get('parameters', None) + self.type = 'Package' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location.py new file mode 100644 index 000000000000..248d0aa9b8ae --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location.py @@ -0,0 +1,54 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SSISPackageLocation(Model): + """SSIS package location. + + All required parameters must be populated in order to send to Azure. + + :param package_path: Required. The SSIS package path. Type: string (or + Expression with resultType string). + :type package_path: object + :param type: The type of SSIS package location. Possible values include: + 'SSISDB', 'File' + :type type: str or ~azure.mgmt.datafactory.models.SsisPackageLocationType + :param package_password: Password of the package. + :type package_password: ~azure.mgmt.datafactory.models.SecureString + :param access_credential: The package access credential. + :type access_credential: + ~azure.mgmt.datafactory.models.SSISAccessCredential + :param configuration_path: The configuration file of the package + execution. Type: string (or Expression with resultType string). 
+ :type configuration_path: object + """ + + _validation = { + 'package_path': {'required': True}, + } + + _attribute_map = { + 'package_path': {'key': 'packagePath', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'package_password': {'key': 'typeProperties.packagePassword', 'type': 'SecureString'}, + 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, + 'configuration_path': {'key': 'typeProperties.configurationPath', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SSISPackageLocation, self).__init__(**kwargs) + self.package_path = kwargs.get('package_path', None) + self.type = kwargs.get('type', None) + self.package_password = kwargs.get('package_password', None) + self.access_credential = kwargs.get('access_credential', None) + self.configuration_path = kwargs.get('configuration_path', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location_py3.py new file mode 100644 index 000000000000..cc442d8d35b8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location_py3.py @@ -0,0 +1,54 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SSISPackageLocation(Model): + """SSIS package location. + + All required parameters must be populated in order to send to Azure. + + :param package_path: Required. The SSIS package path. Type: string (or + Expression with resultType string). + :type package_path: object + :param type: The type of SSIS package location. Possible values include: + 'SSISDB', 'File' + :type type: str or ~azure.mgmt.datafactory.models.SsisPackageLocationType + :param package_password: Password of the package. + :type package_password: ~azure.mgmt.datafactory.models.SecureString + :param access_credential: The package access credential. + :type access_credential: + ~azure.mgmt.datafactory.models.SSISAccessCredential + :param configuration_path: The configuration file of the package + execution. Type: string (or Expression with resultType string). 
+ :type configuration_path: object + """ + + _validation = { + 'package_path': {'required': True}, + } + + _attribute_map = { + 'package_path': {'key': 'packagePath', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'package_password': {'key': 'typeProperties.packagePassword', 'type': 'SecureString'}, + 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, + 'configuration_path': {'key': 'typeProperties.configurationPath', 'type': 'object'}, + } + + def __init__(self, *, package_path, type=None, package_password=None, access_credential=None, configuration_path=None, **kwargs) -> None: + super(SSISPackageLocation, self).__init__(**kwargs) + self.package_path = package_path + self.type = type + self.package_password = package_password + self.access_credential = access_credential + self.configuration_path = configuration_path diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_py3.py new file mode 100644 index 000000000000..e1e932e97ae6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_py3.py @@ -0,0 +1,59 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata_py3 import SsisObjectMetadata + + +class SsisPackage(SsisObjectMetadata): + """Ssis Package. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains package. + :type folder_id: long + :param project_version: Project version which contains package. + :type project_version: long + :param project_id: Project id which contains package. 
+ :type project_id: long + :param parameters: Parameters in package + :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'project_version': {'key': 'projectVersion', 'type': 'long'}, + 'project_id': {'key': 'projectId', 'type': 'long'}, + 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, folder_id: int=None, project_version: int=None, project_id: int=None, parameters=None, **kwargs) -> None: + super(SsisPackage, self).__init__(id=id, name=name, description=description, **kwargs) + self.folder_id = folder_id + self.project_version = project_version + self.project_id = project_id + self.parameters = parameters + self.type = 'Package' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter.py new file mode 100644 index 000000000000..c456af0bab48 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisParameter(Model): + """Ssis parameter. + + :param id: Parameter id. + :type id: long + :param name: Parameter name. + :type name: str + :param description: Parameter description. + :type description: str + :param data_type: Parameter type. + :type data_type: str + :param required: Whether parameter is required. + :type required: bool + :param sensitive: Whether parameter is sensitive. + :type sensitive: bool + :param design_default_value: Design default value of parameter. + :type design_default_value: str + :param default_value: Default value of parameter. + :type default_value: str + :param sensitive_default_value: Default sensitive value of parameter. + :type sensitive_default_value: str + :param value_type: Parameter value type. + :type value_type: str + :param value_set: Parameter value set. + :type value_set: bool + :param variable: Parameter reference variable. 
+ :type variable: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'required': {'key': 'required', 'type': 'bool'}, + 'sensitive': {'key': 'sensitive', 'type': 'bool'}, + 'design_default_value': {'key': 'designDefaultValue', 'type': 'str'}, + 'default_value': {'key': 'defaultValue', 'type': 'str'}, + 'sensitive_default_value': {'key': 'sensitiveDefaultValue', 'type': 'str'}, + 'value_type': {'key': 'valueType', 'type': 'str'}, + 'value_set': {'key': 'valueSet', 'type': 'bool'}, + 'variable': {'key': 'variable', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SsisParameter, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.name = kwargs.get('name', None) + self.description = kwargs.get('description', None) + self.data_type = kwargs.get('data_type', None) + self.required = kwargs.get('required', None) + self.sensitive = kwargs.get('sensitive', None) + self.design_default_value = kwargs.get('design_default_value', None) + self.default_value = kwargs.get('default_value', None) + self.sensitive_default_value = kwargs.get('sensitive_default_value', None) + self.value_type = kwargs.get('value_type', None) + self.value_set = kwargs.get('value_set', None) + self.variable = kwargs.get('variable', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter_py3.py new file mode 100644 index 000000000000..6a4ff73768f0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisParameter(Model): + """Ssis parameter. + + :param id: Parameter id. + :type id: long + :param name: Parameter name. + :type name: str + :param description: Parameter description. + :type description: str + :param data_type: Parameter type. + :type data_type: str + :param required: Whether parameter is required. + :type required: bool + :param sensitive: Whether parameter is sensitive. + :type sensitive: bool + :param design_default_value: Design default value of parameter. + :type design_default_value: str + :param default_value: Default value of parameter. + :type default_value: str + :param sensitive_default_value: Default sensitive value of parameter. + :type sensitive_default_value: str + :param value_type: Parameter value type. + :type value_type: str + :param value_set: Parameter value set. + :type value_set: bool + :param variable: Parameter reference variable. 
+ :type variable: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'required': {'key': 'required', 'type': 'bool'}, + 'sensitive': {'key': 'sensitive', 'type': 'bool'}, + 'design_default_value': {'key': 'designDefaultValue', 'type': 'str'}, + 'default_value': {'key': 'defaultValue', 'type': 'str'}, + 'sensitive_default_value': {'key': 'sensitiveDefaultValue', 'type': 'str'}, + 'value_type': {'key': 'valueType', 'type': 'str'}, + 'value_set': {'key': 'valueSet', 'type': 'bool'}, + 'variable': {'key': 'variable', 'type': 'str'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, data_type: str=None, required: bool=None, sensitive: bool=None, design_default_value: str=None, default_value: str=None, sensitive_default_value: str=None, value_type: str=None, value_set: bool=None, variable: str=None, **kwargs) -> None: + super(SsisParameter, self).__init__(**kwargs) + self.id = id + self.name = name + self.description = description + self.data_type = data_type + self.required = required + self.sensitive = sensitive + self.design_default_value = design_default_value + self.default_value = default_value + self.sensitive_default_value = sensitive_default_value + self.value_type = value_type + self.value_set = value_set + self.variable = variable diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project.py new file mode 100644 index 000000000000..c29a36fb628e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata import SsisObjectMetadata + + +class SsisProject(SsisObjectMetadata): + """Ssis project. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains project. + :type folder_id: long + :param version: Project version. 
+ :type version: long + :param environment_refs: Environment reference in project + :type environment_refs: + list[~azure.mgmt.datafactory.models.SsisEnvironmentReference] + :param parameters: Parameters in project + :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'version': {'key': 'version', 'type': 'long'}, + 'environment_refs': {'key': 'environmentRefs', 'type': '[SsisEnvironmentReference]'}, + 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, + } + + def __init__(self, **kwargs): + super(SsisProject, self).__init__(**kwargs) + self.folder_id = kwargs.get('folder_id', None) + self.version = kwargs.get('version', None) + self.environment_refs = kwargs.get('environment_refs', None) + self.parameters = kwargs.get('parameters', None) + self.type = 'Project' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project_py3.py new file mode 100644 index 000000000000..11b95a644e2f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project_py3.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata_py3 import SsisObjectMetadata + + +class SsisProject(SsisObjectMetadata): + """Ssis project. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains project. + :type folder_id: long + :param version: Project version. 
+ :type version: long + :param environment_refs: Environment reference in project + :type environment_refs: + list[~azure.mgmt.datafactory.models.SsisEnvironmentReference] + :param parameters: Parameters in project + :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'version': {'key': 'version', 'type': 'long'}, + 'environment_refs': {'key': 'environmentRefs', 'type': '[SsisEnvironmentReference]'}, + 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, folder_id: int=None, version: int=None, environment_refs=None, parameters=None, **kwargs) -> None: + super(SsisProject, self).__init__(id=id, name=name, description=description, **kwargs) + self.folder_id = folder_id + self.version = version + self.environment_refs = environment_refs + self.parameters = parameters + self.type = 'Project' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override.py new file mode 100644 index 000000000000..30b78594e6ab --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SSISPropertyOverride(Model): + """SSIS property override. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. SSIS package property override value. Type: string + (or Expression with resultType string). + :type value: object + :param is_sensitive: Whether SSIS package property override value is + sensitive data. Value will be encrypted in SSISDB if it is true + :type is_sensitive: bool + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': 'object'}, + 'is_sensitive': {'key': 'isSensitive', 'type': 'bool'}, + } + + def __init__(self, **kwargs): + super(SSISPropertyOverride, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.is_sensitive = kwargs.get('is_sensitive', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override_py3.py new file mode 100644 index 000000000000..b425a19adc7e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override_py3.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
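A minimal usage sketch, not from this patch: it assumes the Execute SSIS
Package activity model in this package accepts a property_overrides mapping
keyed by package property path, and the property path and server name below
are invented for illustration:

    from azure.mgmt.datafactory.models import SSISPropertyOverride

    overrides = {
        '\\Package.Variables[User::TargetServer].Value':
            SSISPropertyOverride(value='staging-sql-01', is_sensitive=False),
    }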
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override_py3.py
new file mode 100644
index 000000000000..b425a19adc7e
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override_py3.py
@@ -0,0 +1,40 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class SSISPropertyOverride(Model):
+    """SSIS property override.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param value: Required. SSIS package property override value. Type: string
+     (or Expression with resultType string).
+    :type value: object
+    :param is_sensitive: Whether SSIS package property override value is
+     sensitive data. Value will be encrypted in SSISDB if it is true
+    :type is_sensitive: bool
+    """
+
+    _validation = {
+        'value': {'required': True},
+    }
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': 'object'},
+        'is_sensitive': {'key': 'isSensitive', 'type': 'bool'},
+    }
+
+    def __init__(self, *, value, is_sensitive: bool=None, **kwargs) -> None:
+        super(SSISPropertyOverride, self).__init__(**kwargs)
+        self.value = value
+        self.is_sensitive = is_sensitive
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable.py
new file mode 100644
index 000000000000..73fda3b27967
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable.py
@@ -0,0 +1,52 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class SsisVariable(Model):
+    """Ssis variable.
+
+    :param id: Variable id.
+    :type id: long
+    :param name: Variable name.
+    :type name: str
+    :param description: Variable description.
+    :type description: str
+    :param data_type: Variable type.
+    :type data_type: str
+    :param sensitive: Whether variable is sensitive.
+    :type sensitive: bool
+    :param value: Variable value.
+    :type value: str
+    :param sensitive_value: Variable sensitive value.
+    :type sensitive_value: str
+    """
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'long'},
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'data_type': {'key': 'dataType', 'type': 'str'},
+        'sensitive': {'key': 'sensitive', 'type': 'bool'},
+        'value': {'key': 'value', 'type': 'str'},
+        'sensitive_value': {'key': 'sensitiveValue', 'type': 'str'},
+    }
+
+    def __init__(self, **kwargs):
+        super(SsisVariable, self).__init__(**kwargs)
+        self.id = kwargs.get('id', None)
+        self.name = kwargs.get('name', None)
+        self.description = kwargs.get('description', None)
+        self.data_type = kwargs.get('data_type', None)
+        self.sensitive = kwargs.get('sensitive', None)
+        self.value = kwargs.get('value', None)
+        self.sensitive_value = kwargs.get('sensitive_value', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable_py3.py
new file mode 100644
index 000000000000..e709842ff465
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable_py3.py
@@ -0,0 +1,52 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class SsisVariable(Model):
+    """Ssis variable.
+
+    :param id: Variable id.
+    :type id: long
+    :param name: Variable name.
+    :type name: str
+    :param description: Variable description.
+    :type description: str
+    :param data_type: Variable type.
+    :type data_type: str
+    :param sensitive: Whether variable is sensitive.
+    :type sensitive: bool
+    :param value: Variable value.
+    :type value: str
+    :param sensitive_value: Variable sensitive value.
+    :type sensitive_value: str
+    """
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'long'},
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'data_type': {'key': 'dataType', 'type': 'str'},
+        'sensitive': {'key': 'sensitive', 'type': 'bool'},
+        'value': {'key': 'value', 'type': 'str'},
+        'sensitive_value': {'key': 'sensitiveValue', 'type': 'str'},
+    }
+
+    def __init__(self, *, id: int=None, name: str=None, description: str=None, data_type: str=None, sensitive: bool=None, value: str=None, sensitive_value: str=None, **kwargs) -> None:
+        super(SsisVariable, self).__init__(**kwargs)
+        self.id = id
+        self.name = name
+        self.description = description
+        self.data_type = data_type
+        self.sensitive = sensitive
+        self.value = value
+        self.sensitive_value = sensitive_value
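SsisVariable objects are returned by the SSIS catalog metadata APIs rather
than built by callers. A small illustrative helper (hypothetical, not part of
the generated API) for reading one, since sensitive variables surface their
value through sensitive_value instead of value:

    def variable_value(variable):
        # SsisVariable reports sensitive values separately, if at all.
        return variable.sensitive_value if variable.sensitive else variable.value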
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings.py
new file mode 100644
index 000000000000..05ca8dff2c52
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings.py
@@ -0,0 +1,51 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class StagingSettings(Model):
+    """Staging settings.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param linked_service_name: Required. Staging linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param path: The path to storage for storing the interim data. Type:
+     string (or Expression with resultType string).
+    :type path: object
+    :param enable_compression: Specifies whether to use compression when
+     copying data via an interim staging. Default value is false. Type: boolean
+     (or Expression with resultType boolean).
+    :type enable_compression: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'path': {'key': 'path', 'type': 'object'},
+        'enable_compression': {'key': 'enableCompression', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(StagingSettings, self).__init__(**kwargs)
+        self.additional_properties = kwargs.get('additional_properties', None)
+        self.linked_service_name = kwargs.get('linked_service_name', None)
+        self.path = kwargs.get('path', None)
+        self.enable_compression = kwargs.get('enable_compression', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings_py3.py
new file mode 100644
index 000000000000..13b4353963a3
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings_py3.py
@@ -0,0 +1,51 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class StagingSettings(Model):
+    """Staging settings.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param linked_service_name: Required. Staging linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param path: The path to storage for storing the interim data. Type:
+     string (or Expression with resultType string).
+    :type path: object
+    :param enable_compression: Specifies whether to use compression when
+     copying data via an interim staging. Default value is false. Type: boolean
+     (or Expression with resultType boolean).
+    :type enable_compression: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'path': {'key': 'path', 'type': 'object'},
+        'enable_compression': {'key': 'enableCompression', 'type': 'object'},
+    }
+
+    def __init__(self, *, linked_service_name, additional_properties=None, path=None, enable_compression=None, **kwargs) -> None:
+        super(StagingSettings, self).__init__(**kwargs)
+        self.additional_properties = additional_properties
+        self.linked_service_name = linked_service_name
+        self.path = path
+        self.enable_compression = enable_compression
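A minimal sketch of wiring these settings into a staged copy; the linked
service name and staging path below are made up for illustration:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, StagingSettings)

    staging = StagingSettings(
        linked_service_name=LinkedServiceReference(
            reference_name='StagingBlobStorage'),  # hypothetical linked service
        path='adfstaging/interim',  # blob container/folder for interim data
        enable_compression=True)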
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings.py
new file mode 100644
index 000000000000..c12c0ce8860d
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings.py
@@ -0,0 +1,45 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class StoreReadSettings(Model):
+    """Connector read setting.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param type: Required. The read setting type.
+    :type type: str
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(StoreReadSettings, self).__init__(**kwargs)
+        self.additional_properties = kwargs.get('additional_properties', None)
+        self.type = kwargs.get('type', None)
+        self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings_py3.py
new file mode 100644
index 000000000000..e2026fd52b93
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings_py3.py
@@ -0,0 +1,45 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class StoreReadSettings(Model):
+    """Connector read setting.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param type: Required. The read setting type.
+    :type type: str
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+    }
+
+    def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, **kwargs) -> None:
+        super(StoreReadSettings, self).__init__(**kwargs)
+        self.additional_properties = additional_properties
+        self.type = type
+        self.max_concurrent_connections = max_concurrent_connections
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings.py
new file mode 100644
index 000000000000..728b8cdd8c89
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class StoreWriteSettings(Model):
+    """Connector write settings.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: FileServerWriteSettings, AzureDataLakeStoreWriteSettings,
+    AzureBlobFSWriteSettings, AzureBlobStorageWriteSettings
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param copy_behavior: The type of copy behavior for copy sink.
+    :type copy_behavior: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+    }
+
+    _subtype_map = {
+        'type': {'FileServerWriteSettings': 'FileServerWriteSettings', 'AzureDataLakeStoreWriteSettings': 'AzureDataLakeStoreWriteSettings', 'AzureBlobFSWriteSettings': 'AzureBlobFSWriteSettings', 'AzureBlobStorageWriteSettings': 'AzureBlobStorageWriteSettings'}
+    }
+
+    def __init__(self, **kwargs):
+        super(StoreWriteSettings, self).__init__(**kwargs)
+        self.additional_properties = kwargs.get('additional_properties', None)
+        self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None)
+        self.copy_behavior = kwargs.get('copy_behavior', None)
+        self.type = None
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings_py3.py
new file mode 100644
index 000000000000..7cce5d205541
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings_py3.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class StoreWriteSettings(Model):
+    """Connector write settings.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: FileServerWriteSettings, AzureDataLakeStoreWriteSettings,
+    AzureBlobFSWriteSettings, AzureBlobStorageWriteSettings
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param copy_behavior: The type of copy behavior for copy sink.
+    :type copy_behavior: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+    }
+
+    _subtype_map = {
+        'type': {'FileServerWriteSettings': 'FileServerWriteSettings', 'AzureDataLakeStoreWriteSettings': 'AzureDataLakeStoreWriteSettings', 'AzureBlobFSWriteSettings': 'AzureBlobFSWriteSettings', 'AzureBlobStorageWriteSettings': 'AzureBlobStorageWriteSettings'}
+    }
+
+    def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None:
+        super(StoreWriteSettings, self).__init__(**kwargs)
+        self.additional_properties = additional_properties
+        self.max_concurrent_connections = max_concurrent_connections
+        self.copy_behavior = copy_behavior
+        self.type = None
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py
new file mode 100644
index 000000000000..ff16595aa8c7
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py
@@ -0,0 +1,35 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class StoredProcedureParameter(Model):
+    """SQL stored procedure parameter.
+
+    :param value: Stored procedure parameter value. Type: string (or
+     Expression with resultType string).
+    :type value: object
+    :param type: Stored procedure parameter type. Possible values include:
+     'String', 'Int', 'Int64', 'Decimal', 'Guid', 'Boolean', 'Date'
+    :type type: str or
+     ~azure.mgmt.datafactory.models.StoredProcedureParameterType
+    """
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+    }
+
+    def __init__(self, **kwargs):
+        super(StoredProcedureParameter, self).__init__(**kwargs)
+        self.value = kwargs.get('value', None)
+        self.type = kwargs.get('type', None)
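A short sketch of typed stored procedure parameters (the parameter names are
invented; the dict shape assumes an activity, such as the SQL Server stored
procedure activity, that takes a stored_procedure_parameters mapping):

    from azure.mgmt.datafactory.models import StoredProcedureParameter

    stored_procedure_parameters = {
        'RunDate': StoredProcedureParameter(value='2019-06-07', type='Date'),
        'BatchSize': StoredProcedureParameter(value=500, type='Int'),
    }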
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py
new file mode 100644
index 000000000000..2842ef9ae35c
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py
@@ -0,0 +1,35 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class StoredProcedureParameter(Model):
+    """SQL stored procedure parameter.
+
+    :param value: Stored procedure parameter value. Type: string (or
+     Expression with resultType string).
+    :type value: object
+    :param type: Stored procedure parameter type. Possible values include:
+     'String', 'Int', 'Int64', 'Decimal', 'Guid', 'Boolean', 'Date'
+    :type type: str or
+     ~azure.mgmt.datafactory.models.StoredProcedureParameterType
+    """
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+    }
+
+    def __init__(self, *, value=None, type=None, **kwargs) -> None:
+        super(StoredProcedureParameter, self).__init__(**kwargs)
+        self.value = value
+        self.type = type
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource.py
new file mode 100644
index 000000000000..c80b531db7d1
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource.py
@@ -0,0 +1,50 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class SubResource(Model):
+    """Azure Data Factory nested resource, which belongs to a factory.
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    :ivar id: The resource identifier.
+    :vartype id: str
+    :ivar name: The resource name.
+    :vartype name: str
+    :ivar type: The resource type.
+    :vartype type: str
+    :ivar etag: Etag identifies change in the resource.
+    :vartype etag: str
+    """
+
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'etag': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'etag': {'key': 'etag', 'type': 'str'},
+    }
+
+    def __init__(self, **kwargs):
+        super(SubResource, self).__init__(**kwargs)
+        self.id = None
+        self.name = None
+        self.type = None
+        self.etag = None
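Every field on SubResource is read-only, so callers never populate one; the
fields are filled in only when the service returns a nested resource, e.g.:

    sub = SubResource()
    assert sub.id is None and sub.etag is None  # set by the server on responses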
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource_py3.py
new file mode 100644
index 000000000000..3b2d9ec62366
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource_py3.py
@@ -0,0 +1,50 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class SubResource(Model):
+    """Azure Data Factory nested resource, which belongs to a factory.
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    :ivar id: The resource identifier.
+    :vartype id: str
+    :ivar name: The resource name.
+    :vartype name: str
+    :ivar type: The resource type.
+    :vartype type: str
+    :ivar etag: Etag identifies change in the resource.
+    :vartype etag: str
+    """
+
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'etag': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'etag': {'key': 'etag', 'type': 'str'},
+    }
+
+    def __init__(self, **kwargs) -> None:
+        super(SubResource, self).__init__(**kwargs)
+        self.id = None
+        self.name = None
+        self.type = None
+        self.etag = None
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service.py
new file mode 100644
index 000000000000..83de0e6f61f2
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service.py
@@ -0,0 +1,91 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class SybaseLinkedService(LinkedService):
+    """Linked service for Sybase data source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param server: Required. Server name for connection. Type: string (or
+     Expression with resultType string).
+    :type server: object
+    :param database: Required. Database name for connection. Type: string (or
+     Expression with resultType string).
+    :type database: object
+    :param schema: Schema name for connection. Type: string (or Expression
+     with resultType string).
+    :type schema: object
+    :param authentication_type: AuthenticationType to be used for connection.
+     Possible values include: 'Basic', 'Windows'
+    :type authentication_type: str or
+     ~azure.mgmt.datafactory.models.SybaseAuthenticationType
+    :param username: Username for authentication. Type: string (or Expression
+     with resultType string).
+    :type username: object
+    :param password: Password for authentication.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'server': {'required': True},
+        'database': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'server': {'key': 'typeProperties.server', 'type': 'object'},
+        'database': {'key': 'typeProperties.database', 'type': 'object'},
+        'schema': {'key': 'typeProperties.schema', 'type': 'object'},
+        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+        'username': {'key': 'typeProperties.username', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(SybaseLinkedService, self).__init__(**kwargs)
+        self.server = kwargs.get('server', None)
+        self.database = kwargs.get('database', None)
+        self.schema = kwargs.get('schema', None)
+        self.authentication_type = kwargs.get('authentication_type', None)
+        self.username = kwargs.get('username', None)
+        self.password = kwargs.get('password', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
+        self.type = 'Sybase'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service_py3.py
new file mode 100644
index 000000000000..5b6cc0ce6ded
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service_py3.py
@@ -0,0 +1,91 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class SybaseLinkedService(LinkedService):
+    """Linked service for Sybase data source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param server: Required. Server name for connection. Type: string (or
+     Expression with resultType string).
+    :type server: object
+    :param database: Required. Database name for connection. Type: string (or
+     Expression with resultType string).
+    :type database: object
+    :param schema: Schema name for connection. Type: string (or Expression
+     with resultType string).
+    :type schema: object
+    :param authentication_type: AuthenticationType to be used for connection.
+     Possible values include: 'Basic', 'Windows'
+    :type authentication_type: str or
+     ~azure.mgmt.datafactory.models.SybaseAuthenticationType
+    :param username: Username for authentication. Type: string (or Expression
+     with resultType string).
+    :type username: object
+    :param password: Password for authentication.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'server': {'required': True},
+        'database': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'server': {'key': 'typeProperties.server', 'type': 'object'},
+        'database': {'key': 'typeProperties.database', 'type': 'object'},
+        'schema': {'key': 'typeProperties.schema', 'type': 'object'},
+        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+        'username': {'key': 'typeProperties.username', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, *, server, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, schema=None, authentication_type=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None:
+        super(SybaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+        self.server = server
+        self.database = database
+        self.schema = schema
+        self.authentication_type = authentication_type
+        self.username = username
+        self.password = password
+        self.encrypted_credential = encrypted_credential
+        self.type = 'Sybase'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source.py
new file mode 100644
index 000000000000..02f89a8fca25
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source import CopySource
+
+
+class SybaseSource(CopySource):
+    """A copy activity source for Sybase databases.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param query: Database query. Type: string (or Expression with resultType
+     string).
+    :type query: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(SybaseSource, self).__init__(**kwargs)
+        self.query = kwargs.get('query', None)
+        self.type = 'SybaseSource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source_py3.py
new file mode 100644
index 000000000000..c11e96174349
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source_py3.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source_py3 import CopySource
+
+
+class SybaseSource(CopySource):
+    """A copy activity source for Sybase databases.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+ :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'SybaseSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset.py new file mode 100644 index 000000000000..ff2dfd5471fb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class SybaseTableDataset(Dataset): + """The Sybase table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. 
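A hedged sketch of the source model defined above: SybaseSource only adds an optional 'query' on top of the common CopySource knobs, and every argument is optional. The query text and retry values are illustrative.

from azure.mgmt.datafactory.models import SybaseSource

# 'type' is fixed to 'SybaseSource' by the constructor.
source = SybaseSource(
    query='select * from dbo.Orders',   # illustrative query
    source_retry_count=2,
    source_retry_wait='00:00:30',
)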
+ :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Sybase table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SybaseTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'SybaseTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset_py3.py new file mode 100644 index 000000000000..88e9d3c287fe --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SybaseTableDataset(Dataset): + """The Sybase table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. 
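A minimal construction sketch for the SybaseTableDataset above: 'linked_service_name' is the one required argument, and its reference name must match a linked service already defined in the factory (the names below are placeholders).

from azure.mgmt.datafactory.models import (
    LinkedServiceReference, SybaseTableDataset)

dataset = SybaseTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='SybaseLS'),
    table_name='dbo.Orders',            # placeholder table
)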
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Sybase table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(SybaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'SybaseTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py new file mode 100644 index 000000000000..6e02b0d389ab --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py @@ -0,0 +1,84 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class TeradataLinkedService(LinkedService): + """Linked service for Teradata data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Teradata ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param server: Server name for connection. 
Type: string (or Expression + with resultType string). + :type server: object + :param authentication_type: AuthenticationType to be used for connection. + Possible values include: 'Basic', 'Windows' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.TeradataAuthenticationType + :param username: Username for authentication. Type: string (or Expression + with resultType string). + :type username: object + :param password: Password for authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(TeradataLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.server = kwargs.get('server', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Teradata' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py new file mode 100644 index 000000000000..aac40efe69e0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py @@ -0,0 +1,84 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class TeradataLinkedService(LinkedService): + """Linked service for Teradata data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
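A usage sketch for the TeradataLinkedService above, with placeholder values: unlike the Sybase model, nothing beyond 'type' is required here, so a caller may supply either a full ODBC connection string or server plus credentials.

from azure.mgmt.datafactory.models import SecureString, TeradataLinkedService

# Either 'connection_string' or server/credentials can be used.
teradata_ls = TeradataLinkedService(
    server='teradata01.contoso.com',    # placeholder host
    authentication_type='Basic',
    username='etl_user',
    password=SecureString(value='<secret>'),
)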
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Teradata ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param server: Server name for connection. Type: string (or Expression + with resultType string). + :type server: object + :param authentication_type: AuthenticationType to be used for connection. + Possible values include: 'Basic', 'Windows' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.TeradataAuthenticationType + :param username: Username for authentication. Type: string (or Expression + with resultType string). + :type username: object + :param password: Password for authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, server=None, authentication_type=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(TeradataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.server = server + self.authentication_type = authentication_type + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'Teradata' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings.py new file mode 100644 index 000000000000..0f9c023f9553 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TeradataPartitionSettings(Model): + """The settings that will be leveraged for teradata source partitioning. + + :param partition_column_name: The name of the column that will be used for + proceeding range or hash partitioning. Type: string (or Expression with + resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(TeradataPartitionSettings, self).__init__(**kwargs) + self.partition_column_name = kwargs.get('partition_column_name', None) + self.partition_upper_bound = kwargs.get('partition_upper_bound', None) + self.partition_lower_bound = kwargs.get('partition_lower_bound', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings_py3.py new file mode 100644 index 000000000000..04824e614ff2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings_py3.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TeradataPartitionSettings(Model): + """The settings that will be leveraged for teradata source partitioning. + + :param partition_column_name: The name of the column that will be used for + proceeding range or hash partitioning. Type: string (or Expression with + resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). 
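A construction sketch for the TeradataPartitionSettings model above (all values are placeholders): the bounds are only meaningful for range partitioning, while hash partitioning needs just the column name.

from azure.mgmt.datafactory.models import TeradataPartitionSettings

partition_settings = TeradataPartitionSettings(
    partition_column_name='OrderId',    # placeholder column
    partition_lower_bound='1',
    partition_upper_bound='1000000',
)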
+ :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__(self, *, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None: + super(TeradataPartitionSettings, self).__init__(**kwargs) + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source.py new file mode 100644 index 000000000000..81d1c8e202c1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class TeradataSource(CopySource): + """A copy activity Teradata source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Teradata query. Type: string (or Expression with resultType + string). + :type query: object + :param partition_option: The partition mechanism that will be used for + teradata read in parallel. Possible values include: 'None', 'Hash', + 'DynamicRange' + :type partition_option: str or + ~azure.mgmt.datafactory.models.TeradataPartitionOption + :param partition_settings: The settings that will be leveraged for + teradata source partitioning. 
+ :type partition_settings: + ~azure.mgmt.datafactory.models.TeradataPartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, + } + + def __init__(self, **kwargs): + super(TeradataSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) + self.type = 'TeradataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source_py3.py new file mode 100644 index 000000000000..79d8ccb01f14 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class TeradataSource(CopySource): + """A copy activity Teradata source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Teradata query. Type: string (or Expression with resultType + string). + :type query: object + :param partition_option: The partition mechanism that will be used for + teradata read in parallel. Possible values include: 'None', 'Hash', + 'DynamicRange' + :type partition_option: str or + ~azure.mgmt.datafactory.models.TeradataPartitionOption + :param partition_settings: The settings that will be leveraged for + teradata source partitioning. 
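A hedged sketch combining the two models above: per the docstring, 'DynamicRange' pairs with the bounds in TeradataPartitionSettings, and 'Hash' and 'None' are the other documented options. Column and bound values are illustrative.

from azure.mgmt.datafactory.models import (
    TeradataPartitionSettings, TeradataSource)

source = TeradataSource(
    partition_option='DynamicRange',
    partition_settings=TeradataPartitionSettings(
        partition_column_name='OrderId',
        partition_lower_bound='1',
        partition_upper_bound='1000000',
    ),
)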
+ :type partition_settings: + ~azure.mgmt.datafactory.models.TeradataPartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, partition_option=None, partition_settings=None, **kwargs) -> None: + super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.partition_option = partition_option + self.partition_settings = partition_settings + self.type = 'TeradataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset.py new file mode 100644 index 000000000000..e396bfd6fb15 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class TeradataTableDataset(Dataset): + """The Teradata database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. 
Constant filled by server. + :type type: str + :param database: The database name of Teradata. Type: string (or + Expression with resultType string). + :type database: object + :param table: The table name of Teradata. Type: string (or Expression with + resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(TeradataTableDataset, self).__init__(**kwargs) + self.database = kwargs.get('database', None) + self.table = kwargs.get('table', None) + self.type = 'TeradataTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset_py3.py new file mode 100644 index 000000000000..892707b7f133 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class TeradataTableDataset(Dataset): + """The Teradata database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. 
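A minimal sketch for the TeradataTableDataset above: as with the Sybase dataset, only 'linked_service_name' is required; 'database' and 'table' are the Teradata-specific type properties (all names below are placeholders).

from azure.mgmt.datafactory.models import (
    LinkedServiceReference, TeradataTableDataset)

dataset = TeradataTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='TeradataLS'),
    database='SalesDb',                 # placeholder database
    table='Orders',                     # placeholder table
)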
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param database: The database name of Teradata. Type: string (or + Expression with resultType string). + :type database: object + :param table: The table name of Teradata. Type: string (or Expression with + resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, database=None, table=None, **kwargs) -> None: + super(TeradataTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.database = database + self.table = table + self.type = 'TeradataTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format.py new file mode 100644 index 000000000000..48f32bf10133 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_storage_format import DatasetStorageFormat + + +class TextFormat(DatasetStorageFormat): + """The data stored in text format. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param serializer: Serializer. Type: string (or Expression with resultType + string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with + resultType string). + :type deserializer: object + :param type: Required. Constant filled by server. + :type type: str + :param column_delimiter: The column delimiter. Type: string (or Expression + with resultType string). + :type column_delimiter: object + :param row_delimiter: The row delimiter. Type: string (or Expression with + resultType string). 
+ :type row_delimiter: object
+ :param escape_char: The escape character. Type: string (or Expression with
+ resultType string).
+ :type escape_char: object
+ :param quote_char: The quote character. Type: string (or Expression with
+ resultType string).
+ :type quote_char: object
+ :param null_value: The null value string. Type: string (or Expression with
+ resultType string).
+ :type null_value: object
+ :param encoding_name: The code page name of the preferred encoding. If
+ not specified, the default value is "utf-8", unless the BOM denotes
+ another Unicode encoding. Refer to the "Name" column of the table in the
+ following link for the set of supported values:
+ https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string
+ (or Expression with resultType string).
+ :type encoding_name: object
+ :param treat_empty_as_null: Treat empty column values in the text file as
+ null. The default value is true. Type: boolean (or Expression with
+ resultType boolean).
+ :type treat_empty_as_null: object
+ :param skip_line_count: The number of lines/rows to be skipped when
+ parsing text files. The default value is 0. Type: integer (or Expression
+ with resultType integer).
+ :type skip_line_count: object
+ :param first_row_as_header: When used as input, treat the first row of
+ data as headers. When used as output, write the headers into the output
+ as the first row of data. The default value is false. Type: boolean (or
+ Expression with resultType boolean).
+ :type first_row_as_header: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'serializer': {'key': 'serializer', 'type': 'object'},
+ 'deserializer': {'key': 'deserializer', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'column_delimiter': {'key': 'columnDelimiter', 'type': 'object'},
+ 'row_delimiter': {'key': 'rowDelimiter', 'type': 'object'},
+ 'escape_char': {'key': 'escapeChar', 'type': 'object'},
+ 'quote_char': {'key': 'quoteChar', 'type': 'object'},
+ 'null_value': {'key': 'nullValue', 'type': 'object'},
+ 'encoding_name': {'key': 'encodingName', 'type': 'object'},
+ 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'},
+ 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'},
+ 'first_row_as_header': {'key': 'firstRowAsHeader', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(TextFormat, self).__init__(**kwargs)
+ self.column_delimiter = kwargs.get('column_delimiter', None)
+ self.row_delimiter = kwargs.get('row_delimiter', None)
+ self.escape_char = kwargs.get('escape_char', None)
+ self.quote_char = kwargs.get('quote_char', None)
+ self.null_value = kwargs.get('null_value', None)
+ self.encoding_name = kwargs.get('encoding_name', None)
+ self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None)
+ self.skip_line_count = kwargs.get('skip_line_count', None)
+ self.first_row_as_header = kwargs.get('first_row_as_header', None)
+ self.type = 'TextFormat'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format_py3.py
new file mode 100644
index 000000000000..0d876f62b112
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format_py3.py
@@ -0,0 +1,99 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
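A usage sketch for the TextFormat model completed above, with illustrative values: the delimiters and flags are passed here as plain Python values, but per the docstrings each can also be an ADF expression object.

from azure.mgmt.datafactory.models import TextFormat

text_format = TextFormat(
    column_delimiter=',',
    row_delimiter='\n',
    quote_char='"',
    encoding_name='utf-8',
    first_row_as_header=True,
)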
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_storage_format_py3 import DatasetStorageFormat
+
+
+class TextFormat(DatasetStorageFormat):
+ """The data stored in text format.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param serializer: Serializer. Type: string (or Expression with resultType
+ string).
+ :type serializer: object
+ :param deserializer: Deserializer. Type: string (or Expression with
+ resultType string).
+ :type deserializer: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param column_delimiter: The column delimiter. Type: string (or Expression
+ with resultType string).
+ :type column_delimiter: object
+ :param row_delimiter: The row delimiter. Type: string (or Expression with
+ resultType string).
+ :type row_delimiter: object
+ :param escape_char: The escape character. Type: string (or Expression with
+ resultType string).
+ :type escape_char: object
+ :param quote_char: The quote character. Type: string (or Expression with
+ resultType string).
+ :type quote_char: object
+ :param null_value: The null value string. Type: string (or Expression with
+ resultType string).
+ :type null_value: object
+ :param encoding_name: The code page name of the preferred encoding. If
+ not specified, the default value is "utf-8", unless the BOM denotes
+ another Unicode encoding. Refer to the "Name" column of the table in the
+ following link for the set of supported values:
+ https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string
+ (or Expression with resultType string).
+ :type encoding_name: object
+ :param treat_empty_as_null: Treat empty column values in the text file as
+ null. The default value is true. Type: boolean (or Expression with
+ resultType boolean).
+ :type treat_empty_as_null: object
+ :param skip_line_count: The number of lines/rows to be skipped when
+ parsing text files. The default value is 0. Type: integer (or Expression
+ with resultType integer).
+ :type skip_line_count: object
+ :param first_row_as_header: When used as input, treat the first row of
+ data as headers. When used as output, write the headers into the output
+ as the first row of data. The default value is false. Type: boolean (or
+ Expression with resultType boolean).
+ :type first_row_as_header: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'column_delimiter': {'key': 'columnDelimiter', 'type': 'object'}, + 'row_delimiter': {'key': 'rowDelimiter', 'type': 'object'}, + 'escape_char': {'key': 'escapeChar', 'type': 'object'}, + 'quote_char': {'key': 'quoteChar', 'type': 'object'}, + 'null_value': {'key': 'nullValue', 'type': 'object'}, + 'encoding_name': {'key': 'encodingName', 'type': 'object'}, + 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, + 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, + 'first_row_as_header': {'key': 'firstRowAsHeader', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, column_delimiter=None, row_delimiter=None, escape_char=None, quote_char=None, null_value=None, encoding_name=None, treat_empty_as_null=None, skip_line_count=None, first_row_as_header=None, **kwargs) -> None: + super(TextFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) + self.column_delimiter = column_delimiter + self.row_delimiter = row_delimiter + self.escape_char = escape_char + self.quote_char = quote_char + self.null_value = null_value + self.encoding_name = encoding_name + self.treat_empty_as_null = treat_empty_as_null + self.skip_line_count = skip_line_count + self.first_row_as_header = first_row_as_header + self.type = 'TextFormat' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py new file mode 100644 index 000000000000..728ffc32bcb5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class Trigger(Model): + """Azure data factory nested object which contains information about creating + pipeline run. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: RerunTumblingWindowTrigger, TumblingWindowTrigger, + MultiplePipelineTrigger + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. 
Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} + } + + def __init__(self, **kwargs): + super(Trigger, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.description = kwargs.get('description', None) + self.runtime_state = None + self.annotations = kwargs.get('annotations', None) + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference.py new file mode 100644 index 000000000000..089aa9a3e5fc --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dependency_reference import DependencyReference + + +class TriggerDependencyReference(DependencyReference): + """Trigger referenced dependency. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: TumblingWindowTriggerDependencyReference + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param reference_trigger: Required. Referenced trigger. 
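Because Trigger is an abstract base whose runtime_state is read-only and server-populated, a caller observes it rather than sets it. A hedged sketch, assuming an already-authenticated DataFactoryManagementClient and placeholder resource names:

from azure.mgmt.datafactory import DataFactoryManagementClient

def print_trigger_state(client: DataFactoryManagementClient) -> None:
    # Fetch the trigger resource; names here are placeholders.
    resource = client.triggers.get('my-rg', 'my-factory', 'DailyTrigger')
    # runtime_state is read-only: 'Started', 'Stopped' or 'Disabled'.
    print(resource.properties.runtime_state)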
+ :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference + """ + + _validation = { + 'type': {'required': True}, + 'reference_trigger': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'}, + } + + _subtype_map = { + 'type': {'TumblingWindowTriggerDependencyReference': 'TumblingWindowTriggerDependencyReference'} + } + + def __init__(self, **kwargs): + super(TriggerDependencyReference, self).__init__(**kwargs) + self.reference_trigger = kwargs.get('reference_trigger', None) + self.type = 'TriggerDependencyReference' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference_py3.py new file mode 100644 index 000000000000..716a0d926f8b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dependency_reference_py3 import DependencyReference + + +class TriggerDependencyReference(DependencyReference): + """Trigger referenced dependency. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: TumblingWindowTriggerDependencyReference + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param reference_trigger: Required. Referenced trigger. + :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference + """ + + _validation = { + 'type': {'required': True}, + 'reference_trigger': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'}, + } + + _subtype_map = { + 'type': {'TumblingWindowTriggerDependencyReference': 'TumblingWindowTriggerDependencyReference'} + } + + def __init__(self, *, reference_trigger, **kwargs) -> None: + super(TriggerDependencyReference, self).__init__(**kwargs) + self.reference_trigger = reference_trigger + self.type = 'TriggerDependencyReference' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference.py new file mode 100644 index 000000000000..70c9f2904347 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TriggerPipelineReference(Model): + """Pipeline that needs to be triggered with the given parameters. + + :param pipeline_reference: Pipeline reference. + :type pipeline_reference: ~azure.mgmt.datafactory.models.PipelineReference + :param parameters: Pipeline parameters. + :type parameters: dict[str, object] + """ + + _attribute_map = { + 'pipeline_reference': {'key': 'pipelineReference', 'type': 'PipelineReference'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + def __init__(self, **kwargs): + super(TriggerPipelineReference, self).__init__(**kwargs) + self.pipeline_reference = kwargs.get('pipeline_reference', None) + self.parameters = kwargs.get('parameters', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference_py3.py new file mode 100644 index 000000000000..e32af8006326 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference_py3.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TriggerPipelineReference(Model): + """Pipeline that needs to be triggered with the given parameters. + + :param pipeline_reference: Pipeline reference. + :type pipeline_reference: ~azure.mgmt.datafactory.models.PipelineReference + :param parameters: Pipeline parameters. + :type parameters: dict[str, object] + """ + + _attribute_map = { + 'pipeline_reference': {'key': 'pipelineReference', 'type': 'PipelineReference'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + def __init__(self, *, pipeline_reference=None, parameters=None, **kwargs) -> None: + super(TriggerPipelineReference, self).__init__(**kwargs) + self.pipeline_reference = pipeline_reference + self.parameters = parameters diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py new file mode 100644 index 000000000000..862973544ab4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class Trigger(Model): + """Azure data factory nested object which contains information about creating + pipeline run. 
+ + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: RerunTumblingWindowTrigger, TumblingWindowTrigger, + MultiplePipelineTrigger + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} + } + + def __init__(self, *, additional_properties=None, description: str=None, annotations=None, **kwargs) -> None: + super(Trigger, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.description = description + self.runtime_state = None + self.annotations = annotations + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference.py new file mode 100644 index 000000000000..a4f952dac85f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TriggerReference(Model): + """Trigger reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Trigger reference type. Default value: + "TriggerReference" . + :vartype type: str + :param reference_name: Required. Reference trigger name. 
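# Usage sketch (editor's illustration, not AutoRest output): only the
# trigger name is supplied; `type` is a class-level constant.
from azure.mgmt.datafactory.models import TriggerReference

ref = TriggerReference(reference_name='MyTumblingWindowTrigger')
assert ref.type == 'TriggerReference'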
+ :type reference_name: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + type = "TriggerReference" + + def __init__(self, **kwargs): + super(TriggerReference, self).__init__(**kwargs) + self.reference_name = kwargs.get('reference_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference_py3.py new file mode 100644 index 000000000000..805e407e80a7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference_py3.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TriggerReference(Model): + """Trigger reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Trigger reference type. Default value: + "TriggerReference" . + :vartype type: str + :param reference_name: Required. Reference trigger name. + :type reference_name: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + type = "TriggerReference" + + def __init__(self, *, reference_name: str, **kwargs) -> None: + super(TriggerReference, self).__init__(**kwargs) + self.reference_name = reference_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource.py new file mode 100644 index 000000000000..539ac4775350 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .sub_resource import SubResource + + +class TriggerResource(SubResource): + """Trigger resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. 
+ :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Properties of the trigger. + :type properties: ~azure.mgmt.datafactory.models.Trigger + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'Trigger'}, + } + + def __init__(self, **kwargs): + super(TriggerResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource_paged.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource_paged.py new file mode 100644 index 000000000000..1a7a003f4a6e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.paging import Paged + + +class TriggerResourcePaged(Paged): + """ + A paging container for iterating over a list of :class:`TriggerResource ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[TriggerResource]'} + } + + def __init__(self, *args, **kwargs): + + super(TriggerResourcePaged, self).__init__(*args, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource_py3.py new file mode 100644 index 000000000000..ae6a04ac3128 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource_py3.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .sub_resource_py3 import SubResource + + +class TriggerResource(SubResource): + """Trigger resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Properties of the trigger. 
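# Usage sketch (editor's illustration, not AutoRest output): listing
# returns TriggerResource items whose `properties` holds the concrete
# Trigger subtype. Assumes an authenticated `adf_client`.
for resource in adf_client.triggers.list_by_factory('my-rg', 'my-factory'):
    print(resource.name, type(resource.properties).__name__)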
+ :type properties: ~azure.mgmt.datafactory.models.Trigger + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'Trigger'}, + } + + def __init__(self, *, properties, **kwargs) -> None: + super(TriggerResource, self).__init__(**kwargs) + self.properties = properties diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run.py new file mode 100644 index 000000000000..9fad7bbfd9fa --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TriggerRun(Model): + """Trigger runs. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar trigger_run_id: Trigger run id. + :vartype trigger_run_id: str + :ivar trigger_name: Trigger name. + :vartype trigger_name: str + :ivar trigger_type: Trigger type. + :vartype trigger_type: str + :ivar trigger_run_timestamp: Trigger run start time. + :vartype trigger_run_timestamp: datetime + :ivar status: Trigger run status. Possible values include: 'Succeeded', + 'Failed', 'Inprogress' + :vartype status: str or ~azure.mgmt.datafactory.models.TriggerRunStatus + :ivar message: Trigger error message. + :vartype message: str + :ivar properties: List of property name and value related to trigger run. + Name, value pair depends on type of trigger. + :vartype properties: dict[str, str] + :ivar triggered_pipelines: List of pipeline name and run Id triggered by + the trigger run. 
+ :vartype triggered_pipelines: dict[str, str] + """ + + _validation = { + 'trigger_run_id': {'readonly': True}, + 'trigger_name': {'readonly': True}, + 'trigger_type': {'readonly': True}, + 'trigger_run_timestamp': {'readonly': True}, + 'status': {'readonly': True}, + 'message': {'readonly': True}, + 'properties': {'readonly': True}, + 'triggered_pipelines': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'trigger_run_id': {'key': 'triggerRunId', 'type': 'str'}, + 'trigger_name': {'key': 'triggerName', 'type': 'str'}, + 'trigger_type': {'key': 'triggerType', 'type': 'str'}, + 'trigger_run_timestamp': {'key': 'triggerRunTimestamp', 'type': 'iso-8601'}, + 'status': {'key': 'status', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'triggered_pipelines': {'key': 'triggeredPipelines', 'type': '{str}'}, + } + + def __init__(self, **kwargs): + super(TriggerRun, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.trigger_run_id = None + self.trigger_name = None + self.trigger_type = None + self.trigger_run_timestamp = None + self.status = None + self.message = None + self.properties = None + self.triggered_pipelines = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run_py3.py new file mode 100644 index 000000000000..5a9fe50f6894 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run_py3.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TriggerRun(Model): + """Trigger runs. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar trigger_run_id: Trigger run id. + :vartype trigger_run_id: str + :ivar trigger_name: Trigger name. + :vartype trigger_name: str + :ivar trigger_type: Trigger type. + :vartype trigger_type: str + :ivar trigger_run_timestamp: Trigger run start time. + :vartype trigger_run_timestamp: datetime + :ivar status: Trigger run status. Possible values include: 'Succeeded', + 'Failed', 'Inprogress' + :vartype status: str or ~azure.mgmt.datafactory.models.TriggerRunStatus + :ivar message: Trigger error message. + :vartype message: str + :ivar properties: List of property name and value related to trigger run. + Name, value pair depends on type of trigger. + :vartype properties: dict[str, str] + :ivar triggered_pipelines: List of pipeline name and run Id triggered by + the trigger run. 
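# Usage sketch (editor's illustration, not AutoRest output): querying the
# read-only TriggerRun records for the last day. Assumes an authenticated
# `adf_client` and that the query takes RunFilterParameters, as the other
# run-query operations in this SDK do.
from datetime import datetime, timedelta
from azure.mgmt.datafactory.models import RunFilterParameters

filters = RunFilterParameters(
    last_updated_after=datetime.utcnow() - timedelta(days=1),
    last_updated_before=datetime.utcnow())
response = adf_client.trigger_runs.query_by_factory('my-rg', 'my-factory', filters)
for run in response.value:
    print(run.trigger_name, run.status, run.triggered_pipelines)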
+ :vartype triggered_pipelines: dict[str, str] + """ + + _validation = { + 'trigger_run_id': {'readonly': True}, + 'trigger_name': {'readonly': True}, + 'trigger_type': {'readonly': True}, + 'trigger_run_timestamp': {'readonly': True}, + 'status': {'readonly': True}, + 'message': {'readonly': True}, + 'properties': {'readonly': True}, + 'triggered_pipelines': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'trigger_run_id': {'key': 'triggerRunId', 'type': 'str'}, + 'trigger_name': {'key': 'triggerName', 'type': 'str'}, + 'trigger_type': {'key': 'triggerType', 'type': 'str'}, + 'trigger_run_timestamp': {'key': 'triggerRunTimestamp', 'type': 'iso-8601'}, + 'status': {'key': 'status', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'triggered_pipelines': {'key': 'triggeredPipelines', 'type': '{str}'}, + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(TriggerRun, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.trigger_run_id = None + self.trigger_name = None + self.trigger_type = None + self.trigger_run_timestamp = None + self.status = None + self.message = None + self.properties = None + self.triggered_pipelines = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response.py new file mode 100644 index 000000000000..7684fe7eb7dc --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TriggerRunsQueryResponse(Model): + """A list of trigger runs. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of trigger runs. + :type value: list[~azure.mgmt.datafactory.models.TriggerRun] + :param continuation_token: The continuation token for getting the next + page of results, if any remaining results exist, null otherwise. 
+ :type continuation_token: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[TriggerRun]'}, + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(TriggerRunsQueryResponse, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.continuation_token = kwargs.get('continuation_token', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response_py3.py new file mode 100644 index 000000000000..391a2441b3d1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response_py3.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TriggerRunsQueryResponse(Model): + """A list of trigger runs. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of trigger runs. + :type value: list[~azure.mgmt.datafactory.models.TriggerRun] + :param continuation_token: The continuation token for getting the next + page of results, if any remaining results exist, null otherwise. + :type continuation_token: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[TriggerRun]'}, + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + } + + def __init__(self, *, value, continuation_token: str=None, **kwargs) -> None: + super(TriggerRunsQueryResponse, self).__init__(**kwargs) + self.value = value + self.continuation_token = continuation_token diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_subscription_operation_status.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_subscription_operation_status.py new file mode 100644 index 000000000000..6a581e757840 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_subscription_operation_status.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TriggerSubscriptionOperationStatus(Model): + """Defines the response of a trigger subscription operation. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar trigger_name: Trigger name. + :vartype trigger_name: str + :ivar status: Event Subscription Status. 
Possible values include: + 'Enabled', 'Provisioning', 'Deprovisioning', 'Disabled', 'Unknown' + :vartype status: str or + ~azure.mgmt.datafactory.models.EventSubscriptionStatus + """ + + _validation = { + 'trigger_name': {'readonly': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'trigger_name': {'key': 'triggerName', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(TriggerSubscriptionOperationStatus, self).__init__(**kwargs) + self.trigger_name = None + self.status = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_subscription_operation_status_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_subscription_operation_status_py3.py new file mode 100644 index 000000000000..40ead4c50fe4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_subscription_operation_status_py3.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TriggerSubscriptionOperationStatus(Model): + """Defines the response of a trigger subscription operation. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar trigger_name: Trigger name. + :vartype trigger_name: str + :ivar status: Event Subscription Status. Possible values include: + 'Enabled', 'Provisioning', 'Deprovisioning', 'Disabled', 'Unknown' + :vartype status: str or + ~azure.mgmt.datafactory.models.EventSubscriptionStatus + """ + + _validation = { + 'trigger_name': {'readonly': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'trigger_name': {'key': 'triggerName', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(TriggerSubscriptionOperationStatus, self).__init__(**kwargs) + self.trigger_name = None + self.status = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger.py new file mode 100644 index 000000000000..939624ae5042 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger.py @@ -0,0 +1,112 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .trigger import Trigger + + +class TumblingWindowTrigger(Trigger): + """Trigger that schedules pipeline runs for all fixed time interval windows + from a start time without gaps and also supports backfill scenarios (when + start time is in the past). 
+ + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param pipeline: Required. Pipeline for which runs are created when an + event is fired for trigger window that is ready. + :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference + :param frequency: Required. The frequency of the time windows. Possible + values include: 'Minute', 'Hour' + :type frequency: str or + ~azure.mgmt.datafactory.models.TumblingWindowFrequency + :param interval: Required. The interval of the time windows. The minimum + interval allowed is 15 Minutes. + :type interval: int + :param start_time: Required. The start time for the time period for the + trigger during which events are fired for windows that are ready. Only UTC + time is currently supported. + :type start_time: datetime + :param end_time: The end time for the time period for the trigger during + which events are fired for windows that are ready. Only UTC time is + currently supported. + :type end_time: datetime + :param delay: Specifies how long the trigger waits past due time before + triggering new run. It doesn't alter window start and end time. The + default is 0. Type: string (or Expression with resultType string), + pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type delay: object + :param max_concurrency: Required. The max number of parallel time windows + (ready for execution) for which a new run is triggered. + :type max_concurrency: int + :param retry_policy: Retry policy that will be applied for failed pipeline + runs. + :type retry_policy: ~azure.mgmt.datafactory.models.RetryPolicy + :param depends_on: Triggers that this trigger depends on. Only tumbling + window triggers are supported. 
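# Usage sketch (editor's illustration, not AutoRest output): a minimal
# hourly tumbling window trigger built from the fields documented above;
# the pipeline name and window parameter are hypothetical.
from datetime import datetime
from azure.mgmt.datafactory.models import (
    PipelineReference, TriggerPipelineReference, TumblingWindowTrigger)

trigger = TumblingWindowTrigger(
    pipeline=TriggerPipelineReference(
        pipeline_reference=PipelineReference(reference_name='CopyPipeline'),
        parameters={'windowStart': "@trigger().outputs.windowStartTime"}),
    frequency='Hour',                 # 'Minute' or 'Hour'
    interval=1,
    start_time=datetime(2019, 1, 1),  # a past start time enables backfill
    max_concurrency=10)               # allowed range: 1-50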
+ :type depends_on: list[~azure.mgmt.datafactory.models.DependencyReference] + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'pipeline': {'required': True}, + 'frequency': {'required': True}, + 'interval': {'required': True}, + 'start_time': {'required': True}, + 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, + 'frequency': {'key': 'typeProperties.frequency', 'type': 'str'}, + 'interval': {'key': 'typeProperties.interval', 'type': 'int'}, + 'start_time': {'key': 'typeProperties.startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'typeProperties.endTime', 'type': 'iso-8601'}, + 'delay': {'key': 'typeProperties.delay', 'type': 'object'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + 'retry_policy': {'key': 'typeProperties.retryPolicy', 'type': 'RetryPolicy'}, + 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[DependencyReference]'}, + } + + def __init__(self, **kwargs): + super(TumblingWindowTrigger, self).__init__(**kwargs) + self.pipeline = kwargs.get('pipeline', None) + self.frequency = kwargs.get('frequency', None) + self.interval = kwargs.get('interval', None) + self.start_time = kwargs.get('start_time', None) + self.end_time = kwargs.get('end_time', None) + self.delay = kwargs.get('delay', None) + self.max_concurrency = kwargs.get('max_concurrency', None) + self.retry_policy = kwargs.get('retry_policy', None) + self.depends_on = kwargs.get('depends_on', None) + self.type = 'TumblingWindowTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference.py new file mode 100644 index 000000000000..89dcefbc8c09 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .trigger_dependency_reference import TriggerDependencyReference + + +class TumblingWindowTriggerDependencyReference(TriggerDependencyReference): + """Referenced tumbling window trigger dependency. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param reference_trigger: Required. Referenced trigger. + :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference + :param offset: Timespan applied to the start time of a tumbling window + when evaluating dependency. + :type offset: str + :param size: The size of the window when evaluating the dependency. 
If + undefined the frequency of the tumbling window will be used. + :type size: str + """ + + _validation = { + 'type': {'required': True}, + 'reference_trigger': {'required': True}, + 'offset': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'}, + 'offset': {'key': 'offset', 'type': 'str'}, + 'size': {'key': 'size', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(TumblingWindowTriggerDependencyReference, self).__init__(**kwargs) + self.offset = kwargs.get('offset', None) + self.size = kwargs.get('size', None) + self.type = 'TumblingWindowTriggerDependencyReference' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference_py3.py new file mode 100644 index 000000000000..648f25e59937 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference_py3.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .trigger_dependency_reference_py3 import TriggerDependencyReference + + +class TumblingWindowTriggerDependencyReference(TriggerDependencyReference): + """Referenced tumbling window trigger dependency. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param reference_trigger: Required. Referenced trigger. + :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference + :param offset: Timespan applied to the start time of a tumbling window + when evaluating dependency. + :type offset: str + :param size: The size of the window when evaluating the dependency. If + undefined the frequency of the tumbling window will be used. 
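# Editor's illustration (not AutoRest output): checking offset/size values
# against the timespan pattern declared in _validation above.
import re

TIMESPAN = r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'
for candidate in ('01:00:00', '7.00:30:00', '1:00:00'):
    ok = bool(re.fullmatch(TIMESPAN, candidate)) and 8 <= len(candidate) <= 15
    print(candidate, 'valid' if ok else 'invalid')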
+ :type size: str + """ + + _validation = { + 'type': {'required': True}, + 'reference_trigger': {'required': True}, + 'offset': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'}, + 'offset': {'key': 'offset', 'type': 'str'}, + 'size': {'key': 'size', 'type': 'str'}, + } + + def __init__(self, *, reference_trigger, offset: str=None, size: str=None, **kwargs) -> None: + super(TumblingWindowTriggerDependencyReference, self).__init__(reference_trigger=reference_trigger, **kwargs) + self.offset = offset + self.size = size + self.type = 'TumblingWindowTriggerDependencyReference' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_py3.py new file mode 100644 index 000000000000..6856629c8b91 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_py3.py @@ -0,0 +1,112 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .trigger_py3 import Trigger + + +class TumblingWindowTrigger(Trigger): + """Trigger that schedules pipeline runs for all fixed time interval windows + from a start time without gaps and also supports backfill scenarios (when + start time is in the past). + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param pipeline: Required. Pipeline for which runs are created when an + event is fired for trigger window that is ready. + :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference + :param frequency: Required. The frequency of the time windows. Possible + values include: 'Minute', 'Hour' + :type frequency: str or + ~azure.mgmt.datafactory.models.TumblingWindowFrequency + :param interval: Required. The interval of the time windows. The minimum + interval allowed is 15 Minutes. + :type interval: int + :param start_time: Required. 
The start time for the time period for the + trigger during which events are fired for windows that are ready. Only UTC + time is currently supported. + :type start_time: datetime + :param end_time: The end time for the time period for the trigger during + which events are fired for windows that are ready. Only UTC time is + currently supported. + :type end_time: datetime + :param delay: Specifies how long the trigger waits past due time before + triggering new run. It doesn't alter window start and end time. The + default is 0. Type: string (or Expression with resultType string), + pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type delay: object + :param max_concurrency: Required. The max number of parallel time windows + (ready for execution) for which a new run is triggered. + :type max_concurrency: int + :param retry_policy: Retry policy that will be applied for failed pipeline + runs. + :type retry_policy: ~azure.mgmt.datafactory.models.RetryPolicy + :param depends_on: Triggers that this trigger depends on. Only tumbling + window triggers are supported. + :type depends_on: list[~azure.mgmt.datafactory.models.DependencyReference] + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'pipeline': {'required': True}, + 'frequency': {'required': True}, + 'interval': {'required': True}, + 'start_time': {'required': True}, + 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, + 'frequency': {'key': 'typeProperties.frequency', 'type': 'str'}, + 'interval': {'key': 'typeProperties.interval', 'type': 'int'}, + 'start_time': {'key': 'typeProperties.startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'typeProperties.endTime', 'type': 'iso-8601'}, + 'delay': {'key': 'typeProperties.delay', 'type': 'object'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + 'retry_policy': {'key': 'typeProperties.retryPolicy', 'type': 'RetryPolicy'}, + 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[DependencyReference]'}, + } + + def __init__(self, *, pipeline, frequency, interval: int, start_time, max_concurrency: int, additional_properties=None, description: str=None, annotations=None, end_time=None, delay=None, retry_policy=None, depends_on=None, **kwargs) -> None: + super(TumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) + self.pipeline = pipeline + self.frequency = frequency + self.interval = interval + self.start_time = start_time + self.end_time = end_time + self.delay = delay + self.max_concurrency = max_concurrency + self.retry_policy = retry_policy + self.depends_on = depends_on + self.type = 'TumblingWindowTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity.py new file mode 100644 index 000000000000..eede36501d6c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# 
--------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .control_activity import ControlActivity
+
+
+class UntilActivity(ControlActivity):
+ """This activity executes inner activities until the specified boolean
+ expression evaluates to true or the timeout is reached, whichever is
+ earlier.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param expression: Required. An expression that evaluates to a Boolean.
+ The loop will continue until this expression evaluates to true.
+ :type expression: ~azure.mgmt.datafactory.models.Expression
+ :param timeout: Specifies the timeout for the activity to run. If no
+ value is specified, it defaults to TimeSpan.FromDays(7), which is one
+ week. Type: string (or Expression with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type timeout: object
+ :param activities: Required. List of activities to execute.
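# Usage sketch (editor's illustration, not AutoRest output): poll until a
# variable flips to true, giving up after one hour; the expression and the
# inner WaitActivity are placeholders.
from azure.mgmt.datafactory.models import (
    Expression, UntilActivity, WaitActivity)

until = UntilActivity(
    name='WaitForFlag',
    expression=Expression(value="@equals(variables('done'), true)"),
    timeout='01:00:00',  # when omitted, defaults to TimeSpan.FromDays(7)
    activities=[WaitActivity(name='PollDelay', wait_time_in_seconds=30)])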
+ :type activities: list[~azure.mgmt.datafactory.models.Activity]
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'expression': {'required': True},
+ 'activities': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'expression': {'key': 'typeProperties.expression', 'type': 'Expression'},
+ 'timeout': {'key': 'typeProperties.timeout', 'type': 'object'},
+ 'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'},
+ }
+
+ def __init__(self, **kwargs):
+ super(UntilActivity, self).__init__(**kwargs)
+ self.expression = kwargs.get('expression', None)
+ self.timeout = kwargs.get('timeout', None)
+ self.activities = kwargs.get('activities', None)
+ self.type = 'Until'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity_py3.py
new file mode 100644
index 000000000000..40c03ce18591
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity_py3.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .control_activity_py3 import ControlActivity
+
+
+class UntilActivity(ControlActivity):
+ """This activity executes inner activities until the specified boolean
+ expression evaluates to true or the timeout is reached, whichever is
+ earlier.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param expression: Required. An expression that evaluates to a Boolean.
+ The loop will continue until this expression evaluates to true.
+ :type expression: ~azure.mgmt.datafactory.models.Expression
+ :param timeout: Specifies the timeout for the activity to run. If no
+ value is specified, it defaults to TimeSpan.FromDays(7), which is one
+ week. Type: string (or Expression with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type timeout: object
+ :param activities: Required.
List of activities to execute. + :type activities: list[~azure.mgmt.datafactory.models.Activity] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'expression': {'required': True}, + 'activities': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'expression': {'key': 'typeProperties.expression', 'type': 'Expression'}, + 'timeout': {'key': 'typeProperties.timeout', 'type': 'object'}, + 'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'}, + } + + def __init__(self, *, name: str, expression, activities, additional_properties=None, description: str=None, depends_on=None, user_properties=None, timeout=None, **kwargs) -> None: + super(UntilActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.expression = expression + self.timeout = timeout + self.activities = activities + self.type = 'Until' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request.py new file mode 100644 index 000000000000..c6460310225a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class UpdateIntegrationRuntimeNodeRequest(Model): + """Update integration runtime node request. + + :param concurrent_jobs_limit: The number of concurrent jobs permitted to + run on the integration runtime node. Values between 1 and + maxConcurrentJobs(inclusive) are allowed. + :type concurrent_jobs_limit: int + """ + + _validation = { + 'concurrent_jobs_limit': {'minimum': 1}, + } + + _attribute_map = { + 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(UpdateIntegrationRuntimeNodeRequest, self).__init__(**kwargs) + self.concurrent_jobs_limit = kwargs.get('concurrent_jobs_limit', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request_py3.py new file mode 100644 index 000000000000..de1605885139 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request_py3.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class UpdateIntegrationRuntimeNodeRequest(Model):
+ """Update integration runtime node request.
+
+ :param concurrent_jobs_limit: The number of concurrent jobs permitted to
+ run on the integration runtime node. Values between 1 and
+ maxConcurrentJobs(inclusive) are allowed.
+ :type concurrent_jobs_limit: int
+ """
+
+ _validation = {
+ 'concurrent_jobs_limit': {'minimum': 1},
+ }
+
+ _attribute_map = {
+ 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'},
+ }
+
+ def __init__(self, *, concurrent_jobs_limit: int=None, **kwargs) -> None:
+ super(UpdateIntegrationRuntimeNodeRequest, self).__init__(**kwargs)
+ self.concurrent_jobs_limit = concurrent_jobs_limit
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request.py
new file mode 100644
index 000000000000..bd5e332b50f5
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request.py
@@ -0,0 +1,38 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class UpdateIntegrationRuntimeRequest(Model):
+ """Update integration runtime request.
+
+ :param auto_update: Enables or disables the auto-update feature of the
+ self-hosted integration runtime. See
+ https://go.microsoft.com/fwlink/?linkid=854189. Possible values include:
+ 'On', 'Off'
+ :type auto_update: str or
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate
+ :param update_delay_offset: The time offset (in hours) in the day, e.g.,
+ PT03H is 3 hours. The integration runtime auto-update will happen at that
+ time.
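
A minimal usage sketch for the UpdateIntegrationRuntimeRequest model described here (an illustration, not part of the generated file; the request would typically be passed to the integration runtime update operation of a configured DataFactoryManagementClient, which is an assumption about the surrounding client API)::

    from azure.mgmt.datafactory.models import UpdateIntegrationRuntimeRequest

    # Turn auto-update on and schedule it at the three-hour offset (PT03H).
    request = UpdateIntegrationRuntimeRequest(
        auto_update='On',
        update_delay_offset='PT03H',
    )
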
+ :type update_delay_offset: str
+ """
+
+ _attribute_map = {
+ 'auto_update': {'key': 'autoUpdate', 'type': 'str'},
+ 'update_delay_offset': {'key': 'updateDelayOffset', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(UpdateIntegrationRuntimeRequest, self).__init__(**kwargs)
+ self.auto_update = kwargs.get('auto_update', None)
+ self.update_delay_offset = kwargs.get('update_delay_offset', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request_py3.py
new file mode 100644
index 000000000000..731cb942b472
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request_py3.py
@@ -0,0 +1,38 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class UpdateIntegrationRuntimeRequest(Model):
+ """Update integration runtime request.
+
+ :param auto_update: Enables or disables the auto-update feature of the
+ self-hosted integration runtime. See
+ https://go.microsoft.com/fwlink/?linkid=854189. Possible values include:
+ 'On', 'Off'
+ :type auto_update: str or
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate
+ :param update_delay_offset: The time offset (in hours) in the day, e.g.,
+ PT03H is 3 hours. The integration runtime auto-update will happen at that
+ time.
+ :type update_delay_offset: str
+ """
+
+ _attribute_map = {
+ 'auto_update': {'key': 'autoUpdate', 'type': 'str'},
+ 'update_delay_offset': {'key': 'updateDelayOffset', 'type': 'str'},
+ }
+
+ def __init__(self, *, auto_update=None, update_delay_offset: str=None, **kwargs) -> None:
+ super(UpdateIntegrationRuntimeRequest, self).__init__(**kwargs)
+ self.auto_update = auto_update
+ self.update_delay_offset = update_delay_offset
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy.py
new file mode 100644
index 000000000000..b51e313b6f0c
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy.py
@@ -0,0 +1,51 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class UserAccessPolicy(Model):
+ """Get Data Plane read only token request definition.
+
+ :param permissions: The string with permissions for Data Plane access.
+ Currently only 'r' is supported which grants read only access.
+ :type permissions: str + :param access_resource_path: The resource path to get access relative to + factory. Currently only empty string is supported which corresponds to the + factory resource. + :type access_resource_path: str + :param profile_name: The name of the profile. Currently only the default + is supported. The default value is DefaultProfile. + :type profile_name: str + :param start_time: Start time for the token. If not specified the current + time will be used. + :type start_time: str + :param expire_time: Expiration time for the token. Maximum duration for + the token is eight hours and by default the token will expire in eight + hours. + :type expire_time: str + """ + + _attribute_map = { + 'permissions': {'key': 'permissions', 'type': 'str'}, + 'access_resource_path': {'key': 'accessResourcePath', 'type': 'str'}, + 'profile_name': {'key': 'profileName', 'type': 'str'}, + 'start_time': {'key': 'startTime', 'type': 'str'}, + 'expire_time': {'key': 'expireTime', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(UserAccessPolicy, self).__init__(**kwargs) + self.permissions = kwargs.get('permissions', None) + self.access_resource_path = kwargs.get('access_resource_path', None) + self.profile_name = kwargs.get('profile_name', None) + self.start_time = kwargs.get('start_time', None) + self.expire_time = kwargs.get('expire_time', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy_py3.py new file mode 100644 index 000000000000..26e2a7639a09 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy_py3.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class UserAccessPolicy(Model): + """Get Data Plane read only token request definition. + + :param permissions: The string with permissions for Data Plane access. + Currently only 'r' is supported which grants read only access. + :type permissions: str + :param access_resource_path: The resource path to get access relative to + factory. Currently only empty string is supported which corresponds to the + factory resource. + :type access_resource_path: str + :param profile_name: The name of the profile. Currently only the default + is supported. The default value is DefaultProfile. + :type profile_name: str + :param start_time: Start time for the token. If not specified the current + time will be used. + :type start_time: str + :param expire_time: Expiration time for the token. Maximum duration for + the token is eight hours and by default the token will expire in eight + hours. 
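
A hypothetical sketch of the UserAccessPolicy model defined here (the timestamps are placeholders chosen to match the eight-hour maximum in the docstring; submitting the policy through a factory data-plane access operation is an assumption about the surrounding client API)::

    from azure.mgmt.datafactory.models import UserAccessPolicy

    # Read-only token for the whole factory (empty resource path), valid
    # for the maximum eight hours.
    policy = UserAccessPolicy(
        permissions='r',
        access_resource_path='',
        profile_name='DefaultProfile',
        start_time='2019-06-07T00:00:00Z',
        expire_time='2019-06-07T08:00:00Z',
    )
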
+ :type expire_time: str + """ + + _attribute_map = { + 'permissions': {'key': 'permissions', 'type': 'str'}, + 'access_resource_path': {'key': 'accessResourcePath', 'type': 'str'}, + 'profile_name': {'key': 'profileName', 'type': 'str'}, + 'start_time': {'key': 'startTime', 'type': 'str'}, + 'expire_time': {'key': 'expireTime', 'type': 'str'}, + } + + def __init__(self, *, permissions: str=None, access_resource_path: str=None, profile_name: str=None, start_time: str=None, expire_time: str=None, **kwargs) -> None: + super(UserAccessPolicy, self).__init__(**kwargs) + self.permissions = permissions + self.access_resource_path = access_resource_path + self.profile_name = profile_name + self.start_time = start_time + self.expire_time = expire_time diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property.py new file mode 100644 index 000000000000..30692d2960ec --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class UserProperty(Model): + """User property. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. User property name. + :type name: str + :param value: Required. User property value. Type: string (or Expression + with resultType string). + :type value: object + """ + + _validation = { + 'name': {'required': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(UserProperty, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.value = kwargs.get('value', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property_py3.py new file mode 100644 index 000000000000..7b4f3beb0195 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property_py3.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class UserProperty(Model): + """User property. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. User property name. + :type name: str + :param value: Required. User property value. Type: string (or Expression + with resultType string). 
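
As a usage sketch for the UserProperty model (the property name and the pipeline parameter it surfaces are hypothetical; such a list would be assigned to an activity's user_properties)::

    from azure.mgmt.datafactory.models import UserProperty

    # Surface a run-time value in activity-run monitoring views.
    user_properties = [
        UserProperty(name='SourceFile',
                     value="@pipeline().parameters.fileName"),
    ]
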
+ :type value: object + """ + + _validation = { + 'name': {'required': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'object'}, + } + + def __init__(self, *, name: str, value, **kwargs) -> None: + super(UserProperty, self).__init__(**kwargs) + self.name = name + self.value = value diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity.py new file mode 100644 index 000000000000..0d92d32c12b0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity.py @@ -0,0 +1,81 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity import ControlActivity + + +class ValidationActivity(ControlActivity): + """This activity verifies that an external resource exists. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param timeout: Specifies the timeout for the activity to run. If there is + no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 + week as default. Type: string (or Expression with resultType string), + pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type timeout: object + :param sleep: A delay in seconds between validation attempts. If no value + is specified, 10 seconds will be used as the default. Type: integer (or + Expression with resultType integer). + :type sleep: object + :param minimum_size: Can be used if dataset points to a file. The file + must be greater than or equal in size to the value specified. Type: + integer (or Expression with resultType integer). + :type minimum_size: object + :param child_items: Can be used if dataset points to a folder. If set to + true, the folder must have at least one file. If set to false, the folder + must be empty. Type: boolean (or Expression with resultType boolean). + :type child_items: object + :param dataset: Required. Validation activity dataset reference. 
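
A sketch of constructing the ValidationActivity defined here (the dataset and activity names are placeholders; the timeout string follows the D.HH:MM:SS pattern quoted in the docstring)::

    from azure.mgmt.datafactory.models import (
        DatasetReference,
        ValidationActivity,
    )

    # Poll every 30 seconds, for up to 7 minutes, until the referenced
    # file exists and is at least 1 KB.
    validate = ValidationActivity(
        name='WaitForInput',
        dataset=DatasetReference(reference_name='InputBlob'),
        timeout='00:07:00',
        sleep=30,
        minimum_size=1024,
    )
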
+ :type dataset: ~azure.mgmt.datafactory.models.DatasetReference + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'dataset': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'timeout': {'key': 'typeProperties.timeout', 'type': 'object'}, + 'sleep': {'key': 'typeProperties.sleep', 'type': 'object'}, + 'minimum_size': {'key': 'typeProperties.minimumSize', 'type': 'object'}, + 'child_items': {'key': 'typeProperties.childItems', 'type': 'object'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + } + + def __init__(self, **kwargs): + super(ValidationActivity, self).__init__(**kwargs) + self.timeout = kwargs.get('timeout', None) + self.sleep = kwargs.get('sleep', None) + self.minimum_size = kwargs.get('minimum_size', None) + self.child_items = kwargs.get('child_items', None) + self.dataset = kwargs.get('dataset', None) + self.type = 'Validation' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity_py3.py new file mode 100644 index 000000000000..f4680400b447 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity_py3.py @@ -0,0 +1,81 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity_py3 import ControlActivity + + +class ValidationActivity(ControlActivity): + """This activity verifies that an external resource exists. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param timeout: Specifies the timeout for the activity to run. If there is + no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 + week as default. Type: string (or Expression with resultType string), + pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type timeout: object + :param sleep: A delay in seconds between validation attempts. If no value + is specified, 10 seconds will be used as the default. Type: integer (or + Expression with resultType integer). 
+ :type sleep: object + :param minimum_size: Can be used if dataset points to a file. The file + must be greater than or equal in size to the value specified. Type: + integer (or Expression with resultType integer). + :type minimum_size: object + :param child_items: Can be used if dataset points to a folder. If set to + true, the folder must have at least one file. If set to false, the folder + must be empty. Type: boolean (or Expression with resultType boolean). + :type child_items: object + :param dataset: Required. Validation activity dataset reference. + :type dataset: ~azure.mgmt.datafactory.models.DatasetReference + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'dataset': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'timeout': {'key': 'typeProperties.timeout', 'type': 'object'}, + 'sleep': {'key': 'typeProperties.sleep', 'type': 'object'}, + 'minimum_size': {'key': 'typeProperties.minimumSize', 'type': 'object'}, + 'child_items': {'key': 'typeProperties.childItems', 'type': 'object'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + } + + def __init__(self, *, name: str, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, timeout=None, sleep=None, minimum_size=None, child_items=None, **kwargs) -> None: + super(ValidationActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.timeout = timeout + self.sleep = sleep + self.minimum_size = minimum_size + self.child_items = child_items + self.dataset = dataset + self.type = 'Validation' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification.py new file mode 100644 index 000000000000..6d7fd808fa44 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class VariableSpecification(Model): + """Definition of a single variable for a Pipeline. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Variable type. Possible values include: 'String', + 'Bool', 'Array' + :type type: str or ~azure.mgmt.datafactory.models.VariableType + :param default_value: Default value of variable. 
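
A usage sketch for VariableSpecification (the variable names are hypothetical; a mapping like this is what a pipeline's variables property expects)::

    from azure.mgmt.datafactory.models import VariableSpecification

    # Declared on the pipeline; a Set Variable activity can change them
    # at run time.
    variables = {
        'done': VariableSpecification(type='Bool', default_value=False),
        'names': VariableSpecification(type='Array', default_value=[]),
    }
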
+ :type default_value: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'default_value': {'key': 'defaultValue', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(VariableSpecification, self).__init__(**kwargs) + self.type = kwargs.get('type', None) + self.default_value = kwargs.get('default_value', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification_py3.py new file mode 100644 index 000000000000..d60b3b4b1591 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification_py3.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class VariableSpecification(Model): + """Definition of a single variable for a Pipeline. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Variable type. Possible values include: 'String', + 'Bool', 'Array' + :type type: str or ~azure.mgmt.datafactory.models.VariableType + :param default_value: Default value of variable. + :type default_value: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'default_value': {'key': 'defaultValue', 'type': 'object'}, + } + + def __init__(self, *, type, default_value=None, **kwargs) -> None: + super(VariableSpecification, self).__init__(**kwargs) + self.type = type + self.default_value = default_value diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service.py new file mode 100644 index 000000000000..6b5e8d0103f5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class VerticaLinkedService(LinkedService): + """Vertica linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. 
+ :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(VerticaLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.pwd = kwargs.get('pwd', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Vertica' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service_py3.py new file mode 100644 index 000000000000..3aee3a5ae0f6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class VerticaLinkedService(LinkedService): + """Vertica linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
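
A sketch of the VerticaLinkedService being defined here (the server, database and secret names are invented; the password is resolved from Key Vault via the AzureKeyVaultSecretReference model from this same package)::

    from azure.mgmt.datafactory.models import (
        AzureKeyVaultSecretReference,
        LinkedServiceReference,
        VerticaLinkedService,
    )

    # Connection string carries everything but the password; pwd is
    # resolved from a Key Vault-backed linked service at run time.
    vertica_ls = VerticaLinkedService(
        connection_string='DRIVER=Vertica;SERVER=vertica01;DATABASE=sales;UID=loader',
        pwd=AzureKeyVaultSecretReference(
            store=LinkedServiceReference(reference_name='MyKeyVault'),
            secret_name='vertica-pwd',
        ),
    )
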
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: + super(VerticaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.pwd = pwd + self.encrypted_credential = encrypted_credential + self.type = 'Vertica' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source.py new file mode 100644 index 000000000000..d0b642f15d38 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class VerticaSource(CopySource): + """A copy activity Vertica source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. 
Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(VerticaSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'VerticaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source_py3.py new file mode 100644 index 000000000000..a1c4d755f2b4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class VerticaSource(CopySource): + """A copy activity Vertica source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
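
As a sketch, the VerticaSource defined here would form the source half of a copy activity (the query and retry settings are illustrative only)::

    from azure.mgmt.datafactory.models import VerticaSource

    # Retry the read twice, thirty seconds apart, before failing.
    source = VerticaSource(
        query='SELECT id, amount FROM sales.orders',
        source_retry_count=2,
        source_retry_wait='00:00:30',
    )
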
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'VerticaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset.py new file mode 100644 index 000000000000..151a0d000e3f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class VerticaTableDataset(Dataset): + """Vertica dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The table name of the Vertica. Type: string (or Expression + with resultType string). + :type table: object + :param vertica_table_dataset_schema: The schema name of the Vertica. 
Type: + string (or Expression with resultType string). + :type vertica_table_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'vertica_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(VerticaTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.vertica_table_dataset_schema = kwargs.get('vertica_table_dataset_schema', None) + self.type = 'VerticaTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset_py3.py new file mode 100644 index 000000000000..4c2fc8da32ad --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class VerticaTableDataset(Dataset): + """Vertica dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. 
Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The table name of the Vertica. Type: string (or Expression + with resultType string). + :type table: object + :param vertica_table_dataset_schema: The schema name of the Vertica. Type: + string (or Expression with resultType string). + :type vertica_table_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'vertica_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, vertica_table_dataset_schema=None, **kwargs) -> None: + super(VerticaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.table = table + self.vertica_table_dataset_schema = vertica_table_dataset_schema + self.type = 'VerticaTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity.py new file mode 100644 index 000000000000..91f3decc7473 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity import ControlActivity + + +class WaitActivity(ControlActivity): + """This activity suspends pipeline execution for the specified interval. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. 
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param wait_time_in_seconds: Required. Duration in seconds. + :type wait_time_in_seconds: int + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'wait_time_in_seconds': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'wait_time_in_seconds': {'key': 'typeProperties.waitTimeInSeconds', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(WaitActivity, self).__init__(**kwargs) + self.wait_time_in_seconds = kwargs.get('wait_time_in_seconds', None) + self.type = 'Wait' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity_py3.py new file mode 100644 index 000000000000..ff85c9d16733 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity_py3.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity_py3 import ControlActivity + + +class WaitActivity(ControlActivity): + """This activity suspends pipeline execution for the specified interval. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param wait_time_in_seconds: Required. Duration in seconds. 
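
A minimal sketch of the WaitActivity defined here (the activity name is a placeholder)::

    from azure.mgmt.datafactory.models import WaitActivity

    # Pause the pipeline for five minutes before downstream activities run.
    wait = WaitActivity(name='CoolDown', wait_time_in_seconds=300)
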
+ :type wait_time_in_seconds: int
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'wait_time_in_seconds': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'wait_time_in_seconds': {'key': 'typeProperties.waitTimeInSeconds', 'type': 'int'},
+ }
+
+ def __init__(self, *, name: str, wait_time_in_seconds: int, additional_properties=None, description: str=None, depends_on=None, user_properties=None, **kwargs) -> None:
+ super(WaitActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs)
+ self.wait_time_in_seconds = wait_time_in_seconds
+ self.type = 'Wait'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity.py
new file mode 100644
index 000000000000..70264719d52e
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity.py
@@ -0,0 +1,98 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .execution_activity import ExecutionActivity
+
+
+class WebActivity(ExecutionActivity):
+ """Web activity.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param linked_service_name: Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param policy: Activity policy.
+ :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+ :param method: Required. REST API method for target endpoint. Possible
+ values include: 'GET', 'POST', 'PUT', 'DELETE'
+ :type method: str or ~azure.mgmt.datafactory.models.WebActivityMethod
+ :param url: Required. Web activity target endpoint and path. Type: string
+ (or Expression with resultType string).
+ :type url: object
+ :param headers: Represents the headers that will be sent to the request.
+ For example, to set the language and type on a request: "headers" : {
+ "Accept-Language": "en-us", "Content-Type": "application/json" }. Type:
+ string (or Expression with resultType string).
+ :type headers: object
+ :param body: Represents the payload that will be sent to the endpoint.
+ Required for POST/PUT method, not allowed for GET method. Type: string (or
+ Expression with resultType string).
+ :type body: object
+ :param authentication: Authentication method used for calling the
+ endpoint.
+ :type authentication:
+ ~azure.mgmt.datafactory.models.WebActivityAuthentication
+ :param datasets: List of datasets passed to web endpoint.
+ :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference]
+ :param linked_services: List of linked services passed to web endpoint.
+ :type linked_services:
+ list[~azure.mgmt.datafactory.models.LinkedServiceReference]
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'method': {'required': True},
+ 'url': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+ 'method': {'key': 'typeProperties.method', 'type': 'str'},
+ 'url': {'key': 'typeProperties.url', 'type': 'object'},
+ 'headers': {'key': 'typeProperties.headers', 'type': 'object'},
+ 'body': {'key': 'typeProperties.body', 'type': 'object'},
+ 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'},
+ 'datasets': {'key': 'typeProperties.datasets', 'type': '[DatasetReference]'},
+ 'linked_services': {'key': 'typeProperties.linkedServices', 'type': '[LinkedServiceReference]'},
+ }
+
+ def __init__(self, **kwargs):
+ super(WebActivity, self).__init__(**kwargs)
+ self.method = kwargs.get('method', None)
+ self.url = kwargs.get('url', None)
+ self.headers = kwargs.get('headers', None)
+ self.body = kwargs.get('body', None)
+ self.authentication = kwargs.get('authentication', None)
+ self.datasets = kwargs.get('datasets', None)
+ self.linked_services = kwargs.get('linked_services', None)
+ self.type = 'WebActivity'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication.py
new file mode 100644
index 000000000000..6ebb193ae5e9
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication.py
@@ -0,0 +1,53 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class WebActivityAuthentication(Model):
+ """Web activity authentication properties.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param type: Required.
Web activity authentication + (Basic/ClientCertificate/MSI) + :type type: str + :param pfx: Base64-encoded contents of a PFX file. + :type pfx: ~azure.mgmt.datafactory.models.SecureString + :param username: Web activity authentication user name for basic + authentication. + :type username: str + :param password: Password for the PFX file or basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecureString + :param resource: Resource for which Azure Auth token will be requested + when using MSI Authentication. + :type resource: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'pfx': {'key': 'pfx', 'type': 'SecureString'}, + 'username': {'key': 'username', 'type': 'str'}, + 'password': {'key': 'password', 'type': 'SecureString'}, + 'resource': {'key': 'resource', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(WebActivityAuthentication, self).__init__(**kwargs) + self.type = kwargs.get('type', None) + self.pfx = kwargs.get('pfx', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.resource = kwargs.get('resource', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication_py3.py new file mode 100644 index 000000000000..4c2b68ba7161 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication_py3.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class WebActivityAuthentication(Model): + """Web activity authentication properties. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Web activity authentication + (Basic/ClientCertificate/MSI) + :type type: str + :param pfx: Base64-encoded contents of a PFX file. + :type pfx: ~azure.mgmt.datafactory.models.SecureString + :param username: Web activity authentication user name for basic + authentication. + :type username: str + :param password: Password for the PFX file or basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecureString + :param resource: Resource for which Azure Auth token will be requested + when using MSI Authentication. 
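
Two hedged sketches of the WebActivityAuthentication model (the resource URL, user name and password are placeholders; SecureString is the secret wrapper from this same package)::

    from azure.mgmt.datafactory.models import (
        SecureString,
        WebActivityAuthentication,
    )

    # MSI: the factory's managed identity requests a token for the given
    # resource, so no certificate or password is needed.
    msi_auth = WebActivityAuthentication(
        type='MSI',
        resource='https://management.azure.com/',
    )

    # Basic: user name plus a password wrapped as a SecureString.
    basic_auth = WebActivityAuthentication(
        type='Basic',
        username='svc-user',
        password=SecureString(value='placeholder-password'),
    )
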
+ :type resource: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'pfx': {'key': 'pfx', 'type': 'SecureString'}, + 'username': {'key': 'username', 'type': 'str'}, + 'password': {'key': 'password', 'type': 'SecureString'}, + 'resource': {'key': 'resource', 'type': 'str'}, + } + + def __init__(self, *, type: str, pfx=None, username: str=None, password=None, resource: str=None, **kwargs) -> None: + super(WebActivityAuthentication, self).__init__(**kwargs) + self.type = type + self.pfx = pfx + self.username = username + self.password = password + self.resource = resource diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_py3.py new file mode 100644 index 000000000000..9a64114a00c6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_py3.py @@ -0,0 +1,98 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class WebActivity(ExecutionActivity): + """Web activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param method: Required. Rest API method for target endpoint. Possible + values include: 'GET', 'POST', 'PUT', 'DELETE' + :type method: str or ~azure.mgmt.datafactory.models.WebActivityMethod + :param url: Required. Web activity target endpoint and path. Type: string + (or Expression with resultType string). + :type url: object + :param headers: Represents the headers that will be sent to the request. + For example, to set the language and type on a request: "headers" : { + "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: + string (or Expression with resultType string). + :type headers: object + :param body: Represents the payload that will be sent to the endpoint. + Required for POST/PUT method, not allowed for GET method Type: string (or + Expression with resultType string). + :type body: object + :param authentication: Authentication method used for calling the + endpoint. 
+ :type authentication: + ~azure.mgmt.datafactory.models.WebActivityAuthentication + :param datasets: List of datasets passed to web endpoint. + :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference] + :param linked_services: List of linked services passed to web endpoint. + :type linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'method': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'method': {'key': 'typeProperties.method', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, + 'body': {'key': 'typeProperties.body', 'type': 'object'}, + 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'}, + 'datasets': {'key': 'typeProperties.datasets', 'type': '[DatasetReference]'}, + 'linked_services': {'key': 'typeProperties.linkedServices', 'type': '[LinkedServiceReference]'}, + } + + def __init__(self, *, name: str, method, url, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, headers=None, body=None, authentication=None, datasets=None, linked_services=None, **kwargs) -> None: + super(WebActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.method = method + self.url = url + self.headers = headers + self.body = body + self.authentication = authentication + self.datasets = datasets + self.linked_services = linked_services + self.type = 'WebActivity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication.py new file mode 100644 index 000000000000..d3bd2f2594ab --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .web_linked_service_type_properties import WebLinkedServiceTypeProperties + + +class WebAnonymousAuthentication(WebLinkedServiceTypeProperties): + """A WebLinkedService that uses anonymous authentication to communicate with + an HTTP endpoint. + + All required parameters must be populated in order to send to Azure. + + :param url: Required. 
The URL of the web service endpoint, e.g. + http://www.microsoft.com . Type: string (or Expression with resultType + string). + :type url: object + :param authentication_type: Required. Constant filled by server. + :type authentication_type: str + """ + + _validation = { + 'url': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'url': {'key': 'url', 'type': 'object'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(WebAnonymousAuthentication, self).__init__(**kwargs) + self.authentication_type = 'Anonymous' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication_py3.py new file mode 100644 index 000000000000..ee7a4e780a1f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication_py3.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .web_linked_service_type_properties_py3 import WebLinkedServiceTypeProperties + + +class WebAnonymousAuthentication(WebLinkedServiceTypeProperties): + """A WebLinkedService that uses anonymous authentication to communicate with + an HTTP endpoint. + + All required parameters must be populated in order to send to Azure. + + :param url: Required. The URL of the web service endpoint, e.g. + http://www.microsoft.com . Type: string (or Expression with resultType + string). + :type url: object + :param authentication_type: Required. Constant filled by server. + :type authentication_type: str + """ + + _validation = { + 'url': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'url': {'key': 'url', 'type': 'object'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + } + + def __init__(self, *, url, **kwargs) -> None: + super(WebAnonymousAuthentication, self).__init__(url=url, **kwargs) + self.authentication_type = 'Anonymous' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication.py new file mode 100644 index 000000000000..90050f7dae28 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication.py @@ -0,0 +1,52 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
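# Illustrative usage sketch, not part of the generated patch: it shows how
# the WebActivity and WebActivityAuthentication models added above fit
# together. Assumes azure-mgmt-datafactory is installed; the endpoint URL
# and all resource names below are hypothetical placeholders.
from azure.mgmt.datafactory.models import (
    PipelineResource, WebActivity, WebActivityAuthentication)

# Call an endpoint with managed identity (MSI); 'resource' is the AAD
# audience for which the token will be requested.
msi_auth = WebActivityAuthentication(
    type='MSI', resource='https://management.azure.com/')
web_activity = WebActivity(
    name='CallStatusEndpoint',
    method='POST',
    url='https://example.com/api/status',  # hypothetical endpoint
    headers={'Content-Type': 'application/json'},
    body='{"ping": true}',
    authentication=msi_auth)
pipeline = PipelineResource(activities=[web_activity])
# With an authenticated DataFactoryManagementClient (creation not shown):
# client.pipelines.create_or_update(
#     'myResourceGroup', 'myFactory', 'myPipeline', pipeline)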
+# -------------------------------------------------------------------------- + +from .web_linked_service_type_properties import WebLinkedServiceTypeProperties + + +class WebBasicAuthentication(WebLinkedServiceTypeProperties): + """A WebLinkedService that uses basic authentication to communicate with an + HTTP endpoint. + + All required parameters must be populated in order to send to Azure. + + :param url: Required. The URL of the web service endpoint, e.g. + http://www.microsoft.com . Type: string (or Expression with resultType + string). + :type url: object + :param authentication_type: Required. Constant filled by server. + :type authentication_type: str + :param username: Required. User name for Basic authentication. Type: + string (or Expression with resultType string). + :type username: object + :param password: Required. The password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + """ + + _validation = { + 'url': {'required': True}, + 'authentication_type': {'required': True}, + 'username': {'required': True}, + 'password': {'required': True}, + } + + _attribute_map = { + 'url': {'key': 'url', 'type': 'object'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'username': {'key': 'username', 'type': 'object'}, + 'password': {'key': 'password', 'type': 'SecretBase'}, + } + + def __init__(self, **kwargs): + super(WebBasicAuthentication, self).__init__(**kwargs) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.authentication_type = 'Basic' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication_py3.py new file mode 100644 index 000000000000..71577ec86565 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication_py3.py @@ -0,0 +1,52 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .web_linked_service_type_properties_py3 import WebLinkedServiceTypeProperties + + +class WebBasicAuthentication(WebLinkedServiceTypeProperties): + """A WebLinkedService that uses basic authentication to communicate with an + HTTP endpoint. + + All required parameters must be populated in order to send to Azure. + + :param url: Required. The URL of the web service endpoint, e.g. + http://www.microsoft.com . Type: string (or Expression with resultType + string). + :type url: object + :param authentication_type: Required. Constant filled by server. + :type authentication_type: str + :param username: Required. User name for Basic authentication. Type: + string (or Expression with resultType string). + :type username: object + :param password: Required. The password for Basic authentication. 
+ :type password: ~azure.mgmt.datafactory.models.SecretBase + """ + + _validation = { + 'url': {'required': True}, + 'authentication_type': {'required': True}, + 'username': {'required': True}, + 'password': {'required': True}, + } + + _attribute_map = { + 'url': {'key': 'url', 'type': 'object'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'username': {'key': 'username', 'type': 'object'}, + 'password': {'key': 'password', 'type': 'SecretBase'}, + } + + def __init__(self, *, url, username, password, **kwargs) -> None: + super(WebBasicAuthentication, self).__init__(url=url, **kwargs) + self.username = username + self.password = password + self.authentication_type = 'Basic' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication.py new file mode 100644 index 000000000000..671808ca85d1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .web_linked_service_type_properties import WebLinkedServiceTypeProperties + + +class WebClientCertificateAuthentication(WebLinkedServiceTypeProperties): + """A WebLinkedService that uses client certificate based authentication to + communicate with an HTTP endpoint. This scheme follows mutual + authentication; the server must also provide valid credentials to the + client. + + All required parameters must be populated in order to send to Azure. + + :param url: Required. The URL of the web service endpoint, e.g. + http://www.microsoft.com . Type: string (or Expression with resultType + string). + :type url: object + :param authentication_type: Required. Constant filled by server. + :type authentication_type: str + :param pfx: Required. Base64-encoded contents of a PFX file. + :type pfx: ~azure.mgmt.datafactory.models.SecretBase + :param password: Required. Password for the PFX file. 
+ :type password: ~azure.mgmt.datafactory.models.SecretBase + """ + + _validation = { + 'url': {'required': True}, + 'authentication_type': {'required': True}, + 'pfx': {'required': True}, + 'password': {'required': True}, + } + + _attribute_map = { + 'url': {'key': 'url', 'type': 'object'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'pfx': {'key': 'pfx', 'type': 'SecretBase'}, + 'password': {'key': 'password', 'type': 'SecretBase'}, + } + + def __init__(self, **kwargs): + super(WebClientCertificateAuthentication, self).__init__(**kwargs) + self.pfx = kwargs.get('pfx', None) + self.password = kwargs.get('password', None) + self.authentication_type = 'ClientCertificate' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication_py3.py new file mode 100644 index 000000000000..7ac859b677a8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication_py3.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .web_linked_service_type_properties_py3 import WebLinkedServiceTypeProperties + + +class WebClientCertificateAuthentication(WebLinkedServiceTypeProperties): + """A WebLinkedService that uses client certificate based authentication to + communicate with an HTTP endpoint. This scheme follows mutual + authentication; the server must also provide valid credentials to the + client. + + All required parameters must be populated in order to send to Azure. + + :param url: Required. The URL of the web service endpoint, e.g. + http://www.microsoft.com . Type: string (or Expression with resultType + string). + :type url: object + :param authentication_type: Required. Constant filled by server. + :type authentication_type: str + :param pfx: Required. Base64-encoded contents of a PFX file. + :type pfx: ~azure.mgmt.datafactory.models.SecretBase + :param password: Required. Password for the PFX file. 
+ :type password: ~azure.mgmt.datafactory.models.SecretBase + """ + + _validation = { + 'url': {'required': True}, + 'authentication_type': {'required': True}, + 'pfx': {'required': True}, + 'password': {'required': True}, + } + + _attribute_map = { + 'url': {'key': 'url', 'type': 'object'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'pfx': {'key': 'pfx', 'type': 'SecretBase'}, + 'password': {'key': 'password', 'type': 'SecretBase'}, + } + + def __init__(self, *, url, pfx, password, **kwargs) -> None: + super(WebClientCertificateAuthentication, self).__init__(url=url, **kwargs) + self.pfx = pfx + self.password = password + self.authentication_type = 'ClientCertificate' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity.py new file mode 100644 index 000000000000..1c648c42c3e2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity.py @@ -0,0 +1,92 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity import ControlActivity + + +class WebHookActivity(ControlActivity): + """WebHook activity. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :ivar method: Required. Rest API method for target endpoint. Default + value: "POST" . + :vartype method: str + :param url: Required. WebHook activity target endpoint and path. Type: + string (or Expression with resultType string). + :type url: object + :param timeout: The timeout within which the webhook should be called + back. If there is no value specified, it defaults to 10 minutes. Type: + string. Pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type timeout: str + :param headers: Represents the headers that will be sent to the request. + For example, to set the language and type on a request: "headers" : { + "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: + string (or Expression with resultType string). + :type headers: object + :param body: Represents the payload that will be sent to the endpoint. + Required for POST/PUT method, not allowed for GET method Type: string (or + Expression with resultType string). + :type body: object + :param authentication: Authentication method used for calling the + endpoint. 
+ :type authentication: + ~azure.mgmt.datafactory.models.WebActivityAuthentication + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'method': {'required': True, 'constant': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'method': {'key': 'typeProperties.method', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'timeout': {'key': 'typeProperties.timeout', 'type': 'str'}, + 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, + 'body': {'key': 'typeProperties.body', 'type': 'object'}, + 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'}, + } + + method = "POST" + + def __init__(self, **kwargs): + super(WebHookActivity, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.timeout = kwargs.get('timeout', None) + self.headers = kwargs.get('headers', None) + self.body = kwargs.get('body', None) + self.authentication = kwargs.get('authentication', None) + self.type = 'WebHook' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity_py3.py new file mode 100644 index 000000000000..40cdc6f732da --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity_py3.py @@ -0,0 +1,92 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity_py3 import ControlActivity + + +class WebHookActivity(ControlActivity): + """WebHook activity. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :ivar method: Required. Rest API method for target endpoint. Default + value: "POST" . + :vartype method: str + :param url: Required. WebHook activity target endpoint and path. Type: + string (or Expression with resultType string). + :type url: object + :param timeout: The timeout within which the webhook should be called + back. If there is no value specified, it defaults to 10 minutes. 
Type: + string. Pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type timeout: str + :param headers: Represents the headers that will be sent to the request. + For example, to set the language and type on a request: "headers" : { + "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: + string (or Expression with resultType string). + :type headers: object + :param body: Represents the payload that will be sent to the endpoint. + Required for POST/PUT method, not allowed for GET method Type: string (or + Expression with resultType string). + :type body: object + :param authentication: Authentication method used for calling the + endpoint. + :type authentication: + ~azure.mgmt.datafactory.models.WebActivityAuthentication + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'method': {'required': True, 'constant': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'method': {'key': 'typeProperties.method', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'timeout': {'key': 'typeProperties.timeout', 'type': 'str'}, + 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, + 'body': {'key': 'typeProperties.body', 'type': 'object'}, + 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'}, + } + + method = "POST" + + def __init__(self, *, name: str, url, additional_properties=None, description: str=None, depends_on=None, user_properties=None, timeout: str=None, headers=None, body=None, authentication=None, **kwargs) -> None: + super(WebHookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.url = url + self.timeout = timeout + self.headers = headers + self.body = body + self.authentication = authentication + self.type = 'WebHook' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service.py new file mode 100644 index 000000000000..18fadba3f3ee --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service.py @@ -0,0 +1,59 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class WebLinkedService(LinkedService): + """Web linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param type_properties: Required. Web linked service properties. + :type type_properties: + ~azure.mgmt.datafactory.models.WebLinkedServiceTypeProperties + """ + + _validation = { + 'type': {'required': True}, + 'type_properties': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'type_properties': {'key': 'typeProperties', 'type': 'WebLinkedServiceTypeProperties'}, + } + + def __init__(self, **kwargs): + super(WebLinkedService, self).__init__(**kwargs) + self.type_properties = kwargs.get('type_properties', None) + self.type = 'Web' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_py3.py new file mode 100644 index 000000000000..3e491b0fac4d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_py3.py @@ -0,0 +1,59 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class WebLinkedService(LinkedService): + """Web linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param type_properties: Required. Web linked service properties. 
+ :type type_properties: + ~azure.mgmt.datafactory.models.WebLinkedServiceTypeProperties + """ + + _validation = { + 'type': {'required': True}, + 'type_properties': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'type_properties': {'key': 'typeProperties', 'type': 'WebLinkedServiceTypeProperties'}, + } + + def __init__(self, *, type_properties, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: + super(WebLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type_properties = type_properties + self.type = 'Web' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties.py new file mode 100644 index 000000000000..22290e80b19f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class WebLinkedServiceTypeProperties(Model): + """Base definition of WebLinkedServiceTypeProperties, this typeProperties is + polymorphic based on authenticationType, so not flattened in SDK models. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: WebClientCertificateAuthentication, + WebBasicAuthentication, WebAnonymousAuthentication + + All required parameters must be populated in order to send to Azure. + + :param url: Required. The URL of the web service endpoint, e.g. + http://www.microsoft.com . Type: string (or Expression with resultType + string). + :type url: object + :param authentication_type: Required. Constant filled by server. 
+ :type authentication_type: str + """ + + _validation = { + 'url': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'url': {'key': 'url', 'type': 'object'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + } + + _subtype_map = { + 'authentication_type': {'ClientCertificate': 'WebClientCertificateAuthentication', 'Basic': 'WebBasicAuthentication', 'Anonymous': 'WebAnonymousAuthentication'} + } + + def __init__(self, **kwargs): + super(WebLinkedServiceTypeProperties, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.authentication_type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties_py3.py new file mode 100644 index 000000000000..1c162c2f1004 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties_py3.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class WebLinkedServiceTypeProperties(Model): + """Base definition of WebLinkedServiceTypeProperties, this typeProperties is + polymorphic based on authenticationType, so not flattened in SDK models. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: WebClientCertificateAuthentication, + WebBasicAuthentication, WebAnonymousAuthentication + + All required parameters must be populated in order to send to Azure. + + :param url: Required. The URL of the web service endpoint, e.g. + http://www.microsoft.com . Type: string (or Expression with resultType + string). + :type url: object + :param authentication_type: Required. Constant filled by server. + :type authentication_type: str + """ + + _validation = { + 'url': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'url': {'key': 'url', 'type': 'object'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + } + + _subtype_map = { + 'authentication_type': {'ClientCertificate': 'WebClientCertificateAuthentication', 'Basic': 'WebBasicAuthentication', 'Anonymous': 'WebAnonymousAuthentication'} + } + + def __init__(self, *, url, **kwargs) -> None: + super(WebLinkedServiceTypeProperties, self).__init__(**kwargs) + self.url = url + self.authentication_type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source.py new file mode 100644 index 000000000000..c5d3a2a8f00a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source.py @@ -0,0 +1,52 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
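# Illustrative usage sketch, not part of the generated patch: the
# WebLinkedServiceTypeProperties hierarchy above is polymorphic on
# authenticationType, so choosing a concrete subclass is what selects the
# authentication scheme on the wire. URL and credentials below are
# hypothetical placeholders.
from azure.mgmt.datafactory.models import (
    SecureString, WebBasicAuthentication, WebLinkedService)

basic_props = WebBasicAuthentication(
    url='https://example.com/service',
    username='svc-user',
    password=SecureString(value='<placeholder-password>'))
# The subclass constructor pins authentication_type to 'Basic', which is
# the discriminator the service uses when deserializing typeProperties.
web_ls = WebLinkedService(type_properties=basic_props)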
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class WebSource(CopySource): + """A copy activity source for web page table. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(WebSource, self).__init__(**kwargs) + self.type = 'WebSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source_py3.py new file mode 100644 index 000000000000..684e1d4233cc --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source_py3.py @@ -0,0 +1,52 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class WebSource(CopySource): + """A copy activity source for web page table. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: + super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'WebSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset.py new file mode 100644 index 000000000000..3980fe3d885a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class WebTableDataset(Dataset): + """The dataset points to a HTML table in the web page. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param index: Required. The zero-based index of the table in the web page. + Type: integer (or Expression with resultType integer), minimum: 0. + :type index: object + :param path: The relative URL to the web page from the linked service URL. + Type: string (or Expression with resultType string). 
+ :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'index': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'index': {'key': 'typeProperties.index', 'type': 'object'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(WebTableDataset, self).__init__(**kwargs) + self.index = kwargs.get('index', None) + self.path = kwargs.get('path', None) + self.type = 'WebTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset_py3.py new file mode 100644 index 000000000000..edb2344c35d2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset_py3.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class WebTableDataset(Dataset): + """The dataset points to a HTML table in the web page. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param index: Required. The zero-based index of the table in the web page. + Type: integer (or Expression with resultType integer), minimum: 0. 
+ :type index: object + :param path: The relative URL to the web page from the linked service URL. + Type: string (or Expression with resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'index': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'index': {'key': 'typeProperties.index', 'type': 'object'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, index, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, path=None, **kwargs) -> None: + super(WebTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.index = index + self.path = path + self.type = 'WebTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service.py new file mode 100644 index 000000000000..24973f577133 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class XeroLinkedService(LinkedService): + """Xero Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The endpoint of the Xero server. (i.e. + api.xero.com) + :type host: object + :param consumer_key: The consumer key associated with the Xero + application. 
+ :type consumer_key: ~azure.mgmt.datafactory.models.SecretBase
+ :param private_key: The private key from the .pem file that was generated
+ for your Xero private application. You must include all the text from the
+ .pem file, including the Unix line endings(\n).
+ :type private_key: ~azure.mgmt.datafactory.models.SecretBase
+ :param use_encrypted_endpoints: Specifies whether the data source
+ endpoints are encrypted using HTTPS. The default value is true.
+ :type use_encrypted_endpoints: object
+ :param use_host_verification: Specifies whether to require the host name
+ in the server's certificate to match the host name of the server when
+ connecting over SSL. The default value is true.
+ :type use_host_verification: object
+ :param use_peer_verification: Specifies whether to verify the identity of
+ the server when connecting over SSL. The default value is true.
+ :type use_peer_verification: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'host': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'host': {'key': 'typeProperties.host', 'type': 'object'},
+ 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'SecretBase'},
+ 'private_key': {'key': 'typeProperties.privateKey', 'type': 'SecretBase'},
+ 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
+ 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
+ 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(XeroLinkedService, self).__init__(**kwargs)
+ self.host = kwargs.get('host', None)
+ self.consumer_key = kwargs.get('consumer_key', None)
+ self.private_key = kwargs.get('private_key', None)
+ self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None)
+ self.use_host_verification = kwargs.get('use_host_verification', None)
+ self.use_peer_verification = kwargs.get('use_peer_verification', None)
+ self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.type = 'Xero'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service_py3.py
new file mode 100644
index 000000000000..433c65ade739
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service_py3.py
@@ -0,0 +1,93 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
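# Illustrative usage sketch, not part of the generated patch: constructing
# the XeroLinkedService defined above. Only 'host' is required; the key
# material below is a hypothetical placeholder -- in practice private_key
# holds the full text of the Xero application's .pem file, newlines and all.
from azure.mgmt.datafactory.models import SecureString, XeroLinkedService

xero_ls = XeroLinkedService(
    host='api.xero.com',
    consumer_key=SecureString(value='<consumer-key>'),
    private_key=SecureString(
        value='-----BEGIN RSA PRIVATE KEY-----\n<key body>\n'
              '-----END RSA PRIVATE KEY-----\n'),
    use_encrypted_endpoints=True)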
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class XeroLinkedService(LinkedService):
+ """Xero Service linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param host: Required. The endpoint of the Xero server. (i.e.
+ api.xero.com)
+ :type host: object
+ :param consumer_key: The consumer key associated with the Xero
+ application.
+ :type consumer_key: ~azure.mgmt.datafactory.models.SecretBase
+ :param private_key: The private key from the .pem file that was generated
+ for your Xero private application. You must include all the text from the
+ .pem file, including the Unix line endings(\n).
+ :type private_key: ~azure.mgmt.datafactory.models.SecretBase
+ :param use_encrypted_endpoints: Specifies whether the data source
+ endpoints are encrypted using HTTPS. The default value is true.
+ :type use_encrypted_endpoints: object
+ :param use_host_verification: Specifies whether to require the host name
+ in the server's certificate to match the host name of the server when
+ connecting over SSL. The default value is true.
+ :type use_host_verification: object
+ :param use_peer_verification: Specifies whether to verify the identity of
+ the server when connecting over SSL. The default value is true.
+ :type use_peer_verification: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'SecretBase'}, + 'private_key': {'key': 'typeProperties.privateKey', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, consumer_key=None, private_key=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(XeroLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.consumer_key = consumer_key + self.private_key = private_key + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Xero' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset.py new file mode 100644 index 000000000000..53c5edd44cec --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class XeroObjectDataset(Dataset): + """Xero Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. 
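As a point of reference, a minimal construction sketch for the model defined above; every literal below is a placeholder, and SecureString is one concrete SecretBase implementation shipped in this package:

from azure.mgmt.datafactory.models import SecureString, XeroLinkedService

# All values are illustrative; host is the Xero endpoint described above.
xero = XeroLinkedService(
    host='api.xero.com',
    consumer_key=SecureString(value='<consumer-key>'),
    private_key=SecureString(value='<full text of the .pem file>'),
    use_encrypted_endpoints=True,
)
# __init__ pins the polymorphic discriminator, so xero.type == 'Xero'.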
+ :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(XeroObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'XeroObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset_py3.py new file mode 100644 index 000000000000..673d41e1771e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class XeroObjectDataset(Dataset): + """Xero Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(XeroObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'XeroObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source.py new file mode 100644 index 000000000000..a37852a5b419 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class XeroSource(CopySource): + """A copy activity Xero Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). 
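A short sketch of how this dataset points at the linked service above through a LinkedServiceReference; the names are placeholders:

from azure.mgmt.datafactory.models import LinkedServiceReference, XeroObjectDataset

dataset = XeroObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='XeroLinkedService1'),
    table_name='Contacts',  # hypothetical; a table name or an ADF expression
)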
+ :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(XeroSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'XeroSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source_py3.py new file mode 100644 index 000000000000..bbee6c6fa1f0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class XeroSource(CopySource): + """A copy activity Xero Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'XeroSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service.py new file mode 100644 index 000000000000..fe34dff77ea9 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class ZohoLinkedService(LinkedService): + """Zoho server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of the Zoho server. (i.e. + crm.zoho.com/crm/private) + :type endpoint: object + :param access_token: The access token for Zoho authentication. + :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. 
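To show where this source slots in, a hedged sketch of a copy activity that reads from Xero into a blob sink; the activity and dataset names are invented:

from azure.mgmt.datafactory.models import (
    BlobSink, CopyActivity, DatasetReference, XeroSource)

activity = CopyActivity(
    name='CopyFromXero',
    source=XeroSource(query='SELECT * FROM Contacts'),  # query is optional
    sink=BlobSink(),
    inputs=[DatasetReference(reference_name='XeroDataset1')],
    outputs=[DatasetReference(reference_name='BlobOutput1')],
)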
+ :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ZohoLinkedService, self).__init__(**kwargs) + self.endpoint = kwargs.get('endpoint', None) + self.access_token = kwargs.get('access_token', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Zoho' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service_py3.py new file mode 100644 index 000000000000..f82f6221592b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service_py3.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class ZohoLinkedService(LinkedService): + """Zoho server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. 
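Note the dotted keys in _attribute_map ('typeProperties.endpoint' and friends): msrest flattens them into a nested typeProperties object on the wire. A rough sketch of the resulting payload:

from azure.mgmt.datafactory.models import ZohoLinkedService

zoho = ZohoLinkedService(endpoint='crm.zoho.com/crm/private')
body = zoho.serialize()
# body is roughly:
# {'type': 'Zoho', 'typeProperties': {'endpoint': 'crm.zoho.com/crm/private'}}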
Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of the Zoho server. (i.e. + crm.zoho.com/crm/private) + :type endpoint: object + :param access_token: The access token for Zoho authentication. + :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, endpoint, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(ZohoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.endpoint = endpoint + self.access_token = access_token + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Zoho' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset.py new file mode 100644 index 000000000000..062d508860a6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class ZohoObjectDataset(Dataset): + """Zoho server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ZohoObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'ZohoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset_py3.py new file mode 100644 index 000000000000..ef5a67d4fe35 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class ZohoObjectDataset(Dataset): + """Zoho server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(ZohoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'ZohoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source.py new file mode 100644 index 000000000000..274c6fc09f19 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class ZohoSource(CopySource): + """A copy activity Zoho server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ZohoSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'ZohoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source_py3.py new file mode 100644 index 000000000000..6d7dc29bdf8a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class ZohoSource(CopySource): + """A copy activity Zoho server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'ZohoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py index 013675bd0200..826179f5fb63 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py @@ -9,20 +9,20 @@ # regenerated. 
# -------------------------------------------------------------------------- -from ._operations import Operations -from ._factories_operations import FactoriesOperations -from ._exposure_control_operations import ExposureControlOperations -from ._integration_runtimes_operations import IntegrationRuntimesOperations -from ._integration_runtime_object_metadata_operations import IntegrationRuntimeObjectMetadataOperations -from ._integration_runtime_nodes_operations import IntegrationRuntimeNodesOperations -from ._linked_services_operations import LinkedServicesOperations -from ._datasets_operations import DatasetsOperations -from ._pipelines_operations import PipelinesOperations -from ._pipeline_runs_operations import PipelineRunsOperations -from ._activity_runs_operations import ActivityRunsOperations -from ._triggers_operations import TriggersOperations -from ._trigger_runs_operations import TriggerRunsOperations -from ._rerun_triggers_operations import RerunTriggersOperations +from .operations import Operations +from .factories_operations import FactoriesOperations +from .exposure_control_operations import ExposureControlOperations +from .integration_runtimes_operations import IntegrationRuntimesOperations +from .integration_runtime_object_metadata_operations import IntegrationRuntimeObjectMetadataOperations +from .integration_runtime_nodes_operations import IntegrationRuntimeNodesOperations +from .linked_services_operations import LinkedServicesOperations +from .datasets_operations import DatasetsOperations +from .pipelines_operations import PipelinesOperations +from .pipeline_runs_operations import PipelineRunsOperations +from .activity_runs_operations import ActivityRunsOperations +from .triggers_operations import TriggersOperations +from .trigger_runs_operations import TriggerRunsOperations +from .rerun_triggers_operations import RerunTriggersOperations __all__ = [ 'Operations', diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_activity_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/activity_runs_operations.py similarity index 97% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_activity_runs_operations.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/activity_runs_operations.py index 4d9d0775cb0f..f338a1a9c835 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_activity_runs_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/activity_runs_operations.py @@ -19,8 +19,6 @@ class ActivityRunsOperations(object): """ActivityRunsOperations operations. - You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. - :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. 
@@ -100,6 +98,7 @@ def query_by_pipeline_run( raise exp deserialized = None + if response.status_code == 200: deserialized = self._deserialize('ActivityRunsQueryResponse', response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_datasets_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/datasets_operations.py similarity index 97% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_datasets_operations.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/datasets_operations.py index 89feb52cc2cd..278815d03479 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_datasets_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/datasets_operations.py @@ -19,8 +19,6 @@ class DatasetsOperations(object): """DatasetsOperations operations. - You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. - :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -57,7 +55,8 @@ def list_by_factory( ~azure.mgmt.datafactory.models.DatasetResourcePaged[~azure.mgmt.datafactory.models.DatasetResource] :raises: :class:`CloudError` """ - def prepare_request(next_link=None): + def internal_paging(next_link=None, raw=False): + if not next_link: # Construct URL url = self.list_by_factory.metadata['url'] @@ -88,11 +87,6 @@ def prepare_request(next_link=None): # Construct and send request request = self._client.get(url, query_parameters, header_parameters) - return request - - def internal_paging(next_link=None): - request = prepare_request(next_link) - response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: @@ -103,10 +97,12 @@ def internal_paging(next_link=None): return response # Deserialize response - header_dict = None + deserialized = models.DatasetResourcePaged(internal_paging, self._deserialize.dependencies) + if raw: header_dict = {} - deserialized = models.DatasetResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + client_raw_response = models.DatasetResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response return deserialized list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets'} @@ -179,6 +175,7 @@ def create_or_update( raise exp deserialized = None + if response.status_code == 200: deserialized = self._deserialize('DatasetResource', response) @@ -249,6 +246,7 @@ def get( raise exp deserialized = None + if response.status_code == 200: deserialized = self._deserialize('DatasetResource', response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_exposure_control_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/exposure_control_operations.py similarity index 98% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_exposure_control_operations.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/exposure_control_operations.py index 443a826821a5..4a648d96586c 100644 --- 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_exposure_control_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/exposure_control_operations.py @@ -19,8 +19,6 @@ class ExposureControlOperations(object): """ExposureControlOperations operations. - You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. - :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -97,6 +95,7 @@ def get_feature_value( raise exp deserialized = None + if response.status_code == 200: deserialized = self._deserialize('ExposureControlResponse', response) @@ -168,6 +167,7 @@ def get_feature_value_by_factory( raise exp deserialized = None + if response.status_code == 200: deserialized = self._deserialize('ExposureControlResponse', response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_factories_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/factories_operations.py similarity index 98% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_factories_operations.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/factories_operations.py index 828834a91c49..b06c12f3e8c5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_factories_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/factories_operations.py @@ -19,8 +19,6 @@ class FactoriesOperations(object): """FactoriesOperations operations. - You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. - :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. 
@@ -53,7 +51,8 @@ def list( ~azure.mgmt.datafactory.models.FactoryPaged[~azure.mgmt.datafactory.models.Factory] :raises: :class:`CloudError` """ - def prepare_request(next_link=None): + def internal_paging(next_link=None, raw=False): + if not next_link: # Construct URL url = self.list.metadata['url'] @@ -82,11 +81,6 @@ def prepare_request(next_link=None): # Construct and send request request = self._client.get(url, query_parameters, header_parameters) - return request - - def internal_paging(next_link=None): - request = prepare_request(next_link) - response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: @@ -97,10 +91,12 @@ def internal_paging(next_link=None): return response # Deserialize response - header_dict = None + deserialized = models.FactoryPaged(internal_paging, self._deserialize.dependencies) + if raw: header_dict = {} - deserialized = models.FactoryPaged(internal_paging, self._deserialize.dependencies, header_dict) + client_raw_response = models.FactoryPaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response return deserialized list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/factories'} @@ -164,6 +160,7 @@ def configure_factory_repo( raise exp deserialized = None + if response.status_code == 200: deserialized = self._deserialize('Factory', response) @@ -190,7 +187,8 @@ def list_by_resource_group( ~azure.mgmt.datafactory.models.FactoryPaged[~azure.mgmt.datafactory.models.Factory] :raises: :class:`CloudError` """ - def prepare_request(next_link=None): + def internal_paging(next_link=None, raw=False): + if not next_link: # Construct URL url = self.list_by_resource_group.metadata['url'] @@ -220,11 +218,6 @@ def prepare_request(next_link=None): # Construct and send request request = self._client.get(url, query_parameters, header_parameters) - return request - - def internal_paging(next_link=None): - request = prepare_request(next_link) - response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: @@ -235,10 +228,12 @@ def internal_paging(next_link=None): return response # Deserialize response - header_dict = None + deserialized = models.FactoryPaged(internal_paging, self._deserialize.dependencies) + if raw: header_dict = {} - deserialized = models.FactoryPaged(internal_paging, self._deserialize.dependencies, header_dict) + client_raw_response = models.FactoryPaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response return deserialized list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories'} @@ -306,6 +301,7 @@ def create_or_update( raise exp deserialized = None + if response.status_code == 200: deserialized = self._deserialize('Factory', response) @@ -377,6 +373,7 @@ def update( raise exp deserialized = None + if response.status_code == 200: deserialized = self._deserialize('Factory', response) @@ -444,6 +441,7 @@ def get( raise exp deserialized = None + if response.status_code == 200: deserialized = self._deserialize('Factory', response) @@ -566,6 +564,7 @@ def get_git_hub_access_token( raise exp deserialized = None + if response.status_code == 200: deserialized = self._deserialize('GitHubAccessTokenResponse', response) @@ -633,6 +632,7 @@ def get_data_plane_access( raise exp deserialized = None + if response.status_code == 200: deserialized = 
self._deserialize('AccessPolicyResponse', response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_nodes_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_nodes_operations.py similarity index 99% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_nodes_operations.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_nodes_operations.py index 870f990c1f10..81467b9e3385 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_nodes_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_nodes_operations.py @@ -19,8 +19,6 @@ class IntegrationRuntimeNodesOperations(object): """IntegrationRuntimeNodesOperations operations. - You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. - :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -98,6 +96,7 @@ def get( raise exp deserialized = None + if response.status_code == 200: deserialized = self._deserialize('SelfHostedIntegrationRuntimeNode', response) @@ -236,6 +235,7 @@ def update( raise exp deserialized = None + if response.status_code == 200: deserialized = self._deserialize('SelfHostedIntegrationRuntimeNode', response) @@ -304,6 +304,7 @@ def get_ip_address( raise exp deserialized = None + if response.status_code == 200: deserialized = self._deserialize('IntegrationRuntimeNodeIpAddress', response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_object_metadata_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_object_metadata_operations.py similarity index 98% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_object_metadata_operations.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_object_metadata_operations.py index aa8b795123ef..230f12d023c3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_object_metadata_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_object_metadata_operations.py @@ -21,8 +21,6 @@ class IntegrationRuntimeObjectMetadataOperations(object): """IntegrationRuntimeObjectMetadataOperations operations. - You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. - :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. 
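From the caller's side the internal_paging refactor above is invisible: list operations still return a Paged object that fetches pages lazily and follows nextLink on iteration (raw=True instead returns the raw paged wrapper, per the code above). A minimal sketch, assuming the client from the earlier snippet:

for ds in client.datasets.list_by_factory('my-resource-group', 'my-factory'):
    print(ds.name, ds.properties.type)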
@@ -208,6 +206,7 @@ def get( raise exp deserialized = None + if response.status_code == 200: deserialized = self._deserialize('SsisObjectMetadataListResponse', response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtimes_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtimes_operations.py similarity index 99% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtimes_operations.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtimes_operations.py index f31eeeb0952f..0a64be3b1441 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtimes_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtimes_operations.py @@ -21,8 +21,6 @@ class IntegrationRuntimesOperations(object): """IntegrationRuntimesOperations operations. - You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. - :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -59,7 +57,8 @@ def list_by_factory( ~azure.mgmt.datafactory.models.IntegrationRuntimeResourcePaged[~azure.mgmt.datafactory.models.IntegrationRuntimeResource] :raises: :class:`CloudError` """ - def prepare_request(next_link=None): + def internal_paging(next_link=None, raw=False): + if not next_link: # Construct URL url = self.list_by_factory.metadata['url'] @@ -90,11 +89,6 @@ def prepare_request(next_link=None): # Construct and send request request = self._client.get(url, query_parameters, header_parameters) - return request - - def internal_paging(next_link=None): - request = prepare_request(next_link) - response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: @@ -105,10 +99,12 @@ def internal_paging(next_link=None): return response # Deserialize response - header_dict = None + deserialized = models.IntegrationRuntimeResourcePaged(internal_paging, self._deserialize.dependencies) + if raw: header_dict = {} - deserialized = models.IntegrationRuntimeResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + client_raw_response = models.IntegrationRuntimeResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response return deserialized list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes'} @@ -181,6 +177,7 @@ def create_or_update( raise exp deserialized = None + if response.status_code == 200: deserialized = self._deserialize('IntegrationRuntimeResource', response) @@ -251,6 +248,7 @@ def get( raise exp deserialized = None + if response.status_code == 200: deserialized = self._deserialize('IntegrationRuntimeResource', response) @@ -331,6 +329,7 @@ def update( raise exp deserialized = None + if response.status_code == 200: deserialized = self._deserialize('IntegrationRuntimeResource', response) @@ -453,6 +452,7 @@ def get_status( raise exp deserialized = None + if response.status_code == 200: deserialized = self._deserialize('IntegrationRuntimeStatusResponse', response) @@ -520,6 +520,7 @@ def get_connection_info( raise exp deserialized = None + if 
response.status_code == 200: deserialized = self._deserialize('IntegrationRuntimeConnectionInfo', response) @@ -594,6 +595,7 @@ def regenerate_auth_key( raise exp deserialized = None + if response.status_code == 200: deserialized = self._deserialize('IntegrationRuntimeAuthKeys', response) @@ -658,6 +660,7 @@ def list_auth_keys( raise exp deserialized = None + if response.status_code == 200: deserialized = self._deserialize('IntegrationRuntimeAuthKeys', response) @@ -967,6 +970,7 @@ def get_monitoring_data( raise exp deserialized = None + if response.status_code == 200: deserialized = self._deserialize('IntegrationRuntimeMonitoringData', response) @@ -1165,6 +1169,7 @@ def create_linked_integration_runtime( raise exp deserialized = None + if response.status_code == 200: deserialized = self._deserialize('IntegrationRuntimeStatusResponse', response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_linked_services_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/linked_services_operations.py similarity index 97% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_linked_services_operations.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/linked_services_operations.py index 5e7d32bd357a..e6878336df91 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_linked_services_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/linked_services_operations.py @@ -19,8 +19,6 @@ class LinkedServicesOperations(object): """LinkedServicesOperations operations. - You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. - :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. 
@@ -57,7 +55,8 @@ def list_by_factory( ~azure.mgmt.datafactory.models.LinkedServiceResourcePaged[~azure.mgmt.datafactory.models.LinkedServiceResource] :raises: :class:`CloudError` """ - def prepare_request(next_link=None): + def internal_paging(next_link=None, raw=False): + if not next_link: # Construct URL url = self.list_by_factory.metadata['url'] @@ -88,11 +87,6 @@ def prepare_request(next_link=None): # Construct and send request request = self._client.get(url, query_parameters, header_parameters) - return request - - def internal_paging(next_link=None): - request = prepare_request(next_link) - response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: @@ -103,10 +97,12 @@ def internal_paging(next_link=None): return response # Deserialize response - header_dict = None + deserialized = models.LinkedServiceResourcePaged(internal_paging, self._deserialize.dependencies) + if raw: header_dict = {} - deserialized = models.LinkedServiceResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + client_raw_response = models.LinkedServiceResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response return deserialized list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices'} @@ -179,6 +175,7 @@ def create_or_update( raise exp deserialized = None + if response.status_code == 200: deserialized = self._deserialize('LinkedServiceResource', response) @@ -249,6 +246,7 @@ def get( raise exp deserialized = None + if response.status_code == 200: deserialized = self._deserialize('LinkedServiceResource', response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/operations.py similarity index 90% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_operations.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/operations.py index 2363a74cd143..2273e12d5ada 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/operations.py @@ -19,8 +19,6 @@ class Operations(object): """Operations operations. - You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. - :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. 
@@ -53,7 +51,8 @@ def list( ~azure.mgmt.datafactory.models.OperationPaged[~azure.mgmt.datafactory.models.Operation] :raises: :class:`CloudError` """ - def prepare_request(next_link=None): + def internal_paging(next_link=None, raw=False): + if not next_link: # Construct URL url = self.list.metadata['url'] @@ -78,11 +77,6 @@ def prepare_request(next_link=None): # Construct and send request request = self._client.get(url, query_parameters, header_parameters) - return request - - def internal_paging(next_link=None): - request = prepare_request(next_link) - response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: @@ -93,10 +87,12 @@ def internal_paging(next_link=None): return response # Deserialize response - header_dict = None + deserialized = models.OperationPaged(internal_paging, self._deserialize.dependencies) + if raw: header_dict = {} - deserialized = models.OperationPaged(internal_paging, self._deserialize.dependencies, header_dict) + client_raw_response = models.OperationPaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response return deserialized list.metadata = {'url': '/providers/Microsoft.DataFactory/operations'} diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipeline_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipeline_runs_operations.py similarity index 98% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipeline_runs_operations.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipeline_runs_operations.py index 4fe443938ef5..de8744612d20 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipeline_runs_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipeline_runs_operations.py @@ -19,8 +19,6 @@ class PipelineRunsOperations(object): """PipelineRunsOperations operations. - You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. - :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -97,6 +95,7 @@ def query_by_factory( raise exp deserialized = None + if response.status_code == 200: deserialized = self._deserialize('PipelineRunsQueryResponse', response) @@ -161,6 +160,7 @@ def get( raise exp deserialized = None + if response.status_code == 200: deserialized = self._deserialize('PipelineRun', response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipelines_operations.py similarity index 98% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipelines_operations.py index 00201749beee..343396e705ac 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipelines_operations.py @@ -19,8 +19,6 @@ class PipelinesOperations(object): """PipelinesOperations operations. 
- You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. - :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -57,7 +55,8 @@ def list_by_factory( ~azure.mgmt.datafactory.models.PipelineResourcePaged[~azure.mgmt.datafactory.models.PipelineResource] :raises: :class:`CloudError` """ - def prepare_request(next_link=None): + def internal_paging(next_link=None, raw=False): + if not next_link: # Construct URL url = self.list_by_factory.metadata['url'] @@ -88,11 +87,6 @@ def prepare_request(next_link=None): # Construct and send request request = self._client.get(url, query_parameters, header_parameters) - return request - - def internal_paging(next_link=None): - request = prepare_request(next_link) - response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: @@ -103,10 +97,12 @@ def internal_paging(next_link=None): return response # Deserialize response - header_dict = None + deserialized = models.PipelineResourcePaged(internal_paging, self._deserialize.dependencies) + if raw: header_dict = {} - deserialized = models.PipelineResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + client_raw_response = models.PipelineResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response return deserialized list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines'} @@ -177,6 +173,7 @@ def create_or_update( raise exp deserialized = None + if response.status_code == 200: deserialized = self._deserialize('PipelineResource', response) @@ -247,6 +244,7 @@ def get( raise exp deserialized = None + if response.status_code == 200: deserialized = self._deserialize('PipelineResource', response) @@ -394,6 +392,7 @@ def create_run( raise exp deserialized = None + if response.status_code == 200: deserialized = self._deserialize('CreateRunResponse', response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_rerun_triggers_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/rerun_triggers_operations.py similarity index 98% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_rerun_triggers_operations.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/rerun_triggers_operations.py index 6d5f8e9831de..58e0066a60dd 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_rerun_triggers_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/rerun_triggers_operations.py @@ -21,8 +21,6 @@ class RerunTriggersOperations(object): """RerunTriggersOperations operations. - You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. - :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. 
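The create_or_update, get, and create_run hunks above are whitespace-only: the regenerator now emits a blank line after the deserialized = None initializer. The surrounding shape is unchanged and worth keeping in mind when reading these hunks, since deserialization happens only on HTTP 200, so any other accepted status leaves the return value as None. A condensed sketch following the diff; the raw branch with ClientRawResponse is the usual msrest pattern and is assumed here, not shown in the hunks:

    deserialized = None

    if response.status_code == 200:
        deserialized = self._deserialize('PipelineResource', response)

    if raw:
        # Standard msrest wrapper for callers that want headers and status.
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized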
@@ -106,6 +104,7 @@ def create( raise exp deserialized = None + if response.status_code == 200: deserialized = self._deserialize('TriggerResource', response) @@ -397,7 +396,8 @@ def list_by_trigger( ~azure.mgmt.datafactory.models.RerunTriggerResourcePaged[~azure.mgmt.datafactory.models.RerunTriggerResource] :raises: :class:`CloudError` """ - def prepare_request(next_link=None): + def internal_paging(next_link=None, raw=False): + if not next_link: # Construct URL url = self.list_by_trigger.metadata['url'] @@ -429,11 +429,6 @@ def prepare_request(next_link=None): # Construct and send request request = self._client.get(url, query_parameters, header_parameters) - return request - - def internal_paging(next_link=None): - request = prepare_request(next_link) - response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: @@ -444,10 +439,12 @@ def internal_paging(next_link=None): return response # Deserialize response - header_dict = None + deserialized = models.RerunTriggerResourcePaged(internal_paging, self._deserialize.dependencies) + if raw: header_dict = {} - deserialized = models.RerunTriggerResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + client_raw_response = models.RerunTriggerResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response return deserialized list_by_trigger.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/rerunTriggers'} diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_trigger_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/trigger_runs_operations.py similarity index 98% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_trigger_runs_operations.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/trigger_runs_operations.py index 6cbf968008a1..e4e4774ae3bc 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_trigger_runs_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/trigger_runs_operations.py @@ -19,8 +19,6 @@ class TriggerRunsOperations(object): """TriggerRunsOperations operations. - You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. - :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. 
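For the paged operations, the restored raw handling differs from the scalar case sketched above: instead of a ClientRawResponse, raw=True builds a second Paged model primed with an initially empty header_dict, so both branches hand back an iterable pager. A hedged usage sketch against the msrest-era surface; resource names are placeholders:

    # Default: a RerunTriggerResourcePaged iterator of RerunTriggerResource.
    pager = client.rerun_triggers.list_by_trigger(
        '<resource-group>', '<factory-name>', '<trigger-name>')

    # raw=True: still a pager, constructed with the extra header_dict
    # argument as in the hunk above, not a ClientRawResponse wrapper.
    raw_pager = client.rerun_triggers.list_by_trigger(
        '<resource-group>', '<factory-name>', '<trigger-name>', raw=True)

    for rerun_trigger in pager:
        print(rerun_trigger.name)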
@@ -156,6 +154,7 @@ def query_by_factory( raise exp deserialized = None + if response.status_code == 200: deserialized = self._deserialize('TriggerRunsQueryResponse', response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/triggers_operations.py similarity index 98% rename from sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py rename to sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/triggers_operations.py index 57e31b1bd8c9..d6a2d51cf85a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/triggers_operations.py @@ -21,8 +21,6 @@ class TriggersOperations(object): """TriggersOperations operations. - You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. - :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -59,7 +57,8 @@ def list_by_factory( ~azure.mgmt.datafactory.models.TriggerResourcePaged[~azure.mgmt.datafactory.models.TriggerResource] :raises: :class:`CloudError` """ - def prepare_request(next_link=None): + def internal_paging(next_link=None, raw=False): + if not next_link: # Construct URL url = self.list_by_factory.metadata['url'] @@ -90,11 +89,6 @@ def prepare_request(next_link=None): # Construct and send request request = self._client.get(url, query_parameters, header_parameters) - return request - - def internal_paging(next_link=None): - request = prepare_request(next_link) - response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: @@ -105,10 +99,12 @@ def internal_paging(next_link=None): return response # Deserialize response - header_dict = None + deserialized = models.TriggerResourcePaged(internal_paging, self._deserialize.dependencies) + if raw: header_dict = {} - deserialized = models.TriggerResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + client_raw_response = models.TriggerResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response return deserialized list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers'} @@ -181,6 +177,7 @@ def create_or_update( raise exp deserialized = None + if response.status_code == 200: deserialized = self._deserialize('TriggerResource', response) @@ -251,6 +248,7 @@ def get( raise exp deserialized = None + if response.status_code == 200: deserialized = self._deserialize('TriggerResource', response) @@ -471,6 +469,7 @@ def get_event_subscription_status( raise exp deserialized = None + if response.status_code == 200: deserialized = self._deserialize('TriggerSubscriptionOperationStatus', response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py index e4f3d5055303..a39916c162ce 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py @@ -9,5 +9,5 @@ # regenerated. 
# -------------------------------------------------------------------------- -VERSION = "0.8.0" +VERSION = "1.0.0" From 6a521edf7b0bfd665ff6f15464d6b85e6d29b005 Mon Sep 17 00:00:00 2001 From: Zim Kalinowski Date: Fri, 30 Aug 2019 12:25:52 +0000 Subject: [PATCH 26/30] v2 --- .../data_factory_management_client.py | 6 +- .../azure/mgmt/datafactory/models/__init__.py | 1547 ++++++----------- .../models/access_policy_response.py | 10 +- .../models/access_policy_response_py3.py | 36 - .../azure/mgmt/datafactory/models/activity.py | 20 +- .../datafactory/models/activity_dependency.py | 17 +- .../models/activity_dependency_py3.py | 46 - .../datafactory/models/activity_policy.py | 16 +- .../datafactory/models/activity_policy_py3.py | 59 - .../mgmt/datafactory/models/activity_py3.py | 63 - .../mgmt/datafactory/models/activity_run.py | 6 +- .../datafactory/models/activity_run_py3.py | 102 -- .../models/activity_runs_query_response.py | 12 +- .../activity_runs_query_response_py3.py | 39 - .../models/amazon_mws_linked_service.py | 40 +- .../models/amazon_mws_linked_service_py3.py | 106 -- .../models/amazon_mws_object_dataset.py | 12 +- .../models/amazon_mws_object_dataset_py3.py | 72 - .../datafactory/models/amazon_mws_source.py | 10 +- .../models/amazon_mws_source_py3.py | 57 - .../models/amazon_redshift_linked_service.py | 28 +- .../amazon_redshift_linked_service_py3.py | 86 - .../models/amazon_redshift_source.py | 12 +- .../models/amazon_redshift_source_py3.py | 65 - .../models/amazon_redshift_table_dataset.py | 16 +- .../amazon_redshift_table_dataset_py3.py | 82 - .../datafactory/models/amazon_s3_dataset.py | 30 +- .../models/amazon_s3_dataset_py3.py | 107 -- .../models/amazon_s3_linked_service.py | 16 +- .../models/amazon_s3_linked_service_py3.py | 77 - .../datafactory/models/amazon_s3_location.py | 12 +- .../models/amazon_s3_location_py3.py | 55 - .../models/amazon_s3_read_settings.py | 22 +- .../models/amazon_s3_read_settings_py3.py | 78 - .../models/append_variable_activity.py | 14 +- .../models/append_variable_activity_py3.py | 60 - .../mgmt/datafactory/models/avro_dataset.py | 18 +- .../datafactory/models/avro_dataset_py3.py | 83 - .../mgmt/datafactory/models/avro_format.py | 15 +- .../datafactory/models/avro_format_py3.py | 46 - .../mgmt/datafactory/models/avro_sink.py | 12 +- .../mgmt/datafactory/models/avro_sink_py3.py | 69 - .../mgmt/datafactory/models/avro_source.py | 10 +- .../datafactory/models/avro_source_py3.py | 56 - .../datafactory/models/avro_write_settings.py | 12 +- .../models/avro_write_settings_py3.py | 46 - .../models/azure_batch_linked_service.py | 35 +- .../models/azure_batch_linked_service_py3.py | 88 - .../datafactory/models/azure_blob_dataset.py | 24 +- .../models/azure_blob_dataset_py3.py | 100 -- .../models/azure_blob_fs_dataset.py | 18 +- .../models/azure_blob_fs_dataset_py3.py | 85 - .../models/azure_blob_fs_linked_service.py | 24 +- .../azure_blob_fs_linked_service_py3.py | 86 - .../models/azure_blob_fs_location.py | 10 +- .../models/azure_blob_fs_location_py3.py | 50 - .../models/azure_blob_fs_read_settings.py | 20 +- .../models/azure_blob_fs_read_settings_py3.py | 73 - .../datafactory/models/azure_blob_fs_sink.py | 10 +- .../models/azure_blob_fs_sink_py3.py | 65 - .../models/azure_blob_fs_source.py | 14 +- .../models/azure_blob_fs_source_py3.py | 68 - .../models/azure_blob_fs_write_settings.py | 10 +- .../azure_blob_fs_write_settings_py3.py | 51 - .../azure_blob_storage_linked_service.py | 26 +- .../azure_blob_storage_linked_service_py3.py | 104 -- 
.../models/azure_blob_storage_location.py | 10 +- .../models/azure_blob_storage_location_py3.py | 50 - .../azure_blob_storage_read_settings.py | 20 +- .../azure_blob_storage_read_settings_py3.py | 73 - .../azure_blob_storage_write_settings.py | 10 +- .../azure_blob_storage_write_settings_py3.py | 51 - .../azure_data_explorer_command_activity.py | 19 +- ...zure_data_explorer_command_activity_py3.py | 71 - .../azure_data_explorer_linked_service.py | 40 +- .../azure_data_explorer_linked_service_py3.py | 86 - .../models/azure_data_explorer_sink.py | 14 +- .../models/azure_data_explorer_sink_py3.py | 76 - .../models/azure_data_explorer_source.py | 18 +- .../models/azure_data_explorer_source_py3.py | 70 - .../azure_data_explorer_table_dataset.py | 12 +- .../azure_data_explorer_table_dataset_py3.py | 72 - ...zure_data_lake_analytics_linked_service.py | 32 +- ..._data_lake_analytics_linked_service_py3.py | 99 -- .../models/azure_data_lake_store_dataset.py | 18 +- .../azure_data_lake_store_dataset_py3.py | 86 - .../azure_data_lake_store_linked_service.py | 28 +- ...zure_data_lake_store_linked_service_py3.py | 98 -- .../models/azure_data_lake_store_location.py | 15 +- .../azure_data_lake_store_location_py3.py | 45 - .../azure_data_lake_store_read_settings.py | 20 +- ...azure_data_lake_store_read_settings_py3.py | 73 - .../models/azure_data_lake_store_sink.py | 12 +- .../models/azure_data_lake_store_sink_py3.py | 69 - .../models/azure_data_lake_store_source.py | 10 +- .../azure_data_lake_store_source_py3.py | 58 - .../azure_data_lake_store_write_settings.py | 15 +- ...zure_data_lake_store_write_settings_py3.py | 46 - .../models/azure_databricks_linked_service.py | 40 +- .../azure_databricks_linked_service_py3.py | 126 -- .../models/azure_function_activity.py | 27 +- .../models/azure_function_activity_py3.py | 85 - .../models/azure_function_linked_service.py | 18 +- .../azure_function_linked_service_py3.py | 69 - .../models/azure_key_vault_linked_service.py | 12 +- .../azure_key_vault_linked_service_py3.py | 60 - .../azure_key_vault_secret_reference.py | 20 +- .../azure_key_vault_secret_reference_py3.py | 51 - .../models/azure_maria_db_linked_service.py | 14 +- .../azure_maria_db_linked_service_py3.py | 69 - .../models/azure_maria_db_source.py | 10 +- .../models/azure_maria_db_source_py3.py | 57 - .../models/azure_maria_db_table_dataset.py | 12 +- .../azure_maria_db_table_dataset_py3.py | 72 - .../azure_ml_batch_execution_activity.py | 16 +- .../azure_ml_batch_execution_activity_py3.py | 82 - .../models/azure_ml_linked_service.py | 29 +- .../models/azure_ml_linked_service_py3.py | 94 - .../azure_ml_update_resource_activity.py | 28 +- .../azure_ml_update_resource_activity_py3.py | 81 - .../models/azure_ml_web_service_file.py | 20 +- .../models/azure_ml_web_service_file_py3.py | 43 - .../models/azure_my_sql_linked_service.py | 16 +- .../models/azure_my_sql_linked_service_py3.py | 71 - .../datafactory/models/azure_my_sql_sink.py | 10 +- .../models/azure_my_sql_sink_py3.py | 66 - .../datafactory/models/azure_my_sql_source.py | 10 +- .../models/azure_my_sql_source_py3.py | 57 - .../models/azure_my_sql_table_dataset.py | 12 +- .../models/azure_my_sql_table_dataset_py3.py | 72 - .../azure_postgre_sql_linked_service.py | 14 +- .../azure_postgre_sql_linked_service_py3.py | 70 - .../models/azure_postgre_sql_sink.py | 10 +- .../models/azure_postgre_sql_sink_py3.py | 66 - .../models/azure_postgre_sql_source.py | 10 +- .../models/azure_postgre_sql_source_py3.py | 57 - .../models/azure_postgre_sql_table_dataset.py 
| 16 +- .../azure_postgre_sql_table_dataset_py3.py | 84 - .../datafactory/models/azure_queue_sink.py | 18 +- .../models/azure_queue_sink_py3.py | 61 - .../models/azure_search_index_dataset.py | 16 +- .../models/azure_search_index_dataset_py3.py | 73 - .../models/azure_search_index_sink.py | 10 +- .../models/azure_search_index_sink_py3.py | 67 - .../models/azure_search_linked_service.py | 18 +- .../models/azure_search_linked_service_py3.py | 69 - .../azure_sql_database_linked_service.py | 22 +- .../azure_sql_database_linked_service_py3.py | 87 - .../models/azure_sql_dw_linked_service.py | 22 +- .../models/azure_sql_dw_linked_service_py3.py | 88 - .../models/azure_sql_dw_table_dataset.py | 16 +- .../models/azure_sql_dw_table_dataset_py3.py | 82 - .../models/azure_sql_mi_linked_service.py | 22 +- .../models/azure_sql_mi_linked_service_py3.py | 87 - .../models/azure_sql_mi_table_dataset.py | 16 +- .../models/azure_sql_mi_table_dataset_py3.py | 82 - .../mgmt/datafactory/models/azure_sql_sink.py | 20 +- .../datafactory/models/azure_sql_sink_py3.py | 93 - .../datafactory/models/azure_sql_source.py | 16 +- .../models/azure_sql_source_py3.py | 73 - .../models/azure_sql_table_dataset.py | 16 +- .../models/azure_sql_table_dataset_py3.py | 82 - .../models/azure_storage_linked_service.py | 18 +- .../azure_storage_linked_service_py3.py | 83 - .../datafactory/models/azure_table_dataset.py | 16 +- .../models/azure_table_dataset_py3.py | 73 - .../datafactory/models/azure_table_sink.py | 16 +- .../models/azure_table_sink_py3.py | 81 - .../datafactory/models/azure_table_source.py | 12 +- .../models/azure_table_source_py3.py | 63 - .../azure_table_storage_linked_service.py | 18 +- .../azure_table_storage_linked_service_py3.py | 83 - .../mgmt/datafactory/models/binary_dataset.py | 16 +- .../datafactory/models/binary_dataset_py3.py | 77 - .../mgmt/datafactory/models/binary_sink.py | 10 +- .../datafactory/models/binary_sink_py3.py | 65 - .../mgmt/datafactory/models/binary_source.py | 10 +- .../datafactory/models/binary_source_py3.py | 56 - .../datafactory/models/blob_events_trigger.py | 21 +- .../models/blob_events_trigger_py3.py | 85 - .../mgmt/datafactory/models/blob_sink.py | 16 +- .../mgmt/datafactory/models/blob_sink_py3.py | 80 - .../mgmt/datafactory/models/blob_source.py | 14 +- .../datafactory/models/blob_source_py3.py | 68 - .../mgmt/datafactory/models/blob_trigger.py | 25 +- .../datafactory/models/blob_trigger_py3.py | 78 - .../models/cassandra_linked_service.py | 24 +- .../models/cassandra_linked_service_py3.py | 84 - .../datafactory/models/cassandra_source.py | 12 +- .../models/cassandra_source_py3.py | 70 - .../models/cassandra_table_dataset.py | 14 +- .../models/cassandra_table_dataset_py3.py | 77 - ...on_data_service_for_apps_entity_dataset.py | 12 +- ...ata_service_for_apps_entity_dataset_py3.py | 72 - ...on_data_service_for_apps_linked_service.py | 48 +- ...ata_service_for_apps_linked_service_py3.py | 115 -- .../common_data_service_for_apps_sink.py | 14 +- .../common_data_service_for_apps_sink_py3.py | 77 - .../common_data_service_for_apps_source.py | 10 +- ...common_data_service_for_apps_source_py3.py | 58 - .../models/concur_linked_service.py | 28 +- .../models/concur_linked_service_py3.py | 92 - .../models/concur_object_dataset.py | 12 +- .../models/concur_object_dataset_py3.py | 72 - .../mgmt/datafactory/models/concur_source.py | 10 +- .../datafactory/models/concur_source_py3.py | 57 - .../datafactory/models/control_activity.py | 19 +- .../models/control_activity_py3.py | 60 - 
.../mgmt/datafactory/models/copy_activity.py | 40 +- .../datafactory/models/copy_activity_py3.py | 124 -- .../mgmt/datafactory/models/copy_sink.py | 20 +- .../mgmt/datafactory/models/copy_sink_py3.py | 82 - .../mgmt/datafactory/models/copy_source.py | 16 +- .../datafactory/models/copy_source_py3.py | 83 - .../models/cosmos_db_linked_service.py | 16 +- .../models/cosmos_db_linked_service_py3.py | 71 - ...smos_db_mongo_db_api_collection_dataset.py | 16 +- ..._db_mongo_db_api_collection_dataset_py3.py | 73 - .../cosmos_db_mongo_db_api_linked_service.py | 23 +- ...smos_db_mongo_db_api_linked_service_py3.py | 67 - .../models/cosmos_db_mongo_db_api_sink.py | 10 +- .../models/cosmos_db_mongo_db_api_sink_py3.py | 68 - .../models/cosmos_db_mongo_db_api_source.py | 14 +- .../cosmos_db_mongo_db_api_source_py3.py | 71 - .../models/couchbase_linked_service.py | 14 +- .../models/couchbase_linked_service_py3.py | 70 - .../datafactory/models/couchbase_source.py | 10 +- .../models/couchbase_source_py3.py | 57 - .../models/couchbase_table_dataset.py | 12 +- .../models/couchbase_table_dataset_py3.py | 72 - ...eate_linked_integration_runtime_request.py | 12 +- ..._linked_integration_runtime_request_py3.py | 43 - .../datafactory/models/create_run_response.py | 10 +- .../models/create_run_response_py3.py | 34 - .../datafactory/models/custom_activity.py | 26 +- .../datafactory/models/custom_activity_py3.py | 91 - .../custom_activity_reference_object.py | 8 +- .../custom_activity_reference_object_py3.py | 33 - .../custom_data_source_linked_service.py | 12 +- .../custom_data_source_linked_service_py3.py | 58 - .../mgmt/datafactory/models/custom_dataset.py | 12 +- .../datafactory/models/custom_dataset_py3.py | 71 - .../data_factory_management_client_enums.py | 144 +- .../data_lake_analytics_usql_activity.py | 30 +- .../data_lake_analytics_usql_activity_py3.py | 98 -- .../models/databricks_notebook_activity.py | 22 +- .../databricks_notebook_activity_py3.py | 76 - .../models/databricks_spark_jar_activity.py | 23 +- .../databricks_spark_jar_activity_py3.py | 75 - .../databricks_spark_python_activity.py | 21 +- .../databricks_spark_python_activity_py3.py | 75 - .../azure/mgmt/datafactory/models/dataset.py | 26 +- .../models/dataset_bzip2_compression.py | 13 +- .../models/dataset_bzip2_compression_py3.py | 38 - .../datafactory/models/dataset_compression.py | 10 +- .../models/dataset_compression_py3.py | 47 - .../models/dataset_deflate_compression.py | 10 +- .../models/dataset_deflate_compression_py3.py | 42 - .../mgmt/datafactory/models/dataset_folder.py | 6 +- .../datafactory/models/dataset_folder_py3.py | 29 - .../models/dataset_gzip_compression.py | 10 +- .../models/dataset_gzip_compression_py3.py | 42 - .../datafactory/models/dataset_location.py | 16 +- .../models/dataset_location_py3.py | 49 - .../mgmt/datafactory/models/dataset_py3.py | 113 -- .../datafactory/models/dataset_reference.py | 15 +- .../models/dataset_reference_py3.py | 48 - .../datafactory/models/dataset_resource.py | 10 +- .../models/dataset_resource_py3.py | 53 - .../models/dataset_storage_format.py | 14 +- .../models/dataset_storage_format_py3.py | 57 - .../models/dataset_zip_deflate_compression.py | 10 +- .../dataset_zip_deflate_compression_py3.py | 42 - .../datafactory/models/db2_linked_service.py | 28 +- .../models/db2_linked_service_py3.py | 86 - .../mgmt/datafactory/models/db2_source.py | 10 +- .../mgmt/datafactory/models/db2_source_py3.py | 57 - .../datafactory/models/db2_table_dataset.py | 16 +- .../models/db2_table_dataset_py3.py | 82 - 
.../datafactory/models/delete_activity.py | 22 +- .../datafactory/models/delete_activity_py3.py | 87 - .../models/delimited_text_dataset.py | 32 +- .../models/delimited_text_dataset_py3.py | 122 -- .../models/delimited_text_read_settings.py | 10 +- .../delimited_text_read_settings_py3.py | 43 - .../datafactory/models/delimited_text_sink.py | 12 +- .../models/delimited_text_sink_py3.py | 70 - .../models/delimited_text_source.py | 12 +- .../models/delimited_text_source_py3.py | 61 - .../models/delimited_text_write_settings.py | 16 +- .../delimited_text_write_settings_py3.py | 49 - .../models/dependency_reference.py | 8 +- .../models/dependency_reference_py3.py | 42 - .../datafactory/models/distcp_settings.py | 25 +- .../datafactory/models/distcp_settings_py3.py | 49 - .../models/document_db_collection_dataset.py | 16 +- .../document_db_collection_dataset_py3.py | 73 - .../models/document_db_collection_sink.py | 12 +- .../models/document_db_collection_sink_py3.py | 71 - .../models/document_db_collection_source.py | 12 +- .../document_db_collection_source_py3.py | 62 - .../models/drill_linked_service.py | 14 +- .../models/drill_linked_service_py3.py | 69 - .../mgmt/datafactory/models/drill_source.py | 10 +- .../datafactory/models/drill_source_py3.py | 57 - .../datafactory/models/drill_table_dataset.py | 16 +- .../models/drill_table_dataset_py3.py | 82 - .../models/dynamics_ax_linked_service.py | 46 +- .../models/dynamics_ax_linked_service_py3.py | 93 - .../models/dynamics_ax_resource_dataset.py | 16 +- .../dynamics_ax_resource_dataset_py3.py | 73 - .../datafactory/models/dynamics_ax_source.py | 10 +- .../models/dynamics_ax_source_py3.py | 57 - .../models/dynamics_crm_entity_dataset.py | 12 +- .../models/dynamics_crm_entity_dataset_py3.py | 72 - .../models/dynamics_crm_linked_service.py | 46 +- .../models/dynamics_crm_linked_service_py3.py | 112 -- .../datafactory/models/dynamics_crm_sink.py | 14 +- .../models/dynamics_crm_sink_py3.py | 77 - .../datafactory/models/dynamics_crm_source.py | 10 +- .../models/dynamics_crm_source_py3.py | 58 - .../models/dynamics_entity_dataset.py | 12 +- .../models/dynamics_entity_dataset_py3.py | 72 - .../models/dynamics_linked_service.py | 43 +- .../models/dynamics_linked_service_py3.py | 109 -- .../mgmt/datafactory/models/dynamics_sink.py | 14 +- .../datafactory/models/dynamics_sink_py3.py | 77 - .../datafactory/models/dynamics_source.py | 10 +- .../datafactory/models/dynamics_source_py3.py | 58 - .../models/eloqua_linked_service.py | 28 +- .../models/eloqua_linked_service_py3.py | 91 - .../models/eloqua_object_dataset.py | 12 +- .../models/eloqua_object_dataset_py3.py | 72 - .../mgmt/datafactory/models/eloqua_source.py | 10 +- .../datafactory/models/eloqua_source_py3.py | 57 - .../datafactory/models/entity_reference.py | 8 +- .../models/entity_reference_py3.py | 34 - .../models/execute_pipeline_activity.py | 18 +- .../models/execute_pipeline_activity_py3.py | 65 - .../models/execute_ssis_package_activity.py | 38 +- .../execute_ssis_package_activity_py3.py | 124 -- .../datafactory/models/execution_activity.py | 14 +- .../models/execution_activity_py3.py | 75 - .../models/exposure_control_request.py | 8 +- .../models/exposure_control_request_py3.py | 32 - .../models/exposure_control_response.py | 4 +- .../models/exposure_control_response_py3.py | 40 - .../mgmt/datafactory/models/expression.py | 12 +- .../mgmt/datafactory/models/expression_py3.py | 43 - .../azure/mgmt/datafactory/models/factory.py | 10 +- .../models/factory_git_hub_configuration.py | 18 +- 
.../factory_git_hub_configuration_py3.py | 58 - .../datafactory/models/factory_identity.py | 10 +- .../models/factory_identity_py3.py | 49 - .../mgmt/datafactory/models/factory_py3.py | 81 - .../models/factory_repo_configuration.py | 26 +- .../models/factory_repo_configuration_py3.py | 65 - .../datafactory/models/factory_repo_update.py | 8 +- .../models/factory_repo_update_py3.py | 33 - .../models/factory_update_parameters.py | 8 +- .../models/factory_update_parameters_py3.py | 32 - .../models/factory_vsts_configuration.py | 22 +- .../models/factory_vsts_configuration_py3.py | 62 - .../models/file_server_linked_service.py | 20 +- .../models/file_server_linked_service_py3.py | 74 - .../models/file_server_location.py | 15 +- .../models/file_server_location_py3.py | 45 - .../models/file_server_read_settings.py | 20 +- .../models/file_server_read_settings_py3.py | 73 - .../models/file_server_write_settings.py | 15 +- .../models/file_server_write_settings_py3.py | 46 - .../datafactory/models/file_share_dataset.py | 24 +- .../models/file_share_dataset_py3.py | 101 -- .../datafactory/models/file_system_sink.py | 10 +- .../models/file_system_sink_py3.py | 65 - .../datafactory/models/file_system_source.py | 10 +- .../models/file_system_source_py3.py | 58 - .../datafactory/models/filter_activity.py | 18 +- .../datafactory/models/filter_activity_py3.py | 61 - .../datafactory/models/for_each_activity.py | 22 +- .../models/for_each_activity_py3.py | 73 - .../models/format_read_settings.py | 12 +- .../models/format_read_settings_py3.py | 39 - .../models/format_write_settings.py | 12 +- .../models/format_write_settings_py3.py | 39 - .../datafactory/models/ftp_read_settings.py | 16 +- .../models/ftp_read_settings_py3.py | 63 - .../models/ftp_server_linked_service.py | 28 +- .../models/ftp_server_linked_service_py3.py | 98 -- .../datafactory/models/ftp_server_location.py | 15 +- .../models/ftp_server_location_py3.py | 45 - .../models/get_metadata_activity.py | 16 +- .../models/get_metadata_activity_py3.py | 67 - .../get_ssis_object_metadata_request.py | 6 +- .../get_ssis_object_metadata_request_py3.py | 28 - .../models/git_hub_access_token_request.py | 17 +- .../git_hub_access_token_request_py3.py | 44 - .../models/git_hub_access_token_response.py | 6 +- .../git_hub_access_token_response_py3.py | 28 - .../models/google_ad_words_linked_service.py | 45 +- .../google_ad_words_linked_service_py3.py | 119 -- .../models/google_ad_words_object_dataset.py | 12 +- .../google_ad_words_object_dataset_py3.py | 72 - .../models/google_ad_words_source.py | 10 +- .../models/google_ad_words_source_py3.py | 57 - .../models/google_big_query_linked_service.py | 41 +- .../google_big_query_linked_service_py3.py | 124 -- .../models/google_big_query_object_dataset.py | 16 +- .../google_big_query_object_dataset_py3.py | 82 - .../models/google_big_query_source.py | 10 +- .../models/google_big_query_source_py3.py | 57 - .../models/greenplum_linked_service.py | 14 +- .../models/greenplum_linked_service_py3.py | 69 - .../datafactory/models/greenplum_source.py | 10 +- .../models/greenplum_source_py3.py | 57 - .../models/greenplum_table_dataset.py | 16 +- .../models/greenplum_table_dataset_py3.py | 82 - .../models/hbase_linked_service.py | 39 +- .../models/hbase_linked_service_py3.py | 114 -- .../models/hbase_object_dataset.py | 12 +- .../models/hbase_object_dataset_py3.py | 72 - .../mgmt/datafactory/models/hbase_source.py | 10 +- .../datafactory/models/hbase_source_py3.py | 57 - .../models/hd_insight_hive_activity.py | 26 +- 
.../models/hd_insight_hive_activity_py3.py | 96 - .../models/hd_insight_linked_service.py | 28 +- .../models/hd_insight_linked_service_py3.py | 96 - .../models/hd_insight_map_reduce_activity.py | 32 +- .../hd_insight_map_reduce_activity_py3.py | 99 -- .../hd_insight_on_demand_linked_service.py | 106 +- ...hd_insight_on_demand_linked_service_py3.py | 237 --- .../models/hd_insight_pig_activity.py | 22 +- .../models/hd_insight_pig_activity_py3.py | 87 - .../models/hd_insight_spark_activity.py | 36 +- .../models/hd_insight_spark_activity_py3.py | 100 -- .../models/hd_insight_streaming_activity.py | 53 +- .../hd_insight_streaming_activity_py3.py | 122 -- .../datafactory/models/hdfs_linked_service.py | 20 +- .../models/hdfs_linked_service_py3.py | 81 - .../mgmt/datafactory/models/hdfs_location.py | 15 +- .../datafactory/models/hdfs_location_py3.py | 45 - .../datafactory/models/hdfs_read_settings.py | 22 +- .../models/hdfs_read_settings_py3.py | 77 - .../mgmt/datafactory/models/hdfs_source.py | 12 +- .../datafactory/models/hdfs_source_py3.py | 62 - .../datafactory/models/hive_linked_service.py | 51 +- .../models/hive_linked_service_py3.py | 147 -- .../datafactory/models/hive_object_dataset.py | 16 +- .../models/hive_object_dataset_py3.py | 82 - .../mgmt/datafactory/models/hive_source.py | 10 +- .../datafactory/models/hive_source_py3.py | 57 - .../mgmt/datafactory/models/http_dataset.py | 22 +- .../datafactory/models/http_dataset_py3.py | 99 -- .../datafactory/models/http_linked_service.py | 26 +- .../models/http_linked_service_py3.py | 105 -- .../datafactory/models/http_read_settings.py | 16 +- .../models/http_read_settings_py3.py | 63 - .../models/http_server_location.py | 10 +- .../models/http_server_location_py3.py | 50 - .../mgmt/datafactory/models/http_source.py | 10 +- .../datafactory/models/http_source_py3.py | 60 - .../models/hubspot_linked_service.py | 27 +- .../models/hubspot_linked_service_py3.py | 96 - .../models/hubspot_object_dataset.py | 12 +- .../models/hubspot_object_dataset_py3.py | 72 - .../mgmt/datafactory/models/hubspot_source.py | 10 +- .../datafactory/models/hubspot_source_py3.py | 57 - .../models/if_condition_activity.py | 20 +- .../models/if_condition_activity_py3.py | 72 - .../models/impala_linked_service.py | 39 +- .../models/impala_linked_service_py3.py | 117 -- .../models/impala_object_dataset.py | 16 +- .../models/impala_object_dataset_py3.py | 82 - .../mgmt/datafactory/models/impala_source.py | 10 +- .../datafactory/models/impala_source_py3.py | 57 - .../models/informix_linked_service.py | 24 +- .../models/informix_linked_service_py3.py | 86 - .../mgmt/datafactory/models/informix_sink.py | 10 +- .../datafactory/models/informix_sink_py3.py | 66 - .../datafactory/models/informix_source.py | 10 +- .../datafactory/models/informix_source_py3.py | 57 - .../models/informix_table_dataset.py | 12 +- .../models/informix_table_dataset_py3.py | 72 - .../datafactory/models/integration_runtime.py | 12 +- .../models/integration_runtime_auth_keys.py | 8 +- .../integration_runtime_auth_keys_py3.py | 32 - .../integration_runtime_compute_properties.py | 16 +- ...egration_runtime_compute_properties_py3.py | 60 - .../integration_runtime_connection_info.py | 6 +- ...integration_runtime_connection_info_py3.py | 70 - ..._runtime_custom_setup_script_properties.py | 8 +- ...time_custom_setup_script_properties_py3.py | 33 - ...tegration_runtime_data_proxy_properties.py | 10 +- ...ation_runtime_data_proxy_properties_py3.py | 37 - .../integration_runtime_monitoring_data.py | 8 +- 
...integration_runtime_monitoring_data_py3.py | 33 - .../integration_runtime_node_ip_address.py | 4 +- ...integration_runtime_node_ip_address_py3.py | 35 - ...ntegration_runtime_node_monitoring_data.py | 6 +- ...ration_runtime_node_monitoring_data_py3.py | 79 - .../models/integration_runtime_py3.py | 51 - .../models/integration_runtime_reference.py | 14 +- .../integration_runtime_reference_py3.py | 48 - ...ation_runtime_regenerate_key_parameters.py | 6 +- ...n_runtime_regenerate_key_parameters_py3.py | 30 - .../models/integration_runtime_resource.py | 10 +- .../integration_runtime_resource_py3.py | 53 - .../integration_runtime_ssis_catalog_info.py | 14 +- ...tegration_runtime_ssis_catalog_info_py3.py | 55 - .../integration_runtime_ssis_properties.py | 16 +- ...integration_runtime_ssis_properties_py3.py | 59 - .../models/integration_runtime_status.py | 10 +- ...ntegration_runtime_status_list_response.py | 12 +- ...ration_runtime_status_list_response_py3.py | 40 - .../models/integration_runtime_status_py3.py | 64 - .../integration_runtime_status_response.py | 10 +- ...integration_runtime_status_response_py3.py | 42 - .../integration_runtime_vnet_properties.py | 10 +- ...integration_runtime_vnet_properties_py3.py | 38 - .../datafactory/models/jira_linked_service.py | 31 +- .../models/jira_linked_service_py3.py | 98 -- .../datafactory/models/jira_object_dataset.py | 12 +- .../models/jira_object_dataset_py3.py | 72 - .../mgmt/datafactory/models/jira_source.py | 10 +- .../datafactory/models/jira_source_py3.py | 57 - .../mgmt/datafactory/models/json_dataset.py | 18 +- .../datafactory/models/json_dataset_py3.py | 85 - .../mgmt/datafactory/models/json_format.py | 18 +- .../datafactory/models/json_format_py3.py | 82 - .../mgmt/datafactory/models/json_sink.py | 12 +- .../mgmt/datafactory/models/json_sink_py3.py | 69 - .../mgmt/datafactory/models/json_source.py | 10 +- .../datafactory/models/json_source_py3.py | 56 - .../datafactory/models/json_write_settings.py | 10 +- .../models/json_write_settings_py3.py | 45 - .../models/linked_integration_runtime.py | 4 +- ...d_integration_runtime_key_authorization.py | 12 +- ...tegration_runtime_key_authorization_py3.py | 39 - .../models/linked_integration_runtime_py3.py | 58 - ..._integration_runtime_rbac_authorization.py | 14 +- ...egration_runtime_rbac_authorization_py3.py | 41 - .../linked_integration_runtime_request.py | 12 +- .../linked_integration_runtime_request_py3.py | 35 - .../models/linked_integration_runtime_type.py | 8 +- .../linked_integration_runtime_type_py3.py | 42 - .../mgmt/datafactory/models/linked_service.py | 18 +- .../datafactory/models/linked_service_py3.py | 102 -- .../models/linked_service_reference.py | 14 +- .../models/linked_service_reference_py3.py | 48 - .../models/linked_service_resource.py | 10 +- .../models/linked_service_resource_py3.py | 53 - .../models/log_storage_settings.py | 15 +- .../models/log_storage_settings_py3.py | 46 - .../datafactory/models/lookup_activity.py | 22 +- .../datafactory/models/lookup_activity_py3.py | 74 - .../models/magento_linked_service.py | 22 +- .../models/magento_linked_service_py3.py | 85 - .../models/magento_object_dataset.py | 12 +- .../models/magento_object_dataset_py3.py | 72 - .../mgmt/datafactory/models/magento_source.py | 10 +- .../datafactory/models/magento_source_py3.py | 57 - .../models/managed_integration_runtime.py | 12 +- .../managed_integration_runtime_error.py | 6 +- .../managed_integration_runtime_error_py3.py | 55 - .../managed_integration_runtime_node.py | 8 +- 
.../managed_integration_runtime_node_py3.py | 52 - ...ed_integration_runtime_operation_result.py | 6 +- ...ntegration_runtime_operation_result_py3.py | 65 - .../models/managed_integration_runtime_py3.py | 65 - .../managed_integration_runtime_status.py | 8 +- .../managed_integration_runtime_status_py3.py | 78 - .../models/maria_db_linked_service.py | 14 +- .../models/maria_db_linked_service_py3.py | 69 - .../datafactory/models/maria_db_source.py | 10 +- .../datafactory/models/maria_db_source_py3.py | 57 - .../models/maria_db_table_dataset.py | 12 +- .../models/maria_db_table_dataset_py3.py | 72 - .../models/marketo_linked_service.py | 26 +- .../models/marketo_linked_service_py3.py | 90 - .../models/marketo_object_dataset.py | 12 +- .../models/marketo_object_dataset_py3.py | 72 - .../mgmt/datafactory/models/marketo_source.py | 10 +- .../datafactory/models/marketo_source_py3.py | 57 - .../models/microsoft_access_linked_service.py | 24 +- .../microsoft_access_linked_service_py3.py | 86 - .../models/microsoft_access_sink.py | 10 +- .../models/microsoft_access_sink_py3.py | 66 - .../models/microsoft_access_source.py | 10 +- .../models/microsoft_access_source_py3.py | 57 - .../models/microsoft_access_table_dataset.py | 12 +- .../microsoft_access_table_dataset_py3.py | 72 - .../models/mongo_db_collection_dataset.py | 16 +- .../models/mongo_db_collection_dataset_py3.py | 73 - .../mongo_db_cursor_methods_properties.py | 14 +- .../mongo_db_cursor_methods_properties_py3.py | 53 - .../models/mongo_db_linked_service.py | 36 +- .../models/mongo_db_linked_service_py3.py | 109 -- .../datafactory/models/mongo_db_source.py | 10 +- .../datafactory/models/mongo_db_source_py3.py | 57 - .../models/mongo_db_v2_collection_dataset.py | 16 +- .../mongo_db_v2_collection_dataset_py3.py | 73 - .../models/mongo_db_v2_linked_service.py | 22 +- .../models/mongo_db_v2_linked_service_py3.py | 66 - .../datafactory/models/mongo_db_v2_source.py | 14 +- .../models/mongo_db_v2_source_py3.py | 71 - .../models/multiple_pipeline_trigger.py | 10 +- .../models/multiple_pipeline_trigger_py3.py | 68 - .../models/my_sql_linked_service.py | 16 +- .../models/my_sql_linked_service_py3.py | 70 - .../mgmt/datafactory/models/my_sql_source.py | 10 +- .../datafactory/models/my_sql_source_py3.py | 57 - .../models/my_sql_table_dataset.py | 12 +- .../models/my_sql_table_dataset_py3.py | 72 - .../models/netezza_linked_service.py | 14 +- .../models/netezza_linked_service_py3.py | 69 - .../models/netezza_partition_settings.py | 10 +- .../models/netezza_partition_settings_py3.py | 42 - .../mgmt/datafactory/models/netezza_source.py | 14 +- .../datafactory/models/netezza_source_py3.py | 70 - .../models/netezza_table_dataset.py | 16 +- .../models/netezza_table_dataset_py3.py | 82 - .../models/odata_linked_service.py | 36 +- .../models/odata_linked_service_py3.py | 127 -- .../models/odata_resource_dataset.py | 12 +- .../models/odata_resource_dataset_py3.py | 72 - .../mgmt/datafactory/models/odata_source.py | 10 +- .../datafactory/models/odata_source_py3.py | 57 - .../datafactory/models/odbc_linked_service.py | 24 +- .../models/odbc_linked_service_py3.py | 86 - .../mgmt/datafactory/models/odbc_sink.py | 10 +- .../mgmt/datafactory/models/odbc_sink_py3.py | 66 - .../mgmt/datafactory/models/odbc_source.py | 10 +- .../datafactory/models/odbc_source_py3.py | 57 - .../datafactory/models/odbc_table_dataset.py | 12 +- .../models/odbc_table_dataset_py3.py | 72 - .../datafactory/models/office365_dataset.py | 18 +- .../models/office365_dataset_py3.py | 79 - 
.../models/office365_linked_service.py | 34 +- .../models/office365_linked_service_py3.py | 83 - .../datafactory/models/office365_source.py | 18 +- .../models/office365_source_py3.py | 78 - .../mgmt/datafactory/models/operation.py | 12 +- .../datafactory/models/operation_display.py | 12 +- .../models/operation_display_py3.py | 41 - .../models/operation_log_specification.py | 10 +- .../models/operation_log_specification_py3.py | 37 - .../models/operation_metric_availability.py | 8 +- .../operation_metric_availability_py3.py | 33 - .../models/operation_metric_dimension.py | 10 +- .../models/operation_metric_dimension_py3.py | 37 - .../models/operation_metric_specification.py | 24 +- .../operation_metric_specification_py3.py | 68 - .../mgmt/datafactory/models/operation_py3.py | 41 - .../models/operation_service_specification.py | 8 +- .../operation_service_specification_py3.py | 34 - .../models/oracle_linked_service.py | 16 +- .../models/oracle_linked_service_py3.py | 71 - .../models/oracle_partition_settings.py | 12 +- .../models/oracle_partition_settings_py3.py | 46 - .../oracle_service_cloud_linked_service.py | 32 +- ...oracle_service_cloud_linked_service_py3.py | 95 - .../oracle_service_cloud_object_dataset.py | 12 +- ...oracle_service_cloud_object_dataset_py3.py | 72 - .../models/oracle_service_cloud_source.py | 10 +- .../models/oracle_service_cloud_source_py3.py | 57 - .../mgmt/datafactory/models/oracle_sink.py | 10 +- .../datafactory/models/oracle_sink_py3.py | 66 - .../mgmt/datafactory/models/oracle_source.py | 16 +- .../datafactory/models/oracle_source_py3.py | 76 - .../models/oracle_table_dataset.py | 16 +- .../models/oracle_table_dataset_py3.py | 82 - .../mgmt/datafactory/models/orc_format.py | 15 +- .../mgmt/datafactory/models/orc_format_py3.py | 46 - .../models/parameter_specification.py | 14 +- .../models/parameter_specification_py3.py | 39 - .../datafactory/models/parquet_dataset.py | 16 +- .../datafactory/models/parquet_dataset_py3.py | 76 - .../mgmt/datafactory/models/parquet_format.py | 15 +- .../datafactory/models/parquet_format_py3.py | 46 - .../mgmt/datafactory/models/parquet_sink.py | 10 +- .../datafactory/models/parquet_sink_py3.py | 65 - .../mgmt/datafactory/models/parquet_source.py | 10 +- .../datafactory/models/parquet_source_py3.py | 56 - .../models/paypal_linked_service.py | 28 +- .../models/paypal_linked_service_py3.py | 92 - .../models/paypal_object_dataset.py | 12 +- .../models/paypal_object_dataset_py3.py | 72 - .../mgmt/datafactory/models/paypal_source.py | 10 +- .../datafactory/models/paypal_source_py3.py | 57 - .../models/phoenix_linked_service.py | 40 +- .../models/phoenix_linked_service_py3.py | 121 -- .../models/phoenix_object_dataset.py | 16 +- .../models/phoenix_object_dataset_py3.py | 82 - .../mgmt/datafactory/models/phoenix_source.py | 10 +- .../datafactory/models/phoenix_source_py3.py | 57 - .../datafactory/models/pipeline_folder.py | 6 +- .../datafactory/models/pipeline_folder_py3.py | 29 - .../datafactory/models/pipeline_reference.py | 15 +- .../models/pipeline_reference_py3.py | 48 - .../datafactory/models/pipeline_resource.py | 20 +- .../models/pipeline_resource_py3.py | 84 - .../mgmt/datafactory/models/pipeline_run.py | 6 +- .../models/pipeline_run_invoked_by.py | 4 +- .../models/pipeline_run_invoked_by_py3.py | 45 - .../datafactory/models/pipeline_run_py3.py | 99 -- .../models/pipeline_runs_query_response.py | 12 +- .../pipeline_runs_query_response_py3.py | 39 - .../datafactory/models/polybase_settings.py | 14 +- 
.../models/polybase_settings_py3.py | 53 - .../models/postgre_sql_linked_service.py | 16 +- .../models/postgre_sql_linked_service_py3.py | 70 - .../datafactory/models/postgre_sql_source.py | 10 +- .../models/postgre_sql_source_py3.py | 57 - .../models/postgre_sql_table_dataset.py | 16 +- .../models/postgre_sql_table_dataset_py3.py | 82 - .../models/presto_linked_service.py | 50 +- .../models/presto_linked_service_py3.py | 132 -- .../models/presto_object_dataset.py | 16 +- .../models/presto_object_dataset_py3.py | 82 - .../mgmt/datafactory/models/presto_source.py | 10 +- .../datafactory/models/presto_source_py3.py | 57 - .../models/quick_books_linked_service.py | 42 +- .../models/quick_books_linked_service_py3.py | 100 -- .../models/quick_books_object_dataset.py | 12 +- .../models/quick_books_object_dataset_py3.py | 72 - .../datafactory/models/quick_books_source.py | 10 +- .../models/quick_books_source_py3.py | 57 - .../datafactory/models/recurrence_schedule.py | 16 +- .../models/recurrence_schedule_occurrence.py | 10 +- .../recurrence_schedule_occurrence_py3.py | 38 - .../models/recurrence_schedule_py3.py | 50 - .../redirect_incompatible_row_settings.py | 20 +- .../redirect_incompatible_row_settings_py3.py | 47 - .../models/redshift_unload_settings.py | 24 +- .../models/redshift_unload_settings_py3.py | 48 - .../datafactory/models/relational_source.py | 10 +- .../models/relational_source_py3.py | 57 - .../models/relational_table_dataset.py | 12 +- .../models/relational_table_dataset_py3.py | 72 - .../models/rerun_trigger_resource.py | 10 +- .../models/rerun_trigger_resource_py3.py | 54 - .../models/rerun_tumbling_window_trigger.py | 28 +- ...mbling_window_trigger_action_parameters.py | 22 +- ...ng_window_trigger_action_parameters_py3.py | 47 - .../rerun_tumbling_window_trigger_py3.py | 78 - .../azure/mgmt/datafactory/models/resource.py | 8 +- .../mgmt/datafactory/models/resource_py3.py | 58 - .../models/responsys_linked_service.py | 28 +- .../models/responsys_linked_service_py3.py | 94 - .../models/responsys_object_dataset.py | 12 +- .../models/responsys_object_dataset_py3.py | 72 - .../datafactory/models/responsys_source.py | 10 +- .../models/responsys_source_py3.py | 57 - .../models/rest_resource_dataset.py | 20 +- .../models/rest_resource_dataset_py3.py | 93 - .../models/rest_service_linked_service.py | 36 +- .../models/rest_service_linked_service_py3.py | 107 -- .../mgmt/datafactory/models/rest_source.py | 20 +- .../datafactory/models/rest_source_py3.py | 86 - .../mgmt/datafactory/models/retry_policy.py | 8 +- .../datafactory/models/retry_policy_py3.py | 38 - .../models/run_filter_parameters.py | 24 +- .../models/run_filter_parameters_py3.py | 54 - .../datafactory/models/run_query_filter.py | 34 +- .../models/run_query_filter_py3.py | 53 - .../datafactory/models/run_query_order_by.py | 28 +- .../models/run_query_order_by_py3.py | 46 - .../models/salesforce_linked_service.py | 18 +- .../models/salesforce_linked_service_py3.py | 82 - ...lesforce_marketing_cloud_linked_service.py | 25 +- ...orce_marketing_cloud_linked_service_py3.py | 91 - ...lesforce_marketing_cloud_object_dataset.py | 12 +- ...orce_marketing_cloud_object_dataset_py3.py | 72 - .../salesforce_marketing_cloud_source.py | 10 +- .../salesforce_marketing_cloud_source_py3.py | 57 - .../models/salesforce_object_dataset.py | 12 +- .../models/salesforce_object_dataset_py3.py | 72 - ...salesforce_service_cloud_linked_service.py | 20 +- ...sforce_service_cloud_linked_service_py3.py | 87 - 
...salesforce_service_cloud_object_dataset.py | 12 +- ...sforce_service_cloud_object_dataset_py3.py | 72 - .../models/salesforce_service_cloud_sink.py | 14 +- .../salesforce_service_cloud_sink_py3.py | 84 - .../models/salesforce_service_cloud_source.py | 12 +- .../salesforce_service_cloud_source_py3.py | 63 - .../datafactory/models/salesforce_sink.py | 14 +- .../datafactory/models/salesforce_sink_py3.py | 84 - .../datafactory/models/salesforce_source.py | 12 +- .../models/salesforce_source_py3.py | 63 - .../datafactory/models/sap_bw_cube_dataset.py | 22 +- .../models/sap_bw_cube_dataset_py3.py | 67 - .../models/sap_bw_linked_service.py | 36 +- .../models/sap_bw_linked_service_py3.py | 88 - .../mgmt/datafactory/models/sap_bw_source.py | 10 +- .../datafactory/models/sap_bw_source_py3.py | 57 - .../sap_cloud_for_customer_linked_service.py | 22 +- ...p_cloud_for_customer_linked_service_py3.py | 76 - ...sap_cloud_for_customer_resource_dataset.py | 16 +- ...cloud_for_customer_resource_dataset_py3.py | 73 - .../models/sap_cloud_for_customer_sink.py | 10 +- .../models/sap_cloud_for_customer_sink_py3.py | 67 - .../models/sap_cloud_for_customer_source.py | 10 +- .../sap_cloud_for_customer_source_py3.py | 57 - .../models/sap_ecc_linked_service.py | 18 +- .../models/sap_ecc_linked_service_py3.py | 76 - .../models/sap_ecc_resource_dataset.py | 16 +- .../models/sap_ecc_resource_dataset_py3.py | 73 - .../mgmt/datafactory/models/sap_ecc_source.py | 10 +- .../datafactory/models/sap_ecc_source_py3.py | 57 - .../models/sap_hana_linked_service.py | 24 +- .../models/sap_hana_linked_service_py3.py | 85 - .../datafactory/models/sap_hana_source.py | 12 +- .../datafactory/models/sap_hana_source_py3.py | 62 - .../models/sap_hana_table_dataset.py | 14 +- .../models/sap_hana_table_dataset_py3.py | 77 - .../models/sap_open_hub_linked_service.py | 37 +- .../models/sap_open_hub_linked_service_py3.py | 99 -- .../datafactory/models/sap_open_hub_source.py | 12 +- .../models/sap_open_hub_source_py3.py | 66 - .../models/sap_open_hub_table_dataset.py | 22 +- .../models/sap_open_hub_table_dataset_py3.py | 87 - .../models/sap_table_linked_service.py | 40 +- .../models/sap_table_linked_service_py3.py | 140 -- .../models/sap_table_partition_settings.py | 12 +- .../sap_table_partition_settings_py3.py | 47 - .../models/sap_table_resource_dataset.py | 16 +- .../models/sap_table_resource_dataset_py3.py | 73 - .../datafactory/models/sap_table_source.py | 24 +- .../models/sap_table_source_py3.py | 100 -- .../datafactory/models/schedule_trigger.py | 12 +- .../models/schedule_trigger_py3.py | 64 - .../models/schedule_trigger_recurrence.py | 18 +- .../models/schedule_trigger_recurrence_py3.py | 54 - .../mgmt/datafactory/models/script_action.py | 22 +- .../datafactory/models/script_action_py3.py | 49 - .../mgmt/datafactory/models/secret_base.py | 8 +- .../datafactory/models/secret_base_py3.py | 41 - .../mgmt/datafactory/models/secure_string.py | 12 +- .../datafactory/models/secure_string_py3.py | 40 - ...dency_tumbling_window_trigger_reference.py | 16 +- ...y_tumbling_window_trigger_reference_py3.py | 46 - .../models/self_hosted_integration_runtime.py | 10 +- .../self_hosted_integration_runtime_node.py | 6 +- ...elf_hosted_integration_runtime_node_py3.py | 139 -- .../self_hosted_integration_runtime_py3.py | 46 - .../self_hosted_integration_runtime_status.py | 12 +- ...f_hosted_integration_runtime_status_py3.py | 146 -- .../models/service_now_linked_service.py | 34 +- .../models/service_now_linked_service_py3.py | 106 -- 
.../models/service_now_object_dataset.py | 12 +- .../models/service_now_object_dataset_py3.py | 72 - .../datafactory/models/service_now_source.py | 10 +- .../models/service_now_source_py3.py | 57 - .../models/set_variable_activity.py | 14 +- .../models/set_variable_activity_py3.py | 59 - .../mgmt/datafactory/models/sftp_location.py | 15 +- .../datafactory/models/sftp_location_py3.py | 45 - .../datafactory/models/sftp_read_settings.py | 18 +- .../models/sftp_read_settings_py3.py | 68 - .../models/sftp_server_linked_service.py | 34 +- .../models/sftp_server_linked_service_py3.py | 119 -- .../models/shopify_linked_service.py | 22 +- .../models/shopify_linked_service_py3.py | 86 - .../models/shopify_object_dataset.py | 12 +- .../models/shopify_object_dataset_py3.py | 72 - .../mgmt/datafactory/models/shopify_source.py | 10 +- .../datafactory/models/shopify_source_py3.py | 57 - .../models/spark_linked_service.py | 46 +- .../models/spark_linked_service_py3.py | 131 -- .../models/spark_object_dataset.py | 16 +- .../models/spark_object_dataset_py3.py | 82 - .../mgmt/datafactory/models/spark_source.py | 10 +- .../datafactory/models/spark_source_py3.py | 57 - .../mgmt/datafactory/models/sql_dw_sink.py | 16 +- .../datafactory/models/sql_dw_sink_py3.py | 83 - .../mgmt/datafactory/models/sql_dw_source.py | 14 +- .../datafactory/models/sql_dw_source_py3.py | 70 - .../mgmt/datafactory/models/sql_mi_sink.py | 20 +- .../datafactory/models/sql_mi_sink_py3.py | 93 - .../mgmt/datafactory/models/sql_mi_source.py | 16 +- .../datafactory/models/sql_mi_source_py3.py | 73 - .../models/sql_server_linked_service.py | 18 +- .../models/sql_server_linked_service_py3.py | 74 - .../datafactory/models/sql_server_sink.py | 20 +- .../datafactory/models/sql_server_sink_py3.py | 93 - .../datafactory/models/sql_server_source.py | 16 +- .../models/sql_server_source_py3.py | 73 - .../sql_server_stored_procedure_activity.py | 18 +- ...ql_server_stored_procedure_activity_py3.py | 70 - .../models/sql_server_table_dataset.py | 16 +- .../models/sql_server_table_dataset_py3.py | 82 - .../azure/mgmt/datafactory/models/sql_sink.py | 20 +- .../mgmt/datafactory/models/sql_sink_py3.py | 93 - .../mgmt/datafactory/models/sql_source.py | 14 +- .../mgmt/datafactory/models/sql_source_py3.py | 69 - .../models/square_linked_service.py | 34 +- .../models/square_linked_service_py3.py | 98 -- .../models/square_object_dataset.py | 12 +- .../models/square_object_dataset_py3.py | 72 - .../mgmt/datafactory/models/square_source.py | 10 +- .../datafactory/models/square_source_py3.py | 57 - .../models/ssis_access_credential.py | 18 +- .../models/ssis_access_credential_py3.py | 44 - .../datafactory/models/ssis_environment.py | 12 +- .../models/ssis_environment_py3.py | 51 - .../models/ssis_environment_reference.py | 12 +- .../models/ssis_environment_reference_py3.py | 40 - .../models/ssis_execution_credential.py | 18 +- .../models/ssis_execution_credential_py3.py | 44 - .../models/ssis_execution_parameter.py | 12 +- .../models/ssis_execution_parameter_py3.py | 35 - .../mgmt/datafactory/models/ssis_folder.py | 15 +- .../datafactory/models/ssis_folder_py3.py | 43 - .../datafactory/models/ssis_log_location.py | 19 +- .../models/ssis_log_location_py3.py | 57 - .../models/ssis_object_metadata.py | 14 +- .../ssis_object_metadata_list_response.py | 8 +- .../ssis_object_metadata_list_response_py3.py | 33 - .../models/ssis_object_metadata_py3.py | 53 - .../ssis_object_metadata_status_response.py | 12 +- ...sis_object_metadata_status_response_py3.py | 40 - 
.../mgmt/datafactory/models/ssis_package.py | 16 +-
.../models/ssis_package_location.py | 20 +-
.../models/ssis_package_location_py3.py | 54 -
.../datafactory/models/ssis_package_py3.py | 59 -
.../mgmt/datafactory/models/ssis_parameter.py | 28 +-
.../datafactory/models/ssis_parameter_py3.py | 72 -
.../mgmt/datafactory/models/ssis_project.py | 16 +-
.../datafactory/models/ssis_project_py3.py | 60 -
.../models/ssis_property_override.py | 14 +-
.../models/ssis_property_override_py3.py | 40 -
.../mgmt/datafactory/models/ssis_variable.py | 18 +-
.../datafactory/models/ssis_variable_py3.py | 52 -
.../datafactory/models/staging_settings.py | 16 +-
.../models/staging_settings_py3.py | 51 -
.../datafactory/models/store_read_settings.py | 14 +-
.../models/store_read_settings_py3.py | 45 -
.../models/store_write_settings.py | 14 +-
.../models/store_write_settings_py3.py | 57 -
.../models/stored_procedure_parameter.py | 8 +-
.../models/stored_procedure_parameter_py3.py | 35 -
.../mgmt/datafactory/models/sub_resource.py | 4 +-
.../datafactory/models/sub_resource_py3.py | 50 -
.../models/sybase_linked_service.py | 30 +-
.../models/sybase_linked_service_py3.py | 91 -
.../mgmt/datafactory/models/sybase_source.py | 10 +-
.../datafactory/models/sybase_source_py3.py | 57 -
.../models/sybase_table_dataset.py | 12 +-
.../models/sybase_table_dataset_py3.py | 72 -
.../models/teradata_linked_service.py | 20 +-
.../models/teradata_linked_service_py3.py | 84 -
.../models/teradata_partition_settings.py | 10 +-
.../models/teradata_partition_settings_py3.py | 42 -
.../datafactory/models/teradata_source.py | 14 +-
.../datafactory/models/teradata_source_py3.py | 70 -
.../models/teradata_table_dataset.py | 14 +-
.../models/teradata_table_dataset_py3.py | 77 -
.../mgmt/datafactory/models/text_format.py | 26 +-
.../datafactory/models/text_format_py3.py | 99 --
.../azure/mgmt/datafactory/models/trigger.py | 14 +-
.../models/trigger_dependency_reference.py | 12 +-
.../trigger_dependency_reference_py3.py | 46 -
.../models/trigger_pipeline_reference.py | 8 +-
.../models/trigger_pipeline_reference_py3.py | 32 -
.../mgmt/datafactory/models/trigger_py3.py | 68 -
.../datafactory/models/trigger_reference.py | 13 +-
.../models/trigger_reference_py3.py | 44 -
.../datafactory/models/trigger_resource.py | 10 +-
.../models/trigger_resource_py3.py | 53 -
.../mgmt/datafactory/models/trigger_run.py | 6 +-
.../datafactory/models/trigger_run_py3.py | 78 -
.../models/trigger_runs_query_response.py | 12 +-
.../models/trigger_runs_query_response_py3.py | 39 -
.../trigger_subscription_operation_status.py | 4 +-
...igger_subscription_operation_status_py3.py | 42 -
.../models/tumbling_window_trigger.py | 48 +-
...ing_window_trigger_dependency_reference.py | 14 +-
...window_trigger_dependency_reference_py3.py | 50 -
.../models/tumbling_window_trigger_py3.py | 112 --
.../mgmt/datafactory/models/until_activity.py | 22 +-
.../datafactory/models/until_activity_py3.py | 72 -
...update_integration_runtime_node_request.py | 6 +-
...te_integration_runtime_node_request_py3.py | 34 -
.../update_integration_runtime_request.py | 8 +-
.../update_integration_runtime_request_py3.py | 38 -
.../datafactory/models/user_access_policy.py | 14 +-
.../models/user_access_policy_py3.py | 51 -
.../mgmt/datafactory/models/user_property.py | 16 +-
.../datafactory/models/user_property_py3.py | 40 -
.../datafactory/models/validation_activity.py | 22 +-
.../models/validation_activity_py3.py | 81 -
.../models/variable_specification.py | 14 +-
.../models/variable_specification_py3.py | 39 -
.../models/vertica_linked_service.py | 14 +-
.../models/vertica_linked_service_py3.py | 69 -
.../mgmt/datafactory/models/vertica_source.py | 10 +-
.../datafactory/models/vertica_source_py3.py | 57 -
.../models/vertica_table_dataset.py | 16 +-
.../models/vertica_table_dataset_py3.py | 82 -
.../mgmt/datafactory/models/wait_activity.py | 14 +-
.../datafactory/models/wait_activity_py3.py | 56 -
.../mgmt/datafactory/models/web_activity.py | 32 +-
.../models/web_activity_authentication.py | 19 +-
.../models/web_activity_authentication_py3.py | 53 -
.../datafactory/models/web_activity_py3.py | 98 --
.../models/web_anonymous_authentication.py | 15 +-
.../web_anonymous_authentication_py3.py | 41 -
.../models/web_basic_authentication.py | 20 +-
.../models/web_basic_authentication_py3.py | 52 -
.../web_client_certificate_authentication.py | 18 +-
...b_client_certificate_authentication_py3.py | 53 -
.../datafactory/models/web_hook_activity.py | 27 +-
.../models/web_hook_activity_py3.py | 92 -
.../datafactory/models/web_linked_service.py | 12 +-
.../models/web_linked_service_py3.py | 59 -
.../web_linked_service_type_properties.py | 12 +-
.../web_linked_service_type_properties_py3.py | 50 -
.../mgmt/datafactory/models/web_source.py | 16 +-
.../mgmt/datafactory/models/web_source_py3.py | 52 -
.../datafactory/models/web_table_dataset.py | 18 +-
.../models/web_table_dataset_py3.py | 78 -
.../datafactory/models/xero_linked_service.py | 25 +-
.../models/xero_linked_service_py3.py | 93 -
.../datafactory/models/xero_object_dataset.py | 12 +-
.../models/xero_object_dataset_py3.py | 72 -
.../mgmt/datafactory/models/xero_source.py | 10 +-
.../datafactory/models/xero_source_py3.py | 57 -
.../datafactory/models/zoho_linked_service.py | 22 +-
.../models/zoho_linked_service_py3.py | 85 -
.../datafactory/models/zoho_object_dataset.py | 12 +-
.../models/zoho_object_dataset_py3.py | 72 -
.../mgmt/datafactory/models/zoho_source.py | 10 +-
.../datafactory/models/zoho_source_py3.py | 57 -
.../operations/activity_runs_operations.py | 6 +-
.../operations/datasets_operations.py | 24 +-
.../operations/exposure_control_operations.py | 12 +-
.../operations/factories_operations.py | 55 +-
.../integration_runtime_nodes_operations.py | 23 +-
...tion_runtime_object_metadata_operations.py | 63 +-
.../integration_runtimes_operations.py | 188 +-
.../operations/linked_services_operations.py | 24 +-
.../mgmt/datafactory/operations/operations.py | 7 +-
.../operations/pipeline_runs_operations.py | 17 +-
.../operations/pipelines_operations.py | 30 +-
.../operations/rerun_triggers_operations.py | 167 +-
.../operations/trigger_runs_operations.py | 11 +-
.../operations/triggers_operations.py | 240 ++-
swagger_to_sdk_config.json | 2 +-
1048 files changed, 4890 insertions(+), 41404 deletions(-)
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_write_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset_py3.py
delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source_py3.py
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference_py3.py delete mode 100644 
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_subscription_operation_status_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset_py3.py delete mode 100644 
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset_py3.py delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source_py3.py diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/data_factory_management_client.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/data_factory_management_client.py index bb8a2a22fd77..40e1e7c37322 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/data_factory_management_client.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/data_factory_management_client.py @@ -9,7 +9,7 @@ # regenerated. # -------------------------------------------------------------------------- -from msrest.service_client import SDKClient +from msrest.service_client import ServiceClient from msrest import Serializer, Deserializer from msrestazure import AzureConfiguration from .version import VERSION @@ -62,7 +62,7 @@ def __init__( self.subscription_id = subscription_id -class DataFactoryManagementClient(SDKClient): +class DataFactoryManagementClient(object): """The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory V2 services. :ivar config: Configuration for client. @@ -109,7 +109,7 @@ def __init__( self, credentials, subscription_id, base_url=None): self.config = DataFactoryManagementClientConfiguration(credentials, subscription_id, base_url) - super(DataFactoryManagementClient, self).__init__(self.config.credentials, self.config) + self._client = ServiceClient(self.config.credentials, self.config) client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self.api_version = '2018-06-01' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index 395f7908afbd..cb75267dd073 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -9,1038 +9,521 @@ # regenerated. 
 # --------------------------------------------------------------------------
 
-try:
-    from .resource_py3 import Resource
-    from .sub_resource_py3 import SubResource
-    from .expression_py3 import Expression
-    from .secure_string_py3 import SecureString
-    from .linked_service_reference_py3 import LinkedServiceReference
-    from .azure_key_vault_secret_reference_py3 import AzureKeyVaultSecretReference
-    from .secret_base_py3 import SecretBase
-    from .factory_identity_py3 import FactoryIdentity
-    from .factory_repo_configuration_py3 import FactoryRepoConfiguration
-    from .factory_py3 import Factory
-    from .integration_runtime_py3 import IntegrationRuntime
-    from .integration_runtime_resource_py3 import IntegrationRuntimeResource
-    from .integration_runtime_reference_py3 import IntegrationRuntimeReference
-    from .integration_runtime_status_py3 import IntegrationRuntimeStatus
-    from .integration_runtime_status_response_py3 import IntegrationRuntimeStatusResponse
-    from .integration_runtime_status_list_response_py3 import IntegrationRuntimeStatusListResponse
-    from .update_integration_runtime_request_py3 import UpdateIntegrationRuntimeRequest
-    from .update_integration_runtime_node_request_py3 import UpdateIntegrationRuntimeNodeRequest
-    from .linked_integration_runtime_request_py3 import LinkedIntegrationRuntimeRequest
-    from .create_linked_integration_runtime_request_py3 import CreateLinkedIntegrationRuntimeRequest
-    from .parameter_specification_py3 import ParameterSpecification
-    from .linked_service_py3 import LinkedService
-    from .linked_service_resource_py3 import LinkedServiceResource
-    from .dataset_folder_py3 import DatasetFolder
-    from .dataset_py3 import Dataset
-    from .dataset_resource_py3 import DatasetResource
-    from .activity_dependency_py3 import ActivityDependency
-    from .user_property_py3 import UserProperty
-    from .activity_py3 import Activity
-    from .variable_specification_py3 import VariableSpecification
-    from .pipeline_folder_py3 import PipelineFolder
-    from .pipeline_resource_py3 import PipelineResource
-    from .trigger_py3 import Trigger
-    from .trigger_resource_py3 import TriggerResource
-    from .create_run_response_py3 import CreateRunResponse
-    from .trigger_subscription_operation_status_py3 import TriggerSubscriptionOperationStatus
-    from .factory_vsts_configuration_py3 import FactoryVSTSConfiguration
-    from .factory_git_hub_configuration_py3 import FactoryGitHubConfiguration
-    from .factory_repo_update_py3 import FactoryRepoUpdate
-    from .git_hub_access_token_request_py3 import GitHubAccessTokenRequest
-    from .git_hub_access_token_response_py3 import GitHubAccessTokenResponse
-    from .user_access_policy_py3 import UserAccessPolicy
-    from .access_policy_response_py3 import AccessPolicyResponse
-    from .pipeline_reference_py3 import PipelineReference
-    from .trigger_pipeline_reference_py3 import TriggerPipelineReference
-    from .factory_update_parameters_py3 import FactoryUpdateParameters
-    from .dataset_reference_py3 import DatasetReference
-    from .run_query_filter_py3 import RunQueryFilter
-    from .run_query_order_by_py3 import RunQueryOrderBy
-    from .run_filter_parameters_py3 import RunFilterParameters
-    from .pipeline_run_invoked_by_py3 import PipelineRunInvokedBy
-    from .pipeline_run_py3 import PipelineRun
-    from .pipeline_runs_query_response_py3 import PipelineRunsQueryResponse
-    from .activity_run_py3 import ActivityRun
-    from .activity_runs_query_response_py3 import ActivityRunsQueryResponse
-    from .trigger_run_py3 import TriggerRun
-    from .trigger_runs_query_response_py3 import TriggerRunsQueryResponse
-    from .rerun_tumbling_window_trigger_action_parameters_py3 import RerunTumblingWindowTriggerActionParameters
-    from .rerun_tumbling_window_trigger_py3 import RerunTumblingWindowTrigger
-    from .rerun_trigger_resource_py3 import RerunTriggerResource
-    from .operation_display_py3 import OperationDisplay
-    from .operation_log_specification_py3 import OperationLogSpecification
-    from .operation_metric_availability_py3 import OperationMetricAvailability
-    from .operation_metric_dimension_py3 import OperationMetricDimension
-    from .operation_metric_specification_py3 import OperationMetricSpecification
-    from .operation_service_specification_py3 import OperationServiceSpecification
-    from .operation_py3 import Operation
-    from .get_ssis_object_metadata_request_py3 import GetSsisObjectMetadataRequest
-    from .ssis_object_metadata_status_response_py3 import SsisObjectMetadataStatusResponse
-    from .exposure_control_request_py3 import ExposureControlRequest
-    from .exposure_control_response_py3 import ExposureControlResponse
-    from .self_dependency_tumbling_window_trigger_reference_py3 import SelfDependencyTumblingWindowTriggerReference
-    from .trigger_reference_py3 import TriggerReference
-    from .tumbling_window_trigger_dependency_reference_py3 import TumblingWindowTriggerDependencyReference
-    from .trigger_dependency_reference_py3 import TriggerDependencyReference
-    from .dependency_reference_py3 import DependencyReference
-    from .retry_policy_py3 import RetryPolicy
-    from .tumbling_window_trigger_py3 import TumblingWindowTrigger
-    from .blob_events_trigger_py3 import BlobEventsTrigger
-    from .blob_trigger_py3 import BlobTrigger
-    from .recurrence_schedule_occurrence_py3 import RecurrenceScheduleOccurrence
-    from .recurrence_schedule_py3 import RecurrenceSchedule
-    from .schedule_trigger_recurrence_py3 import ScheduleTriggerRecurrence
-    from .schedule_trigger_py3 import ScheduleTrigger
-    from .multiple_pipeline_trigger_py3 import MultiplePipelineTrigger
-    from .azure_function_linked_service_py3 import AzureFunctionLinkedService
-    from .azure_data_explorer_linked_service_py3 import AzureDataExplorerLinkedService
-    from .sap_table_linked_service_py3 import SapTableLinkedService
-    from .google_ad_words_linked_service_py3 import GoogleAdWordsLinkedService
-    from .oracle_service_cloud_linked_service_py3 import OracleServiceCloudLinkedService
-    from .dynamics_ax_linked_service_py3 import DynamicsAXLinkedService
-    from .responsys_linked_service_py3 import ResponsysLinkedService
-    from .azure_databricks_linked_service_py3 import AzureDatabricksLinkedService
-    from .azure_data_lake_analytics_linked_service_py3 import AzureDataLakeAnalyticsLinkedService
-    from .script_action_py3 import ScriptAction
-    from .hd_insight_on_demand_linked_service_py3 import HDInsightOnDemandLinkedService
-    from .salesforce_marketing_cloud_linked_service_py3 import SalesforceMarketingCloudLinkedService
-    from .netezza_linked_service_py3 import NetezzaLinkedService
-    from .vertica_linked_service_py3 import VerticaLinkedService
-    from .zoho_linked_service_py3 import ZohoLinkedService
-    from .xero_linked_service_py3 import XeroLinkedService
-    from .square_linked_service_py3 import SquareLinkedService
-    from .spark_linked_service_py3 import SparkLinkedService
-    from .shopify_linked_service_py3 import ShopifyLinkedService
-    from .service_now_linked_service_py3 import ServiceNowLinkedService
-    from .quick_books_linked_service_py3 import QuickBooksLinkedService
-    from .presto_linked_service_py3 import PrestoLinkedService
-    from .phoenix_linked_service_py3 import PhoenixLinkedService
-    from .paypal_linked_service_py3 import PaypalLinkedService
-    from .marketo_linked_service_py3 import MarketoLinkedService
-    from .azure_maria_db_linked_service_py3 import AzureMariaDBLinkedService
-    from .maria_db_linked_service_py3 import MariaDBLinkedService
-    from .magento_linked_service_py3 import MagentoLinkedService
-    from .jira_linked_service_py3 import JiraLinkedService
-    from .impala_linked_service_py3 import ImpalaLinkedService
-    from .hubspot_linked_service_py3 import HubspotLinkedService
-    from .hive_linked_service_py3 import HiveLinkedService
-    from .hbase_linked_service_py3 import HBaseLinkedService
-    from .greenplum_linked_service_py3 import GreenplumLinkedService
-    from .google_big_query_linked_service_py3 import GoogleBigQueryLinkedService
-    from .eloqua_linked_service_py3 import EloquaLinkedService
-    from .drill_linked_service_py3 import DrillLinkedService
-    from .couchbase_linked_service_py3 import CouchbaseLinkedService
-    from .concur_linked_service_py3 import ConcurLinkedService
-    from .azure_postgre_sql_linked_service_py3 import AzurePostgreSqlLinkedService
-    from .amazon_mws_linked_service_py3 import AmazonMWSLinkedService
-    from .sap_hana_linked_service_py3 import SapHanaLinkedService
-    from .sap_bw_linked_service_py3 import SapBWLinkedService
-    from .sftp_server_linked_service_py3 import SftpServerLinkedService
-    from .ftp_server_linked_service_py3 import FtpServerLinkedService
-    from .http_linked_service_py3 import HttpLinkedService
-    from .azure_search_linked_service_py3 import AzureSearchLinkedService
-    from .custom_data_source_linked_service_py3 import CustomDataSourceLinkedService
-    from .amazon_redshift_linked_service_py3 import AmazonRedshiftLinkedService
-    from .amazon_s3_linked_service_py3 import AmazonS3LinkedService
-    from .rest_service_linked_service_py3 import RestServiceLinkedService
-    from .sap_open_hub_linked_service_py3 import SapOpenHubLinkedService
-    from .sap_ecc_linked_service_py3 import SapEccLinkedService
-    from .sap_cloud_for_customer_linked_service_py3 import SapCloudForCustomerLinkedService
-    from .salesforce_service_cloud_linked_service_py3 import SalesforceServiceCloudLinkedService
-    from .salesforce_linked_service_py3 import SalesforceLinkedService
-    from .office365_linked_service_py3 import Office365LinkedService
-    from .azure_blob_fs_linked_service_py3 import AzureBlobFSLinkedService
-    from .azure_data_lake_store_linked_service_py3 import AzureDataLakeStoreLinkedService
-    from .cosmos_db_mongo_db_api_linked_service_py3 import CosmosDbMongoDbApiLinkedService
-    from .mongo_db_v2_linked_service_py3 import MongoDbV2LinkedService
-    from .mongo_db_linked_service_py3 import MongoDbLinkedService
-    from .cassandra_linked_service_py3 import CassandraLinkedService
-    from .web_client_certificate_authentication_py3 import WebClientCertificateAuthentication
-    from .web_basic_authentication_py3 import WebBasicAuthentication
-    from .web_anonymous_authentication_py3 import WebAnonymousAuthentication
-    from .web_linked_service_type_properties_py3 import WebLinkedServiceTypeProperties
-    from .web_linked_service_py3 import WebLinkedService
-    from .odata_linked_service_py3 import ODataLinkedService
-    from .hdfs_linked_service_py3 import HdfsLinkedService
-    from .microsoft_access_linked_service_py3 import MicrosoftAccessLinkedService
-    from .informix_linked_service_py3 import InformixLinkedService
-    from .odbc_linked_service_py3 import OdbcLinkedService
-    from .azure_ml_linked_service_py3 import AzureMLLinkedService
-    from .teradata_linked_service_py3 import TeradataLinkedService
-    from .db2_linked_service_py3 import Db2LinkedService
-    from .sybase_linked_service_py3 import SybaseLinkedService
-    from .postgre_sql_linked_service_py3 import PostgreSqlLinkedService
-    from .my_sql_linked_service_py3 import MySqlLinkedService
-    from .azure_my_sql_linked_service_py3 import AzureMySqlLinkedService
-    from .oracle_linked_service_py3 import OracleLinkedService
-    from .file_server_linked_service_py3 import FileServerLinkedService
-    from .hd_insight_linked_service_py3 import HDInsightLinkedService
-    from .common_data_service_for_apps_linked_service_py3 import CommonDataServiceForAppsLinkedService
-    from .dynamics_crm_linked_service_py3 import DynamicsCrmLinkedService
-    from .dynamics_linked_service_py3 import DynamicsLinkedService
-    from .cosmos_db_linked_service_py3 import CosmosDbLinkedService
-    from .azure_key_vault_linked_service_py3 import AzureKeyVaultLinkedService
-    from .azure_batch_linked_service_py3 import AzureBatchLinkedService
-    from .azure_sql_mi_linked_service_py3 import AzureSqlMILinkedService
-    from .azure_sql_database_linked_service_py3 import AzureSqlDatabaseLinkedService
-    from .sql_server_linked_service_py3 import SqlServerLinkedService
-    from .azure_sql_dw_linked_service_py3 import AzureSqlDWLinkedService
-    from .azure_table_storage_linked_service_py3 import AzureTableStorageLinkedService
-    from .azure_blob_storage_linked_service_py3 import AzureBlobStorageLinkedService
-    from .azure_storage_linked_service_py3 import AzureStorageLinkedService
-    from .google_ad_words_object_dataset_py3 import GoogleAdWordsObjectDataset
-    from .azure_data_explorer_table_dataset_py3 import AzureDataExplorerTableDataset
-    from .oracle_service_cloud_object_dataset_py3 import OracleServiceCloudObjectDataset
-    from .dynamics_ax_resource_dataset_py3 import DynamicsAXResourceDataset
-    from .responsys_object_dataset_py3 import ResponsysObjectDataset
-    from .salesforce_marketing_cloud_object_dataset_py3 import SalesforceMarketingCloudObjectDataset
-    from .vertica_table_dataset_py3 import VerticaTableDataset
-    from .netezza_table_dataset_py3 import NetezzaTableDataset
-    from .zoho_object_dataset_py3 import ZohoObjectDataset
-    from .xero_object_dataset_py3 import XeroObjectDataset
-    from .square_object_dataset_py3 import SquareObjectDataset
-    from .spark_object_dataset_py3 import SparkObjectDataset
-    from .shopify_object_dataset_py3 import ShopifyObjectDataset
-    from .service_now_object_dataset_py3 import ServiceNowObjectDataset
-    from .quick_books_object_dataset_py3 import QuickBooksObjectDataset
-    from .presto_object_dataset_py3 import PrestoObjectDataset
-    from .phoenix_object_dataset_py3 import PhoenixObjectDataset
-    from .paypal_object_dataset_py3 import PaypalObjectDataset
-    from .marketo_object_dataset_py3 import MarketoObjectDataset
-    from .azure_maria_db_table_dataset_py3 import AzureMariaDBTableDataset
-    from .maria_db_table_dataset_py3 import MariaDBTableDataset
-    from .magento_object_dataset_py3 import MagentoObjectDataset
-    from .jira_object_dataset_py3 import JiraObjectDataset
-    from .impala_object_dataset_py3 import ImpalaObjectDataset
-    from .hubspot_object_dataset_py3 import HubspotObjectDataset
-    from .hive_object_dataset_py3 import HiveObjectDataset
-    from .hbase_object_dataset_py3 import HBaseObjectDataset
-    from .greenplum_table_dataset_py3 import GreenplumTableDataset
-    from .google_big_query_object_dataset_py3 import GoogleBigQueryObjectDataset
-    from .eloqua_object_dataset_py3 import EloquaObjectDataset
-    from .drill_table_dataset_py3 import DrillTableDataset
-    from .couchbase_table_dataset_py3 import CouchbaseTableDataset
-    from .concur_object_dataset_py3 import ConcurObjectDataset
-    from .azure_postgre_sql_table_dataset_py3 import AzurePostgreSqlTableDataset
-    from .amazon_mws_object_dataset_py3 import AmazonMWSObjectDataset
-    from .dataset_zip_deflate_compression_py3 import DatasetZipDeflateCompression
-    from .dataset_deflate_compression_py3 import DatasetDeflateCompression
-    from .dataset_gzip_compression_py3 import DatasetGZipCompression
-    from .dataset_bzip2_compression_py3 import DatasetBZip2Compression
-    from .dataset_compression_py3 import DatasetCompression
-    from .parquet_format_py3 import ParquetFormat
-    from .orc_format_py3 import OrcFormat
-    from .avro_format_py3 import AvroFormat
-    from .json_format_py3 import JsonFormat
-    from .text_format_py3 import TextFormat
-    from .dataset_storage_format_py3 import DatasetStorageFormat
-    from .http_dataset_py3 import HttpDataset
-    from .azure_search_index_dataset_py3 import AzureSearchIndexDataset
-    from .web_table_dataset_py3 import WebTableDataset
-    from .sap_table_resource_dataset_py3 import SapTableResourceDataset
-    from .rest_resource_dataset_py3 import RestResourceDataset
-    from .sql_server_table_dataset_py3 import SqlServerTableDataset
-    from .sap_open_hub_table_dataset_py3 import SapOpenHubTableDataset
-    from .sap_hana_table_dataset_py3 import SapHanaTableDataset
-    from .sap_ecc_resource_dataset_py3 import SapEccResourceDataset
-    from .sap_cloud_for_customer_resource_dataset_py3 import SapCloudForCustomerResourceDataset
-    from .sap_bw_cube_dataset_py3 import SapBwCubeDataset
-    from .sybase_table_dataset_py3 import SybaseTableDataset
-    from .salesforce_service_cloud_object_dataset_py3 import SalesforceServiceCloudObjectDataset
-    from .salesforce_object_dataset_py3 import SalesforceObjectDataset
-    from .microsoft_access_table_dataset_py3 import MicrosoftAccessTableDataset
-    from .postgre_sql_table_dataset_py3 import PostgreSqlTableDataset
-    from .my_sql_table_dataset_py3 import MySqlTableDataset
-    from .odbc_table_dataset_py3 import OdbcTableDataset
-    from .informix_table_dataset_py3 import InformixTableDataset
-    from .relational_table_dataset_py3 import RelationalTableDataset
-    from .db2_table_dataset_py3 import Db2TableDataset
-    from .amazon_redshift_table_dataset_py3 import AmazonRedshiftTableDataset
-    from .azure_my_sql_table_dataset_py3 import AzureMySqlTableDataset
-    from .teradata_table_dataset_py3 import TeradataTableDataset
-    from .oracle_table_dataset_py3 import OracleTableDataset
-    from .odata_resource_dataset_py3 import ODataResourceDataset
-    from .cosmos_db_mongo_db_api_collection_dataset_py3 import CosmosDbMongoDbApiCollectionDataset
-    from .mongo_db_v2_collection_dataset_py3 import MongoDbV2CollectionDataset
-    from .mongo_db_collection_dataset_py3 import MongoDbCollectionDataset
-    from .file_share_dataset_py3 import FileShareDataset
-    from .office365_dataset_py3 import Office365Dataset
-    from .azure_blob_fs_dataset_py3 import AzureBlobFSDataset
-    from .azure_data_lake_store_dataset_py3 import AzureDataLakeStoreDataset
-    from .common_data_service_for_apps_entity_dataset_py3 import CommonDataServiceForAppsEntityDataset
-    from .dynamics_crm_entity_dataset_py3 import DynamicsCrmEntityDataset
-    from .dynamics_entity_dataset_py3 import DynamicsEntityDataset
-    from .document_db_collection_dataset_py3 import DocumentDbCollectionDataset
-    from .custom_dataset_py3 import CustomDataset
-    from .cassandra_table_dataset_py3 import CassandraTableDataset
-    from .azure_sql_dw_table_dataset_py3 import AzureSqlDWTableDataset
-    from .azure_sql_mi_table_dataset_py3 import AzureSqlMITableDataset
-    from .azure_sql_table_dataset_py3 import AzureSqlTableDataset
-    from .azure_table_dataset_py3 import AzureTableDataset
-    from .azure_blob_dataset_py3 import AzureBlobDataset
-    from .hdfs_location_py3 import HdfsLocation
-    from .http_server_location_py3 import HttpServerLocation
-    from .sftp_location_py3 import SftpLocation
-    from .ftp_server_location_py3 import FtpServerLocation
-    from .file_server_location_py3 import FileServerLocation
-    from .amazon_s3_location_py3 import AmazonS3Location
-    from .azure_data_lake_store_location_py3 import AzureDataLakeStoreLocation
-    from .azure_blob_fs_location_py3 import AzureBlobFSLocation
-    from .azure_blob_storage_location_py3 import AzureBlobStorageLocation
-    from .dataset_location_py3 import DatasetLocation
-    from .binary_dataset_py3 import BinaryDataset
-    from .json_dataset_py3 import JsonDataset
-    from .delimited_text_dataset_py3 import DelimitedTextDataset
-    from .parquet_dataset_py3 import ParquetDataset
-    from .avro_dataset_py3 import AvroDataset
-    from .amazon_s3_dataset_py3 import AmazonS3Dataset
-    from .activity_policy_py3 import ActivityPolicy
-    from .azure_function_activity_py3 import AzureFunctionActivity
-    from .databricks_spark_python_activity_py3 import DatabricksSparkPythonActivity
-    from .databricks_spark_jar_activity_py3 import DatabricksSparkJarActivity
-    from .databricks_notebook_activity_py3 import DatabricksNotebookActivity
-    from .data_lake_analytics_usql_activity_py3 import DataLakeAnalyticsUSQLActivity
-    from .azure_ml_update_resource_activity_py3 import AzureMLUpdateResourceActivity
-    from .azure_ml_web_service_file_py3 import AzureMLWebServiceFile
-    from .azure_ml_batch_execution_activity_py3 import AzureMLBatchExecutionActivity
-    from .get_metadata_activity_py3 import GetMetadataActivity
-    from .web_activity_authentication_py3 import WebActivityAuthentication
-    from .web_activity_py3 import WebActivity
-    from .redshift_unload_settings_py3 import RedshiftUnloadSettings
-    from .amazon_redshift_source_py3 import AmazonRedshiftSource
-    from .google_ad_words_source_py3 import GoogleAdWordsSource
-    from .oracle_service_cloud_source_py3 import OracleServiceCloudSource
-    from .dynamics_ax_source_py3 import DynamicsAXSource
-    from .responsys_source_py3 import ResponsysSource
-    from .salesforce_marketing_cloud_source_py3 import SalesforceMarketingCloudSource
-    from .vertica_source_py3 import VerticaSource
-    from .netezza_partition_settings_py3 import NetezzaPartitionSettings
-    from .netezza_source_py3 import NetezzaSource
-    from .zoho_source_py3 import ZohoSource
-    from .xero_source_py3 import XeroSource
-    from .square_source_py3 import SquareSource
-    from .spark_source_py3 import SparkSource
-    from .shopify_source_py3 import ShopifySource
-    from .service_now_source_py3 import ServiceNowSource
-    from .quick_books_source_py3 import QuickBooksSource
-    from .presto_source_py3 import PrestoSource
-    from .phoenix_source_py3 import PhoenixSource
-    from .paypal_source_py3 import PaypalSource
-    from .marketo_source_py3 import MarketoSource
-    from .azure_maria_db_source_py3 import AzureMariaDBSource
-    from .maria_db_source_py3 import MariaDBSource
-    from .magento_source_py3 import MagentoSource
-    from .jira_source_py3 import JiraSource
-    from .impala_source_py3 import ImpalaSource
-    from .hubspot_source_py3 import HubspotSource
-    from .hive_source_py3 import HiveSource
-    from .hbase_source_py3 import HBaseSource
-    from .greenplum_source_py3 import GreenplumSource
-    from .google_big_query_source_py3 import GoogleBigQuerySource
-    from .eloqua_source_py3 import EloquaSource
-    from .drill_source_py3 import DrillSource
-    from .couchbase_source_py3 import CouchbaseSource
-    from .concur_source_py3 import ConcurSource
-    from .azure_postgre_sql_source_py3 import AzurePostgreSqlSource
-    from .amazon_mws_source_py3 import AmazonMWSSource
-    from .http_source_py3 import HttpSource
-    from .azure_blob_fs_source_py3 import AzureBlobFSSource
-    from .azure_data_lake_store_source_py3 import AzureDataLakeStoreSource
-    from .office365_source_py3 import Office365Source
-    from .mongo_db_cursor_methods_properties_py3 import MongoDbCursorMethodsProperties
-    from .cosmos_db_mongo_db_api_source_py3 import CosmosDbMongoDbApiSource
-    from .mongo_db_v2_source_py3 import MongoDbV2Source
-    from .mongo_db_source_py3 import MongoDbSource
-    from .cassandra_source_py3 import CassandraSource
-    from .web_source_py3 import WebSource
-    from .teradata_partition_settings_py3 import TeradataPartitionSettings
-    from .teradata_source_py3 import TeradataSource
-    from .oracle_partition_settings_py3 import OraclePartitionSettings
-    from .oracle_source_py3 import OracleSource
-    from .azure_data_explorer_source_py3 import AzureDataExplorerSource
-    from .azure_my_sql_source_py3 import AzureMySqlSource
-    from .distcp_settings_py3 import DistcpSettings
-    from .hdfs_source_py3 import HdfsSource
-    from .file_system_source_py3 import FileSystemSource
-    from .sql_dw_source_py3 import SqlDWSource
-    from .stored_procedure_parameter_py3 import StoredProcedureParameter
-    from .sql_mi_source_py3 import SqlMISource
-    from .azure_sql_source_py3 import AzureSqlSource
-    from .sql_server_source_py3 import SqlServerSource
-    from .sql_source_py3 import SqlSource
-    from .rest_source_py3 import RestSource
-    from .sap_table_partition_settings_py3 import SapTablePartitionSettings
-    from .sap_table_source_py3 import SapTableSource
-    from .sap_open_hub_source_py3 import SapOpenHubSource
-    from .sap_hana_source_py3 import SapHanaSource
-    from .sap_ecc_source_py3 import SapEccSource
-    from .sap_cloud_for_customer_source_py3 import SapCloudForCustomerSource
-    from .salesforce_service_cloud_source_py3 import SalesforceServiceCloudSource
-    from .salesforce_source_py3 import SalesforceSource
-    from .odata_source_py3 import ODataSource
-    from .sap_bw_source_py3 import SapBwSource
-    from .sybase_source_py3 import SybaseSource
-    from .postgre_sql_source_py3 import PostgreSqlSource
-    from .my_sql_source_py3 import MySqlSource
-    from .odbc_source_py3 import OdbcSource
-    from .db2_source_py3 import Db2Source
-    from .microsoft_access_source_py3 import MicrosoftAccessSource
-    from .informix_source_py3 import InformixSource
-    from .relational_source_py3 import RelationalSource
-    from .common_data_service_for_apps_source_py3 import CommonDataServiceForAppsSource
-    from .dynamics_crm_source_py3 import DynamicsCrmSource
-    from .dynamics_source_py3 import DynamicsSource
-    from .document_db_collection_source_py3 import DocumentDbCollectionSource
-    from .blob_source_py3 import BlobSource
-    from .azure_table_source_py3 import AzureTableSource
-    from .hdfs_read_settings_py3 import HdfsReadSettings
-    from .http_read_settings_py3 import HttpReadSettings
-    from .sftp_read_settings_py3 import SftpReadSettings
-    from .ftp_read_settings_py3 import FtpReadSettings
-    from .file_server_read_settings_py3 import FileServerReadSettings
-    from .amazon_s3_read_settings_py3 import AmazonS3ReadSettings
-    from .azure_data_lake_store_read_settings_py3 import AzureDataLakeStoreReadSettings
-    from .azure_blob_fs_read_settings_py3 import AzureBlobFSReadSettings
-    from .azure_blob_storage_read_settings_py3 import AzureBlobStorageReadSettings
-    from .store_read_settings_py3 import StoreReadSettings
-    from .binary_source_py3 import BinarySource
-    from .json_source_py3 import JsonSource
-    from .format_read_settings_py3 import FormatReadSettings
-    from .delimited_text_read_settings_py3 import DelimitedTextReadSettings
-    from .delimited_text_source_py3 import DelimitedTextSource
-    from .parquet_source_py3 import ParquetSource
-    from .avro_source_py3 import AvroSource
-    from .copy_source_py3 import CopySource
-    from .lookup_activity_py3 import LookupActivity
-    from .azure_data_explorer_command_activity_py3 import AzureDataExplorerCommandActivity
-    from .log_storage_settings_py3 import LogStorageSettings
-    from .delete_activity_py3 import DeleteActivity
-    from .sql_server_stored_procedure_activity_py3 import SqlServerStoredProcedureActivity
-    from .custom_activity_reference_object_py3 import CustomActivityReferenceObject
-    from .custom_activity_py3 import CustomActivity
-    from .ssis_access_credential_py3 import SSISAccessCredential
-    from .ssis_log_location_py3 import SSISLogLocation
-    from .ssis_property_override_py3 import SSISPropertyOverride
-    from .ssis_execution_parameter_py3 import SSISExecutionParameter
-    from .ssis_execution_credential_py3 import SSISExecutionCredential
-    from .ssis_package_location_py3 import SSISPackageLocation
-    from .execute_ssis_package_activity_py3 import ExecuteSSISPackageActivity
-    from .hd_insight_spark_activity_py3 import HDInsightSparkActivity
-    from .hd_insight_streaming_activity_py3 import HDInsightStreamingActivity
-    from .hd_insight_map_reduce_activity_py3 import HDInsightMapReduceActivity
-    from .hd_insight_pig_activity_py3 import HDInsightPigActivity
-    from .hd_insight_hive_activity_py3 import HDInsightHiveActivity
-    from .redirect_incompatible_row_settings_py3 import RedirectIncompatibleRowSettings
-    from .staging_settings_py3 import StagingSettings
-    from .cosmos_db_mongo_db_api_sink_py3 import CosmosDbMongoDbApiSink
-    from .salesforce_service_cloud_sink_py3 import SalesforceServiceCloudSink
-    from .salesforce_sink_py3 import SalesforceSink
-    from .azure_data_explorer_sink_py3 import AzureDataExplorerSink
-    from .common_data_service_for_apps_sink_py3 import CommonDataServiceForAppsSink
-    from .dynamics_crm_sink_py3 import DynamicsCrmSink
-    from .dynamics_sink_py3 import DynamicsSink
-    from .microsoft_access_sink_py3 import MicrosoftAccessSink
-    from .informix_sink_py3 import InformixSink
-    from .odbc_sink_py3 import OdbcSink
-    from .azure_search_index_sink_py3 import AzureSearchIndexSink
-    from .azure_blob_fs_sink_py3 import AzureBlobFSSink
-    from .azure_data_lake_store_sink_py3 import AzureDataLakeStoreSink
-    from .oracle_sink_py3 import OracleSink
-    from .polybase_settings_py3 import PolybaseSettings
-    from .sql_dw_sink_py3 import SqlDWSink
-    from .sql_mi_sink_py3 import SqlMISink
-    from .azure_sql_sink_py3 import AzureSqlSink
-    from .sql_server_sink_py3 import SqlServerSink
-    from .sql_sink_py3 import SqlSink
-    from .document_db_collection_sink_py3 import DocumentDbCollectionSink
-    from .file_system_sink_py3 import FileSystemSink
-    from .blob_sink_py3 import BlobSink
-    from .file_server_write_settings_py3 import FileServerWriteSettings
-    from .azure_data_lake_store_write_settings_py3 import AzureDataLakeStoreWriteSettings
-    from .azure_blob_fs_write_settings_py3 import AzureBlobFSWriteSettings
-    from .azure_blob_storage_write_settings_py3 import AzureBlobStorageWriteSettings
-    from .store_write_settings_py3 import StoreWriteSettings
-    from .binary_sink_py3 import BinarySink
-    from .parquet_sink_py3 import ParquetSink
-    from .json_write_settings_py3 import JsonWriteSettings
-    from .delimited_text_write_settings_py3 import DelimitedTextWriteSettings
-    from .format_write_settings_py3 import FormatWriteSettings
-    from .avro_write_settings_py3 import AvroWriteSettings
-    from .avro_sink_py3 import AvroSink
-    from .azure_table_sink_py3 import AzureTableSink
-    from .azure_queue_sink_py3 import AzureQueueSink
-    from .sap_cloud_for_customer_sink_py3 import SapCloudForCustomerSink
-    from .azure_my_sql_sink_py3 import AzureMySqlSink
-    from .azure_postgre_sql_sink_py3 import AzurePostgreSqlSink
-    from .json_sink_py3 import JsonSink
-    from .delimited_text_sink_py3 import DelimitedTextSink
-    from .copy_sink_py3 import CopySink
-    from .copy_activity_py3 import CopyActivity
-    from .execution_activity_py3 import ExecutionActivity
-    from .web_hook_activity_py3 import WebHookActivity
-    from .append_variable_activity_py3 import AppendVariableActivity
-    from .set_variable_activity_py3 import SetVariableActivity
-    from .filter_activity_py3 import FilterActivity
-    from .validation_activity_py3 import ValidationActivity
-    from .until_activity_py3 import UntilActivity
-    from .wait_activity_py3 import WaitActivity
-    from .for_each_activity_py3 import ForEachActivity
-    from .if_condition_activity_py3 import IfConditionActivity
-    from .execute_pipeline_activity_py3 import ExecutePipelineActivity
-    from .control_activity_py3 import ControlActivity
-    from .linked_integration_runtime_py3 import LinkedIntegrationRuntime
-    from .self_hosted_integration_runtime_node_py3 import SelfHostedIntegrationRuntimeNode
-    from .self_hosted_integration_runtime_status_py3 import SelfHostedIntegrationRuntimeStatus
-    from .managed_integration_runtime_operation_result_py3 import ManagedIntegrationRuntimeOperationResult
-    from .managed_integration_runtime_error_py3 import ManagedIntegrationRuntimeError
-    from .managed_integration_runtime_node_py3 import ManagedIntegrationRuntimeNode
-    from .managed_integration_runtime_status_py3 import ManagedIntegrationRuntimeStatus
-    from .linked_integration_runtime_rbac_authorization_py3 import LinkedIntegrationRuntimeRbacAuthorization
-    from .linked_integration_runtime_key_authorization_py3 import LinkedIntegrationRuntimeKeyAuthorization
-    from .linked_integration_runtime_type_py3 import LinkedIntegrationRuntimeType
-    from .self_hosted_integration_runtime_py3 import SelfHostedIntegrationRuntime
-    from .entity_reference_py3 import EntityReference
-    from .integration_runtime_data_proxy_properties_py3 import IntegrationRuntimeDataProxyProperties
-    from .integration_runtime_custom_setup_script_properties_py3 import IntegrationRuntimeCustomSetupScriptProperties
-    from .integration_runtime_ssis_catalog_info_py3 import IntegrationRuntimeSsisCatalogInfo
-    from .integration_runtime_ssis_properties_py3 import IntegrationRuntimeSsisProperties
-    from .integration_runtime_vnet_properties_py3 import IntegrationRuntimeVNetProperties
-    from .integration_runtime_compute_properties_py3 import IntegrationRuntimeComputeProperties
-    from .managed_integration_runtime_py3 import ManagedIntegrationRuntime
-    from .integration_runtime_node_ip_address_py3 import IntegrationRuntimeNodeIpAddress
-    from .ssis_variable_py3 import SsisVariable
-    from .ssis_environment_py3 import SsisEnvironment
-    from .ssis_parameter_py3 import SsisParameter
-    from .ssis_package_py3 import SsisPackage
-    from .ssis_environment_reference_py3 import SsisEnvironmentReference
-    from .ssis_project_py3 import SsisProject
-    from .ssis_folder_py3 import SsisFolder
-    from .ssis_object_metadata_py3 import SsisObjectMetadata
-    from .ssis_object_metadata_list_response_py3 import SsisObjectMetadataListResponse
-    from .integration_runtime_node_monitoring_data_py3 import IntegrationRuntimeNodeMonitoringData
-    from .integration_runtime_monitoring_data_py3 import IntegrationRuntimeMonitoringData
-    from .integration_runtime_auth_keys_py3 import IntegrationRuntimeAuthKeys
-    from .integration_runtime_regenerate_key_parameters_py3 import IntegrationRuntimeRegenerateKeyParameters
-    from .integration_runtime_connection_info_py3 import IntegrationRuntimeConnectionInfo
-except (SyntaxError, ImportError):
-    from .resource import Resource
-    from .sub_resource import SubResource
-    from .expression import Expression
-    from .secure_string import SecureString
-    from .linked_service_reference import LinkedServiceReference
-    from .azure_key_vault_secret_reference import AzureKeyVaultSecretReference
-    from .secret_base import SecretBase
-    from .factory_identity import FactoryIdentity
-    from .factory_repo_configuration import FactoryRepoConfiguration
-    from .factory import Factory
-    from .integration_runtime import IntegrationRuntime
-    from .integration_runtime_resource import IntegrationRuntimeResource
-    from .integration_runtime_reference import IntegrationRuntimeReference
-    from .integration_runtime_status import IntegrationRuntimeStatus
-    from .integration_runtime_status_response import IntegrationRuntimeStatusResponse
-    from .integration_runtime_status_list_response import IntegrationRuntimeStatusListResponse
-    from .update_integration_runtime_request import UpdateIntegrationRuntimeRequest
-    from .update_integration_runtime_node_request import UpdateIntegrationRuntimeNodeRequest
-    from .linked_integration_runtime_request import LinkedIntegrationRuntimeRequest
-    from .create_linked_integration_runtime_request import CreateLinkedIntegrationRuntimeRequest
-    from .parameter_specification import ParameterSpecification
-    from .linked_service import LinkedService
-    from .linked_service_resource import LinkedServiceResource
-    from .dataset_folder import DatasetFolder
-    from .dataset import Dataset
-    from .dataset_resource import DatasetResource
-    from .activity_dependency import ActivityDependency
-    from .user_property import UserProperty
-    from .activity import Activity
-    from .variable_specification import VariableSpecification
-    from .pipeline_folder import PipelineFolder
-    from .pipeline_resource import PipelineResource
-    from .trigger import Trigger
-    from .trigger_resource import TriggerResource
-    from .create_run_response import CreateRunResponse
-    from .trigger_subscription_operation_status import TriggerSubscriptionOperationStatus
-    from .factory_vsts_configuration import FactoryVSTSConfiguration
-    from .factory_git_hub_configuration import FactoryGitHubConfiguration
-    from .factory_repo_update import FactoryRepoUpdate
-    from .git_hub_access_token_request import GitHubAccessTokenRequest
-    from .git_hub_access_token_response import GitHubAccessTokenResponse
-    from .user_access_policy import UserAccessPolicy
-    from .access_policy_response import AccessPolicyResponse
-    from .pipeline_reference import PipelineReference
-    from .trigger_pipeline_reference import TriggerPipelineReference
-    from .factory_update_parameters import FactoryUpdateParameters
-    from .dataset_reference import DatasetReference
-    from .run_query_filter import RunQueryFilter
-    from .run_query_order_by import RunQueryOrderBy
-    from .run_filter_parameters import RunFilterParameters
-    from .pipeline_run_invoked_by import PipelineRunInvokedBy
-    from .pipeline_run import PipelineRun
-    from .pipeline_runs_query_response import PipelineRunsQueryResponse
-    from .activity_run import ActivityRun
-    from .activity_runs_query_response import ActivityRunsQueryResponse
-    from .trigger_run import TriggerRun
-    from .trigger_runs_query_response import TriggerRunsQueryResponse
-    from .rerun_tumbling_window_trigger_action_parameters import RerunTumblingWindowTriggerActionParameters
-    from .rerun_tumbling_window_trigger import RerunTumblingWindowTrigger
-    from .rerun_trigger_resource import RerunTriggerResource
-    from .operation_display import OperationDisplay
-    from .operation_log_specification import OperationLogSpecification
-    from .operation_metric_availability import OperationMetricAvailability
-    from .operation_metric_dimension import OperationMetricDimension
-    from .operation_metric_specification import OperationMetricSpecification
-    from .operation_service_specification import OperationServiceSpecification
-    from .operation import Operation
-    from .get_ssis_object_metadata_request import GetSsisObjectMetadataRequest
-    from .ssis_object_metadata_status_response import SsisObjectMetadataStatusResponse
-    from .exposure_control_request import ExposureControlRequest
-    from .exposure_control_response import ExposureControlResponse
-    from .self_dependency_tumbling_window_trigger_reference import SelfDependencyTumblingWindowTriggerReference
-    from .trigger_reference import TriggerReference
-    from .tumbling_window_trigger_dependency_reference import TumblingWindowTriggerDependencyReference
-    from .trigger_dependency_reference import TriggerDependencyReference
-    from .dependency_reference import DependencyReference
-    from .retry_policy import RetryPolicy
-    from .tumbling_window_trigger import TumblingWindowTrigger
-    from .blob_events_trigger import BlobEventsTrigger
-    from .blob_trigger import BlobTrigger
-    from .recurrence_schedule_occurrence import RecurrenceScheduleOccurrence
-    from .recurrence_schedule import RecurrenceSchedule
-    from .schedule_trigger_recurrence import ScheduleTriggerRecurrence
-    from .schedule_trigger import ScheduleTrigger
-    from .multiple_pipeline_trigger import MultiplePipelineTrigger
-    from .azure_function_linked_service import AzureFunctionLinkedService
-    from .azure_data_explorer_linked_service import AzureDataExplorerLinkedService
-    from .sap_table_linked_service import SapTableLinkedService
-    from .google_ad_words_linked_service import GoogleAdWordsLinkedService
-    from .oracle_service_cloud_linked_service import OracleServiceCloudLinkedService
-    from .dynamics_ax_linked_service import DynamicsAXLinkedService
-    from .responsys_linked_service import ResponsysLinkedService
-    from .azure_databricks_linked_service import AzureDatabricksLinkedService
-    from .azure_data_lake_analytics_linked_service import AzureDataLakeAnalyticsLinkedService
-    from .script_action import ScriptAction
-    from .hd_insight_on_demand_linked_service import HDInsightOnDemandLinkedService
-    from .salesforce_marketing_cloud_linked_service import SalesforceMarketingCloudLinkedService
-    from .netezza_linked_service import NetezzaLinkedService
-    from .vertica_linked_service import VerticaLinkedService
-    from .zoho_linked_service import ZohoLinkedService
-    from .xero_linked_service import XeroLinkedService
-    from .square_linked_service import SquareLinkedService
-    from .spark_linked_service import SparkLinkedService
-    from .shopify_linked_service import ShopifyLinkedService
-    from .service_now_linked_service import ServiceNowLinkedService
-    from .quick_books_linked_service import QuickBooksLinkedService
-    from .presto_linked_service import PrestoLinkedService
-    from .phoenix_linked_service import PhoenixLinkedService
-    from .paypal_linked_service import PaypalLinkedService
-    from .marketo_linked_service import MarketoLinkedService
-    from .azure_maria_db_linked_service import AzureMariaDBLinkedService
-    from .maria_db_linked_service import MariaDBLinkedService
-    from .magento_linked_service import MagentoLinkedService
-    from .jira_linked_service import JiraLinkedService
-    from .impala_linked_service import ImpalaLinkedService
-    from .hubspot_linked_service import HubspotLinkedService
-    from .hive_linked_service import HiveLinkedService
-    from .hbase_linked_service import HBaseLinkedService
-    from .greenplum_linked_service import GreenplumLinkedService
-    from .google_big_query_linked_service import GoogleBigQueryLinkedService
-    from .eloqua_linked_service import EloquaLinkedService
-    from .drill_linked_service import DrillLinkedService
-    from .couchbase_linked_service import CouchbaseLinkedService
-    from .concur_linked_service import ConcurLinkedService
-    from .azure_postgre_sql_linked_service import AzurePostgreSqlLinkedService
-    from .amazon_mws_linked_service import AmazonMWSLinkedService
-    from .sap_hana_linked_service import SapHanaLinkedService
-    from .sap_bw_linked_service import SapBWLinkedService
-    from .sftp_server_linked_service import SftpServerLinkedService
-    from .ftp_server_linked_service import FtpServerLinkedService
-    from .http_linked_service import HttpLinkedService
-    from .azure_search_linked_service import AzureSearchLinkedService
-    from .custom_data_source_linked_service import CustomDataSourceLinkedService
-    from .amazon_redshift_linked_service import AmazonRedshiftLinkedService
-    from .amazon_s3_linked_service import AmazonS3LinkedService
-    from .rest_service_linked_service import RestServiceLinkedService
-    from .sap_open_hub_linked_service import SapOpenHubLinkedService
-    from .sap_ecc_linked_service import SapEccLinkedService
-    from .sap_cloud_for_customer_linked_service import SapCloudForCustomerLinkedService
-    from .salesforce_service_cloud_linked_service import SalesforceServiceCloudLinkedService
-    from .salesforce_linked_service import SalesforceLinkedService
-    from .office365_linked_service import Office365LinkedService
-    from .azure_blob_fs_linked_service import AzureBlobFSLinkedService
-    from .azure_data_lake_store_linked_service import AzureDataLakeStoreLinkedService
-    from .cosmos_db_mongo_db_api_linked_service import CosmosDbMongoDbApiLinkedService
-    from .mongo_db_v2_linked_service import MongoDbV2LinkedService
-    from .mongo_db_linked_service import MongoDbLinkedService
-    from .cassandra_linked_service import CassandraLinkedService
-    from .web_client_certificate_authentication import WebClientCertificateAuthentication
-    from .web_basic_authentication import WebBasicAuthentication
-    from .web_anonymous_authentication import WebAnonymousAuthentication
-    from .web_linked_service_type_properties import WebLinkedServiceTypeProperties
-    from .web_linked_service import WebLinkedService
-    from .odata_linked_service import ODataLinkedService
-    from .hdfs_linked_service import HdfsLinkedService
-    from .microsoft_access_linked_service import MicrosoftAccessLinkedService
-    from .informix_linked_service import InformixLinkedService
-    from .odbc_linked_service import OdbcLinkedService
-    from .azure_ml_linked_service import AzureMLLinkedService
-    from .teradata_linked_service import TeradataLinkedService
-    from .db2_linked_service import Db2LinkedService
-    from .sybase_linked_service import SybaseLinkedService
-    from .postgre_sql_linked_service import PostgreSqlLinkedService
-    from .my_sql_linked_service import MySqlLinkedService
-    from .azure_my_sql_linked_service import AzureMySqlLinkedService
-    from .oracle_linked_service import OracleLinkedService
-    from .file_server_linked_service import FileServerLinkedService
-    from .hd_insight_linked_service import HDInsightLinkedService
-    from .common_data_service_for_apps_linked_service import CommonDataServiceForAppsLinkedService
-    from .dynamics_crm_linked_service import DynamicsCrmLinkedService
-    from .dynamics_linked_service import DynamicsLinkedService
-    from .cosmos_db_linked_service import CosmosDbLinkedService
-    from .azure_key_vault_linked_service import AzureKeyVaultLinkedService
-    from .azure_batch_linked_service import AzureBatchLinkedService
-    from .azure_sql_mi_linked_service import AzureSqlMILinkedService
-    from .azure_sql_database_linked_service import AzureSqlDatabaseLinkedService
-    from .sql_server_linked_service import SqlServerLinkedService
-    from .azure_sql_dw_linked_service import AzureSqlDWLinkedService
-    from .azure_table_storage_linked_service import AzureTableStorageLinkedService
-    from .azure_blob_storage_linked_service import AzureBlobStorageLinkedService
-    from .azure_storage_linked_service import AzureStorageLinkedService
-    from .google_ad_words_object_dataset import GoogleAdWordsObjectDataset
-    from .azure_data_explorer_table_dataset import AzureDataExplorerTableDataset
-    from .oracle_service_cloud_object_dataset import OracleServiceCloudObjectDataset
-    from .dynamics_ax_resource_dataset import DynamicsAXResourceDataset
-    from .responsys_object_dataset import ResponsysObjectDataset
-    from .salesforce_marketing_cloud_object_dataset import SalesforceMarketingCloudObjectDataset
-    from .vertica_table_dataset import VerticaTableDataset
-    from .netezza_table_dataset import NetezzaTableDataset
-    from .zoho_object_dataset import ZohoObjectDataset
-    from .xero_object_dataset import XeroObjectDataset
-    from .square_object_dataset import SquareObjectDataset
-    from .spark_object_dataset import SparkObjectDataset
-    from .shopify_object_dataset import ShopifyObjectDataset
-    from .service_now_object_dataset import ServiceNowObjectDataset
-    from .quick_books_object_dataset import QuickBooksObjectDataset
-    from .presto_object_dataset import PrestoObjectDataset
-    from .phoenix_object_dataset import PhoenixObjectDataset
-    from .paypal_object_dataset import PaypalObjectDataset
-    from .marketo_object_dataset import MarketoObjectDataset
-    from .azure_maria_db_table_dataset import AzureMariaDBTableDataset
-    from .maria_db_table_dataset import MariaDBTableDataset
-    from .magento_object_dataset import MagentoObjectDataset
-    from .jira_object_dataset import JiraObjectDataset
-    from .impala_object_dataset import ImpalaObjectDataset
-    from .hubspot_object_dataset import HubspotObjectDataset
-    from .hive_object_dataset import HiveObjectDataset
-    from .hbase_object_dataset import HBaseObjectDataset
-    from .greenplum_table_dataset import GreenplumTableDataset
-    from .google_big_query_object_dataset import GoogleBigQueryObjectDataset
-    from .eloqua_object_dataset import EloquaObjectDataset
-    from .drill_table_dataset import DrillTableDataset
-    from .couchbase_table_dataset import CouchbaseTableDataset
-    from .concur_object_dataset import ConcurObjectDataset
-    from .azure_postgre_sql_table_dataset import AzurePostgreSqlTableDataset
-    from .amazon_mws_object_dataset import AmazonMWSObjectDataset
-    from .dataset_zip_deflate_compression import DatasetZipDeflateCompression
-    from .dataset_deflate_compression import DatasetDeflateCompression
-    from .dataset_gzip_compression import DatasetGZipCompression
-    from .dataset_bzip2_compression import DatasetBZip2Compression
-    from .dataset_compression import DatasetCompression
-    from .parquet_format import ParquetFormat
-    from .orc_format import OrcFormat
-    from .avro_format import AvroFormat
-    from .json_format import JsonFormat
-    from .text_format import TextFormat
-    from .dataset_storage_format import DatasetStorageFormat
-    from .http_dataset import HttpDataset
-    from .azure_search_index_dataset import AzureSearchIndexDataset
-    from .web_table_dataset import WebTableDataset
-    from .sap_table_resource_dataset import SapTableResourceDataset
-    from .rest_resource_dataset import RestResourceDataset
-    from .sql_server_table_dataset import SqlServerTableDataset
-    from .sap_open_hub_table_dataset import SapOpenHubTableDataset
-    from .sap_hana_table_dataset import SapHanaTableDataset
-    from .sap_ecc_resource_dataset import SapEccResourceDataset
-    from .sap_cloud_for_customer_resource_dataset import SapCloudForCustomerResourceDataset
-    from .sap_bw_cube_dataset import SapBwCubeDataset
-    from .sybase_table_dataset import SybaseTableDataset
-    from .salesforce_service_cloud_object_dataset import SalesforceServiceCloudObjectDataset
-    from .salesforce_object_dataset import SalesforceObjectDataset
-    from .microsoft_access_table_dataset import MicrosoftAccessTableDataset
-    from .postgre_sql_table_dataset import PostgreSqlTableDataset
-    from .my_sql_table_dataset import MySqlTableDataset
-    from .odbc_table_dataset import OdbcTableDataset
-    from .informix_table_dataset import InformixTableDataset
-    from .relational_table_dataset import RelationalTableDataset
-    from .db2_table_dataset import Db2TableDataset
-    from .amazon_redshift_table_dataset import AmazonRedshiftTableDataset
-    from .azure_my_sql_table_dataset import AzureMySqlTableDataset
-    from .teradata_table_dataset import TeradataTableDataset
-    from .oracle_table_dataset import OracleTableDataset
-    from .odata_resource_dataset import ODataResourceDataset
-    from .cosmos_db_mongo_db_api_collection_dataset import CosmosDbMongoDbApiCollectionDataset
-    from .mongo_db_v2_collection_dataset import MongoDbV2CollectionDataset
-    from .mongo_db_collection_dataset import MongoDbCollectionDataset
-    from .file_share_dataset import FileShareDataset
-    from .office365_dataset import Office365Dataset
-    from .azure_blob_fs_dataset import AzureBlobFSDataset
-    from .azure_data_lake_store_dataset import AzureDataLakeStoreDataset
-    from .common_data_service_for_apps_entity_dataset import CommonDataServiceForAppsEntityDataset
-    from .dynamics_crm_entity_dataset import DynamicsCrmEntityDataset
-    from .dynamics_entity_dataset import DynamicsEntityDataset
-    from .document_db_collection_dataset import DocumentDbCollectionDataset
-    from .custom_dataset import CustomDataset
-    from .cassandra_table_dataset import CassandraTableDataset
-    from .azure_sql_dw_table_dataset import AzureSqlDWTableDataset
-    from .azure_sql_mi_table_dataset import AzureSqlMITableDataset
-    from .azure_sql_table_dataset import AzureSqlTableDataset
-    from .azure_table_dataset import AzureTableDataset
-    from .azure_blob_dataset import AzureBlobDataset
-    from .hdfs_location import HdfsLocation
-    from .http_server_location import HttpServerLocation
-    from .sftp_location import SftpLocation
-    from .ftp_server_location import FtpServerLocation
-    from .file_server_location import FileServerLocation
-    from .amazon_s3_location import AmazonS3Location
-    from .azure_data_lake_store_location import AzureDataLakeStoreLocation
-    from .azure_blob_fs_location import AzureBlobFSLocation
-    from .azure_blob_storage_location import AzureBlobStorageLocation
-    from .dataset_location import DatasetLocation
-    from .binary_dataset import BinaryDataset
-    from .json_dataset import JsonDataset
-    from .delimited_text_dataset import DelimitedTextDataset
-    from .parquet_dataset import ParquetDataset
-    from .avro_dataset import AvroDataset
-    from .amazon_s3_dataset import AmazonS3Dataset
-    from .activity_policy import ActivityPolicy
-    from .azure_function_activity import AzureFunctionActivity
-    from .databricks_spark_python_activity import DatabricksSparkPythonActivity
-    from .databricks_spark_jar_activity import DatabricksSparkJarActivity
-    from .databricks_notebook_activity import DatabricksNotebookActivity
-    from .data_lake_analytics_usql_activity import DataLakeAnalyticsUSQLActivity
-    from .azure_ml_update_resource_activity import AzureMLUpdateResourceActivity
-    from .azure_ml_web_service_file import AzureMLWebServiceFile
-    from .azure_ml_batch_execution_activity import AzureMLBatchExecutionActivity
-    from .get_metadata_activity import GetMetadataActivity
-    from .web_activity_authentication import WebActivityAuthentication
-    from .web_activity import WebActivity
-    from .redshift_unload_settings import RedshiftUnloadSettings
-    from .amazon_redshift_source import AmazonRedshiftSource
-    from .google_ad_words_source import GoogleAdWordsSource
-    from .oracle_service_cloud_source import OracleServiceCloudSource
-    from .dynamics_ax_source import DynamicsAXSource
-    from .responsys_source import ResponsysSource
-    from .salesforce_marketing_cloud_source import SalesforceMarketingCloudSource
-    from .vertica_source import VerticaSource
-    from .netezza_partition_settings import NetezzaPartitionSettings
-    from .netezza_source import NetezzaSource
-    from .zoho_source import ZohoSource
-    from .xero_source import XeroSource
-    from .square_source import SquareSource
-    from .spark_source import SparkSource
-    from .shopify_source import ShopifySource
-    from .service_now_source import ServiceNowSource
-    from .quick_books_source import QuickBooksSource
-    from .presto_source import PrestoSource
-    from .phoenix_source import PhoenixSource
-    from .paypal_source import PaypalSource
-    from .marketo_source import MarketoSource
-    from .azure_maria_db_source import AzureMariaDBSource
-    from .maria_db_source import MariaDBSource
-    from .magento_source import MagentoSource
-    from .jira_source import JiraSource
-    from .impala_source import ImpalaSource
-    from .hubspot_source import HubspotSource
-    from .hive_source import HiveSource
-    from .hbase_source import HBaseSource
-    from .greenplum_source import GreenplumSource
-    from .google_big_query_source import GoogleBigQuerySource
-    from .eloqua_source import EloquaSource
-    from .drill_source import DrillSource
-    from .couchbase_source import CouchbaseSource
-    from .concur_source import ConcurSource
-    from .azure_postgre_sql_source import AzurePostgreSqlSource
-    from .amazon_mws_source import AmazonMWSSource
-    from .http_source import HttpSource
-    from .azure_blob_fs_source import AzureBlobFSSource
-    from .azure_data_lake_store_source import AzureDataLakeStoreSource
-    from .office365_source import Office365Source
-    from .mongo_db_cursor_methods_properties import MongoDbCursorMethodsProperties
-    from .cosmos_db_mongo_db_api_source import CosmosDbMongoDbApiSource
-    from .mongo_db_v2_source import MongoDbV2Source
-    from .mongo_db_source import MongoDbSource
-    from .cassandra_source import CassandraSource
-    from .web_source import WebSource
-    from .teradata_partition_settings import TeradataPartitionSettings
-    from .teradata_source import TeradataSource
-    from .oracle_partition_settings import OraclePartitionSettings
-    from .oracle_source import OracleSource
-    from .azure_data_explorer_source import AzureDataExplorerSource
-    from .azure_my_sql_source import AzureMySqlSource
-    from .distcp_settings import DistcpSettings
-    from .hdfs_source import HdfsSource
-    from .file_system_source import FileSystemSource
-    from .sql_dw_source import SqlDWSource
-    from .stored_procedure_parameter import StoredProcedureParameter
-    from .sql_mi_source import SqlMISource
-    from .azure_sql_source import AzureSqlSource
-    from .sql_server_source import SqlServerSource
-    from .sql_source import SqlSource
-    from .rest_source import RestSource
-    from .sap_table_partition_settings import SapTablePartitionSettings
-    from .sap_table_source import SapTableSource
-    from .sap_open_hub_source import SapOpenHubSource
-    from .sap_hana_source import SapHanaSource
-    from .sap_ecc_source import SapEccSource
-    from .sap_cloud_for_customer_source import SapCloudForCustomerSource
-    from .salesforce_service_cloud_source import SalesforceServiceCloudSource
-    from .salesforce_source import SalesforceSource
-    from .odata_source import ODataSource
-    from .sap_bw_source import SapBwSource
-    from .sybase_source import SybaseSource
-    from .postgre_sql_source import PostgreSqlSource
-    from .my_sql_source import MySqlSource
-    from .odbc_source import OdbcSource
-    from .db2_source import Db2Source
-    from .microsoft_access_source import MicrosoftAccessSource
-    from .informix_source import InformixSource
-    from .relational_source import RelationalSource
-    from .common_data_service_for_apps_source import CommonDataServiceForAppsSource
-    from .dynamics_crm_source import DynamicsCrmSource
-    from .dynamics_source import DynamicsSource
-    from .document_db_collection_source import DocumentDbCollectionSource
-    from .blob_source import BlobSource
-    from .azure_table_source import AzureTableSource
-    from .hdfs_read_settings import HdfsReadSettings
-    from .http_read_settings import HttpReadSettings
-    from .sftp_read_settings import SftpReadSettings
-    from .ftp_read_settings import FtpReadSettings
-    from .file_server_read_settings import FileServerReadSettings
-    from .amazon_s3_read_settings import AmazonS3ReadSettings
-    from .azure_data_lake_store_read_settings import AzureDataLakeStoreReadSettings
-    from .azure_blob_fs_read_settings import AzureBlobFSReadSettings
-    from .azure_blob_storage_read_settings import AzureBlobStorageReadSettings
-    from .store_read_settings import StoreReadSettings
-    from .binary_source import BinarySource
-    from .json_source import JsonSource
-    from .format_read_settings import FormatReadSettings
-    from .delimited_text_read_settings import
DelimitedTextReadSettings - from .delimited_text_source import DelimitedTextSource - from .parquet_source import ParquetSource - from .avro_source import AvroSource - from .copy_source import CopySource - from .lookup_activity import LookupActivity - from .azure_data_explorer_command_activity import AzureDataExplorerCommandActivity - from .log_storage_settings import LogStorageSettings - from .delete_activity import DeleteActivity - from .sql_server_stored_procedure_activity import SqlServerStoredProcedureActivity - from .custom_activity_reference_object import CustomActivityReferenceObject - from .custom_activity import CustomActivity - from .ssis_access_credential import SSISAccessCredential - from .ssis_log_location import SSISLogLocation - from .ssis_property_override import SSISPropertyOverride - from .ssis_execution_parameter import SSISExecutionParameter - from .ssis_execution_credential import SSISExecutionCredential - from .ssis_package_location import SSISPackageLocation - from .execute_ssis_package_activity import ExecuteSSISPackageActivity - from .hd_insight_spark_activity import HDInsightSparkActivity - from .hd_insight_streaming_activity import HDInsightStreamingActivity - from .hd_insight_map_reduce_activity import HDInsightMapReduceActivity - from .hd_insight_pig_activity import HDInsightPigActivity - from .hd_insight_hive_activity import HDInsightHiveActivity - from .redirect_incompatible_row_settings import RedirectIncompatibleRowSettings - from .staging_settings import StagingSettings - from .cosmos_db_mongo_db_api_sink import CosmosDbMongoDbApiSink - from .salesforce_service_cloud_sink import SalesforceServiceCloudSink - from .salesforce_sink import SalesforceSink - from .azure_data_explorer_sink import AzureDataExplorerSink - from .common_data_service_for_apps_sink import CommonDataServiceForAppsSink - from .dynamics_crm_sink import DynamicsCrmSink - from .dynamics_sink import DynamicsSink - from .microsoft_access_sink import MicrosoftAccessSink - from .informix_sink import InformixSink - from .odbc_sink import OdbcSink - from .azure_search_index_sink import AzureSearchIndexSink - from .azure_blob_fs_sink import AzureBlobFSSink - from .azure_data_lake_store_sink import AzureDataLakeStoreSink - from .oracle_sink import OracleSink - from .polybase_settings import PolybaseSettings - from .sql_dw_sink import SqlDWSink - from .sql_mi_sink import SqlMISink - from .azure_sql_sink import AzureSqlSink - from .sql_server_sink import SqlServerSink - from .sql_sink import SqlSink - from .document_db_collection_sink import DocumentDbCollectionSink - from .file_system_sink import FileSystemSink - from .blob_sink import BlobSink - from .file_server_write_settings import FileServerWriteSettings - from .azure_data_lake_store_write_settings import AzureDataLakeStoreWriteSettings - from .azure_blob_fs_write_settings import AzureBlobFSWriteSettings - from .azure_blob_storage_write_settings import AzureBlobStorageWriteSettings - from .store_write_settings import StoreWriteSettings - from .binary_sink import BinarySink - from .parquet_sink import ParquetSink - from .json_write_settings import JsonWriteSettings - from .delimited_text_write_settings import DelimitedTextWriteSettings - from .format_write_settings import FormatWriteSettings - from .avro_write_settings import AvroWriteSettings - from .avro_sink import AvroSink - from .azure_table_sink import AzureTableSink - from .azure_queue_sink import AzureQueueSink - from .sap_cloud_for_customer_sink import SapCloudForCustomerSink - from 
.azure_my_sql_sink import AzureMySqlSink - from .azure_postgre_sql_sink import AzurePostgreSqlSink - from .json_sink import JsonSink - from .delimited_text_sink import DelimitedTextSink - from .copy_sink import CopySink - from .copy_activity import CopyActivity - from .execution_activity import ExecutionActivity - from .web_hook_activity import WebHookActivity - from .append_variable_activity import AppendVariableActivity - from .set_variable_activity import SetVariableActivity - from .filter_activity import FilterActivity - from .validation_activity import ValidationActivity - from .until_activity import UntilActivity - from .wait_activity import WaitActivity - from .for_each_activity import ForEachActivity - from .if_condition_activity import IfConditionActivity - from .execute_pipeline_activity import ExecutePipelineActivity - from .control_activity import ControlActivity - from .linked_integration_runtime import LinkedIntegrationRuntime - from .self_hosted_integration_runtime_node import SelfHostedIntegrationRuntimeNode - from .self_hosted_integration_runtime_status import SelfHostedIntegrationRuntimeStatus - from .managed_integration_runtime_operation_result import ManagedIntegrationRuntimeOperationResult - from .managed_integration_runtime_error import ManagedIntegrationRuntimeError - from .managed_integration_runtime_node import ManagedIntegrationRuntimeNode - from .managed_integration_runtime_status import ManagedIntegrationRuntimeStatus - from .linked_integration_runtime_rbac_authorization import LinkedIntegrationRuntimeRbacAuthorization - from .linked_integration_runtime_key_authorization import LinkedIntegrationRuntimeKeyAuthorization - from .linked_integration_runtime_type import LinkedIntegrationRuntimeType - from .self_hosted_integration_runtime import SelfHostedIntegrationRuntime - from .entity_reference import EntityReference - from .integration_runtime_data_proxy_properties import IntegrationRuntimeDataProxyProperties - from .integration_runtime_custom_setup_script_properties import IntegrationRuntimeCustomSetupScriptProperties - from .integration_runtime_ssis_catalog_info import IntegrationRuntimeSsisCatalogInfo - from .integration_runtime_ssis_properties import IntegrationRuntimeSsisProperties - from .integration_runtime_vnet_properties import IntegrationRuntimeVNetProperties - from .integration_runtime_compute_properties import IntegrationRuntimeComputeProperties - from .managed_integration_runtime import ManagedIntegrationRuntime - from .integration_runtime_node_ip_address import IntegrationRuntimeNodeIpAddress - from .ssis_variable import SsisVariable - from .ssis_environment import SsisEnvironment - from .ssis_parameter import SsisParameter - from .ssis_package import SsisPackage - from .ssis_environment_reference import SsisEnvironmentReference - from .ssis_project import SsisProject - from .ssis_folder import SsisFolder - from .ssis_object_metadata import SsisObjectMetadata - from .ssis_object_metadata_list_response import SsisObjectMetadataListResponse - from .integration_runtime_node_monitoring_data import IntegrationRuntimeNodeMonitoringData - from .integration_runtime_monitoring_data import IntegrationRuntimeMonitoringData - from .integration_runtime_auth_keys import IntegrationRuntimeAuthKeys - from .integration_runtime_regenerate_key_parameters import IntegrationRuntimeRegenerateKeyParameters - from .integration_runtime_connection_info import IntegrationRuntimeConnectionInfo +from .resource import Resource +from .sub_resource import SubResource +from 
.expression import Expression +from .secure_string import SecureString +from .linked_service_reference import LinkedServiceReference +from .azure_key_vault_secret_reference import AzureKeyVaultSecretReference +from .secret_base import SecretBase +from .factory_identity import FactoryIdentity +from .factory_repo_configuration import FactoryRepoConfiguration +from .factory import Factory +from .integration_runtime import IntegrationRuntime +from .integration_runtime_resource import IntegrationRuntimeResource +from .integration_runtime_reference import IntegrationRuntimeReference +from .integration_runtime_status import IntegrationRuntimeStatus +from .integration_runtime_status_response import IntegrationRuntimeStatusResponse +from .integration_runtime_status_list_response import IntegrationRuntimeStatusListResponse +from .update_integration_runtime_request import UpdateIntegrationRuntimeRequest +from .update_integration_runtime_node_request import UpdateIntegrationRuntimeNodeRequest +from .linked_integration_runtime_request import LinkedIntegrationRuntimeRequest +from .create_linked_integration_runtime_request import CreateLinkedIntegrationRuntimeRequest +from .parameter_specification import ParameterSpecification +from .linked_service import LinkedService +from .linked_service_resource import LinkedServiceResource +from .dataset_folder import DatasetFolder +from .dataset import Dataset +from .dataset_resource import DatasetResource +from .activity_dependency import ActivityDependency +from .user_property import UserProperty +from .activity import Activity +from .variable_specification import VariableSpecification +from .pipeline_folder import PipelineFolder +from .pipeline_resource import PipelineResource +from .trigger import Trigger +from .trigger_resource import TriggerResource +from .create_run_response import CreateRunResponse +from .trigger_subscription_operation_status import TriggerSubscriptionOperationStatus +from .factory_vsts_configuration import FactoryVSTSConfiguration +from .factory_git_hub_configuration import FactoryGitHubConfiguration +from .factory_repo_update import FactoryRepoUpdate +from .git_hub_access_token_request import GitHubAccessTokenRequest +from .git_hub_access_token_response import GitHubAccessTokenResponse +from .user_access_policy import UserAccessPolicy +from .access_policy_response import AccessPolicyResponse +from .pipeline_reference import PipelineReference +from .trigger_pipeline_reference import TriggerPipelineReference +from .factory_update_parameters import FactoryUpdateParameters +from .dataset_reference import DatasetReference +from .run_query_filter import RunQueryFilter +from .run_query_order_by import RunQueryOrderBy +from .run_filter_parameters import RunFilterParameters +from .pipeline_run_invoked_by import PipelineRunInvokedBy +from .pipeline_run import PipelineRun +from .pipeline_runs_query_response import PipelineRunsQueryResponse +from .activity_run import ActivityRun +from .activity_runs_query_response import ActivityRunsQueryResponse +from .trigger_run import TriggerRun +from .trigger_runs_query_response import TriggerRunsQueryResponse +from .rerun_tumbling_window_trigger_action_parameters import RerunTumblingWindowTriggerActionParameters +from .rerun_tumbling_window_trigger import RerunTumblingWindowTrigger +from .rerun_trigger_resource import RerunTriggerResource +from .operation_display import OperationDisplay +from .operation_log_specification import OperationLogSpecification +from .operation_metric_availability import 
OperationMetricAvailability +from .operation_metric_dimension import OperationMetricDimension +from .operation_metric_specification import OperationMetricSpecification +from .operation_service_specification import OperationServiceSpecification +from .operation import Operation +from .get_ssis_object_metadata_request import GetSsisObjectMetadataRequest +from .ssis_object_metadata_status_response import SsisObjectMetadataStatusResponse +from .exposure_control_request import ExposureControlRequest +from .exposure_control_response import ExposureControlResponse +from .self_dependency_tumbling_window_trigger_reference import SelfDependencyTumblingWindowTriggerReference +from .trigger_reference import TriggerReference +from .tumbling_window_trigger_dependency_reference import TumblingWindowTriggerDependencyReference +from .trigger_dependency_reference import TriggerDependencyReference +from .dependency_reference import DependencyReference +from .retry_policy import RetryPolicy +from .tumbling_window_trigger import TumblingWindowTrigger +from .blob_events_trigger import BlobEventsTrigger +from .blob_trigger import BlobTrigger +from .recurrence_schedule_occurrence import RecurrenceScheduleOccurrence +from .recurrence_schedule import RecurrenceSchedule +from .schedule_trigger_recurrence import ScheduleTriggerRecurrence +from .schedule_trigger import ScheduleTrigger +from .multiple_pipeline_trigger import MultiplePipelineTrigger +from .azure_function_linked_service import AzureFunctionLinkedService +from .azure_data_explorer_linked_service import AzureDataExplorerLinkedService +from .sap_table_linked_service import SapTableLinkedService +from .google_ad_words_linked_service import GoogleAdWordsLinkedService +from .oracle_service_cloud_linked_service import OracleServiceCloudLinkedService +from .dynamics_ax_linked_service import DynamicsAXLinkedService +from .responsys_linked_service import ResponsysLinkedService +from .azure_databricks_linked_service import AzureDatabricksLinkedService +from .azure_data_lake_analytics_linked_service import AzureDataLakeAnalyticsLinkedService +from .script_action import ScriptAction +from .hd_insight_on_demand_linked_service import HDInsightOnDemandLinkedService +from .salesforce_marketing_cloud_linked_service import SalesforceMarketingCloudLinkedService +from .netezza_linked_service import NetezzaLinkedService +from .vertica_linked_service import VerticaLinkedService +from .zoho_linked_service import ZohoLinkedService +from .xero_linked_service import XeroLinkedService +from .square_linked_service import SquareLinkedService +from .spark_linked_service import SparkLinkedService +from .shopify_linked_service import ShopifyLinkedService +from .service_now_linked_service import ServiceNowLinkedService +from .quick_books_linked_service import QuickBooksLinkedService +from .presto_linked_service import PrestoLinkedService +from .phoenix_linked_service import PhoenixLinkedService +from .paypal_linked_service import PaypalLinkedService +from .marketo_linked_service import MarketoLinkedService +from .azure_maria_db_linked_service import AzureMariaDBLinkedService +from .maria_db_linked_service import MariaDBLinkedService +from .magento_linked_service import MagentoLinkedService +from .jira_linked_service import JiraLinkedService +from .impala_linked_service import ImpalaLinkedService +from .hubspot_linked_service import HubspotLinkedService +from .hive_linked_service import HiveLinkedService +from .hbase_linked_service import HBaseLinkedService +from .greenplum_linked_service 
import GreenplumLinkedService +from .google_big_query_linked_service import GoogleBigQueryLinkedService +from .eloqua_linked_service import EloquaLinkedService +from .drill_linked_service import DrillLinkedService +from .couchbase_linked_service import CouchbaseLinkedService +from .concur_linked_service import ConcurLinkedService +from .azure_postgre_sql_linked_service import AzurePostgreSqlLinkedService +from .amazon_mws_linked_service import AmazonMWSLinkedService +from .sap_hana_linked_service import SapHanaLinkedService +from .sap_bw_linked_service import SapBWLinkedService +from .sftp_server_linked_service import SftpServerLinkedService +from .ftp_server_linked_service import FtpServerLinkedService +from .http_linked_service import HttpLinkedService +from .azure_search_linked_service import AzureSearchLinkedService +from .custom_data_source_linked_service import CustomDataSourceLinkedService +from .amazon_redshift_linked_service import AmazonRedshiftLinkedService +from .amazon_s3_linked_service import AmazonS3LinkedService +from .rest_service_linked_service import RestServiceLinkedService +from .sap_open_hub_linked_service import SapOpenHubLinkedService +from .sap_ecc_linked_service import SapEccLinkedService +from .sap_cloud_for_customer_linked_service import SapCloudForCustomerLinkedService +from .salesforce_service_cloud_linked_service import SalesforceServiceCloudLinkedService +from .salesforce_linked_service import SalesforceLinkedService +from .office365_linked_service import Office365LinkedService +from .azure_blob_fs_linked_service import AzureBlobFSLinkedService +from .azure_data_lake_store_linked_service import AzureDataLakeStoreLinkedService +from .cosmos_db_mongo_db_api_linked_service import CosmosDbMongoDbApiLinkedService +from .mongo_db_v2_linked_service import MongoDbV2LinkedService +from .mongo_db_linked_service import MongoDbLinkedService +from .cassandra_linked_service import CassandraLinkedService +from .web_client_certificate_authentication import WebClientCertificateAuthentication +from .web_basic_authentication import WebBasicAuthentication +from .web_anonymous_authentication import WebAnonymousAuthentication +from .web_linked_service_type_properties import WebLinkedServiceTypeProperties +from .web_linked_service import WebLinkedService +from .odata_linked_service import ODataLinkedService +from .hdfs_linked_service import HdfsLinkedService +from .microsoft_access_linked_service import MicrosoftAccessLinkedService +from .informix_linked_service import InformixLinkedService +from .odbc_linked_service import OdbcLinkedService +from .azure_ml_linked_service import AzureMLLinkedService +from .teradata_linked_service import TeradataLinkedService +from .db2_linked_service import Db2LinkedService +from .sybase_linked_service import SybaseLinkedService +from .postgre_sql_linked_service import PostgreSqlLinkedService +from .my_sql_linked_service import MySqlLinkedService +from .azure_my_sql_linked_service import AzureMySqlLinkedService +from .oracle_linked_service import OracleLinkedService +from .file_server_linked_service import FileServerLinkedService +from .hd_insight_linked_service import HDInsightLinkedService +from .common_data_service_for_apps_linked_service import CommonDataServiceForAppsLinkedService +from .dynamics_crm_linked_service import DynamicsCrmLinkedService +from .dynamics_linked_service import DynamicsLinkedService +from .cosmos_db_linked_service import CosmosDbLinkedService +from .azure_key_vault_linked_service import AzureKeyVaultLinkedService +from 
.azure_batch_linked_service import AzureBatchLinkedService +from .azure_sql_mi_linked_service import AzureSqlMILinkedService +from .azure_sql_database_linked_service import AzureSqlDatabaseLinkedService +from .sql_server_linked_service import SqlServerLinkedService +from .azure_sql_dw_linked_service import AzureSqlDWLinkedService +from .azure_table_storage_linked_service import AzureTableStorageLinkedService +from .azure_blob_storage_linked_service import AzureBlobStorageLinkedService +from .azure_storage_linked_service import AzureStorageLinkedService +from .google_ad_words_object_dataset import GoogleAdWordsObjectDataset +from .azure_data_explorer_table_dataset import AzureDataExplorerTableDataset +from .oracle_service_cloud_object_dataset import OracleServiceCloudObjectDataset +from .dynamics_ax_resource_dataset import DynamicsAXResourceDataset +from .responsys_object_dataset import ResponsysObjectDataset +from .salesforce_marketing_cloud_object_dataset import SalesforceMarketingCloudObjectDataset +from .vertica_table_dataset import VerticaTableDataset +from .netezza_table_dataset import NetezzaTableDataset +from .zoho_object_dataset import ZohoObjectDataset +from .xero_object_dataset import XeroObjectDataset +from .square_object_dataset import SquareObjectDataset +from .spark_object_dataset import SparkObjectDataset +from .shopify_object_dataset import ShopifyObjectDataset +from .service_now_object_dataset import ServiceNowObjectDataset +from .quick_books_object_dataset import QuickBooksObjectDataset +from .presto_object_dataset import PrestoObjectDataset +from .phoenix_object_dataset import PhoenixObjectDataset +from .paypal_object_dataset import PaypalObjectDataset +from .marketo_object_dataset import MarketoObjectDataset +from .azure_maria_db_table_dataset import AzureMariaDBTableDataset +from .maria_db_table_dataset import MariaDBTableDataset +from .magento_object_dataset import MagentoObjectDataset +from .jira_object_dataset import JiraObjectDataset +from .impala_object_dataset import ImpalaObjectDataset +from .hubspot_object_dataset import HubspotObjectDataset +from .hive_object_dataset import HiveObjectDataset +from .hbase_object_dataset import HBaseObjectDataset +from .greenplum_table_dataset import GreenplumTableDataset +from .google_big_query_object_dataset import GoogleBigQueryObjectDataset +from .eloqua_object_dataset import EloquaObjectDataset +from .drill_table_dataset import DrillTableDataset +from .couchbase_table_dataset import CouchbaseTableDataset +from .concur_object_dataset import ConcurObjectDataset +from .azure_postgre_sql_table_dataset import AzurePostgreSqlTableDataset +from .amazon_mws_object_dataset import AmazonMWSObjectDataset +from .dataset_zip_deflate_compression import DatasetZipDeflateCompression +from .dataset_deflate_compression import DatasetDeflateCompression +from .dataset_gzip_compression import DatasetGZipCompression +from .dataset_bzip2_compression import DatasetBZip2Compression +from .dataset_compression import DatasetCompression +from .parquet_format import ParquetFormat +from .orc_format import OrcFormat +from .avro_format import AvroFormat +from .json_format import JsonFormat +from .text_format import TextFormat +from .dataset_storage_format import DatasetStorageFormat +from .http_dataset import HttpDataset +from .azure_search_index_dataset import AzureSearchIndexDataset +from .web_table_dataset import WebTableDataset +from .sap_table_resource_dataset import SapTableResourceDataset +from .rest_resource_dataset import RestResourceDataset 
+from .sql_server_table_dataset import SqlServerTableDataset
+from .sap_open_hub_table_dataset import SapOpenHubTableDataset
+from .sap_hana_table_dataset import SapHanaTableDataset
+from .sap_ecc_resource_dataset import SapEccResourceDataset
+from .sap_cloud_for_customer_resource_dataset import SapCloudForCustomerResourceDataset
+from .sap_bw_cube_dataset import SapBwCubeDataset
+from .sybase_table_dataset import SybaseTableDataset
+from .salesforce_service_cloud_object_dataset import SalesforceServiceCloudObjectDataset
+from .salesforce_object_dataset import SalesforceObjectDataset
+from .microsoft_access_table_dataset import MicrosoftAccessTableDataset
+from .postgre_sql_table_dataset import PostgreSqlTableDataset
+from .my_sql_table_dataset import MySqlTableDataset
+from .odbc_table_dataset import OdbcTableDataset
+from .informix_table_dataset import InformixTableDataset
+from .relational_table_dataset import RelationalTableDataset
+from .db2_table_dataset import Db2TableDataset
+from .amazon_redshift_table_dataset import AmazonRedshiftTableDataset
+from .azure_my_sql_table_dataset import AzureMySqlTableDataset
+from .teradata_table_dataset import TeradataTableDataset
+from .oracle_table_dataset import OracleTableDataset
+from .odata_resource_dataset import ODataResourceDataset
+from .cosmos_db_mongo_db_api_collection_dataset import CosmosDbMongoDbApiCollectionDataset
+from .mongo_db_v2_collection_dataset import MongoDbV2CollectionDataset
+from .mongo_db_collection_dataset import MongoDbCollectionDataset
+from .file_share_dataset import FileShareDataset
+from .office365_dataset import Office365Dataset
+from .azure_blob_fs_dataset import AzureBlobFSDataset
+from .azure_data_lake_store_dataset import AzureDataLakeStoreDataset
+from .common_data_service_for_apps_entity_dataset import CommonDataServiceForAppsEntityDataset
+from .dynamics_crm_entity_dataset import DynamicsCrmEntityDataset
+from .dynamics_entity_dataset import DynamicsEntityDataset
+from .document_db_collection_dataset import DocumentDbCollectionDataset
+from .custom_dataset import CustomDataset
+from .cassandra_table_dataset import CassandraTableDataset
+from .azure_sql_dw_table_dataset import AzureSqlDWTableDataset
+from .azure_sql_mi_table_dataset import AzureSqlMITableDataset
+from .azure_sql_table_dataset import AzureSqlTableDataset
+from .azure_table_dataset import AzureTableDataset
+from .azure_blob_dataset import AzureBlobDataset
+from .hdfs_location import HdfsLocation
+from .http_server_location import HttpServerLocation
+from .sftp_location import SftpLocation
+from .ftp_server_location import FtpServerLocation
+from .file_server_location import FileServerLocation
+from .amazon_s3_location import AmazonS3Location
+from .azure_data_lake_store_location import AzureDataLakeStoreLocation
+from .azure_blob_fs_location import AzureBlobFSLocation
+from .azure_blob_storage_location import AzureBlobStorageLocation
+from .dataset_location import DatasetLocation
+from .binary_dataset import BinaryDataset
+from .json_dataset import JsonDataset
+from .delimited_text_dataset import DelimitedTextDataset
+from .parquet_dataset import ParquetDataset
+from .avro_dataset import AvroDataset
+from .amazon_s3_dataset import AmazonS3Dataset
+from .activity_policy import ActivityPolicy
+from .azure_function_activity import AzureFunctionActivity
+from .databricks_spark_python_activity import DatabricksSparkPythonActivity
+from .databricks_spark_jar_activity import DatabricksSparkJarActivity
+from .databricks_notebook_activity import DatabricksNotebookActivity
+from .data_lake_analytics_usql_activity import DataLakeAnalyticsUSQLActivity
+from .azure_ml_update_resource_activity import AzureMLUpdateResourceActivity
+from .azure_ml_web_service_file import AzureMLWebServiceFile
+from .azure_ml_batch_execution_activity import AzureMLBatchExecutionActivity
+from .get_metadata_activity import GetMetadataActivity
+from .web_activity_authentication import WebActivityAuthentication
+from .web_activity import WebActivity
+from .redshift_unload_settings import RedshiftUnloadSettings
+from .amazon_redshift_source import AmazonRedshiftSource
+from .google_ad_words_source import GoogleAdWordsSource
+from .oracle_service_cloud_source import OracleServiceCloudSource
+from .dynamics_ax_source import DynamicsAXSource
+from .responsys_source import ResponsysSource
+from .salesforce_marketing_cloud_source import SalesforceMarketingCloudSource
+from .vertica_source import VerticaSource
+from .netezza_partition_settings import NetezzaPartitionSettings
+from .netezza_source import NetezzaSource
+from .zoho_source import ZohoSource
+from .xero_source import XeroSource
+from .square_source import SquareSource
+from .spark_source import SparkSource
+from .shopify_source import ShopifySource
+from .service_now_source import ServiceNowSource
+from .quick_books_source import QuickBooksSource
+from .presto_source import PrestoSource
+from .phoenix_source import PhoenixSource
+from .paypal_source import PaypalSource
+from .marketo_source import MarketoSource
+from .azure_maria_db_source import AzureMariaDBSource
+from .maria_db_source import MariaDBSource
+from .magento_source import MagentoSource
+from .jira_source import JiraSource
+from .impala_source import ImpalaSource
+from .hubspot_source import HubspotSource
+from .hive_source import HiveSource
+from .hbase_source import HBaseSource
+from .greenplum_source import GreenplumSource
+from .google_big_query_source import GoogleBigQuerySource
+from .eloqua_source import EloquaSource
+from .drill_source import DrillSource
+from .couchbase_source import CouchbaseSource
+from .concur_source import ConcurSource
+from .azure_postgre_sql_source import AzurePostgreSqlSource
+from .amazon_mws_source import AmazonMWSSource
+from .http_source import HttpSource
+from .azure_blob_fs_source import AzureBlobFSSource
+from .azure_data_lake_store_source import AzureDataLakeStoreSource
+from .office365_source import Office365Source
+from .mongo_db_cursor_methods_properties import MongoDbCursorMethodsProperties
+from .cosmos_db_mongo_db_api_source import CosmosDbMongoDbApiSource
+from .mongo_db_v2_source import MongoDbV2Source
+from .mongo_db_source import MongoDbSource
+from .cassandra_source import CassandraSource
+from .web_source import WebSource
+from .teradata_partition_settings import TeradataPartitionSettings
+from .teradata_source import TeradataSource
+from .oracle_partition_settings import OraclePartitionSettings
+from .oracle_source import OracleSource
+from .azure_data_explorer_source import AzureDataExplorerSource
+from .azure_my_sql_source import AzureMySqlSource
+from .distcp_settings import DistcpSettings
+from .hdfs_source import HdfsSource
+from .file_system_source import FileSystemSource
+from .sql_dw_source import SqlDWSource
+from .stored_procedure_parameter import StoredProcedureParameter
+from .sql_mi_source import SqlMISource
+from .azure_sql_source import AzureSqlSource
+from .sql_server_source import SqlServerSource
+from .sql_source import SqlSource
+from .rest_source import RestSource
+from .sap_table_partition_settings import SapTablePartitionSettings
+from .sap_table_source import SapTableSource
+from .sap_open_hub_source import SapOpenHubSource
+from .sap_hana_source import SapHanaSource
+from .sap_ecc_source import SapEccSource
+from .sap_cloud_for_customer_source import SapCloudForCustomerSource
+from .salesforce_service_cloud_source import SalesforceServiceCloudSource
+from .salesforce_source import SalesforceSource
+from .odata_source import ODataSource
+from .sap_bw_source import SapBwSource
+from .sybase_source import SybaseSource
+from .postgre_sql_source import PostgreSqlSource
+from .my_sql_source import MySqlSource
+from .odbc_source import OdbcSource
+from .db2_source import Db2Source
+from .microsoft_access_source import MicrosoftAccessSource
+from .informix_source import InformixSource
+from .relational_source import RelationalSource
+from .common_data_service_for_apps_source import CommonDataServiceForAppsSource
+from .dynamics_crm_source import DynamicsCrmSource
+from .dynamics_source import DynamicsSource
+from .document_db_collection_source import DocumentDbCollectionSource
+from .blob_source import BlobSource
+from .azure_table_source import AzureTableSource
+from .hdfs_read_settings import HdfsReadSettings
+from .http_read_settings import HttpReadSettings
+from .sftp_read_settings import SftpReadSettings
+from .ftp_read_settings import FtpReadSettings
+from .file_server_read_settings import FileServerReadSettings
+from .amazon_s3_read_settings import AmazonS3ReadSettings
+from .azure_data_lake_store_read_settings import AzureDataLakeStoreReadSettings
+from .azure_blob_fs_read_settings import AzureBlobFSReadSettings
+from .azure_blob_storage_read_settings import AzureBlobStorageReadSettings
+from .store_read_settings import StoreReadSettings
+from .binary_source import BinarySource
+from .json_source import JsonSource
+from .format_read_settings import FormatReadSettings
+from .delimited_text_read_settings import DelimitedTextReadSettings
+from .delimited_text_source import DelimitedTextSource
+from .parquet_source import ParquetSource
+from .avro_source import AvroSource
+from .copy_source import CopySource
+from .lookup_activity import LookupActivity
+from .azure_data_explorer_command_activity import AzureDataExplorerCommandActivity
+from .log_storage_settings import LogStorageSettings
+from .delete_activity import DeleteActivity
+from .sql_server_stored_procedure_activity import SqlServerStoredProcedureActivity
+from .custom_activity_reference_object import CustomActivityReferenceObject
+from .custom_activity import CustomActivity
+from .ssis_access_credential import SSISAccessCredential
+from .ssis_log_location import SSISLogLocation
+from .ssis_property_override import SSISPropertyOverride
+from .ssis_execution_parameter import SSISExecutionParameter
+from .ssis_execution_credential import SSISExecutionCredential
+from .ssis_package_location import SSISPackageLocation
+from .execute_ssis_package_activity import ExecuteSSISPackageActivity
+from .hd_insight_spark_activity import HDInsightSparkActivity
+from .hd_insight_streaming_activity import HDInsightStreamingActivity
+from .hd_insight_map_reduce_activity import HDInsightMapReduceActivity
+from .hd_insight_pig_activity import HDInsightPigActivity
+from .hd_insight_hive_activity import HDInsightHiveActivity
+from .redirect_incompatible_row_settings import RedirectIncompatibleRowSettings
+from .staging_settings import StagingSettings
+from .cosmos_db_mongo_db_api_sink import CosmosDbMongoDbApiSink
+from .salesforce_service_cloud_sink import SalesforceServiceCloudSink
+from .salesforce_sink import SalesforceSink
+from .azure_data_explorer_sink import AzureDataExplorerSink
+from .common_data_service_for_apps_sink import CommonDataServiceForAppsSink
+from .dynamics_crm_sink import DynamicsCrmSink
+from .dynamics_sink import DynamicsSink
+from .microsoft_access_sink import MicrosoftAccessSink
+from .informix_sink import InformixSink
+from .odbc_sink import OdbcSink
+from .azure_search_index_sink import AzureSearchIndexSink
+from .azure_blob_fs_sink import AzureBlobFSSink
+from .azure_data_lake_store_sink import AzureDataLakeStoreSink
+from .oracle_sink import OracleSink
+from .polybase_settings import PolybaseSettings
+from .sql_dw_sink import SqlDWSink
+from .sql_mi_sink import SqlMISink
+from .azure_sql_sink import AzureSqlSink
+from .sql_server_sink import SqlServerSink
+from .sql_sink import SqlSink
+from .document_db_collection_sink import DocumentDbCollectionSink
+from .file_system_sink import FileSystemSink
+from .blob_sink import BlobSink
+from .file_server_write_settings import FileServerWriteSettings
+from .azure_data_lake_store_write_settings import AzureDataLakeStoreWriteSettings
+from .azure_blob_fs_write_settings import AzureBlobFSWriteSettings
+from .azure_blob_storage_write_settings import AzureBlobStorageWriteSettings
+from .store_write_settings import StoreWriteSettings
+from .binary_sink import BinarySink
+from .parquet_sink import ParquetSink
+from .json_write_settings import JsonWriteSettings
+from .delimited_text_write_settings import DelimitedTextWriteSettings
+from .format_write_settings import FormatWriteSettings
+from .avro_write_settings import AvroWriteSettings
+from .avro_sink import AvroSink
+from .azure_table_sink import AzureTableSink
+from .azure_queue_sink import AzureQueueSink
+from .sap_cloud_for_customer_sink import SapCloudForCustomerSink
+from .azure_my_sql_sink import AzureMySqlSink
+from .azure_postgre_sql_sink import AzurePostgreSqlSink
+from .json_sink import JsonSink
+from .delimited_text_sink import DelimitedTextSink
+from .copy_sink import CopySink
+from .copy_activity import CopyActivity
+from .execution_activity import ExecutionActivity
+from .web_hook_activity import WebHookActivity
+from .append_variable_activity import AppendVariableActivity
+from .set_variable_activity import SetVariableActivity
+from .filter_activity import FilterActivity
+from .validation_activity import ValidationActivity
+from .until_activity import UntilActivity
+from .wait_activity import WaitActivity
+from .for_each_activity import ForEachActivity
+from .if_condition_activity import IfConditionActivity
+from .execute_pipeline_activity import ExecutePipelineActivity
+from .control_activity import ControlActivity
+from .linked_integration_runtime import LinkedIntegrationRuntime
+from .self_hosted_integration_runtime_node import SelfHostedIntegrationRuntimeNode
+from .self_hosted_integration_runtime_status import SelfHostedIntegrationRuntimeStatus
+from .managed_integration_runtime_operation_result import ManagedIntegrationRuntimeOperationResult
+from .managed_integration_runtime_error import ManagedIntegrationRuntimeError
+from .managed_integration_runtime_node import ManagedIntegrationRuntimeNode
+from .managed_integration_runtime_status import ManagedIntegrationRuntimeStatus
+from .linked_integration_runtime_rbac_authorization import LinkedIntegrationRuntimeRbacAuthorization
+from .linked_integration_runtime_key_authorization import LinkedIntegrationRuntimeKeyAuthorization
+from .linked_integration_runtime_type import LinkedIntegrationRuntimeType
+from .self_hosted_integration_runtime import SelfHostedIntegrationRuntime
+from .entity_reference import EntityReference
+from .integration_runtime_data_proxy_properties import IntegrationRuntimeDataProxyProperties
+from .integration_runtime_custom_setup_script_properties import IntegrationRuntimeCustomSetupScriptProperties
+from .integration_runtime_ssis_catalog_info import IntegrationRuntimeSsisCatalogInfo
+from .integration_runtime_ssis_properties import IntegrationRuntimeSsisProperties
+from .integration_runtime_vnet_properties import IntegrationRuntimeVNetProperties
+from .integration_runtime_compute_properties import IntegrationRuntimeComputeProperties
+from .managed_integration_runtime import ManagedIntegrationRuntime
+from .integration_runtime_node_ip_address import IntegrationRuntimeNodeIpAddress
+from .ssis_variable import SsisVariable
+from .ssis_environment import SsisEnvironment
+from .ssis_parameter import SsisParameter
+from .ssis_package import SsisPackage
+from .ssis_environment_reference import SsisEnvironmentReference
+from .ssis_project import SsisProject
+from .ssis_folder import SsisFolder
+from .ssis_object_metadata import SsisObjectMetadata
+from .ssis_object_metadata_list_response import SsisObjectMetadataListResponse
+from .integration_runtime_node_monitoring_data import IntegrationRuntimeNodeMonitoringData
+from .integration_runtime_monitoring_data import IntegrationRuntimeMonitoringData
+from .integration_runtime_auth_keys import IntegrationRuntimeAuthKeys
+from .integration_runtime_regenerate_key_parameters import IntegrationRuntimeRegenerateKeyParameters
+from .integration_runtime_connection_info import IntegrationRuntimeConnectionInfo
 from .operation_paged import OperationPaged
 from .factory_paged import FactoryPaged
 from .integration_runtime_resource_paged import IntegrationRuntimeResourcePaged
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response.py
index 033d0fd9591f..cfe0a5de2371 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response.py
@@ -29,8 +29,8 @@ class AccessPolicyResponse(Model):
         'data_plane_url': {'key': 'dataPlaneUrl', 'type': 'str'},
     }
 
-    def __init__(self, **kwargs):
-        super(AccessPolicyResponse, self).__init__(**kwargs)
-        self.policy = kwargs.get('policy', None)
-        self.access_token = kwargs.get('access_token', None)
-        self.data_plane_url = kwargs.get('data_plane_url', None)
+    def __init__(self, policy=None, access_token=None, data_plane_url=None):
+        super(AccessPolicyResponse, self).__init__()
+        self.policy = policy
+        self.access_token = access_token
+        self.data_plane_url = data_plane_url
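The hunk above is representative of the constructor rewrite applied to every model in this patch: the **kwargs-based __init__ is replaced with explicit keyword parameters that default to None. A minimal sketch of the resulting call shape, assuming azure.mgmt.datafactory is installed from this build (the token and URL values below are placeholders, and UserAccessPolicy is assumed to keep its existing permissions parameter):

    from azure.mgmt.datafactory.models import AccessPolicyResponse, UserAccessPolicy

    # All three parameters are now plain keyword arguments with None defaults,
    # instead of values fished out of **kwargs at runtime.
    response = AccessPolicyResponse(
        policy=UserAccessPolicy(permissions='r'),
        access_token='<data-plane-token>',  # placeholder
        data_plane_url='https://example.svc.datafactory.azure.com',  # placeholder
    )

One practical effect of this change is that typos in parameter names now raise a TypeError at construction time rather than being silently ignored by kwargs.get().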
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response_py3.py
deleted file mode 100644
index 2932f547ff26..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response_py3.py
+++ /dev/null
@@ -1,36 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class AccessPolicyResponse(Model):
-    """Get Data Plane read only token response definition.
-
-    :param policy: The user access policy.
-    :type policy: ~azure.mgmt.datafactory.models.UserAccessPolicy
-    :param access_token: Data Plane read only access token.
-    :type access_token: str
-    :param data_plane_url: Data Plane service base URL.
-    :type data_plane_url: str
-    """
-
-    _attribute_map = {
-        'policy': {'key': 'policy', 'type': 'UserAccessPolicy'},
-        'access_token': {'key': 'accessToken', 'type': 'str'},
-        'data_plane_url': {'key': 'dataPlaneUrl', 'type': 'str'},
-    }
-
-    def __init__(self, *, policy=None, access_token: str=None, data_plane_url: str=None, **kwargs) -> None:
-        super(AccessPolicyResponse, self).__init__(**kwargs)
-        self.policy = policy
-        self.access_token = access_token
-        self.data_plane_url = data_plane_url
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity.py
index 72d920f1d04c..e6c03fc190a9 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity.py
@@ -18,12 +18,10 @@ class Activity(Model):
     You probably want to use the sub-classes and not this class directly. Known
     sub-classes are: ExecutionActivity, ControlActivity
 
-    All required parameters must be populated in order to send to Azure.
-
     :param additional_properties: Unmatched properties from the message are
      deserialized this collection
     :type additional_properties: dict[str, object]
-    :param name: Required. Activity name.
+    :param name: Activity name.
     :type name: str
     :param description: Activity description.
     :type description: str
@@ -31,7 +29,7 @@
     :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
     :param user_properties: Activity user properties.
     :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
-    :param type: Required. Constant filled by server.
+    :param type: Constant filled by server.
    :type type: str
     """
 
@@ -53,11 +51,11 @@ class Activity(Model):
         'type': {'Execution': 'ExecutionActivity', 'Container': 'ControlActivity'}
     }
 
-    def __init__(self, **kwargs):
-        super(Activity, self).__init__(**kwargs)
-        self.additional_properties = kwargs.get('additional_properties', None)
-        self.name = kwargs.get('name', None)
-        self.description = kwargs.get('description', None)
-        self.depends_on = kwargs.get('depends_on', None)
-        self.user_properties = kwargs.get('user_properties', None)
+    def __init__(self, name, additional_properties=None, description=None, depends_on=None, user_properties=None):
+        super(Activity, self).__init__()
+        self.additional_properties = additional_properties
+        self.name = name
+        self.description = description
+        self.depends_on = depends_on
+        self.user_properties = user_properties
         self.type = None
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency.py
index a15b34acc24f..ab346ecbe635 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency.py
@@ -15,15 +15,12 @@
 class ActivityDependency(Model):
     """Activity dependency information.
 
-    All required parameters must be populated in order to send to Azure.
-
     :param additional_properties: Unmatched properties from the message are
      deserialized this collection
     :type additional_properties: dict[str, object]
-    :param activity: Required. Activity name.
+    :param activity: Activity name.
     :type activity: str
-    :param dependency_conditions: Required. Match-Condition for the
-     dependency.
+    :param dependency_conditions: Match-Condition for the dependency.
     :type dependency_conditions: list[str or
      ~azure.mgmt.datafactory.models.DependencyCondition]
     """
@@ -39,8 +36,8 @@ class ActivityDependency(Model):
         'dependency_conditions': {'key': 'dependencyConditions', 'type': '[str]'},
     }
 
-    def __init__(self, **kwargs):
-        super(ActivityDependency, self).__init__(**kwargs)
-        self.additional_properties = kwargs.get('additional_properties', None)
-        self.activity = kwargs.get('activity', None)
-        self.dependency_conditions = kwargs.get('dependency_conditions', None)
+    def __init__(self, activity, dependency_conditions, additional_properties=None):
+        super(ActivityDependency, self).__init__()
+        self.additional_properties = additional_properties
+        self.activity = activity
+        self.dependency_conditions = dependency_conditions
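Note that for models with required fields, such as ActivityDependency above, the rewrite promotes those fields to ordinary positional parameters, so construction fails fast when they are omitted. A small sketch under the new signature (the activity name below is hypothetical; 'Succeeded' is one of the documented DependencyCondition values):

    from azure.mgmt.datafactory.models import ActivityDependency

    # 'activity' and 'dependency_conditions' are now positional; only
    # 'additional_properties' keeps a default.
    dep = ActivityDependency('CopyFromBlob', ['Succeeded'])

    # Equivalent keyword form:
    dep = ActivityDependency(activity='CopyFromBlob',
                             dependency_conditions=['Succeeded'])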
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency_py3.py
deleted file mode 100644
index 2883a81a0adc..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency_py3.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class ActivityDependency(Model):
-    """Activity dependency information.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param activity: Required. Activity name.
-    :type activity: str
-    :param dependency_conditions: Required. Match-Condition for the
-     dependency.
-    :type dependency_conditions: list[str or
-     ~azure.mgmt.datafactory.models.DependencyCondition]
-    """
-
-    _validation = {
-        'activity': {'required': True},
-        'dependency_conditions': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'activity': {'key': 'activity', 'type': 'str'},
-        'dependency_conditions': {'key': 'dependencyConditions', 'type': '[str]'},
-    }
-
-    def __init__(self, *, activity: str, dependency_conditions, additional_properties=None, **kwargs) -> None:
-        super(ActivityDependency, self).__init__(**kwargs)
-        self.additional_properties = additional_properties
-        self.activity = activity
-        self.dependency_conditions = dependency_conditions
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy.py
index 4475cdbd9bea..9577e1431442 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy.py
@@ -49,11 +49,11 @@ class ActivityPolicy(Model):
         'secure_output': {'key': 'secureOutput', 'type': 'bool'},
     }
 
-    def __init__(self, **kwargs):
-        super(ActivityPolicy, self).__init__(**kwargs)
-        self.additional_properties = kwargs.get('additional_properties', None)
-        self.timeout = kwargs.get('timeout', None)
-        self.retry = kwargs.get('retry', None)
-        self.retry_interval_in_seconds = kwargs.get('retry_interval_in_seconds', None)
-        self.secure_input = kwargs.get('secure_input', None)
-        self.secure_output = kwargs.get('secure_output', None)
+    def __init__(self, additional_properties=None, timeout=None, retry=None, retry_interval_in_seconds=None, secure_input=None, secure_output=None):
+        super(ActivityPolicy, self).__init__()
+        self.additional_properties = additional_properties
+        self.timeout = timeout
+        self.retry = retry
+        self.retry_interval_in_seconds = retry_interval_in_seconds
+        self.secure_input = secure_input
+        self.secure_output = secure_output
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy_py3.py
deleted file mode 100644
index 52d469679974..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy_py3.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from msrest.serialization import Model
-
-
-class ActivityPolicy(Model):
-    """Execution policy for an activity.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param timeout: Specifies the timeout for the activity to run. The default
-     timeout is 7 days. Type: string (or Expression with resultType string),
-     pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type timeout: object
-    :param retry: Maximum ordinary retry attempts. Default is 0. Type: integer
-     (or Expression with resultType integer), minimum: 0.
-    :type retry: object
-    :param retry_interval_in_seconds: Interval between each retry attempt (in
-     seconds). The default is 30 sec.
-    :type retry_interval_in_seconds: int
-    :param secure_input: When set to true, Input from activity is considered
-     as secure and will not be logged to monitoring.
-    :type secure_input: bool
-    :param secure_output: When set to true, Output from activity is considered
-     as secure and will not be logged to monitoring.
-    :type secure_output: bool
-    """
-
-    _validation = {
-        'retry_interval_in_seconds': {'maximum': 86400, 'minimum': 30},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'timeout': {'key': 'timeout', 'type': 'object'},
-        'retry': {'key': 'retry', 'type': 'object'},
-        'retry_interval_in_seconds': {'key': 'retryIntervalInSeconds', 'type': 'int'},
-        'secure_input': {'key': 'secureInput', 'type': 'bool'},
-        'secure_output': {'key': 'secureOutput', 'type': 'bool'},
-    }
-
-    def __init__(self, *, additional_properties=None, timeout=None, retry=None, retry_interval_in_seconds: int=None, secure_input: bool=None, secure_output: bool=None, **kwargs) -> None:
-        super(ActivityPolicy, self).__init__(**kwargs)
-        self.additional_properties = additional_properties
-        self.timeout = timeout
-        self.retry = retry
-        self.retry_interval_in_seconds = retry_interval_in_seconds
-        self.secure_input = secure_input
-        self.secure_output = secure_output
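The deleted docstring above documents the ActivityPolicy semantics: a 7-day default timeout following the d.hh:mm:ss pattern, retry defaulting to 0, and a retry interval validated to the 30-86400 second range. A sketch of a policy built with the new non-kwargs constructor, using illustrative values:

    from azure.mgmt.datafactory.models import ActivityPolicy

    policy = ActivityPolicy(
        timeout='0.01:00:00',          # 1 hour, matching the documented pattern
        retry=3,                       # up to 3 ordinary retries (default is 0)
        retry_interval_in_seconds=60,  # must stay within the validated 30-86400 range
        secure_input=True,             # keep activity input out of monitoring logs
        secure_output=False,
    )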
- :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'Execution': 'ExecutionActivity', 'Container': 'ControlActivity'} - } - - def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, **kwargs) -> None: - super(Activity, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.name = name - self.description = description - self.depends_on = depends_on - self.user_properties = user_properties - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run.py index 901ffe23cd4e..3492b892ef7f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run.py @@ -84,9 +84,9 @@ class ActivityRun(Model): 'error': {'key': 'error', 'type': 'object'}, } - def __init__(self, **kwargs): - super(ActivityRun, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) + def __init__(self, additional_properties=None): + super(ActivityRun, self).__init__() + self.additional_properties = additional_properties self.pipeline_name = None self.pipeline_run_id = None self.activity_name = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run_py3.py deleted file mode 100644 index 488e822de957..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run_py3.py +++ /dev/null @@ -1,102 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class ActivityRun(Model): - """Information about an activity run in a pipeline. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar pipeline_name: The name of the pipeline. 
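The Activity base class above dispatches deserialization through _subtype_map on the 'type' discriminator ('Execution' to ExecutionActivity, 'Container' to ControlActivity). A sketch of that dispatch with msrest's Deserializer, assuming the models package is registered in the usual generated-client fashion:

    from msrest import Deserializer

    from azure.mgmt.datafactory import models

    client_models = {k: v for k, v in vars(models).items() if isinstance(v, type)}
    deserialize = Deserializer(client_models)

    # 'Execution' routes through Activity._subtype_map to ExecutionActivity
    activity = deserialize('Activity', {'name': 'copy1', 'type': 'Execution'})
    print(type(activity).__name__)  # ExecutionActivity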
- :vartype pipeline_name: str - :ivar pipeline_run_id: The id of the pipeline run. - :vartype pipeline_run_id: str - :ivar activity_name: The name of the activity. - :vartype activity_name: str - :ivar activity_type: The type of the activity. - :vartype activity_type: str - :ivar activity_run_id: The id of the activity run. - :vartype activity_run_id: str - :ivar linked_service_name: The name of the compute linked service. - :vartype linked_service_name: str - :ivar status: The status of the activity run. - :vartype status: str - :ivar activity_run_start: The start time of the activity run in 'ISO 8601' - format. - :vartype activity_run_start: datetime - :ivar activity_run_end: The end time of the activity run in 'ISO 8601' - format. - :vartype activity_run_end: datetime - :ivar duration_in_ms: The duration of the activity run. - :vartype duration_in_ms: int - :ivar input: The input for the activity. - :vartype input: object - :ivar output: The output for the activity. - :vartype output: object - :ivar error: The error if any from the activity run. - :vartype error: object - """ - - _validation = { - 'pipeline_name': {'readonly': True}, - 'pipeline_run_id': {'readonly': True}, - 'activity_name': {'readonly': True}, - 'activity_type': {'readonly': True}, - 'activity_run_id': {'readonly': True}, - 'linked_service_name': {'readonly': True}, - 'status': {'readonly': True}, - 'activity_run_start': {'readonly': True}, - 'activity_run_end': {'readonly': True}, - 'duration_in_ms': {'readonly': True}, - 'input': {'readonly': True}, - 'output': {'readonly': True}, - 'error': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, - 'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'}, - 'activity_name': {'key': 'activityName', 'type': 'str'}, - 'activity_type': {'key': 'activityType', 'type': 'str'}, - 'activity_run_id': {'key': 'activityRunId', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'activity_run_start': {'key': 'activityRunStart', 'type': 'iso-8601'}, - 'activity_run_end': {'key': 'activityRunEnd', 'type': 'iso-8601'}, - 'duration_in_ms': {'key': 'durationInMs', 'type': 'int'}, - 'input': {'key': 'input', 'type': 'object'}, - 'output': {'key': 'output', 'type': 'object'}, - 'error': {'key': 'error', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(ActivityRun, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.pipeline_name = None - self.pipeline_run_id = None - self.activity_name = None - self.activity_type = None - self.activity_run_id = None - self.linked_service_name = None - self.status = None - self.activity_run_start = None - self.activity_run_end = None - self.duration_in_ms = None - self.input = None - self.output = None - self.error = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response.py index 2fcd25a5ced2..f8cc1a9aa43e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response.py @@ -15,9 +15,7 @@ class ActivityRunsQueryResponse(Model): """A list activity runs. 
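Every ActivityRun field except additional_properties is marked readonly above, so a locally constructed instance carries no run details until the service fills them in. For example:

    from azure.mgmt.datafactory.models import ActivityRun

    run = ActivityRun(additional_properties={'x-custom': 'tag'})
    print(run.status, run.duration_in_ms)  # None None: populated only by the server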
- All required parameters must be populated in order to send to Azure. - - :param value: Required. List of activity runs. + :param value: List of activity runs. :type value: list[~azure.mgmt.datafactory.models.ActivityRun] :param continuation_token: The continuation token for getting the next page of results, if any remaining results exist, null otherwise. @@ -33,7 +31,7 @@ class ActivityRunsQueryResponse(Model): 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, } - def __init__(self, **kwargs): - super(ActivityRunsQueryResponse, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.continuation_token = kwargs.get('continuation_token', None) + def __init__(self, value, continuation_token=None): + super(ActivityRunsQueryResponse, self).__init__() + self.value = value + self.continuation_token = continuation_token diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response_py3.py deleted file mode 100644 index ee3eae141635..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response_py3.py +++ /dev/null @@ -1,39 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class ActivityRunsQueryResponse(Model): - """A list activity runs. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of activity runs. - :type value: list[~azure.mgmt.datafactory.models.ActivityRun] - :param continuation_token: The continuation token for getting the next - page of results, if any remaining results exist, null otherwise. - :type continuation_token: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[ActivityRun]'}, - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, - } - - def __init__(self, *, value, continuation_token: str=None, **kwargs) -> None: - super(ActivityRunsQueryResponse, self).__init__(**kwargs) - self.value = value - self.continuation_token = continuation_token diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service.py index b1e5ed533bba..c3f2f6fa2499 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service.py @@ -15,8 +15,6 @@ class AmazonMWSLinkedService(LinkedService): """Amazon Marketplace Web Service linked service. - All required parameters must be populated in order to send to Azure. 
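The continuation_token documented above drives paging: resubmit the query with the token from the previous response until it comes back null. A hedged sketch, assuming the client's activity_runs.query_by_pipeline_run operation and the RunFilterParameters model from this SDK generation:

    import datetime

    from azure.mgmt.datafactory.models import RunFilterParameters

    def all_activity_runs(client, resource_group, factory_name, run_id):
        """Yield every ActivityRun, page by page."""
        token = None
        while True:
            filters = RunFilterParameters(
                last_updated_after=datetime.datetime(2019, 6, 1),
                last_updated_before=datetime.datetime(2019, 6, 8),
                continuation_token=token)
            page = client.activity_runs.query_by_pipeline_run(
                resource_group, factory_name, run_id, filters)
            for run in page.value:
                yield run
            token = page.continuation_token
            if not token:  # null token: no remaining results
                return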
- :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,20 +29,20 @@ class AmazonMWSLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param endpoint: Required. The endpoint of the Amazon MWS server, (i.e. + :param endpoint: The endpoint of the Amazon MWS server, (i.e. mws.amazonservices.com) :type endpoint: object - :param marketplace_id: Required. The Amazon Marketplace ID you want to - retrieve data from. To retrieve data from multiple Marketplace IDs, - separate them with a comma (,). (i.e. A2EUQ1WTGCTBG2) + :param marketplace_id: The Amazon Marketplace ID you want to retrieve data + from. To retrieve data from multiple Marketplace IDs, separate them with a + comma (,). (i.e. A2EUQ1WTGCTBG2) :type marketplace_id: object - :param seller_id: Required. The Amazon seller ID. + :param seller_id: The Amazon seller ID. :type seller_id: object :param mws_auth_token: The Amazon MWS authentication token. :type mws_auth_token: ~azure.mgmt.datafactory.models.SecretBase - :param access_key_id: Required. The access key id used to access data. + :param access_key_id: The access key id used to access data. :type access_key_id: object :param secret_key: The secret key used to access data. :type secret_key: ~azure.mgmt.datafactory.models.SecretBase @@ -91,16 +89,16 @@ class AmazonMWSLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AmazonMWSLinkedService, self).__init__(**kwargs) - self.endpoint = kwargs.get('endpoint', None) - self.marketplace_id = kwargs.get('marketplace_id', None) - self.seller_id = kwargs.get('seller_id', None) - self.mws_auth_token = kwargs.get('mws_auth_token', None) - self.access_key_id = kwargs.get('access_key_id', None) - self.secret_key = kwargs.get('secret_key', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, endpoint, marketplace_id, seller_id, access_key_id, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, mws_auth_token=None, secret_key=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None): + super(AmazonMWSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.endpoint = endpoint + self.marketplace_id = marketplace_id + self.seller_id = seller_id + self.mws_auth_token = mws_auth_token + self.access_key_id = access_key_id + self.secret_key = secret_key + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential self.type = 'AmazonMWS' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service_py3.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service_py3.py deleted file mode 100644 index a8db63933154..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service_py3.py +++ /dev/null @@ -1,106 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AmazonMWSLinkedService(LinkedService): - """Amazon Marketplace Web Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param endpoint: Required. The endpoint of the Amazon MWS server, (i.e. - mws.amazonservices.com) - :type endpoint: object - :param marketplace_id: Required. The Amazon Marketplace ID you want to - retrieve data from. To retrieve data from multiple Marketplace IDs, - separate them with a comma (,). (i.e. A2EUQ1WTGCTBG2) - :type marketplace_id: object - :param seller_id: Required. The Amazon seller ID. - :type seller_id: object - :param mws_auth_token: The Amazon MWS authentication token. - :type mws_auth_token: ~azure.mgmt.datafactory.models.SecretBase - :param access_key_id: Required. The access key id used to access data. - :type access_key_id: object - :param secret_key: The secret key used to access data. - :type secret_key: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
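For reference, the four formerly required AmazonMWS fields now lead the flattened signature, and secret-typed fields still take a SecretBase subclass such as SecureString. A sketch with placeholder values (the endpoint and marketplace ID are the docstring's own examples):

    from azure.mgmt.datafactory.models import AmazonMWSLinkedService, SecureString

    mws = AmazonMWSLinkedService(
        endpoint='mws.amazonservices.com',
        marketplace_id='A2EUQ1WTGCTBG2',   # comma-separate multiple marketplace IDs
        seller_id='<seller-id>',
        access_key_id='<access-key-id>',
        mws_auth_token=SecureString(value='<auth-token>'),
        secret_key=SecureString(value='<secret-key>'),
    )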
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'marketplace_id': {'required': True}, - 'seller_id': {'required': True}, - 'access_key_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'marketplace_id': {'key': 'typeProperties.marketplaceID', 'type': 'object'}, - 'seller_id': {'key': 'typeProperties.sellerID', 'type': 'object'}, - 'mws_auth_token': {'key': 'typeProperties.mwsAuthToken', 'type': 'SecretBase'}, - 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, - 'secret_key': {'key': 'typeProperties.secretKey', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, endpoint, marketplace_id, seller_id, access_key_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, mws_auth_token=None, secret_key=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(AmazonMWSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.endpoint = endpoint - self.marketplace_id = marketplace_id - self.seller_id = seller_id - self.mws_auth_token = mws_auth_token - self.access_key_id = access_key_id - self.secret_key = secret_key - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'AmazonMWS' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset.py index 9885f5c77d8c..3bb4ec150825 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset.py @@ -15,8 +15,6 @@ class AmazonMWSObjectDataset(Dataset): """Amazon Marketplace Web Service dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class AmazonMWSObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. 
:type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class AmazonMWSObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -66,7 +64,7 @@ class AmazonMWSObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AmazonMWSObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): + super(AmazonMWSObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name self.type = 'AmazonMWSObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset_py3.py deleted file mode 100644 index 015ed9401c15..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class AmazonMWSObjectDataset(Dataset): - """Amazon Marketplace Web Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. 
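Datasets bind to their linked service through a LinkedServiceReference; under the merged signature above, an AmazonMWSObjectDataset needs only that reference plus any type properties. A sketch with hypothetical names:

    from azure.mgmt.datafactory.models import (
        AmazonMWSObjectDataset, LinkedServiceReference)

    dataset = AmazonMWSObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name='AmazonMwsLs'),
        table_name='Orders',  # serialized under typeProperties.tableName
    )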
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(AmazonMWSObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'AmazonMWSObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source.py index f9d034e610d4..7a84c35b3418 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source.py @@ -15,8 +15,6 @@ class AmazonMWSSource(CopySource): """A copy activity Amazon Marketplace Web Service source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class AmazonMWSSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
@@ -51,7 +49,7 @@ class AmazonMWSSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AmazonMWSSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(AmazonMWSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'AmazonMWSSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source_py3.py deleted file mode 100644 index 9ef7f5b30244..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class AmazonMWSSource(CopySource): - """A copy activity Amazon Marketplace Web Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
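The retry and concurrency knobs above are shared by all copy sources; on AmazonMWSSource they combine with the optional query like so (illustrative values, wait pattern per the docstring):

    from azure.mgmt.datafactory.models import AmazonMWSSource

    source = AmazonMWSSource(
        query='SELECT * FROM Orders',  # hypothetical query
        source_retry_count=3,
        source_retry_wait='00:00:30',
        max_concurrent_connections=2,
    )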
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(AmazonMWSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'AmazonMWSSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service.py index 4272b28c13f5..117840257edf 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service.py @@ -15,8 +15,6 @@ class AmazonRedshiftLinkedService(LinkedService): """Linked service for Amazon Redshift. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,18 +29,18 @@ class AmazonRedshiftLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param server: Required. The name of the Amazon Redshift server. Type: - string (or Expression with resultType string). + :param server: The name of the Amazon Redshift server. Type: string (or + Expression with resultType string). :type server: object :param username: The username of the Amazon Redshift source. Type: string (or Expression with resultType string). :type username: object :param password: The password of the Amazon Redshift source. :type password: ~azure.mgmt.datafactory.models.SecretBase - :param database: Required. The database name of the Amazon Redshift - source. Type: string (or Expression with resultType string). + :param database: The database name of the Amazon Redshift source. Type: + string (or Expression with resultType string). :type database: object :param port: The TCP port number that the Amazon Redshift server uses to listen for client connections. The default value is 5439. 
Type: integer @@ -75,12 +73,12 @@ class AmazonRedshiftLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AmazonRedshiftLinkedService, self).__init__(**kwargs) - self.server = kwargs.get('server', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.database = kwargs.get('database', None) - self.port = kwargs.get('port', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, server, database, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, username=None, password=None, port=None, encrypted_credential=None): + super(AmazonRedshiftLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.server = server + self.username = username + self.password = password + self.database = database + self.port = port + self.encrypted_credential = encrypted_credential self.type = 'AmazonRedshift' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service_py3.py deleted file mode 100644 index 3b84583c6c86..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service_py3.py +++ /dev/null @@ -1,86 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AmazonRedshiftLinkedService(LinkedService): - """Linked service for Amazon Redshift. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param server: Required. The name of the Amazon Redshift server. Type: - string (or Expression with resultType string). - :type server: object - :param username: The username of the Amazon Redshift source. Type: string - (or Expression with resultType string). - :type username: object - :param password: The password of the Amazon Redshift source. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param database: Required. The database name of the Amazon Redshift - source. Type: string (or Expression with resultType string). 
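Under the updated AmazonRedshiftLinkedService signature, server and database lead and everything else stays optional. A sketch with a hypothetical cluster (5439 is the documented default port):

    from azure.mgmt.datafactory.models import (
        AmazonRedshiftLinkedService, SecureString)

    redshift = AmazonRedshiftLinkedService(
        server='redshift-cluster.example.com',  # hypothetical host
        database='dev',
        username='admin',
        password=SecureString(value='<password>'),
        port=5439,
    )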
- :type database: object - :param port: The TCP port number that the Amazon Redshift server uses to - listen for client connections. The default value is 5439. Type: integer - (or Expression with resultType integer). - :type port: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'database': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, server, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, username=None, password=None, port=None, encrypted_credential=None, **kwargs) -> None: - super(AmazonRedshiftLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.server = server - self.username = username - self.password = password - self.database = database - self.port = port - self.encrypted_credential = encrypted_credential - self.type = 'AmazonRedshift' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source.py index d4fdfa4aa2ba..61f7ae3c5cd4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source.py @@ -15,8 +15,6 @@ class AmazonRedshiftSource(CopySource): """A copy activity source for Amazon Redshift Source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class AmazonRedshiftSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType string). 
@@ -58,8 +56,8 @@ class AmazonRedshiftSource(CopySource): 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, } - def __init__(self, **kwargs): - super(AmazonRedshiftSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.redshift_unload_settings = kwargs.get('redshift_unload_settings', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, redshift_unload_settings=None): + super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query + self.redshift_unload_settings = redshift_unload_settings self.type = 'AmazonRedshiftSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source_py3.py deleted file mode 100644 index 9b34b2ef5b97..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source_py3.py +++ /dev/null @@ -1,65 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class AmazonRedshiftSource(CopySource): - """A copy activity source for Amazon Redshift Source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - :param redshift_unload_settings: The Amazon S3 settings needed for the - interim Amazon S3 when copying from Amazon Redshift with unload. With - this, data from Amazon Redshift source will be unloaded into S3 first and - then copied into the targeted sink from the interim S3. 
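When redshift_unload_settings is supplied, the copy first UNLOADs to the interim S3 bucket and then moves the staged files to the sink, as described above. A sketch assuming the RedshiftUnloadSettings model takes an S3 linked service reference and a bucket name, as in this SDK generation:

    from azure.mgmt.datafactory.models import (
        AmazonRedshiftSource, LinkedServiceReference, RedshiftUnloadSettings)

    source = AmazonRedshiftSource(
        query='select * from public.sales',  # hypothetical query
        redshift_unload_settings=RedshiftUnloadSettings(
            s3_linked_service_name=LinkedServiceReference(reference_name='InterimS3'),
            bucket_name='interim-unload-bucket'),  # hypothetical staging bucket
    )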
- :type redshift_unload_settings: - ~azure.mgmt.datafactory.models.RedshiftUnloadSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, redshift_unload_settings=None, **kwargs) -> None: - super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.redshift_unload_settings = redshift_unload_settings - self.type = 'AmazonRedshiftSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_table_dataset.py index 987151367421..a7f86a62e4d3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_table_dataset.py @@ -15,8 +15,6 @@ class AmazonRedshiftTableDataset(Dataset): """The Amazon Redshift table dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class AmazonRedshiftTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class AmazonRedshiftTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. 
@@ -74,9 +72,9 @@ class AmazonRedshiftTableDataset(Dataset): 'amazon_redshift_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AmazonRedshiftTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.amazon_redshift_table_dataset_schema = kwargs.get('amazon_redshift_table_dataset_schema', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, amazon_redshift_table_dataset_schema=None): + super(AmazonRedshiftTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name + self.table = table + self.amazon_redshift_table_dataset_schema = amazon_redshift_table_dataset_schema self.type = 'AmazonRedshiftTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_table_dataset_py3.py deleted file mode 100644 index ceceaaba43e4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_table_dataset_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class AmazonRedshiftTableDataset(Dataset): - """The Amazon Redshift table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. 
Please consider using - schema + table properties instead. - :type table_name: object - :param table: The Amazon Redshift table name. Type: string (or Expression - with resultType string). - :type table: object - :param amazon_redshift_table_dataset_schema: The Amazon Redshift schema - name. Type: string (or Expression with resultType string). - :type amazon_redshift_table_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'amazon_redshift_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, amazon_redshift_table_dataset_schema=None, **kwargs) -> None: - super(AmazonRedshiftTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.table = table - self.amazon_redshift_table_dataset_schema = amazon_redshift_table_dataset_schema - self.type = 'AmazonRedshiftTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset.py index e91a5ba26131..534b7de2049d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset.py @@ -15,8 +15,6 @@ class AmazonS3Dataset(Dataset): """A single Amazon Simple Storage Service (S3) object or a set of S3 objects. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class AmazonS3Dataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,10 +39,10 @@ class AmazonS3Dataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. 
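Because Dataset already owns the top-level schema slot, the Redshift schema name is surfaced as amazon_redshift_table_dataset_schema and mapped back to typeProperties.schema on the wire; table_name is retained but slated for retirement. For example (hypothetical names):

    from azure.mgmt.datafactory.models import (
        AmazonRedshiftTableDataset, LinkedServiceReference)

    dataset = AmazonRedshiftTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='RedshiftLs'),
        table='sales',                                  # typeProperties.table
        amazon_redshift_table_dataset_schema='public',  # typeProperties.schema
    )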
:type type: str - :param bucket_name: Required. The name of the Amazon S3 bucket. Type: - string (or Expression with resultType string). + :param bucket_name: The name of the Amazon S3 bucket. Type: string (or + Expression with resultType string). :type bucket_name: object :param key: The key of the Amazon S3 object. Type: string (or Expression with resultType string). @@ -94,14 +92,14 @@ class AmazonS3Dataset(Dataset): 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } - def __init__(self, **kwargs): - super(AmazonS3Dataset, self).__init__(**kwargs) - self.bucket_name = kwargs.get('bucket_name', None) - self.key = kwargs.get('key', None) - self.prefix = kwargs.get('prefix', None) - self.version = kwargs.get('version', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) - self.format = kwargs.get('format', None) - self.compression = kwargs.get('compression', None) + def __init__(self, linked_service_name, bucket_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, key=None, prefix=None, version=None, modified_datetime_start=None, modified_datetime_end=None, format=None, compression=None): + super(AmazonS3Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.bucket_name = bucket_name + self.key = key + self.prefix = prefix + self.version = version + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + self.format = format + self.compression = compression self.type = 'AmazonS3Object' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset_py3.py deleted file mode 100644 index d84ae48b2a46..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset_py3.py +++ /dev/null @@ -1,107 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class AmazonS3Dataset(Dataset): - """A single Amazon Simple Storage Service (S3) object or a set of S3 objects. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. 
- :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param bucket_name: Required. The name of the Amazon S3 bucket. Type: - string (or Expression with resultType string). - :type bucket_name: object - :param key: The key of the Amazon S3 object. Type: string (or Expression - with resultType string). - :type key: object - :param prefix: The prefix filter for the S3 object name. Type: string (or - Expression with resultType string). - :type prefix: object - :param version: The version for the S3 object. Type: string (or Expression - with resultType string). - :type version: object - :param modified_datetime_start: The start of S3 object's modified - datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of S3 object's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_end: object - :param format: The format of files. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used for the Amazon S3 - object. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'bucket_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'bucket_name': {'key': 'typeProperties.bucketName', 'type': 'object'}, - 'key': {'key': 'typeProperties.key', 'type': 'object'}, - 'prefix': {'key': 'typeProperties.prefix', 'type': 'object'}, - 'version': {'key': 'typeProperties.version', 'type': 'object'}, - 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, *, linked_service_name, bucket_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, key=None, prefix=None, version=None, modified_datetime_start=None, modified_datetime_end=None, format=None, compression=None, **kwargs) -> None: - super(AmazonS3Dataset, self).__init__(additional_properties=additional_properties, 
description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.bucket_name = bucket_name - self.key = key - self.prefix = prefix - self.version = version - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end - self.format = format - self.compression = compression - self.type = 'AmazonS3Object' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service.py index 250518c1a7ec..aecfbcacb30c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service.py @@ -15,8 +15,6 @@ class AmazonS3LinkedService(LinkedService): """Linked service for Amazon S3. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class AmazonS3LinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param access_key_id: The access key identifier of the Amazon S3 Identity and Access Management (IAM) user. Type: string (or Expression with @@ -68,10 +66,10 @@ class AmazonS3LinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AmazonS3LinkedService, self).__init__(**kwargs) - self.access_key_id = kwargs.get('access_key_id', None) - self.secret_access_key = kwargs.get('secret_access_key', None) - self.service_url = kwargs.get('service_url', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, access_key_id=None, secret_access_key=None, service_url=None, encrypted_credential=None): + super(AmazonS3LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.access_key_id = access_key_id + self.secret_access_key = secret_access_key + self.service_url = service_url + self.encrypted_credential = encrypted_credential self.type = 'AmazonS3' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service_py3.py deleted file mode 100644 index 8d136bb71fc0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service_py3.py +++ /dev/null @@ -1,77 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. 
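Note: AmazonS3LinkedService follows the same pattern, but all of its constructor parameters are optional. A hedged sketch; SecureString is one concrete SecretBase implementation, and the credential values are placeholders:

    from azure.mgmt.datafactory.models import AmazonS3LinkedService, SecureString

    s3_linked_service = AmazonS3LinkedService(
        access_key_id='<access-key-id>',                   # IAM access key id (placeholder)
        secret_access_key=SecureString(value='<secret>'),  # SecretBase-derived secret
    )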
-# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AmazonS3LinkedService(LinkedService): - """Linked service for Amazon S3. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param access_key_id: The access key identifier of the Amazon S3 Identity - and Access Management (IAM) user. Type: string (or Expression with - resultType string). - :type access_key_id: object - :param secret_access_key: The secret access key of the Amazon S3 Identity - and Access Management (IAM) user. - :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase - :param service_url: This value specifies the endpoint to access with the - S3 Connector. This is an optional property; change it only if you want to - try a different service endpoint or want to switch between https and http. - Type: string (or Expression with resultType string). - :type service_url: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, - 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, - 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_key_id=None, secret_access_key=None, service_url=None, encrypted_credential=None, **kwargs) -> None: - super(AmazonS3LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.access_key_id = access_key_id - self.secret_access_key = secret_access_key - self.service_url = service_url - self.encrypted_credential = encrypted_credential - self.type = 'AmazonS3' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location.py index 74c77a16f0f2..555e57d8adac 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location.py @@ -15,12 +15,10 @@ class AmazonS3Location(DatasetLocation): """The location of amazon S3 dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. + :param type: Type of dataset storage location. :type type: str :param folder_path: Specify the folder path of dataset. 
Type: string (or Expression with resultType string) @@ -49,7 +47,7 @@ class AmazonS3Location(DatasetLocation): 'version': {'key': 'version', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AmazonS3Location, self).__init__(**kwargs) - self.bucket_name = kwargs.get('bucket_name', None) - self.version = kwargs.get('version', None) + def __init__(self, type, additional_properties=None, folder_path=None, file_name=None, bucket_name=None, version=None): + super(AmazonS3Location, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name) + self.bucket_name = bucket_name + self.version = version diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location_py3.py deleted file mode 100644 index 36afce341ada..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location_py3.py +++ /dev/null @@ -1,55 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_location_py3 import DatasetLocation - - -class AmazonS3Location(DatasetLocation): - """The location of amazon S3 dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). - :type file_name: object - :param bucket_name: Specify the bucketName of amazon S3. Type: string (or - Expression with resultType string) - :type bucket_name: object - :param version: Specify the version of amazon S3. Type: string (or - Expression with resultType string). 
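Note: AmazonS3Location now takes the dataset-location type discriminator as a required first argument. A sketch assuming 'AmazonS3Location' is the serialized type value for this location kind:

    from azure.mgmt.datafactory.models import AmazonS3Location

    location = AmazonS3Location(
        type='AmazonS3Location',   # assumed discriminator value
        bucket_name='my-bucket',   # illustrative
        folder_path='raw/2019',
    )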
- :type version: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - 'bucket_name': {'key': 'bucketName', 'type': 'object'}, - 'version': {'key': 'version', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, bucket_name=None, version=None, **kwargs) -> None: - super(AmazonS3Location, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) - self.bucket_name = bucket_name - self.version = version diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings.py index e83910136070..72af6571d726 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings.py @@ -15,12 +15,10 @@ class AmazonS3ReadSettings(StoreReadSettings): """Azure data lake store read settings. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. The read setting type. + :param type: The read setting type. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType @@ -67,12 +65,12 @@ class AmazonS3ReadSettings(StoreReadSettings): 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AmazonS3ReadSettings, self).__init__(**kwargs) - self.recursive = kwargs.get('recursive', None) - self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) - self.wildcard_file_name = kwargs.get('wildcard_file_name', None) - self.prefix = kwargs.get('prefix', None) - self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + def __init__(self, type, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, enable_partition_discovery=None, modified_datetime_start=None, modified_datetime_end=None): + super(AmazonS3ReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.prefix = prefix + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings_py3.py deleted file mode 100644 index 79645a869ac8..000000000000 --- 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings_py3.py +++ /dev/null @@ -1,78 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .store_read_settings_py3 import StoreReadSettings - - -class AmazonS3ReadSettings(StoreReadSettings): - """Azure data lake store read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - :param wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or - Expression with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or - Expression with resultType string). - :type wildcard_file_name: object - :param prefix: The prefix filter for the S3 object name. Type: string (or - Expression with resultType string). - :type prefix: object - :param enable_partition_discovery: Indicates whether to enable partition - discovery. - :type enable_partition_discovery: bool - :param modified_datetime_start: The start of file's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: - string (or Expression with resultType string). 
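Note: AmazonS3ReadSettings likewise requires its type string up front, with the remaining filters optional. A sketch assuming 'AmazonS3ReadSettings' as the serialized type value:

    from azure.mgmt.datafactory.models import AmazonS3ReadSettings

    read_settings = AmazonS3ReadSettings(
        type='AmazonS3ReadSettings',      # assumed discriminator value
        recursive=True,                   # read files under the folder path recursively
        wildcard_file_name='*.csv',
        enable_partition_discovery=False,
    )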
- :type modified_datetime_end: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'prefix': {'key': 'prefix', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: - super(AmazonS3ReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.recursive = recursive - self.wildcard_folder_path = wildcard_folder_path - self.wildcard_file_name = wildcard_file_name - self.prefix = prefix - self.enable_partition_discovery = enable_partition_discovery - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity.py index 36a25e959061..a32f100fbde5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity.py @@ -15,12 +15,10 @@ class AppendVariableActivity(ControlActivity): """Append value for a Variable of type Array. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Activity name. :type name: str :param description: Activity description. :type description: str @@ -28,7 +26,7 @@ class AppendVariableActivity(ControlActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param variable_name: Name of the variable whose value needs to be appended to. 
@@ -53,8 +51,8 @@ class AppendVariableActivity(ControlActivity): 'value': {'key': 'typeProperties.value', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AppendVariableActivity, self).__init__(**kwargs) - self.variable_name = kwargs.get('variable_name', None) - self.value = kwargs.get('value', None) + def __init__(self, name, additional_properties=None, description=None, depends_on=None, user_properties=None, variable_name=None, value=None): + super(AppendVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties) + self.variable_name = variable_name + self.value = value self.type = 'AppendVariable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity_py3.py deleted file mode 100644 index 4526a6e4a45e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity_py3.py +++ /dev/null @@ -1,60 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .control_activity_py3 import ControlActivity - - -class AppendVariableActivity(ControlActivity): - """Append value for a Variable of type Array. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param variable_name: Name of the variable whose value needs to be - appended to. - :type variable_name: str - :param value: Value to be appended. 
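Note: AppendVariableActivity now requires name as its first argument. A sketch; the activity, variable, and expression names are illustrative:

    from azure.mgmt.datafactory.models import AppendVariableActivity

    append_run_id = AppendVariableActivity(
        name='AppendRunId',
        variable_name='runIds',        # must name an Array-typed pipeline variable
        value='@pipeline().RunId',     # a static value or an expression
    )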
Could be a static value or Expression - :type value: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, - 'value': {'key': 'typeProperties.value', 'type': 'object'}, - } - - def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, variable_name: str=None, value=None, **kwargs) -> None: - super(AppendVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.variable_name = variable_name - self.value = value - self.type = 'AppendVariable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset.py index d206ac99ab85..febe10329e14 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset.py @@ -15,8 +15,6 @@ class AvroDataset(Dataset): """Avro dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class AvroDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,9 +39,9 @@ class AvroDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param location: Required. The location of the avro storage. + :param location: The location of the avro storage. 
:type location: ~azure.mgmt.datafactory.models.DatasetLocation :param avro_compression_codec: Possible values include: 'none', 'deflate', 'snappy', 'xz', 'bzip2' @@ -75,9 +73,9 @@ class AvroDataset(Dataset): 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, } - def __init__(self, **kwargs): - super(AvroDataset, self).__init__(**kwargs) - self.location = kwargs.get('location', None) - self.avro_compression_codec = kwargs.get('avro_compression_codec', None) - self.avro_compression_level = kwargs.get('avro_compression_level', None) + def __init__(self, linked_service_name, location, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, avro_compression_codec=None, avro_compression_level=None): + super(AvroDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.location = location + self.avro_compression_codec = avro_compression_codec + self.avro_compression_level = avro_compression_level self.type = 'Avro' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset_py3.py deleted file mode 100644 index f0f44dbbd786..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset_py3.py +++ /dev/null @@ -1,83 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class AvroDataset(Dataset): - """Avro dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param location: Required. The location of the avro storage. 
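Note: AvroDataset makes both linked_service_name and location required, and avro_compression_level is validated to the 1-9 range. A sketch reusing the AmazonS3Location shape from earlier in this patch; the names are placeholders:

    from azure.mgmt.datafactory.models import (
        AmazonS3Location, AvroDataset, LinkedServiceReference)

    avro_dataset = AvroDataset(
        linked_service_name=LinkedServiceReference(reference_name='MyS3LinkedService'),
        location=AmazonS3Location(type='AmazonS3Location', bucket_name='my-bucket'),
        avro_compression_codec='deflate',
        avro_compression_level=5,       # must fall within 1..9 per _validation
    )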
- :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param avro_compression_codec: Possible values include: 'none', 'deflate', - 'snappy', 'xz', 'bzip2' - :type avro_compression_codec: str or - ~azure.mgmt.datafactory.models.AvroCompressionCodec - :param avro_compression_level: - :type avro_compression_level: int - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'location': {'required': True}, - 'avro_compression_level': {'maximum': 9, 'minimum': 1}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'}, - 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, - } - - def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, avro_compression_codec=None, avro_compression_level: int=None, **kwargs) -> None: - super(AvroDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.location = location - self.avro_compression_codec = avro_compression_codec - self.avro_compression_level = avro_compression_level - self.type = 'Avro' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format.py index f0346a76080c..0a015516867e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format.py @@ -15,8 +15,6 @@ class AvroFormat(DatasetStorageFormat): """The data stored in Avro format. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -26,7 +24,7 @@ class AvroFormat(DatasetStorageFormat): :param deserializer: Deserializer. Type: string (or Expression with resultType string). :type deserializer: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. 
:type type: str """ @@ -34,13 +32,6 @@ class AvroFormat(DatasetStorageFormat): 'type': {'required': True}, } - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(AvroFormat, self).__init__(**kwargs) + def __init__(self, additional_properties=None, serializer=None, deserializer=None): + super(AvroFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer) self.type = 'AvroFormat' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format_py3.py deleted file mode 100644 index 35d459c4b2a6..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format_py3.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_storage_format_py3 import DatasetStorageFormat - - -class AvroFormat(DatasetStorageFormat): - """The data stored in Avro format. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param serializer: Serializer. Type: string (or Expression with resultType - string). - :type serializer: object - :param deserializer: Deserializer. Type: string (or Expression with - resultType string). - :type deserializer: object - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, **kwargs) -> None: - super(AvroFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) - self.type = 'AvroFormat' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink.py index 34d4ceb1e0f6..2e3c5b260426 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink.py @@ -15,8 +15,6 @@ class AvroSink(CopySink): """A copy activity Avro sink. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -38,7 +36,7 @@ class AvroSink(CopySink): for the sink data store. 
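Note: AvroFormat keeps only the optional serializer/deserializer arguments, so the common case after this change is a bare instance attached to a dataset's format property:

    from azure.mgmt.datafactory.models import AvroFormat

    avro_format = AvroFormat()   # serializer and deserializer default to None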
Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param store_settings: Avro store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings @@ -62,8 +60,8 @@ class AvroSink(CopySink): 'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'}, } - def __init__(self, **kwargs): - super(AvroSink, self).__init__(**kwargs) - self.store_settings = kwargs.get('store_settings', None) - self.format_settings = kwargs.get('format_settings', None) + def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None): + super(AvroSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.store_settings = store_settings + self.format_settings = format_settings self.type = 'AvroSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink_py3.py deleted file mode 100644 index 16363092dff2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink_py3.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class AvroSink(CopySink): - """A copy activity Avro sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: Avro store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - :param format_settings: Avro format settings. 
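Note: AvroSink's constructor drops **kwargs as well; its format_settings takes an AvroWriteSettings, whose own constructor (changed later in this patch) requires a type string. A sketch with an assumed 'AvroWriteSettings' discriminator value:

    from azure.mgmt.datafactory.models import AvroSink, AvroWriteSettings

    sink = AvroSink(
        format_settings=AvroWriteSettings(
            type='AvroWriteSettings',   # assumed discriminator value
            record_name='root',         # top-level record name the AVRO spec requires
        ),
    )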
- :type format_settings: ~azure.mgmt.datafactory.models.AvroWriteSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: - super(AvroSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.store_settings = store_settings - self.format_settings = format_settings - self.type = 'AvroSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source.py index 3ea2e7a2a76f..6203410a93a8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source.py @@ -15,8 +15,6 @@ class AvroSource(CopySource): """A copy activity Avro source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class AvroSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param store_settings: Avro store settings. 
:type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings @@ -50,7 +48,7 @@ class AvroSource(CopySource): 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, } - def __init__(self, **kwargs): - super(AvroSource, self).__init__(**kwargs) - self.store_settings = kwargs.get('store_settings', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None): + super(AvroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.store_settings = store_settings self.type = 'AvroSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source_py3.py deleted file mode 100644 index 74b5e6db0fe2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source_py3.py +++ /dev/null @@ -1,56 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class AvroSource(CopySource): - """A copy activity Avro source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: Avro store settings. 
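Note: AvroSource pairs naturally with the read-settings types above. A sketch; the discriminator value is assumed as before:

    from azure.mgmt.datafactory.models import AmazonS3ReadSettings, AvroSource

    source = AvroSource(
        store_settings=AmazonS3ReadSettings(
            type='AmazonS3ReadSettings',   # assumed discriminator value
            recursive=True,
        ),
        max_concurrent_connections=4,      # throttle reads against the source store
    )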
- :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: - super(AvroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.store_settings = store_settings - self.type = 'AvroSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings.py index ec068ee29885..6d739312c2a5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings.py @@ -15,12 +15,10 @@ class AvroWriteSettings(FormatWriteSettings): """Avro write settings. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. The write setting type. + :param type: The write setting type. :type type: str :param record_name: Top level record name in write result, which is required in AVRO spec. @@ -40,7 +38,7 @@ class AvroWriteSettings(FormatWriteSettings): 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, } - def __init__(self, **kwargs): - super(AvroWriteSettings, self).__init__(**kwargs) - self.record_name = kwargs.get('record_name', None) - self.record_namespace = kwargs.get('record_namespace', None) + def __init__(self, type, additional_properties=None, record_name=None, record_namespace=None): + super(AvroWriteSettings, self).__init__(additional_properties=additional_properties, type=type) + self.record_name = record_name + self.record_namespace = record_namespace diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings_py3.py deleted file mode 100644 index d14ebc4d1d29..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings_py3.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from .format_write_settings_py3 import FormatWriteSettings - - -class AvroWriteSettings(FormatWriteSettings): - """Avro write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str - :param record_name: Top level record name in write result, which is - required in AVRO spec. - :type record_name: str - :param record_namespace: Record namespace in the write result. - :type record_namespace: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'record_name': {'key': 'recordName', 'type': 'str'}, - 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, - } - - def __init__(self, *, type: str, additional_properties=None, record_name: str=None, record_namespace: str=None, **kwargs) -> None: - super(AvroWriteSettings, self).__init__(additional_properties=additional_properties, type=type, **kwargs) - self.record_name = record_name - self.record_namespace = record_namespace diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service.py index 986023308e23..32d07953ab5c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service.py @@ -15,8 +15,6 @@ class AzureBatchLinkedService(LinkedService): """Azure Batch linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,21 +29,20 @@ class AzureBatchLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param account_name: Required. The Azure Batch account name. Type: string - (or Expression with resultType string). + :param account_name: The Azure Batch account name. Type: string (or + Expression with resultType string). :type account_name: object :param access_key: The Azure Batch account access key. :type access_key: ~azure.mgmt.datafactory.models.SecretBase - :param batch_uri: Required. The Azure Batch URI. Type: string (or - Expression with resultType string). + :param batch_uri: The Azure Batch URI. Type: string (or Expression with + resultType string). :type batch_uri: object - :param pool_name: Required. The Azure Batch pool name. Type: string (or - Expression with resultType string). + :param pool_name: The Azure Batch pool name. Type: string (or Expression + with resultType string). :type pool_name: object - :param linked_service_name: Required. The Azure Storage linked service - reference. + :param linked_service_name: The Azure Storage linked service reference. 
:type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param encrypted_credential: The encrypted credential used for @@ -77,12 +74,12 @@ class AzureBatchLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureBatchLinkedService, self).__init__(**kwargs) - self.account_name = kwargs.get('account_name', None) - self.access_key = kwargs.get('access_key', None) - self.batch_uri = kwargs.get('batch_uri', None) - self.pool_name = kwargs.get('pool_name', None) - self.linked_service_name = kwargs.get('linked_service_name', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, account_name, batch_uri, pool_name, linked_service_name, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, access_key=None, encrypted_credential=None): + super(AzureBatchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.account_name = account_name + self.access_key = access_key + self.batch_uri = batch_uri + self.pool_name = pool_name + self.linked_service_name = linked_service_name + self.encrypted_credential = encrypted_credential self.type = 'AzureBatch' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service_py3.py deleted file mode 100644 index e7d33dfb342a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service_py3.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzureBatchLinkedService(LinkedService): - """Azure Batch linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param account_name: Required. The Azure Batch account name. Type: string - (or Expression with resultType string). - :type account_name: object - :param access_key: The Azure Batch account access key. - :type access_key: ~azure.mgmt.datafactory.models.SecretBase - :param batch_uri: Required. The Azure Batch URI. 
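Note: AzureBatchLinkedService now takes its four required values (account_name, batch_uri, pool_name, linked_service_name) positionally. A sketch with illustrative account, URI, and pool names:

    from azure.mgmt.datafactory.models import (
        AzureBatchLinkedService, LinkedServiceReference, SecureString)

    batch_ls = AzureBatchLinkedService(
        account_name='mybatchaccount',
        batch_uri='https://mybatchaccount.westus2.batch.azure.com',  # illustrative URI
        pool_name='default-pool',
        linked_service_name=LinkedServiceReference(reference_name='MyStorageLinkedService'),
        access_key=SecureString(value='<batch-access-key>'),
    )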
Type: string (or - Expression with resultType string). - :type batch_uri: object - :param pool_name: Required. The Azure Batch pool name. Type: string (or - Expression with resultType string). - :type pool_name: object - :param linked_service_name: Required. The Azure Storage linked service - reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'account_name': {'required': True}, - 'batch_uri': {'required': True}, - 'pool_name': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, - 'access_key': {'key': 'typeProperties.accessKey', 'type': 'SecretBase'}, - 'batch_uri': {'key': 'typeProperties.batchUri', 'type': 'object'}, - 'pool_name': {'key': 'typeProperties.poolName', 'type': 'object'}, - 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, account_name, batch_uri, pool_name, linked_service_name, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_key=None, encrypted_credential=None, **kwargs) -> None: - super(AzureBatchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.account_name = account_name - self.access_key = access_key - self.batch_uri = batch_uri - self.pool_name = pool_name - self.linked_service_name = linked_service_name - self.encrypted_credential = encrypted_credential - self.type = 'AzureBatch' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset.py index 01814cf8f9a9..c02349fc7fe2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset.py @@ -15,8 +15,6 @@ class AzureBlobDataset(Dataset): """The Azure Blob storage. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class AzureBlobDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. 
:type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class AzureBlobDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param folder_path: The path of the Azure Blob storage. Type: string (or Expression with resultType string). @@ -88,13 +86,13 @@ class AzureBlobDataset(Dataset): 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } - def __init__(self, **kwargs): - super(AzureBlobDataset, self).__init__(**kwargs) - self.folder_path = kwargs.get('folder_path', None) - self.table_root_location = kwargs.get('table_root_location', None) - self.file_name = kwargs.get('file_name', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) - self.format = kwargs.get('format', None) - self.compression = kwargs.get('compression', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, table_root_location=None, file_name=None, modified_datetime_start=None, modified_datetime_end=None, format=None, compression=None): + super(AzureBlobDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.folder_path = folder_path + self.table_root_location = table_root_location + self.file_name = file_name + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + self.format = format + self.compression = compression self.type = 'AzureBlob' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset_py3.py deleted file mode 100644 index 706c39deb289..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset_py3.py +++ /dev/null @@ -1,100 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class AzureBlobDataset(Dataset): - """The Azure Blob storage. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. 
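# Illustrative sketch: only linked_service_name is required by the new
# AzureBlobDataset signature above; the reference name, container, folder and
# file names below are placeholders.
from azure.mgmt.datafactory.models import (
    AzureBlobDataset, LinkedServiceReference)

blob_ds = AzureBlobDataset(
    linked_service_name=LinkedServiceReference(reference_name='AzureBlobStorageLS'),
    folder_path='mycontainer/input',
    file_name='events.csv',
)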
- :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param folder_path: The path of the Azure Blob storage. Type: string (or - Expression with resultType string). - :type folder_path: object - :param table_root_location: The root of blob path. Type: string (or - Expression with resultType string). - :type table_root_location: object - :param file_name: The name of the Azure Blob. Type: string (or Expression - with resultType string). - :type file_name: object - :param modified_datetime_start: The start of Azure Blob's modified - datetime. Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of Azure Blob's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_end: object - :param format: The format of the Azure Blob storage. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used for the blob storage. 
- :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'table_root_location': {'key': 'typeProperties.tableRootLocation', 'type': 'object'}, - 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, - 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, table_root_location=None, file_name=None, modified_datetime_start=None, modified_datetime_end=None, format=None, compression=None, **kwargs) -> None: - super(AzureBlobDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.folder_path = folder_path - self.table_root_location = table_root_location - self.file_name = file_name - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end - self.format = format - self.compression = compression - self.type = 'AzureBlob' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset.py index 0ef62ff7122f..db988ec7988d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset.py @@ -15,8 +15,6 @@ class AzureBlobFSDataset(Dataset): """The Azure Data Lake Storage Gen2 storage. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class AzureBlobFSDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class AzureBlobFSDataset(Dataset): :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param folder_path: The path of the Azure Data Lake Storage Gen2 storage. Type: string (or Expression with resultType string). @@ -76,10 +74,10 @@ class AzureBlobFSDataset(Dataset): 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } - def __init__(self, **kwargs): - super(AzureBlobFSDataset, self).__init__(**kwargs) - self.folder_path = kwargs.get('folder_path', None) - self.file_name = kwargs.get('file_name', None) - self.format = kwargs.get('format', None) - self.compression = kwargs.get('compression', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, format=None, compression=None): + super(AzureBlobFSDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.folder_path = folder_path + self.file_name = file_name + self.format = format + self.compression = compression self.type = 'AzureBlobFSFile' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset_py3.py deleted file mode 100644 index 82136a683fd3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset_py3.py +++ /dev/null @@ -1,85 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class AzureBlobFSDataset(Dataset): - """The Azure Data Lake Storage Gen2 storage. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. 
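# Illustrative sketch of the matching ADLS Gen2 dataset; the constructor above
# fills in the 'AzureBlobFSFile' discriminator itself. Paths and the reference
# name are placeholders.
from azure.mgmt.datafactory.models import (
    AzureBlobFSDataset, LinkedServiceReference)

adls2_ds = AzureBlobFSDataset(
    linked_service_name=LinkedServiceReference(reference_name='AzureBlobFSLS'),
    folder_path='myfilesystem/raw',
    file_name='events.parquet',
)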
If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param folder_path: The path of the Azure Data Lake Storage Gen2 storage. - Type: string (or Expression with resultType string). - :type folder_path: object - :param file_name: The name of the Azure Data Lake Storage Gen2. Type: - string (or Expression with resultType string). - :type file_name: object - :param format: The format of the Azure Data Lake Storage Gen2 storage. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used for the blob storage. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, format=None, compression=None, **kwargs) -> None: - super(AzureBlobFSDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.folder_path = folder_path - self.file_name = file_name - self.format = format - self.compression = compression - self.type = 'AzureBlobFSFile' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service.py index 262ce976227b..5b8d624d719f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service.py @@ -15,8 +15,6 @@ class AzureBlobFSLinkedService(LinkedService): """Azure Data Lake Storage Gen2 linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,10 +29,10 @@ class AzureBlobFSLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. 
:type type: str - :param url: Required. Endpoint for the Azure Data Lake Storage Gen2 - service. Type: string (or Expression with resultType string). + :param url: Endpoint for the Azure Data Lake Storage Gen2 service. Type: + string (or Expression with resultType string). :type url: object :param account_key: Account key for the Azure Data Lake Storage Gen2 service. Type: string (or Expression with resultType string). @@ -75,12 +73,12 @@ class AzureBlobFSLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureBlobFSLinkedService, self).__init__(**kwargs) - self.url = kwargs.get('url', None) - self.account_key = kwargs.get('account_key', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, url, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, account_key=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None): + super(AzureBlobFSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.url = url + self.account_key = account_key + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential self.type = 'AzureBlobFS' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service_py3.py deleted file mode 100644 index f0d555078bf7..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service_py3.py +++ /dev/null @@ -1,86 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzureBlobFSLinkedService(LinkedService): - """Azure Data Lake Storage Gen2 linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. 
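# Illustrative sketch: service principal authentication with the new
# AzureBlobFSLinkedService signature above. Only url is required; the
# endpoint, ids and key below are placeholders.
from azure.mgmt.datafactory.models import AzureBlobFSLinkedService, SecureString

adls2_ls = AzureBlobFSLinkedService(
    url='https://myaccount.dfs.core.windows.net',
    service_principal_id='<application-id>',
    service_principal_key=SecureString(value='<application-key>'),
    tenant='<tenant-id>',
)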
- :type type: str - :param url: Required. Endpoint for the Azure Data Lake Storage Gen2 - service. Type: string (or Expression with resultType string). - :type url: object - :param account_key: Account key for the Azure Data Lake Storage Gen2 - service. Type: string (or Expression with resultType string). - :type account_key: object - :param service_principal_id: The ID of the application used to - authenticate against the Azure Data Lake Storage Gen2 account. Type: - string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The Key of the application used to - authenticate against the Azure Data Lake Storage Gen2 account. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal - belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, account_key=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: - super(AzureBlobFSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.url = url - self.account_key = account_key - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.encrypted_credential = encrypted_credential - self.type = 'AzureBlobFS' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location.py index c21525bbac4c..775127e81ac3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location.py @@ -15,12 +15,10 @@ class AzureBlobFSLocation(DatasetLocation): """The location of azure blobFS dataset. - All required parameters must be populated in order to send to Azure. 
-
     :param additional_properties: Unmatched properties from the message are
      deserialized this collection
     :type additional_properties: dict[str, object]
-    :param type: Required. Type of dataset storage location.
+    :param type: Type of dataset storage location.
     :type type: str
     :param folder_path: Specify the folder path of dataset. Type: string (or
      Expression with resultType string)
@@ -45,6 +43,6 @@ class AzureBlobFSLocation(DatasetLocation):
         'file_system': {'key': 'fileSystem', 'type': 'object'},
     }
 
-    def __init__(self, **kwargs):
-        super(AzureBlobFSLocation, self).__init__(**kwargs)
-        self.file_system = kwargs.get('file_system', None)
+    def __init__(self, type, additional_properties=None, folder_path=None, file_name=None, file_system=None):
+        super(AzureBlobFSLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name)
+        self.file_system = file_system
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location_py3.py
deleted file mode 100644
index afbae52fdeb0..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location_py3.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .dataset_location_py3 import DatasetLocation
-
-
-class AzureBlobFSLocation(DatasetLocation):
-    """The location of azure blobFS dataset.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param type: Required. Type of dataset storage location.
-    :type type: str
-    :param folder_path: Specify the folder path of dataset. Type: string (or
-     Expression with resultType string)
-    :type folder_path: object
-    :param file_name: Specify the file name of dataset. Type: string (or
-     Expression with resultType string).
-    :type file_name: object
-    :param file_system: Specify the fileSystem of azure blobFS. Type: string
-     (or Expression with resultType string).
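# Illustrative sketch of the new ADLS Gen2 dataset location. 'type' is still
# validated as required and is not filled in by this constructor; the
# 'AzureBlobFSLocation' discriminator string is assumed here, and the file
# system and folder names are placeholders.
from azure.mgmt.datafactory.models import AzureBlobFSLocation

adls2_location = AzureBlobFSLocation(
    type='AzureBlobFSLocation',
    file_system='myfilesystem',
    folder_path='raw/2019/06',
)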
- :type file_system: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - 'file_system': {'key': 'fileSystem', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, file_system=None, **kwargs) -> None: - super(AzureBlobFSLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) - self.file_system = file_system diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings.py index 6d80ce72ea57..03e35d1b4791 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings.py @@ -15,12 +15,10 @@ class AzureBlobFSReadSettings(StoreReadSettings): """Azure blobFS read settings. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. The read setting type. + :param type: The read setting type. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType @@ -63,11 +61,11 @@ class AzureBlobFSReadSettings(StoreReadSettings): 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureBlobFSReadSettings, self).__init__(**kwargs) - self.recursive = kwargs.get('recursive', None) - self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) - self.wildcard_file_name = kwargs.get('wildcard_file_name', None) - self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + def __init__(self, type, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery=None, modified_datetime_start=None, modified_datetime_end=None): + super(AzureBlobFSReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings_py3.py deleted file mode 100644 index af4746e84f8e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings_py3.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# 
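# Illustrative sketch of ADLS Gen2 read settings; as with the location model,
# 'type' remains required and the discriminator string is assumed.
from azure.mgmt.datafactory.models import AzureBlobFSReadSettings

adls2_read = AzureBlobFSReadSettings(
    type='AzureBlobFSReadSettings',
    recursive=True,
    wildcard_file_name='*.parquet',  # placeholder wildcard
)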
-------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .store_read_settings_py3 import StoreReadSettings - - -class AzureBlobFSReadSettings(StoreReadSettings): - """Azure blobFS read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - :param wildcard_folder_path: Azure blobFS wildcardFolderPath. Type: string - (or Expression with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or - Expression with resultType string). - :type wildcard_file_name: object - :param enable_partition_discovery: Indicates whether to enable partition - discovery. - :type enable_partition_discovery: bool - :param modified_datetime_start: The start of file's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: - string (or Expression with resultType string). 
- :type modified_datetime_end: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: - super(AzureBlobFSReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.recursive = recursive - self.wildcard_folder_path = wildcard_folder_path - self.wildcard_file_name = wildcard_file_name - self.enable_partition_discovery = enable_partition_discovery - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py index a47b173c6581..586e9fe08c30 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py @@ -15,8 +15,6 @@ class AzureBlobFSSink(CopySink): """A copy activity Azure Data Lake Storage Gen2 sink. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -38,7 +36,7 @@ class AzureBlobFSSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param copy_behavior: The type of copy behavior for copy sink. 
:type copy_behavior: object @@ -59,7 +57,7 @@ class AzureBlobFSSink(CopySink): 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureBlobFSSink, self).__init__(**kwargs) - self.copy_behavior = kwargs.get('copy_behavior', None) + def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None): + super(AzureBlobFSSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.copy_behavior = copy_behavior self.type = 'AzureBlobFSSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py deleted file mode 100644 index e2b28bf30a8c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py +++ /dev/null @@ -1,65 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class AzureBlobFSSink(CopySink): - """A copy activity Azure Data Lake Storage Gen2 sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param copy_behavior: The type of copy behavior for copy sink. 
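# Illustrative sketch of the ADLS Gen2 copy sink; every argument in the new
# signature above is optional. 'PreserveHierarchy' is one of the documented
# copy behavior values.
from azure.mgmt.datafactory.models import AzureBlobFSSink

adls2_sink = AzureBlobFSSink(
    copy_behavior='PreserveHierarchy',
    max_concurrent_connections=4,  # placeholder tuning value
)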
- :type copy_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: - super(AzureBlobFSSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.copy_behavior = copy_behavior - self.type = 'AzureBlobFSSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source.py index 0252ffd5ba8f..cb74db742e84 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source.py @@ -15,8 +15,6 @@ class AzureBlobFSSource(CopySource): """A copy activity Azure BlobFS source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class AzureBlobFSSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType boolean). 
@@ -60,9 +58,9 @@ class AzureBlobFSSource(CopySource): 'recursive': {'key': 'recursive', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureBlobFSSource, self).__init__(**kwargs) - self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None) - self.skip_header_line_count = kwargs.get('skip_header_line_count', None) - self.recursive = kwargs.get('recursive', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, treat_empty_as_null=None, skip_header_line_count=None, recursive=None): + super(AzureBlobFSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.treat_empty_as_null = treat_empty_as_null + self.skip_header_line_count = skip_header_line_count + self.recursive = recursive self.type = 'AzureBlobFSSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source_py3.py deleted file mode 100644 index 5b512c1f334f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source_py3.py +++ /dev/null @@ -1,68 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class AzureBlobFSSource(CopySource): - """A copy activity Azure BlobFS source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param treat_empty_as_null: Treat empty as null. Type: boolean (or - Expression with resultType boolean). - :type treat_empty_as_null: object - :param skip_header_line_count: Number of header lines to skip from each - blob. Type: integer (or Expression with resultType integer). - :type skip_header_line_count: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). 
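# Illustrative sketch of the corresponding copy source; all arguments are
# optional, so AzureBlobFSSource() alone would also be valid.
from azure.mgmt.datafactory.models import AzureBlobFSSource

adls2_source = AzureBlobFSSource(
    treat_empty_as_null=True,
    skip_header_line_count=1,
    recursive=True,
)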
- :type recursive: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, - 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, treat_empty_as_null=None, skip_header_line_count=None, recursive=None, **kwargs) -> None: - super(AzureBlobFSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.treat_empty_as_null = treat_empty_as_null - self.skip_header_line_count = skip_header_line_count - self.recursive = recursive - self.type = 'AzureBlobFSSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings.py index f91971b829f7..af32061a91a9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings.py @@ -15,8 +15,6 @@ class AzureBlobFSWriteSettings(StoreWriteSettings): """Azure blobFS write settings. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -26,7 +24,7 @@ class AzureBlobFSWriteSettings(StoreWriteSettings): :type max_concurrent_connections: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). 
@@ -45,7 +43,7 @@ class AzureBlobFSWriteSettings(StoreWriteSettings): 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureBlobFSWriteSettings, self).__init__(**kwargs) - self.block_size_in_mb = kwargs.get('block_size_in_mb', None) + def __init__(self, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, block_size_in_mb=None): + super(AzureBlobFSWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior) + self.block_size_in_mb = block_size_in_mb self.type = 'AzureBlobFSWriteSettings' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings_py3.py deleted file mode 100644 index 351eae467183..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings_py3.py +++ /dev/null @@ -1,51 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .store_write_settings_py3 import StoreWriteSettings - - -class AzureBlobFSWriteSettings(StoreWriteSettings): - """Azure blobFS write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - :param type: Required. Constant filled by server. - :type type: str - :param block_size_in_mb: Indicates the block size(MB) when writing data to - blob. Type: integer (or Expression with resultType integer). 
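# Illustrative sketch of the write settings that pair with the sink above;
# block_size_in_mb tunes the block size used when writing data to blob
# storage (the value below is a placeholder).
from azure.mgmt.datafactory.models import AzureBlobFSWriteSettings

adls2_write = AzureBlobFSWriteSettings(block_size_in_mb=8)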
- :type block_size_in_mb: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, block_size_in_mb=None, **kwargs) -> None: - super(AzureBlobFSWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) - self.block_size_in_mb = block_size_in_mb - self.type = 'AzureBlobFSWriteSettings' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service.py index 5246e02ab9b4..43a62d85b5b6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service.py @@ -15,8 +15,6 @@ class AzureBlobStorageLinkedService(LinkedService): """The azure blob storage linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class AzureBlobStorageLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param connection_string: The connection string. It is mutually exclusive with sasUri, serviceEndpoint property. 
Type: string, SecureString or @@ -90,15 +88,15 @@ class AzureBlobStorageLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, } - def __init__(self, **kwargs): - super(AzureBlobStorageLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.account_key = kwargs.get('account_key', None) - self.sas_uri = kwargs.get('sas_uri', None) - self.sas_token = kwargs.get('sas_token', None) - self.service_endpoint = kwargs.get('service_endpoint', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, connection_string=None, account_key=None, sas_uri=None, sas_token=None, service_endpoint=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None): + super(AzureBlobStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.connection_string = connection_string + self.account_key = account_key + self.sas_uri = sas_uri + self.sas_token = sas_token + self.service_endpoint = service_endpoint + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential self.type = 'AzureBlobStorage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service_py3.py deleted file mode 100644 index ba0a511532b4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service_py3.py +++ /dev/null @@ -1,104 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzureBlobStorageLinkedService(LinkedService): - """The azure blob storage linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. 
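# Illustrative sketch: the blob storage linked service keeps three mutually
# exclusive connection options (connectionString, sasUri, serviceEndpoint);
# exactly one should be set. The connection string below is a placeholder.
from azure.mgmt.datafactory.models import AzureBlobStorageLinkedService

blob_ls = AzureBlobStorageLinkedService(
    connection_string='DefaultEndpointsProtocol=https;AccountName=<name>;AccountKey=<key>',
)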
- :type type: str - :param connection_string: The connection string. It is mutually exclusive - with sasUri, serviceEndpoint property. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param account_key: The Azure key vault secret reference of accountKey in - connection string. - :type account_key: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure Blob Storage resource. It is mutually - exclusive with connectionString, serviceEndpoint property. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type sas_uri: object - :param sas_token: The Azure key vault secret reference of sasToken in sas - uri. - :type sas_token: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param service_endpoint: Blob service endpoint of the Azure Blob Storage - resource. It is mutually exclusive with connectionString, sasUri property. - :type service_endpoint: str - :param service_principal_id: The ID of the service principal used to - authenticate against Azure SQL Data Warehouse. Type: string (or Expression - with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to - authenticate against Azure SQL Data Warehouse. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal - belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, - 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, - 'service_endpoint': {'key': 'typeProperties.serviceEndpoint', 'type': 'str'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, account_key=None, sas_uri=None, sas_token=None, service_endpoint: str=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential: str=None, **kwargs) -> None: - super(AzureBlobStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, 
annotations=annotations, **kwargs) - self.connection_string = connection_string - self.account_key = account_key - self.sas_uri = sas_uri - self.sas_token = sas_token - self.service_endpoint = service_endpoint - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.encrypted_credential = encrypted_credential - self.type = 'AzureBlobStorage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location.py index 1efbbeaec352..0dfe27f82df9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location.py @@ -15,12 +15,10 @@ class AzureBlobStorageLocation(DatasetLocation): """The location of azure blob dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. + :param type: Type of dataset storage location. :type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string) @@ -45,6 +43,6 @@ class AzureBlobStorageLocation(DatasetLocation): 'container': {'key': 'container', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureBlobStorageLocation, self).__init__(**kwargs) - self.container = kwargs.get('container', None) + def __init__(self, type, additional_properties=None, folder_path=None, file_name=None, container=None): + super(AzureBlobStorageLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name) + self.container = container diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location_py3.py deleted file mode 100644 index 63b122573039..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location_py3.py +++ /dev/null @@ -1,50 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_location_py3 import DatasetLocation - - -class AzureBlobStorageLocation(DatasetLocation): - """The location of azure blob dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. 
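# Illustrative sketch of the blob dataset location, which mirrors
# AzureBlobFSLocation but is keyed by container; the discriminator string is
# assumed, and the container and folder names are placeholders.
from azure.mgmt.datafactory.models import AzureBlobStorageLocation

blob_location = AzureBlobStorageLocation(
    type='AzureBlobStorageLocation',
    container='mycontainer',
    folder_path='input/2019',
)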
Type: string (or - Expression with resultType string). - :type file_name: object - :param container: Specify the container of azure blob. Type: string (or - Expression with resultType string). - :type container: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - 'container': {'key': 'container', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, container=None, **kwargs) -> None: - super(AzureBlobStorageLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) - self.container = container diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings.py index 42b11cc6de16..b9ae45f52d42 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings.py @@ -15,12 +15,10 @@ class AzureBlobStorageReadSettings(StoreReadSettings): """Azure blob read settings. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. The read setting type. + :param type: The read setting type. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. 
Type: integer (or Expression with resultType @@ -63,11 +61,11 @@ class AzureBlobStorageReadSettings(StoreReadSettings): 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureBlobStorageReadSettings, self).__init__(**kwargs) - self.recursive = kwargs.get('recursive', None) - self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) - self.wildcard_file_name = kwargs.get('wildcard_file_name', None) - self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + def __init__(self, type, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery=None, modified_datetime_start=None, modified_datetime_end=None): + super(AzureBlobStorageReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings_py3.py deleted file mode 100644 index 495ea16afd98..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings_py3.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .store_read_settings_py3 import StoreReadSettings - - -class AzureBlobStorageReadSettings(StoreReadSettings): - """Azure blob read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - :param wildcard_folder_path: Azure blob wildcardFolderPath. Type: string - (or Expression with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: Azure blob wildcardFileName. Type: string (or - Expression with resultType string). 
- :type wildcard_file_name: object - :param enable_partition_discovery: Indicates whether to enable partition - discovery. - :type enable_partition_discovery: bool - :param modified_datetime_start: The start of file's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: - string (or Expression with resultType string). - :type modified_datetime_end: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: - super(AzureBlobStorageReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.recursive = recursive - self.wildcard_folder_path = wildcard_folder_path - self.wildcard_file_name = wildcard_file_name - self.enable_partition_discovery = enable_partition_discovery - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings.py index c2834839f28a..b0be564d2fc4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings.py @@ -15,8 +15,6 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings): """Azure blob write settings. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -26,7 +24,7 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings): :type max_concurrent_connections: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). 
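For orientation, a minimal usage sketch (not part of the patch) of the keyword-style constructor this diff introduces for AzureBlobStorageWriteSettings; the argument values are hypothetical, and the import path is inferred from the file paths above:

    from azure.mgmt.datafactory.models import AzureBlobStorageWriteSettings

    # Every argument is optional in the rewritten signature; block_size_in_mb
    # maps to the 'blockSizeInMB' key shown in the attribute map below.
    write_settings = AzureBlobStorageWriteSettings(
        max_concurrent_connections=8,
        copy_behavior='PreserveHierarchy',  # assumed copy-behavior value
        block_size_in_mb=32,
    )
    assert write_settings.type == 'AzureBlobStorageWriteSettings'  # constant set by __init__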
@@ -45,7 +43,7 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings):
         'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'},
     }

-    def __init__(self, **kwargs):
-        super(AzureBlobStorageWriteSettings, self).__init__(**kwargs)
-        self.block_size_in_mb = kwargs.get('block_size_in_mb', None)
+    def __init__(self, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, block_size_in_mb=None):
+        super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior)
+        self.block_size_in_mb = block_size_in_mb
         self.type = 'AzureBlobStorageWriteSettings'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings_py3.py
deleted file mode 100644
index a37c83039a8c..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings_py3.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .store_write_settings_py3 import StoreWriteSettings
-
-
-class AzureBlobStorageWriteSettings(StoreWriteSettings):
-    """Azure blob write settings.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the source data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param copy_behavior: The type of copy behavior for copy sink.
-    :type copy_behavior: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param block_size_in_mb: Indicates the block size(MB) when writing data to
-     blob. Type: integer (or Expression with resultType integer).
-    :type block_size_in_mb: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'},
-    }
-
-    def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, block_size_in_mb=None, **kwargs) -> None:
-        super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs)
-        self.block_size_in_mb = block_size_in_mb
-        self.type = 'AzureBlobStorageWriteSettings'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity.py
index 308d445d1726..8d44febfbdb1 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity.py
@@ -15,12 +15,10 @@ class AzureDataExplorerCommandActivity(ExecutionActivity):
     """Azure Data Explorer command activity.

-    All required parameters must be populated in order to send to Azure.
-
     :param additional_properties: Unmatched properties from the message are
      deserialized this collection
     :type additional_properties: dict[str, object]
-    :param name: Required. Activity name.
+    :param name: Activity name.
     :type name: str
     :param description: Activity description.
     :type description: str
@@ -28,16 +26,15 @@ class AzureDataExplorerCommandActivity(ExecutionActivity):
     :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
     :param user_properties: Activity user properties.
     :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
-    :param type: Required. Constant filled by server.
+    :param type: Constant filled by server.
     :type type: str
     :param linked_service_name: Linked service reference.
     :type linked_service_name:
      ~azure.mgmt.datafactory.models.LinkedServiceReference
     :param policy: Activity policy.
     :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
-    :param command: Required. A control command, according to the Azure Data
-     Explorer command syntax. Type: string (or Expression with resultType
-     string).
+    :param command: A control command, according to the Azure Data Explorer
+     command syntax. Type: string (or Expression with resultType string).
     :type command: object
     :param command_timeout: Control command timeout. Type: string (or
      Expression with resultType string), pattern:
@@ -64,8 +61,8 @@ class AzureDataExplorerCommandActivity(ExecutionActivity):
         'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'},
     }

-    def __init__(self, **kwargs):
-        super(AzureDataExplorerCommandActivity, self).__init__(**kwargs)
-        self.command = kwargs.get('command', None)
-        self.command_timeout = kwargs.get('command_timeout', None)
+    def __init__(self, name, command, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, command_timeout=None):
+        super(AzureDataExplorerCommandActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy)
+        self.command = command
+        self.command_timeout = command_timeout
         self.type = 'AzureDataExplorerCommand'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity_py3.py
deleted file mode 100644
index 2f04dfddf08f..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity_py3.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .execution_activity_py3 import ExecutionActivity
-
-
-class AzureDataExplorerCommandActivity(ExecutionActivity):
-    """Azure Data Explorer command activity.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param name: Required. Activity name.
-    :type name: str
-    :param description: Activity description.
-    :type description: str
-    :param depends_on: Activity depends on condition.
-    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
-    :param user_properties: Activity user properties.
-    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param linked_service_name: Linked service reference.
-    :type linked_service_name:
-     ~azure.mgmt.datafactory.models.LinkedServiceReference
-    :param policy: Activity policy.
-    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
-    :param command: Required. A control command, according to the Azure Data
-     Explorer command syntax. Type: string (or Expression with resultType
-     string).
-    :type command: object
-    :param command_timeout: Control command timeout. Type: string (or
-     Expression with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..)
-    :type command_timeout: object
-    """
-
-    _validation = {
-        'name': {'required': True},
-        'type': {'required': True},
-        'command': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'name': {'key': 'name', 'type': 'str'},
-        'description': {'key': 'description', 'type': 'str'},
-        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
-        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
-        'command': {'key': 'typeProperties.command', 'type': 'object'},
-        'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'},
-    }
-
-    def __init__(self, *, name: str, command, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, command_timeout=None, **kwargs) -> None:
-        super(AzureDataExplorerCommandActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
-        self.command = command
-        self.command_timeout = command_timeout
-        self.type = 'AzureDataExplorerCommand'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service.py
index 5e5a9f7560c6..f14cc8b88946 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service.py
@@ -15,8 +15,6 @@ class AzureDataExplorerLinkedService(LinkedService):
     """Azure Data Explorer (Kusto) linked service.

-    All required parameters must be populated in order to send to Azure.
-
     :param additional_properties: Unmatched properties from the message are
      deserialized this collection
     :type additional_properties: dict[str, object]
@@ -31,25 +29,25 @@ class AzureDataExplorerLinkedService(LinkedService):
     :param annotations: List of tags that can be used for describing the
      linked service.
     :type annotations: list[object]
-    :param type: Required. Constant filled by server.
+    :param type: Constant filled by server.
     :type type: str
-    :param endpoint: Required. The endpoint of Azure Data Explorer (the
-     engine's endpoint). URL will be in the format
+    :param endpoint: The endpoint of Azure Data Explorer (the engine's
+     endpoint). URL will be in the format
     https://<clusterName>.<regionName>.kusto.windows.net. Type: string (or
      Expression with resultType string)
     :type endpoint: object
-    :param service_principal_id: Required. The ID of the service principal
-     used to authenticate against Azure Data Explorer. Type: string (or
-     Expression with resultType string).
+    :param service_principal_id: The ID of the service principal used to
+     authenticate against Azure Data Explorer. Type: string (or Expression with
+     resultType string).
     :type service_principal_id: object
-    :param service_principal_key: Required. The key of the service principal
-     used to authenticate against Kusto.
+    :param service_principal_key: The key of the service principal used to
+     authenticate against Kusto.
     :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
-    :param database: Required. Database name for connection. Type: string (or
-     Expression with resultType string).
+    :param database: Database name for connection. Type: string (or Expression
+     with resultType string).
     :type database: object
-    :param tenant: Required. The name or ID of the tenant to which the service
-     principal belongs. Type: string (or Expression with resultType string).
+    :param tenant: The name or ID of the tenant to which the service principal
+     belongs. Type: string (or Expression with resultType string).
     :type tenant: object
     """
@@ -76,11 +74,11 @@ class AzureDataExplorerLinkedService(LinkedService):
         'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
     }

-    def __init__(self, **kwargs):
-        super(AzureDataExplorerLinkedService, self).__init__(**kwargs)
-        self.endpoint = kwargs.get('endpoint', None)
-        self.service_principal_id = kwargs.get('service_principal_id', None)
-        self.service_principal_key = kwargs.get('service_principal_key', None)
-        self.database = kwargs.get('database', None)
-        self.tenant = kwargs.get('tenant', None)
+    def __init__(self, endpoint, service_principal_id, service_principal_key, database, tenant, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None):
+        super(AzureDataExplorerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations)
+        self.endpoint = endpoint
+        self.service_principal_id = service_principal_id
+        self.service_principal_key = service_principal_key
+        self.database = database
+        self.tenant = tenant
         self.type = 'AzureDataExplorer'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service_py3.py
deleted file mode 100644
index 3cd8ab9c3c19..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service_py3.py
+++ /dev/null
@@ -1,86 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .linked_service_py3 import LinkedService
-
-
-class AzureDataExplorerLinkedService(LinkedService):
-    """Azure Data Explorer (Kusto) linked service.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str,
-     ~azure.mgmt.datafactory.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the
-     linked service.
-    :type annotations: list[object]
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param endpoint: Required. The endpoint of Azure Data Explorer (the
-     engine's endpoint). URL will be in the format
-     https://<clusterName>.<regionName>.kusto.windows.net. Type: string (or
-     Expression with resultType string)
-    :type endpoint: object
-    :param service_principal_id: Required. The ID of the service principal
-     used to authenticate against Azure Data Explorer. Type: string (or
-     Expression with resultType string).
-    :type service_principal_id: object
-    :param service_principal_key: Required. The key of the service principal
-     used to authenticate against Kusto.
-    :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
-    :param database: Required. Database name for connection. Type: string (or
-     Expression with resultType string).
-    :type database: object
-    :param tenant: Required. The name or ID of the tenant to which the service
-     principal belongs. Type: string (or Expression with resultType string).
-    :type tenant: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-        'endpoint': {'required': True},
-        'service_principal_id': {'required': True},
-        'service_principal_key': {'required': True},
-        'database': {'required': True},
-        'tenant': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
-        'description': {'key': 'description', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
-        'annotations': {'key': 'annotations', 'type': '[object]'},
-        'type': {'key': 'type', 'type': 'str'},
-        'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'},
-        'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
-        'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
-        'database': {'key': 'typeProperties.database', 'type': 'object'},
-        'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
-    }
-
-    def __init__(self, *, endpoint, service_principal_id, service_principal_key, database, tenant, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None:
-        super(AzureDataExplorerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
-        self.endpoint = endpoint
-        self.service_principal_id = service_principal_id
-        self.service_principal_key = service_principal_key
-        self.database = database
-        self.tenant = tenant
-        self.type = 'AzureDataExplorer'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink.py
index 5c204ab769e4..4fdec7ed5ea9 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink.py
@@ -15,8 +15,6 @@ class AzureDataExplorerSink(CopySink):
     """A copy activity Azure Data Explorer sink.

-    All required parameters must be populated in order to send to Azure.
-
     :param additional_properties: Unmatched properties from the message are
      deserialized this collection
     :type additional_properties: dict[str, object]
@@ -38,7 +36,7 @@ class AzureDataExplorerSink(CopySink):
      for the sink data store. Type: integer (or Expression with resultType
      integer).
     :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
+    :param type: Constant filled by server.
     :type type: str
     :param ingestion_mapping_name: A name of a pre-created csv mapping that
      was defined on the target Kusto table. Type: string.
@@ -68,9 +66,9 @@ class AzureDataExplorerSink(CopySink):
         'flush_immediately': {'key': 'flushImmediately', 'type': 'object'},
     }

-    def __init__(self, **kwargs):
-        super(AzureDataExplorerSink, self).__init__(**kwargs)
-        self.ingestion_mapping_name = kwargs.get('ingestion_mapping_name', None)
-        self.ingestion_mapping_as_json = kwargs.get('ingestion_mapping_as_json', None)
-        self.flush_immediately = kwargs.get('flush_immediately', None)
+    def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ingestion_mapping_name=None, ingestion_mapping_as_json=None, flush_immediately=None):
+        super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections)
+        self.ingestion_mapping_name = ingestion_mapping_name
+        self.ingestion_mapping_as_json = ingestion_mapping_as_json
+        self.flush_immediately = flush_immediately
         self.type = 'AzureDataExplorerSink'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink_py3.py
deleted file mode 100644
index e5cb67bc79b8..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink_py3.py
+++ /dev/null
@@ -1,76 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .copy_sink_py3 import CopySink
-
-
-class AzureDataExplorerSink(CopySink):
-    """A copy activity Azure Data Explorer sink.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param write_batch_size: Write batch size. Type: integer (or Expression
-     with resultType integer), minimum: 0.
-    :type write_batch_size: object
-    :param write_batch_timeout: Write batch timeout. Type: string (or
-     Expression with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type write_batch_timeout: object
-    :param sink_retry_count: Sink retry count. Type: integer (or Expression
-     with resultType integer).
-    :type sink_retry_count: object
-    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
-     resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type sink_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the sink data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param ingestion_mapping_name: A name of a pre-created csv mapping that
-     was defined on the target Kusto table. Type: string.
-    :type ingestion_mapping_name: object
-    :param ingestion_mapping_as_json: An explicit column mapping description
-     provided in a json format. Type: string.
-    :type ingestion_mapping_as_json: object
-    :param flush_immediately: If set to true, any aggregation will be skipped.
-     Default is false. Type: boolean.
-    :type flush_immediately: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
-        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
-        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
-        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'},
-        'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'},
-        'flush_immediately': {'key': 'flushImmediately', 'type': 'object'},
-    }
-
-    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ingestion_mapping_name=None, ingestion_mapping_as_json=None, flush_immediately=None, **kwargs) -> None:
-        super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
-        self.ingestion_mapping_name = ingestion_mapping_name
-        self.ingestion_mapping_as_json = ingestion_mapping_as_json
-        self.flush_immediately = flush_immediately
-        self.type = 'AzureDataExplorerSink'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source.py
index 2caaa517efd5..18c6017f6ad8 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source.py
@@ -15,8 +15,6 @@ class AzureDataExplorerSource(CopySource):
     """A copy activity Azure Data Explorer (Kusto) source.

-    All required parameters must be populated in order to send to Azure.
-
     :param additional_properties: Unmatched properties from the message are
      deserialized this collection
     :type additional_properties: dict[str, object]
@@ -31,10 +29,10 @@ class AzureDataExplorerSource(CopySource):
      for the source data store. Type: integer (or Expression with resultType
      integer).
     :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
+    :param type: Constant filled by server.
     :type type: str
-    :param query: Required. Database query. Should be a Kusto Query Language
-     (KQL) query. Type: string (or Expression with resultType string).
+    :param query: Database query. Should be a Kusto Query Language (KQL)
+     query. Type: string (or Expression with resultType string).
     :type query: object
     :param no_truncation: The name of the Boolean option that controls whether
      truncation is applied to result-sets that go beyond a certain row-count
@@ -62,9 +60,9 @@ class AzureDataExplorerSource(CopySource):
         'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
     }

-    def __init__(self, **kwargs):
-        super(AzureDataExplorerSource, self).__init__(**kwargs)
-        self.query = kwargs.get('query', None)
-        self.no_truncation = kwargs.get('no_truncation', None)
-        self.query_timeout = kwargs.get('query_timeout', None)
+    def __init__(self, query, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, no_truncation=None, query_timeout=None):
+        super(AzureDataExplorerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections)
+        self.query = query
+        self.no_truncation = no_truncation
+        self.query_timeout = query_timeout
         self.type = 'AzureDataExplorerSource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source_py3.py
deleted file mode 100644
index 55a6bc78ee04..000000000000
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source_py3.py
+++ /dev/null
@@ -1,70 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-from .copy_source_py3 import CopySource
-
-
-class AzureDataExplorerSource(CopySource):
-    """A copy activity Azure Data Explorer (Kusto) source.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
-    :param source_retry_count: Source retry count. Type: integer (or
-     Expression with resultType integer).
-    :type source_retry_count: object
-    :param source_retry_wait: Source retry wait. Type: string (or Expression
-     with resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
-    :type source_retry_wait: object
-    :param max_concurrent_connections: The maximum concurrent connection count
-     for the source data store. Type: integer (or Expression with resultType
-     integer).
-    :type max_concurrent_connections: object
-    :param type: Required. Constant filled by server.
-    :type type: str
-    :param query: Required. Database query. Should be a Kusto Query Language
-     (KQL) query. Type: string (or Expression with resultType string).
-    :type query: object
-    :param no_truncation: The name of the Boolean option that controls whether
-     truncation is applied to result-sets that go beyond a certain row-count
-     limit.
-    :type no_truncation: object
-    :param query_timeout: Query timeout. Type: string (or Expression with
-     resultType string), pattern:
-     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..
-    :type query_timeout: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-        'query': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
-        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'query': {'key': 'query', 'type': 'object'},
-        'no_truncation': {'key': 'noTruncation', 'type': 'object'},
-        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
-    }
-
-    def __init__(self, *, query, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, no_truncation=None, query_timeout=None, **kwargs) -> None:
-        super(AzureDataExplorerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
-        self.query = query
-        self.no_truncation = no_truncation
-        self.query_timeout = query_timeout
-        self.type = 'AzureDataExplorerSource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset.py
index 594d22171f48..ed3ecf7687d4 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset.py
@@ -15,8 +15,6 @@ class AzureDataExplorerTableDataset(Dataset):
     """The Azure Data Explorer (Kusto) dataset.

-    All required parameters must be populated in order to send to Azure.
-
     :param additional_properties: Unmatched properties from the message are
      deserialized this collection
     :type additional_properties: dict[str, object]
@@ -29,7 +27,7 @@ class AzureDataExplorerTableDataset(Dataset):
      dataset. Type: array (or Expression with resultType array), itemType:
      DatasetSchemaDataElement.
     :type schema: object
-    :param linked_service_name: Required. Linked service reference.
+    :param linked_service_name: Linked service reference.
     :type linked_service_name:
      ~azure.mgmt.datafactory.models.LinkedServiceReference
     :param parameters: Parameters for dataset.
@@ -41,7 +39,7 @@ class AzureDataExplorerTableDataset(Dataset):
     :param folder: The folder that this Dataset is in. If not specified,
      Dataset will appear at the root level.
     :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
-    :param type: Required. Constant filled by server.
+    :param type: Constant filled by server.
     :type type: str
     :param table: The table name of the Azure Data Explorer database. Type:
      string (or Expression with resultType string).
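Between the docstring hunk above and the constructor hunk below, a short sketch of how the rewritten AzureDataExplorerTableDataset constructor would be called; the linked-service reference name is hypothetical, and LinkedServiceReference is assumed importable from the same models package:

    from azure.mgmt.datafactory.models import (
        AzureDataExplorerTableDataset,
        LinkedServiceReference,
    )

    # linked_service_name is the only required argument in the new signature;
    # 'table' is serialized to typeProperties.table per the attribute map below.
    kusto_dataset = AzureDataExplorerTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='MyKustoLinkedService'),
        table='StormEvents',
    )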
@@ -66,7 +64,7 @@ class AzureDataExplorerTableDataset(Dataset): 'table': {'key': 'typeProperties.table', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureDataExplorerTableDataset, self).__init__(**kwargs) - self.table = kwargs.get('table', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table=None): + super(AzureDataExplorerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table = table self.type = 'AzureDataExplorerTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset_py3.py deleted file mode 100644 index d36b0f39c2fe..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class AzureDataExplorerTableDataset(Dataset): - """The Azure Data Explorer (Kusto) dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table: The table name of the Azure Data Explorer database. Type: - string (or Expression with resultType string). 
- :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table=None, **kwargs) -> None: - super(AzureDataExplorerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table = table - self.type = 'AzureDataExplorerTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service.py index 0381e1b1de65..bed62664372c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service.py @@ -15,8 +15,6 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService): """Azure Data Lake Analytics linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,10 +29,10 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param account_name: Required. The Azure Data Lake Analytics account name. - Type: string (or Expression with resultType string). + :param account_name: The Azure Data Lake Analytics account name. Type: + string (or Expression with resultType string). :type account_name: object :param service_principal_id: The ID of the application used to authenticate against the Azure Data Lake Analytics account. Type: string @@ -43,8 +41,8 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService): :param service_principal_key: The Key of the application used to authenticate against the Azure Data Lake Analytics account. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: Required. The name or ID of the tenant to which the service - principal belongs. Type: string (or Expression with resultType string). + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). 
:type tenant: object :param subscription_id: Data Lake Analytics account subscription ID (if different from Data Factory account). Type: string (or Expression with @@ -86,14 +84,14 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureDataLakeAnalyticsLinkedService, self).__init__(**kwargs) - self.account_name = kwargs.get('account_name', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.subscription_id = kwargs.get('subscription_id', None) - self.resource_group_name = kwargs.get('resource_group_name', None) - self.data_lake_analytics_uri = kwargs.get('data_lake_analytics_uri', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, account_name, tenant, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, subscription_id=None, resource_group_name=None, data_lake_analytics_uri=None, encrypted_credential=None): + super(AzureDataLakeAnalyticsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.account_name = account_name + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.subscription_id = subscription_id + self.resource_group_name = resource_group_name + self.data_lake_analytics_uri = data_lake_analytics_uri + self.encrypted_credential = encrypted_credential self.type = 'AzureDataLakeAnalytics' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service_py3.py deleted file mode 100644 index 93250e2cef76..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service_py3.py +++ /dev/null @@ -1,99 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzureDataLakeAnalyticsLinkedService(LinkedService): - """Azure Data Lake Analytics linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param account_name: Required. The Azure Data Lake Analytics account name. - Type: string (or Expression with resultType string). - :type account_name: object - :param service_principal_id: The ID of the application used to - authenticate against the Azure Data Lake Analytics account. Type: string - (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The Key of the application used to - authenticate against the Azure Data Lake Analytics account. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: Required. The name or ID of the tenant to which the service - principal belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param subscription_id: Data Lake Analytics account subscription ID (if - different from Data Factory account). Type: string (or Expression with - resultType string). - :type subscription_id: object - :param resource_group_name: Data Lake Analytics account resource group - name (if different from Data Factory account). Type: string (or Expression - with resultType string). - :type resource_group_name: object - :param data_lake_analytics_uri: Azure Data Lake Analytics URI Type: string - (or Expression with resultType string). - :type data_lake_analytics_uri: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'account_name': {'required': True}, - 'tenant': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, - 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, - 'data_lake_analytics_uri': {'key': 'typeProperties.dataLakeAnalyticsUri', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, account_name, tenant, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, subscription_id=None, resource_group_name=None, data_lake_analytics_uri=None, encrypted_credential=None, **kwargs) -> None: - super(AzureDataLakeAnalyticsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.account_name = account_name - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.subscription_id = subscription_id - self.resource_group_name = resource_group_name - self.data_lake_analytics_uri = data_lake_analytics_uri - self.encrypted_credential = encrypted_credential - self.type = 'AzureDataLakeAnalytics' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset.py index de15057f78ed..5f8b1fa7e1ea 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset.py @@ -15,8 +15,6 @@ class AzureDataLakeStoreDataset(Dataset): """Azure Data Lake Store dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class AzureDataLakeStoreDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class AzureDataLakeStoreDataset(Dataset): :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param folder_path: Path to the folder in the Azure Data Lake Store. Type: string (or Expression with resultType string). @@ -77,10 +75,10 @@ class AzureDataLakeStoreDataset(Dataset): 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } - def __init__(self, **kwargs): - super(AzureDataLakeStoreDataset, self).__init__(**kwargs) - self.folder_path = kwargs.get('folder_path', None) - self.file_name = kwargs.get('file_name', None) - self.format = kwargs.get('format', None) - self.compression = kwargs.get('compression', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, format=None, compression=None): + super(AzureDataLakeStoreDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.folder_path = folder_path + self.file_name = file_name + self.format = format + self.compression = compression self.type = 'AzureDataLakeStoreFile' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset_py3.py deleted file mode 100644 index d2df0ffebe7e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset_py3.py +++ /dev/null @@ -1,86 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class AzureDataLakeStoreDataset(Dataset): - """Azure Data Lake Store dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. 
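The hunk above shows the pattern this whole patch repeats: the Python 2 model keeps its _attribute_map, gains an explicit __init__ in place of the **kwargs-based one, and the *_py3.py twin is deleted. Note that _validation still marks linked_service_name and type as required even though the regenerated docstrings drop the "Required." prefix. A minimal sketch of the resulting call style, with placeholder reference and path names:

from azure.mgmt.datafactory.models import (
    AzureDataLakeStoreDataset,
    LinkedServiceReference,
)

# The dataset points at its linked service by name, not by object.
adls_ds = AzureDataLakeStoreDataset(
    linked_service_name=LinkedServiceReference(reference_name='AdlsLinkedService'),
    folder_path='raw/events',    # serialized as typeProperties.folderPath
    file_name='events.csv',      # serialized as typeProperties.fileName
)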
- :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param folder_path: Path to the folder in the Azure Data Lake Store. Type: - string (or Expression with resultType string). - :type folder_path: object - :param file_name: The name of the file in the Azure Data Lake Store. Type: - string (or Expression with resultType string). - :type file_name: object - :param format: The format of the Data Lake Store. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used for the item(s) in - the Azure Data Lake Store. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, format=None, compression=None, **kwargs) -> None: - super(AzureDataLakeStoreDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.folder_path = folder_path - self.file_name = file_name - self.format = format - self.compression = compression - self.type = 'AzureDataLakeStoreFile' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service.py index f08e086cb500..7ee881709aaf 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service.py @@ -15,8 +15,6 @@ class AzureDataLakeStoreLinkedService(LinkedService): """Azure Data Lake Store linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,10 +29,10 @@ class AzureDataLakeStoreLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. 
:type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param data_lake_store_uri: Required. Data Lake Store service URI. Type: - string (or Expression with resultType string). + :param data_lake_store_uri: Data Lake Store service URI. Type: string (or + Expression with resultType string). :type data_lake_store_uri: object :param service_principal_id: The ID of the application used to authenticate against the Azure Data Lake Store account. Type: string (or @@ -85,14 +83,14 @@ class AzureDataLakeStoreLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureDataLakeStoreLinkedService, self).__init__(**kwargs) - self.data_lake_store_uri = kwargs.get('data_lake_store_uri', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.account_name = kwargs.get('account_name', None) - self.subscription_id = kwargs.get('subscription_id', None) - self.resource_group_name = kwargs.get('resource_group_name', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, data_lake_store_uri, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, tenant=None, account_name=None, subscription_id=None, resource_group_name=None, encrypted_credential=None): + super(AzureDataLakeStoreLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.data_lake_store_uri = data_lake_store_uri + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.account_name = account_name + self.subscription_id = subscription_id + self.resource_group_name = resource_group_name + self.encrypted_credential = encrypted_credential self.type = 'AzureDataLakeStore' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service_py3.py deleted file mode 100644 index 7b8ab293c0cf..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service_py3.py +++ /dev/null @@ -1,98 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzureDataLakeStoreLinkedService(LinkedService): - """Azure Data Lake Store linked service. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param data_lake_store_uri: Required. Data Lake Store service URI. Type: - string (or Expression with resultType string). - :type data_lake_store_uri: object - :param service_principal_id: The ID of the application used to - authenticate against the Azure Data Lake Store account. Type: string (or - Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The Key of the application used to - authenticate against the Azure Data Lake Store account. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal - belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param account_name: Data Lake Store account name. Type: string (or - Expression with resultType string). - :type account_name: object - :param subscription_id: Data Lake Store account subscription ID (if - different from Data Factory account). Type: string (or Expression with - resultType string). - :type subscription_id: object - :param resource_group_name: Data Lake Store account resource group name - (if different from Data Factory account). Type: string (or Expression with - resultType string). - :type resource_group_name: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
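Taken together, the linked service above needs only data_lake_store_uri; the service principal triple is optional in the schema but needed in practice for unattended AAD authentication. A hedged sketch with placeholder values (the webhdfs-style URI is the commonly documented shape, shown here only as an example):

from azure.mgmt.datafactory.models import (
    AzureDataLakeStoreLinkedService,
    SecureString,
)

adls_ls = AzureDataLakeStoreLinkedService(
    data_lake_store_uri='https://<account>.azuredatalakestore.net/webhdfs/v1',  # placeholder
    service_principal_id='<application-id>',                 # placeholder
    service_principal_key=SecureString(value='<app-key>'),   # placeholder
    tenant='<aad-tenant-id>',                                # placeholder
)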
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'data_lake_store_uri': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'data_lake_store_uri': {'key': 'typeProperties.dataLakeStoreUri', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, - 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, - 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, data_lake_store_uri, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, tenant=None, account_name=None, subscription_id=None, resource_group_name=None, encrypted_credential=None, **kwargs) -> None: - super(AzureDataLakeStoreLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.data_lake_store_uri = data_lake_store_uri - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.account_name = account_name - self.subscription_id = subscription_id - self.resource_group_name = resource_group_name - self.encrypted_credential = encrypted_credential - self.type = 'AzureDataLakeStore' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location.py index a4bf521a2005..370f0ca78637 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location.py @@ -15,12 +15,10 @@ class AzureDataLakeStoreLocation(DatasetLocation): """The location of azure data lake store dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. + :param type: Type of dataset storage location. :type type: str :param folder_path: Specify the folder path of dataset. 
Type: string (or Expression with resultType string) @@ -34,12 +32,5 @@ class AzureDataLakeStoreLocation(DatasetLocation): 'type': {'required': True}, } - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(AzureDataLakeStoreLocation, self).__init__(**kwargs) + def __init__(self, type, additional_properties=None, folder_path=None, file_name=None): + super(AzureDataLakeStoreLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location_py3.py deleted file mode 100644 index e7955731fc31..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location_py3.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_location_py3 import DatasetLocation - - -class AzureDataLakeStoreLocation(DatasetLocation): - """The location of azure data lake store dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). 
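Unlike the sink and source models, the location keeps type as a caller-supplied required field: it is the discriminator for DatasetLocation rather than a server-filled constant. A minimal sketch, with illustrative paths:

from azure.mgmt.datafactory.models import AzureDataLakeStoreLocation

loc = AzureDataLakeStoreLocation(
    type='AzureDataLakeStoreLocation',  # required per _validation
    folder_path='curated/2019',         # placeholder
    file_name='part-0000.csv',          # placeholder
)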
- :type file_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: - super(AzureDataLakeStoreLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings.py index 213d69966baf..fd9d1e2a18ac 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings.py @@ -15,12 +15,10 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): """Azure data lake store read settings. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. The read setting type. + :param type: The read setting type. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType @@ -63,11 +61,11 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureDataLakeStoreReadSettings, self).__init__(**kwargs) - self.recursive = kwargs.get('recursive', None) - self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) - self.wildcard_file_name = kwargs.get('wildcard_file_name', None) - self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + def __init__(self, type, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery=None, modified_datetime_start=None, modified_datetime_end=None): + super(AzureDataLakeStoreReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings_py3.py deleted file mode 100644 index b4bccc5e78a3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings_py3.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# 
-------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .store_read_settings_py3 import StoreReadSettings - - -class AzureDataLakeStoreReadSettings(StoreReadSettings): - """Azure data lake store read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - :param wildcard_folder_path: ADLS wildcardFolderPath. Type: string (or - Expression with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: ADLS wildcardFileName. Type: string (or - Expression with resultType string). - :type wildcard_file_name: object - :param enable_partition_discovery: Indicates whether to enable partition - discovery. - :type enable_partition_discovery: bool - :param modified_datetime_start: The start of file's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: - string (or Expression with resultType string). 
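The read settings above drive file enumeration for a copy source: recursion, wildcard matching, partition discovery, and a modified-time window. A sketch under the same assumptions as the earlier examples (keyword calls, placeholder values; the datetime strings are illustrative ISO-8601 literals):

from azure.mgmt.datafactory.models import AzureDataLakeStoreReadSettings

read_settings = AzureDataLakeStoreReadSettings(
    type='AzureDataLakeStoreReadSettings',   # still required by _validation
    recursive=True,
    wildcard_folder_path='raw/*/2019',       # placeholder
    wildcard_file_name='*.csv',              # placeholder
    enable_partition_discovery=False,
    modified_datetime_start='2019-01-01T00:00:00Z',
    modified_datetime_end='2019-06-01T00:00:00Z',
)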
- :type modified_datetime_end: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: - super(AzureDataLakeStoreReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.recursive = recursive - self.wildcard_folder_path = wildcard_folder_path - self.wildcard_file_name = wildcard_file_name - self.enable_partition_discovery = enable_partition_discovery - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py index e882698c2ca6..89d27bc70416 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py @@ -15,8 +15,6 @@ class AzureDataLakeStoreSink(CopySink): """A copy activity Azure Data Lake Store sink. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -38,7 +36,7 @@ class AzureDataLakeStoreSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param copy_behavior: The type of copy behavior for copy sink. 
:type copy_behavior: object @@ -62,8 +60,8 @@ class AzureDataLakeStoreSink(CopySink): 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureDataLakeStoreSink, self).__init__(**kwargs) - self.copy_behavior = kwargs.get('copy_behavior', None) - self.enable_adls_single_file_parallel = kwargs.get('enable_adls_single_file_parallel', None) + def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, enable_adls_single_file_parallel=None): + super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.copy_behavior = copy_behavior + self.enable_adls_single_file_parallel = enable_adls_single_file_parallel self.type = 'AzureDataLakeStoreSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py deleted file mode 100644 index 0f96cea725e2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class AzureDataLakeStoreSink(CopySink): - """A copy activity Azure Data Lake Store sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - :param enable_adls_single_file_parallel: Single File Parallel. 
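copy_behavior and enable_adls_single_file_parallel are the only ADLS-specific knobs on this sink; everything else is inherited from CopySink. A sketch, assuming the copy-behavior names the service commonly accepts ('PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles'):

from azure.mgmt.datafactory.models import AzureDataLakeStoreSink

sink = AzureDataLakeStoreSink(
    copy_behavior='PreserveHierarchy',       # assumed service-side value
    enable_adls_single_file_parallel=True,   # parallel write of one large file
)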
- :type enable_adls_single_file_parallel: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, enable_adls_single_file_parallel=None, **kwargs) -> None: - super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.copy_behavior = copy_behavior - self.enable_adls_single_file_parallel = enable_adls_single_file_parallel - self.type = 'AzureDataLakeStoreSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source.py index 9d2046049a30..e2c97c39e570 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source.py @@ -15,8 +15,6 @@ class AzureDataLakeStoreSource(CopySource): """A copy activity Azure Data Lake source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class AzureDataLakeStoreSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param recursive: If true, files under the folder path will be read recursively. Default is true. 
Type: boolean (or Expression with resultType @@ -52,7 +50,7 @@ class AzureDataLakeStoreSource(CopySource): 'recursive': {'key': 'recursive', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureDataLakeStoreSource, self).__init__(**kwargs) - self.recursive = kwargs.get('recursive', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None): + super(AzureDataLakeStoreSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.recursive = recursive self.type = 'AzureDataLakeStoreSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source_py3.py deleted file mode 100644 index e1d883972220..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source_py3.py +++ /dev/null @@ -1,58 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class AzureDataLakeStoreSource(CopySource): - """A copy activity Azure Data Lake source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). 
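The source and sink only do something once they are mounted on a copy activity. A sketch wiring the two ADLS models into a CopyActivity, with placeholder dataset reference names:

from azure.mgmt.datafactory.models import (
    AzureDataLakeStoreSink,
    AzureDataLakeStoreSource,
    CopyActivity,
    DatasetReference,
)

copy = CopyActivity(
    name='CopyFromAdls',
    inputs=[DatasetReference(reference_name='AdlsInputDataset')],    # placeholder
    outputs=[DatasetReference(reference_name='AdlsOutputDataset')],  # placeholder
    source=AzureDataLakeStoreSource(recursive=True),
    sink=AzureDataLakeStoreSink(copy_behavior='PreserveHierarchy'),
)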
- :type recursive: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, **kwargs) -> None: - super(AzureDataLakeStoreSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.recursive = recursive - self.type = 'AzureDataLakeStoreSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings.py index 6cf8deeacb07..56dab7c2ddb2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings.py @@ -15,8 +15,6 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): """Azure data lake store write settings. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -26,7 +24,7 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): :type max_concurrent_connections: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str """ @@ -34,13 +32,6 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): 'type': {'required': True}, } - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(AzureDataLakeStoreWriteSettings, self).__init__(**kwargs) + def __init__(self, additional_properties=None, max_concurrent_connections=None, copy_behavior=None): + super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior) self.type = 'AzureDataLakeStoreWriteSettings' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings_py3.py deleted file mode 100644 index 0b9a0e38e41c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings_py3.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. 
All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .store_write_settings_py3 import StoreWriteSettings - - -class AzureDataLakeStoreWriteSettings(StoreWriteSettings): - """Azure data lake store write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: - super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) - self.type = 'AzureDataLakeStoreWriteSettings' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py index 6cc4c12674cb..15d44e98a22b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py @@ -15,8 +15,6 @@ class AzureDatabricksLinkedService(LinkedService): """Azure Databricks linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,14 +29,14 @@ class AzureDatabricksLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param domain: Required. .azuredatabricks.net, domain name of your + :param domain: .azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Expression with resultType string). :type domain: object - :param access_token: Required. Access token for databricks REST API. Refer - to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: + :param access_token: Access token for databricks REST API. Refer to + https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression with resultType string). 
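The Databricks linked service above either targets an existing cluster (existing_cluster_id) or describes a job cluster through the new_cluster_* fields. A sketch of the job-cluster variant; the domain URL, Spark version, and node type below are placeholders in the formats the docstrings describe, not values from the patch:

from azure.mgmt.datafactory.models import (
    AzureDatabricksLinkedService,
    SecureString,
)

dbx_ls = AzureDatabricksLinkedService(
    domain='https://westeurope.azuredatabricks.net',       # placeholder
    access_token=SecureString(value='<databricks-pat>'),   # placeholder
    new_cluster_version='5.3.x-scala2.11',                 # placeholder
    new_cluster_num_of_worker='1:4',    # autoscale 1..4, per the docstring format
    new_cluster_node_type='Standard_DS3_v2',               # placeholder
)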
:type access_token: ~azure.mgmt.datafactory.models.SecretBase :param existing_cluster_id: The id of an existing cluster that will be @@ -108,19 +106,19 @@ class AzureDatabricksLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureDatabricksLinkedService, self).__init__(**kwargs) - self.domain = kwargs.get('domain', None) - self.access_token = kwargs.get('access_token', None) - self.existing_cluster_id = kwargs.get('existing_cluster_id', None) - self.new_cluster_version = kwargs.get('new_cluster_version', None) - self.new_cluster_num_of_worker = kwargs.get('new_cluster_num_of_worker', None) - self.new_cluster_node_type = kwargs.get('new_cluster_node_type', None) - self.new_cluster_spark_conf = kwargs.get('new_cluster_spark_conf', None) - self.new_cluster_spark_env_vars = kwargs.get('new_cluster_spark_env_vars', None) - self.new_cluster_custom_tags = kwargs.get('new_cluster_custom_tags', None) - self.new_cluster_driver_node_type = kwargs.get('new_cluster_driver_node_type', None) - self.new_cluster_init_scripts = kwargs.get('new_cluster_init_scripts', None) - self.new_cluster_enable_elastic_disk = kwargs.get('new_cluster_enable_elastic_disk', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, domain, access_token, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, existing_cluster_id=None, new_cluster_version=None, new_cluster_num_of_worker=None, new_cluster_node_type=None, new_cluster_spark_conf=None, new_cluster_spark_env_vars=None, new_cluster_custom_tags=None, new_cluster_driver_node_type=None, new_cluster_init_scripts=None, new_cluster_enable_elastic_disk=None, encrypted_credential=None): + super(AzureDatabricksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.domain = domain + self.access_token = access_token + self.existing_cluster_id = existing_cluster_id + self.new_cluster_version = new_cluster_version + self.new_cluster_num_of_worker = new_cluster_num_of_worker + self.new_cluster_node_type = new_cluster_node_type + self.new_cluster_spark_conf = new_cluster_spark_conf + self.new_cluster_spark_env_vars = new_cluster_spark_env_vars + self.new_cluster_custom_tags = new_cluster_custom_tags + self.new_cluster_driver_node_type = new_cluster_driver_node_type + self.new_cluster_init_scripts = new_cluster_init_scripts + self.new_cluster_enable_elastic_disk = new_cluster_enable_elastic_disk + self.encrypted_credential = encrypted_credential self.type = 'AzureDatabricks' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service_py3.py deleted file mode 100644 index 6299dac1e3f2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service_py3.py +++ /dev/null @@ -1,126 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. 
-# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzureDatabricksLinkedService(LinkedService): - """Azure Databricks linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param domain: Required. .azuredatabricks.net, domain name of your - Databricks deployment. Type: string (or Expression with resultType - string). - :type domain: object - :param access_token: Required. Access token for databricks REST API. Refer - to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: - string (or Expression with resultType string). - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param existing_cluster_id: The id of an existing cluster that will be - used for all runs of this job. Type: string (or Expression with resultType - string). - :type existing_cluster_id: object - :param new_cluster_version: The Spark version of new cluster. Type: string - (or Expression with resultType string). - :type new_cluster_version: object - :param new_cluster_num_of_worker: Number of worker nodes that new cluster - should have. A string formatted Int32, like '1' means numOfWorker is 1 or - '1:10' means auto-scale from 1 as min and 10 as max. Type: string (or - Expression with resultType string). - :type new_cluster_num_of_worker: object - :param new_cluster_node_type: The node types of new cluster. Type: string - (or Expression with resultType string). - :type new_cluster_node_type: object - :param new_cluster_spark_conf: A set of optional, user-specified Spark - configuration key-value pairs. - :type new_cluster_spark_conf: dict[str, object] - :param new_cluster_spark_env_vars: A set of optional, user-specified Spark - environment variables key-value pairs. - :type new_cluster_spark_env_vars: dict[str, object] - :param new_cluster_custom_tags: Additional tags for cluster resources. - :type new_cluster_custom_tags: dict[str, object] - :param new_cluster_driver_node_type: The driver node type for the new - cluster. Type: string (or Expression with resultType string). - :type new_cluster_driver_node_type: object - :param new_cluster_init_scripts: User-defined initialization scripts for - the new cluster. Type: array of strings (or Expression with resultType - array of strings). - :type new_cluster_init_scripts: object - :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new - cluster. Type: boolean (or Expression with resultType boolean). - :type new_cluster_enable_elastic_disk: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. 
Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'domain': {'required': True}, - 'access_token': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, - 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, - 'new_cluster_num_of_worker': {'key': 'typeProperties.newClusterNumOfWorker', 'type': 'object'}, - 'new_cluster_node_type': {'key': 'typeProperties.newClusterNodeType', 'type': 'object'}, - 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, - 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, - 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, - 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, - 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, - 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, domain, access_token, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, existing_cluster_id=None, new_cluster_version=None, new_cluster_num_of_worker=None, new_cluster_node_type=None, new_cluster_spark_conf=None, new_cluster_spark_env_vars=None, new_cluster_custom_tags=None, new_cluster_driver_node_type=None, new_cluster_init_scripts=None, new_cluster_enable_elastic_disk=None, encrypted_credential=None, **kwargs) -> None: - super(AzureDatabricksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.domain = domain - self.access_token = access_token - self.existing_cluster_id = existing_cluster_id - self.new_cluster_version = new_cluster_version - self.new_cluster_num_of_worker = new_cluster_num_of_worker - self.new_cluster_node_type = new_cluster_node_type - self.new_cluster_spark_conf = new_cluster_spark_conf - self.new_cluster_spark_env_vars = new_cluster_spark_env_vars - self.new_cluster_custom_tags = new_cluster_custom_tags - self.new_cluster_driver_node_type = new_cluster_driver_node_type - self.new_cluster_init_scripts = new_cluster_init_scripts - self.new_cluster_enable_elastic_disk = new_cluster_enable_elastic_disk - self.encrypted_credential = encrypted_credential - self.type = 'AzureDatabricks' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity.py index 68b02e5f771f..292beae2bf2e 100644 --- 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity.py @@ -15,12 +15,10 @@ class AzureFunctionActivity(ExecutionActivity): """Azure Function activity. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Activity name. :type name: str :param description: Activity description. :type description: str @@ -28,20 +26,19 @@ class AzureFunctionActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param method: Required. Rest API method for target endpoint. Possible - values include: 'GET', 'POST', 'PUT', 'DELETE', 'OPTIONS', 'HEAD', 'TRACE' + :param method: Rest API method for target endpoint. Possible values + include: 'GET', 'POST', 'PUT', 'DELETE', 'OPTIONS', 'HEAD', 'TRACE' :type method: str or ~azure.mgmt.datafactory.models.AzureFunctionActivityMethod - :param function_name: Required. Name of the Function that the Azure - Function Activity will call. Type: string (or Expression with resultType - string) + :param function_name: Name of the Function that the Azure Function + Activity will call. Type: string (or Expression with resultType string) :type function_name: object :param headers: Represents the headers that will be sent to the request. 
For example, to set the language and type on a request: "headers" : { @@ -76,10 +73,10 @@ class AzureFunctionActivity(ExecutionActivity): 'body': {'key': 'typeProperties.body', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureFunctionActivity, self).__init__(**kwargs) - self.method = kwargs.get('method', None) - self.function_name = kwargs.get('function_name', None) - self.headers = kwargs.get('headers', None) - self.body = kwargs.get('body', None) + def __init__(self, name, method, function_name, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, headers=None, body=None): + super(AzureFunctionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) + self.method = method + self.function_name = function_name + self.headers = headers + self.body = body self.type = 'AzureFunctionActivity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity_py3.py deleted file mode 100644 index 95bb1ca260e7..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity_py3.py +++ /dev/null @@ -1,85 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class AzureFunctionActivity(ExecutionActivity): - """Azure Function activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param method: Required. Rest API method for target endpoint. Possible - values include: 'GET', 'POST', 'PUT', 'DELETE', 'OPTIONS', 'HEAD', 'TRACE' - :type method: str or - ~azure.mgmt.datafactory.models.AzureFunctionActivityMethod - :param function_name: Required. Name of the Function that the Azure - Function Activity will call. Type: string (or Expression with resultType - string) - :type function_name: object - :param headers: Represents the headers that will be sent to the request. 
- For example, to set the language and type on a request: "headers" : { - "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: - string (or Expression with resultType string). - :type headers: object - :param body: Represents the payload that will be sent to the endpoint. - Required for POST/PUT method, not allowed for GET method Type: string (or - Expression with resultType string). - :type body: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'method': {'required': True}, - 'function_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'method': {'key': 'typeProperties.method', 'type': 'str'}, - 'function_name': {'key': 'typeProperties.functionName', 'type': 'object'}, - 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, - 'body': {'key': 'typeProperties.body', 'type': 'object'}, - } - - def __init__(self, *, name: str, method, function_name, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, headers=None, body=None, **kwargs) -> None: - super(AzureFunctionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.method = method - self.function_name = function_name - self.headers = headers - self.body = body - self.type = 'AzureFunctionActivity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service.py index 2ed5b870a778..581c4bd11b65 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service.py @@ -15,8 +15,6 @@ class AzureFunctionLinkedService(LinkedService): """Azure Function linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,10 +29,10 @@ class AzureFunctionLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param function_app_url: Required. The endpoint of the Azure Function App. - URL will be in the format https://.azurewebsites.net. + :param function_app_url: The endpoint of the Azure Function App. URL will + be in the format https://.azurewebsites.net. :type function_app_url: object :param function_key: Function or Host key for Azure Function App. 
:type function_key: ~azure.mgmt.datafactory.models.SecretBase @@ -61,9 +59,9 @@ class AzureFunctionLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureFunctionLinkedService, self).__init__(**kwargs) - self.function_app_url = kwargs.get('function_app_url', None) - self.function_key = kwargs.get('function_key', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, function_app_url, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, function_key=None, encrypted_credential=None): + super(AzureFunctionLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.function_app_url = function_app_url + self.function_key = function_key + self.encrypted_credential = encrypted_credential self.type = 'AzureFunction' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service_py3.py deleted file mode 100644 index a1bfdbe8b6c1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service_py3.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzureFunctionLinkedService(LinkedService): - """Azure Function linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param function_app_url: Required. The endpoint of the Azure Function App. - URL will be in the format https://.azurewebsites.net. - :type function_app_url: object - :param function_key: Function or Host key for Azure Function App. - :type function_key: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
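
Reviewer note: with the _py3 variants folded into the main modules, required arguments such as function_app_url, name, method and function_name are now enforced by the constructor signature itself instead of being fished out of **kwargs. A minimal sketch of calling code under the new signatures; the app URL, host key and function name below are invented placeholders, and SecureString is assumed to be the usual SecretBase implementation exported by this models package:

    from azure.mgmt.datafactory.models import (
        AzureFunctionActivity, AzureFunctionLinkedService, SecureString)

    # Required type properties are now positional; omitting them raises
    # TypeError at construction time instead of failing later in validation.
    function_ls = AzureFunctionLinkedService(
        function_app_url='https://myfunctionapp.azurewebsites.net',  # placeholder
        function_key=SecureString(value='<host-key>'))               # placeholder

    activity = AzureFunctionActivity(
        name='CallMyFunction',          # required
        method='POST',                  # required; str or AzureFunctionActivityMethod
        function_name='HttpTriggerFn',  # required; placeholder name
        body={'payload': 'example'})    # optional; needed for POST/PUT only
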
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'function_app_url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'function_app_url': {'key': 'typeProperties.functionAppUrl', 'type': 'object'}, - 'function_key': {'key': 'typeProperties.functionKey', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, function_app_url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, function_key=None, encrypted_credential=None, **kwargs) -> None: - super(AzureFunctionLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.function_app_url = function_app_url - self.function_key = function_key - self.encrypted_credential = encrypted_credential - self.type = 'AzureFunction' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service.py index 768f0d83ae93..97daf26a30b8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service.py @@ -15,8 +15,6 @@ class AzureKeyVaultLinkedService(LinkedService): """Azure Key Vault linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,9 +29,9 @@ class AzureKeyVaultLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param base_url: Required. The base URL of the Azure Key Vault. e.g. + :param base_url: The base URL of the Azure Key Vault. e.g. https://myakv.vault.azure.net Type: string (or Expression with resultType string). 
:type base_url: object @@ -54,7 +52,7 @@ class AzureKeyVaultLinkedService(LinkedService): 'base_url': {'key': 'typeProperties.baseUrl', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureKeyVaultLinkedService, self).__init__(**kwargs) - self.base_url = kwargs.get('base_url', None) + def __init__(self, base_url, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None): + super(AzureKeyVaultLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.base_url = base_url self.type = 'AzureKeyVault' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service_py3.py deleted file mode 100644 index 50f4a58a5a1b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service_py3.py +++ /dev/null @@ -1,60 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzureKeyVaultLinkedService(LinkedService): - """Azure Key Vault linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param base_url: Required. The base URL of the Azure Key Vault. e.g. - https://myakv.vault.azure.net Type: string (or Expression with resultType - string). 
- :type base_url: object - """ - - _validation = { - 'type': {'required': True}, - 'base_url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'base_url': {'key': 'typeProperties.baseUrl', 'type': 'object'}, - } - - def __init__(self, *, base_url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: - super(AzureKeyVaultLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.base_url = base_url - self.type = 'AzureKeyVault' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference.py index 28d3e7d31cee..9e5e976fa083 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference.py @@ -15,14 +15,12 @@ class AzureKeyVaultSecretReference(SecretBase): """Azure Key Vault secret reference. - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param store: Required. The Azure Key Vault linked service reference. + :param store: The Azure Key Vault linked service reference. :type store: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param secret_name: Required. The name of the secret in Azure Key Vault. - Type: string (or Expression with resultType string). + :param secret_name: The name of the secret in Azure Key Vault. Type: + string (or Expression with resultType string). :type secret_name: object :param secret_version: The version of the secret in Azure Key Vault. The default value is the latest version of the secret. 
Type: string (or @@ -43,9 +41,9 @@ class AzureKeyVaultSecretReference(SecretBase): 'secret_version': {'key': 'secretVersion', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureKeyVaultSecretReference, self).__init__(**kwargs) - self.store = kwargs.get('store', None) - self.secret_name = kwargs.get('secret_name', None) - self.secret_version = kwargs.get('secret_version', None) + def __init__(self, store, secret_name, secret_version=None): + super(AzureKeyVaultSecretReference, self).__init__() + self.store = store + self.secret_name = secret_name + self.secret_version = secret_version self.type = 'AzureKeyVaultSecret' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference_py3.py deleted file mode 100644 index c5fe4c7afbd4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference_py3.py +++ /dev/null @@ -1,51 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .secret_base_py3 import SecretBase - - -class AzureKeyVaultSecretReference(SecretBase): - """Azure Key Vault secret reference. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. - :type type: str - :param store: Required. The Azure Key Vault linked service reference. - :type store: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param secret_name: Required. The name of the secret in Azure Key Vault. - Type: string (or Expression with resultType string). - :type secret_name: object - :param secret_version: The version of the secret in Azure Key Vault. The - default value is the latest version of the secret. Type: string (or - Expression with resultType string). 
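
The same pattern for the Key Vault models, sketched under the new signatures: store and secret_name become positional on AzureKeyVaultSecretReference, while secret_version keeps its None default (the latest version of the secret). The vault URL and reference name are placeholders, and LinkedServiceReference(reference_name=...) is assumed from the existing models:

    from azure.mgmt.datafactory.models import (
        AzureKeyVaultLinkedService, AzureKeyVaultSecretReference,
        LinkedServiceReference)

    akv_ls = AzureKeyVaultLinkedService(
        base_url='https://myakv.vault.azure.net')  # placeholder vault URL

    # Resolves 'db-password' at runtime; omitting secret_version pins
    # the reference to the latest version of the secret.
    secret = AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='AzureKeyVault1'),
        secret_name='db-password')
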
- :type secret_version: object - """ - - _validation = { - 'type': {'required': True}, - 'store': {'required': True}, - 'secret_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'store': {'key': 'store', 'type': 'LinkedServiceReference'}, - 'secret_name': {'key': 'secretName', 'type': 'object'}, - 'secret_version': {'key': 'secretVersion', 'type': 'object'}, - } - - def __init__(self, *, store, secret_name, secret_version=None, **kwargs) -> None: - super(AzureKeyVaultSecretReference, self).__init__(**kwargs) - self.store = store - self.secret_name = secret_name - self.secret_version = secret_version - self.type = 'AzureKeyVaultSecret' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service.py index d2dc7db88851..4f0f8b9dcb9f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service.py @@ -15,8 +15,6 @@ class AzureMariaDBLinkedService(LinkedService): """Azure Database for MariaDB linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class AzureMariaDBLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. @@ -61,9 +59,9 @@ class AzureMariaDBLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureMariaDBLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.pwd = kwargs.get('pwd', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None): + super(AzureMariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.connection_string = connection_string + self.pwd = pwd + self.encrypted_credential = encrypted_credential self.type = 'AzureMariaDB' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service_py3.py deleted file mode 100644 index c80015ed6b45..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service_py3.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
-# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzureMariaDBLinkedService(LinkedService): - """Azure Database for MariaDB linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection - string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: - super(AzureMariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.pwd = pwd - self.encrypted_credential = encrypted_credential - self.type = 'AzureMariaDB' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source.py index 229e6f4311e3..b87be8d78c18 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source.py @@ -15,8 +15,6 @@ class AzureMariaDBSource(CopySource): """A copy activity Azure MariaDB source. 
- All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class AzureMariaDBSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). @@ -51,7 +49,7 @@ class AzureMariaDBSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureMariaDBSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(AzureMariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'AzureMariaDBSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source_py3.py deleted file mode 100644 index 11358f899e51..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class AzureMariaDBSource(CopySource): - """A copy activity Azure MariaDB source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(AzureMariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'AzureMariaDBSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset.py index a06c722279f2..7076e0e57d91 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset.py @@ -15,8 +15,6 @@ class AzureMariaDBTableDataset(Dataset): """Azure Database for MariaDB dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class AzureMariaDBTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class AzureMariaDBTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). 
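
Since AzureMariaDBLinkedService and AzureMariaDBSource have no required type properties, their call shape is unchanged apart from no longer accepting arbitrary **kwargs. A short sketch with a placeholder connection string and query:

    from azure.mgmt.datafactory.models import (
        AzureMariaDBLinkedService, AzureMariaDBSource)

    maria_ls = AzureMariaDBLinkedService(
        connection_string='Server=myserver;Port=3306;Database=sales')  # placeholder

    # Copy-activity source; `query` stays optional.
    source = AzureMariaDBSource(query='SELECT * FROM sales LIMIT 10')
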
@@ -66,7 +64,7 @@ class AzureMariaDBTableDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureMariaDBTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): + super(AzureMariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name self.type = 'AzureMariaDBTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset_py3.py deleted file mode 100644 index 9c6fd648af20..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class AzureMariaDBTableDataset(Dataset): - """Azure Database for MariaDB dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). 
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(AzureMariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'AzureMariaDBTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity.py index f6c7c75a1299..8e50d2477375 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity.py @@ -15,12 +15,10 @@ class AzureMLBatchExecutionActivity(ExecutionActivity): """Azure ML Batch Execution activity. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Activity name. :type name: str :param description: Activity description. :type description: str @@ -28,7 +26,7 @@ class AzureMLBatchExecutionActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param linked_service_name: Linked service reference. 
:type linked_service_name: @@ -74,9 +72,9 @@ class AzureMLBatchExecutionActivity(ExecutionActivity): 'web_service_inputs': {'key': 'typeProperties.webServiceInputs', 'type': '{AzureMLWebServiceFile}'}, } - def __init__(self, **kwargs): - super(AzureMLBatchExecutionActivity, self).__init__(**kwargs) - self.global_parameters = kwargs.get('global_parameters', None) - self.web_service_outputs = kwargs.get('web_service_outputs', None) - self.web_service_inputs = kwargs.get('web_service_inputs', None) + def __init__(self, name, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, global_parameters=None, web_service_outputs=None, web_service_inputs=None): + super(AzureMLBatchExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) + self.global_parameters = global_parameters + self.web_service_outputs = web_service_outputs + self.web_service_inputs = web_service_inputs self.type = 'AzureMLBatchExecution' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity_py3.py deleted file mode 100644 index e273c0b38128..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class AzureMLBatchExecutionActivity(ExecutionActivity): - """Azure ML Batch Execution activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param global_parameters: Key,Value pairs to be passed to the Azure ML - Batch Execution Service endpoint. Keys must match the names of web service - parameters defined in the published Azure ML web service. Values will be - passed in the GlobalParameters property of the Azure ML batch execution - request. 
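
For the ML batch execution activity, only `name` is promoted to a required positional argument; the ML-specific dictionaries remain optional. A sketch under those assumptions (per the docstring, global_parameters keys must match the parameter names of the published Azure ML web service; the key below is a placeholder):

    from azure.mgmt.datafactory.models import AzureMLBatchExecutionActivity

    batch = AzureMLBatchExecutionActivity(
        name='ScoreBatch',  # required
        # Placeholder key; must match a published web service parameter name.
        global_parameters={'Database query': 'SELECT * FROM inputs'})
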
- :type global_parameters: dict[str, object] - :param web_service_outputs: Key,Value pairs, mapping the names of Azure ML - endpoint's Web Service Outputs to AzureMLWebServiceFile objects specifying - the output Blob locations. This information will be passed in the - WebServiceOutputs property of the Azure ML batch execution request. - :type web_service_outputs: dict[str, - ~azure.mgmt.datafactory.models.AzureMLWebServiceFile] - :param web_service_inputs: Key,Value pairs, mapping the names of Azure ML - endpoint's Web Service Inputs to AzureMLWebServiceFile objects specifying - the input Blob locations.. This information will be passed in the - WebServiceInputs property of the Azure ML batch execution request. - :type web_service_inputs: dict[str, - ~azure.mgmt.datafactory.models.AzureMLWebServiceFile] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'global_parameters': {'key': 'typeProperties.globalParameters', 'type': '{object}'}, - 'web_service_outputs': {'key': 'typeProperties.webServiceOutputs', 'type': '{AzureMLWebServiceFile}'}, - 'web_service_inputs': {'key': 'typeProperties.webServiceInputs', 'type': '{AzureMLWebServiceFile}'}, - } - - def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, global_parameters=None, web_service_outputs=None, web_service_inputs=None, **kwargs) -> None: - super(AzureMLBatchExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.global_parameters = global_parameters - self.web_service_outputs = web_service_outputs - self.web_service_inputs = web_service_inputs - self.type = 'AzureMLBatchExecution' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service.py index 08dfec98a6bf..54ece64b3755 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service.py @@ -15,8 +15,6 @@ class AzureMLLinkedService(LinkedService): """Azure ML Web Service linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,13 +29,12 @@ class AzureMLLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param ml_endpoint: Required. 
The Batch Execution REST URL for an Azure ML - Web Service endpoint. Type: string (or Expression with resultType string). + :param ml_endpoint: The Batch Execution REST URL for an Azure ML Web + Service endpoint. Type: string (or Expression with resultType string). :type ml_endpoint: object - :param api_key: Required. The API key for accessing the Azure ML model - endpoint. + :param api_key: The API key for accessing the Azure ML model endpoint. :type api_key: ~azure.mgmt.datafactory.models.SecretBase :param update_resource_endpoint: The Update Resource REST URL for an Azure ML Web Service endpoint. Type: string (or Expression with resultType @@ -82,13 +79,13 @@ class AzureMLLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureMLLinkedService, self).__init__(**kwargs) - self.ml_endpoint = kwargs.get('ml_endpoint', None) - self.api_key = kwargs.get('api_key', None) - self.update_resource_endpoint = kwargs.get('update_resource_endpoint', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, ml_endpoint, api_key, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, update_resource_endpoint=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None): + super(AzureMLLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.ml_endpoint = ml_endpoint + self.api_key = api_key + self.update_resource_endpoint = update_resource_endpoint + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential self.type = 'AzureML' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service_py3.py deleted file mode 100644 index c77a692adc03..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service_py3.py +++ /dev/null @@ -1,94 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzureMLLinkedService(LinkedService): - """Azure ML Web Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. 
- :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param ml_endpoint: Required. The Batch Execution REST URL for an Azure ML - Web Service endpoint. Type: string (or Expression with resultType string). - :type ml_endpoint: object - :param api_key: Required. The API key for accessing the Azure ML model - endpoint. - :type api_key: ~azure.mgmt.datafactory.models.SecretBase - :param update_resource_endpoint: The Update Resource REST URL for an Azure - ML Web Service endpoint. Type: string (or Expression with resultType - string). - :type update_resource_endpoint: object - :param service_principal_id: The ID of the service principal used to - authenticate against the ARM-based updateResourceEndpoint of an Azure ML - web service. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to - authenticate against the ARM-based updateResourceEndpoint of an Azure ML - web service. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal - belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'ml_endpoint': {'required': True}, - 'api_key': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'ml_endpoint': {'key': 'typeProperties.mlEndpoint', 'type': 'object'}, - 'api_key': {'key': 'typeProperties.apiKey', 'type': 'SecretBase'}, - 'update_resource_endpoint': {'key': 'typeProperties.updateResourceEndpoint', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, ml_endpoint, api_key, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, update_resource_endpoint=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: - super(AzureMLLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.ml_endpoint = ml_endpoint - self.api_key = api_key - self.update_resource_endpoint = update_resource_endpoint - self.service_principal_id = 
service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.encrypted_credential = encrypted_credential - self.type = 'AzureML' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity.py index c47a2d81648e..ef1ef1c49aca 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity.py @@ -15,12 +15,10 @@ class AzureMLUpdateResourceActivity(ExecutionActivity): """Azure ML Update Resource management activity. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Activity name. :type name: str :param description: Activity description. :type description: str @@ -28,23 +26,23 @@ class AzureMLUpdateResourceActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param trained_model_name: Required. Name of the Trained Model module in - the Web Service experiment to be updated. Type: string (or Expression with + :param trained_model_name: Name of the Trained Model module in the Web + Service experiment to be updated. Type: string (or Expression with resultType string). :type trained_model_name: object - :param trained_model_linked_service_name: Required. Name of Azure Storage - linked service holding the .ilearner file that will be uploaded by the - update operation. + :param trained_model_linked_service_name: Name of Azure Storage linked + service holding the .ilearner file that will be uploaded by the update + operation. :type trained_model_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param trained_model_file_path: Required. The relative file path in + :param trained_model_file_path: The relative file path in trainedModelLinkedService to represent the .ilearner file that will be uploaded by the update operation. Type: string (or Expression with resultType string). 
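
Same pattern for the linked service itself: ml_endpoint and api_key now precede the optional base-class arguments. The endpoint and key below are placeholders, and SecureString is again assumed from the existing models:

    from azure.mgmt.datafactory.models import AzureMLLinkedService, SecureString

    aml_ls = AzureMLLinkedService(
        ml_endpoint='https://region.services.azureml.net/workspaces/ws/jobs',  # placeholder
        api_key=SecureString(value='<api-key>'))                               # placeholder
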
@@ -73,9 +71,9 @@ class AzureMLUpdateResourceActivity(ExecutionActivity): 'trained_model_file_path': {'key': 'typeProperties.trainedModelFilePath', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureMLUpdateResourceActivity, self).__init__(**kwargs) - self.trained_model_name = kwargs.get('trained_model_name', None) - self.trained_model_linked_service_name = kwargs.get('trained_model_linked_service_name', None) - self.trained_model_file_path = kwargs.get('trained_model_file_path', None) + def __init__(self, name, trained_model_name, trained_model_linked_service_name, trained_model_file_path, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None): + super(AzureMLUpdateResourceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) + self.trained_model_name = trained_model_name + self.trained_model_linked_service_name = trained_model_linked_service_name + self.trained_model_file_path = trained_model_file_path self.type = 'AzureMLUpdateResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity_py3.py deleted file mode 100644 index 50a5932f0bf0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity_py3.py +++ /dev/null @@ -1,81 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class AzureMLUpdateResourceActivity(ExecutionActivity): - """Azure ML Update Resource management activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param trained_model_name: Required. Name of the Trained Model module in - the Web Service experiment to be updated. Type: string (or Expression with - resultType string). - :type trained_model_name: object - :param trained_model_linked_service_name: Required. 
Name of Azure Storage - linked service holding the .ilearner file that will be uploaded by the - update operation. - :type trained_model_linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param trained_model_file_path: Required. The relative file path in - trainedModelLinkedService to represent the .ilearner file that will be - uploaded by the update operation. Type: string (or Expression with - resultType string). - :type trained_model_file_path: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'trained_model_name': {'required': True}, - 'trained_model_linked_service_name': {'required': True}, - 'trained_model_file_path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'trained_model_name': {'key': 'typeProperties.trainedModelName', 'type': 'object'}, - 'trained_model_linked_service_name': {'key': 'typeProperties.trainedModelLinkedServiceName', 'type': 'LinkedServiceReference'}, - 'trained_model_file_path': {'key': 'typeProperties.trainedModelFilePath', 'type': 'object'}, - } - - def __init__(self, *, name: str, trained_model_name, trained_model_linked_service_name, trained_model_file_path, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, **kwargs) -> None: - super(AzureMLUpdateResourceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.trained_model_name = trained_model_name - self.trained_model_linked_service_name = trained_model_linked_service_name - self.trained_model_file_path = trained_model_file_path - self.type = 'AzureMLUpdateResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file.py index 682b24fed830..381eef2be708 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file.py @@ -15,14 +15,12 @@ class AzureMLWebServiceFile(Model): """Azure ML WebService Input/Output file. - All required parameters must be populated in order to send to Azure. - - :param file_path: Required. The relative file path, including container - name, in the Azure Blob Storage specified by the LinkedService. Type: - string (or Expression with resultType string). + :param file_path: The relative file path, including container name, in the + Azure Blob Storage specified by the LinkedService. Type: string (or + Expression with resultType string). :type file_path: object - :param linked_service_name: Required. Reference to an Azure Storage - LinkedService, where Azure ML WebService Input/Output file located. 
+ :param linked_service_name: Reference to an Azure Storage LinkedService, + where Azure ML WebService Input/Output file located. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference """ @@ -37,7 +35,7 @@ class AzureMLWebServiceFile(Model): 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, } - def __init__(self, **kwargs): - super(AzureMLWebServiceFile, self).__init__(**kwargs) - self.file_path = kwargs.get('file_path', None) - self.linked_service_name = kwargs.get('linked_service_name', None) + def __init__(self, file_path, linked_service_name): + super(AzureMLWebServiceFile, self).__init__() + self.file_path = file_path + self.linked_service_name = linked_service_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file_py3.py deleted file mode 100644 index abe75d9d9bf2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file_py3.py +++ /dev/null @@ -1,43 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class AzureMLWebServiceFile(Model): - """Azure ML WebService Input/Output file. - - All required parameters must be populated in order to send to Azure. - - :param file_path: Required. The relative file path, including container - name, in the Azure Blob Storage specified by the LinkedService. Type: - string (or Expression with resultType string). - :type file_path: object - :param linked_service_name: Required. Reference to an Azure Storage - LinkedService, where Azure ML WebService Input/Output file located. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - """ - - _validation = { - 'file_path': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'file_path': {'key': 'filePath', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - } - - def __init__(self, *, file_path, linked_service_name, **kwargs) -> None: - super(AzureMLWebServiceFile, self).__init__(**kwargs) - self.file_path = file_path - self.linked_service_name = linked_service_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service.py index aedbdbb73eb5..fe14ad2966e6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service.py @@ -15,8 +15,6 @@ class AzureMySqlLinkedService(LinkedService): """Azure MySQL database linked service. - All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,9 +29,9 @@ class AzureMySqlLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param connection_string: Required. The connection string. Type: string, + :param connection_string: The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param password: The Azure key vault secret reference of password in @@ -63,9 +61,9 @@ class AzureMySqlLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureMySqlLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, connection_string, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, password=None, encrypted_credential=None): + super(AzureMySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.connection_string = connection_string + self.password = password + self.encrypted_credential = encrypted_credential self.type = 'AzureMySql' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service_py3.py deleted file mode 100644 index 57692275f564..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service_py3.py +++ /dev/null @@ -1,71 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzureMySqlLinkedService(LinkedService): - """Azure MySQL database linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. 
The connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in - connection string. - :type password: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(AzureMySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'AzureMySql' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_sink.py index b3ee0bbc8645..255a4c4fa89b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_sink.py @@ -15,8 +15,6 @@ class AzureMySqlSink(CopySink): """A copy activity Azure MySql sink. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -38,7 +36,7 @@ class AzureMySqlSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). 
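For reviewers, a minimal sketch of what the regenerated AzureMLUpdateResourceActivity and AzureMLWebServiceFile signatures above imply for callers. The activity name, reference names, and the .ilearner path are placeholders, and LinkedServiceReference(reference_name=...) is assumed from the same models namespace:

    from azure.mgmt.datafactory.models import (
        AzureMLUpdateResourceActivity,
        AzureMLWebServiceFile,
        LinkedServiceReference,
    )

    # The four values previously read from **kwargs are now required, so a
    # missing trained-model argument fails with a TypeError at construction
    # time instead of surfacing later as a validation error.
    update_activity = AzureMLUpdateResourceActivity(
        name='UpdateModel',  # placeholder activity name
        trained_model_name='Trained Model',
        trained_model_linked_service_name=LinkedServiceReference(reference_name='BlobStorageLS'),
        trained_model_file_path='azuremlmodels/model.ilearner',  # placeholder path
    )

    # AzureMLWebServiceFile likewise takes both of its properties positionally,
    # even though the docstring no longer carries the Required markers.
    ws_file = AzureMLWebServiceFile(
        'mycontainer/input.csv',  # placeholder blob path
        LinkedServiceReference(reference_name='BlobStorageLS'),
    )
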
@@ -60,7 +58,7 @@ class AzureMySqlSink(CopySink): 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureMySqlSink, self).__init__(**kwargs) - self.pre_copy_script = kwargs.get('pre_copy_script', None) + def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None): + super(AzureMySqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.pre_copy_script = pre_copy_script self.type = 'AzureMySqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_sink_py3.py deleted file mode 100644 index 340c10f5988b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_sink_py3.py +++ /dev/null @@ -1,66 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class AzureMySqlSink(CopySink): - """A copy activity Azure MySql sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param pre_copy_script: A query to execute before starting the copy. Type: - string (or Expression with resultType string). 
- :type pre_copy_script: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: - super(AzureMySqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.pre_copy_script = pre_copy_script - self.type = 'AzureMySqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source.py index 823336432567..e76460749826 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source.py @@ -15,8 +15,6 @@ class AzureMySqlSource(CopySource): """A copy activity Azure MySQL source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class AzureMySqlSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType string). @@ -51,7 +49,7 @@ class AzureMySqlSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureMySqlSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'AzureMySqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source_py3.py deleted file mode 100644 index 7030738d2615..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. 
-# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class AzureMySqlSource(CopySource): - """A copy activity Azure MySQL source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'AzureMySqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset.py index 8f5d43478089..b8b80bf78b6f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset.py @@ -15,8 +15,6 @@ class AzureMySqlTableDataset(Dataset): """The Azure MySQL database dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class AzureMySqlTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. 
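The AzureMySqlLinkedService and AzureMySqlSource hunks above translate to call sites like the following sketch; SecureString is assumed from the same models package, and the connection values are placeholders:

    from azure.mgmt.datafactory.models import (
        AzureMySqlLinkedService,
        AzureMySqlSource,
        SecureString,
    )

    # connection_string keeps its required slot and becomes the first
    # positional parameter of the regenerated signature.
    mysql_ls = AzureMySqlLinkedService(
        connection_string=SecureString('Server=example;Port=3306;Database=db;UID=user;PWD=<secret>'),
    )

    # query stays optional; the regenerated __init__ merely names it
    # explicitly instead of fishing it out of **kwargs.
    mysql_source = AzureMySqlSource(query='SELECT * FROM sales')
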
@@ -41,7 +39,7 @@ class AzureMySqlTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: The Azure MySQL database table name. Type: string (or Expression with resultType string). @@ -66,7 +64,7 @@ class AzureMySqlTableDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureMySqlTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): + super(AzureMySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name self.type = 'AzureMySqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset_py3.py deleted file mode 100644 index 7bd7eb6f17f8..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class AzureMySqlTableDataset(Dataset): - """The Azure MySQL database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. 
- :type type: str - :param table_name: The Azure MySQL database table name. Type: string (or - Expression with resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(AzureMySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'AzureMySqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service.py index 92359d6d6a10..99709300a746 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service.py @@ -15,8 +15,6 @@ class AzurePostgreSqlLinkedService(LinkedService): """Azure PostgreSQL linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class AzurePostgreSqlLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. 
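Against the regenerated AzureMySqlTableDataset above, construction looks roughly like this; the reference and table names are placeholders:

    from azure.mgmt.datafactory.models import (
        AzureMySqlTableDataset,
        LinkedServiceReference,
    )

    # linked_service_name moves to the front of the signature; table_name and
    # the remaining dataset properties stay optional keywords.
    mysql_table = AzureMySqlTableDataset(
        LinkedServiceReference(reference_name='MySqlLinkedService'),  # placeholder
        table_name='sales',
    )
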
@@ -62,9 +60,9 @@ class AzurePostgreSqlLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzurePostgreSqlLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, connection_string=None, password=None, encrypted_credential=None): + super(AzurePostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.connection_string = connection_string + self.password = password + self.encrypted_credential = encrypted_credential self.type = 'AzurePostgreSql' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service_py3.py deleted file mode 100644 index 47f8f17980f8..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service_py3.py +++ /dev/null @@ -1,70 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzurePostgreSqlLinkedService(LinkedService): - """Azure PostgreSQL linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in - connection string. - :type password: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(AzurePostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'AzurePostgreSql' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink.py index 6214e1ba1f22..43c35505301d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink.py @@ -15,8 +15,6 @@ class AzurePostgreSqlSink(CopySink): """A copy activity Azure PostgreSQL sink. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -38,7 +36,7 @@ class AzurePostgreSqlSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). 
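A sketch of the PostgreSQL counterpart which, per the hunks above, keeps connection_string optional rather than promoting it to a positional parameter; the values are placeholders and SecureString is again assumed:

    from azure.mgmt.datafactory.models import (
        AzurePostgreSqlLinkedService,
        SecureString,
    )

    # Unlike the MySQL model, connection_string was never in _validation here,
    # so every parameter of the regenerated signature remains optional.
    pg_ls = AzurePostgreSqlLinkedService(
        connection_string=SecureString('Host=example;Port=5432;Database=db;Username=user;Password=<secret>'),
    )
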
@@ -60,7 +58,7 @@ class AzurePostgreSqlSink(CopySink): 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzurePostgreSqlSink, self).__init__(**kwargs) - self.pre_copy_script = kwargs.get('pre_copy_script', None) + def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None): + super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.pre_copy_script = pre_copy_script self.type = 'AzurePostgreSqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink_py3.py deleted file mode 100644 index b7cd0ec51a29..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink_py3.py +++ /dev/null @@ -1,66 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class AzurePostgreSqlSink(CopySink): - """A copy activity Azure PostgreSQL sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param pre_copy_script: A query to execute before starting the copy. Type: - string (or Expression with resultType string). 
- :type pre_copy_script: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: - super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.pre_copy_script = pre_copy_script - self.type = 'AzurePostgreSqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source.py index e0cd62fd8028..137b34f516ea 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source.py @@ -15,8 +15,6 @@ class AzurePostgreSqlSource(CopySource): """A copy activity Azure PostgreSQL source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class AzurePostgreSqlSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
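Putting the PostgreSQL sink to use, under the assumption that the regenerated CopyActivity (diffed elsewhere in this patch) takes name, source, and sink as its required parameters; all names below are placeholders:

    from azure.mgmt.datafactory.models import (
        AzureMySqlSource,
        AzurePostgreSqlSink,
        CopyActivity,
    )

    # pre_copy_script runs once before rows are written; this one clears a
    # placeholder staging table.
    pg_sink = AzurePostgreSqlSink(pre_copy_script='TRUNCATE TABLE staging.sales')

    # name, source and sink are assumed to be CopyActivity's required
    # parameters after regeneration.
    copy = CopyActivity(
        name='CopyMySqlToPostgres',
        source=AzureMySqlSource(query='SELECT * FROM sales'),
        sink=pg_sink,
    )
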
@@ -51,7 +49,7 @@ class AzurePostgreSqlSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzurePostgreSqlSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'AzurePostgreSqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source_py3.py deleted file mode 100644 index 0362b0dca390..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class AzurePostgreSqlSource(CopySource): - """A copy activity Azure PostgreSQL source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'AzurePostgreSqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset.py index 933264b57a9b..012f7a3f2036 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset.py @@ -15,8 +15,6 @@ class AzurePostgreSqlTableDataset(Dataset): """Azure PostgreSQL dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class AzurePostgreSqlTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class AzurePostgreSqlTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: The table name of the Azure PostgreSQL database which includes both schema and table. 
Type: string (or Expression with @@ -76,9 +74,9 @@ class AzurePostgreSqlTableDataset(Dataset): 'azure_postgre_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzurePostgreSqlTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.azure_postgre_sql_table_dataset_schema = kwargs.get('azure_postgre_sql_table_dataset_schema', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, azure_postgre_sql_table_dataset_schema=None): + super(AzurePostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name + self.table = table + self.azure_postgre_sql_table_dataset_schema = azure_postgre_sql_table_dataset_schema self.type = 'AzurePostgreSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset_py3.py deleted file mode 100644 index 485dc3efb102..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset_py3.py +++ /dev/null @@ -1,84 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class AzurePostgreSqlTableDataset(Dataset): - """Azure PostgreSQL dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. 
- :type type: str - :param table_name: The table name of the Azure PostgreSQL database which - includes both schema and table. Type: string (or Expression with - resultType string). - :type table_name: object - :param table: The table name of the Azure PostgreSQL database. Type: - string (or Expression with resultType string). - :type table: object - :param azure_postgre_sql_table_dataset_schema: The schema name of the - Azure PostgreSQL database. Type: string (or Expression with resultType - string). - :type azure_postgre_sql_table_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'azure_postgre_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, azure_postgre_sql_table_dataset_schema=None, **kwargs) -> None: - super(AzurePostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.table = table - self.azure_postgre_sql_table_dataset_schema = azure_postgre_sql_table_dataset_schema - self.type = 'AzurePostgreSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink.py index 9f3a63db4978..c7ab9fdd8579 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink.py @@ -15,8 +15,6 @@ class AzureQueueSink(CopySink): """A copy activity Azure Queue sink. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -38,7 +36,7 @@ class AzureQueueSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. 
:type type: str """ @@ -46,16 +44,6 @@ class AzureQueueSink(CopySink): 'type': {'required': True}, } - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(AzureQueueSink, self).__init__(**kwargs) + def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None): + super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) self.type = 'AzureQueueSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink_py3.py deleted file mode 100644 index db2fb60ddb1e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink_py3.py +++ /dev/null @@ -1,61 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class AzureQueueSink(CopySink): - """A copy activity Azure Queue sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: - super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type = 'AzureQueueSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset.py index 1239bbad78fc..ea61b0305b24 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset.py @@ -15,8 +15,6 @@ class AzureSearchIndexDataset(Dataset): """The Azure Search Index. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class AzureSearchIndexDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,10 +39,10 @@ class AzureSearchIndexDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param index_name: Required. The name of the Azure Search Index. Type: - string (or Expression with resultType string). + :param index_name: The name of the Azure Search Index. Type: string (or + Expression with resultType string). 
:type index_name: object """ @@ -67,7 +65,7 @@ class AzureSearchIndexDataset(Dataset): 'index_name': {'key': 'typeProperties.indexName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureSearchIndexDataset, self).__init__(**kwargs) - self.index_name = kwargs.get('index_name', None) + def __init__(self, linked_service_name, index_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None): + super(AzureSearchIndexDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.index_name = index_name self.type = 'AzureSearchIndex' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset_py3.py deleted file mode 100644 index da5e92dd2edd..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset_py3.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class AzureSearchIndexDataset(Dataset): - """The Azure Search Index. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param index_name: Required. The name of the Azure Search Index. Type: - string (or Expression with resultType string). 
- :type index_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'index_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'index_name': {'key': 'typeProperties.indexName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, index_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: - super(AzureSearchIndexDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.index_name = index_name - self.type = 'AzureSearchIndex' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py index 9aae64af8da0..a024632fccf0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py @@ -15,8 +15,6 @@ class AzureSearchIndexSink(CopySink): """A copy activity Azure Search Index sink. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -38,7 +36,7 @@ class AzureSearchIndexSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param write_behavior: Specify the write behavior when upserting documents into Azure Search Index. 
Possible values include: 'Merge', 'Upload' @@ -61,7 +59,7 @@ class AzureSearchIndexSink(CopySink): 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } - def __init__(self, **kwargs): - super(AzureSearchIndexSink, self).__init__(**kwargs) - self.write_behavior = kwargs.get('write_behavior', None) + def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None): + super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.write_behavior = write_behavior self.type = 'AzureSearchIndexSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py deleted file mode 100644 index 3cd887a2512c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py +++ /dev/null @@ -1,67 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class AzureSearchIndexSink(CopySink): - """A copy activity Azure Search Index sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param write_behavior: Specify the write behavior when upserting documents - into Azure Search Index. 
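
A corresponding sketch for the sink constructor above; the batch size is an arbitrary example value, and the write behavior is one of the two values the docstring names ('Merge' or 'Upload').

from azure.mgmt.datafactory.models import AzureSearchIndexSink

# All parameters are optional keywords; write_behavior accepts a plain
# string or an AzureSearchIndexWriteBehaviorType enum value.
sink = AzureSearchIndexSink(write_behavior='Upload', write_batch_size=1000)
print(sink.type)  # 'AzureSearchIndexSink'
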
Possible values include: 'Merge', 'Upload' - :type write_behavior: str or - ~azure.mgmt.datafactory.models.AzureSearchIndexWriteBehaviorType - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: - super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.write_behavior = write_behavior - self.type = 'AzureSearchIndexSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service.py index 782799cd5b28..6433182e9e36 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service.py @@ -15,8 +15,6 @@ class AzureSearchLinkedService(LinkedService): """Linked service for Windows Azure Search Service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,10 +29,10 @@ class AzureSearchLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param url: Required. URL for Azure Search service. Type: string (or - Expression with resultType string). + :param url: URL for Azure Search service. Type: string (or Expression with + resultType string). 
:type url: object :param key: Admin Key for Azure Search service :type key: ~azure.mgmt.datafactory.models.SecretBase @@ -61,9 +59,9 @@ class AzureSearchLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureSearchLinkedService, self).__init__(**kwargs) - self.url = kwargs.get('url', None) - self.key = kwargs.get('key', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, url, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, key=None, encrypted_credential=None): + super(AzureSearchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.url = url + self.key = key + self.encrypted_credential = encrypted_credential self.type = 'AzureSearch' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service_py3.py deleted file mode 100644 index 8589c3aead91..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service_py3.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzureSearchLinkedService(LinkedService): - """Linked service for Windows Azure Search Service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param url: Required. URL for Azure Search service. Type: string (or - Expression with resultType string). - :type url: object - :param key: Admin Key for Azure Search service - :type key: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
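
A hedged sketch of the linked-service constructor above: url is the first positional parameter, and the admin key is any SecretBase. SecureString is assumed from the same models package; the URL and key value are placeholders.

from azure.mgmt.datafactory.models import AzureSearchLinkedService, SecureString

# url is the only required type property; the admin key is supplied as an
# inline SecureString here (value is a placeholder, not a real key).
ls = AzureSearchLinkedService(
    url='https://contoso-search.search.windows.net',
    key=SecureString(value='<admin-key>'))
print(ls.type)  # 'AzureSearch'
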
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'key': {'key': 'typeProperties.key', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, key=None, encrypted_credential=None, **kwargs) -> None: - super(AzureSearchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.url = url - self.key = key - self.encrypted_credential = encrypted_credential - self.type = 'AzureSearch' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service.py index 0da66637a04f..effb7eb59abc 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service.py @@ -15,8 +15,6 @@ class AzureSqlDatabaseLinkedService(LinkedService): """Microsoft Azure SQL Database linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,9 +29,9 @@ class AzureSqlDatabaseLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param connection_string: Required. The connection string. Type: string, + :param connection_string: The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. 
:type connection_string: object :param password: The Azure key vault secret reference of password in @@ -76,12 +74,12 @@ class AzureSqlDatabaseLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureSqlDatabaseLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.password = kwargs.get('password', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, connection_string, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None): + super(AzureSqlDatabaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.connection_string = connection_string + self.password = password + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential self.type = 'AzureSqlDatabase' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service_py3.py deleted file mode 100644 index dbcf6c88b134..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service_py3.py +++ /dev/null @@ -1,87 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzureSqlDatabaseLinkedService(LinkedService): - """Microsoft Azure SQL Database linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. 
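
For illustration, the Azure SQL Database linked service above built with service-principal authentication. The parameter names match the constructor in this patch; the connection string, application IDs, and secret are placeholders, and SecureString is assumed from the same models package.

from azure.mgmt.datafactory.models import AzureSqlDatabaseLinkedService, SecureString

conn = ('Data Source=tcp:myserver.database.windows.net,1433;'
        'Initial Catalog=mydb;')
ls = AzureSqlDatabaseLinkedService(
    connection_string=conn,
    service_principal_id='<app-id>',
    service_principal_key=SecureString(value='<app-secret>'),
    tenant='<tenant-id>')
print(ls.type)  # 'AzureSqlDatabase'
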
- :type connection_string: object - :param password: The Azure key vault secret reference of password in - connection string. - :type password: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param service_principal_id: The ID of the service principal used to - authenticate against Azure SQL Database. Type: string (or Expression with - resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to - authenticate against Azure SQL Database. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal - belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: - super(AzureSqlDatabaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.password = password - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.encrypted_credential = encrypted_credential - self.type = 'AzureSqlDatabase' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service.py index cc7c9d58d19f..848003cb5076 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service.py @@ -15,8 +15,6 @@ class AzureSqlDWLinkedService(LinkedService): """Azure SQL Data Warehouse linked service. - All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,9 +29,9 @@ class AzureSqlDWLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param connection_string: Required. The connection string. Type: string, + :param connection_string: The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object @@ -77,12 +75,12 @@ class AzureSqlDWLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureSqlDWLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.password = kwargs.get('password', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, connection_string, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None): + super(AzureSqlDWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.connection_string = connection_string + self.password = password + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential self.type = 'AzureSqlDW' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service_py3.py deleted file mode 100644 index 5c75f3904b37..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service_py3.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzureSqlDWLinkedService(LinkedService): - """Azure SQL Data Warehouse linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. 
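
A sketch of the SQL Data Warehouse linked service above with its password pulled from Key Vault rather than embedded in the connection string. The AzureKeyVaultSecretReference signature (store + secret_name) is an assumption about the generated type elsewhere in this package; all names are placeholders.

from azure.mgmt.datafactory.models import (
    AzureSqlDWLinkedService, AzureKeyVaultSecretReference, LinkedServiceReference)

# Key Vault linked service that holds the secret (name is hypothetical).
akv = LinkedServiceReference(reference_name='MyAzureKeyVaultLinkedService')

ls = AzureSqlDWLinkedService(
    connection_string=('Data Source=tcp:mydw.database.windows.net,1433;'
                       'Initial Catalog=mydw;'),
    password=AzureKeyVaultSecretReference(store=akv,
                                          secret_name='sqldw-password'))
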
- :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. Type: string, SecureString - or AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in - connection string. - :type password: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param service_principal_id: The ID of the service principal used to - authenticate against Azure SQL Data Warehouse. Type: string (or Expression - with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to - authenticate against Azure SQL Data Warehouse. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal - belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: - super(AzureSqlDWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.password = password - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.encrypted_credential = encrypted_credential - self.type = 'AzureSqlDW' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py index ed9fe8904d73..d9f0afadea50 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py @@ -15,8 +15,6 @@ class AzureSqlDWTableDataset(Dataset): """The Azure SQL Data Warehouse dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class AzureSqlDWTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class AzureSqlDWTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. @@ -74,9 +72,9 @@ class AzureSqlDWTableDataset(Dataset): 'table': {'key': 'typeProperties.table', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureSqlDWTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.azure_sql_dw_table_dataset_schema = kwargs.get('azure_sql_dw_table_dataset_schema', None) - self.table = kwargs.get('table', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, azure_sql_dw_table_dataset_schema=None, table=None): + super(AzureSqlDWTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name + self.azure_sql_dw_table_dataset_schema = azure_sql_dw_table_dataset_schema + self.table = table self.type = 'AzureSqlDWTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py deleted file mode 100644 index a38e4ab479c9..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
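
A minimal sketch of the dataset constructor above, using the schema + table pair the docstring recommends over the retiring table_name property; the linked-service, schema, and table names are illustrative.

from azure.mgmt.datafactory.models import AzureSqlDWTableDataset, LinkedServiceReference

ds = AzureSqlDWTableDataset(
    LinkedServiceReference(reference_name='MyAzureSqlDWLinkedService'),
    azure_sql_dw_table_dataset_schema='dbo',  # serializes to typeProperties.schema
    table='FactSales')                        # preferred over the retiring table_name
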
-# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class AzureSqlDWTableDataset(Dataset): - """The Azure SQL Data Warehouse dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param azure_sql_dw_table_dataset_schema: The schema name of the Azure SQL - Data Warehouse. Type: string (or Expression with resultType string). - :type azure_sql_dw_table_dataset_schema: object - :param table: The table name of the Azure SQL Data Warehouse. Type: string - (or Expression with resultType string). 
- :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'azure_sql_dw_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, azure_sql_dw_table_dataset_schema=None, table=None, **kwargs) -> None: - super(AzureSqlDWTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.azure_sql_dw_table_dataset_schema = azure_sql_dw_table_dataset_schema - self.table = table - self.type = 'AzureSqlDWTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service.py index 2aab3a145ff2..61fd2b4a086c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service.py @@ -15,8 +15,6 @@ class AzureSqlMILinkedService(LinkedService): """Azure SQL Managed Instance linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,9 +29,9 @@ class AzureSqlMILinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param connection_string: Required. The connection string. Type: string, + :param connection_string: The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. 
:type connection_string: object :param password: The Azure key vault secret reference of password in @@ -76,12 +74,12 @@ class AzureSqlMILinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureSqlMILinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.password = kwargs.get('password', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, connection_string, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None): + super(AzureSqlMILinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.connection_string = connection_string + self.password = password + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential self.type = 'AzureSqlMI' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service_py3.py deleted file mode 100644 index ec1a2e5e8549..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service_py3.py +++ /dev/null @@ -1,87 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzureSqlMILinkedService(LinkedService): - """Azure SQL Managed Instance linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in - connection string. 
- :type password: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param service_principal_id: The ID of the service principal used to - authenticate against Azure SQL Managed Instance. Type: string (or - Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to - authenticate against Azure SQL Managed Instance. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal - belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: - super(AzureSqlMILinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.password = password - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.encrypted_credential = encrypted_credential - self.type = 'AzureSqlMI' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset.py index 1128a9e8cb06..ea3b9da76b59 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset.py @@ -15,8 +15,6 @@ class AzureSqlMITableDataset(Dataset): """The Azure SQL Managed Instance dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class AzureSqlMITableDataset(Dataset): dataset. 
Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class AzureSqlMITableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. @@ -74,9 +72,9 @@ class AzureSqlMITableDataset(Dataset): 'table': {'key': 'typeProperties.table', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureSqlMITableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.azure_sql_mi_table_dataset_schema = kwargs.get('azure_sql_mi_table_dataset_schema', None) - self.table = kwargs.get('table', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, azure_sql_mi_table_dataset_schema=None, table=None): + super(AzureSqlMITableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name + self.azure_sql_mi_table_dataset_schema = azure_sql_mi_table_dataset_schema + self.table = table self.type = 'AzureSqlMITable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset_py3.py deleted file mode 100644 index ac72614e3ed4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class AzureSqlMITableDataset(Dataset): - """The Azure SQL Managed Instance dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. 
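
One detail worth noting in the attribute map above: the database schema name serializes to the nested key typeProperties.schema, so the Python attribute is given the long name azure_sql_mi_table_dataset_schema to avoid colliding with the top-level schema property (the physical column schema). A sketch under those assumptions, with placeholder names:

from azure.mgmt.datafactory.models import AzureSqlMITableDataset, LinkedServiceReference

# 'schema' at the top level holds the physical column schema; the database
# schema name below lands under typeProperties.schema in the REST payload.
ds = AzureSqlMITableDataset(
    LinkedServiceReference(reference_name='MyAzureSqlMILinkedService'),
    azure_sql_mi_table_dataset_schema='dbo',
    table='Orders')
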
- :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param azure_sql_mi_table_dataset_schema: The schema name of the Azure SQL - Managed Instance. Type: string (or Expression with resultType string). - :type azure_sql_mi_table_dataset_schema: object - :param table: The table name of the Azure SQL Managed Instance dataset. - Type: string (or Expression with resultType string). - :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'azure_sql_mi_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, azure_sql_mi_table_dataset_schema=None, table=None, **kwargs) -> None: - super(AzureSqlMITableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.azure_sql_mi_table_dataset_schema = azure_sql_mi_table_dataset_schema - self.table = table - self.type = 'AzureSqlMITable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink.py index 5d93df3d790a..2658443950a3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink.py @@ -15,8 +15,6 @@ class AzureSqlSink(CopySink): """A copy activity Azure SQL sink. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -38,7 +36,7 @@ class AzureSqlSink(CopySink): for the sink data store. 
Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). @@ -82,12 +80,12 @@ class AzureSqlSink(CopySink): 'table_option': {'key': 'tableOption', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureSqlSink, self).__init__(**kwargs) - self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) - self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) - self.pre_copy_script = kwargs.get('pre_copy_script', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) - self.table_option = kwargs.get('table_option', None) + def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None): + super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name + self.sql_writer_table_type = sql_writer_table_type + self.pre_copy_script = pre_copy_script + self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.table_option = table_option self.type = 'AzureSqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink_py3.py deleted file mode 100644 index e4d5e66e18c5..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink_py3.py +++ /dev/null @@ -1,93 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class AzureSqlSink(CopySink): - """A copy activity Azure SQL sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. 
Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_writer_stored_procedure_name: SQL writer stored procedure name. - Type: string (or Expression with resultType string). - :type sql_writer_stored_procedure_name: object - :param sql_writer_table_type: SQL writer table type. Type: string (or - Expression with resultType string). - :type sql_writer_table_type: object - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression - with resultType string). - :type pre_copy_script: object - :param stored_procedure_parameters: SQL stored procedure parameters. - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param stored_procedure_table_type_parameter_name: The stored procedure - parameter name of the table type. Type: string (or Expression with - resultType string). - :type stored_procedure_table_type_parameter_name: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). 
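
A sketch of the Azure SQL sink above using a stored-procedure writer plus the table_option the docstring describes. The stored-procedure and table-type names are placeholders, and the StoredProcedureParameter(value=..., type=...) shape is an assumption based on the docstring example "{Parameter1: {value: "1", type: "int"}}".

from azure.mgmt.datafactory.models import AzureSqlSink, StoredProcedureParameter

sink = AzureSqlSink(
    sql_writer_stored_procedure_name='spOverwriteOrders',  # hypothetical proc
    sql_writer_table_type='OrdersType',                    # hypothetical table type
    stored_procedure_parameters={
        'BatchId': StoredProcedureParameter(value='1', type='Int')},
    table_option='autoCreate')  # per the docstring, only 'autoCreate' is supported
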
- :type table_option: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, - 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None, **kwargs) -> None: - super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name - self.sql_writer_table_type = sql_writer_table_type - self.pre_copy_script = pre_copy_script - self.stored_procedure_parameters = stored_procedure_parameters - self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name - self.table_option = table_option - self.type = 'AzureSqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source.py index b6c62f9a3164..6595bded5f78 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source.py @@ -15,8 +15,6 @@ class AzureSqlSource(CopySource): """A copy activity Azure SQL source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class AzureSqlSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). 
@@ -64,10 +62,10 @@ class AzureSqlSource(CopySource): 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureSqlSource, self).__init__(**kwargs) - self.sql_reader_query = kwargs.get('sql_reader_query', None) - self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - self.produce_additional_types = kwargs.get('produce_additional_types', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None): + super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.produce_additional_types = produce_additional_types self.type = 'AzureSqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source_py3.py deleted file mode 100644 index cb5c33d28bb2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source_py3.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class AzureSqlSource(CopySource): - """A copy activity Azure SQL source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_reader_query: SQL reader query. Type: string (or Expression - with resultType string). - :type sql_reader_query: object - :param sql_reader_stored_procedure_name: Name of the stored procedure for - a SQL Database source. This cannot be used at the same time as - SqlReaderQuery. Type: string (or Expression with resultType string). 
- :type sql_reader_stored_procedure_name: object - :param stored_procedure_parameters: Value and type setting for stored - procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param produce_additional_types: Which additional types to produce. - :type produce_additional_types: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, - 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: - super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.sql_reader_query = sql_reader_query - self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name - self.stored_procedure_parameters = stored_procedure_parameters - self.produce_additional_types = produce_additional_types - self.type = 'AzureSqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py index ce8b08944f3a..29e98f4e0525 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py @@ -15,8 +15,6 @@ class AzureSqlTableDataset(Dataset): """The Azure SQL Server database dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class AzureSqlTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class AzureSqlTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: This property will be retired. 
Please consider using schema + table properties instead. @@ -74,9 +72,9 @@ class AzureSqlTableDataset(Dataset): 'table': {'key': 'typeProperties.table', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureSqlTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.azure_sql_table_dataset_schema = kwargs.get('azure_sql_table_dataset_schema', None) - self.table = kwargs.get('table', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, azure_sql_table_dataset_schema=None, table=None): + super(AzureSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name + self.azure_sql_table_dataset_schema = azure_sql_table_dataset_schema + self.table = table self.type = 'AzureSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py deleted file mode 100644 index 3ed19ee47e7e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class AzureSqlTableDataset(Dataset): - """The Azure SQL Server database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. 
- :type table_name: object - :param azure_sql_table_dataset_schema: The schema name of the Azure SQL - database. Type: string (or Expression with resultType string). - :type azure_sql_table_dataset_schema: object - :param table: The table name of the Azure SQL database. Type: string (or - Expression with resultType string). - :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'azure_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, azure_sql_table_dataset_schema=None, table=None, **kwargs) -> None: - super(AzureSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.azure_sql_table_dataset_schema = azure_sql_table_dataset_schema - self.table = table - self.type = 'AzureSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service.py index 202dd7229b90..b5da68a504f2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service.py @@ -15,8 +15,6 @@ class AzureStorageLinkedService(LinkedService): """The storage account linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class AzureStorageLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param connection_string: The connection string. It is mutually exclusive with sasUri property. 
Type: string, SecureString or @@ -73,11 +71,11 @@ class AzureStorageLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, } - def __init__(self, **kwargs): - super(AzureStorageLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.account_key = kwargs.get('account_key', None) - self.sas_uri = kwargs.get('sas_uri', None) - self.sas_token = kwargs.get('sas_token', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, connection_string=None, account_key=None, sas_uri=None, sas_token=None, encrypted_credential=None): + super(AzureStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.connection_string = connection_string + self.account_key = account_key + self.sas_uri = sas_uri + self.sas_token = sas_token + self.encrypted_credential = encrypted_credential self.type = 'AzureStorage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service_py3.py deleted file mode 100644 index 4fac19b70849..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service_py3.py +++ /dev/null @@ -1,83 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzureStorageLinkedService(LinkedService): - """The storage account linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: The connection string. It is mutually exclusive - with sasUri property. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param account_key: The Azure key vault secret reference of accountKey in - connection string. - :type account_key: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure Storage resource. It is mutually - exclusive with connectionString property. 
Type: string, SecureString or - AzureKeyVaultSecretReference. - :type sas_uri: object - :param sas_token: The Azure key vault secret reference of sasToken in sas - uri. - :type sas_token: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, - 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, account_key=None, sas_uri=None, sas_token=None, encrypted_credential: str=None, **kwargs) -> None: - super(AzureStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.account_key = account_key - self.sas_uri = sas_uri - self.sas_token = sas_token - self.encrypted_credential = encrypted_credential - self.type = 'AzureStorage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset.py index eb8dacbfbb98..d9ebbf0858b7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset.py @@ -15,8 +15,6 @@ class AzureTableDataset(Dataset): """The Azure Table storage dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class AzureTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,10 +39,10 @@ class AzureTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. 
:type type: str - :param table_name: Required. The table name of the Azure Table storage. - Type: string (or Expression with resultType string). + :param table_name: The table name of the Azure Table storage. Type: string + (or Expression with resultType string). :type table_name: object """ @@ -67,7 +65,7 @@ class AzureTableDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) + def __init__(self, linked_service_name, table_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None): + super(AzureTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name self.type = 'AzureTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset_py3.py deleted file mode 100644 index d70a15fdd6f1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset_py3.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class AzureTableDataset(Dataset): - """The Azure Table storage dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: Required. The table name of the Azure Table storage. - Type: string (or Expression with resultType string). 
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'table_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: - super(AzureTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'AzureTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink.py index 3459c9ad3ba1..0a171bc176d3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink.py @@ -15,8 +15,6 @@ class AzureTableSink(CopySink): """A copy activity Azure Table sink. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -38,7 +36,7 @@ class AzureTableSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param azure_table_default_partition_key_value: Azure Table default partition key value. Type: string (or Expression with resultType string). 
@@ -72,10 +70,10 @@ class AzureTableSink(CopySink): 'azure_table_insert_type': {'key': 'azureTableInsertType', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureTableSink, self).__init__(**kwargs) - self.azure_table_default_partition_key_value = kwargs.get('azure_table_default_partition_key_value', None) - self.azure_table_partition_key_name = kwargs.get('azure_table_partition_key_name', None) - self.azure_table_row_key_name = kwargs.get('azure_table_row_key_name', None) - self.azure_table_insert_type = kwargs.get('azure_table_insert_type', None) + def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, azure_table_default_partition_key_value=None, azure_table_partition_key_name=None, azure_table_row_key_name=None, azure_table_insert_type=None): + super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.azure_table_default_partition_key_value = azure_table_default_partition_key_value + self.azure_table_partition_key_name = azure_table_partition_key_name + self.azure_table_row_key_name = azure_table_row_key_name + self.azure_table_insert_type = azure_table_insert_type self.type = 'AzureTableSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink_py3.py deleted file mode 100644 index a15247544879..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink_py3.py +++ /dev/null @@ -1,81 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class AzureTableSink(CopySink): - """A copy activity Azure Table sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). 
- :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param azure_table_default_partition_key_value: Azure Table default - partition key value. Type: string (or Expression with resultType string). - :type azure_table_default_partition_key_value: object - :param azure_table_partition_key_name: Azure Table partition key name. - Type: string (or Expression with resultType string). - :type azure_table_partition_key_name: object - :param azure_table_row_key_name: Azure Table row key name. Type: string - (or Expression with resultType string). - :type azure_table_row_key_name: object - :param azure_table_insert_type: Azure Table insert type. Type: string (or - Expression with resultType string). - :type azure_table_insert_type: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'}, - 'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'}, - 'azure_table_row_key_name': {'key': 'azureTableRowKeyName', 'type': 'object'}, - 'azure_table_insert_type': {'key': 'azureTableInsertType', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, azure_table_default_partition_key_value=None, azure_table_partition_key_name=None, azure_table_row_key_name=None, azure_table_insert_type=None, **kwargs) -> None: - super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.azure_table_default_partition_key_value = azure_table_default_partition_key_value - self.azure_table_partition_key_name = azure_table_partition_key_name - self.azure_table_row_key_name = azure_table_row_key_name - self.azure_table_insert_type = azure_table_insert_type - self.type = 'AzureTableSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source.py index fa7ead73eaa9..74cd88d06fe5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source.py @@ -15,8 +15,6 @@ class AzureTableSource(CopySource): """A copy activity Azure Table source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class AzureTableSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). 
:type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param azure_table_source_query: Azure Table source query. Type: string (or Expression with resultType string). @@ -56,8 +54,8 @@ class AzureTableSource(CopySource): 'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'}, } - def __init__(self, **kwargs): - super(AzureTableSource, self).__init__(**kwargs) - self.azure_table_source_query = kwargs.get('azure_table_source_query', None) - self.azure_table_source_ignore_table_not_found = kwargs.get('azure_table_source_ignore_table_not_found', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, azure_table_source_query=None, azure_table_source_ignore_table_not_found=None): + super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.azure_table_source_query = azure_table_source_query + self.azure_table_source_ignore_table_not_found = azure_table_source_ignore_table_not_found self.type = 'AzureTableSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source_py3.py deleted file mode 100644 index efbac5613219..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source_py3.py +++ /dev/null @@ -1,63 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class AzureTableSource(CopySource): - """A copy activity Azure Table source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param azure_table_source_query: Azure Table source query. Type: string - (or Expression with resultType string). - :type azure_table_source_query: object - :param azure_table_source_ignore_table_not_found: Azure Table source - ignore table not found. Type: boolean (or Expression with resultType - boolean). 
- :type azure_table_source_ignore_table_not_found: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'}, - 'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, azure_table_source_query=None, azure_table_source_ignore_table_not_found=None, **kwargs) -> None: - super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.azure_table_source_query = azure_table_source_query - self.azure_table_source_ignore_table_not_found = azure_table_source_ignore_table_not_found - self.type = 'AzureTableSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service.py index c2a8c2498ea6..b120959ddc85 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service.py @@ -15,8 +15,6 @@ class AzureTableStorageLinkedService(LinkedService): """The azure table storage linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class AzureTableStorageLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param connection_string: The connection string. It is mutually exclusive with sasUri property. 
Type: string, SecureString or @@ -73,11 +71,11 @@ class AzureTableStorageLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, } - def __init__(self, **kwargs): - super(AzureTableStorageLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.account_key = kwargs.get('account_key', None) - self.sas_uri = kwargs.get('sas_uri', None) - self.sas_token = kwargs.get('sas_token', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, connection_string=None, account_key=None, sas_uri=None, sas_token=None, encrypted_credential=None): + super(AzureTableStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.connection_string = connection_string + self.account_key = account_key + self.sas_uri = sas_uri + self.sas_token = sas_token + self.encrypted_credential = encrypted_credential self.type = 'AzureTableStorage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service_py3.py deleted file mode 100644 index 8d4e62c4f3e6..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service_py3.py +++ /dev/null @@ -1,83 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class AzureTableStorageLinkedService(LinkedService): - """The azure table storage linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: The connection string. It is mutually exclusive - with sasUri property. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param account_key: The Azure key vault secret reference of accountKey in - connection string. - :type account_key: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure Storage resource. It is mutually - exclusive with connectionString property. 
Type: string, SecureString or - AzureKeyVaultSecretReference. - :type sas_uri: object - :param sas_token: The Azure key vault secret reference of sasToken in sas - uri. - :type sas_token: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, - 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, account_key=None, sas_uri=None, sas_token=None, encrypted_credential: str=None, **kwargs) -> None: - super(AzureTableStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.account_key = account_key - self.sas_uri = sas_uri - self.sas_token = sas_token - self.encrypted_credential = encrypted_credential - self.type = 'AzureTableStorage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset.py index 5f0f8ef96696..8a6b1456862e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset.py @@ -15,8 +15,6 @@ class BinaryDataset(Dataset): """Binary dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class BinaryDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,9 +39,9 @@ class BinaryDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param location: Required. 
The location of the Binary storage. + :param location: The location of the Binary storage. :type location: ~azure.mgmt.datafactory.models.DatasetLocation :param compression: The data compression method used for the binary dataset. @@ -70,8 +68,8 @@ class BinaryDataset(Dataset): 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } - def __init__(self, **kwargs): - super(BinaryDataset, self).__init__(**kwargs) - self.location = kwargs.get('location', None) - self.compression = kwargs.get('compression', None) + def __init__(self, linked_service_name, location, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, compression=None): + super(BinaryDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.location = location + self.compression = compression self.type = 'Binary' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset_py3.py deleted file mode 100644 index 7d26b216fd7a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset_py3.py +++ /dev/null @@ -1,77 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class BinaryDataset(Dataset): - """Binary dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param location: Required. The location of the Binary storage. - :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param compression: The data compression method used for the binary - dataset. 
- :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'location': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, compression=None, **kwargs) -> None: - super(BinaryDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.location = location - self.compression = compression - self.type = 'Binary' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink.py index b991bfee53c7..4c8c886fdded 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink.py @@ -15,8 +15,6 @@ class BinarySink(CopySink): """A copy activity Binary sink. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -38,7 +36,7 @@ class BinarySink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param store_settings: Binary store settings. 
:type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings @@ -59,7 +57,7 @@ class BinarySink(CopySink): 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, } - def __init__(self, **kwargs): - super(BinarySink, self).__init__(**kwargs) - self.store_settings = kwargs.get('store_settings', None) + def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None): + super(BinarySink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.store_settings = store_settings self.type = 'BinarySink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink_py3.py deleted file mode 100644 index 80421d161aed..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink_py3.py +++ /dev/null @@ -1,65 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class BinarySink(CopySink): - """A copy activity Binary sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: Binary store settings. 
- :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: - super(BinarySink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.store_settings = store_settings - self.type = 'BinarySink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source.py index 48e78e7d24bf..de4d6d964e12 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source.py @@ -15,8 +15,6 @@ class BinarySource(CopySource): """A copy activity Binary source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class BinarySource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param store_settings: Binary store settings. 
:type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings @@ -50,7 +48,7 @@ class BinarySource(CopySource): 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, } - def __init__(self, **kwargs): - super(BinarySource, self).__init__(**kwargs) - self.store_settings = kwargs.get('store_settings', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None): + super(BinarySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.store_settings = store_settings self.type = 'BinarySource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source_py3.py deleted file mode 100644 index aa9a9f1412ab..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source_py3.py +++ /dev/null @@ -1,56 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class BinarySource(CopySource): - """A copy activity Binary source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: Binary store settings. 
- :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: - super(BinarySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.store_settings = store_settings - self.type = 'BinarySource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger.py index 673d34167fed..9caa7f29f6d2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger.py @@ -18,8 +18,6 @@ class BlobEventsTrigger(MultiplePipelineTrigger): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -33,7 +31,7 @@ class BlobEventsTrigger(MultiplePipelineTrigger): :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param pipelines: Pipelines that need to be started. :type pipelines: @@ -49,10 +47,9 @@ class BlobEventsTrigger(MultiplePipelineTrigger): fire the trigger for blobs named boxes in a december folder. At least one of these must be provided: blobPathBeginsWith, blobPathEndsWith. :type blob_path_ends_with: str - :param events: Required. The type of events that cause this trigger to - fire. + :param events: The type of events that cause this trigger to fire. :type events: list[str or ~azure.mgmt.datafactory.models.BlobEventTypes] - :param scope: Required. The ARM resource ID of the Storage Account. + :param scope: The ARM resource ID of the Storage Account. 
:type scope: str """ @@ -76,10 +73,10 @@ class BlobEventsTrigger(MultiplePipelineTrigger): 'scope': {'key': 'typeProperties.scope', 'type': 'str'}, } - def __init__(self, **kwargs): - super(BlobEventsTrigger, self).__init__(**kwargs) - self.blob_path_begins_with = kwargs.get('blob_path_begins_with', None) - self.blob_path_ends_with = kwargs.get('blob_path_ends_with', None) - self.events = kwargs.get('events', None) - self.scope = kwargs.get('scope', None) + def __init__(self, events, scope, additional_properties=None, description=None, annotations=None, pipelines=None, blob_path_begins_with=None, blob_path_ends_with=None): + super(BlobEventsTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines) + self.blob_path_begins_with = blob_path_begins_with + self.blob_path_ends_with = blob_path_ends_with + self.events = events + self.scope = scope self.type = 'BlobEventsTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger_py3.py deleted file mode 100644 index fb65a420a2cd..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger_py3.py +++ /dev/null @@ -1,85 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .multiple_pipeline_trigger_py3 import MultiplePipelineTrigger - - -class BlobEventsTrigger(MultiplePipelineTrigger): - """Trigger that runs every time a Blob event occurs. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when - Start/Stop APIs are called on the Trigger. Possible values include: - 'Started', 'Stopped', 'Disabled' - :vartype runtime_state: str or - ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the - trigger. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param pipelines: Pipelines that need to be started. - :type pipelines: - list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - :param blob_path_begins_with: The blob path must begin with the pattern - provided for trigger to fire. For example, '/records/blobs/december/' will - only fire the trigger for blobs in the december folder under the records - container. At least one of these must be provided: blobPathBeginsWith, - blobPathEndsWith. - :type blob_path_begins_with: str - :param blob_path_ends_with: The blob path must end with the pattern - provided for trigger to fire. 
For example, 'december/boxes.csv' will only - fire the trigger for blobs named boxes in a december folder. At least one - of these must be provided: blobPathBeginsWith, blobPathEndsWith. - :type blob_path_ends_with: str - :param events: Required. The type of events that cause this trigger to - fire. - :type events: list[str or ~azure.mgmt.datafactory.models.BlobEventTypes] - :param scope: Required. The ARM resource ID of the Storage Account. - :type scope: str - """ - - _validation = { - 'runtime_state': {'readonly': True}, - 'type': {'required': True}, - 'events': {'required': True}, - 'scope': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, - 'blob_path_begins_with': {'key': 'typeProperties.blobPathBeginsWith', 'type': 'str'}, - 'blob_path_ends_with': {'key': 'typeProperties.blobPathEndsWith', 'type': 'str'}, - 'events': {'key': 'typeProperties.events', 'type': '[str]'}, - 'scope': {'key': 'typeProperties.scope', 'type': 'str'}, - } - - def __init__(self, *, events, scope: str, additional_properties=None, description: str=None, annotations=None, pipelines=None, blob_path_begins_with: str=None, blob_path_ends_with: str=None, **kwargs) -> None: - super(BlobEventsTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) - self.blob_path_begins_with = blob_path_begins_with - self.blob_path_ends_with = blob_path_ends_with - self.events = events - self.scope = scope - self.type = 'BlobEventsTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py index 284e0fcecde5..df4ac9146607 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py @@ -15,8 +15,6 @@ class BlobSink(CopySink): """A copy activity Azure Blob sink. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -38,7 +36,7 @@ class BlobSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param blob_writer_overwrite_files: Blob writer overwrite files. Type: boolean (or Expression with resultType boolean). 
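
Usage sketch (reviewer note, not part of the generated diff): with the *_py3.py modules deleted, the regenerated Python 2 modules now expose the explicit signatures directly. A minimal call for the Binary sink/source shown above, with placeholder values, looks like this.

from azure.mgmt.datafactory.models import BinarySink, BinarySource

# Every parameter is an optional keyword argument in the new signature.
sink = BinarySink(sink_retry_count=3, sink_retry_wait='00:00:30')
source = BinarySource(max_concurrent_connections=4)
# The polymorphic discriminator is still fixed by the constructor itself.
assert sink.type == 'BinarySink' and source.type == 'BinarySource'
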
@@ -71,10 +69,10 @@ class BlobSink(CopySink): 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } - def __init__(self, **kwargs): - super(BlobSink, self).__init__(**kwargs) - self.blob_writer_overwrite_files = kwargs.get('blob_writer_overwrite_files', None) - self.blob_writer_date_time_format = kwargs.get('blob_writer_date_time_format', None) - self.blob_writer_add_header = kwargs.get('blob_writer_add_header', None) - self.copy_behavior = kwargs.get('copy_behavior', None) + def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, blob_writer_overwrite_files=None, blob_writer_date_time_format=None, blob_writer_add_header=None, copy_behavior=None): + super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.blob_writer_overwrite_files = blob_writer_overwrite_files + self.blob_writer_date_time_format = blob_writer_date_time_format + self.blob_writer_add_header = blob_writer_add_header + self.copy_behavior = copy_behavior self.type = 'BlobSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py deleted file mode 100644 index 370acc72e017..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py +++ /dev/null @@ -1,80 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class BlobSink(CopySink): - """A copy activity Azure Blob sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param blob_writer_overwrite_files: Blob writer overwrite files. 
Type: - boolean (or Expression with resultType boolean). - :type blob_writer_overwrite_files: object - :param blob_writer_date_time_format: Blob writer date time format. Type: - string (or Expression with resultType string). - :type blob_writer_date_time_format: object - :param blob_writer_add_header: Blob writer add header. Type: boolean (or - Expression with resultType boolean). - :type blob_writer_add_header: object - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, - 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, - 'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, blob_writer_overwrite_files=None, blob_writer_date_time_format=None, blob_writer_add_header=None, copy_behavior=None, **kwargs) -> None: - super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.blob_writer_overwrite_files = blob_writer_overwrite_files - self.blob_writer_date_time_format = blob_writer_date_time_format - self.blob_writer_add_header = blob_writer_add_header - self.copy_behavior = copy_behavior - self.type = 'BlobSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source.py index ab4313a890cb..0e2ef494a7b3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source.py @@ -15,8 +15,6 @@ class BlobSource(CopySource): """A copy activity Azure Blob source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class BlobSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType boolean). 
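
Usage sketch for the regenerated BlobSink above (placeholder values; 'PreserveHierarchy' is an assumed copy-behavior value, not taken from this diff):

from azure.mgmt.datafactory.models import BlobSink

sink = BlobSink(
    blob_writer_overwrite_files=True,   # boolean (or Expression) property
    blob_writer_add_header=True,
    copy_behavior='PreserveHierarchy',
)
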
@@ -60,9 +58,9 @@ class BlobSource(CopySource): 'recursive': {'key': 'recursive', 'type': 'object'}, } - def __init__(self, **kwargs): - super(BlobSource, self).__init__(**kwargs) - self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None) - self.skip_header_line_count = kwargs.get('skip_header_line_count', None) - self.recursive = kwargs.get('recursive', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, treat_empty_as_null=None, skip_header_line_count=None, recursive=None): + super(BlobSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.treat_empty_as_null = treat_empty_as_null + self.skip_header_line_count = skip_header_line_count + self.recursive = recursive self.type = 'BlobSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source_py3.py deleted file mode 100644 index 78d90cc61e13..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source_py3.py +++ /dev/null @@ -1,68 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class BlobSource(CopySource): - """A copy activity Azure Blob source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param treat_empty_as_null: Treat empty as null. Type: boolean (or - Expression with resultType boolean). - :type treat_empty_as_null: object - :param skip_header_line_count: Number of header lines to skip from each - blob. Type: integer (or Expression with resultType integer). - :type skip_header_line_count: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). 
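
Usage sketch for the regenerated BlobSource above (placeholder values):

from azure.mgmt.datafactory.models import BlobSource

source = BlobSource(
    treat_empty_as_null=True,
    skip_header_line_count=1,  # skip one header line per blob
    recursive=False,           # defaults to true when omitted
)
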
- :type recursive: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, - 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, treat_empty_as_null=None, skip_header_line_count=None, recursive=None, **kwargs) -> None: - super(BlobSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.treat_empty_as_null = treat_empty_as_null - self.skip_header_line_count = skip_header_line_count - self.recursive = recursive - self.type = 'BlobSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger.py index 4fd5b5c17100..981fc4aa3b66 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger.py @@ -18,8 +18,6 @@ class BlobTrigger(MultiplePipelineTrigger): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -33,19 +31,18 @@ class BlobTrigger(MultiplePipelineTrigger): :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param pipelines: Pipelines that need to be started. :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - :param folder_path: Required. The path of the container/folder that will - trigger the pipeline. + :param folder_path: The path of the container/folder that will trigger the + pipeline. :type folder_path: str - :param max_concurrency: Required. The max number of parallel files to - handle when it is triggered. + :param max_concurrency: The max number of parallel files to handle when it + is triggered. :type max_concurrency: int - :param linked_service: Required. The Azure Storage linked service - reference. + :param linked_service: The Azure Storage linked service reference. 
:type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference """ @@ -70,9 +67,9 @@ class BlobTrigger(MultiplePipelineTrigger): 'linked_service': {'key': 'typeProperties.linkedService', 'type': 'LinkedServiceReference'}, } - def __init__(self, **kwargs): - super(BlobTrigger, self).__init__(**kwargs) - self.folder_path = kwargs.get('folder_path', None) - self.max_concurrency = kwargs.get('max_concurrency', None) - self.linked_service = kwargs.get('linked_service', None) + def __init__(self, folder_path, max_concurrency, linked_service, additional_properties=None, description=None, annotations=None, pipelines=None): + super(BlobTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines) + self.folder_path = folder_path + self.max_concurrency = max_concurrency + self.linked_service = linked_service self.type = 'BlobTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger_py3.py deleted file mode 100644 index cccffd881bfb..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger_py3.py +++ /dev/null @@ -1,78 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .multiple_pipeline_trigger_py3 import MultiplePipelineTrigger - - -class BlobTrigger(MultiplePipelineTrigger): - """Trigger that runs every time the selected Blob container changes. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when - Start/Stop APIs are called on the Trigger. Possible values include: - 'Started', 'Stopped', 'Disabled' - :vartype runtime_state: str or - ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the - trigger. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param pipelines: Pipelines that need to be started. - :type pipelines: - list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - :param folder_path: Required. The path of the container/folder that will - trigger the pipeline. - :type folder_path: str - :param max_concurrency: Required. The max number of parallel files to - handle when it is triggered. - :type max_concurrency: int - :param linked_service: Required. The Azure Storage linked service - reference. 
- :type linked_service: - ~azure.mgmt.datafactory.models.LinkedServiceReference - """ - - _validation = { - 'runtime_state': {'readonly': True}, - 'type': {'required': True}, - 'folder_path': {'required': True}, - 'max_concurrency': {'required': True}, - 'linked_service': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'str'}, - 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, - 'linked_service': {'key': 'typeProperties.linkedService', 'type': 'LinkedServiceReference'}, - } - - def __init__(self, *, folder_path: str, max_concurrency: int, linked_service, additional_properties=None, description: str=None, annotations=None, pipelines=None, **kwargs) -> None: - super(BlobTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) - self.folder_path = folder_path - self.max_concurrency = max_concurrency - self.linked_service = linked_service - self.type = 'BlobTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service.py index ebba2be42028..ea4b378af8fc 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service.py @@ -15,8 +15,6 @@ class CassandraLinkedService(LinkedService): """Linked service for Cassandra data source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,10 +29,10 @@ class CassandraLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param host: Required. Host name for connection. Type: string (or - Expression with resultType string). + :param host: Host name for connection. Type: string (or Expression with + resultType string). :type host: object :param authentication_type: AuthenticationType to be used for connection. Type: string (or Expression with resultType string). 
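
Usage sketch for the two trigger models above: required properties (events/scope, folder_path/max_concurrency/linked_service) are now leading parameters rather than **kwargs entries. The resource IDs and names below are placeholders.

from azure.mgmt.datafactory.models import (
    BlobEventsTrigger, BlobTrigger, LinkedServiceReference)

events_trigger = BlobEventsTrigger(
    events=['Microsoft.Storage.BlobCreated'],
    scope='/subscriptions/<sub-id>/resourceGroups/<rg>/providers/'
          'Microsoft.Storage/storageAccounts/<account>',
    blob_path_begins_with='/records/blobs/december/',
)
blob_trigger = BlobTrigger(
    folder_path='records/december',
    max_concurrency=10,
    linked_service=LinkedServiceReference(reference_name='AzureStorageLinkedService'),
)
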
@@ -73,12 +71,12 @@ class CassandraLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(CassandraLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.port = kwargs.get('port', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, host, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, authentication_type=None, port=None, username=None, password=None, encrypted_credential=None): + super(CassandraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.host = host + self.authentication_type = authentication_type + self.port = port + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential self.type = 'Cassandra' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service_py3.py deleted file mode 100644 index f22f303cc61d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service_py3.py +++ /dev/null @@ -1,84 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class CassandraLinkedService(LinkedService): - """Linked service for Cassandra data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. Host name for connection. Type: string (or - Expression with resultType string). - :type host: object - :param authentication_type: AuthenticationType to be used for connection. - Type: string (or Expression with resultType string). - :type authentication_type: object - :param port: The port for the connection. Type: integer (or Expression - with resultType integer). - :type port: object - :param username: Username for authentication. Type: string (or Expression - with resultType string). 
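
Usage sketch for the regenerated CassandraLinkedService above; host is the only required parameter, now positional. The SecureString secret and connection values are placeholders.

from azure.mgmt.datafactory.models import CassandraLinkedService, SecureString

linked_service = CassandraLinkedService(
    host='cassandra.contoso.com',             # required, now positional
    port=9042,
    username='admin',
    password=SecureString(value='<secret>'),  # any SecretBase works here
)
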
- :type username: object - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, port=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(CassandraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.authentication_type = authentication_type - self.port = port - self.username = username - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'Cassandra' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py index e7ba96c18682..28cccb9c05fe 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py @@ -15,8 +15,6 @@ class CassandraSource(CopySource): """A copy activity source for a Cassandra database. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class CassandraSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: Database query. Should be a SQL-92 query expression or Cassandra Query Language (CQL) command. 
Type: string (or Expression with @@ -63,8 +61,8 @@ class CassandraSource(CopySource): 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, } - def __init__(self, **kwargs): - super(CassandraSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.consistency_level = kwargs.get('consistency_level', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, consistency_level=None): + super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query + self.consistency_level = consistency_level self.type = 'CassandraSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py deleted file mode 100644 index bd95d158b868..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py +++ /dev/null @@ -1,70 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class CassandraSource(CopySource): - """A copy activity source for a Cassandra database. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Should be a SQL-92 query expression or - Cassandra Query Language (CQL) command. Type: string (or Expression with - resultType string). - :type query: object - :param consistency_level: The consistency level specifies how many - Cassandra servers must respond to a read request before returning data to - the client application. Cassandra checks the specified number of Cassandra - servers for data to satisfy the read request. Must be one of - cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is - case-insensitive. 
Possible values include: 'ALL', 'EACH_QUORUM', 'QUORUM', - 'LOCAL_QUORUM', 'ONE', 'TWO', 'THREE', 'LOCAL_ONE', 'SERIAL', - 'LOCAL_SERIAL' - :type consistency_level: str or - ~azure.mgmt.datafactory.models.CassandraSourceReadConsistencyLevels - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, consistency_level=None, **kwargs) -> None: - super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.consistency_level = consistency_level - self.type = 'CassandraSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset.py index b89c324fd4d4..b1f9bb81e662 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset.py @@ -15,8 +15,6 @@ class CassandraTableDataset(Dataset): """The Cassandra database dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class CassandraTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class CassandraTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: The table name of the Cassandra database. Type: string (or Expression with resultType string). 
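
Usage sketch for the regenerated CassandraSource above (placeholder CQL; the consistency level must be one of the enum values listed in the docstring):

from azure.mgmt.datafactory.models import CassandraSource

source = CassandraSource(
    query='SELECT * FROM sales.users',
    consistency_level='LOCAL_QUORUM',  # CassandraSourceReadConsistencyLevels
)
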
@@ -70,8 +68,8 @@ class CassandraTableDataset(Dataset): 'keyspace': {'key': 'typeProperties.keyspace', 'type': 'object'}, } - def __init__(self, **kwargs): - super(CassandraTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.keyspace = kwargs.get('keyspace', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, keyspace=None): + super(CassandraTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name + self.keyspace = keyspace self.type = 'CassandraTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset_py3.py deleted file mode 100644 index 256358ce50cb..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset_py3.py +++ /dev/null @@ -1,77 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class CassandraTableDataset(Dataset): - """The Cassandra database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name of the Cassandra database. Type: string - (or Expression with resultType string). - :type table_name: object - :param keyspace: The keyspace of the Cassandra database. Type: string (or - Expression with resultType string). 
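
Usage sketch for the regenerated CassandraTableDataset above; linked_service_name is the one required parameter (names below are placeholders):

from azure.mgmt.datafactory.models import (
    CassandraTableDataset, LinkedServiceReference)

dataset = CassandraTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='CassandraLS'),
    table_name='users',   # Expression-typed, so plain strings are accepted
    keyspace='sales',
)
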
- :type keyspace: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'keyspace': {'key': 'typeProperties.keyspace', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, keyspace=None, **kwargs) -> None: - super(CassandraTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.keyspace = keyspace - self.type = 'CassandraTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset.py index c7cd4c315852..eb89a6b5b83f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset.py @@ -15,8 +15,6 @@ class CommonDataServiceForAppsEntityDataset(Dataset): """The Common Data Service for Apps entity dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class CommonDataServiceForAppsEntityDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class CommonDataServiceForAppsEntityDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param entity_name: The logical name of the entity. Type: string (or Expression with resultType string). 
@@ -66,7 +64,7 @@ class CommonDataServiceForAppsEntityDataset(Dataset): 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(CommonDataServiceForAppsEntityDataset, self).__init__(**kwargs) - self.entity_name = kwargs.get('entity_name', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, entity_name=None): + super(CommonDataServiceForAppsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.entity_name = entity_name self.type = 'CommonDataServiceForAppsEntity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset_py3.py deleted file mode 100644 index 050bdb836963..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class CommonDataServiceForAppsEntityDataset(Dataset): - """The Common Data Service for Apps entity dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param entity_name: The logical name of the entity. Type: string (or - Expression with resultType string). 
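
Usage sketch for the regenerated CommonDataServiceForAppsEntityDataset above (placeholder names):

from azure.mgmt.datafactory.models import (
    CommonDataServiceForAppsEntityDataset, LinkedServiceReference)

dataset = CommonDataServiceForAppsEntityDataset(
    linked_service_name=LinkedServiceReference(reference_name='CdsForAppsLS'),
    entity_name='account',  # logical entity name
)
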
- :type entity_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, entity_name=None, **kwargs) -> None: - super(CommonDataServiceForAppsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.entity_name = entity_name - self.type = 'CommonDataServiceForAppsEntity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service.py index bbc8b7a0de65..5629ce8fb018 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service.py @@ -15,8 +15,6 @@ class CommonDataServiceForAppsLinkedService(LinkedService): """Common Data Service for Apps linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,13 +29,13 @@ class CommonDataServiceForAppsLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param deployment_type: Required. The deployment type of the Common Data - Service for Apps instance. 'Online' for Common Data Service for Apps - Online and 'OnPremisesWithIfd' for Common Data Service for Apps - on-premises with Ifd. Type: string (or Expression with resultType string). - Possible values include: 'Online', 'OnPremisesWithIfd' + :param deployment_type: The deployment type of the Common Data Service for + Apps instance. 'Online' for Common Data Service for Apps Online and + 'OnPremisesWithIfd' for Common Data Service for Apps on-premises with Ifd. + Type: string (or Expression with resultType string). Possible values + include: 'Online', 'OnPremisesWithIfd' :type deployment_type: str or ~azure.mgmt.datafactory.models.DynamicsDeploymentType :param host_name: The host name of the on-premises Common Data Service for @@ -58,14 +56,14 @@ class CommonDataServiceForAppsLinkedService(LinkedService): associated with the user. Type: string (or Expression with resultType string). 
:type organization_name: object - :param authentication_type: Required. The authentication type to connect - to Common Data Service for Apps server. 'Office365' for online scenario, - 'Ifd' for on-premises with Ifd scenario. Type: string (or Expression with - resultType string). Possible values include: 'Office365', 'Ifd' + :param authentication_type: The authentication type to connect to Common + Data Service for Apps server. 'Office365' for online scenario, 'Ifd' for + on-premises with Ifd scenario. Type: string (or Expression with resultType + string). Possible values include: 'Office365', 'Ifd' :type authentication_type: str or ~azure.mgmt.datafactory.models.DynamicsAuthenticationType - :param username: Required. User name to access the Common Data Service for - Apps instance. Type: string (or Expression with resultType string). + :param username: User name to access the Common Data Service for Apps + instance. Type: string (or Expression with resultType string). :type username: object :param password: Password to access the Common Data Service for Apps instance. @@ -101,15 +99,15 @@ class CommonDataServiceForAppsLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(CommonDataServiceForAppsLinkedService, self).__init__(**kwargs) - self.deployment_type = kwargs.get('deployment_type', None) - self.host_name = kwargs.get('host_name', None) - self.port = kwargs.get('port', None) - self.service_uri = kwargs.get('service_uri', None) - self.organization_name = kwargs.get('organization_name', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, deployment_type, authentication_type, username, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, password=None, encrypted_credential=None): + super(CommonDataServiceForAppsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.deployment_type = deployment_type + self.host_name = host_name + self.port = port + self.service_uri = service_uri + self.organization_name = organization_name + self.authentication_type = authentication_type + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential self.type = 'CommonDataServiceForApps' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service_py3.py deleted file mode 100644 index 1c4897c09868..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service_py3.py +++ /dev/null @@ -1,115 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. 
-# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class CommonDataServiceForAppsLinkedService(LinkedService): - """Common Data Service for Apps linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param deployment_type: Required. The deployment type of the Common Data - Service for Apps instance. 'Online' for Common Data Service for Apps - Online and 'OnPremisesWithIfd' for Common Data Service for Apps - on-premises with Ifd. Type: string (or Expression with resultType string). - Possible values include: 'Online', 'OnPremisesWithIfd' - :type deployment_type: str or - ~azure.mgmt.datafactory.models.DynamicsDeploymentType - :param host_name: The host name of the on-premises Common Data Service for - Apps server. The property is required for on-prem and not allowed for - online. Type: string (or Expression with resultType string). - :type host_name: object - :param port: The port of on-premises Common Data Service for Apps server. - The property is required for on-prem and not allowed for online. Default - is 443. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: object - :param service_uri: The URL to the Microsoft Common Data Service for Apps - server. The property is required for on-line and not allowed for on-prem. - Type: string (or Expression with resultType string). - :type service_uri: object - :param organization_name: The organization name of the Common Data Service - for Apps instance. The property is required for on-prem and required for - online when there are more than one Common Data Service for Apps instances - associated with the user. Type: string (or Expression with resultType - string). - :type organization_name: object - :param authentication_type: Required. The authentication type to connect - to Common Data Service for Apps server. 'Office365' for online scenario, - 'Ifd' for on-premises with Ifd scenario. Type: string (or Expression with - resultType string). Possible values include: 'Office365', 'Ifd' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.DynamicsAuthenticationType - :param username: Required. User name to access the Common Data Service for - Apps instance. Type: string (or Expression with resultType string). - :type username: object - :param password: Password to access the Common Data Service for Apps - instance. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'deployment_type': {'required': True}, - 'authentication_type': {'required': True}, - 'username': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, - 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, - 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, deployment_type, authentication_type, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(CommonDataServiceForAppsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.deployment_type = deployment_type - self.host_name = host_name - self.port = port - self.service_uri = service_uri - self.organization_name = organization_name - self.authentication_type = authentication_type - self.username = username - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'CommonDataServiceForApps' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink.py index 0df48841cccc..675861c7ce81 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink.py @@ -18,8 +18,6 @@ class CommonDataServiceForAppsSink(CopySink): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -41,10 +39,10 @@ class CommonDataServiceForAppsSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :ivar write_behavior: Required. The write behavior for the operation. - Default value: "Upsert" . + :ivar write_behavior: The write behavior for the operation. Default value: + "Upsert" . 
:vartype write_behavior: str :param ignore_null_values: The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. @@ -71,7 +69,7 @@ class CommonDataServiceForAppsSink(CopySink): write_behavior = "Upsert" - def __init__(self, **kwargs): - super(CommonDataServiceForAppsSink, self).__init__(**kwargs) - self.ignore_null_values = kwargs.get('ignore_null_values', None) + def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None): + super(CommonDataServiceForAppsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.ignore_null_values = ignore_null_values self.type = 'CommonDataServiceForAppsSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink_py3.py deleted file mode 100644 index 80f85e6d5809..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink_py3.py +++ /dev/null @@ -1,77 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class CommonDataServiceForAppsSink(CopySink): - """A copy activity Common Data Service for Apps sink. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :ivar write_behavior: Required. The write behavior for the operation. - Default value: "Upsert" . 
- :vartype write_behavior: str - :param ignore_null_values: The flag indicating whether to ignore null - values from input dataset (except key fields) during write operation. - Default is false. Type: boolean (or Expression with resultType boolean). - :type ignore_null_values: object - """ - - _validation = { - 'type': {'required': True}, - 'write_behavior': {'required': True, 'constant': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, - } - - write_behavior = "Upsert" - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None: - super(CommonDataServiceForAppsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.ignore_null_values = ignore_null_values - self.type = 'CommonDataServiceForAppsSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source.py index 13d2a6b921bb..16ffa2e59da7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source.py @@ -15,8 +15,6 @@ class CommonDataServiceForAppsSource(CopySource): """A copy activity Common Data Service for Apps source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class CommonDataServiceForAppsSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: FetchXML is a proprietary query language that is used in Microsoft Common Data Service for Apps (online & on-premises). 
Type: @@ -52,7 +50,7 @@ class CommonDataServiceForAppsSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(CommonDataServiceForAppsSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(CommonDataServiceForAppsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'CommonDataServiceForAppsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source_py3.py deleted file mode 100644 index 713db90f9623..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source_py3.py +++ /dev/null @@ -1,58 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class CommonDataServiceForAppsSource(CopySource): - """A copy activity Common Data Service for Apps source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: FetchXML is a proprietary query language that is used in - Microsoft Common Data Service for Apps (online & on-premises). Type: - string (or Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(CommonDataServiceForAppsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'CommonDataServiceForAppsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service.py index 04179d0d1f53..9acb7cbe90f4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service.py @@ -15,8 +15,6 @@ class ConcurLinkedService(LinkedService): """Concur Service linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,13 +29,11 @@ class ConcurLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param client_id: Required. Application client_id supplied by Concur App - Management. + :param client_id: Application client_id supplied by Concur App Management. :type client_id: object - :param username: Required. The user name that you use to access Concur - Service. + :param username: The user name that you use to access Concur Service. :type username: object :param password: The password corresponding to the user name that you provided in the username field. 
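Once the _py3 variants above are folded in, the merged constructors take ordinary keyword arguments on both Python 2 and 3. A minimal sketch of building the Common Data Service for Apps source and sink shown above (the FetchXML string is an invented placeholder, not part of this patch):

    from azure.mgmt.datafactory.models import (
        CommonDataServiceForAppsSink,
        CommonDataServiceForAppsSource,
    )

    # 'query' is typed as object, so either a FetchXML string or an ADF
    # Expression dict is accepted; this literal is illustrative only.
    source = CommonDataServiceForAppsSource(
        query="<fetch top='10'><entity name='account' /></fetch>",
    )

    # write_behavior is a class constant ("Upsert"); only the optional
    # flags are settable.
    sink = CommonDataServiceForAppsSink(ignore_null_values=True)
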
@@ -80,13 +76,13 @@ class ConcurLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(ConcurLinkedService, self).__init__(**kwargs) - self.client_id = kwargs.get('client_id', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, client_id, username, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, password=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None): + super(ConcurLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.client_id = client_id + self.username = username + self.password = password + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential self.type = 'Concur' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service_py3.py deleted file mode 100644 index 4411db6d2856..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service_py3.py +++ /dev/null @@ -1,92 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class ConcurLinkedService(LinkedService): - """Concur Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param client_id: Required. Application client_id supplied by Concur App - Management. - :type client_id: object - :param username: Required. The user name that you use to access Concur - Service. 
- :type username: object - :param password: The password corresponding to the user name that you - provided in the username field. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'client_id': {'required': True}, - 'username': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, client_id, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(ConcurLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.client_id = client_id - self.username = username - self.password = password - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'Concur' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset.py index e2595f9d8aff..12ae8d1e6b70 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset.py @@ -15,8 +15,6 @@ class ConcurObjectDataset(Dataset): """Concur Service dataset. - All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class ConcurObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class ConcurObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -66,7 +64,7 @@ class ConcurObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(ConcurObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): + super(ConcurObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name self.type = 'ConcurObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset_py3.py deleted file mode 100644 index 9543a6395a32..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class ConcurObjectDataset(Dataset): - """Concur Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(ConcurObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'ConcurObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source.py index 11ae557c0cda..7537b1f752c7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source.py @@ -15,8 +15,6 @@ class ConcurSource(CopySource): """A copy activity Concur Service source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class ConcurSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
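For the Concur models above, client_id and username remain required; they simply become plain positional parameters instead of keyword-only ones. A short wiring sketch under that reading; the credential values and reference names are invented placeholders:

    from azure.mgmt.datafactory.models import (
        ConcurLinkedService,
        ConcurObjectDataset,
        LinkedServiceReference,
        SecureString,
    )

    # Required parameters first; password is any SecretBase, here an
    # inline SecureString (placeholder value).
    linked_service = ConcurLinkedService(
        client_id='my-concur-app-id',
        username='user@example.com',
        password=SecureString(value='<secret>'),
    )

    # The dataset only requires the linked service reference.
    dataset = ConcurObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name='ConcurLS'),
        table_name='Expenses',
    )
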
@@ -51,7 +49,7 @@ class ConcurSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(ConcurSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'ConcurSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source_py3.py deleted file mode 100644 index ac8ae8fb5a91..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class ConcurSource(CopySource): - """A copy activity Concur Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'ConcurSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity.py index 2242bc36beb2..323767f25a9f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity.py @@ -20,12 +20,10 @@ class ControlActivity(Activity): SetVariableActivity, FilterActivity, ValidationActivity, UntilActivity, WaitActivity, ForEachActivity, IfConditionActivity, ExecutePipelineActivity - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Activity name. :type name: str :param description: Activity description. :type description: str @@ -33,7 +31,7 @@ class ControlActivity(Activity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. 
:type type: str """ @@ -42,19 +40,10 @@ class ControlActivity(Activity): 'type': {'required': True}, } - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - } - _subtype_map = { 'type': {'WebHook': 'WebHookActivity', 'AppendVariable': 'AppendVariableActivity', 'SetVariable': 'SetVariableActivity', 'Filter': 'FilterActivity', 'Validation': 'ValidationActivity', 'Until': 'UntilActivity', 'Wait': 'WaitActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'ExecutePipeline': 'ExecutePipelineActivity'} } - def __init__(self, **kwargs): - super(ControlActivity, self).__init__(**kwargs) + def __init__(self, name, additional_properties=None, description=None, depends_on=None, user_properties=None): + super(ControlActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties) self.type = 'Container' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity_py3.py deleted file mode 100644 index 0aabd99d741f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity_py3.py +++ /dev/null @@ -1,60 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .activity_py3 import Activity - - -class ControlActivity(Activity): - """Base class for all control activities like IfCondition, ForEach , Until. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: WebHookActivity, AppendVariableActivity, - SetVariableActivity, FilterActivity, ValidationActivity, UntilActivity, - WaitActivity, ForEachActivity, IfConditionActivity, ExecutePipelineActivity - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'WebHook': 'WebHookActivity', 'AppendVariable': 'AppendVariableActivity', 'SetVariable': 'SetVariableActivity', 'Filter': 'FilterActivity', 'Validation': 'ValidationActivity', 'Until': 'UntilActivity', 'Wait': 'WaitActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'ExecutePipeline': 'ExecutePipelineActivity'} - } - - def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, **kwargs) -> None: - super(ControlActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type = 'Container' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py index 2e7c00d551ba..4c2223cf07dc 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py @@ -15,12 +15,10 @@ class CopyActivity(ExecutionActivity): """Copy activity. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Activity name. :type name: str :param description: Activity description. :type description: str @@ -28,16 +26,16 @@ class CopyActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param source: Required. Copy activity source. + :param source: Copy activity source. :type source: ~azure.mgmt.datafactory.models.CopySource - :param sink: Required. Copy activity sink. + :param sink: Copy activity sink. :type sink: ~azure.mgmt.datafactory.models.CopySink :param translator: Copy activity translator. If not specified, tabular translator is used. 
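CopyActivity appears to keep name, source and sink required in _validation even though the docstring no longer flags them. A minimal construction sketch under that assumption; the activity, dataset and query names are invented placeholders:

    from azure.mgmt.datafactory.models import (
        BlobSink,
        ConcurSource,
        CopyActivity,
        DatasetReference,
    )

    activity = CopyActivity(
        name='CopyConcurToBlob',
        source=ConcurSource(query='SELECT * FROM Expenses'),  # placeholder query
        sink=BlobSink(),
        inputs=[DatasetReference(reference_name='ConcurDataset')],
        outputs=[DatasetReference(reference_name='BlobDataset')],
    )
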
@@ -106,19 +104,19 @@ class CopyActivity(ExecutionActivity): 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, } - def __init__(self, **kwargs): - super(CopyActivity, self).__init__(**kwargs) - self.source = kwargs.get('source', None) - self.sink = kwargs.get('sink', None) - self.translator = kwargs.get('translator', None) - self.enable_staging = kwargs.get('enable_staging', None) - self.staging_settings = kwargs.get('staging_settings', None) - self.parallel_copies = kwargs.get('parallel_copies', None) - self.data_integration_units = kwargs.get('data_integration_units', None) - self.enable_skip_incompatible_row = kwargs.get('enable_skip_incompatible_row', None) - self.redirect_incompatible_row_settings = kwargs.get('redirect_incompatible_row_settings', None) - self.preserve_rules = kwargs.get('preserve_rules', None) - self.preserve = kwargs.get('preserve', None) - self.inputs = kwargs.get('inputs', None) - self.outputs = kwargs.get('outputs', None) + def __init__(self, name, source, sink, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, translator=None, enable_staging=None, staging_settings=None, parallel_copies=None, data_integration_units=None, enable_skip_incompatible_row=None, redirect_incompatible_row_settings=None, preserve_rules=None, preserve=None, inputs=None, outputs=None): + super(CopyActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) + self.source = source + self.sink = sink + self.translator = translator + self.enable_staging = enable_staging + self.staging_settings = staging_settings + self.parallel_copies = parallel_copies + self.data_integration_units = data_integration_units + self.enable_skip_incompatible_row = enable_skip_incompatible_row + self.redirect_incompatible_row_settings = redirect_incompatible_row_settings + self.preserve_rules = preserve_rules + self.preserve = preserve + self.inputs = inputs + self.outputs = outputs self.type = 'Copy' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py deleted file mode 100644 index f8a1fee5625d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py +++ /dev/null @@ -1,124 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class CopyActivity(ExecutionActivity): - """Copy activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. 
- :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param source: Required. Copy activity source. - :type source: ~azure.mgmt.datafactory.models.CopySource - :param sink: Required. Copy activity sink. - :type sink: ~azure.mgmt.datafactory.models.CopySink - :param translator: Copy activity translator. If not specified, tabular - translator is used. - :type translator: object - :param enable_staging: Specifies whether to copy data via an interim - staging. Default value is false. Type: boolean (or Expression with - resultType boolean). - :type enable_staging: object - :param staging_settings: Specifies interim staging settings when - EnableStaging is true. - :type staging_settings: ~azure.mgmt.datafactory.models.StagingSettings - :param parallel_copies: Maximum number of concurrent sessions opened on - the source or sink to avoid overloading the data store. Type: integer (or - Expression with resultType integer), minimum: 0. - :type parallel_copies: object - :param data_integration_units: Maximum number of data integration units - that can be used to perform this data movement. Type: integer (or - Expression with resultType integer), minimum: 0. - :type data_integration_units: object - :param enable_skip_incompatible_row: Whether to skip incompatible row. - Default value is false. Type: boolean (or Expression with resultType - boolean). - :type enable_skip_incompatible_row: object - :param redirect_incompatible_row_settings: Redirect incompatible row - settings when EnableSkipIncompatibleRow is true. - :type redirect_incompatible_row_settings: - ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings - :param preserve_rules: Preserve Rules. - :type preserve_rules: list[object] - :param preserve: Preserve rules. - :type preserve: list[object] - :param inputs: List of inputs for the activity. - :type inputs: list[~azure.mgmt.datafactory.models.DatasetReference] - :param outputs: List of outputs for the activity. 
- :type outputs: list[~azure.mgmt.datafactory.models.DatasetReference] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'source': {'required': True}, - 'sink': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, - 'sink': {'key': 'typeProperties.sink', 'type': 'CopySink'}, - 'translator': {'key': 'typeProperties.translator', 'type': 'object'}, - 'enable_staging': {'key': 'typeProperties.enableStaging', 'type': 'object'}, - 'staging_settings': {'key': 'typeProperties.stagingSettings', 'type': 'StagingSettings'}, - 'parallel_copies': {'key': 'typeProperties.parallelCopies', 'type': 'object'}, - 'data_integration_units': {'key': 'typeProperties.dataIntegrationUnits', 'type': 'object'}, - 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, - 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, - 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, - 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, - 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, - 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, - } - - def __init__(self, *, name: str, source, sink, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, translator=None, enable_staging=None, staging_settings=None, parallel_copies=None, data_integration_units=None, enable_skip_incompatible_row=None, redirect_incompatible_row_settings=None, preserve_rules=None, preserve=None, inputs=None, outputs=None, **kwargs) -> None: - super(CopyActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.source = source - self.sink = sink - self.translator = translator - self.enable_staging = enable_staging - self.staging_settings = staging_settings - self.parallel_copies = parallel_copies - self.data_integration_units = data_integration_units - self.enable_skip_incompatible_row = enable_skip_incompatible_row - self.redirect_incompatible_row_settings = redirect_incompatible_row_settings - self.preserve_rules = preserve_rules - self.preserve = preserve - self.inputs = inputs - self.outputs = outputs - self.type = 'Copy' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py index cbe8f2ecf8f7..99d85c435117 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py @@ -26,8 +26,6 @@ class CopySink(Model): SapCloudForCustomerSink, AzureMySqlSink, AzurePostgreSqlSink, JsonSink, 
DelimitedTextSink - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -49,7 +47,7 @@ class CopySink(Model): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str """ @@ -71,12 +69,12 @@ class CopySink(Model): 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} } - def __init__(self, **kwargs): - super(CopySink, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.write_batch_size = kwargs.get('write_batch_size', None) - self.write_batch_timeout = kwargs.get('write_batch_timeout', None) - self.sink_retry_count = kwargs.get('sink_retry_count', None) - self.sink_retry_wait = kwargs.get('sink_retry_wait', None) - self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None): + super(CopySink, self).__init__() + self.additional_properties = additional_properties + self.write_batch_size = write_batch_size + self.write_batch_timeout = write_batch_timeout + self.sink_retry_count = sink_retry_count + self.sink_retry_wait = sink_retry_wait + self.max_concurrent_connections = max_concurrent_connections self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py deleted file mode 100644 index 3720bece5674..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. 
-# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class CopySink(Model): - """A copy activity sink. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: CosmosDbMongoDbApiSink, SalesforceServiceCloudSink, - SalesforceSink, AzureDataExplorerSink, CommonDataServiceForAppsSink, - DynamicsCrmSink, DynamicsSink, MicrosoftAccessSink, InformixSink, OdbcSink, - AzureSearchIndexSink, AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, - SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, - DocumentDbCollectionSink, FileSystemSink, BlobSink, BinarySink, - ParquetSink, AvroSink, AzureTableSink, AzureQueueSink, - SapCloudForCustomerSink, AzureMySqlSink, AzurePostgreSqlSink, JsonSink, - DelimitedTextSink - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: - super(CopySink, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.write_batch_size = write_batch_size - self.write_batch_timeout = write_batch_timeout - self.sink_retry_count = sink_retry_count - self.sink_retry_wait = sink_retry_wait - self.max_concurrent_connections = max_concurrent_connections - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py index b37bca86de5e..37257f2785fa 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py @@ -38,8 +38,6 @@ class CopySource(Model): DocumentDbCollectionSource, BlobSource, AzureTableSource, BinarySource, JsonSource, DelimitedTextSource, ParquetSource, AvroSource - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -54,7 +52,7 @@ class CopySource(Model): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. 
:type type: str """ @@ -74,10 +72,10 @@ class CopySource(Model): 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'ODataSource': 'ODataSource', 'SapBwSource': 'SapBwSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'BinarySource': 'BinarySource', 'JsonSource': 'JsonSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'AvroSource': 'AvroSource'} } - def __init__(self, **kwargs): - super(CopySource, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.source_retry_count = kwargs.get('source_retry_count', None) - self.source_retry_wait = kwargs.get('source_retry_wait', None) - self.max_concurrent_connections = 
kwargs.get('max_concurrent_connections', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None): + super(CopySource, self).__init__() + self.additional_properties = additional_properties + self.source_retry_count = source_retry_count + self.source_retry_wait = source_retry_wait + self.max_concurrent_connections = max_concurrent_connections self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py deleted file mode 100644 index 22b8c590498e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py +++ /dev/null @@ -1,83 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class CopySource(Model): - """A copy activity source. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonRedshiftSource, GoogleAdWordsSource, - OracleServiceCloudSource, DynamicsAXSource, ResponsysSource, - SalesforceMarketingCloudSource, VerticaSource, NetezzaSource, ZohoSource, - XeroSource, SquareSource, SparkSource, ShopifySource, ServiceNowSource, - QuickBooksSource, PrestoSource, PhoenixSource, PaypalSource, MarketoSource, - AzureMariaDBSource, MariaDBSource, MagentoSource, JiraSource, ImpalaSource, - HubspotSource, HiveSource, HBaseSource, GreenplumSource, - GoogleBigQuerySource, EloquaSource, DrillSource, CouchbaseSource, - ConcurSource, AzurePostgreSqlSource, AmazonMWSSource, HttpSource, - AzureBlobFSSource, AzureDataLakeStoreSource, Office365Source, - CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, CassandraSource, - WebSource, TeradataSource, OracleSource, AzureDataExplorerSource, - AzureMySqlSource, HdfsSource, FileSystemSource, SqlDWSource, SqlMISource, - AzureSqlSource, SqlServerSource, SqlSource, RestSource, SapTableSource, - SapOpenHubSource, SapHanaSource, SapEccSource, SapCloudForCustomerSource, - SalesforceServiceCloudSource, SalesforceSource, ODataSource, SapBwSource, - SybaseSource, PostgreSqlSource, MySqlSource, OdbcSource, Db2Source, - MicrosoftAccessSource, InformixSource, RelationalSource, - CommonDataServiceForAppsSource, DynamicsCrmSource, DynamicsSource, - DocumentDbCollectionSource, BlobSource, AzureTableSource, BinarySource, - JsonSource, DelimitedTextSource, ParquetSource, AvroSource - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'ODataSource': 'ODataSource', 'SapBwSource': 'SapBwSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 
'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'BinarySource': 'BinarySource', 'JsonSource': 'JsonSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'AvroSource': 'AvroSource'} - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: - super(CopySource, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.source_retry_count = source_retry_count - self.source_retry_wait = source_retry_wait - self.max_concurrent_connections = max_concurrent_connections - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service.py index 6a8a462f6f46..7723a1b2c4f9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service.py @@ -15,8 +15,6 @@ class CosmosDbLinkedService(LinkedService): """Microsoft Azure Cosmos Database (CosmosDB) linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,9 +29,9 @@ class CosmosDbLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param connection_string: Required. The connection string. Type: string, + :param connection_string: The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. 
:type connection_string: object :param account_key: The Azure key vault secret reference of accountKey in @@ -63,9 +61,9 @@ class CosmosDbLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(CosmosDbLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.account_key = kwargs.get('account_key', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, connection_string, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, account_key=None, encrypted_credential=None): + super(CosmosDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.connection_string = connection_string + self.account_key = account_key + self.encrypted_credential = encrypted_credential self.type = 'CosmosDb' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service_py3.py deleted file mode 100644 index 57dab80e06b9..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service_py3.py +++ /dev/null @@ -1,71 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class CosmosDbLinkedService(LinkedService): - """Microsoft Azure Cosmos Database (CosmosDB) linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param account_key: The Azure key vault secret reference of accountKey in - connection string. - :type account_key: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, account_key=None, encrypted_credential=None, **kwargs) -> None: - super(CosmosDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.account_key = account_key - self.encrypted_credential = encrypted_credential - self.type = 'CosmosDb' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset.py index d86648eb5eee..56acfa06a8b4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset.py @@ -15,8 +15,6 @@ class CosmosDbMongoDbApiCollectionDataset(Dataset): """The CosmosDB (MongoDB API) database dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class CosmosDbMongoDbApiCollectionDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,10 +39,10 @@ class CosmosDbMongoDbApiCollectionDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param collection: Required. The collection name of the CosmosDB (MongoDB - API) database. Type: string (or Expression with resultType string). + :param collection: The collection name of the CosmosDB (MongoDB API) + database. Type: string (or Expression with resultType string). 
:type collection: object """ @@ -67,7 +65,7 @@ class CosmosDbMongoDbApiCollectionDataset(Dataset): 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, } - def __init__(self, **kwargs): - super(CosmosDbMongoDbApiCollectionDataset, self).__init__(**kwargs) - self.collection = kwargs.get('collection', None) + def __init__(self, linked_service_name, collection, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None): + super(CosmosDbMongoDbApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.collection = collection self.type = 'CosmosDbMongoDbApiCollection' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset_py3.py deleted file mode 100644 index de2650fa14b4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset_py3.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class CosmosDbMongoDbApiCollectionDataset(Dataset): - """The CosmosDB (MongoDB API) database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param collection: Required. The collection name of the CosmosDB (MongoDB - API) database. Type: string (or Expression with resultType string). 
- :type collection: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'collection': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, collection, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: - super(CosmosDbMongoDbApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.collection = collection - self.type = 'CosmosDbMongoDbApiCollection' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service.py index f76e7c5f5a41..202a1c4427e0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service.py @@ -15,8 +15,6 @@ class CosmosDbMongoDbApiLinkedService(LinkedService): """Linked service for CosmosDB (MongoDB API) data source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,15 +29,14 @@ class CosmosDbMongoDbApiLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param connection_string: Required. The CosmosDB (MongoDB API) connection - string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: - string, SecureString or AzureKeyVaultSecretReference. + :param connection_string: The CosmosDB (MongoDB API) connection string. + Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, + SecureString or AzureKeyVaultSecretReference. :type connection_string: object - :param database: Required. The name of the CosmosDB (MongoDB API) database - that you want to access. Type: string (or Expression with resultType - string). + :param database: The name of the CosmosDB (MongoDB API) database that you + want to access. Type: string (or Expression with resultType string). 
:type database: object """ @@ -60,8 +57,8 @@ class CosmosDbMongoDbApiLinkedService(LinkedService): 'database': {'key': 'typeProperties.database', 'type': 'object'}, } - def __init__(self, **kwargs): - super(CosmosDbMongoDbApiLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.database = kwargs.get('database', None) + def __init__(self, connection_string, database, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None): + super(CosmosDbMongoDbApiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.connection_string = connection_string + self.database = database self.type = 'CosmosDbMongoDbApi' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service_py3.py deleted file mode 100644 index 2a72bfce4ff2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service_py3.py +++ /dev/null @@ -1,67 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class CosmosDbMongoDbApiLinkedService(LinkedService): - """Linked service for CosmosDB (MongoDB API) data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The CosmosDB (MongoDB API) connection - string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: - string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param database: Required. The name of the CosmosDB (MongoDB API) database - that you want to access. Type: string (or Expression with resultType - string). 
- :type database: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - 'database': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - } - - def __init__(self, *, connection_string, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: - super(CosmosDbMongoDbApiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.database = database - self.type = 'CosmosDbMongoDbApi' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink.py index 0d40b52add80..d78eccb1030a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink.py @@ -15,8 +15,6 @@ class CosmosDbMongoDbApiSink(CopySink): """A copy activity sink for a CosmosDB (MongoDB API) database. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -38,7 +36,7 @@ class CosmosDbMongoDbApiSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param write_behavior: Specifies whether the document with same key to be overwritten (upsert) rather than throw exception (insert). 
The default @@ -62,7 +60,7 @@ class CosmosDbMongoDbApiSink(CopySink): 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } - def __init__(self, **kwargs): - super(CosmosDbMongoDbApiSink, self).__init__(**kwargs) - self.write_behavior = kwargs.get('write_behavior', None) + def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None): + super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.write_behavior = write_behavior self.type = 'CosmosDbMongoDbApiSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink_py3.py deleted file mode 100644 index 5db1ee5c9d36..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink_py3.py +++ /dev/null @@ -1,68 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class CosmosDbMongoDbApiSink(CopySink): - """A copy activity sink for a CosmosDB (MongoDB API) database. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param write_behavior: Specifies whether the document with same key to be - overwritten (upsert) rather than throw exception (insert). The default - value is "insert". Type: string (or Expression with resultType string). - Type: string (or Expression with resultType string). 
- :type write_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: - super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.write_behavior = write_behavior - self.type = 'CosmosDbMongoDbApiSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py index 44dc7443427b..c6b65fef3192 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py @@ -15,8 +15,6 @@ class CosmosDbMongoDbApiSource(CopySource): """A copy activity source for a CosmosDB (MongoDB API) database. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class CosmosDbMongoDbApiSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param filter: Specifies selection filter using query operators. 
To return all documents in a collection, omit this parameter or pass an empty @@ -63,9 +61,9 @@ class CosmosDbMongoDbApiSource(CopySource): 'batch_size': {'key': 'batchSize', 'type': 'object'}, } - def __init__(self, **kwargs): - super(CosmosDbMongoDbApiSource, self).__init__(**kwargs) - self.filter = kwargs.get('filter', None) - self.cursor_methods = kwargs.get('cursor_methods', None) - self.batch_size = kwargs.get('batch_size', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None): + super(CosmosDbMongoDbApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.filter = filter + self.cursor_methods = cursor_methods + self.batch_size = batch_size self.type = 'CosmosDbMongoDbApiSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source_py3.py deleted file mode 100644 index 7d180f105abf..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source_py3.py +++ /dev/null @@ -1,71 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class CosmosDbMongoDbApiSource(CopySource): - """A copy activity source for a CosmosDB (MongoDB API) database. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param filter: Specifies selection filter using query operators. To return - all documents in a collection, omit this parameter or pass an empty - document ({}). Type: string (or Expression with resultType string). - :type filter: object - :param cursor_methods: Cursor methods for Mongodb query. - :type cursor_methods: - ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties - :param batch_size: Specifies the number of documents to return in each - batch of the response from MongoDB instance. In most cases, modifying the - batch size will not affect the user or the application. 
This property's - main purpose is to avoid hit the limitation of response size. Type: - integer (or Expression with resultType integer). - :type batch_size: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'filter': {'key': 'filter', 'type': 'object'}, - 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, - 'batch_size': {'key': 'batchSize', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None, **kwargs) -> None: - super(CosmosDbMongoDbApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.filter = filter - self.cursor_methods = cursor_methods - self.batch_size = batch_size - self.type = 'CosmosDbMongoDbApiSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service.py index 76e45648f941..affcebbee4f0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service.py @@ -15,8 +15,6 @@ class CouchbaseLinkedService(LinkedService): """Couchbase server linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class CouchbaseLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. 
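For orientation, the CosmosDB (MongoDB API) models above now take their required values as ordinary positional parameters instead of reading them out of **kwargs. A minimal usage sketch, not part of the patch; the reference names and connection values are placeholders, and passing a SecureString model for the connection string is an assumption based on the docstring:

    from azure.mgmt.datafactory.models import (
        CosmosDbMongoDbApiCollectionDataset,
        CosmosDbMongoDbApiLinkedService,
        LinkedServiceReference,
        SecureString,
    )

    # connection_string and database are now positional and required;
    # omitting either raises TypeError at construction time.
    linked_service = CosmosDbMongoDbApiLinkedService(
        connection_string=SecureString(value='<mongodb-uri>'),  # placeholder URI
        database='mydb',
    )

    # linked_service_name and collection are likewise positional and required.
    dataset = CosmosDbMongoDbApiCollectionDataset(
        linked_service_name=LinkedServiceReference(reference_name='CosmosMongoLS'),
        collection='mycollection',
    )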
@@ -62,9 +60,9 @@ class CouchbaseLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(CouchbaseLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.cred_string = kwargs.get('cred_string', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, connection_string=None, cred_string=None, encrypted_credential=None): + super(CouchbaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.connection_string = connection_string + self.cred_string = cred_string + self.encrypted_credential = encrypted_credential self.type = 'Couchbase' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service_py3.py deleted file mode 100644 index afe336f666de..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service_py3.py +++ /dev/null @@ -1,70 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class CouchbaseLinkedService(LinkedService): - """Couchbase server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param cred_string: The Azure key vault secret reference of credString in - connection string. - :type cred_string: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'cred_string': {'key': 'typeProperties.credString', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, cred_string=None, encrypted_credential=None, **kwargs) -> None: - super(CouchbaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.cred_string = cred_string - self.encrypted_credential = encrypted_credential - self.type = 'Couchbase' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source.py index b355605417d1..f11448fbaefb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source.py @@ -15,8 +15,6 @@ class CouchbaseSource(CopySource): """A copy activity Couchbase server source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class CouchbaseSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
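Every CouchbaseLinkedService parameter is optional after this change, so callers can keep using keywords throughout even though the signature no longer enforces keyword-only arguments. A sketch under the assumption that the credString secret is supplied via AzureKeyVaultSecretReference, as the docstring suggests; the linked-service and secret names are placeholders:

    from azure.mgmt.datafactory.models import (
        AzureKeyVaultSecretReference,
        CouchbaseLinkedService,
        LinkedServiceReference,
    )

    couchbase_ls = CouchbaseLinkedService(
        connection_string='Driver=Couchbase;Host=cb01;Port=8091',  # placeholder ODBC string
        cred_string=AzureKeyVaultSecretReference(
            store=LinkedServiceReference(reference_name='MyKeyVaultLS'),
            secret_name='couchbase-cred',
        ),
    )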
@@ -51,7 +49,7 @@ class CouchbaseSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(CouchbaseSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'CouchbaseSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source_py3.py deleted file mode 100644 index 1358fc20feba..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class CouchbaseSource(CopySource): - """A copy activity Couchbase server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'CouchbaseSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset.py index 821274b9aae4..513a707f500b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset.py @@ -15,8 +15,6 @@ class CouchbaseTableDataset(Dataset): """Couchbase server dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class CouchbaseTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class CouchbaseTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). 
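As with the source above, the dataset constructor in the hunk that follows becomes keyword-explicit, with `linked_service_name` promoted to the leading positional parameter. A hedged construction sketch (`LinkedServiceReference(reference_name=...)` is assumed from the same models package; names are placeholders):

```python
from azure.mgmt.datafactory.models import (
    CouchbaseTableDataset,
    LinkedServiceReference,
)

dataset = CouchbaseTableDataset(
    linked_service_name=LinkedServiceReference(reference_name="CouchbaseLS"),
    table_name="travel-sample",  # placeholder table name
)
```

Note the dotted `_attribute_map` key `'typeProperties.tableName'`: msrest's flattening convention serializes `table_name` under a nested `typeProperties` JSON object rather than at the top level of the body.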
@@ -66,7 +64,7 @@ class CouchbaseTableDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(CouchbaseTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): + super(CouchbaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name self.type = 'CouchbaseTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset_py3.py deleted file mode 100644 index cf5299fd55a5..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class CouchbaseTableDataset(Dataset): - """Couchbase server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). 
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(CouchbaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'CouchbaseTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request.py index 0e7002dcf68a..fa4d41425516 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request.py @@ -35,9 +35,9 @@ class CreateLinkedIntegrationRuntimeRequest(Model): 'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'}, } - def __init__(self, **kwargs): - super(CreateLinkedIntegrationRuntimeRequest, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.subscription_id = kwargs.get('subscription_id', None) - self.data_factory_name = kwargs.get('data_factory_name', None) - self.data_factory_location = kwargs.get('data_factory_location', None) + def __init__(self, name=None, subscription_id=None, data_factory_name=None, data_factory_location=None): + super(CreateLinkedIntegrationRuntimeRequest, self).__init__() + self.name = name + self.subscription_id = subscription_id + self.data_factory_name = data_factory_name + self.data_factory_location = data_factory_location diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request_py3.py deleted file mode 100644 index aad7d6fa5ac0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request_py3.py +++ /dev/null @@ -1,43 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class CreateLinkedIntegrationRuntimeRequest(Model): - """The linked integration runtime information. - - :param name: The name of the linked integration runtime. - :type name: str - :param subscription_id: The ID of the subscription that the linked - integration runtime belongs to. - :type subscription_id: str - :param data_factory_name: The name of the data factory that the linked - integration runtime belongs to. - :type data_factory_name: str - :param data_factory_location: The location of the data factory that the - linked integration runtime belongs to. - :type data_factory_location: str - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, - 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, - 'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'}, - } - - def __init__(self, *, name: str=None, subscription_id: str=None, data_factory_name: str=None, data_factory_location: str=None, **kwargs) -> None: - super(CreateLinkedIntegrationRuntimeRequest, self).__init__(**kwargs) - self.name = name - self.subscription_id = subscription_id - self.data_factory_name = data_factory_name - self.data_factory_location = data_factory_location diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response.py index 18ec9f963e65..696b3fcea4c4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response.py @@ -15,9 +15,7 @@ class CreateRunResponse(Model): """Response body with a run identifier. - All required parameters must be populated in order to send to Azure. - - :param run_id: Required. Identifier of a run. + :param run_id: Identifier of a run. :type run_id: str """ @@ -29,6 +27,6 @@ class CreateRunResponse(Model): 'run_id': {'key': 'runId', 'type': 'str'}, } - def __init__(self, **kwargs): - super(CreateRunResponse, self).__init__(**kwargs) - self.run_id = kwargs.get('run_id', None) + def __init__(self, run_id): + super(CreateRunResponse, self).__init__() + self.run_id = run_id diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response_py3.py deleted file mode 100644 index bb280441ae90..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response_py3.py +++ /dev/null @@ -1,34 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class CreateRunResponse(Model): - """Response body with a run identifier. - - All required parameters must be populated in order to send to Azure. - - :param run_id: Required. Identifier of a run. 
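Both plain models in this stretch follow the same pattern: `**kwargs` gives way to explicit parameters, and for `CreateRunResponse`, `run_id` becomes the sole positional argument while the unchanged `_validation` map continues to mark it required at serialization time. A minimal sketch with placeholder values:

```python
from azure.mgmt.datafactory.models import (
    CreateLinkedIntegrationRuntimeRequest,
    CreateRunResponse,
)

request = CreateLinkedIntegrationRuntimeRequest(
    name="linked-ir",  # placeholder values throughout
    subscription_id="00000000-0000-0000-0000-000000000000",
    data_factory_name="myFactory",
    data_factory_location="West US",
)
response = CreateRunResponse(run_id="0123456789abcdef")
```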
- :type run_id: str - """ - - _validation = { - 'run_id': {'required': True}, - } - - _attribute_map = { - 'run_id': {'key': 'runId', 'type': 'str'}, - } - - def __init__(self, *, run_id: str, **kwargs) -> None: - super(CreateRunResponse, self).__init__(**kwargs) - self.run_id = run_id diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity.py index 01cfb7335d37..79304132e099 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity.py @@ -15,12 +15,10 @@ class CustomActivity(ExecutionActivity): """Custom activity type. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Activity name. :type name: str :param description: Activity description. :type description: str @@ -28,15 +26,15 @@ class CustomActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param command: Required. Command for custom activity Type: string (or - Expression with resultType string). + :param command: Command for custom activity Type: string (or Expression + with resultType string). :type command: object :param resource_linked_service: Resource linked service reference. 
:type resource_linked_service: @@ -80,12 +78,12 @@ class CustomActivity(ExecutionActivity): 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'}, } - def __init__(self, **kwargs): - super(CustomActivity, self).__init__(**kwargs) - self.command = kwargs.get('command', None) - self.resource_linked_service = kwargs.get('resource_linked_service', None) - self.folder_path = kwargs.get('folder_path', None) - self.reference_objects = kwargs.get('reference_objects', None) - self.extended_properties = kwargs.get('extended_properties', None) - self.retention_time_in_days = kwargs.get('retention_time_in_days', None) + def __init__(self, name, command, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, resource_linked_service=None, folder_path=None, reference_objects=None, extended_properties=None, retention_time_in_days=None): + super(CustomActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) + self.command = command + self.resource_linked_service = resource_linked_service + self.folder_path = folder_path + self.reference_objects = reference_objects + self.extended_properties = extended_properties + self.retention_time_in_days = retention_time_in_days self.type = 'Custom' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_py3.py deleted file mode 100644 index bf8326f053f2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_py3.py +++ /dev/null @@ -1,91 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class CustomActivity(ExecutionActivity): - """Custom activity type. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param command: Required. Command for custom activity Type: string (or - Expression with resultType string). 
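Under the new `CustomActivity` signature shown above, `name` and `command` lead as positional parameters. A hypothetical construction (paths, commands, and property names are illustrative only):

```python
from azure.mgmt.datafactory.models import CustomActivity, LinkedServiceReference

activity = CustomActivity(
    name="RunLegacyExe",
    command="cmd /c legacy.exe --mode test",
    resource_linked_service=LinkedServiceReference(reference_name="BlobStore"),
    folder_path="customactivity/binaries",   # folder holding the resource files
    extended_properties={"environment": "test"},  # free-form property bag
)
```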
- :type command: object - :param resource_linked_service: Resource linked service reference. - :type resource_linked_service: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param folder_path: Folder path for resource files Type: string (or - Expression with resultType string). - :type folder_path: object - :param reference_objects: Reference objects - :type reference_objects: - ~azure.mgmt.datafactory.models.CustomActivityReferenceObject - :param extended_properties: User defined property bag. There is no - restriction on the keys or values that can be used. The user specified - custom activity has the full responsibility to consume and interpret the - content defined. - :type extended_properties: dict[str, object] - :param retention_time_in_days: The retention time for the files submitted - for custom activity. Type: double (or Expression with resultType double). - :type retention_time_in_days: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'command': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'command': {'key': 'typeProperties.command', 'type': 'object'}, - 'resource_linked_service': {'key': 'typeProperties.resourceLinkedService', 'type': 'LinkedServiceReference'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'}, - 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'}, - 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'}, - } - - def __init__(self, *, name: str, command, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, resource_linked_service=None, folder_path=None, reference_objects=None, extended_properties=None, retention_time_in_days=None, **kwargs) -> None: - super(CustomActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.command = command - self.resource_linked_service = resource_linked_service - self.folder_path = folder_path - self.reference_objects = reference_objects - self.extended_properties = extended_properties - self.retention_time_in_days = retention_time_in_days - self.type = 'Custom' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object.py index 5f95a54612dd..bcf61066590b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object.py @@ -27,7 +27,7 @@ class CustomActivityReferenceObject(Model): 'datasets': {'key': 'datasets', 'type': 
'[DatasetReference]'}, } - def __init__(self, **kwargs): - super(CustomActivityReferenceObject, self).__init__(**kwargs) - self.linked_services = kwargs.get('linked_services', None) - self.datasets = kwargs.get('datasets', None) + def __init__(self, linked_services=None, datasets=None): + super(CustomActivityReferenceObject, self).__init__() + self.linked_services = linked_services + self.datasets = datasets diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object_py3.py deleted file mode 100644 index f860f0141bd0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object_py3.py +++ /dev/null @@ -1,33 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class CustomActivityReferenceObject(Model): - """Reference objects for custom activity. - - :param linked_services: Linked service references. - :type linked_services: - list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param datasets: Dataset references. - :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference] - """ - - _attribute_map = { - 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceReference]'}, - 'datasets': {'key': 'datasets', 'type': '[DatasetReference]'}, - } - - def __init__(self, *, linked_services=None, datasets=None, **kwargs) -> None: - super(CustomActivityReferenceObject, self).__init__(**kwargs) - self.linked_services = linked_services - self.datasets = datasets diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service.py index db14a05e7ad1..b290ca34a41c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service.py @@ -15,8 +15,6 @@ class CustomDataSourceLinkedService(LinkedService): """Custom linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,9 +29,9 @@ class CustomDataSourceLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param type_properties: Required. Custom linked service properties. + :param type_properties: Custom linked service properties. 
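For `CustomDataSourceLinkedService`, `type_properties` is an opaque bag handed to the service verbatim; per the `_attribute_map` below, it serializes under the top-level `typeProperties` key. A hedged sketch (the endpoint and property names are invented for illustration):

```python
from azure.mgmt.datafactory.models import CustomDataSourceLinkedService

linked_service = CustomDataSourceLinkedService(
    type_properties={
        "endpoint": "https://example.internal/api",  # invented property names
        "authType": "apiKey",
    },
    description="Connector implemented against a custom data source",
)
```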
:type type_properties: object """ @@ -52,7 +50,7 @@ class CustomDataSourceLinkedService(LinkedService): 'type_properties': {'key': 'typeProperties', 'type': 'object'}, } - def __init__(self, **kwargs): - super(CustomDataSourceLinkedService, self).__init__(**kwargs) - self.type_properties = kwargs.get('type_properties', None) + def __init__(self, type_properties, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None): + super(CustomDataSourceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.type_properties = type_properties self.type = 'CustomDataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service_py3.py deleted file mode 100644 index f7633ee28cbd..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service_py3.py +++ /dev/null @@ -1,58 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class CustomDataSourceLinkedService(LinkedService): - """Custom linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param type_properties: Required. Custom linked service properties. 
- :type type_properties: object - """ - - _validation = { - 'type': {'required': True}, - 'type_properties': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'type_properties': {'key': 'typeProperties', 'type': 'object'}, - } - - def __init__(self, *, type_properties, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: - super(CustomDataSourceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type_properties = type_properties - self.type = 'CustomDataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset.py index a242309c3fd1..370350c6d6f6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset.py @@ -15,8 +15,6 @@ class CustomDataset(Dataset): """The custom dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class CustomDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class CustomDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param type_properties: Custom dataset properties. 
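`CustomDataset` follows the same pattern, except its whole `typeProperties` payload is user-defined rather than a set of individually flattened fields. A sketch including the standard `msrest.serialization.Model.serialize()` helper, which performs the nesting:

```python
from azure.mgmt.datafactory.models import CustomDataset, LinkedServiceReference

dataset = CustomDataset(
    linked_service_name=LinkedServiceReference(reference_name="MyService"),
    type_properties={"format": "parquet"},  # opaque, service-defined payload
)
# serialize() nests type_properties under 'typeProperties' in the JSON body.
print(dataset.serialize())
```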
:type type_properties: object @@ -65,7 +63,7 @@ class CustomDataset(Dataset): 'type_properties': {'key': 'typeProperties', 'type': 'object'}, } - def __init__(self, **kwargs): - super(CustomDataset, self).__init__(**kwargs) - self.type_properties = kwargs.get('type_properties', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, type_properties=None): + super(CustomDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.type_properties = type_properties self.type = 'CustomDataset' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset_py3.py deleted file mode 100644 index c00dae2b2c56..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset_py3.py +++ /dev/null @@ -1,71 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class CustomDataset(Dataset): - """The custom dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param type_properties: Custom dataset properties. 
- :type type_properties: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'type_properties': {'key': 'typeProperties', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, type_properties=None, **kwargs) -> None: - super(CustomDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type_properties = type_properties - self.type = 'CustomDataset' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py index 8b3e08f32768..ad33efdcc1d7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py @@ -12,7 +12,7 @@ from enum import Enum -class IntegrationRuntimeState(str, Enum): +class IntegrationRuntimeState(Enum): initial = "Initial" stopped = "Stopped" @@ -26,13 +26,13 @@ class IntegrationRuntimeState(str, Enum): access_denied = "AccessDenied" -class IntegrationRuntimeAutoUpdate(str, Enum): +class IntegrationRuntimeAutoUpdate(Enum): on = "On" off = "Off" -class ParameterType(str, Enum): +class ParameterType(Enum): object_enum = "Object" string = "String" @@ -43,7 +43,7 @@ class ParameterType(str, Enum): secure_string = "SecureString" -class DependencyCondition(str, Enum): +class DependencyCondition(Enum): succeeded = "Succeeded" failed = "Failed" @@ -51,21 +51,21 @@ class DependencyCondition(str, Enum): completed = "Completed" -class VariableType(str, Enum): +class VariableType(Enum): string = "String" bool_enum = "Bool" array = "Array" -class TriggerRuntimeState(str, Enum): +class TriggerRuntimeState(Enum): started = "Started" stopped = "Stopped" disabled = "Disabled" -class EventSubscriptionStatus(str, Enum): +class EventSubscriptionStatus(Enum): enabled = "Enabled" provisioning = "Provisioning" @@ -74,7 +74,7 @@ class EventSubscriptionStatus(str, Enum): unknown = "Unknown" -class RunQueryFilterOperand(str, Enum): +class RunQueryFilterOperand(Enum): pipeline_name = "PipelineName" status = "Status" @@ -90,7 +90,7 @@ class RunQueryFilterOperand(str, Enum): latest_only = "LatestOnly" -class RunQueryFilterOperator(str, Enum): +class RunQueryFilterOperator(Enum): equals = "Equals" not_equals = "NotEquals" @@ -98,7 +98,7 @@ class RunQueryFilterOperator(str, Enum): not_in = "NotIn" -class RunQueryOrderByField(str, Enum): +class RunQueryOrderByField(Enum): run_start = "RunStart" run_end = "RunEnd" @@ 
-111,32 +111,32 @@ class RunQueryOrderByField(str, Enum): trigger_run_timestamp = "TriggerRunTimestamp" -class RunQueryOrder(str, Enum): +class RunQueryOrder(Enum): asc = "ASC" desc = "DESC" -class TriggerRunStatus(str, Enum): +class TriggerRunStatus(Enum): succeeded = "Succeeded" failed = "Failed" inprogress = "Inprogress" -class TumblingWindowFrequency(str, Enum): +class TumblingWindowFrequency(Enum): minute = "Minute" hour = "Hour" -class BlobEventTypes(str, Enum): +class BlobEventTypes(Enum): microsoft_storage_blob_created = "Microsoft.Storage.BlobCreated" microsoft_storage_blob_deleted = "Microsoft.Storage.BlobDeleted" -class DayOfWeek(str, Enum): +class DayOfWeek(Enum): sunday = "Sunday" monday = "Monday" @@ -147,7 +147,7 @@ class DayOfWeek(str, Enum): saturday = "Saturday" -class DaysOfWeek(str, Enum): +class DaysOfWeek(Enum): sunday = "Sunday" monday = "Monday" @@ -158,7 +158,7 @@ class DaysOfWeek(str, Enum): saturday = "Saturday" -class RecurrenceFrequency(str, Enum): +class RecurrenceFrequency(Enum): not_specified = "NotSpecified" minute = "Minute" @@ -169,27 +169,27 @@ class RecurrenceFrequency(str, Enum): year = "Year" -class GoogleAdWordsAuthenticationType(str, Enum): +class GoogleAdWordsAuthenticationType(Enum): service_authentication = "ServiceAuthentication" user_authentication = "UserAuthentication" -class SparkServerType(str, Enum): +class SparkServerType(Enum): shark_server = "SharkServer" shark_server2 = "SharkServer2" spark_thrift_server = "SparkThriftServer" -class SparkThriftTransportProtocol(str, Enum): +class SparkThriftTransportProtocol(Enum): binary = "Binary" sasl = "SASL" http = "HTTP " -class SparkAuthenticationType(str, Enum): +class SparkAuthenticationType(Enum): anonymous = "Anonymous" username = "Username" @@ -197,47 +197,47 @@ class SparkAuthenticationType(str, Enum): windows_azure_hd_insight_service = "WindowsAzureHDInsightService" -class ServiceNowAuthenticationType(str, Enum): +class ServiceNowAuthenticationType(Enum): basic = "Basic" oauth2 = "OAuth2" -class PrestoAuthenticationType(str, Enum): +class PrestoAuthenticationType(Enum): anonymous = "Anonymous" ldap = "LDAP" -class PhoenixAuthenticationType(str, Enum): +class PhoenixAuthenticationType(Enum): anonymous = "Anonymous" username_and_password = "UsernameAndPassword" windows_azure_hd_insight_service = "WindowsAzureHDInsightService" -class ImpalaAuthenticationType(str, Enum): +class ImpalaAuthenticationType(Enum): anonymous = "Anonymous" sasl_username = "SASLUsername" username_and_password = "UsernameAndPassword" -class HiveServerType(str, Enum): +class HiveServerType(Enum): hive_server1 = "HiveServer1" hive_server2 = "HiveServer2" hive_thrift_server = "HiveThriftServer" -class HiveThriftTransportProtocol(str, Enum): +class HiveThriftTransportProtocol(Enum): binary = "Binary" sasl = "SASL" http = "HTTP " -class HiveAuthenticationType(str, Enum): +class HiveAuthenticationType(Enum): anonymous = "Anonymous" username = "Username" @@ -245,37 +245,37 @@ class HiveAuthenticationType(str, Enum): windows_azure_hd_insight_service = "WindowsAzureHDInsightService" -class HBaseAuthenticationType(str, Enum): +class HBaseAuthenticationType(Enum): anonymous = "Anonymous" basic = "Basic" -class GoogleBigQueryAuthenticationType(str, Enum): +class GoogleBigQueryAuthenticationType(Enum): service_authentication = "ServiceAuthentication" user_authentication = "UserAuthentication" -class SapHanaAuthenticationType(str, Enum): +class SapHanaAuthenticationType(Enum): basic = "Basic" windows = "Windows" -class 
SftpAuthenticationType(str, Enum): +class SftpAuthenticationType(Enum): basic = "Basic" ssh_public_key = "SshPublicKey" -class FtpAuthenticationType(str, Enum): +class FtpAuthenticationType(Enum): basic = "Basic" anonymous = "Anonymous" -class HttpAuthenticationType(str, Enum): +class HttpAuthenticationType(Enum): basic = "Basic" anonymous = "Anonymous" @@ -284,7 +284,7 @@ class HttpAuthenticationType(str, Enum): client_certificate = "ClientCertificate" -class RestServiceAuthenticationType(str, Enum): +class RestServiceAuthenticationType(Enum): anonymous = "Anonymous" basic = "Basic" @@ -292,13 +292,13 @@ class RestServiceAuthenticationType(str, Enum): managed_service_identity = "ManagedServiceIdentity" -class MongoDbAuthenticationType(str, Enum): +class MongoDbAuthenticationType(Enum): basic = "Basic" anonymous = "Anonymous" -class ODataAuthenticationType(str, Enum): +class ODataAuthenticationType(Enum): basic = "Basic" anonymous = "Anonymous" @@ -307,42 +307,42 @@ class ODataAuthenticationType(str, Enum): managed_service_identity = "ManagedServiceIdentity" -class ODataAadServicePrincipalCredentialType(str, Enum): +class ODataAadServicePrincipalCredentialType(Enum): service_principal_key = "ServicePrincipalKey" service_principal_cert = "ServicePrincipalCert" -class TeradataAuthenticationType(str, Enum): +class TeradataAuthenticationType(Enum): basic = "Basic" windows = "Windows" -class Db2AuthenticationType(str, Enum): +class Db2AuthenticationType(Enum): basic = "Basic" -class SybaseAuthenticationType(str, Enum): +class SybaseAuthenticationType(Enum): basic = "Basic" windows = "Windows" -class DynamicsDeploymentType(str, Enum): +class DynamicsDeploymentType(Enum): online = "Online" on_premises_with_ifd = "OnPremisesWithIfd" -class DynamicsAuthenticationType(str, Enum): +class DynamicsAuthenticationType(Enum): office365 = "Office365" ifd = "Ifd" -class AvroCompressionCodec(str, Enum): +class AvroCompressionCodec(Enum): none = "none" deflate = "deflate" @@ -351,7 +351,7 @@ class AvroCompressionCodec(str, Enum): bzip2 = "bzip2" -class AzureFunctionActivityMethod(str, Enum): +class AzureFunctionActivityMethod(Enum): get = "GET" post = "POST" @@ -362,7 +362,7 @@ class AzureFunctionActivityMethod(str, Enum): trace = "TRACE" -class WebActivityMethod(str, Enum): +class WebActivityMethod(Enum): get = "GET" post = "POST" @@ -370,14 +370,14 @@ class WebActivityMethod(str, Enum): delete = "DELETE" -class NetezzaPartitionOption(str, Enum): +class NetezzaPartitionOption(Enum): none = "None" data_slice = "DataSlice" dynamic_range = "DynamicRange" -class CassandraSourceReadConsistencyLevels(str, Enum): +class CassandraSourceReadConsistencyLevels(Enum): all = "ALL" each_quorum = "EACH_QUORUM" @@ -391,21 +391,21 @@ class CassandraSourceReadConsistencyLevels(str, Enum): local_serial = "LOCAL_SERIAL" -class TeradataPartitionOption(str, Enum): +class TeradataPartitionOption(Enum): none = "None" hash = "Hash" dynamic_range = "DynamicRange" -class OraclePartitionOption(str, Enum): +class OraclePartitionOption(Enum): none = "None" physical_partitions_of_table = "PhysicalPartitionsOfTable" dynamic_range = "DynamicRange" -class StoredProcedureParameterType(str, Enum): +class StoredProcedureParameterType(Enum): string = "String" int_enum = "Int" @@ -416,7 +416,7 @@ class StoredProcedureParameterType(str, Enum): date_enum = "Date" -class SapTablePartitionOption(str, Enum): +class SapTablePartitionOption(Enum): none = "None" partition_on_int = "PartitionOnInt" @@ -426,67 +426,67 @@ class SapTablePartitionOption(str, 
Enum): partition_on_time = "PartitionOnTime" -class SalesforceSourceReadBehavior(str, Enum): +class SalesforceSourceReadBehavior(Enum): query = "Query" query_all = "QueryAll" -class SsisPackageLocationType(str, Enum): +class SsisPackageLocationType(Enum): ssisdb = "SSISDB" file = "File" -class HDInsightActivityDebugInfoOption(str, Enum): +class HDInsightActivityDebugInfoOption(Enum): none = "None" always = "Always" failure = "Failure" -class SalesforceSinkWriteBehavior(str, Enum): +class SalesforceSinkWriteBehavior(Enum): insert = "Insert" upsert = "Upsert" -class AzureSearchIndexWriteBehaviorType(str, Enum): +class AzureSearchIndexWriteBehaviorType(Enum): merge = "Merge" upload = "Upload" -class PolybaseSettingsRejectType(str, Enum): +class PolybaseSettingsRejectType(Enum): value = "value" percentage = "percentage" -class JsonWriteFilePattern(str, Enum): +class JsonWriteFilePattern(Enum): set_of_objects = "setOfObjects" array_of_objects = "arrayOfObjects" -class SapCloudForCustomerSinkWriteBehavior(str, Enum): +class SapCloudForCustomerSinkWriteBehavior(Enum): insert = "Insert" update = "Update" -class WebHookActivityMethod(str, Enum): +class WebHookActivityMethod(Enum): post = "POST" -class IntegrationRuntimeType(str, Enum): +class IntegrationRuntimeType(Enum): managed = "Managed" self_hosted = "SelfHosted" -class SelfHostedIntegrationRuntimeNodeStatus(str, Enum): +class SelfHostedIntegrationRuntimeNodeStatus(Enum): need_registration = "NeedRegistration" online = "Online" @@ -497,21 +497,21 @@ class SelfHostedIntegrationRuntimeNodeStatus(str, Enum): initialize_failed = "InitializeFailed" -class IntegrationRuntimeUpdateResult(str, Enum): +class IntegrationRuntimeUpdateResult(Enum): none = "None" succeed = "Succeed" fail = "Fail" -class IntegrationRuntimeInternalChannelEncryptionMode(str, Enum): +class IntegrationRuntimeInternalChannelEncryptionMode(Enum): not_set = "NotSet" ssl_encrypted = "SslEncrypted" not_encrypted = "NotEncrypted" -class ManagedIntegrationRuntimeNodeStatus(str, Enum): +class ManagedIntegrationRuntimeNodeStatus(Enum): starting = "Starting" available = "Available" @@ -519,13 +519,13 @@ class ManagedIntegrationRuntimeNodeStatus(str, Enum): unavailable = "Unavailable" -class IntegrationRuntimeEntityReferenceType(str, Enum): +class IntegrationRuntimeEntityReferenceType(Enum): integration_runtime_reference = "IntegrationRuntimeReference" linked_service_reference = "LinkedServiceReference" -class IntegrationRuntimeSsisCatalogPricingTier(str, Enum): +class IntegrationRuntimeSsisCatalogPricingTier(Enum): basic = "Basic" standard = "Standard" @@ -533,19 +533,19 @@ class IntegrationRuntimeSsisCatalogPricingTier(str, Enum): premium_rs = "PremiumRS" -class IntegrationRuntimeLicenseType(str, Enum): +class IntegrationRuntimeLicenseType(Enum): base_price = "BasePrice" license_included = "LicenseIncluded" -class IntegrationRuntimeEdition(str, Enum): +class IntegrationRuntimeEdition(Enum): standard = "Standard" enterprise = "Enterprise" -class SsisObjectMetadataType(str, Enum): +class SsisObjectMetadataType(Enum): folder = "Folder" project = "Project" @@ -553,7 +553,7 @@ class SsisObjectMetadataType(str, Enum): environment = "Environment" -class IntegrationRuntimeAuthKeyName(str, Enum): +class IntegrationRuntimeAuthKeyName(Enum): auth_key1 = "authKey1" auth_key2 = "authKey2" diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity.py 
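One change in the enums file above deserves a caveat: every enum drops the `str` mixin, moving from `class Foo(str, Enum)` to `class Foo(Enum)`. With the mixin, members are strings, so they compare equal to their raw values and JSON-encode directly; without it, callers must go through `.value`. A self-contained illustration of the difference (class names invented for the demo):

```python
from enum import Enum

class StateWithStrMixin(str, Enum):   # the pre-patch shape
    started = "Started"

class StateWithoutMixin(Enum):        # the post-patch shape
    started = "Started"

assert StateWithStrMixin.started == "Started"        # str mixin: equal to its value
assert StateWithoutMixin.started != "Started"        # plain Enum: no longer equal
assert StateWithoutMixin.started.value == "Started"  # explicit .value required
```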
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity.py index 364dfd79d71a..2c31b8fda0ed 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity.py @@ -15,12 +15,10 @@ class DataLakeAnalyticsUSQLActivity(ExecutionActivity): """Data Lake Analytics U-SQL activity. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Activity name. :type name: str :param description: Activity description. :type description: str @@ -28,17 +26,17 @@ class DataLakeAnalyticsUSQLActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param script_path: Required. Case-sensitive path to folder that contains - the U-SQL script. Type: string (or Expression with resultType string). + :param script_path: Case-sensitive path to folder that contains the U-SQL + script. Type: string (or Expression with resultType string). :type script_path: object - :param script_linked_service: Required. Script linked service reference. + :param script_linked_service: Script linked service reference. 
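A sketch of the U-SQL activity under the new signature, where `name`, `script_path`, and `script_linked_service` lead as positional parameters (all values are placeholders):

```python
from azure.mgmt.datafactory.models import (
    DataLakeAnalyticsUSQLActivity,
    LinkedServiceReference,
)

activity = DataLakeAnalyticsUSQLActivity(
    name="TransformLogs",
    script_path="scripts/transform.usql",  # path is case-sensitive
    script_linked_service=LinkedServiceReference(reference_name="AdlsStore"),
    degree_of_parallelism=4,  # max nodes used simultaneously; default 1
    priority=100,             # lower number = higher priority; default 1000
)
```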
:type script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param degree_of_parallelism: The maximum number of nodes simultaneously @@ -86,13 +84,13 @@ class DataLakeAnalyticsUSQLActivity(ExecutionActivity): 'compilation_mode': {'key': 'typeProperties.compilationMode', 'type': 'object'}, } - def __init__(self, **kwargs): - super(DataLakeAnalyticsUSQLActivity, self).__init__(**kwargs) - self.script_path = kwargs.get('script_path', None) - self.script_linked_service = kwargs.get('script_linked_service', None) - self.degree_of_parallelism = kwargs.get('degree_of_parallelism', None) - self.priority = kwargs.get('priority', None) - self.parameters = kwargs.get('parameters', None) - self.runtime_version = kwargs.get('runtime_version', None) - self.compilation_mode = kwargs.get('compilation_mode', None) + def __init__(self, name, script_path, script_linked_service, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, degree_of_parallelism=None, priority=None, parameters=None, runtime_version=None, compilation_mode=None): + super(DataLakeAnalyticsUSQLActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) + self.script_path = script_path + self.script_linked_service = script_linked_service + self.degree_of_parallelism = degree_of_parallelism + self.priority = priority + self.parameters = parameters + self.runtime_version = runtime_version + self.compilation_mode = compilation_mode self.type = 'DataLakeAnalyticsU-SQL' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity_py3.py deleted file mode 100644 index 22623aa3622c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity_py3.py +++ /dev/null @@ -1,98 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class DataLakeAnalyticsUSQLActivity(ExecutionActivity): - """Data Lake Analytics U-SQL activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. 
- :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param script_path: Required. Case-sensitive path to folder that contains - the U-SQL script. Type: string (or Expression with resultType string). - :type script_path: object - :param script_linked_service: Required. Script linked service reference. - :type script_linked_service: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param degree_of_parallelism: The maximum number of nodes simultaneously - used to run the job. Default value is 1. Type: integer (or Expression with - resultType integer), minimum: 1. - :type degree_of_parallelism: object - :param priority: Determines which jobs out of all that are queued should - be selected to run first. The lower the number, the higher the priority. - Default value is 1000. Type: integer (or Expression with resultType - integer), minimum: 1. - :type priority: object - :param parameters: Parameters for U-SQL job request. - :type parameters: dict[str, object] - :param runtime_version: Runtime version of the U-SQL engine to use. Type: - string (or Expression with resultType string). - :type runtime_version: object - :param compilation_mode: Compilation mode of U-SQL. Must be one of these - values : Semantic, Full and SingleBox. Type: string (or Expression with - resultType string). - :type compilation_mode: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'script_path': {'required': True}, - 'script_linked_service': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, - 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, - 'degree_of_parallelism': {'key': 'typeProperties.degreeOfParallelism', 'type': 'object'}, - 'priority': {'key': 'typeProperties.priority', 'type': 'object'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, - 'runtime_version': {'key': 'typeProperties.runtimeVersion', 'type': 'object'}, - 'compilation_mode': {'key': 'typeProperties.compilationMode', 'type': 'object'}, - } - - def __init__(self, *, name: str, script_path, script_linked_service, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, degree_of_parallelism=None, priority=None, parameters=None, runtime_version=None, compilation_mode=None, **kwargs) -> None: - super(DataLakeAnalyticsUSQLActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.script_path = script_path - self.script_linked_service = script_linked_service - self.degree_of_parallelism = degree_of_parallelism - self.priority = priority - self.parameters = parameters - self.runtime_version = 
runtime_version - self.compilation_mode = compilation_mode - self.type = 'DataLakeAnalyticsU-SQL' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity.py index a49bd973e2b9..1c28434b0ee4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity.py @@ -15,12 +15,10 @@ class DatabricksNotebookActivity(ExecutionActivity): """DatabricksNotebook activity. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Activity name. :type name: str :param description: Activity description. :type description: str @@ -28,16 +26,16 @@ class DatabricksNotebookActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param notebook_path: Required. The absolute path of the notebook to be - run in the Databricks Workspace. This path must begin with a slash. Type: - string (or Expression with resultType string). + :param notebook_path: The absolute path of the notebook to be run in the + Databricks Workspace. This path must begin with a slash. Type: string (or + Expression with resultType string). 
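And the Databricks notebook activity, where `notebook_path` joins `name` as a required parameter and, per the docstring, must begin with a slash. Placeholder values throughout:

```python
from azure.mgmt.datafactory.models import DatabricksNotebookActivity

activity = DatabricksNotebookActivity(
    name="IngestDaily",
    notebook_path="/Shared/etl/ingest",          # must begin with a slash
    base_parameters={"run_date": "2019-06-07"},  # overrides notebook defaults
    libraries=[{"pypi": {"package": "simplejson"}}],  # one dict per library
)
```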
:type notebook_path: object :param base_parameters: Base parameters to be used for each run of this job.If the notebook takes a parameter that is not specified, the default @@ -68,9 +66,9 @@ class DatabricksNotebookActivity(ExecutionActivity): 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, } - def __init__(self, **kwargs): - super(DatabricksNotebookActivity, self).__init__(**kwargs) - self.notebook_path = kwargs.get('notebook_path', None) - self.base_parameters = kwargs.get('base_parameters', None) - self.libraries = kwargs.get('libraries', None) + def __init__(self, name, notebook_path, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, base_parameters=None, libraries=None): + super(DatabricksNotebookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) + self.notebook_path = notebook_path + self.base_parameters = base_parameters + self.libraries = libraries self.type = 'DatabricksNotebook' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity_py3.py deleted file mode 100644 index 7d2d464b7a1a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity_py3.py +++ /dev/null @@ -1,76 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class DatabricksNotebookActivity(ExecutionActivity): - """DatabricksNotebook activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param notebook_path: Required. The absolute path of the notebook to be - run in the Databricks Workspace. This path must begin with a slash. Type: - string (or Expression with resultType string). 
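The converted DatabricksNotebookActivity constructor above now takes name and notebook_path as plain positional parameters; a short sketch with hypothetical names (the library spec follows the usual ADF Databricks format):

from azure.mgmt.datafactory.models import DatabricksNotebookActivity

notebook = DatabricksNotebookActivity(
    'RunDailyNotebook',                          # name
    '/Shared/etl/daily',                         # notebook_path, must begin with a slash
    base_parameters={'run_date': '2019-06-07'},  # unspecified params fall back to notebook defaults
    libraries=[{'pypi': {'package': 'requests'}}],
)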
- :type notebook_path: object - :param base_parameters: Base parameters to be used for each run of this - job.If the notebook takes a parameter that is not specified, the default - value from the notebook will be used. - :type base_parameters: dict[str, object] - :param libraries: A list of libraries to be installed on the cluster that - will execute the job. - :type libraries: list[dict[str, object]] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'notebook_path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'notebook_path': {'key': 'typeProperties.notebookPath', 'type': 'object'}, - 'base_parameters': {'key': 'typeProperties.baseParameters', 'type': '{object}'}, - 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, - } - - def __init__(self, *, name: str, notebook_path, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, base_parameters=None, libraries=None, **kwargs) -> None: - super(DatabricksNotebookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.notebook_path = notebook_path - self.base_parameters = base_parameters - self.libraries = libraries - self.type = 'DatabricksNotebook' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity.py index 51e7245d12fe..73348cc09b72 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity.py @@ -15,12 +15,10 @@ class DatabricksSparkJarActivity(ExecutionActivity): """DatabricksSparkJar activity. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Activity name. :type name: str :param description: Activity description. :type description: str @@ -28,17 +26,16 @@ class DatabricksSparkJarActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param main_class_name: Required. 
The full name of the class containing - the main method to be executed. This class must be contained in a JAR - provided as a library. Type: string (or Expression with resultType - string). + :param main_class_name: The full name of the class containing the main + method to be executed. This class must be contained in a JAR provided as a + library. Type: string (or Expression with resultType string). :type main_class_name: object :param parameters: Parameters that will be passed to the main method. :type parameters: list[object] @@ -67,9 +64,9 @@ class DatabricksSparkJarActivity(ExecutionActivity): 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, } - def __init__(self, **kwargs): - super(DatabricksSparkJarActivity, self).__init__(**kwargs) - self.main_class_name = kwargs.get('main_class_name', None) - self.parameters = kwargs.get('parameters', None) - self.libraries = kwargs.get('libraries', None) + def __init__(self, name, main_class_name, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, parameters=None, libraries=None): + super(DatabricksSparkJarActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) + self.main_class_name = main_class_name + self.parameters = parameters + self.libraries = libraries self.type = 'DatabricksSparkJar' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity_py3.py deleted file mode 100644 index 6c33f3b51d1e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity_py3.py +++ /dev/null @@ -1,75 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class DatabricksSparkJarActivity(ExecutionActivity): - """DatabricksSparkJar activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param main_class_name: Required. 
The full name of the class containing - the main method to be executed. This class must be contained in a JAR - provided as a library. Type: string (or Expression with resultType - string). - :type main_class_name: object - :param parameters: Parameters that will be passed to the main method. - :type parameters: list[object] - :param libraries: A list of libraries to be installed on the cluster that - will execute the job. - :type libraries: list[dict[str, object]] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'main_class_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'main_class_name': {'key': 'typeProperties.mainClassName', 'type': 'object'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, - 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, - } - - def __init__(self, *, name: str, main_class_name, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, parameters=None, libraries=None, **kwargs) -> None: - super(DatabricksSparkJarActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.main_class_name = main_class_name - self.parameters = parameters - self.libraries = libraries - self.type = 'DatabricksSparkJar' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity.py index 56178d3882c5..f476fd1019a9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity.py @@ -15,12 +15,10 @@ class DatabricksSparkPythonActivity(ExecutionActivity): """DatabricksSparkPython activity. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Activity name. :type name: str :param description: Activity description. :type description: str @@ -28,16 +26,15 @@ class DatabricksSparkPythonActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. 
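The same pattern applies to the DatabricksSparkJarActivity conversion just above; class and JAR names are hypothetical:

from azure.mgmt.datafactory.models import DatabricksSparkJarActivity

jar = DatabricksSparkJarActivity(
    'RunSparkJar',                       # name
    'com.example.etl.Main',              # main_class_name, must live in an attached JAR library
    parameters=['--input', '/mnt/raw'],  # passed to the main method
    libraries=[{'jar': 'dbfs:/libs/etl.jar'}],
)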
:type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param python_file: Required. The URI of the Python file to be executed. - DBFS paths are supported. Type: string (or Expression with resultType - string). + :param python_file: The URI of the Python file to be executed. DBFS paths + are supported. Type: string (or Expression with resultType string). :type python_file: object :param parameters: Command line parameters that will be passed to the Python file. @@ -67,9 +64,9 @@ class DatabricksSparkPythonActivity(ExecutionActivity): 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, } - def __init__(self, **kwargs): - super(DatabricksSparkPythonActivity, self).__init__(**kwargs) - self.python_file = kwargs.get('python_file', None) - self.parameters = kwargs.get('parameters', None) - self.libraries = kwargs.get('libraries', None) + def __init__(self, name, python_file, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, parameters=None, libraries=None): + super(DatabricksSparkPythonActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) + self.python_file = python_file + self.parameters = parameters + self.libraries = libraries self.type = 'DatabricksSparkPython' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity_py3.py deleted file mode 100644 index 5b16d0d5e9ef..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity_py3.py +++ /dev/null @@ -1,75 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class DatabricksSparkPythonActivity(ExecutionActivity): - """DatabricksSparkPython activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param python_file: Required. The URI of the Python file to be executed. - DBFS paths are supported. 
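And for DatabricksSparkPythonActivity (file URI hypothetical; DBFS paths are supported per the docstring):

from azure.mgmt.datafactory.models import DatabricksSparkPythonActivity

py_job = DatabricksSparkPythonActivity(
    'RunPythonJob',                       # name
    'dbfs:/scripts/job.py',               # python_file
    parameters=['--date', '2019-06-07'],  # command line parameters for the Python file
    libraries=[{'pypi': {'package': 'pandas'}}],
)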
Type: string (or Expression with resultType - string). - :type python_file: object - :param parameters: Command line parameters that will be passed to the - Python file. - :type parameters: list[object] - :param libraries: A list of libraries to be installed on the cluster that - will execute the job. - :type libraries: list[dict[str, object]] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'python_file': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'python_file': {'key': 'typeProperties.pythonFile', 'type': 'object'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, - 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, - } - - def __init__(self, *, name: str, python_file, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, parameters=None, libraries=None, **kwargs) -> None: - super(DatabricksSparkPythonActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.python_file = python_file - self.parameters = parameters - self.libraries = libraries - self.type = 'DatabricksSparkPython' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py index e8e2974b4481..49eea57e719a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py @@ -49,8 +49,6 @@ class Dataset(Model): JsonDataset, DelimitedTextDataset, ParquetDataset, AvroDataset, AmazonS3Dataset - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -63,7 +61,7 @@ class Dataset(Model): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -75,7 +73,7 @@ class Dataset(Model): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. 
:type type: str """ @@ -100,14 +98,14 @@ class Dataset(Model): 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 
'AzureBlobDataset', 'Binary': 'BinaryDataset', 'Json': 'JsonDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } - def __init__(self, **kwargs): - super(Dataset, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.description = kwargs.get('description', None) - self.structure = kwargs.get('structure', None) - self.schema = kwargs.get('schema', None) - self.linked_service_name = kwargs.get('linked_service_name', None) - self.parameters = kwargs.get('parameters', None) - self.annotations = kwargs.get('annotations', None) - self.folder = kwargs.get('folder', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None): + super(Dataset, self).__init__() + self.additional_properties = additional_properties + self.description = description + self.structure = structure + self.schema = schema + self.linked_service_name = linked_service_name + self.parameters = parameters + self.annotations = annotations + self.folder = folder self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression.py index 71b041c5eb5b..d31981df108e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression.py @@ -15,12 +15,10 @@ class DatasetBZip2Compression(DatasetCompression): """The BZip2 compression method used on a dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str """ @@ -28,11 +26,6 @@ class DatasetBZip2Compression(DatasetCompression): 'type': {'required': True}, } - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(DatasetBZip2Compression, self).__init__(**kwargs) + def __init__(self, additional_properties=None): + super(DatasetBZip2Compression, self).__init__(additional_properties=additional_properties) self.type = 'BZip2' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression_py3.py deleted file mode 100644 index f97af4588e0a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression_py3.py +++ /dev/null @@ -1,38 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
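Dataset itself stays abstract: callers build one of the subclasses listed in the _subtype_map above, and the subclass constructor pins the type discriminator. A sketch assuming AzureBlobDataset keeps its folder_path/file_name parameters (names hypothetical):

from azure.mgmt.datafactory.models import AzureBlobDataset, LinkedServiceReference

blob_ds = AzureBlobDataset(
    linked_service_name=LinkedServiceReference(reference_name='BlobStore'),
    folder_path='container/input',  # Type: string (or Expression with resultType string)
    file_name='data.csv',
)
assert blob_ds.type == 'AzureBlob'  # set by the subclass, resolved back via _subtype_map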
-# -------------------------------------------------------------------------- - -from .dataset_compression_py3 import DatasetCompression - - -class DatasetBZip2Compression(DatasetCompression): - """The BZip2 compression method used on a dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(DatasetBZip2Compression, self).__init__(additional_properties=additional_properties, **kwargs) - self.type = 'BZip2' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression.py index c0c4e3d52624..78066e1cb4d8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression.py @@ -19,12 +19,10 @@ class DatasetCompression(Model): sub-classes are: DatasetZipDeflateCompression, DatasetDeflateCompression, DatasetGZipCompression, DatasetBZip2Compression - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str """ @@ -41,7 +39,7 @@ class DatasetCompression(Model): 'type': {'ZipDeflate': 'DatasetZipDeflateCompression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'BZip2': 'DatasetBZip2Compression'} } - def __init__(self, **kwargs): - super(DatasetCompression, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) + def __init__(self, additional_properties=None): + super(DatasetCompression, self).__init__() + self.additional_properties = additional_properties self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression_py3.py deleted file mode 100644 index 3b10abc69abf..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression_py3.py +++ /dev/null @@ -1,47 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class DatasetCompression(Model): - """The compression method used on a dataset. - - You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: DatasetZipDeflateCompression, DatasetDeflateCompression, - DatasetGZipCompression, DatasetBZip2Compression - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'ZipDeflate': 'DatasetZipDeflateCompression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'BZip2': 'DatasetBZip2Compression'} - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(DatasetCompression, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py index 9c97e2bfa5e3..e362d747db73 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py @@ -15,12 +15,10 @@ class DatasetDeflateCompression(DatasetCompression): """The Deflate compression method used on a dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param level: The Deflate compression level. :type level: object @@ -36,7 +34,7 @@ class DatasetDeflateCompression(DatasetCompression): 'level': {'key': 'level', 'type': 'object'}, } - def __init__(self, **kwargs): - super(DatasetDeflateCompression, self).__init__(**kwargs) - self.level = kwargs.get('level', None) + def __init__(self, additional_properties=None, level=None): + super(DatasetDeflateCompression, self).__init__(additional_properties=additional_properties) + self.level = level self.type = 'Deflate' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py deleted file mode 100644 index 11d00081bc1c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py +++ /dev/null @@ -1,42 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_compression_py3 import DatasetCompression - - -class DatasetDeflateCompression(DatasetCompression): - """The Deflate compression method used on a dataset. 
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. - :type type: str - :param level: The Deflate compression level. - :type level: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None: - super(DatasetDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs) - self.level = level - self.type = 'Deflate' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder.py index 882c84a1e84c..caeb62a3416b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder.py @@ -24,6 +24,6 @@ class DatasetFolder(Model): 'name': {'key': 'name', 'type': 'str'}, } - def __init__(self, **kwargs): - super(DatasetFolder, self).__init__(**kwargs) - self.name = kwargs.get('name', None) + def __init__(self, name=None): + super(DatasetFolder, self).__init__() + self.name = name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder_py3.py deleted file mode 100644 index ea7fc313f967..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder_py3.py +++ /dev/null @@ -1,29 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class DatasetFolder(Model): - """The folder that this Dataset is in. If not specified, Dataset will appear - at the root level. - - :param name: The name of the folder that this Dataset is in. - :type name: str - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - } - - def __init__(self, *, name: str=None, **kwargs) -> None: - super(DatasetFolder, self).__init__(**kwargs) - self.name = name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py index 4925127c7f0f..99064460bd1f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py @@ -15,12 +15,10 @@ class DatasetGZipCompression(DatasetCompression): """The GZip compression method used on a dataset. - All required parameters must be populated in order to send to Azure. 
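A sketch of the converted compression models: Deflate and GZip accept an optional level ('Optimal' or 'Fastest' are the values Data Factory accepts, to my knowledge), while BZip2 takes none:

from azure.mgmt.datafactory.models import (
    DatasetBZip2Compression, DatasetGZipCompression)

gz = DatasetGZipCompression(level='Optimal')  # type is pinned to 'GZip'
bz = DatasetBZip2Compression()                # no level parameter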
- :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param level: The GZip compression level. :type level: object @@ -36,7 +34,7 @@ class DatasetGZipCompression(DatasetCompression): 'level': {'key': 'level', 'type': 'object'}, } - def __init__(self, **kwargs): - super(DatasetGZipCompression, self).__init__(**kwargs) - self.level = kwargs.get('level', None) + def __init__(self, additional_properties=None, level=None): + super(DatasetGZipCompression, self).__init__(additional_properties=additional_properties) + self.level = level self.type = 'GZip' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py deleted file mode 100644 index 97346e06366d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py +++ /dev/null @@ -1,42 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_compression_py3 import DatasetCompression - - -class DatasetGZipCompression(DatasetCompression): - """The GZip compression method used on a dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. - :type type: str - :param level: The GZip compression level. - :type level: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None: - super(DatasetGZipCompression, self).__init__(additional_properties=additional_properties, **kwargs) - self.level = level - self.type = 'GZip' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location.py index 2c318a91cccb..3b14089552f7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location.py @@ -15,12 +15,10 @@ class DatasetLocation(Model): """Dataset location. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. + :param type: Type of dataset storage location. :type type: str :param folder_path: Specify the folder path of dataset. 
Type: string (or Expression with resultType string) @@ -41,9 +39,9 @@ class DatasetLocation(Model): 'file_name': {'key': 'fileName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(DatasetLocation, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = kwargs.get('type', None) - self.folder_path = kwargs.get('folder_path', None) - self.file_name = kwargs.get('file_name', None) + def __init__(self, type, additional_properties=None, folder_path=None, file_name=None): + super(DatasetLocation, self).__init__() + self.additional_properties = additional_properties + self.type = type + self.folder_path = folder_path + self.file_name = file_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location_py3.py deleted file mode 100644 index d4e32d753197..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location_py3.py +++ /dev/null @@ -1,49 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class DatasetLocation(Model): - """Dataset location. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). - :type file_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: - super(DatasetLocation, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type = type - self.folder_path = folder_path - self.file_name = file_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py deleted file mode 100644 index 82550c2a0df8..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py +++ /dev/null @@ -1,113 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. 
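DatasetLocation now takes its type discriminator positionally; the location kind below is an assumed value in line with this PR's ADLS Gen 2 additions, and the paths are hypothetical:

from azure.mgmt.datafactory.models import DatasetLocation

loc = DatasetLocation(
    'AzureBlobFSLocation',         # type of dataset storage location (assumed kind)
    folder_path='filesystem/raw',  # Type: string (or Expression with resultType string)
    file_name='events.json',
)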
-# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class Dataset(Model): - """The Azure Data Factory nested object which identifies data within different - data stores, such as tables, files, folders, and documents. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: GoogleAdWordsObjectDataset, AzureDataExplorerTableDataset, - OracleServiceCloudObjectDataset, DynamicsAXResourceDataset, - ResponsysObjectDataset, SalesforceMarketingCloudObjectDataset, - VerticaTableDataset, NetezzaTableDataset, ZohoObjectDataset, - XeroObjectDataset, SquareObjectDataset, SparkObjectDataset, - ShopifyObjectDataset, ServiceNowObjectDataset, QuickBooksObjectDataset, - PrestoObjectDataset, PhoenixObjectDataset, PaypalObjectDataset, - MarketoObjectDataset, AzureMariaDBTableDataset, MariaDBTableDataset, - MagentoObjectDataset, JiraObjectDataset, ImpalaObjectDataset, - HubspotObjectDataset, HiveObjectDataset, HBaseObjectDataset, - GreenplumTableDataset, GoogleBigQueryObjectDataset, EloquaObjectDataset, - DrillTableDataset, CouchbaseTableDataset, ConcurObjectDataset, - AzurePostgreSqlTableDataset, AmazonMWSObjectDataset, HttpDataset, - AzureSearchIndexDataset, WebTableDataset, SapTableResourceDataset, - RestResourceDataset, SqlServerTableDataset, SapOpenHubTableDataset, - SapHanaTableDataset, SapEccResourceDataset, - SapCloudForCustomerResourceDataset, SapBwCubeDataset, SybaseTableDataset, - SalesforceServiceCloudObjectDataset, SalesforceObjectDataset, - MicrosoftAccessTableDataset, PostgreSqlTableDataset, MySqlTableDataset, - OdbcTableDataset, InformixTableDataset, RelationalTableDataset, - Db2TableDataset, AmazonRedshiftTableDataset, AzureMySqlTableDataset, - TeradataTableDataset, OracleTableDataset, ODataResourceDataset, - CosmosDbMongoDbApiCollectionDataset, MongoDbV2CollectionDataset, - MongoDbCollectionDataset, FileShareDataset, Office365Dataset, - AzureBlobFSDataset, AzureDataLakeStoreDataset, - CommonDataServiceForAppsEntityDataset, DynamicsCrmEntityDataset, - DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, - CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, - AzureSqlTableDataset, AzureTableDataset, AzureBlobDataset, BinaryDataset, - JsonDataset, DelimitedTextDataset, ParquetDataset, AvroDataset, - AmazonS3Dataset - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. 
- :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 
'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'Json': 'JsonDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: - super(Dataset, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.description = description - self.structure = structure - self.schema = schema - self.linked_service_name = linked_service_name - self.parameters = parameters - self.annotations = annotations - self.folder = folder - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference.py index ca3d385f31ce..006074933fe7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference.py @@ -18,12 +18,9 @@ class DatasetReference(Model): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Dataset reference type. Default value: - "DatasetReference" . + :ivar type: Dataset reference type. Default value: "DatasetReference" . :vartype type: str - :param reference_name: Required. Reference dataset name. + :param reference_name: Reference dataset name. :type reference_name: str :param parameters: Arguments for dataset. 
:type parameters: dict[str, object] @@ -42,7 +39,7 @@ class DatasetReference(Model): type = "DatasetReference" - def __init__(self, **kwargs): - super(DatasetReference, self).__init__(**kwargs) - self.reference_name = kwargs.get('reference_name', None) - self.parameters = kwargs.get('parameters', None) + def __init__(self, reference_name, parameters=None): + super(DatasetReference, self).__init__() + self.reference_name = reference_name + self.parameters = parameters diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference_py3.py deleted file mode 100644 index 80162fd77da1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference_py3.py +++ /dev/null @@ -1,48 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class DatasetReference(Model): - """Dataset reference type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Dataset reference type. Default value: - "DatasetReference" . - :vartype type: str - :param reference_name: Required. Reference dataset name. - :type reference_name: str - :param parameters: Arguments for dataset. - :type parameters: dict[str, object] - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, - } - - type = "DatasetReference" - - def __init__(self, *, reference_name: str, parameters=None, **kwargs) -> None: - super(DatasetReference, self).__init__(**kwargs) - self.reference_name = reference_name - self.parameters = parameters diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource.py index a68fb563e425..ec6a78c21f79 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource.py @@ -18,8 +18,6 @@ class DatasetResource(SubResource): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. :vartype id: str :ivar name: The resource name. @@ -28,7 +26,7 @@ class DatasetResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param properties: Required. Dataset properties. + :param properties: Dataset properties. 
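DatasetReference keeps its constant type as a class attribute, so only the target dataset name (plus optional arguments) is passed; names hypothetical:

from azure.mgmt.datafactory.models import DatasetReference

ref = DatasetReference('DailySales', parameters={'runDate': '2019-06-07'})
assert ref.type == 'DatasetReference'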
:type properties: ~azure.mgmt.datafactory.models.Dataset """ @@ -48,6 +46,6 @@ class DatasetResource(SubResource): 'properties': {'key': 'properties', 'type': 'Dataset'}, } - def __init__(self, **kwargs): - super(DatasetResource, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) + def __init__(self, properties): + super(DatasetResource, self).__init__() + self.properties = properties diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource_py3.py deleted file mode 100644 index 6eb099dcb884..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource_py3.py +++ /dev/null @@ -1,53 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .sub_resource_py3 import SubResource - - -class DatasetResource(SubResource): - """Dataset resource type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Dataset properties. - :type properties: ~azure.mgmt.datafactory.models.Dataset - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'Dataset'}, - } - - def __init__(self, *, properties, **kwargs) -> None: - super(DatasetResource, self).__init__(**kwargs) - self.properties = properties diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format.py index b3160565230d..2ae233fac19f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format.py @@ -19,8 +19,6 @@ class DatasetStorageFormat(Model): sub-classes are: ParquetFormat, OrcFormat, AvroFormat, JsonFormat, TextFormat - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -30,7 +28,7 @@ class DatasetStorageFormat(Model): :param deserializer: Deserializer. Type: string (or Expression with resultType string). :type deserializer: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. 
:type type: str """ @@ -49,9 +47,9 @@ class DatasetStorageFormat(Model): 'type': {'ParquetFormat': 'ParquetFormat', 'OrcFormat': 'OrcFormat', 'AvroFormat': 'AvroFormat', 'JsonFormat': 'JsonFormat', 'TextFormat': 'TextFormat'} } - def __init__(self, **kwargs): - super(DatasetStorageFormat, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.serializer = kwargs.get('serializer', None) - self.deserializer = kwargs.get('deserializer', None) + def __init__(self, additional_properties=None, serializer=None, deserializer=None): + super(DatasetStorageFormat, self).__init__() + self.additional_properties = additional_properties + self.serializer = serializer + self.deserializer = deserializer self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format_py3.py deleted file mode 100644 index faf746642d9e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class DatasetStorageFormat(Model): - """The format definition of a storage. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ParquetFormat, OrcFormat, AvroFormat, JsonFormat, - TextFormat - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param serializer: Serializer. Type: string (or Expression with resultType - string). - :type serializer: object - :param deserializer: Deserializer. Type: string (or Expression with - resultType string). - :type deserializer: object - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'ParquetFormat': 'ParquetFormat', 'OrcFormat': 'OrcFormat', 'AvroFormat': 'AvroFormat', 'JsonFormat': 'JsonFormat', 'TextFormat': 'TextFormat'} - } - - def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, **kwargs) -> None: - super(DatasetStorageFormat, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.serializer = serializer - self.deserializer = deserializer - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py index ed80bf3cbcf2..86f2b69b7234 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py @@ -15,12 +15,10 @@ class DatasetZipDeflateCompression(DatasetCompression): """The ZipDeflate compression method used on a dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param level: The ZipDeflate compression level. :type level: object @@ -36,7 +34,7 @@ class DatasetZipDeflateCompression(DatasetCompression): 'level': {'key': 'level', 'type': 'object'}, } - def __init__(self, **kwargs): - super(DatasetZipDeflateCompression, self).__init__(**kwargs) - self.level = kwargs.get('level', None) + def __init__(self, additional_properties=None, level=None): + super(DatasetZipDeflateCompression, self).__init__(additional_properties=additional_properties) + self.level = level self.type = 'ZipDeflate' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py deleted file mode 100644 index 20abd6fe1088..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py +++ /dev/null @@ -1,42 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_compression_py3 import DatasetCompression - - -class DatasetZipDeflateCompression(DatasetCompression): - """The ZipDeflate compression method used on a dataset. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. - :type type: str - :param level: The ZipDeflate compression level. - :type level: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None: - super(DatasetZipDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs) - self.level = level - self.type = 'ZipDeflate' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service.py index d163d2b93c18..c869ed972ad1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service.py @@ -15,8 +15,6 @@ class Db2LinkedService(LinkedService): """Linked service for DB2 data source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,13 +29,13 @@ class Db2LinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param server: Required. Server name for connection. Type: string (or - Expression with resultType string). + :param server: Server name for connection. Type: string (or Expression + with resultType string). :type server: object - :param database: Required. Database name for connection. Type: string (or - Expression with resultType string). + :param database: Database name for connection. Type: string (or Expression + with resultType string). :type database: object :param authentication_type: AuthenticationType to be used for connection. 
Possible values include: 'Basic' @@ -75,12 +73,12 @@ class Db2LinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(Db2LinkedService, self).__init__(**kwargs) - self.server = kwargs.get('server', None) - self.database = kwargs.get('database', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, server, database, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, authentication_type=None, username=None, password=None, encrypted_credential=None): + super(Db2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.server = server + self.database = database + self.authentication_type = authentication_type + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential self.type = 'Db2' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service_py3.py deleted file mode 100644 index 44d784fa9bde..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service_py3.py +++ /dev/null @@ -1,86 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class Db2LinkedService(LinkedService): - """Linked service for DB2 data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param server: Required. Server name for connection. Type: string (or - Expression with resultType string). - :type server: object - :param database: Required. Database name for connection. Type: string (or - Expression with resultType string). - :type database: object - :param authentication_type: AuthenticationType to be used for connection. - Possible values include: 'Basic' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.Db2AuthenticationType - :param username: Username for authentication. 
Type: string (or Expression - with resultType string). - :type username: object - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'database': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, server, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(Db2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.server = server - self.database = database - self.authentication_type = authentication_type - self.username = username - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'Db2' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source.py index a6e8c31ffa1f..469c22c5964a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source.py @@ -15,8 +15,6 @@ class Db2Source(CopySource): """A copy activity source for Db2 databases. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class Db2Source(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType string). 
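
Taken together, the Db2LinkedService hunks above show the shape of this whole change: the module absorbs the explicit constructor that previously lived in the deleted _py3 file, so required properties (server, database) become mandatory parameters instead of kwargs.get(...) lookups. A minimal usage sketch under that reading; the host and database names are illustrative, and the import path follows the ~azure.mgmt.datafactory.models references in the docstrings:

    from azure.mgmt.datafactory.models import Db2LinkedService

    # 'server' and 'database' are now explicit, required parameters;
    # omitting either raises TypeError at call time rather than a
    # serialization-time validation error.
    db2 = Db2LinkedService(
        server='db2.contoso.example',   # illustrative value
        database='SAMPLE',              # illustrative value
        authentication_type='Basic',
        username='dbuser',
    )
    assert db2.type == 'Db2'  # discriminator is still filled by the model
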
@@ -51,7 +49,7 @@ class Db2Source(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(Db2Source, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'Db2Source' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source_py3.py deleted file mode 100644 index 20b169699ae0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class Db2Source(CopySource): - """A copy activity source for Db2 databases. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'Db2Source' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_table_dataset.py index 7092d5fc6cb3..d9ef4fa69d04 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_table_dataset.py @@ -15,8 +15,6 @@ class Db2TableDataset(Dataset): """The Db2 table dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class Db2TableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class Db2TableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. 
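
For copy sources such as the Db2Source just above, the same rewrite is purely mechanical, since none of the properties are required; only the docstring loses its "Required." marker on type, which remains a server-filled constant that callers never supply. A short sketch of the resulting call, with an illustrative query:

    from azure.mgmt.datafactory.models import Db2Source

    # All parameters stay optional, so the no-argument form remains valid.
    source = Db2Source(query='SELECT * FROM SAMPLE.EMPLOYEE')
    assert source.type == 'Db2Source'  # set by the model, not the caller
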
@@ -74,9 +72,9 @@ class Db2TableDataset(Dataset): 'table': {'key': 'typeProperties.table', 'type': 'object'}, } - def __init__(self, **kwargs): - super(Db2TableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.db2_table_dataset_schema = kwargs.get('db2_table_dataset_schema', None) - self.table = kwargs.get('table', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, db2_table_dataset_schema=None, table=None): + super(Db2TableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name + self.db2_table_dataset_schema = db2_table_dataset_schema + self.table = table self.type = 'Db2Table' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_table_dataset_py3.py deleted file mode 100644 index 3fa296454a69..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_table_dataset_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class Db2TableDataset(Dataset): - """The Db2 table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param db2_table_dataset_schema: The Db2 schema name. Type: string (or - Expression with resultType string). 
- :type db2_table_dataset_schema: object - :param table: The Db2 table name. Type: string (or Expression with - resultType string). - :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'db2_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, db2_table_dataset_schema=None, table=None, **kwargs) -> None: - super(Db2TableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.db2_table_dataset_schema = db2_table_dataset_schema - self.table = table - self.type = 'Db2Table' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity.py index 34ba33a414d5..232858510c29 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity.py @@ -15,12 +15,10 @@ class DeleteActivity(ExecutionActivity): """Delete activity. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Activity name. :type name: str :param description: Activity description. :type description: str @@ -28,7 +26,7 @@ class DeleteActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: @@ -50,7 +48,7 @@ class DeleteActivity(ExecutionActivity): when enableLogging is true. :type log_storage_settings: ~azure.mgmt.datafactory.models.LogStorageSettings - :param dataset: Required. Delete activity dataset reference. + :param dataset: Delete activity dataset reference. 
:type dataset: ~azure.mgmt.datafactory.models.DatasetReference """ @@ -77,11 +75,11 @@ class DeleteActivity(ExecutionActivity): 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, } - def __init__(self, **kwargs): - super(DeleteActivity, self).__init__(**kwargs) - self.recursive = kwargs.get('recursive', None) - self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) - self.enable_logging = kwargs.get('enable_logging', None) - self.log_storage_settings = kwargs.get('log_storage_settings', None) - self.dataset = kwargs.get('dataset', None) + def __init__(self, name, dataset, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, recursive=None, max_concurrent_connections=None, enable_logging=None, log_storage_settings=None): + super(DeleteActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) + self.recursive = recursive + self.max_concurrent_connections = max_concurrent_connections + self.enable_logging = enable_logging + self.log_storage_settings = log_storage_settings + self.dataset = dataset self.type = 'Delete' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity_py3.py deleted file mode 100644 index 5107d9a3381a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity_py3.py +++ /dev/null @@ -1,87 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class DeleteActivity(ExecutionActivity): - """Delete activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param recursive: If true, files or sub-folders under current folder path - will be deleted recursively. Default is false. Type: boolean (or - Expression with resultType boolean). - :type recursive: object - :param max_concurrent_connections: The max concurrent connections to - connect data source at the same time. 
- :type max_concurrent_connections: int - :param enable_logging: Whether to record detailed logs of delete-activity - execution. Default value is false. Type: boolean (or Expression with - resultType boolean). - :type enable_logging: object - :param log_storage_settings: Log storage settings customer need to provide - when enableLogging is true. - :type log_storage_settings: - ~azure.mgmt.datafactory.models.LogStorageSettings - :param dataset: Required. Delete activity dataset reference. - :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'max_concurrent_connections': {'minimum': 1}, - 'dataset': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'recursive': {'key': 'typeProperties.recursive', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'typeProperties.maxConcurrentConnections', 'type': 'int'}, - 'enable_logging': {'key': 'typeProperties.enableLogging', 'type': 'object'}, - 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, - } - - def __init__(self, *, name: str, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, recursive=None, max_concurrent_connections: int=None, enable_logging=None, log_storage_settings=None, **kwargs) -> None: - super(DeleteActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.recursive = recursive - self.max_concurrent_connections = max_concurrent_connections - self.enable_logging = enable_logging - self.log_storage_settings = log_storage_settings - self.dataset = dataset - self.type = 'Delete' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset.py index bfee26fcd12c..c2bd4a61cd0a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset.py @@ -15,8 +15,6 @@ class DelimitedTextDataset(Dataset): """Delimited text dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class DelimitedTextDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. 
:type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,9 +39,9 @@ class DelimitedTextDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param location: Required. The location of the delimited text storage. + :param location: The location of the delimited text storage. :type location: ~azure.mgmt.datafactory.models.DatasetLocation :param column_delimiter: The column delimiter. Type: string (or Expression with resultType string). @@ -107,16 +105,16 @@ class DelimitedTextDataset(Dataset): 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, } - def __init__(self, **kwargs): - super(DelimitedTextDataset, self).__init__(**kwargs) - self.location = kwargs.get('location', None) - self.column_delimiter = kwargs.get('column_delimiter', None) - self.row_delimiter = kwargs.get('row_delimiter', None) - self.encoding_name = kwargs.get('encoding_name', None) - self.compression_codec = kwargs.get('compression_codec', None) - self.compression_level = kwargs.get('compression_level', None) - self.quote_char = kwargs.get('quote_char', None) - self.escape_char = kwargs.get('escape_char', None) - self.first_row_as_header = kwargs.get('first_row_as_header', None) - self.null_value = kwargs.get('null_value', None) + def __init__(self, linked_service_name, location, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, column_delimiter=None, row_delimiter=None, encoding_name=None, compression_codec=None, compression_level=None, quote_char=None, escape_char=None, first_row_as_header=None, null_value=None): + super(DelimitedTextDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.location = location + self.column_delimiter = column_delimiter + self.row_delimiter = row_delimiter + self.encoding_name = encoding_name + self.compression_codec = compression_codec + self.compression_level = compression_level + self.quote_char = quote_char + self.escape_char = escape_char + self.first_row_as_header = first_row_as_header + self.null_value = null_value self.type = 'DelimitedText' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset_py3.py deleted file mode 100644 index c2597e6a022b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset_py3.py +++ /dev/null @@ -1,122 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class DelimitedTextDataset(Dataset): - """Delimited text dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param location: Required. The location of the delimited text storage. - :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param column_delimiter: The column delimiter. Type: string (or Expression - with resultType string). - :type column_delimiter: object - :param row_delimiter: The row delimiter. Type: string (or Expression with - resultType string). - :type row_delimiter: object - :param encoding_name: The code page name of the preferred encoding. If - miss, the default value is UTF-8, unless BOM denotes another Unicode - encoding. Refer to the name column of the table in the following link to - set supported values: - https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string - (or Expression with resultType string). - :type encoding_name: object - :param compression_codec: - :type compression_codec: object - :param compression_level: The data compression method used for - DelimitedText. - :type compression_level: object - :param quote_char: The quote character. Type: string (or Expression with - resultType string). - :type quote_char: object - :param escape_char: The escape character. Type: string (or Expression with - resultType string). - :type escape_char: object - :param first_row_as_header: When used as input, treat the first row of - data as headers. When used as output,write the headers into the output as - the first row of data. The default value is false. Type: boolean (or - Expression with resultType boolean). - :type first_row_as_header: object - :param null_value: The null value string. Type: string (or Expression with - resultType string). 
- :type null_value: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'location': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'}, - 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, - 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, - 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'}, - 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, - 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, - 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, - 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, column_delimiter=None, row_delimiter=None, encoding_name=None, compression_codec=None, compression_level=None, quote_char=None, escape_char=None, first_row_as_header=None, null_value=None, **kwargs) -> None: - super(DelimitedTextDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.location = location - self.column_delimiter = column_delimiter - self.row_delimiter = row_delimiter - self.encoding_name = encoding_name - self.compression_codec = compression_codec - self.compression_level = compression_level - self.quote_char = quote_char - self.escape_char = escape_char - self.first_row_as_header = first_row_as_header - self.null_value = null_value - self.type = 'DelimitedText' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings.py index 364b103c426a..54f210b41f31 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings.py @@ -15,12 +15,10 @@ class DelimitedTextReadSettings(FormatReadSettings): """Delimited text read settings. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. The read setting type. + :param type: The read setting type. 
:type type: str :param skip_line_count: Indicates the number of non-empty rows to skip when reading data from input files. Type: integer (or Expression with @@ -38,6 +36,6 @@ class DelimitedTextReadSettings(FormatReadSettings): 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, } - def __init__(self, **kwargs): - super(DelimitedTextReadSettings, self).__init__(**kwargs) - self.skip_line_count = kwargs.get('skip_line_count', None) + def __init__(self, type, additional_properties=None, skip_line_count=None): + super(DelimitedTextReadSettings, self).__init__(additional_properties=additional_properties, type=type) + self.skip_line_count = skip_line_count diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings_py3.py deleted file mode 100644 index 62aa0327cfb9..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings_py3.py +++ /dev/null @@ -1,43 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .format_read_settings_py3 import FormatReadSettings - - -class DelimitedTextReadSettings(FormatReadSettings): - """Delimited text read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param skip_line_count: Indicates the number of non-empty rows to skip - when reading data from input files. Type: integer (or Expression with - resultType integer). - :type skip_line_count: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, skip_line_count=None, **kwargs) -> None: - super(DelimitedTextReadSettings, self).__init__(additional_properties=additional_properties, type=type, **kwargs) - self.skip_line_count = skip_line_count diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py index 15e0e590b4ee..f6f64392a9a0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py @@ -15,8 +15,6 @@ class DelimitedTextSink(CopySink): """A copy activity DelimitedText sink. - All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -38,7 +36,7 @@ class DelimitedTextSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param store_settings: DelimitedText store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings @@ -63,8 +61,8 @@ class DelimitedTextSink(CopySink): 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'}, } - def __init__(self, **kwargs): - super(DelimitedTextSink, self).__init__(**kwargs) - self.store_settings = kwargs.get('store_settings', None) - self.format_settings = kwargs.get('format_settings', None) + def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None): + super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.store_settings = store_settings + self.format_settings = format_settings self.type = 'DelimitedTextSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py deleted file mode 100644 index 6481f8021527..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py +++ /dev/null @@ -1,70 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class DelimitedTextSink(CopySink): - """A copy activity DelimitedText sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. 
Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: DelimitedText store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - :param format_settings: DelimitedText format settings. - :type format_settings: - ~azure.mgmt.datafactory.models.DelimitedTextWriteSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: - super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.store_settings = store_settings - self.format_settings = format_settings - self.type = 'DelimitedTextSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py index 10a842ca374a..7b8462198800 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py @@ -15,8 +15,6 @@ class DelimitedTextSource(CopySource): """A copy activity DelimitedText source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class DelimitedTextSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param store_settings: DelimitedText store settings. 
:type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings @@ -54,8 +52,8 @@ class DelimitedTextSource(CopySource): 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'}, } - def __init__(self, **kwargs): - super(DelimitedTextSource, self).__init__(**kwargs) - self.store_settings = kwargs.get('store_settings', None) - self.format_settings = kwargs.get('format_settings', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None): + super(DelimitedTextSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.store_settings = store_settings + self.format_settings = format_settings self.type = 'DelimitedTextSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py deleted file mode 100644 index e551e32c847e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py +++ /dev/null @@ -1,61 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class DelimitedTextSource(CopySource): - """A copy activity DelimitedText source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: DelimitedText store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - :param format_settings: DelimitedText format settings. 
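Likewise for DelimitedTextSource, a minimal sketch against the new signature (the retry wait follows the timespan pattern from the docstring; values are placeholders):

    from azure.mgmt.datafactory.models import DelimitedTextSource

    # store_settings / format_settings default to None and can be filled
    # with StoreReadSettings / DelimitedTextReadSettings instances later.
    source = DelimitedTextSource(
        source_retry_count=3,
        source_retry_wait='00:00:30',
        max_concurrent_connections=2,
    )
    assert source.type == 'DelimitedTextSource'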
- :type format_settings: - ~azure.mgmt.datafactory.models.DelimitedTextReadSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: - super(DelimitedTextSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.store_settings = store_settings - self.format_settings = format_settings - self.type = 'DelimitedTextSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings.py index 5e0d8db319e5..db355b5a2dff 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings.py @@ -15,19 +15,17 @@ class DelimitedTextWriteSettings(FormatWriteSettings): """Delimited text write settings. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. The write setting type. + :param type: The write setting type. :type type: str :param quote_all_text: Indicates whether string values should always be enclosed with quotes. Type: boolean (or Expression with resultType boolean). :type quote_all_text: object - :param file_extension: Required. The file extension used to create the - files. Type: string (or Expression with resultType string). + :param file_extension: The file extension used to create the files. Type: + string (or Expression with resultType string). 
:type file_extension: object """ @@ -43,7 +41,7 @@ class DelimitedTextWriteSettings(FormatWriteSettings): 'file_extension': {'key': 'fileExtension', 'type': 'object'}, } - def __init__(self, **kwargs): - super(DelimitedTextWriteSettings, self).__init__(**kwargs) - self.quote_all_text = kwargs.get('quote_all_text', None) - self.file_extension = kwargs.get('file_extension', None) + def __init__(self, type, file_extension, additional_properties=None, quote_all_text=None): + super(DelimitedTextWriteSettings, self).__init__(additional_properties=additional_properties, type=type) + self.quote_all_text = quote_all_text + self.file_extension = file_extension diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings_py3.py deleted file mode 100644 index 2be019ab1e6a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings_py3.py +++ /dev/null @@ -1,49 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .format_write_settings_py3 import FormatWriteSettings - - -class DelimitedTextWriteSettings(FormatWriteSettings): - """Delimited text write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str - :param quote_all_text: Indicates whether string values should always be - enclosed with quotes. Type: boolean (or Expression with resultType - boolean). - :type quote_all_text: object - :param file_extension: Required. The file extension used to create the - files. Type: string (or Expression with resultType string). 
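For DelimitedTextWriteSettings the two leading arguments, type and file_extension, are positional in the new signature. A sketch; the discriminator string passed as type is an assumption, not something this patch pins down:

    from azure.mgmt.datafactory.models import DelimitedTextWriteSettings

    # type and file_extension come first; quote_all_text stays optional.
    settings = DelimitedTextWriteSettings(
        'DelimitedTextWriteSettings',  # assumed discriminator value
        '.csv',
        quote_all_text=True,
    )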
- :type file_extension: object - """ - - _validation = { - 'type': {'required': True}, - 'file_extension': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, - 'file_extension': {'key': 'fileExtension', 'type': 'object'}, - } - - def __init__(self, *, type: str, file_extension, additional_properties=None, quote_all_text=None, **kwargs) -> None: - super(DelimitedTextWriteSettings, self).__init__(additional_properties=additional_properties, type=type, **kwargs) - self.quote_all_text = quote_all_text - self.file_extension = file_extension diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference.py index 89e750df8f0d..c807dc52afe1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference.py @@ -19,9 +19,7 @@ class DependencyReference(Model): sub-classes are: SelfDependencyTumblingWindowTriggerReference, TriggerDependencyReference - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str """ @@ -37,6 +35,6 @@ class DependencyReference(Model): 'type': {'SelfDependencyTumblingWindowTriggerReference': 'SelfDependencyTumblingWindowTriggerReference', 'TriggerDependencyReference': 'TriggerDependencyReference'} } - def __init__(self, **kwargs): - super(DependencyReference, self).__init__(**kwargs) + def __init__(self): + super(DependencyReference, self).__init__() self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference_py3.py deleted file mode 100644 index 1b0647b74991..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference_py3.py +++ /dev/null @@ -1,42 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class DependencyReference(Model): - """Referenced dependency. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SelfDependencyTumblingWindowTriggerReference, - TriggerDependencyReference - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SelfDependencyTumblingWindowTriggerReference': 'SelfDependencyTumblingWindowTriggerReference', 'TriggerDependencyReference': 'TriggerDependencyReference'} - } - - def __init__(self, **kwargs) -> None: - super(DependencyReference, self).__init__(**kwargs) - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings.py index a8065ec3cc06..fde14d4a8c35 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings.py @@ -15,16 +15,13 @@ class DistcpSettings(Model): """Distcp settings. - All required parameters must be populated in order to send to Azure. - - :param resource_manager_endpoint: Required. Specifies the Yarn - ResourceManager endpoint. Type: string (or Expression with resultType - string). + :param resource_manager_endpoint: Specifies the Yarn ResourceManager + endpoint. Type: string (or Expression with resultType string). :type resource_manager_endpoint: object - :param temp_script_path: Required. Specifies an existing folder path which - will be used to store temp Distcp command script. The script file is - generated by ADF and will be removed after Copy job finished. Type: string - (or Expression with resultType string). + :param temp_script_path: Specifies an existing folder path which will be + used to store temp Distcp command script. The script file is generated by + ADF and will be removed after Copy job finished. Type: string (or + Expression with resultType string). :type temp_script_path: object :param distcp_options: Specifies the Distcp options. Type: string (or Expression with resultType string). @@ -42,8 +39,8 @@ class DistcpSettings(Model): 'distcp_options': {'key': 'distcpOptions', 'type': 'object'}, } - def __init__(self, **kwargs): - super(DistcpSettings, self).__init__(**kwargs) - self.resource_manager_endpoint = kwargs.get('resource_manager_endpoint', None) - self.temp_script_path = kwargs.get('temp_script_path', None) - self.distcp_options = kwargs.get('distcp_options', None) + def __init__(self, resource_manager_endpoint, temp_script_path, distcp_options=None): + super(DistcpSettings, self).__init__() + self.resource_manager_endpoint = resource_manager_endpoint + self.temp_script_path = temp_script_path + self.distcp_options = distcp_options diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings_py3.py deleted file mode 100644 index 628e2d207f8e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings_py3.py +++ /dev/null @@ -1,49 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
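A sketch of the new DistcpSettings signature above, where resource_manager_endpoint and temp_script_path become leading positional arguments (both values here are hypothetical):

    from azure.mgmt.datafactory.models import DistcpSettings

    distcp = DistcpSettings(
        'http://my-yarn-rm:8088',  # hypothetical Yarn ResourceManager endpoint
        '/tmp/adf-distcp',         # hypothetical temp script folder
        distcp_options='-m 10',    # optional Distcp flags
    )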
-# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class DistcpSettings(Model): - """Distcp settings. - - All required parameters must be populated in order to send to Azure. - - :param resource_manager_endpoint: Required. Specifies the Yarn - ResourceManager endpoint. Type: string (or Expression with resultType - string). - :type resource_manager_endpoint: object - :param temp_script_path: Required. Specifies an existing folder path which - will be used to store temp Distcp command script. The script file is - generated by ADF and will be removed after Copy job finished. Type: string - (or Expression with resultType string). - :type temp_script_path: object - :param distcp_options: Specifies the Distcp options. Type: string (or - Expression with resultType string). - :type distcp_options: object - """ - - _validation = { - 'resource_manager_endpoint': {'required': True}, - 'temp_script_path': {'required': True}, - } - - _attribute_map = { - 'resource_manager_endpoint': {'key': 'resourceManagerEndpoint', 'type': 'object'}, - 'temp_script_path': {'key': 'tempScriptPath', 'type': 'object'}, - 'distcp_options': {'key': 'distcpOptions', 'type': 'object'}, - } - - def __init__(self, *, resource_manager_endpoint, temp_script_path, distcp_options=None, **kwargs) -> None: - super(DistcpSettings, self).__init__(**kwargs) - self.resource_manager_endpoint = resource_manager_endpoint - self.temp_script_path = temp_script_path - self.distcp_options = distcp_options diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset.py index fb2b8d46fa9c..543f756ce825 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset.py @@ -15,8 +15,6 @@ class DocumentDbCollectionDataset(Dataset): """Microsoft Azure Document Database Collection dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class DocumentDbCollectionDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,10 +39,10 @@ class DocumentDbCollectionDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param collection_name: Required. Document Database collection name. Type: - string (or Expression with resultType string). + :param collection_name: Document Database collection name. Type: string + (or Expression with resultType string). 
:type collection_name: object """ @@ -67,7 +65,7 @@ class DocumentDbCollectionDataset(Dataset): 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(DocumentDbCollectionDataset, self).__init__(**kwargs) - self.collection_name = kwargs.get('collection_name', None) + def __init__(self, linked_service_name, collection_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None): + super(DocumentDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.collection_name = collection_name self.type = 'DocumentDbCollection' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset_py3.py deleted file mode 100644 index 5eb4dbbf0997..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset_py3.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class DocumentDbCollectionDataset(Dataset): - """Microsoft Azure Document Database Collection dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param collection_name: Required. Document Database collection name. Type: - string (or Expression with resultType string). 
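A sketch of the new DocumentDbCollectionDataset signature above; it assumes LinkedServiceReference takes a reference_name argument, and the service and collection names are placeholders:

    from azure.mgmt.datafactory.models import (
        DocumentDbCollectionDataset,
        LinkedServiceReference,
    )

    dataset = DocumentDbCollectionDataset(
        LinkedServiceReference(reference_name='MyCosmosDbLinkedService'),
        'orders',  # collection_name, now a leading positional argument
    )
    assert dataset.type == 'DocumentDbCollection'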
- :type collection_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'collection_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, collection_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: - super(DocumentDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.collection_name = collection_name - self.type = 'DocumentDbCollection' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py index c2908dc1dd05..42d1a3408d3d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py @@ -15,8 +15,6 @@ class DocumentDbCollectionSink(CopySink): """A copy activity Document Database Collection sink. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -38,7 +36,7 @@ class DocumentDbCollectionSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param nesting_separator: Nested properties separator. Default is . (dot). Type: string (or Expression with resultType string). 
@@ -64,8 +62,8 @@ class DocumentDbCollectionSink(CopySink): 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } - def __init__(self, **kwargs): - super(DocumentDbCollectionSink, self).__init__(**kwargs) - self.nesting_separator = kwargs.get('nesting_separator', None) - self.write_behavior = kwargs.get('write_behavior', None) + def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, nesting_separator=None, write_behavior=None): + super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.nesting_separator = nesting_separator + self.write_behavior = write_behavior self.type = 'DocumentDbCollectionSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py deleted file mode 100644 index f1410cd211a4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py +++ /dev/null @@ -1,71 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class DocumentDbCollectionSink(CopySink): - """A copy activity Document Database Collection sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param nesting_separator: Nested properties separator. Default is . (dot). - Type: string (or Expression with resultType string). - :type nesting_separator: object - :param write_behavior: Describes how to write data to Azure Cosmos DB. - Allowed values: insert and upsert. 
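A sketch against the DocumentDbCollectionSink signature above; write_behavior takes 'insert' or 'upsert' per the docstring:

    from azure.mgmt.datafactory.models import DocumentDbCollectionSink

    sink = DocumentDbCollectionSink(
        nesting_separator='.',   # the documented default, shown explicitly
        write_behavior='upsert',
    )
    assert sink.type == 'DocumentDbCollectionSink'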
- :type write_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, nesting_separator=None, write_behavior=None, **kwargs) -> None: - super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.nesting_separator = nesting_separator - self.write_behavior = write_behavior - self.type = 'DocumentDbCollectionSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source.py index 9fdd23f2795f..984a7ca07efc 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source.py @@ -15,8 +15,6 @@ class DocumentDbCollectionSource(CopySource): """A copy activity Document Database Collection source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class DocumentDbCollectionSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: Documents query. Type: string (or Expression with resultType string). 
@@ -55,8 +53,8 @@ class DocumentDbCollectionSource(CopySource): 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, } - def __init__(self, **kwargs): - super(DocumentDbCollectionSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.nesting_separator = kwargs.get('nesting_separator', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, nesting_separator=None): + super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query + self.nesting_separator = nesting_separator self.type = 'DocumentDbCollectionSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source_py3.py deleted file mode 100644 index 9e0bf6382b04..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source_py3.py +++ /dev/null @@ -1,62 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class DocumentDbCollectionSource(CopySource): - """A copy activity Document Database Collection source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Documents query. Type: string (or Expression with resultType - string). - :type query: object - :param nesting_separator: Nested properties separator. Type: string (or - Expression with resultType string). 
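A sketch against the DocumentDbCollectionSource signature above (the documents query is a placeholder):

    from azure.mgmt.datafactory.models import DocumentDbCollectionSource

    source = DocumentDbCollectionSource(
        query='SELECT * FROM c WHERE c.region = "EU"',  # placeholder query
        nesting_separator='.',
    )
    assert source.type == 'DocumentDbCollectionSource'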
- :type nesting_separator: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, nesting_separator=None, **kwargs) -> None: - super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.nesting_separator = nesting_separator - self.type = 'DocumentDbCollectionSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service.py index c5428ace02a2..93457bad7144 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service.py @@ -15,8 +15,6 @@ class DrillLinkedService(LinkedService): """Drill server linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class DrillLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. 
@@ -61,9 +59,9 @@ class DrillLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(DrillLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.pwd = kwargs.get('pwd', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None): + super(DrillLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.connection_string = connection_string + self.pwd = pwd + self.encrypted_credential = encrypted_credential self.type = 'Drill' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service_py3.py deleted file mode 100644 index 5fb0cb25ecdb..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service_py3.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class DrillLinkedService(LinkedService): - """Drill server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection - string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
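A sketch against the DrillLinkedService signature above. Everything stays optional keyword-style; the ODBC connection string is a placeholder, and per the docstring it could equally be a SecureString or AzureKeyVaultSecretReference:

    from azure.mgmt.datafactory.models import DrillLinkedService

    drill = DrillLinkedService(
        connection_string='ConnectionType=Direct;Host=my-drill-host;Port=31010',
    )
    assert drill.type == 'Drill'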
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: - super(DrillLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.pwd = pwd - self.encrypted_credential = encrypted_credential - self.type = 'Drill' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source.py index 9a3391f27786..df41ba1e85ed 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source.py @@ -15,8 +15,6 @@ class DrillSource(CopySource): """A copy activity Drill server source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class DrillSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). @@ -51,7 +49,7 @@ class DrillSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(DrillSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'DrillSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source_py3.py deleted file mode 100644 index 313183abab83..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. 
-# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class DrillSource(CopySource): - """A copy activity Drill server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'DrillSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset.py index 3dfd5715deb9..d277b8da3473 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset.py @@ -15,8 +15,6 @@ class DrillTableDataset(Dataset): """Drill server dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class DrillTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. 
@@ -41,7 +39,7 @@ class DrillTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. @@ -74,9 +72,9 @@ class DrillTableDataset(Dataset): 'drill_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, **kwargs): - super(DrillTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.drill_table_dataset_schema = kwargs.get('drill_table_dataset_schema', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, drill_table_dataset_schema=None): + super(DrillTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name + self.table = table + self.drill_table_dataset_schema = drill_table_dataset_schema self.type = 'DrillTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset_py3.py deleted file mode 100644 index db46bdc4e0bd..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class DrillTableDataset(Dataset): - """Drill server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. 
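A sketch against the new DrillTableDataset signature above, using table and drill_table_dataset_schema rather than the retired table_name property; the LinkedServiceReference usage and all names are placeholders:

    from azure.mgmt.datafactory.models import (
        DrillTableDataset,
        LinkedServiceReference,
    )

    dataset = DrillTableDataset(
        LinkedServiceReference(reference_name='MyDrillLinkedService'),
        table='trips',                     # Drill table name
        drill_table_dataset_schema='dfs',  # Drill schema name
    )
    assert dataset.type == 'DrillTable'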
If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of the Drill. Type: string (or Expression - with resultType string). - :type table: object - :param drill_table_dataset_schema: The schema name of the Drill. Type: - string (or Expression with resultType string). - :type drill_table_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'drill_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, drill_table_dataset_schema=None, **kwargs) -> None: - super(DrillTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.table = table - self.drill_table_dataset_schema = drill_table_dataset_schema - self.type = 'DrillTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service.py index 5ff0b150718b..bea4e0a82b36 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service.py @@ -15,8 +15,6 @@ class DynamicsAXLinkedService(LinkedService): """Dynamics AX linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,25 +29,25 @@ class DynamicsAXLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param url: Required. The Dynamics AX (or Dynamics 365 Finance and - Operations) instance OData endpoint. + :param url: The Dynamics AX (or Dynamics 365 Finance and Operations) + instance OData endpoint. :type url: object - :param service_principal_id: Required. Specify the application's client - ID. 
Type: string (or Expression with resultType string). + :param service_principal_id: Specify the application's client ID. Type: + string (or Expression with resultType string). :type service_principal_id: object - :param service_principal_key: Required. Specify the application's key. - Mark this field as a SecureString to store it securely in Data Factory, or - reference a secret stored in Azure Key Vault. Type: string (or Expression - with resultType string). + :param service_principal_key: Specify the application's key. Mark this + field as a SecureString to store it securely in Data Factory, or reference + a secret stored in Azure Key Vault. Type: string (or Expression with + resultType string). :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: Required. Specify the tenant information (domain name or - tenant ID) under which your application resides. Retrieve it by hovering - the mouse in the top-right corner of the Azure portal. Type: string (or - Expression with resultType string). + :param tenant: Specify the tenant information (domain name or tenant ID) + under which your application resides. Retrieve it by hovering the mouse in + the top-right corner of the Azure portal. Type: string (or Expression with + resultType string). :type tenant: object - :param aad_resource_id: Required. Specify the resource you are requesting + :param aad_resource_id: Specify the resource you are requesting authorization. Type: string (or Expression with resultType string). :type aad_resource_id: object :param encrypted_credential: The encrypted credential used for @@ -82,12 +80,12 @@ class DynamicsAXLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(DynamicsAXLinkedService, self).__init__(**kwargs) - self.url = kwargs.get('url', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.aad_resource_id = kwargs.get('aad_resource_id', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, url, service_principal_id, service_principal_key, tenant, aad_resource_id, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, encrypted_credential=None): + super(DynamicsAXLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.url = url + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.aad_resource_id = aad_resource_id + self.encrypted_credential = encrypted_credential self.type = 'DynamicsAX' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service_py3.py deleted file mode 100644 index 79d3a34ba313..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service_py3.py +++ /dev/null @@ -1,93 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class DynamicsAXLinkedService(LinkedService): - """Dynamics AX linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param url: Required. The Dynamics AX (or Dynamics 365 Finance and - Operations) instance OData endpoint. - :type url: object - :param service_principal_id: Required. Specify the application's client - ID. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: Required. Specify the application's key. - Mark this field as a SecureString to store it securely in Data Factory, or - reference a secret stored in Azure Key Vault. Type: string (or Expression - with resultType string). - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: Required. Specify the tenant information (domain name or - tenant ID) under which your application resides. Retrieve it by hovering - the mouse in the top-right corner of the Azure portal. Type: string (or - Expression with resultType string). - :type tenant: object - :param aad_resource_id: Required. Specify the resource you are requesting - authorization. Type: string (or Expression with resultType string). - :type aad_resource_id: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
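A sketch against the new DynamicsAXLinkedService signature, whose five leading arguments are all required by the validation map; every value is a placeholder, and wrapping the key in SecureString assumes that SecretBase subclass from the same models package:

    from azure.mgmt.datafactory.models import (
        DynamicsAXLinkedService,
        SecureString,
    )

    dynamics = DynamicsAXLinkedService(
        'https://contoso.operations.dynamics.com/data',  # OData endpoint url
        '<client-id>',                                   # service_principal_id
        SecureString(value='<client-secret>'),           # service_principal_key
        'contoso.onmicrosoft.com',                       # tenant
        'https://contoso.operations.dynamics.com',       # aad_resource_id
    )
    assert dynamics.type == 'DynamicsAX'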
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, - 'tenant': {'required': True}, - 'aad_resource_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, url, service_principal_id, service_principal_key, tenant, aad_resource_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, encrypted_credential=None, **kwargs) -> None: - super(DynamicsAXLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.url = url - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.aad_resource_id = aad_resource_id - self.encrypted_credential = encrypted_credential - self.type = 'DynamicsAX' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py index 392b8ac7b971..6fe4046a328e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py @@ -15,8 +15,6 @@ class DynamicsAXResourceDataset(Dataset): """The path of the Dynamics AX OData entity. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class DynamicsAXResourceDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,10 +39,10 @@ class DynamicsAXResourceDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param path: Required. The path of the Dynamics AX OData entity. 
Type: - string (or Expression with resultType string). + :param path: The path of the Dynamics AX OData entity. Type: string (or + Expression with resultType string). :type path: object """ @@ -67,7 +65,7 @@ class DynamicsAXResourceDataset(Dataset): 'path': {'key': 'typeProperties.path', 'type': 'object'}, } - def __init__(self, **kwargs): - super(DynamicsAXResourceDataset, self).__init__(**kwargs) - self.path = kwargs.get('path', None) + def __init__(self, linked_service_name, path, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None): + super(DynamicsAXResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.path = path self.type = 'DynamicsAXResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py deleted file mode 100644 index 6cade3e4aa59..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class DynamicsAXResourceDataset(Dataset): - """The path of the Dynamics AX OData entity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param path: Required. The path of the Dynamics AX OData entity. Type: - string (or Expression with resultType string). 
- :type path: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, path, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: - super(DynamicsAXResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.path = path - self.type = 'DynamicsAXResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source.py index 619bad0f75c9..ce549554360a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source.py @@ -15,8 +15,6 @@ class DynamicsAXSource(CopySource): """A copy activity Dynamics AX source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class DynamicsAXSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
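Note for reviewers: the hunk below regenerates DynamicsAXSource.__init__ with explicit keyword parameters in place of the **kwargs pattern. A minimal call-site sketch under the new signature (the query value and connection count are illustrative, not taken from this patch):

    from azure.mgmt.datafactory.models import DynamicsAXSource

    # All parameters stay optional keywords; 'type' is still assigned by
    # the model itself ('DynamicsAXSource') and is never passed in.
    source = DynamicsAXSource(
        query="$top=100",             # illustrative OData query
        max_concurrent_connections=4,
    )
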
@@ -51,7 +49,7 @@ class DynamicsAXSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(DynamicsAXSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(DynamicsAXSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'DynamicsAXSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source_py3.py deleted file mode 100644 index 7679e68bae7b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class DynamicsAXSource(CopySource): - """A copy activity Dynamics AX source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(DynamicsAXSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'DynamicsAXSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset.py index ff4079761cf0..e167a1eccc27 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset.py @@ -15,8 +15,6 @@ class DynamicsCrmEntityDataset(Dataset): """The Dynamics CRM entity dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class DynamicsCrmEntityDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class DynamicsCrmEntityDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param entity_name: The logical name of the entity. Type: string (or Expression with resultType string). 
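Note for reviewers: in the hunk below, linked_service_name moves out of **kwargs and into the positional signature of DynamicsCrmEntityDataset. A minimal sketch of the resulting call site (the reference name and entity name are illustrative):

    from azure.mgmt.datafactory.models import (
        DynamicsCrmEntityDataset,
        LinkedServiceReference,
    )

    dataset = DynamicsCrmEntityDataset(
        # linked_service_name is now required by the signature itself.
        linked_service_name=LinkedServiceReference(
            reference_name='DynamicsCrmLinkedService'),
        entity_name='account',        # logical entity name, illustrative
    )
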
@@ -66,7 +64,7 @@ class DynamicsCrmEntityDataset(Dataset): 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(DynamicsCrmEntityDataset, self).__init__(**kwargs) - self.entity_name = kwargs.get('entity_name', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, entity_name=None): + super(DynamicsCrmEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.entity_name = entity_name self.type = 'DynamicsCrmEntity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset_py3.py deleted file mode 100644 index 4a1ef86b2dc6..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class DynamicsCrmEntityDataset(Dataset): - """The Dynamics CRM entity dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param entity_name: The logical name of the entity. Type: string (or - Expression with resultType string). 
- :type entity_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, entity_name=None, **kwargs) -> None: - super(DynamicsCrmEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.entity_name = entity_name - self.type = 'DynamicsCrmEntity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service.py index aad71042bb04..8e81da0ae9ef 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service.py @@ -15,8 +15,6 @@ class DynamicsCrmLinkedService(LinkedService): """Dynamics CRM linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,12 +29,12 @@ class DynamicsCrmLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param deployment_type: Required. The deployment type of the Dynamics CRM - instance. 'Online' for Dynamics CRM Online and 'OnPremisesWithIfd' for - Dynamics CRM on-premises with Ifd. Type: string (or Expression with - resultType string). Possible values include: 'Online', 'OnPremisesWithIfd' + :param deployment_type: The deployment type of the Dynamics CRM instance. + 'Online' for Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM + on-premises with Ifd. Type: string (or Expression with resultType string). + Possible values include: 'Online', 'OnPremisesWithIfd' :type deployment_type: str or ~azure.mgmt.datafactory.models.DynamicsDeploymentType :param host_name: The host name of the on-premises Dynamics CRM server. @@ -56,14 +54,14 @@ class DynamicsCrmLinkedService(LinkedService): when there are more than one Dynamics CRM instances associated with the user. Type: string (or Expression with resultType string). :type organization_name: object - :param authentication_type: Required. The authentication type to connect - to Dynamics CRM server. 'Office365' for online scenario, 'Ifd' for - on-premises with Ifd scenario. 
Type: string (or Expression with resultType - string). Possible values include: 'Office365', 'Ifd' + :param authentication_type: The authentication type to connect to Dynamics + CRM server. 'Office365' for online scenario, 'Ifd' for on-premises with + Ifd scenario. Type: string (or Expression with resultType string). + Possible values include: 'Office365', 'Ifd' :type authentication_type: str or ~azure.mgmt.datafactory.models.DynamicsAuthenticationType - :param username: Required. User name to access the Dynamics CRM instance. - Type: string (or Expression with resultType string). + :param username: User name to access the Dynamics CRM instance. Type: + string (or Expression with resultType string). :type username: object :param password: Password to access the Dynamics CRM instance. :type password: ~azure.mgmt.datafactory.models.SecretBase @@ -98,15 +96,15 @@ class DynamicsCrmLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(DynamicsCrmLinkedService, self).__init__(**kwargs) - self.deployment_type = kwargs.get('deployment_type', None) - self.host_name = kwargs.get('host_name', None) - self.port = kwargs.get('port', None) - self.service_uri = kwargs.get('service_uri', None) - self.organization_name = kwargs.get('organization_name', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, deployment_type, authentication_type, username, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, password=None, encrypted_credential=None): + super(DynamicsCrmLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.deployment_type = deployment_type + self.host_name = host_name + self.port = port + self.service_uri = service_uri + self.organization_name = organization_name + self.authentication_type = authentication_type + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential self.type = 'DynamicsCrm' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service_py3.py deleted file mode 100644 index 2286301fabef..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service_py3.py +++ /dev/null @@ -1,112 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class DynamicsCrmLinkedService(LinkedService): - """Dynamics CRM linked service. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param deployment_type: Required. The deployment type of the Dynamics CRM - instance. 'Online' for Dynamics CRM Online and 'OnPremisesWithIfd' for - Dynamics CRM on-premises with Ifd. Type: string (or Expression with - resultType string). Possible values include: 'Online', 'OnPremisesWithIfd' - :type deployment_type: str or - ~azure.mgmt.datafactory.models.DynamicsDeploymentType - :param host_name: The host name of the on-premises Dynamics CRM server. - The property is required for on-prem and not allowed for online. Type: - string (or Expression with resultType string). - :type host_name: object - :param port: The port of on-premises Dynamics CRM server. The property is - required for on-prem and not allowed for online. Default is 443. Type: - integer (or Expression with resultType integer), minimum: 0. - :type port: object - :param service_uri: The URL to the Microsoft Dynamics CRM server. The - property is required for on-line and not allowed for on-prem. Type: string - (or Expression with resultType string). - :type service_uri: object - :param organization_name: The organization name of the Dynamics CRM - instance. The property is required for on-prem and required for online - when there are more than one Dynamics CRM instances associated with the - user. Type: string (or Expression with resultType string). - :type organization_name: object - :param authentication_type: Required. The authentication type to connect - to Dynamics CRM server. 'Office365' for online scenario, 'Ifd' for - on-premises with Ifd scenario. Type: string (or Expression with resultType - string). Possible values include: 'Office365', 'Ifd' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.DynamicsAuthenticationType - :param username: Required. User name to access the Dynamics CRM instance. - Type: string (or Expression with resultType string). - :type username: object - :param password: Password to access the Dynamics CRM instance. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'deployment_type': {'required': True}, - 'authentication_type': {'required': True}, - 'username': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, - 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, - 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, deployment_type, authentication_type, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(DynamicsCrmLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.deployment_type = deployment_type - self.host_name = host_name - self.port = port - self.service_uri = service_uri - self.organization_name = organization_name - self.authentication_type = authentication_type - self.username = username - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'DynamicsCrm' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink.py index 2d0f462e0f59..2e633abbffa2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink.py @@ -18,8 +18,6 @@ class DynamicsCrmSink(CopySink): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -41,10 +39,10 @@ class DynamicsCrmSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :ivar write_behavior: Required. The write behavior for the operation. - Default value: "Upsert" . + :ivar write_behavior: The write behavior for the operation. Default value: + "Upsert" . 
:vartype write_behavior: str :param ignore_null_values: The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. @@ -71,7 +69,7 @@ class DynamicsCrmSink(CopySink): write_behavior = "Upsert" - def __init__(self, **kwargs): - super(DynamicsCrmSink, self).__init__(**kwargs) - self.ignore_null_values = kwargs.get('ignore_null_values', None) + def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None): + super(DynamicsCrmSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.ignore_null_values = ignore_null_values self.type = 'DynamicsCrmSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink_py3.py deleted file mode 100644 index d9f4fcf092c8..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink_py3.py +++ /dev/null @@ -1,77 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class DynamicsCrmSink(CopySink): - """A copy activity Dynamics CRM sink. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :ivar write_behavior: Required. The write behavior for the operation. - Default value: "Upsert" . - :vartype write_behavior: str - :param ignore_null_values: The flag indicating whether to ignore null - values from input dataset (except key fields) during write operation. - Default is false. 
Type: boolean (or Expression with resultType boolean). - :type ignore_null_values: object - """ - - _validation = { - 'type': {'required': True}, - 'write_behavior': {'required': True, 'constant': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, - } - - write_behavior = "Upsert" - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None: - super(DynamicsCrmSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.ignore_null_values = ignore_null_values - self.type = 'DynamicsCrmSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source.py index 641fad43f437..6c22c8c5dd07 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source.py @@ -15,8 +15,6 @@ class DynamicsCrmSource(CopySource): """A copy activity Dynamics CRM source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class DynamicsCrmSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM (online & on-premises). 
Type: string (or Expression @@ -52,7 +50,7 @@ class DynamicsCrmSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(DynamicsCrmSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(DynamicsCrmSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'DynamicsCrmSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source_py3.py deleted file mode 100644 index 29c3e78609a5..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source_py3.py +++ /dev/null @@ -1,58 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class DynamicsCrmSource(CopySource): - """A copy activity Dynamics CRM source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: FetchXML is a proprietary query language that is used in - Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression - with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(DynamicsCrmSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'DynamicsCrmSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset.py index 435c6d153066..0ef30c87b5b1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset.py @@ -15,8 +15,6 @@ class DynamicsEntityDataset(Dataset): """The Dynamics entity dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class DynamicsEntityDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class DynamicsEntityDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param entity_name: The logical name of the entity. Type: string (or Expression with resultType string). 
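Note for reviewers: DynamicsEntityDataset gets the same treatment in the hunk below, so omitting a required argument now fails fast with a TypeError at construction instead of surfacing later during client-side validation. A sketch, assuming the regenerated models from this patch (names are illustrative):

    from azure.mgmt.datafactory.models import (
        DynamicsEntityDataset,
        LinkedServiceReference,
    )

    try:
        DynamicsEntityDataset(entity_name='contact')
    except TypeError:
        # linked_service_name is enforced by the signature itself now.
        pass

    dataset = DynamicsEntityDataset(
        linked_service_name=LinkedServiceReference(reference_name='DynamicsLS'),
        entity_name='contact',
    )
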
@@ -66,7 +64,7 @@ class DynamicsEntityDataset(Dataset): 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(DynamicsEntityDataset, self).__init__(**kwargs) - self.entity_name = kwargs.get('entity_name', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, entity_name=None): + super(DynamicsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.entity_name = entity_name self.type = 'DynamicsEntity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset_py3.py deleted file mode 100644 index 7ee671890354..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class DynamicsEntityDataset(Dataset): - """The Dynamics entity dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param entity_name: The logical name of the entity. Type: string (or - Expression with resultType string). 
- :type entity_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, entity_name=None, **kwargs) -> None: - super(DynamicsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.entity_name = entity_name - self.type = 'DynamicsEntity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service.py index c925033d1240..7d0111ef5fbd 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service.py @@ -15,8 +15,6 @@ class DynamicsLinkedService(LinkedService): """Dynamics linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,12 +29,11 @@ class DynamicsLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param deployment_type: Required. The deployment type of the Dynamics - instance. 'Online' for Dynamics Online and 'OnPremisesWithIfd' for - Dynamics on-premises with Ifd. Type: string (or Expression with resultType - string). + :param deployment_type: The deployment type of the Dynamics instance. + 'Online' for Dynamics Online and 'OnPremisesWithIfd' for Dynamics + on-premises with Ifd. Type: string (or Expression with resultType string). :type deployment_type: object :param host_name: The host name of the on-premises Dynamics server. The property is required for on-prem and not allowed for online. Type: string @@ -55,12 +52,12 @@ class DynamicsLinkedService(LinkedService): are more than one Dynamics instances associated with the user. Type: string (or Expression with resultType string). :type organization_name: object - :param authentication_type: Required. The authentication type to connect - to Dynamics server. 'Office365' for online scenario, 'Ifd' for on-premises - with Ifd scenario. Type: string (or Expression with resultType string). + :param authentication_type: The authentication type to connect to Dynamics + server. 
'Office365' for online scenario, 'Ifd' for on-premises with Ifd + scenario. Type: string (or Expression with resultType string). :type authentication_type: object - :param username: Required. User name to access the Dynamics instance. - Type: string (or Expression with resultType string). + :param username: User name to access the Dynamics instance. Type: string + (or Expression with resultType string). :type username: object :param password: Password to access the Dynamics instance. :type password: ~azure.mgmt.datafactory.models.SecretBase @@ -95,15 +92,15 @@ class DynamicsLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(DynamicsLinkedService, self).__init__(**kwargs) - self.deployment_type = kwargs.get('deployment_type', None) - self.host_name = kwargs.get('host_name', None) - self.port = kwargs.get('port', None) - self.service_uri = kwargs.get('service_uri', None) - self.organization_name = kwargs.get('organization_name', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, deployment_type, authentication_type, username, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, password=None, encrypted_credential=None): + super(DynamicsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.deployment_type = deployment_type + self.host_name = host_name + self.port = port + self.service_uri = service_uri + self.organization_name = organization_name + self.authentication_type = authentication_type + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential self.type = 'Dynamics' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service_py3.py deleted file mode 100644 index 07c028ff2477..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service_py3.py +++ /dev/null @@ -1,109 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class DynamicsLinkedService(LinkedService): - """Dynamics linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. 
- :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param deployment_type: Required. The deployment type of the Dynamics - instance. 'Online' for Dynamics Online and 'OnPremisesWithIfd' for - Dynamics on-premises with Ifd. Type: string (or Expression with resultType - string). - :type deployment_type: object - :param host_name: The host name of the on-premises Dynamics server. The - property is required for on-prem and not allowed for online. Type: string - (or Expression with resultType string). - :type host_name: object - :param port: The port of on-premises Dynamics server. The property is - required for on-prem and not allowed for online. Default is 443. Type: - integer (or Expression with resultType integer), minimum: 0. - :type port: object - :param service_uri: The URL to the Microsoft Dynamics server. The property - is required for on-line and not allowed for on-prem. Type: string (or - Expression with resultType string). - :type service_uri: object - :param organization_name: The organization name of the Dynamics instance. - The property is required for on-prem and required for online when there - are more than one Dynamics instances associated with the user. Type: - string (or Expression with resultType string). - :type organization_name: object - :param authentication_type: Required. The authentication type to connect - to Dynamics server. 'Office365' for online scenario, 'Ifd' for on-premises - with Ifd scenario. Type: string (or Expression with resultType string). - :type authentication_type: object - :param username: Required. User name to access the Dynamics instance. - Type: string (or Expression with resultType string). - :type username: object - :param password: Password to access the Dynamics instance. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'deployment_type': {'required': True}, - 'authentication_type': {'required': True}, - 'username': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, - 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, - 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, deployment_type, authentication_type, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(DynamicsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.deployment_type = deployment_type - self.host_name = host_name - self.port = port - self.service_uri = service_uri - self.organization_name = organization_name - self.authentication_type = authentication_type - self.username = username - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'Dynamics' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py index 45bac7b52064..47015ec8fdad 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py @@ -18,8 +18,6 @@ class DynamicsSink(CopySink): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -41,10 +39,10 @@ class DynamicsSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :ivar write_behavior: Required. The write behavior for the operation. - Default value: "Upsert" . + :ivar write_behavior: The write behavior for the operation. Default value: + "Upsert" . 
:vartype write_behavior: str :param ignore_null_values: The flag indicating whether ignore null values from input dataset (except key fields) during write operation. Default is @@ -71,7 +69,7 @@ class DynamicsSink(CopySink): write_behavior = "Upsert" - def __init__(self, **kwargs): - super(DynamicsSink, self).__init__(**kwargs) - self.ignore_null_values = kwargs.get('ignore_null_values', None) + def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None): + super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.ignore_null_values = ignore_null_values self.type = 'DynamicsSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py deleted file mode 100644 index 5f736f9cf658..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py +++ /dev/null @@ -1,77 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class DynamicsSink(CopySink): - """A copy activity Dynamics sink. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :ivar write_behavior: Required. The write behavior for the operation. - Default value: "Upsert" . - :vartype write_behavior: str - :param ignore_null_values: The flag indicating whether ignore null values - from input dataset (except key fields) during write operation. Default is - false. 
Type: boolean (or Expression with resultType boolean). - :type ignore_null_values: object - """ - - _validation = { - 'type': {'required': True}, - 'write_behavior': {'required': True, 'constant': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, - } - - write_behavior = "Upsert" - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None: - super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.ignore_null_values = ignore_null_values - self.type = 'DynamicsSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source.py index d38f96fee911..5f6c0472bedc 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source.py @@ -15,8 +15,6 @@ class DynamicsSource(CopySource): """A copy activity Dynamics source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class DynamicsSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics (online & on-premises). 
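# A usage sketch for the DynamicsSink model regenerated above (illustrative, not
# part of the generated diff). write_behavior is a class constant pinned to
# "Upsert", so only the optional knobs are passed at construction time.
from azure.mgmt.datafactory.models import DynamicsSink

sink = DynamicsSink(
    ignore_null_values=True,        # skip null non-key fields during the upsert
    write_batch_size=1000,
    max_concurrent_connections=4,
)
assert sink.write_behavior == 'Upsert' and sink.type == 'DynamicsSink'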
Type: string (or Expression @@ -52,7 +50,7 @@ class DynamicsSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(DynamicsSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'DynamicsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source_py3.py deleted file mode 100644 index 12d83625bc6a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source_py3.py +++ /dev/null @@ -1,58 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class DynamicsSource(CopySource): - """A copy activity Dynamics source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: FetchXML is a proprietary query language that is used in - Microsoft Dynamics (online & on-premises). Type: string (or Expression - with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'DynamicsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service.py index 6249c2e2334b..f9527b58a4f0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service.py @@ -15,8 +15,6 @@ class EloquaLinkedService(LinkedService): """Eloqua server linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,13 +29,13 @@ class EloquaLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param endpoint: Required. The endpoint of the Eloqua server. (i.e. + :param endpoint: The endpoint of the Eloqua server. (i.e. eloqua.example.com) :type endpoint: object - :param username: Required. The site name and user name of your Eloqua - account in the form: sitename/username. (i.e. Eloqua/Alice) + :param username: The site name and user name of your Eloqua account in the + form: sitename/username. (i.e. Eloqua/Alice) :type username: object :param password: The password corresponding to the user name. 
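# A usage sketch for the DynamicsSource model regenerated above (illustrative, not
# part of the generated diff). The query is FetchXML per the docstring; the
# snippet below is a hypothetical example of that dialect.
from azure.mgmt.datafactory.models import DynamicsSource

source = DynamicsSource(
    query='<fetch mapping="logical"><entity name="account"/></fetch>'
)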
:type password: ~azure.mgmt.datafactory.models.SecretBase @@ -79,13 +77,13 @@ class EloquaLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(EloquaLinkedService, self).__init__(**kwargs) - self.endpoint = kwargs.get('endpoint', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, endpoint, username, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, password=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None): + super(EloquaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.endpoint = endpoint + self.username = username + self.password = password + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential self.type = 'Eloqua' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service_py3.py deleted file mode 100644 index 623d798036a3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service_py3.py +++ /dev/null @@ -1,91 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class EloquaLinkedService(LinkedService): - """Eloqua server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param endpoint: Required. The endpoint of the Eloqua server. (i.e. - eloqua.example.com) - :type endpoint: object - :param username: Required. The site name and user name of your Eloqua - account in the form: sitename/username. (i.e. 
Eloqua/Alice) - :type username: object - :param password: The password corresponding to the user name. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'username': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, endpoint, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(EloquaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.endpoint = endpoint - self.username = username - self.password = password - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'Eloqua' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset.py index 56adc0ce47c4..ca33bd388e2a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset.py @@ -15,8 +15,6 @@ class EloquaObjectDataset(Dataset): """Eloqua server dataset. - All required parameters must be populated in order to send to Azure. 
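# A usage sketch for the EloquaLinkedService model regenerated above
# (illustrative, not part of the generated diff). endpoint and username are the
# required arguments, and the sitename/username format comes straight from the
# docstring; the values below are hypothetical placeholders.
from azure.mgmt.datafactory.models import EloquaLinkedService, SecureString

eloqua_ls = EloquaLinkedService(
    endpoint='eloqua.example.com',
    username='Eloqua/Alice',                       # sitename/username
    password=SecureString(value='<placeholder>'),
    use_encrypted_endpoints=True,
)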
- :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class EloquaObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class EloquaObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -66,7 +64,7 @@ class EloquaObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(EloquaObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): + super(EloquaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name self.type = 'EloquaObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset_py3.py deleted file mode 100644 index 705f43cd225c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class EloquaObjectDataset(Dataset): - """Eloqua server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(EloquaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'EloquaObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source.py index f016140189f1..94d399ea1a32 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source.py @@ -15,8 +15,6 @@ class EloquaSource(CopySource): """A copy activity Eloqua server source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class EloquaSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
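# A usage sketch for the EloquaObjectDataset model regenerated above
# (illustrative, not part of the generated diff). linked_service_name is the one
# required constructor argument; 'EloquaLS' is a hypothetical linked service name.
from azure.mgmt.datafactory.models import (
    EloquaObjectDataset, LinkedServiceReference)

dataset = EloquaObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='EloquaLS'),
    table_name='Campaign',
)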
@@ -51,7 +49,7 @@ class EloquaSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(EloquaSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'EloquaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source_py3.py deleted file mode 100644 index d200ff32fd9d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class EloquaSource(CopySource): - """A copy activity Eloqua server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'EloquaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference.py index 5db1448a5a55..ee7eeed24a3d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference.py @@ -28,7 +28,7 @@ class EntityReference(Model): 'reference_name': {'key': 'referenceName', 'type': 'str'}, } - def __init__(self, **kwargs): - super(EntityReference, self).__init__(**kwargs) - self.type = kwargs.get('type', None) - self.reference_name = kwargs.get('reference_name', None) + def __init__(self, type=None, reference_name=None): + super(EntityReference, self).__init__() + self.type = type + self.reference_name = reference_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference_py3.py deleted file mode 100644 index f87698b67a64..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference_py3.py +++ /dev/null @@ -1,34 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class EntityReference(Model): - """The entity reference. - - :param type: The type of this referenced entity. Possible values include: - 'IntegrationRuntimeReference', 'LinkedServiceReference' - :type type: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType - :param reference_name: The name of this referenced entity. 
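# A usage sketch for the EloquaSource model regenerated above (illustrative, not
# part of the generated diff). Every argument is optional; a query plus the retry
# settings inherited from CopySource are shown, with a hypothetical query text.
from azure.mgmt.datafactory.models import EloquaSource

source = EloquaSource(
    query='SELECT * FROM Campaign',
    source_retry_count=3,
    source_retry_wait='00:00:30',   # matches the documented timespan pattern
)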
- :type reference_name: str - """ - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - } - - def __init__(self, *, type=None, reference_name: str=None, **kwargs) -> None: - super(EntityReference, self).__init__(**kwargs) - self.type = type - self.reference_name = reference_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity.py index 0008b5eee153..a28ea529c95b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity.py @@ -15,12 +15,10 @@ class ExecutePipelineActivity(ControlActivity): """Execute pipeline activity. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Activity name. :type name: str :param description: Activity description. :type description: str @@ -28,9 +26,9 @@ class ExecutePipelineActivity(ControlActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param pipeline: Required. Pipeline reference. + :param pipeline: Pipeline reference. :type pipeline: ~azure.mgmt.datafactory.models.PipelineReference :param parameters: Pipeline parameters. :type parameters: dict[str, object] @@ -57,9 +55,9 @@ class ExecutePipelineActivity(ControlActivity): 'wait_on_completion': {'key': 'typeProperties.waitOnCompletion', 'type': 'bool'}, } - def __init__(self, **kwargs): - super(ExecutePipelineActivity, self).__init__(**kwargs) - self.pipeline = kwargs.get('pipeline', None) - self.parameters = kwargs.get('parameters', None) - self.wait_on_completion = kwargs.get('wait_on_completion', None) + def __init__(self, name, pipeline, additional_properties=None, description=None, depends_on=None, user_properties=None, parameters=None, wait_on_completion=None): + super(ExecutePipelineActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties) + self.pipeline = pipeline + self.parameters = parameters + self.wait_on_completion = wait_on_completion self.type = 'ExecutePipeline' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity_py3.py deleted file mode 100644 index addaafabe7b0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity_py3.py +++ /dev/null @@ -1,65 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. 
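# A usage sketch for the EntityReference model regenerated above (illustrative,
# not part of the generated diff): a plain name/type pair used by the shared
# integration-runtime setup. 'mySsisIR' is a hypothetical runtime name.
from azure.mgmt.datafactory.models import EntityReference

ref = EntityReference(
    type='IntegrationRuntimeReference',
    reference_name='mySsisIR',
)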
-# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .control_activity_py3 import ControlActivity - - -class ExecutePipelineActivity(ControlActivity): - """Execute pipeline activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param pipeline: Required. Pipeline reference. - :type pipeline: ~azure.mgmt.datafactory.models.PipelineReference - :param parameters: Pipeline parameters. - :type parameters: dict[str, object] - :param wait_on_completion: Defines whether activity execution will wait - for the dependent pipeline execution to finish. Default is false. - :type wait_on_completion: bool - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'pipeline': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pipeline': {'key': 'typeProperties.pipeline', 'type': 'PipelineReference'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, - 'wait_on_completion': {'key': 'typeProperties.waitOnCompletion', 'type': 'bool'}, - } - - def __init__(self, *, name: str, pipeline, additional_properties=None, description: str=None, depends_on=None, user_properties=None, parameters=None, wait_on_completion: bool=None, **kwargs) -> None: - super(ExecutePipelineActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.pipeline = pipeline - self.parameters = parameters - self.wait_on_completion = wait_on_completion - self.type = 'ExecutePipeline' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity.py index 9efa853dac86..b9003be9c119 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity.py @@ -15,12 +15,10 @@ class ExecuteSSISPackageActivity(ExecutionActivity): """Execute SSIS package activity. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Activity name. :type name: str :param description: Activity description. 
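# A usage sketch for the ExecutePipelineActivity model regenerated above
# (illustrative, not part of the generated diff). name and pipeline are the
# required arguments; the child pipeline name and parameter are hypothetical.
from azure.mgmt.datafactory.models import (
    ExecutePipelineActivity, PipelineReference)

activity = ExecutePipelineActivity(
    name='RunDailyLoad',
    pipeline=PipelineReference(reference_name='DailyLoad'),
    parameters={'windowStart': '2019-06-07'},
    wait_on_completion=True,
)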
:type description: str @@ -28,14 +26,14 @@ class ExecuteSSISPackageActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param package_location: Required. SSIS package location. + :param package_location: SSIS package location. :type package_location: ~azure.mgmt.datafactory.models.SSISPackageLocation :param runtime: Specifies the runtime to execute SSIS package. The value should be "x86" or "x64". Type: string (or Expression with resultType @@ -50,7 +48,7 @@ class ExecuteSSISPackageActivity(ExecutionActivity): :param execution_credential: The package execution credential. :type execution_credential: ~azure.mgmt.datafactory.models.SSISExecutionCredential - :param connect_via: Required. The integration runtime reference. + :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param project_parameters: The project level parameters to execute the @@ -107,18 +105,18 @@ class ExecuteSSISPackageActivity(ExecutionActivity): 'log_location': {'key': 'typeProperties.logLocation', 'type': 'SSISLogLocation'}, } - def __init__(self, **kwargs): - super(ExecuteSSISPackageActivity, self).__init__(**kwargs) - self.package_location = kwargs.get('package_location', None) - self.runtime = kwargs.get('runtime', None) - self.logging_level = kwargs.get('logging_level', None) - self.environment_path = kwargs.get('environment_path', None) - self.execution_credential = kwargs.get('execution_credential', None) - self.connect_via = kwargs.get('connect_via', None) - self.project_parameters = kwargs.get('project_parameters', None) - self.package_parameters = kwargs.get('package_parameters', None) - self.project_connection_managers = kwargs.get('project_connection_managers', None) - self.package_connection_managers = kwargs.get('package_connection_managers', None) - self.property_overrides = kwargs.get('property_overrides', None) - self.log_location = kwargs.get('log_location', None) + def __init__(self, name, package_location, connect_via, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, runtime=None, logging_level=None, environment_path=None, execution_credential=None, project_parameters=None, package_parameters=None, project_connection_managers=None, package_connection_managers=None, property_overrides=None, log_location=None): + super(ExecuteSSISPackageActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) + self.package_location = package_location + self.runtime = runtime + self.logging_level = logging_level + self.environment_path = environment_path + self.execution_credential = execution_credential + self.connect_via = connect_via + self.project_parameters = project_parameters + self.package_parameters = package_parameters + self.project_connection_managers = project_connection_managers + 
self.package_connection_managers = package_connection_managers + self.property_overrides = property_overrides + self.log_location = log_location self.type = 'ExecuteSSISPackage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity_py3.py deleted file mode 100644 index 64efa9cd63ac..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity_py3.py +++ /dev/null @@ -1,124 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class ExecuteSSISPackageActivity(ExecutionActivity): - """Execute SSIS package activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param package_location: Required. SSIS package location. - :type package_location: ~azure.mgmt.datafactory.models.SSISPackageLocation - :param runtime: Specifies the runtime to execute SSIS package. The value - should be "x86" or "x64". Type: string (or Expression with resultType - string). - :type runtime: object - :param logging_level: The logging level of SSIS package execution. Type: - string (or Expression with resultType string). - :type logging_level: object - :param environment_path: The environment path to execute the SSIS package. - Type: string (or Expression with resultType string). - :type environment_path: object - :param execution_credential: The package execution credential. - :type execution_credential: - ~azure.mgmt.datafactory.models.SSISExecutionCredential - :param connect_via: Required. The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param project_parameters: The project level parameters to execute the - SSIS package. - :type project_parameters: dict[str, - ~azure.mgmt.datafactory.models.SSISExecutionParameter] - :param package_parameters: The package level parameters to execute the - SSIS package. 
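# A usage sketch for the ExecuteSSISPackageActivity model regenerated above
# (illustrative, not part of the generated diff). name, package_location and
# connect_via are the required arguments; the SSISDB path and runtime name are
# hypothetical, and SSISPackageLocation is assumed to take the package path as
# its required argument in this SDK vintage.
from azure.mgmt.datafactory.models import (
    ExecuteSSISPackageActivity, IntegrationRuntimeReference, SSISPackageLocation)

activity = ExecuteSSISPackageActivity(
    name='RunNightlyPackage',
    package_location=SSISPackageLocation(
        package_path='/SSISDB/Nightly/LoadProject/Load.dtsx'),
    connect_via=IntegrationRuntimeReference(reference_name='AzureSsisIR'),
    logging_level='Basic',
)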
- :type package_parameters: dict[str, - ~azure.mgmt.datafactory.models.SSISExecutionParameter] - :param project_connection_managers: The project level connection managers - to execute the SSIS package. - :type project_connection_managers: dict[str, dict[str, - ~azure.mgmt.datafactory.models.SSISExecutionParameter]] - :param package_connection_managers: The package level connection managers - to execute the SSIS package. - :type package_connection_managers: dict[str, dict[str, - ~azure.mgmt.datafactory.models.SSISExecutionParameter]] - :param property_overrides: The property overrides to execute the SSIS - package. - :type property_overrides: dict[str, - ~azure.mgmt.datafactory.models.SSISPropertyOverride] - :param log_location: SSIS package execution log location. - :type log_location: ~azure.mgmt.datafactory.models.SSISLogLocation - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'package_location': {'required': True}, - 'connect_via': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'package_location': {'key': 'typeProperties.packageLocation', 'type': 'SSISPackageLocation'}, - 'runtime': {'key': 'typeProperties.runtime', 'type': 'object'}, - 'logging_level': {'key': 'typeProperties.loggingLevel', 'type': 'object'}, - 'environment_path': {'key': 'typeProperties.environmentPath', 'type': 'object'}, - 'execution_credential': {'key': 'typeProperties.executionCredential', 'type': 'SSISExecutionCredential'}, - 'connect_via': {'key': 'typeProperties.connectVia', 'type': 'IntegrationRuntimeReference'}, - 'project_parameters': {'key': 'typeProperties.projectParameters', 'type': '{SSISExecutionParameter}'}, - 'package_parameters': {'key': 'typeProperties.packageParameters', 'type': '{SSISExecutionParameter}'}, - 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, - 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, - 'property_overrides': {'key': 'typeProperties.propertyOverrides', 'type': '{SSISPropertyOverride}'}, - 'log_location': {'key': 'typeProperties.logLocation', 'type': 'SSISLogLocation'}, - } - - def __init__(self, *, name: str, package_location, connect_via, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, runtime=None, logging_level=None, environment_path=None, execution_credential=None, project_parameters=None, package_parameters=None, project_connection_managers=None, package_connection_managers=None, property_overrides=None, log_location=None, **kwargs) -> None: - super(ExecuteSSISPackageActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.package_location = package_location - self.runtime = runtime - self.logging_level = logging_level - self.environment_path = 
environment_path - self.execution_credential = execution_credential - self.connect_via = connect_via - self.project_parameters = project_parameters - self.package_parameters = package_parameters - self.project_connection_managers = project_connection_managers - self.package_connection_managers = package_connection_managers - self.property_overrides = property_overrides - self.log_location = log_location - self.type = 'ExecuteSSISPackage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py index 8c16eff2c753..b2b0bbcf9714 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py @@ -26,12 +26,10 @@ class ExecutionActivity(Activity): HDInsightStreamingActivity, HDInsightMapReduceActivity, HDInsightPigActivity, HDInsightHiveActivity, CopyActivity - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Activity name. :type name: str :param description: Activity description. :type description: str @@ -39,7 +37,7 @@ class ExecutionActivity(Activity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param linked_service_name: Linked service reference. 
:type linked_service_name: @@ -68,8 +66,8 @@ class ExecutionActivity(Activity): 'type': {'AzureFunctionActivity': 'AzureFunctionActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'GetMetadata': 'GetMetadataActivity', 'WebActivity': 'WebActivity', 'Lookup': 'LookupActivity', 'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'Delete': 'DeleteActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'Custom': 'CustomActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'Copy': 'CopyActivity'} } - def __init__(self, **kwargs): - super(ExecutionActivity, self).__init__(**kwargs) - self.linked_service_name = kwargs.get('linked_service_name', None) - self.policy = kwargs.get('policy', None) + def __init__(self, name, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None): + super(ExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties) + self.linked_service_name = linked_service_name + self.policy = policy self.type = 'Execution' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity_py3.py deleted file mode 100644 index 5deb58db81a7..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity_py3.py +++ /dev/null @@ -1,75 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .activity_py3 import Activity - - -class ExecutionActivity(Activity): - """Base class for all execution activities. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureFunctionActivity, DatabricksSparkPythonActivity, - DatabricksSparkJarActivity, DatabricksNotebookActivity, - DataLakeAnalyticsUSQLActivity, AzureMLUpdateResourceActivity, - AzureMLBatchExecutionActivity, GetMetadataActivity, WebActivity, - LookupActivity, AzureDataExplorerCommandActivity, DeleteActivity, - SqlServerStoredProcedureActivity, CustomActivity, - ExecuteSSISPackageActivity, HDInsightSparkActivity, - HDInsightStreamingActivity, HDInsightMapReduceActivity, - HDInsightPigActivity, HDInsightHiveActivity, CopyActivity - - All required parameters must be populated in order to send to Azure. 
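# A sketch of how the polymorphic ExecutionActivity base above behaves
# (illustrative, not part of the generated diff). Each subclass pins the 'type'
# discriminator listed in _subtype_map, and msrest's Model.serialize() emits it
# in the REST payload; the activity and pipeline names are hypothetical.
from azure.mgmt.datafactory.models import (
    ExecutePipelineActivity, PipelineReference)

activity = ExecutePipelineActivity(
    name='RunChild', pipeline=PipelineReference(reference_name='Child'))
body = activity.serialize()   # msrest.serialization.Model.serialize()
assert body['type'] == 'ExecutePipeline'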
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - } - - _subtype_map = { - 'type': {'AzureFunctionActivity': 'AzureFunctionActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'GetMetadata': 'GetMetadataActivity', 'WebActivity': 'WebActivity', 'Lookup': 'LookupActivity', 'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'Delete': 'DeleteActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'Custom': 'CustomActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'Copy': 'CopyActivity'} - } - - def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, **kwargs) -> None: - super(ExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.linked_service_name = linked_service_name - self.policy = policy - self.type = 'Execution' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request.py index a6a2cc280b4d..c10882c1f471 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request.py @@ -26,7 +26,7 @@ class ExposureControlRequest(Model): 'feature_type': {'key': 'featureType', 'type': 'str'}, } - def __init__(self, **kwargs): - 
super(ExposureControlRequest, self).__init__(**kwargs) - self.feature_name = kwargs.get('feature_name', None) - self.feature_type = kwargs.get('feature_type', None) + def __init__(self, feature_name=None, feature_type=None): + super(ExposureControlRequest, self).__init__() + self.feature_name = feature_name + self.feature_type = feature_type diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request_py3.py deleted file mode 100644 index b3f4099fb972..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request_py3.py +++ /dev/null @@ -1,32 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class ExposureControlRequest(Model): - """The exposure control request. - - :param feature_name: The feature name. - :type feature_name: str - :param feature_type: The feature type. - :type feature_type: str - """ - - _attribute_map = { - 'feature_name': {'key': 'featureName', 'type': 'str'}, - 'feature_type': {'key': 'featureType', 'type': 'str'}, - } - - def __init__(self, *, feature_name: str=None, feature_type: str=None, **kwargs) -> None: - super(ExposureControlRequest, self).__init__(**kwargs) - self.feature_name = feature_name - self.feature_type = feature_type diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response.py index 868647e3c5b3..51237ef9536a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response.py @@ -34,7 +34,7 @@ class ExposureControlResponse(Model): 'value': {'key': 'value', 'type': 'str'}, } - def __init__(self, **kwargs): - super(ExposureControlResponse, self).__init__(**kwargs) + def __init__(self): + super(ExposureControlResponse, self).__init__() self.feature_name = None self.value = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response_py3.py deleted file mode 100644 index 1ac7138e7984..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response_py3.py +++ /dev/null @@ -1,40 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
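# A usage sketch for the ExposureControlRequest model regenerated above
# (illustrative, not part of the generated diff): a plain two-field model. The
# feature name and type below are hypothetical values.
from azure.mgmt.datafactory.models import ExposureControlRequest

request = ExposureControlRequest(
    feature_name='SomeGatedFeature',   # hypothetical feature flag
    feature_type='Feature',
)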
-# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class ExposureControlResponse(Model): - """The exposure control response. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar feature_name: The feature name. - :vartype feature_name: str - :ivar value: The feature value. - :vartype value: str - """ - - _validation = { - 'feature_name': {'readonly': True}, - 'value': {'readonly': True}, - } - - _attribute_map = { - 'feature_name': {'key': 'featureName', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, - } - - def __init__(self, **kwargs) -> None: - super(ExposureControlResponse, self).__init__(**kwargs) - self.feature_name = None - self.value = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression.py index 4b16ceca2794..1dcebd0c48de 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression.py @@ -18,11 +18,9 @@ class Expression(Model): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Expression type. Default value: "Expression" . + :ivar type: Expression type. Default value: "Expression" . :vartype type: str - :param value: Required. Expression value. + :param value: Expression value. :type value: str """ @@ -38,6 +36,6 @@ class Expression(Model): type = "Expression" - def __init__(self, **kwargs): - super(Expression, self).__init__(**kwargs) - self.value = kwargs.get('value', None) + def __init__(self, value): + super(Expression, self).__init__() + self.value = value diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression_py3.py deleted file mode 100644 index c6ad023a57ed..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression_py3.py +++ /dev/null @@ -1,43 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class Expression(Model): - """Azure Data Factory expression definition. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Expression type. Default value: "Expression" . - :vartype type: str - :param value: Required. Expression value. 
- :type value: str - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'value': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, - } - - type = "Expression" - - def __init__(self, *, value: str, **kwargs) -> None: - super(Expression, self).__init__(**kwargs) - self.value = value diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory.py index 614b3d7fc97a..c425c329d199 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory.py @@ -71,11 +71,11 @@ class Factory(Resource): 'repo_configuration': {'key': 'properties.repoConfiguration', 'type': 'FactoryRepoConfiguration'}, } - def __init__(self, **kwargs): - super(Factory, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.identity = kwargs.get('identity', None) + def __init__(self, location=None, tags=None, additional_properties=None, identity=None, repo_configuration=None): + super(Factory, self).__init__(location=location, tags=tags) + self.additional_properties = additional_properties + self.identity = identity self.provisioning_state = None self.create_time = None self.version = None - self.repo_configuration = kwargs.get('repo_configuration', None) + self.repo_configuration = repo_configuration diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration.py index 02cec39d8313..30ab5463f015 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration.py @@ -15,19 +15,17 @@ class FactoryGitHubConfiguration(FactoryRepoConfiguration): """Factory's GitHub repo information. - All required parameters must be populated in order to send to Azure. - - :param account_name: Required. Account name. + :param account_name: Account name. :type account_name: str - :param repository_name: Required. Repository name. + :param repository_name: Repository name. :type repository_name: str - :param collaboration_branch: Required. Collaboration branch. + :param collaboration_branch: Collaboration branch. :type collaboration_branch: str - :param root_folder: Required. Root folder. + :param root_folder: Root folder. :type root_folder: str :param last_commit_id: Last commit id. :type last_commit_id: str - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param host_name: GitHub Enterprise host name. 
For example: https://github.mydomain.com @@ -52,7 +50,7 @@ class FactoryGitHubConfiguration(FactoryRepoConfiguration): 'host_name': {'key': 'hostName', 'type': 'str'}, } - def __init__(self, **kwargs): - super(FactoryGitHubConfiguration, self).__init__(**kwargs) - self.host_name = kwargs.get('host_name', None) + def __init__(self, account_name, repository_name, collaboration_branch, root_folder, last_commit_id=None, host_name=None): + super(FactoryGitHubConfiguration, self).__init__(account_name=account_name, repository_name=repository_name, collaboration_branch=collaboration_branch, root_folder=root_folder, last_commit_id=last_commit_id) + self.host_name = host_name self.type = 'FactoryGitHubConfiguration' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration_py3.py deleted file mode 100644 index 23c5dbf21f0c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration_py3.py +++ /dev/null @@ -1,58 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .factory_repo_configuration_py3 import FactoryRepoConfiguration - - -class FactoryGitHubConfiguration(FactoryRepoConfiguration): - """Factory's GitHub repo information. - - All required parameters must be populated in order to send to Azure. - - :param account_name: Required. Account name. - :type account_name: str - :param repository_name: Required. Repository name. - :type repository_name: str - :param collaboration_branch: Required. Collaboration branch. - :type collaboration_branch: str - :param root_folder: Required. Root folder. - :type root_folder: str - :param last_commit_id: Last commit id. - :type last_commit_id: str - :param type: Required. Constant filled by server. - :type type: str - :param host_name: GitHub Enterprise host name. 
For example: - https://github.mydomain.com - :type host_name: str - """ - - _validation = { - 'account_name': {'required': True}, - 'repository_name': {'required': True}, - 'collaboration_branch': {'required': True}, - 'root_folder': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'repository_name': {'key': 'repositoryName', 'type': 'str'}, - 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, - 'root_folder': {'key': 'rootFolder', 'type': 'str'}, - 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host_name': {'key': 'hostName', 'type': 'str'}, - } - - def __init__(self, *, account_name: str, repository_name: str, collaboration_branch: str, root_folder: str, last_commit_id: str=None, host_name: str=None, **kwargs) -> None: - super(FactoryGitHubConfiguration, self).__init__(account_name=account_name, repository_name=repository_name, collaboration_branch=collaboration_branch, root_folder=root_folder, last_commit_id=last_commit_id, **kwargs) - self.host_name = host_name - self.type = 'FactoryGitHubConfiguration' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity.py index dad745424af3..e1c7644fee88 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity.py @@ -18,10 +18,8 @@ class FactoryIdentity(Model): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. The identity type. Currently the only supported type - is 'SystemAssigned'. Default value: "SystemAssigned" . + :ivar type: The identity type. Currently the only supported type is + 'SystemAssigned'. Default value: "SystemAssigned" . :vartype type: str :ivar principal_id: The principal id of the identity. :vartype principal_id: str @@ -43,7 +41,7 @@ class FactoryIdentity(Model): type = "SystemAssigned" - def __init__(self, **kwargs): - super(FactoryIdentity, self).__init__(**kwargs) + def __init__(self): + super(FactoryIdentity, self).__init__() self.principal_id = None self.tenant_id = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity_py3.py deleted file mode 100644 index 567100d8c19e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity_py3.py +++ /dev/null @@ -1,49 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class FactoryIdentity(Model): - """Identity properties of the factory resource. - - Variables are only populated by the server, and will be ignored when - sending a request. 
- - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. The identity type. Currently the only supported type - is 'SystemAssigned'. Default value: "SystemAssigned" . - :vartype type: str - :ivar principal_id: The principal id of the identity. - :vartype principal_id: str - :ivar tenant_id: The client tenant id of the identity. - :vartype tenant_id: str - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'principal_id': {'readonly': True}, - 'tenant_id': {'readonly': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'principal_id': {'key': 'principalId', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - } - - type = "SystemAssigned" - - def __init__(self, **kwargs) -> None: - super(FactoryIdentity, self).__init__(**kwargs) - self.principal_id = None - self.tenant_id = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_py3.py deleted file mode 100644 index 0682aa5f8852..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_py3.py +++ /dev/null @@ -1,81 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .resource_py3 import Resource - - -class Factory(Resource): - """Factory resource type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :param location: The resource location. - :type location: str - :param tags: The resource tags. - :type tags: dict[str, str] - :ivar e_tag: Etag identifies change in the resource. - :vartype e_tag: str - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param identity: Managed service identity of the factory. - :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity - :ivar provisioning_state: Factory provisioning state, example Succeeded. - :vartype provisioning_state: str - :ivar create_time: Time the factory was created in ISO8601 format. - :vartype create_time: datetime - :ivar version: Version of the factory. - :vartype version: str - :param repo_configuration: Git repo information of the factory. 
- :type repo_configuration: - ~azure.mgmt.datafactory.models.FactoryRepoConfiguration - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'e_tag': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - 'create_time': {'readonly': True}, - 'version': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'e_tag': {'key': 'eTag', 'type': 'str'}, - 'additional_properties': {'key': '', 'type': '{object}'}, - 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, - 'create_time': {'key': 'properties.createTime', 'type': 'iso-8601'}, - 'version': {'key': 'properties.version', 'type': 'str'}, - 'repo_configuration': {'key': 'properties.repoConfiguration', 'type': 'FactoryRepoConfiguration'}, - } - - def __init__(self, *, location: str=None, tags=None, additional_properties=None, identity=None, repo_configuration=None, **kwargs) -> None: - super(Factory, self).__init__(location=location, tags=tags, **kwargs) - self.additional_properties = additional_properties - self.identity = identity - self.provisioning_state = None - self.create_time = None - self.version = None - self.repo_configuration = repo_configuration diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration.py index 7c20db016c71..d470fece8248 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration.py @@ -18,19 +18,17 @@ class FactoryRepoConfiguration(Model): You probably want to use the sub-classes and not this class directly. Known sub-classes are: FactoryVSTSConfiguration, FactoryGitHubConfiguration - All required parameters must be populated in order to send to Azure. - - :param account_name: Required. Account name. + :param account_name: Account name. :type account_name: str - :param repository_name: Required. Repository name. + :param repository_name: Repository name. :type repository_name: str - :param collaboration_branch: Required. Collaboration branch. + :param collaboration_branch: Collaboration branch. :type collaboration_branch: str - :param root_folder: Required. Root folder. + :param root_folder: Root folder. :type root_folder: str :param last_commit_id: Last commit id. :type last_commit_id: str - :param type: Required. Constant filled by server. + :param type: Constant filled by server. 
:type type: str """ @@ -55,11 +53,11 @@ class FactoryRepoConfiguration(Model): 'type': {'FactoryVSTSConfiguration': 'FactoryVSTSConfiguration', 'FactoryGitHubConfiguration': 'FactoryGitHubConfiguration'} } - def __init__(self, **kwargs): - super(FactoryRepoConfiguration, self).__init__(**kwargs) - self.account_name = kwargs.get('account_name', None) - self.repository_name = kwargs.get('repository_name', None) - self.collaboration_branch = kwargs.get('collaboration_branch', None) - self.root_folder = kwargs.get('root_folder', None) - self.last_commit_id = kwargs.get('last_commit_id', None) + def __init__(self, account_name, repository_name, collaboration_branch, root_folder, last_commit_id=None): + super(FactoryRepoConfiguration, self).__init__() + self.account_name = account_name + self.repository_name = repository_name + self.collaboration_branch = collaboration_branch + self.root_folder = root_folder + self.last_commit_id = last_commit_id self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration_py3.py deleted file mode 100644 index eefed7978850..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration_py3.py +++ /dev/null @@ -1,65 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class FactoryRepoConfiguration(Model): - """Factory's git repo information. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: FactoryVSTSConfiguration, FactoryGitHubConfiguration - - All required parameters must be populated in order to send to Azure. - - :param account_name: Required. Account name. - :type account_name: str - :param repository_name: Required. Repository name. - :type repository_name: str - :param collaboration_branch: Required. Collaboration branch. - :type collaboration_branch: str - :param root_folder: Required. Root folder. - :type root_folder: str - :param last_commit_id: Last commit id. - :type last_commit_id: str - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'account_name': {'required': True}, - 'repository_name': {'required': True}, - 'collaboration_branch': {'required': True}, - 'root_folder': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'repository_name': {'key': 'repositoryName', 'type': 'str'}, - 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, - 'root_folder': {'key': 'rootFolder', 'type': 'str'}, - 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'FactoryVSTSConfiguration': 'FactoryVSTSConfiguration', 'FactoryGitHubConfiguration': 'FactoryGitHubConfiguration'} - } - - def __init__(self, *, account_name: str, repository_name: str, collaboration_branch: str, root_folder: str, last_commit_id: str=None, **kwargs) -> None: - super(FactoryRepoConfiguration, self).__init__(**kwargs) - self.account_name = account_name - self.repository_name = repository_name - self.collaboration_branch = collaboration_branch - self.root_folder = root_folder - self.last_commit_id = last_commit_id - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update.py index 44eac9d287ce..0a29c7c8a9bf 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update.py @@ -27,7 +27,7 @@ class FactoryRepoUpdate(Model): 'repo_configuration': {'key': 'repoConfiguration', 'type': 'FactoryRepoConfiguration'}, } - def __init__(self, **kwargs): - super(FactoryRepoUpdate, self).__init__(**kwargs) - self.factory_resource_id = kwargs.get('factory_resource_id', None) - self.repo_configuration = kwargs.get('repo_configuration', None) + def __init__(self, factory_resource_id=None, repo_configuration=None): + super(FactoryRepoUpdate, self).__init__() + self.factory_resource_id = factory_resource_id + self.repo_configuration = repo_configuration diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update_py3.py deleted file mode 100644 index 68aca7a48db8..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update_py3.py +++ /dev/null @@ -1,33 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class FactoryRepoUpdate(Model): - """Factory's git repo information. - - :param factory_resource_id: The factory resource id. - :type factory_resource_id: str - :param repo_configuration: Git repo information of the factory. 
- :type repo_configuration: - ~azure.mgmt.datafactory.models.FactoryRepoConfiguration - """ - - _attribute_map = { - 'factory_resource_id': {'key': 'factoryResourceId', 'type': 'str'}, - 'repo_configuration': {'key': 'repoConfiguration', 'type': 'FactoryRepoConfiguration'}, - } - - def __init__(self, *, factory_resource_id: str=None, repo_configuration=None, **kwargs) -> None: - super(FactoryRepoUpdate, self).__init__(**kwargs) - self.factory_resource_id = factory_resource_id - self.repo_configuration = repo_configuration diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters.py index e9977fceff86..0524027900dd 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters.py @@ -26,7 +26,7 @@ class FactoryUpdateParameters(Model): 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, } - def __init__(self, **kwargs): - super(FactoryUpdateParameters, self).__init__(**kwargs) - self.tags = kwargs.get('tags', None) - self.identity = kwargs.get('identity', None) + def __init__(self, tags=None, identity=None): + super(FactoryUpdateParameters, self).__init__() + self.tags = tags + self.identity = identity diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters_py3.py deleted file mode 100644 index 5bd523fedf3d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters_py3.py +++ /dev/null @@ -1,32 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class FactoryUpdateParameters(Model): - """Parameters for updating a factory resource. - - :param tags: The resource tags. - :type tags: dict[str, str] - :param identity: Managed service identity of the factory. - :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity - """ - - _attribute_map = { - 'tags': {'key': 'tags', 'type': '{str}'}, - 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, - } - - def __init__(self, *, tags=None, identity=None, **kwargs) -> None: - super(FactoryUpdateParameters, self).__init__(**kwargs) - self.tags = tags - self.identity = identity diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration.py index 6d07c68d23e3..3ee1c33ba563 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration.py @@ -15,21 +15,19 @@ class FactoryVSTSConfiguration(FactoryRepoConfiguration): """Factory's VSTS repo information. 
- All required parameters must be populated in order to send to Azure. - - :param account_name: Required. Account name. + :param account_name: Account name. :type account_name: str - :param repository_name: Required. Repository name. + :param repository_name: Repository name. :type repository_name: str - :param collaboration_branch: Required. Collaboration branch. + :param collaboration_branch: Collaboration branch. :type collaboration_branch: str - :param root_folder: Required. Root folder. + :param root_folder: Root folder. :type root_folder: str :param last_commit_id: Last commit id. :type last_commit_id: str - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param project_name: Required. VSTS project name. + :param project_name: VSTS project name. :type project_name: str :param tenant_id: VSTS tenant id. :type tenant_id: str @@ -55,8 +53,8 @@ class FactoryVSTSConfiguration(FactoryRepoConfiguration): 'tenant_id': {'key': 'tenantId', 'type': 'str'}, } - def __init__(self, **kwargs): - super(FactoryVSTSConfiguration, self).__init__(**kwargs) - self.project_name = kwargs.get('project_name', None) - self.tenant_id = kwargs.get('tenant_id', None) + def __init__(self, account_name, repository_name, collaboration_branch, root_folder, project_name, last_commit_id=None, tenant_id=None): + super(FactoryVSTSConfiguration, self).__init__(account_name=account_name, repository_name=repository_name, collaboration_branch=collaboration_branch, root_folder=root_folder, last_commit_id=last_commit_id) + self.project_name = project_name + self.tenant_id = tenant_id self.type = 'FactoryVSTSConfiguration' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration_py3.py deleted file mode 100644 index 4f13c0959d63..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration_py3.py +++ /dev/null @@ -1,62 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .factory_repo_configuration_py3 import FactoryRepoConfiguration - - -class FactoryVSTSConfiguration(FactoryRepoConfiguration): - """Factory's VSTS repo information. - - All required parameters must be populated in order to send to Azure. - - :param account_name: Required. Account name. - :type account_name: str - :param repository_name: Required. Repository name. - :type repository_name: str - :param collaboration_branch: Required. Collaboration branch. - :type collaboration_branch: str - :param root_folder: Required. Root folder. - :type root_folder: str - :param last_commit_id: Last commit id. - :type last_commit_id: str - :param type: Required. Constant filled by server. - :type type: str - :param project_name: Required. VSTS project name. - :type project_name: str - :param tenant_id: VSTS tenant id. 
- :type tenant_id: str - """ - - _validation = { - 'account_name': {'required': True}, - 'repository_name': {'required': True}, - 'collaboration_branch': {'required': True}, - 'root_folder': {'required': True}, - 'type': {'required': True}, - 'project_name': {'required': True}, - } - - _attribute_map = { - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'repository_name': {'key': 'repositoryName', 'type': 'str'}, - 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, - 'root_folder': {'key': 'rootFolder', 'type': 'str'}, - 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'project_name': {'key': 'projectName', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - } - - def __init__(self, *, account_name: str, repository_name: str, collaboration_branch: str, root_folder: str, project_name: str, last_commit_id: str=None, tenant_id: str=None, **kwargs) -> None: - super(FactoryVSTSConfiguration, self).__init__(account_name=account_name, repository_name=repository_name, collaboration_branch=collaboration_branch, root_folder=root_folder, last_commit_id=last_commit_id, **kwargs) - self.project_name = project_name - self.tenant_id = tenant_id - self.type = 'FactoryVSTSConfiguration' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service.py index ffced5c2e689..e47347966c69 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service.py @@ -15,8 +15,6 @@ class FileServerLinkedService(LinkedService): """File system linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,10 +29,10 @@ class FileServerLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param host: Required. Host name of the server. Type: string (or - Expression with resultType string). + :param host: Host name of the server. Type: string (or Expression with + resultType string). :type host: object :param user_id: User ID to logon the server. Type: string (or Expression with resultType string). 
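For review context, a minimal sketch of how the reworked FileServerLinkedService constructor would be called once `host` becomes an explicit positional parameter instead of a **kwargs entry. The host, user, and secret values below are illustrative placeholders, and SecureString is assumed to be the SDK's concrete SecretBase implementation:

    from azure.mgmt.datafactory.models import FileServerLinkedService, SecureString

    # `host` is required (enforced by _validation); the remaining parameters
    # keep their keyword form and default to None.
    linked_service = FileServerLinkedService(
        host='fileserver.contoso.local',          # placeholder host name
        user_id='CONTOSO\\adf_service',           # placeholder logon user
        password=SecureString(value='<placeholder>'),
    )
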
@@ -65,10 +63,10 @@ class FileServerLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(FileServerLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.user_id = kwargs.get('user_id', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, host, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, user_id=None, password=None, encrypted_credential=None): + super(FileServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.host = host + self.user_id = user_id + self.password = password + self.encrypted_credential = encrypted_credential self.type = 'FileServer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service_py3.py deleted file mode 100644 index ec6fe58bb3a3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service_py3.py +++ /dev/null @@ -1,74 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class FileServerLinkedService(LinkedService): - """File system linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. Host name of the server. Type: string (or - Expression with resultType string). - :type host: object - :param user_id: User ID to logon the server. Type: string (or Expression - with resultType string). - :type user_id: object - :param password: Password to logon the server. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'user_id': {'key': 'typeProperties.userId', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_id=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(FileServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.user_id = user_id - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'FileServer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location.py index edce5fe68a65..247ff02a87a8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location.py @@ -15,12 +15,10 @@ class FileServerLocation(DatasetLocation): """The location of file server dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. + :param type: Type of dataset storage location. :type type: str :param folder_path: Specify the folder path of dataset. 
Type: string (or Expression with resultType string) @@ -34,12 +32,5 @@ class FileServerLocation(DatasetLocation): 'type': {'required': True}, } - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(FileServerLocation, self).__init__(**kwargs) + def __init__(self, type, additional_properties=None, folder_path=None, file_name=None): + super(FileServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location_py3.py deleted file mode 100644 index f7fb8354bcbc..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location_py3.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_location_py3 import DatasetLocation - - -class FileServerLocation(DatasetLocation): - """The location of file server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). - :type file_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: - super(FileServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings.py index da9d0809e03a..00b1ea4539cc 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings.py @@ -15,12 +15,10 @@ class FileServerReadSettings(StoreReadSettings): """File server read settings. - All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. The read setting type. + :param type: The read setting type. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType @@ -63,11 +61,11 @@ class FileServerReadSettings(StoreReadSettings): 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, **kwargs): - super(FileServerReadSettings, self).__init__(**kwargs) - self.recursive = kwargs.get('recursive', None) - self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) - self.wildcard_file_name = kwargs.get('wildcard_file_name', None) - self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + def __init__(self, type, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery=None, modified_datetime_start=None, modified_datetime_end=None): + super(FileServerReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings_py3.py deleted file mode 100644 index 1fadb49b1795..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings_py3.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .store_read_settings_py3 import StoreReadSettings - - -class FileServerReadSettings(StoreReadSettings): - """File server read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - :param wildcard_folder_path: FileServer wildcardFolderPath. 
Type: string - (or Expression with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: FileServer wildcardFileName. Type: string (or - Expression with resultType string). - :type wildcard_file_name: object - :param enable_partition_discovery: Indicates whether to enable partition - discovery. - :type enable_partition_discovery: bool - :param modified_datetime_start: The start of file's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: - string (or Expression with resultType string). - :type modified_datetime_end: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: - super(FileServerReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.recursive = recursive - self.wildcard_folder_path = wildcard_folder_path - self.wildcard_file_name = wildcard_file_name - self.enable_partition_discovery = enable_partition_discovery - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings.py index e3bc7946d1ac..39a21b076162 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings.py @@ -15,8 +15,6 @@ class FileServerWriteSettings(StoreWriteSettings): """File server write settings. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -26,7 +24,7 @@ class FileServerWriteSettings(StoreWriteSettings): :type max_concurrent_connections: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. 
:type type: str """ @@ -34,13 +32,6 @@ class FileServerWriteSettings(StoreWriteSettings): 'type': {'required': True}, } - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(FileServerWriteSettings, self).__init__(**kwargs) + def __init__(self, additional_properties=None, max_concurrent_connections=None, copy_behavior=None): + super(FileServerWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior) self.type = 'FileServerWriteSettings' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings_py3.py deleted file mode 100644 index b174cf537577..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings_py3.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .store_write_settings_py3 import StoreWriteSettings - - -class FileServerWriteSettings(StoreWriteSettings): - """File server write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: - super(FileServerWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) - self.type = 'FileServerWriteSettings' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset.py index 6874f4c08929..adafa94f6030 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset.py @@ -15,8 +15,6 @@ class FileShareDataset(Dataset): """An on-premises file system dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class FileShareDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class FileShareDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param folder_path: The path of the on-premises file system. Type: string (or Expression with resultType string). 
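Similarly, a hedged usage sketch for the updated FileShareDataset signature, where linked_service_name moves to a required positional argument. The reference name and paths are made up, and LinkedServiceReference is assumed to take a reference_name as elsewhere in this SDK:

    from azure.mgmt.datafactory.models import (
        FileShareDataset,
        LinkedServiceReference,
    )

    # linked_service_name is the sole required positional argument; the
    # typeProperties values (folder_path, file_filter, ...) remain optional
    # keyword parameters.
    dataset = FileShareDataset(
        LinkedServiceReference(reference_name='OnPremFileServer'),
        folder_path='share/incoming',   # placeholder on-premises path
        file_filter='*.csv',            # select only CSV files
    )
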
@@ -89,13 +87,13 @@ class FileShareDataset(Dataset): 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } - def __init__(self, **kwargs): - super(FileShareDataset, self).__init__(**kwargs) - self.folder_path = kwargs.get('folder_path', None) - self.file_name = kwargs.get('file_name', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) - self.format = kwargs.get('format', None) - self.file_filter = kwargs.get('file_filter', None) - self.compression = kwargs.get('compression', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, modified_datetime_start=None, modified_datetime_end=None, format=None, file_filter=None, compression=None): + super(FileShareDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.folder_path = folder_path + self.file_name = file_name + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + self.format = format + self.file_filter = file_filter + self.compression = compression self.type = 'FileShare' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset_py3.py deleted file mode 100644 index 19e88a264e12..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset_py3.py +++ /dev/null @@ -1,101 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class FileShareDataset(Dataset): - """An on-premises file system dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. 
- :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param folder_path: The path of the on-premises file system. Type: string - (or Expression with resultType string). - :type folder_path: object - :param file_name: The name of the on-premises file system. Type: string - (or Expression with resultType string). - :type file_name: object - :param modified_datetime_start: The start of file's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: - string (or Expression with resultType string). - :type modified_datetime_end: object - :param format: The format of the files. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param file_filter: Specify a filter to be used to select a subset of - files in the folderPath rather than all files. Type: string (or Expression - with resultType string). - :type file_filter: object - :param compression: The data compression method used for the file system. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, - 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'file_filter': {'key': 'typeProperties.fileFilter', 'type': 'object'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, modified_datetime_start=None, modified_datetime_end=None, format=None, file_filter=None, compression=None, **kwargs) -> None: - super(FileShareDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.folder_path = folder_path - self.file_name = file_name - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end - self.format = format - self.file_filter = file_filter - self.compression = compression - self.type = 'FileShare' diff --git 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py index 8b8f238c9534..11baf9c0567e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py @@ -15,8 +15,6 @@ class FileSystemSink(CopySink): """A copy activity file system sink. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -38,7 +36,7 @@ class FileSystemSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object @@ -59,7 +57,7 @@ class FileSystemSink(CopySink): 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } - def __init__(self, **kwargs): - super(FileSystemSink, self).__init__(**kwargs) - self.copy_behavior = kwargs.get('copy_behavior', None) + def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None): + super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.copy_behavior = copy_behavior self.type = 'FileSystemSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py deleted file mode 100644 index 24f8623cbb02..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py +++ /dev/null @@ -1,65 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class FileSystemSink(CopySink): - """A copy activity file system sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. 
Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: - super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.copy_behavior = copy_behavior - self.type = 'FileSystemSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source.py index 2986b1848153..751ae6c44f67 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source.py @@ -15,8 +15,6 @@ class FileSystemSource(CopySource): """A copy activity file system source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class FileSystemSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param recursive: If true, files under the folder path will be read recursively. Default is true. 
Type: boolean (or Expression with resultType @@ -52,7 +50,7 @@ class FileSystemSource(CopySource): 'recursive': {'key': 'recursive', 'type': 'object'}, } - def __init__(self, **kwargs): - super(FileSystemSource, self).__init__(**kwargs) - self.recursive = kwargs.get('recursive', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None): + super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.recursive = recursive self.type = 'FileSystemSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source_py3.py deleted file mode 100644 index 0598490ca51c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source_py3.py +++ /dev/null @@ -1,58 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class FileSystemSource(CopySource): - """A copy activity file system source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). 
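The same constructor flattening applies to the file system copy source and sink. A short sketch of the two signatures exactly as they appear in this diff; the copy_behavior value is illustrative, since the property is typed as an open object:

    from azure.mgmt.datafactory.models import FileSystemSink, FileSystemSource

    source = FileSystemSource(recursive=True)                 # read the folder path recursively
    sink = FileSystemSink(copy_behavior='PreserveHierarchy')  # illustrative copy behavior value

Both constructors still set their discriminator ('FileSystemSource' / 'FileSystemSink') internally, so callers never pass type.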
- :type recursive: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, **kwargs) -> None: - super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.recursive = recursive - self.type = 'FileSystemSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity.py index 1346bb234695..ed9e2ca9c8fc 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity.py @@ -15,12 +15,10 @@ class FilterActivity(ControlActivity): """Filter and return results from input array based on the conditions. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Activity name. :type name: str :param description: Activity description. :type description: str @@ -28,11 +26,11 @@ class FilterActivity(ControlActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param items: Required. Input array on which filter should be applied. + :param items: Input array on which filter should be applied. :type items: ~azure.mgmt.datafactory.models.Expression - :param condition: Required. Condition to be used for filtering the input. + :param condition: Condition to be used for filtering the input. 
:type condition: ~azure.mgmt.datafactory.models.Expression """ @@ -54,8 +52,8 @@ class FilterActivity(ControlActivity): 'condition': {'key': 'typeProperties.condition', 'type': 'Expression'}, } - def __init__(self, **kwargs): - super(FilterActivity, self).__init__(**kwargs) - self.items = kwargs.get('items', None) - self.condition = kwargs.get('condition', None) + def __init__(self, name, items, condition, additional_properties=None, description=None, depends_on=None, user_properties=None): + super(FilterActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties) + self.items = items + self.condition = condition self.type = 'Filter' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity_py3.py deleted file mode 100644 index a07cf01d1dd5..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity_py3.py +++ /dev/null @@ -1,61 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .control_activity_py3 import ControlActivity - - -class FilterActivity(ControlActivity): - """Filter and return results from input array based on the conditions. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param items: Required. Input array on which filter should be applied. - :type items: ~azure.mgmt.datafactory.models.Expression - :param condition: Required. Condition to be used for filtering the input. 
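Under the collapsed FilterActivity signature, name, items, and condition lead the argument list and remain required in _validation. A minimal sketch; the activity name and expression strings are illustrative, and Expression is assumed to still take its value as the first argument:

    from azure.mgmt.datafactory.models import Expression, FilterActivity

    filter_activity = FilterActivity(
        'FilterCsvFiles',                                              # name
        items=Expression("@activity('GetFiles').output.childItems"),  # input array
        condition=Expression("@endswith(item().name, '.csv')"))       # per-item predicate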
- :type condition: ~azure.mgmt.datafactory.models.Expression - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'items': {'required': True}, - 'condition': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'items': {'key': 'typeProperties.items', 'type': 'Expression'}, - 'condition': {'key': 'typeProperties.condition', 'type': 'Expression'}, - } - - def __init__(self, *, name: str, items, condition, additional_properties=None, description: str=None, depends_on=None, user_properties=None, **kwargs) -> None: - super(FilterActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.items = items - self.condition = condition - self.type = 'Filter' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity.py index 5edfa2a8140e..26266fd752bc 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity.py @@ -16,12 +16,10 @@ class ForEachActivity(ControlActivity): """This activity is used for iterating over a collection and execute given activities. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Activity name. :type name: str :param description: Activity description. :type description: str @@ -29,7 +27,7 @@ class ForEachActivity(ControlActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param is_sequential: Should the loop be executed in sequence or in parallel (max 50) @@ -37,9 +35,9 @@ class ForEachActivity(ControlActivity): :param batch_count: Batch count to be used for controlling the number of parallel execution (when isSequential is set to false). :type batch_count: int - :param items: Required. Collection to iterate. + :param items: Collection to iterate. :type items: ~azure.mgmt.datafactory.models.Expression - :param activities: Required. List of activities to execute . + :param activities: List of activities to execute . 
:type activities: list[~azure.mgmt.datafactory.models.Activity] """ @@ -64,10 +62,10 @@ class ForEachActivity(ControlActivity): 'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'}, } - def __init__(self, **kwargs): - super(ForEachActivity, self).__init__(**kwargs) - self.is_sequential = kwargs.get('is_sequential', None) - self.batch_count = kwargs.get('batch_count', None) - self.items = kwargs.get('items', None) - self.activities = kwargs.get('activities', None) + def __init__(self, name, items, activities, additional_properties=None, description=None, depends_on=None, user_properties=None, is_sequential=None, batch_count=None): + super(ForEachActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties) + self.is_sequential = is_sequential + self.batch_count = batch_count + self.items = items + self.activities = activities self.type = 'ForEach' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity_py3.py deleted file mode 100644 index 7c5c887bb1d9..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity_py3.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .control_activity_py3 import ControlActivity - - -class ForEachActivity(ControlActivity): - """This activity is used for iterating over a collection and execute given - activities. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param is_sequential: Should the loop be executed in sequence or in - parallel (max 50) - :type is_sequential: bool - :param batch_count: Batch count to be used for controlling the number of - parallel execution (when isSequential is set to false). - :type batch_count: int - :param items: Required. Collection to iterate. - :type items: ~azure.mgmt.datafactory.models.Expression - :param activities: Required. List of activities to execute . 
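ForEachActivity follows the same pattern: items and activities are now plain positional parameters after name, while batch_count stays capped at 50 by _validation. A sketch that reuses the FilterActivity built above; values are illustrative:

    from azure.mgmt.datafactory.models import Expression, ForEachActivity

    for_each = ForEachActivity(
        'CopyEachFile',
        items=Expression('@pipeline().parameters.files'),  # collection to iterate
        activities=[filter_activity],                      # e.g. the FilterActivity sketched earlier
        is_sequential=False,
        batch_count=10)                                    # parallel fan-out, must be <= 50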
- :type activities: list[~azure.mgmt.datafactory.models.Activity] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'batch_count': {'maximum': 50}, - 'items': {'required': True}, - 'activities': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'is_sequential': {'key': 'typeProperties.isSequential', 'type': 'bool'}, - 'batch_count': {'key': 'typeProperties.batchCount', 'type': 'int'}, - 'items': {'key': 'typeProperties.items', 'type': 'Expression'}, - 'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'}, - } - - def __init__(self, *, name: str, items, activities, additional_properties=None, description: str=None, depends_on=None, user_properties=None, is_sequential: bool=None, batch_count: int=None, **kwargs) -> None: - super(ForEachActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.is_sequential = is_sequential - self.batch_count = batch_count - self.items = items - self.activities = activities - self.type = 'ForEach' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings.py index d5213138b96a..0d6da105b65b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings.py @@ -15,12 +15,10 @@ class FormatReadSettings(Model): """Format read settings. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. The read setting type. + :param type: The read setting type. :type type: str """ @@ -33,7 +31,7 @@ class FormatReadSettings(Model): 'type': {'key': 'type', 'type': 'str'}, } - def __init__(self, **kwargs): - super(FormatReadSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = kwargs.get('type', None) + def __init__(self, type, additional_properties=None): + super(FormatReadSettings, self).__init__() + self.additional_properties = additional_properties + self.type = type diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings_py3.py deleted file mode 100644 index 326da0277b89..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings_py3.py +++ /dev/null @@ -1,39 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. 
-# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class FormatReadSettings(Model): - """Format read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, *, type: str, additional_properties=None, **kwargs) -> None: - super(FormatReadSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type = type diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings.py index 2100c6055d0d..41b154dae4de 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings.py @@ -15,12 +15,10 @@ class FormatWriteSettings(Model): """Format write settings. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. The write setting type. + :param type: The write setting type. :type type: str """ @@ -33,7 +31,7 @@ class FormatWriteSettings(Model): 'type': {'key': 'type', 'type': 'str'}, } - def __init__(self, **kwargs): - super(FormatWriteSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = kwargs.get('type', None) + def __init__(self, type, additional_properties=None): + super(FormatWriteSettings, self).__init__() + self.additional_properties = additional_properties + self.type = type diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings_py3.py deleted file mode 100644 index 4150eceffc1c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings_py3.py +++ /dev/null @@ -1,39 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class FormatWriteSettings(Model): - """Format write settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. 
The write setting type. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, *, type: str, additional_properties=None, **kwargs) -> None: - super(FormatWriteSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type = type diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings.py index e023f9ae91f7..47b77a237ea2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings.py @@ -15,12 +15,10 @@ class FtpReadSettings(StoreReadSettings): """Ftp read settings. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. The read setting type. + :param type: The read setting type. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType @@ -55,9 +53,9 @@ class FtpReadSettings(StoreReadSettings): 'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'}, } - def __init__(self, **kwargs): - super(FtpReadSettings, self).__init__(**kwargs) - self.recursive = kwargs.get('recursive', None) - self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) - self.wildcard_file_name = kwargs.get('wildcard_file_name', None) - self.use_binary_transfer = kwargs.get('use_binary_transfer', None) + def __init__(self, type, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, use_binary_transfer=None): + super(FtpReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.use_binary_transfer = use_binary_transfer diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings_py3.py deleted file mode 100644 index 748d306307ac..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings_py3.py +++ /dev/null @@ -1,63 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .store_read_settings_py3 import StoreReadSettings - - -class FtpReadSettings(StoreReadSettings): - """Ftp read settings. - - All required parameters must be populated in order to send to Azure. 
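Unlike the sinks and sources above, the read/write settings classes do not fill type in the constructor, so after this change callers pass the setting type explicitly as the first positional argument. A sketch for FtpReadSettings; the wildcard value is illustrative:

    from azure.mgmt.datafactory.models import FtpReadSettings

    read_settings = FtpReadSettings(
        'FtpReadSettings',            # required: the read setting type discriminator
        recursive=True,
        wildcard_file_name='*.csv',   # illustrative wildcard
        use_binary_transfer=True)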
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - :param wildcard_folder_path: Ftp wildcardFolderPath. Type: string (or - Expression with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: Ftp wildcardFileName. Type: string (or - Expression with resultType string). - :type wildcard_file_name: object - :param use_binary_transfer: Specify whether to use binary transfer mode - for FTP stores. - :type use_binary_transfer: bool - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'}, - } - - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, use_binary_transfer: bool=None, **kwargs) -> None: - super(FtpReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.recursive = recursive - self.wildcard_folder_path = wildcard_folder_path - self.wildcard_file_name = wildcard_file_name - self.use_binary_transfer = use_binary_transfer diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service.py index e649ca56e37c..51bf170dd8c2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service.py @@ -15,8 +15,6 @@ class FtpServerLinkedService(LinkedService): """A FTP server Linked Service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,10 +29,10 @@ class FtpServerLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param host: Required. Host name of the FTP server. Type: string (or - Expression with resultType string). + :param host: Host name of the FTP server. Type: string (or Expression with + resultType string). :type host: object :param port: The TCP port number that the FTP server uses to listen for client connections. Default value is 21. 
Type: integer (or Expression with @@ -85,14 +83,14 @@ class FtpServerLinkedService(LinkedService): 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, } - def __init__(self, **kwargs): - super(FtpServerLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.port = kwargs.get('port', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None) + def __init__(self, host, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, port=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, enable_ssl=None, enable_server_certificate_validation=None): + super(FtpServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.host = host + self.port = port + self.authentication_type = authentication_type + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.enable_ssl = enable_ssl + self.enable_server_certificate_validation = enable_server_certificate_validation self.type = 'FtpServer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service_py3.py deleted file mode 100644 index b38ad1c03f46..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service_py3.py +++ /dev/null @@ -1,98 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class FtpServerLinkedService(LinkedService): - """A FTP server Linked Service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. Host name of the FTP server. Type: string (or - Expression with resultType string). 
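With host promoted to the leading positional parameter of FtpServerLinkedService, a linked service can be built as below. The host, credentials, and secret are illustrative, and SecureString is assumed to still take the secret value as its first argument:

    from azure.mgmt.datafactory.models import FtpServerLinkedService, SecureString

    ftp_ls = FtpServerLinkedService(
        'ftp.example.com',                  # host, now the first positional argument
        port=21,
        authentication_type='Basic',
        user_name='deploy',
        password=SecureString('<secret>'),  # illustrative; load from a secret store in practice
        enable_ssl=True)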
- :type host: object - :param port: The TCP port number that the FTP server uses to listen for - client connections. Default value is 21. Type: integer (or Expression with - resultType integer), minimum: 0. - :type port: object - :param authentication_type: The authentication type to be used to connect - to the FTP server. Possible values include: 'Basic', 'Anonymous' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.FtpAuthenticationType - :param user_name: Username to logon the FTP server. Type: string (or - Expression with resultType string). - :type user_name: object - :param password: Password to logon the FTP server. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - :param enable_ssl: If true, connect to the FTP server over SSL/TLS - channel. Default value is true. Type: boolean (or Expression with - resultType boolean). - :type enable_ssl: object - :param enable_server_certificate_validation: If true, validate the FTP - server SSL certificate when connect over SSL/TLS channel. Default value is - true. Type: boolean (or Expression with resultType boolean). - :type enable_server_certificate_validation: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, - } - - def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, enable_ssl=None, enable_server_certificate_validation=None, **kwargs) -> None: - super(FtpServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.port = port - self.authentication_type = authentication_type - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential - self.enable_ssl = enable_ssl - self.enable_server_certificate_validation = enable_server_certificate_validation - self.type = 'FtpServer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location.py index 5d5e933036df..61ef5bef42cc 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location.py @@ -15,12 +15,10 @@ class FtpServerLocation(DatasetLocation): """The location of ftp server dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. + :param type: Type of dataset storage location. :type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string) @@ -34,12 +32,5 @@ class FtpServerLocation(DatasetLocation): 'type': {'required': True}, } - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(FtpServerLocation, self).__init__(**kwargs) + def __init__(self, type, additional_properties=None, folder_path=None, file_name=None): + super(FtpServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location_py3.py deleted file mode 100644 index ac296bcfca31..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location_py3.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_location_py3 import DatasetLocation - - -class FtpServerLocation(DatasetLocation): - """The location of ftp server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). 
- :type file_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: - super(FtpServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity.py index 7941189f2dcd..7f08bf98769d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity.py @@ -15,12 +15,10 @@ class GetMetadataActivity(ExecutionActivity): """Activity to get metadata of dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Activity name. :type name: str :param description: Activity description. :type description: str @@ -28,14 +26,14 @@ class GetMetadataActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param dataset: Required. GetMetadata activity dataset reference. + :param dataset: GetMetadata activity dataset reference. :type dataset: ~azure.mgmt.datafactory.models.DatasetReference :param field_list: Fields of metadata to get from dataset. 
:type field_list: list[object] @@ -60,8 +58,8 @@ class GetMetadataActivity(ExecutionActivity): 'field_list': {'key': 'typeProperties.fieldList', 'type': '[object]'}, } - def __init__(self, **kwargs): - super(GetMetadataActivity, self).__init__(**kwargs) - self.dataset = kwargs.get('dataset', None) - self.field_list = kwargs.get('field_list', None) + def __init__(self, name, dataset, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, field_list=None): + super(GetMetadataActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) + self.dataset = dataset + self.field_list = field_list self.type = 'GetMetadata' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity_py3.py deleted file mode 100644 index b4d8eb17cab1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity_py3.py +++ /dev/null @@ -1,67 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class GetMetadataActivity(ExecutionActivity): - """Activity to get metadata of dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param dataset: Required. GetMetadata activity dataset reference. - :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - :param field_list: Fields of metadata to get from dataset. 
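GetMetadataActivity keeps dataset required, now positionally after name. A sketch; the dataset reference name and metadata field list are illustrative, and DatasetReference is assumed to still take the reference name first:

    from azure.mgmt.datafactory.models import DatasetReference, GetMetadataActivity

    get_metadata = GetMetadataActivity(
        'GetFileMetadata',
        DatasetReference('MyFileShareDataset'),        # illustrative dataset reference
        field_list=['exists', 'lastModified', 'size'])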
- :type field_list: list[object] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'dataset': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, - 'field_list': {'key': 'typeProperties.fieldList', 'type': '[object]'}, - } - - def __init__(self, *, name: str, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, field_list=None, **kwargs) -> None: - super(GetMetadataActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.dataset = dataset - self.field_list = field_list - self.type = 'GetMetadata' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request.py index 1be4a2afece0..5e37054dfd10 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request.py @@ -23,6 +23,6 @@ class GetSsisObjectMetadataRequest(Model): 'metadata_path': {'key': 'metadataPath', 'type': 'str'}, } - def __init__(self, **kwargs): - super(GetSsisObjectMetadataRequest, self).__init__(**kwargs) - self.metadata_path = kwargs.get('metadata_path', None) + def __init__(self, metadata_path=None): + super(GetSsisObjectMetadataRequest, self).__init__() + self.metadata_path = metadata_path diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request_py3.py deleted file mode 100644 index 310cd9783d81..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request_py3.py +++ /dev/null @@ -1,28 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class GetSsisObjectMetadataRequest(Model): - """The request payload of get SSIS object metadata. - - :param metadata_path: Metadata path. 
- :type metadata_path: str - """ - - _attribute_map = { - 'metadata_path': {'key': 'metadataPath', 'type': 'str'}, - } - - def __init__(self, *, metadata_path: str=None, **kwargs) -> None: - super(GetSsisObjectMetadataRequest, self).__init__(**kwargs) - self.metadata_path = metadata_path diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request.py index cadecdf70f44..7470e856228d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request.py @@ -15,14 +15,11 @@ class GitHubAccessTokenRequest(Model): """Get GitHub access token request definition. - All required parameters must be populated in order to send to Azure. - - :param git_hub_access_code: Required. GitHub access code. + :param git_hub_access_code: GitHub access code. :type git_hub_access_code: str :param git_hub_client_id: GitHub application client ID. :type git_hub_client_id: str - :param git_hub_access_token_base_url: Required. GitHub access token base - URL. + :param git_hub_access_token_base_url: GitHub access token base URL. :type git_hub_access_token_base_url: str """ @@ -37,8 +34,8 @@ class GitHubAccessTokenRequest(Model): 'git_hub_access_token_base_url': {'key': 'gitHubAccessTokenBaseUrl', 'type': 'str'}, } - def __init__(self, **kwargs): - super(GitHubAccessTokenRequest, self).__init__(**kwargs) - self.git_hub_access_code = kwargs.get('git_hub_access_code', None) - self.git_hub_client_id = kwargs.get('git_hub_client_id', None) - self.git_hub_access_token_base_url = kwargs.get('git_hub_access_token_base_url', None) + def __init__(self, git_hub_access_code, git_hub_access_token_base_url, git_hub_client_id=None): + super(GitHubAccessTokenRequest, self).__init__() + self.git_hub_access_code = git_hub_access_code + self.git_hub_client_id = git_hub_client_id + self.git_hub_access_token_base_url = git_hub_access_token_base_url diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request_py3.py deleted file mode 100644 index 7961e1bc33ed..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request_py3.py +++ /dev/null @@ -1,44 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class GitHubAccessTokenRequest(Model): - """Get GitHub access token request definition. - - All required parameters must be populated in order to send to Azure. - - :param git_hub_access_code: Required. GitHub access code. - :type git_hub_access_code: str - :param git_hub_client_id: GitHub application client ID. - :type git_hub_client_id: str - :param git_hub_access_token_base_url: Required. GitHub access token base - URL. 
- :type git_hub_access_token_base_url: str - """ - - _validation = { - 'git_hub_access_code': {'required': True}, - 'git_hub_access_token_base_url': {'required': True}, - } - - _attribute_map = { - 'git_hub_access_code': {'key': 'gitHubAccessCode', 'type': 'str'}, - 'git_hub_client_id': {'key': 'gitHubClientId', 'type': 'str'}, - 'git_hub_access_token_base_url': {'key': 'gitHubAccessTokenBaseUrl', 'type': 'str'}, - } - - def __init__(self, *, git_hub_access_code: str, git_hub_access_token_base_url: str, git_hub_client_id: str=None, **kwargs) -> None: - super(GitHubAccessTokenRequest, self).__init__(**kwargs) - self.git_hub_access_code = git_hub_access_code - self.git_hub_client_id = git_hub_client_id - self.git_hub_access_token_base_url = git_hub_access_token_base_url diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response.py index 4a4afce8f0f0..e1636ec59bb0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response.py @@ -23,6 +23,6 @@ class GitHubAccessTokenResponse(Model): 'git_hub_access_token': {'key': 'gitHubAccessToken', 'type': 'str'}, } - def __init__(self, **kwargs): - super(GitHubAccessTokenResponse, self).__init__(**kwargs) - self.git_hub_access_token = kwargs.get('git_hub_access_token', None) + def __init__(self, git_hub_access_token=None): + super(GitHubAccessTokenResponse, self).__init__() + self.git_hub_access_token = git_hub_access_token diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response_py3.py deleted file mode 100644 index 4f28ade6e914..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response_py3.py +++ /dev/null @@ -1,28 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class GitHubAccessTokenResponse(Model): - """Get GitHub access token response definition. - - :param git_hub_access_token: GitHub access token. 
- :type git_hub_access_token: str - """ - - _attribute_map = { - 'git_hub_access_token': {'key': 'gitHubAccessToken', 'type': 'str'}, - } - - def __init__(self, *, git_hub_access_token: str=None, **kwargs) -> None: - super(GitHubAccessTokenResponse, self).__init__(**kwargs) - self.git_hub_access_token = git_hub_access_token diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service.py index c460dd95c380..4b3e55b5c3b2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service.py @@ -15,8 +15,6 @@ class GoogleAdWordsLinkedService(LinkedService): """Google AdWords service linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,18 +29,17 @@ class GoogleAdWordsLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param client_customer_id: Required. The Client customer ID of the AdWords - account that you want to fetch report data for. + :param client_customer_id: The Client customer ID of the AdWords account + that you want to fetch report data for. :type client_customer_id: object - :param developer_token: Required. The developer token associated with the - manager account that you use to grant access to the AdWords API. + :param developer_token: The developer token associated with the manager + account that you use to grant access to the AdWords API. :type developer_token: ~azure.mgmt.datafactory.models.SecretBase - :param authentication_type: Required. The OAuth 2.0 authentication - mechanism used for authentication. ServiceAuthentication can only be used - on self-hosted IR. Possible values include: 'ServiceAuthentication', - 'UserAuthentication' + :param authentication_type: The OAuth 2.0 authentication mechanism used + for authentication. ServiceAuthentication can only be used on self-hosted + IR. 
Possible values include: 'ServiceAuthentication', 'UserAuthentication' :type authentication_type: str or ~azure.mgmt.datafactory.models.GoogleAdWordsAuthenticationType :param refresh_token: The refresh token obtained from Google for @@ -103,17 +100,17 @@ class GoogleAdWordsLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(GoogleAdWordsLinkedService, self).__init__(**kwargs) - self.client_customer_id = kwargs.get('client_customer_id', None) - self.developer_token = kwargs.get('developer_token', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.refresh_token = kwargs.get('refresh_token', None) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.email = kwargs.get('email', None) - self.key_file_path = kwargs.get('key_file_path', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, client_customer_id, developer_token, authentication_type, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, refresh_token=None, client_id=None, client_secret=None, email=None, key_file_path=None, trusted_cert_path=None, use_system_trust_store=None, encrypted_credential=None): + super(GoogleAdWordsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.client_customer_id = client_customer_id + self.developer_token = developer_token + self.authentication_type = authentication_type + self.refresh_token = refresh_token + self.client_id = client_id + self.client_secret = client_secret + self.email = email + self.key_file_path = key_file_path + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.encrypted_credential = encrypted_credential self.type = 'GoogleAdWords' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service_py3.py deleted file mode 100644 index dfb3bc07e69f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service_py3.py +++ /dev/null @@ -1,119 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class GoogleAdWordsLinkedService(LinkedService): - """Google AdWords service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. 
- :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param client_customer_id: Required. The Client customer ID of the AdWords - account that you want to fetch report data for. - :type client_customer_id: object - :param developer_token: Required. The developer token associated with the - manager account that you use to grant access to the AdWords API. - :type developer_token: ~azure.mgmt.datafactory.models.SecretBase - :param authentication_type: Required. The OAuth 2.0 authentication - mechanism used for authentication. ServiceAuthentication can only be used - on self-hosted IR. Possible values include: 'ServiceAuthentication', - 'UserAuthentication' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.GoogleAdWordsAuthenticationType - :param refresh_token: The refresh token obtained from Google for - authorizing access to AdWords for UserAuthentication. - :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase - :param client_id: The client id of the google application used to acquire - the refresh token. - :type client_id: ~azure.mgmt.datafactory.models.SecretBase - :param client_secret: The client secret of the google application used to - acquire the refresh token. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param email: The service account email ID that is used for - ServiceAuthentication and can only be used on self-hosted IR. - :type email: object - :param key_file_path: The full path to the .p12 key file that is used to - authenticate the service account email address and can only be used on - self-hosted IR. - :type key_file_path: object - :param trusted_cert_path: The full path of the .pem file containing - trusted CA certificates for verifying the server when connecting over SSL. - This property can only be set when using SSL on self-hosted IR. The - default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate - from the system trust store or from a specified PEM file. The default - value is false. - :type use_system_trust_store: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'client_customer_id': {'required': True}, - 'developer_token': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'client_customer_id': {'key': 'typeProperties.clientCustomerID', 'type': 'object'}, - 'developer_token': {'key': 'typeProperties.developerToken', 'type': 'SecretBase'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'SecretBase'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'email': {'key': 'typeProperties.email', 'type': 'object'}, - 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, client_customer_id, developer_token, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, refresh_token=None, client_id=None, client_secret=None, email=None, key_file_path=None, trusted_cert_path=None, use_system_trust_store=None, encrypted_credential=None, **kwargs) -> None: - super(GoogleAdWordsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.client_customer_id = client_customer_id - self.developer_token = developer_token - self.authentication_type = authentication_type - self.refresh_token = refresh_token - self.client_id = client_id - self.client_secret = client_secret - self.email = email - self.key_file_path = key_file_path - self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.encrypted_credential = encrypted_credential - self.type = 'GoogleAdWords' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset.py index 92b901b774ed..74ef1a12fa04 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset.py @@ -15,8 +15,6 @@ class GoogleAdWordsObjectDataset(Dataset): """Google AdWords service dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class GoogleAdWordsObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. 
:type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class GoogleAdWordsObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -66,7 +64,7 @@ class GoogleAdWordsObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(GoogleAdWordsObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): + super(GoogleAdWordsObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name self.type = 'GoogleAdWordsObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset_py3.py deleted file mode 100644 index e1272f978b8e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class GoogleAdWordsObjectDataset(Dataset): - """Google AdWords service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. 
- :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(GoogleAdWordsObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'GoogleAdWordsObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source.py index 8699057abe09..e9b8fbf213ed 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source.py @@ -15,8 +15,6 @@ class GoogleAdWordsSource(CopySource): """A copy activity Google AdWords service source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class GoogleAdWordsSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
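
The hunk that follows converts GoogleAdWordsSource to the positional constructor style. A minimal usage sketch against the new signature — the report query and connection count are illustrative assumptions, not values taken from this patch:

    from azure.mgmt.datafactory.models import GoogleAdWordsSource

    # Every parameter is optional; 'query' carries the AdWords report query.
    source = GoogleAdWordsSource(
        query="SELECT CampaignId, Impressions FROM CAMPAIGN_PERFORMANCE_REPORT",
        max_concurrent_connections=2,  # integer (or Expression), per the docstring
    )
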
@@ -51,7 +49,7 @@ class GoogleAdWordsSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(GoogleAdWordsSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'GoogleAdWordsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source_py3.py deleted file mode 100644 index 995d5324670b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class GoogleAdWordsSource(CopySource): - """A copy activity Google AdWords service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'GoogleAdWordsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py index 45a535b95d43..c1cbbc8f7c9e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py @@ -15,8 +15,6 @@ class GoogleBigQueryLinkedService(LinkedService): """Google BigQuery service linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,9 +29,9 @@ class GoogleBigQueryLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param project: Required. The default BigQuery project to query against. + :param project: The default BigQuery project to query against. :type project: object :param additional_projects: A comma-separated list of public BigQuery projects to access. @@ -43,10 +41,9 @@ class GoogleBigQueryLinkedService(LinkedService): that combine BigQuery data with data from Google Drive. The default value is false. :type request_google_drive_scope: object - :param authentication_type: Required. The OAuth 2.0 authentication - mechanism used for authentication. ServiceAuthentication can only be used - on self-hosted IR. Possible values include: 'ServiceAuthentication', - 'UserAuthentication' + :param authentication_type: The OAuth 2.0 authentication mechanism used + for authentication. ServiceAuthentication can only be used on self-hosted + IR. 
Possible values include: 'ServiceAuthentication', 'UserAuthentication' :type authentication_type: str or ~azure.mgmt.datafactory.models.GoogleBigQueryAuthenticationType :param refresh_token: The refresh token obtained from Google for @@ -107,18 +104,18 @@ class GoogleBigQueryLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(GoogleBigQueryLinkedService, self).__init__(**kwargs) - self.project = kwargs.get('project', None) - self.additional_projects = kwargs.get('additional_projects', None) - self.request_google_drive_scope = kwargs.get('request_google_drive_scope', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.refresh_token = kwargs.get('refresh_token', None) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.email = kwargs.get('email', None) - self.key_file_path = kwargs.get('key_file_path', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, project, authentication_type, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, additional_projects=None, request_google_drive_scope=None, refresh_token=None, client_id=None, client_secret=None, email=None, key_file_path=None, trusted_cert_path=None, use_system_trust_store=None, encrypted_credential=None): + super(GoogleBigQueryLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.project = project + self.additional_projects = additional_projects + self.request_google_drive_scope = request_google_drive_scope + self.authentication_type = authentication_type + self.refresh_token = refresh_token + self.client_id = client_id + self.client_secret = client_secret + self.email = email + self.key_file_path = key_file_path + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.encrypted_credential = encrypted_credential self.type = 'GoogleBigQuery' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service_py3.py deleted file mode 100644 index 146674a85531..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service_py3.py +++ /dev/null @@ -1,124 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class GoogleBigQueryLinkedService(LinkedService): - """Google BigQuery service linked service. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param project: Required. The default BigQuery project to query against. - :type project: object - :param additional_projects: A comma-separated list of public BigQuery - projects to access. - :type additional_projects: object - :param request_google_drive_scope: Whether to request access to Google - Drive. Allowing Google Drive access enables support for federated tables - that combine BigQuery data with data from Google Drive. The default value - is false. - :type request_google_drive_scope: object - :param authentication_type: Required. The OAuth 2.0 authentication - mechanism used for authentication. ServiceAuthentication can only be used - on self-hosted IR. Possible values include: 'ServiceAuthentication', - 'UserAuthentication' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.GoogleBigQueryAuthenticationType - :param refresh_token: The refresh token obtained from Google for - authorizing access to BigQuery for UserAuthentication. - :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase - :param client_id: The client id of the google application used to acquire - the refresh token. - :type client_id: ~azure.mgmt.datafactory.models.SecretBase - :param client_secret: The client secret of the google application used to - acquire the refresh token. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param email: The service account email ID that is used for - ServiceAuthentication and can only be used on self-hosted IR. - :type email: object - :param key_file_path: The full path to the .p12 key file that is used to - authenticate the service account email address and can only be used on - self-hosted IR. - :type key_file_path: object - :param trusted_cert_path: The full path of the .pem file containing - trusted CA certificates for verifying the server when connecting over SSL. - This property can only be set when using SSL on self-hosted IR. The - default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate - from the system trust store or from a specified PEM file. The default - value is false. - :type use_system_trust_store: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'project': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'project': {'key': 'typeProperties.project', 'type': 'object'}, - 'additional_projects': {'key': 'typeProperties.additionalProjects', 'type': 'object'}, - 'request_google_drive_scope': {'key': 'typeProperties.requestGoogleDriveScope', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'SecretBase'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'email': {'key': 'typeProperties.email', 'type': 'object'}, - 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, project, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, additional_projects=None, request_google_drive_scope=None, refresh_token=None, client_id=None, client_secret=None, email=None, key_file_path=None, trusted_cert_path=None, use_system_trust_store=None, encrypted_credential=None, **kwargs) -> None: - super(GoogleBigQueryLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.project = project - self.additional_projects = additional_projects - self.request_google_drive_scope = request_google_drive_scope - self.authentication_type = authentication_type - self.refresh_token = refresh_token - self.client_id = client_id - self.client_secret = client_secret - self.email = email - self.key_file_path = key_file_path - self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.encrypted_credential = encrypted_credential - self.type = 'GoogleBigQuery' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset.py index 920489742bbf..d891958c67ab 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset.py @@ -15,8 +15,6 @@ class GoogleBigQueryObjectDataset(Dataset): """Google BigQuery service dataset. - All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class GoogleBigQueryObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class GoogleBigQueryObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using database + table properties instead. @@ -74,9 +72,9 @@ class GoogleBigQueryObjectDataset(Dataset): 'dataset': {'key': 'typeProperties.dataset', 'type': 'object'}, } - def __init__(self, **kwargs): - super(GoogleBigQueryObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.dataset = kwargs.get('dataset', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, dataset=None): + super(GoogleBigQueryObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name + self.table = table + self.dataset = dataset self.type = 'GoogleBigQueryObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset_py3.py deleted file mode 100644 index 205819f8eeef..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class GoogleBigQueryObjectDataset(Dataset): - """Google BigQuery service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. 
- :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - database + table properties instead. - :type table_name: object - :param table: The table name of the Google BigQuery. Type: string (or - Expression with resultType string). - :type table: object - :param dataset: The database name of the Google BigQuery. Type: string (or - Expression with resultType string). - :type dataset: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, dataset=None, **kwargs) -> None: - super(GoogleBigQueryObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.table = table - self.dataset = dataset - self.type = 'GoogleBigQueryObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source.py index 3a28d2563a8b..0322e3b64bea 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source.py @@ -15,8 +15,6 @@ class GoogleBigQuerySource(CopySource): """A copy activity Google BigQuery service source. - All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class GoogleBigQuerySource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). @@ -51,7 +49,7 @@ class GoogleBigQuerySource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(GoogleBigQuerySource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'GoogleBigQuerySource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source_py3.py deleted file mode 100644 index 49364b4d0e3f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class GoogleBigQuerySource(CopySource): - """A copy activity Google BigQuery service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'GoogleBigQuerySource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service.py index 57913f779ca1..24b39e36cc26 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service.py @@ -15,8 +15,6 @@ class GreenplumLinkedService(LinkedService): """Greenplum Database linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class GreenplumLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. 
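
The next hunk gives GreenplumLinkedService the same all-optional positional signature. A minimal construction sketch — the connection-string values are illustrative assumptions; per the docstring above, connection_string accepts a string, SecureString or AzureKeyVaultSecretReference:

    from azure.mgmt.datafactory.models import GreenplumLinkedService, SecureString

    greenplum_ls = GreenplumLinkedService(
        connection_string=SecureString(
            value="HOST=gp.example.com;PORT=5432;DB=sales;UID=loader"
        ),
    )
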
@@ -61,9 +59,9 @@ class GreenplumLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(GreenplumLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.pwd = kwargs.get('pwd', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None): + super(GreenplumLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.connection_string = connection_string + self.pwd = pwd + self.encrypted_credential = encrypted_credential self.type = 'Greenplum' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service_py3.py deleted file mode 100644 index bd707a5e85c9..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service_py3.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class GreenplumLinkedService(LinkedService): - """Greenplum Database linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection - string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: - super(GreenplumLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.pwd = pwd - self.encrypted_credential = encrypted_credential - self.type = 'Greenplum' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source.py index 086f12419f4a..06ad92aab216 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source.py @@ -15,8 +15,6 @@ class GreenplumSource(CopySource): """A copy activity Greenplum Database source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class GreenplumSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
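
GreenplumSource receives the same treatment in the hunk below. A sketch of wiring it into a copy activity — the activity name, query, and BlobSink pairing are illustrative assumptions, on the premise that CopyActivity follows the positional style used throughout this patch:

    from azure.mgmt.datafactory.models import (
        BlobSink, CopyActivity, GreenplumSource)

    # CopyActivity requires name, source and sink; inputs/outputs stay optional.
    activity = CopyActivity(
        name="CopyFromGreenplum",
        source=GreenplumSource(query="SELECT * FROM public.orders"),
        sink=BlobSink(),
    )
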
@@ -51,7 +49,7 @@ class GreenplumSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(GreenplumSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'GreenplumSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source_py3.py deleted file mode 100644 index 8b789deb43da..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class GreenplumSource(CopySource): - """A copy activity Greenplum Database source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'GreenplumSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset.py index eb0ea08ee544..500671398856 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset.py @@ -15,8 +15,6 @@ class GreenplumTableDataset(Dataset): """Greenplum Database dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class GreenplumTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class GreenplumTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. 
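For dataset models such as GreenplumTableDataset, the constructor hunk just below additionally moves the required linked_service_name to the front of the signature as a positional parameter. A hedged sketch of the resulting call, assuming LinkedServiceReference keeps its reference_name parameter (the reference and table names are placeholders):

    from azure.mgmt.datafactory.models import (
        GreenplumTableDataset,
        LinkedServiceReference,
    )

    dataset = GreenplumTableDataset(
        LinkedServiceReference(reference_name='GreenplumLS'),  # placeholder reference
        table='fact_sales',                       # maps to typeProperties.table
        greenplum_table_dataset_schema='public',  # maps to typeProperties.schema
    )
    assert dataset.type == 'GreenplumTable'
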
@@ -74,9 +72,9 @@ class GreenplumTableDataset(Dataset): 'greenplum_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, **kwargs): - super(GreenplumTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.greenplum_table_dataset_schema = kwargs.get('greenplum_table_dataset_schema', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, greenplum_table_dataset_schema=None): + super(GreenplumTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name + self.table = table + self.greenplum_table_dataset_schema = greenplum_table_dataset_schema self.type = 'GreenplumTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset_py3.py deleted file mode 100644 index 7f37fff9108d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class GreenplumTableDataset(Dataset): - """Greenplum Database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of Greenplum. 
Type: string (or Expression - with resultType string). - :type table: object - :param greenplum_table_dataset_schema: The schema name of Greenplum. Type: - string (or Expression with resultType string). - :type greenplum_table_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'greenplum_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, greenplum_table_dataset_schema=None, **kwargs) -> None: - super(GreenplumTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.table = table - self.greenplum_table_dataset_schema = greenplum_table_dataset_schema - self.type = 'GreenplumTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service.py index b6affd5caa0d..acf0d8cbd35d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service.py @@ -15,8 +15,6 @@ class HBaseLinkedService(LinkedService): """HBase server linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,10 +29,10 @@ class HBaseLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param host: Required. The IP address or host name of the HBase server. - (i.e. 192.168.222.160) + :param host: The IP address or host name of the HBase server. (i.e. + 192.168.222.160) :type host: object :param port: The TCP port that the HBase instance uses to listen for client connections. The default value is 9090. @@ -42,9 +40,8 @@ class HBaseLinkedService(LinkedService): :param http_path: The partial URL corresponding to the HBase server. (i.e. /gateway/sandbox/hbase/version) :type http_path: object - :param authentication_type: Required. The authentication mechanism to use - to connect to the HBase server. 
Possible values include: 'Anonymous', - 'Basic' + :param authentication_type: The authentication mechanism to use to connect + to the HBase server. Possible values include: 'Anonymous', 'Basic' :type authentication_type: str or ~azure.mgmt.datafactory.models.HBaseAuthenticationType :param username: The user name used to connect to the HBase instance. @@ -98,17 +95,17 @@ class HBaseLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(HBaseLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.port = kwargs.get('port', None) - self.http_path = kwargs.get('http_path', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, host, authentication_type, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, port=None, http_path=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None): + super(HBaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.host = host + self.port = port + self.http_path = http_path + self.authentication_type = authentication_type + self.username = username + self.password = password + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.encrypted_credential = encrypted_credential self.type = 'HBase' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service_py3.py deleted file mode 100644 index a8823e2e8937..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service_py3.py +++ /dev/null @@ -1,114 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class HBaseLinkedService(LinkedService): - """HBase server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. 
- :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The IP address or host name of the HBase server. - (i.e. 192.168.222.160) - :type host: object - :param port: The TCP port that the HBase instance uses to listen for - client connections. The default value is 9090. - :type port: object - :param http_path: The partial URL corresponding to the HBase server. (i.e. - /gateway/sandbox/hbase/version) - :type http_path: object - :param authentication_type: Required. The authentication mechanism to use - to connect to the HBase server. Possible values include: 'Anonymous', - 'Basic' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.HBaseAuthenticationType - :param username: The user name used to connect to the HBase instance. - :type username: object - :param password: The password corresponding to the user name. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are - encrypted using SSL. The default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing - trusted CA certificates for verifying the server when connecting over SSL. - This property can only be set when using SSL on self-hosted IR. The - default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param allow_host_name_cn_mismatch: Specifies whether to require a - CA-issued SSL certificate name to match the host name of the server when - connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow - self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, http_path=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: - super(HBaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.port = port - self.http_path = http_path - self.authentication_type = authentication_type - self.username = username - self.password = password - self.enable_ssl = enable_ssl - self.trusted_cert_path = trusted_cert_path - self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch - self.allow_self_signed_server_cert = allow_self_signed_server_cert - self.encrypted_credential = encrypted_credential - self.type = 'HBase' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset.py index 5de32bcb6871..acc6ca54817c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset.py @@ -15,8 +15,6 @@ class HBaseObjectDataset(Dataset): """HBase server dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class HBaseObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. 
:type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class HBaseObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -66,7 +64,7 @@ class HBaseObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(HBaseObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): + super(HBaseObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name self.type = 'HBaseObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset_py3.py deleted file mode 100644 index 27fc0d1514ea..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class HBaseObjectDataset(Dataset): - """HBase server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. 
Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(HBaseObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'HBaseObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source.py index eb6e3f1789bb..e1fcefaac0a7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source.py @@ -15,8 +15,6 @@ class HBaseSource(CopySource): """A copy activity HBase server source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class HBaseSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
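The HBase models above follow the same convention, with the two required properties, host and authentication_type, leading the new signature as positional parameters. A sketch under that assumption, reusing the example address and default port from the docstring:

    from azure.mgmt.datafactory.models import HBaseLinkedService

    hbase_ls = HBaseLinkedService(
        '192.168.222.160',  # host (docstring example address)
        'Basic',            # authentication_type
        port=9090,          # default port per the docstring
        enable_ssl=False,
    )
    assert hbase_ls.type == 'HBase'
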
@@ -51,7 +49,7 @@ class HBaseSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(HBaseSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'HBaseSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source_py3.py deleted file mode 100644 index b2680e95c212..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class HBaseSource(CopySource): - """A copy activity HBase server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'HBaseSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity.py index 4110b0f8b7de..43053ab05290 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity.py @@ -15,12 +15,10 @@ class HDInsightHiveActivity(ExecutionActivity): """HDInsight Hive activity type. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Activity name. :type name: str :param description: Activity description. :type description: str @@ -28,7 +26,7 @@ class HDInsightHiveActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param linked_service_name: Linked service reference. 
:type linked_service_name: @@ -83,14 +81,14 @@ class HDInsightHiveActivity(ExecutionActivity): 'query_timeout': {'key': 'typeProperties.queryTimeout', 'type': 'int'}, } - def __init__(self, **kwargs): - super(HDInsightHiveActivity, self).__init__(**kwargs) - self.storage_linked_services = kwargs.get('storage_linked_services', None) - self.arguments = kwargs.get('arguments', None) - self.get_debug_info = kwargs.get('get_debug_info', None) - self.script_path = kwargs.get('script_path', None) - self.script_linked_service = kwargs.get('script_linked_service', None) - self.defines = kwargs.get('defines', None) - self.variables = kwargs.get('variables', None) - self.query_timeout = kwargs.get('query_timeout', None) + def __init__(self, name, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, script_path=None, script_linked_service=None, defines=None, variables=None, query_timeout=None): + super(HDInsightHiveActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) + self.storage_linked_services = storage_linked_services + self.arguments = arguments + self.get_debug_info = get_debug_info + self.script_path = script_path + self.script_linked_service = script_linked_service + self.defines = defines + self.variables = variables + self.query_timeout = query_timeout self.type = 'HDInsightHive' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity_py3.py deleted file mode 100644 index f8a5441fe767..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity_py3.py +++ /dev/null @@ -1,96 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class HDInsightHiveActivity(ExecutionActivity): - """HDInsight Hive activity type. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. 
- :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: - list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: 'None', - 'Always', 'Failure' - :type get_debug_info: str or - ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param script_path: Script path. Type: string (or Expression with - resultType string). - :type script_path: object - :param script_linked_service: Script linked service reference. - :type script_linked_service: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param defines: Allows user to specify defines for Hive job request. - :type defines: dict[str, object] - :param variables: User specified arguments under hivevar namespace. - :type variables: list[object] - :param query_timeout: Query timeout value (in minutes). Effective when - the HDInsight cluster is with ESP (Enterprise Security Package) - :type query_timeout: int - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, - 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, - 'variables': {'key': 'typeProperties.variables', 'type': '[object]'}, - 'query_timeout': {'key': 'typeProperties.queryTimeout', 'type': 'int'}, - } - - def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, script_path=None, script_linked_service=None, defines=None, variables=None, query_timeout: int=None, **kwargs) -> None: - super(HDInsightHiveActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.storage_linked_services = storage_linked_services - self.arguments = arguments - self.get_debug_info = get_debug_info - self.script_path = script_path - self.script_linked_service = script_linked_service - self.defines = defines - self.variables = variables - self.query_timeout = query_timeout - self.type = 'HDInsightHive' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py index 810525342d82..673eee3ff5ea 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py @@ -15,8 +15,6 @@ class HDInsightLinkedService(LinkedService): """HDInsight linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,10 +29,10 @@ class HDInsightLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param cluster_uri: Required. HDInsight cluster URI. Type: string (or - Expression with resultType string). + :param cluster_uri: HDInsight cluster URI. Type: string (or Expression + with resultType string). :type cluster_uri: object :param user_name: HDInsight cluster user name. Type: string (or Expression with resultType string). @@ -83,14 +81,14 @@ class HDInsightLinkedService(LinkedService): 'file_system': {'key': 'typeProperties.fileSystem', 'type': 'object'}, } - def __init__(self, **kwargs): - super(HDInsightLinkedService, self).__init__(**kwargs) - self.cluster_uri = kwargs.get('cluster_uri', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.linked_service_name = kwargs.get('linked_service_name', None) - self.hcatalog_linked_service_name = kwargs.get('hcatalog_linked_service_name', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.is_esp_enabled = kwargs.get('is_esp_enabled', None) - self.file_system = kwargs.get('file_system', None) + def __init__(self, cluster_uri, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, user_name=None, password=None, linked_service_name=None, hcatalog_linked_service_name=None, encrypted_credential=None, is_esp_enabled=None, file_system=None): + super(HDInsightLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.cluster_uri = cluster_uri + self.user_name = user_name + self.password = password + self.linked_service_name = linked_service_name + self.hcatalog_linked_service_name = hcatalog_linked_service_name + self.encrypted_credential = encrypted_credential + self.is_esp_enabled = is_esp_enabled + self.file_system = file_system self.type = 'HDInsight' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py deleted file mode 100644 index 5c384f7d6288..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py +++ /dev/null @@ -1,96 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
-# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class HDInsightLinkedService(LinkedService): - """HDInsight linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param cluster_uri: Required. HDInsight cluster URI. Type: string (or - Expression with resultType string). - :type cluster_uri: object - :param user_name: HDInsight cluster user name. Type: string (or Expression - with resultType string). - :type user_name: object - :param password: HDInsight cluster password. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param linked_service_name: The Azure Storage linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param hcatalog_linked_service_name: A reference to the Azure SQL linked - service that points to the HCatalog database. - :type hcatalog_linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - :param is_esp_enabled: Specify if the HDInsight is created with ESP - (Enterprise Security Package). Type: Boolean. - :type is_esp_enabled: object - :param file_system: Specify the FileSystem if the main storage for the - HDInsight is ADLS Gen2. Type: string (or Expression with resultType - string). 
- :type file_system: object - """ - - _validation = { - 'type': {'required': True}, - 'cluster_uri': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'cluster_uri': {'key': 'typeProperties.clusterUri', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, - 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'is_esp_enabled': {'key': 'typeProperties.isEspEnabled', 'type': 'object'}, - 'file_system': {'key': 'typeProperties.fileSystem', 'type': 'object'}, - } - - def __init__(self, *, cluster_uri, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_name=None, password=None, linked_service_name=None, hcatalog_linked_service_name=None, encrypted_credential=None, is_esp_enabled=None, file_system=None, **kwargs) -> None: - super(HDInsightLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.cluster_uri = cluster_uri - self.user_name = user_name - self.password = password - self.linked_service_name = linked_service_name - self.hcatalog_linked_service_name = hcatalog_linked_service_name - self.encrypted_credential = encrypted_credential - self.is_esp_enabled = is_esp_enabled - self.file_system = file_system - self.type = 'HDInsight' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity.py index 20655843e1db..7b604ae76697 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity.py @@ -15,12 +15,10 @@ class HDInsightMapReduceActivity(ExecutionActivity): """HDInsight MapReduce activity type. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Activity name. :type name: str :param description: Activity description. :type description: str @@ -28,7 +26,7 @@ class HDInsightMapReduceActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param linked_service_name: Linked service reference. 
:type linked_service_name: @@ -44,10 +42,10 @@ class HDInsightMapReduceActivity(ExecutionActivity): 'Always', 'Failure' :type get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param class_name: Required. Class name. Type: string (or Expression with - resultType string). + :param class_name: Class name. Type: string (or Expression with resultType + string). :type class_name: object - :param jar_file_path: Required. Jar path. Type: string (or Expression with + :param jar_file_path: Jar path. Type: string (or Expression with resultType string). :type jar_file_path: object :param jar_linked_service: Jar linked service reference. @@ -86,14 +84,14 @@ class HDInsightMapReduceActivity(ExecutionActivity): 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, } - def __init__(self, **kwargs): - super(HDInsightMapReduceActivity, self).__init__(**kwargs) - self.storage_linked_services = kwargs.get('storage_linked_services', None) - self.arguments = kwargs.get('arguments', None) - self.get_debug_info = kwargs.get('get_debug_info', None) - self.class_name = kwargs.get('class_name', None) - self.jar_file_path = kwargs.get('jar_file_path', None) - self.jar_linked_service = kwargs.get('jar_linked_service', None) - self.jar_libs = kwargs.get('jar_libs', None) - self.defines = kwargs.get('defines', None) + def __init__(self, name, class_name, jar_file_path, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, jar_linked_service=None, jar_libs=None, defines=None): + super(HDInsightMapReduceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) + self.storage_linked_services = storage_linked_services + self.arguments = arguments + self.get_debug_info = get_debug_info + self.class_name = class_name + self.jar_file_path = jar_file_path + self.jar_linked_service = jar_linked_service + self.jar_libs = jar_libs + self.defines = defines self.type = 'HDInsightMapReduce' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity_py3.py deleted file mode 100644 index dffa9f119069..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity_py3.py +++ /dev/null @@ -1,99 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class HDInsightMapReduceActivity(ExecutionActivity): - """HDInsight MapReduce activity type. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. 
Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: - list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: 'None', - 'Always', 'Failure' - :type get_debug_info: str or - ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param class_name: Required. Class name. Type: string (or Expression with - resultType string). - :type class_name: object - :param jar_file_path: Required. Jar path. Type: string (or Expression with - resultType string). - :type jar_file_path: object - :param jar_linked_service: Jar linked service reference. - :type jar_linked_service: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param jar_libs: Jar libs. - :type jar_libs: list[object] - :param defines: Allows user to specify defines for the MapReduce job - request. - :type defines: dict[str, object] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'class_name': {'required': True}, - 'jar_file_path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'class_name': {'key': 'typeProperties.className', 'type': 'object'}, - 'jar_file_path': {'key': 'typeProperties.jarFilePath', 'type': 'object'}, - 'jar_linked_service': {'key': 'typeProperties.jarLinkedService', 'type': 'LinkedServiceReference'}, - 'jar_libs': {'key': 'typeProperties.jarLibs', 'type': '[object]'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, - } - - def __init__(self, *, name: str, class_name, jar_file_path, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, jar_linked_service=None, jar_libs=None, defines=None, **kwargs) -> None: - super(HDInsightMapReduceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, 
linked_service_name=linked_service_name, policy=policy, **kwargs) - self.storage_linked_services = storage_linked_services - self.arguments = arguments - self.get_debug_info = get_debug_info - self.class_name = class_name - self.jar_file_path = jar_file_path - self.jar_linked_service = jar_linked_service - self.jar_libs = jar_libs - self.defines = defines - self.type = 'HDInsightMapReduce' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py index d386aac9d9aa..c9154860ec49 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py @@ -15,8 +15,6 @@ class HDInsightOnDemandLinkedService(LinkedService): """HDInsight ondemand linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,37 +29,37 @@ class HDInsightOnDemandLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param cluster_size: Required. Number of worker/data nodes in the cluster. + :param cluster_size: Number of worker/data nodes in the cluster. Suggestion value: 4. Type: string (or Expression with resultType string). :type cluster_size: object - :param time_to_live: Required. The allowed idle time for the on-demand - HDInsight cluster. Specifies how long the on-demand HDInsight cluster - stays alive after completion of an activity run if there are no other - active jobs in the cluster. The minimum value is 5 mins. Type: string (or - Expression with resultType string). + :param time_to_live: The allowed idle time for the on-demand HDInsight + cluster. Specifies how long the on-demand HDInsight cluster stays alive + after completion of an activity run if there are no other active jobs in + the cluster. The minimum value is 5 mins. Type: string (or Expression with + resultType string). :type time_to_live: object - :param version: Required. Version of the HDInsight cluster.  Type: string - (or Expression with resultType string). + :param version: Version of the HDInsight cluster.  Type: string (or + Expression with resultType string). :type version: object - :param linked_service_name: Required. Azure Storage linked service to be - used by the on-demand cluster for storing and processing data. + :param linked_service_name: Azure Storage linked service to be used by the + on-demand cluster for storing and processing data. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param host_subscription_id: Required. The customer’s subscription to host - the cluster. Type: string (or Expression with resultType string). + :param host_subscription_id: The customer’s subscription to host the + cluster. Type: string (or Expression with resultType string). :type host_subscription_id: object :param service_principal_id: The service principal id for the hostSubscriptionId. Type: string (or Expression with resultType string). 
:type service_principal_id: object :param service_principal_key: The key for the service principal id. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: Required. The Tenant id/name to which the service principal - belongs. Type: string (or Expression with resultType string). + :param tenant: The Tenant id/name to which the service principal belongs. + Type: string (or Expression with resultType string). :type tenant: object - :param cluster_resource_group: Required. The resource group where the - cluster belongs. Type: string (or Expression with resultType string). + :param cluster_resource_group: The resource group where the cluster + belongs. Type: string (or Expression with resultType string). :type cluster_resource_group: object :param cluster_name_prefix: The prefix of cluster name, postfix will be distinct with timestamp. Type: string (or Expression with resultType @@ -199,39 +197,39 @@ class HDInsightOnDemandLinkedService(LinkedService): 'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(HDInsightOnDemandLinkedService, self).__init__(**kwargs) - self.cluster_size = kwargs.get('cluster_size', None) - self.time_to_live = kwargs.get('time_to_live', None) - self.version = kwargs.get('version', None) - self.linked_service_name = kwargs.get('linked_service_name', None) - self.host_subscription_id = kwargs.get('host_subscription_id', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.cluster_resource_group = kwargs.get('cluster_resource_group', None) - self.cluster_name_prefix = kwargs.get('cluster_name_prefix', None) - self.cluster_user_name = kwargs.get('cluster_user_name', None) - self.cluster_password = kwargs.get('cluster_password', None) - self.cluster_ssh_user_name = kwargs.get('cluster_ssh_user_name', None) - self.cluster_ssh_password = kwargs.get('cluster_ssh_password', None) - self.additional_linked_service_names = kwargs.get('additional_linked_service_names', None) - self.hcatalog_linked_service_name = kwargs.get('hcatalog_linked_service_name', None) - self.cluster_type = kwargs.get('cluster_type', None) - self.spark_version = kwargs.get('spark_version', None) - self.core_configuration = kwargs.get('core_configuration', None) - self.h_base_configuration = kwargs.get('h_base_configuration', None) - self.hdfs_configuration = kwargs.get('hdfs_configuration', None) - self.hive_configuration = kwargs.get('hive_configuration', None) - self.map_reduce_configuration = kwargs.get('map_reduce_configuration', None) - self.oozie_configuration = kwargs.get('oozie_configuration', None) - self.storm_configuration = kwargs.get('storm_configuration', None) - self.yarn_configuration = kwargs.get('yarn_configuration', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.head_node_size = kwargs.get('head_node_size', None) - self.data_node_size = kwargs.get('data_node_size', None) - self.zookeeper_node_size = kwargs.get('zookeeper_node_size', None) - self.script_actions = kwargs.get('script_actions', None) - self.virtual_network_id = kwargs.get('virtual_network_id', None) - self.subnet_name = kwargs.get('subnet_name', None) + def __init__(self, cluster_size, time_to_live, version, linked_service_name, host_subscription_id, tenant, cluster_resource_group, additional_properties=None, connect_via=None, description=None, 
parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, cluster_name_prefix=None, cluster_user_name=None, cluster_password=None, cluster_ssh_user_name=None, cluster_ssh_password=None, additional_linked_service_names=None, hcatalog_linked_service_name=None, cluster_type=None, spark_version=None, core_configuration=None, h_base_configuration=None, hdfs_configuration=None, hive_configuration=None, map_reduce_configuration=None, oozie_configuration=None, storm_configuration=None, yarn_configuration=None, encrypted_credential=None, head_node_size=None, data_node_size=None, zookeeper_node_size=None, script_actions=None, virtual_network_id=None, subnet_name=None): + super(HDInsightOnDemandLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.cluster_size = cluster_size + self.time_to_live = time_to_live + self.version = version + self.linked_service_name = linked_service_name + self.host_subscription_id = host_subscription_id + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.cluster_resource_group = cluster_resource_group + self.cluster_name_prefix = cluster_name_prefix + self.cluster_user_name = cluster_user_name + self.cluster_password = cluster_password + self.cluster_ssh_user_name = cluster_ssh_user_name + self.cluster_ssh_password = cluster_ssh_password + self.additional_linked_service_names = additional_linked_service_names + self.hcatalog_linked_service_name = hcatalog_linked_service_name + self.cluster_type = cluster_type + self.spark_version = spark_version + self.core_configuration = core_configuration + self.h_base_configuration = h_base_configuration + self.hdfs_configuration = hdfs_configuration + self.hive_configuration = hive_configuration + self.map_reduce_configuration = map_reduce_configuration + self.oozie_configuration = oozie_configuration + self.storm_configuration = storm_configuration + self.yarn_configuration = yarn_configuration + self.encrypted_credential = encrypted_credential + self.head_node_size = head_node_size + self.data_node_size = data_node_size + self.zookeeper_node_size = zookeeper_node_size + self.script_actions = script_actions + self.virtual_network_id = virtual_network_id + self.subnet_name = subnet_name self.type = 'HDInsightOnDemand' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service_py3.py deleted file mode 100644 index 178585c9b51d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service_py3.py +++ /dev/null @@ -1,237 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class HDInsightOnDemandLinkedService(LinkedService): - """HDInsight ondemand linked service. 
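The regenerated constructor above surfaces the vNet properties this change adds for on-demand HDInsight. A minimal sketch of joining the transient cluster to an existing virtual network follows; every subscription ID, resource name, and secret below is an illustrative placeholder.

from azure.mgmt.datafactory.models import (
    HDInsightOnDemandLinkedService,
    LinkedServiceReference,
    SecureString,
)

on_demand_hdi = HDInsightOnDemandLinkedService(
    cluster_size=4,              # suggested worker/data node count
    time_to_live='00:15:00',     # idle time before teardown; minimum is 5 mins
    version='3.6',
    linked_service_name=LinkedServiceReference(reference_name='MyStorageLinkedService'),
    host_subscription_id='<subscription-id>',
    tenant='<tenant-id>',
    cluster_resource_group='<resource-group>',
    service_principal_id='<sp-app-id>',
    service_principal_key=SecureString(value='<sp-key>'),
    # New with this change: join the cluster to a vNet. Per the docstring,
    # subnet_name is required whenever virtual_network_id is specified.
    virtual_network_id='/subscriptions/<sub>/resourceGroups/<rg>/providers/'
                       'Microsoft.Network/virtualNetworks/<vnet>',
    subnet_name='<subnet>',
)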
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param cluster_size: Required. Number of worker/data nodes in the cluster. - Suggestion value: 4. Type: string (or Expression with resultType string). - :type cluster_size: object - :param time_to_live: Required. The allowed idle time for the on-demand - HDInsight cluster. Specifies how long the on-demand HDInsight cluster - stays alive after completion of an activity run if there are no other - active jobs in the cluster. The minimum value is 5 mins. Type: string (or - Expression with resultType string). - :type time_to_live: object - :param version: Required. Version of the HDInsight cluster.  Type: string - (or Expression with resultType string). - :type version: object - :param linked_service_name: Required. Azure Storage linked service to be - used by the on-demand cluster for storing and processing data. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param host_subscription_id: Required. The customer’s subscription to host - the cluster. Type: string (or Expression with resultType string). - :type host_subscription_id: object - :param service_principal_id: The service principal id for the - hostSubscriptionId. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key for the service principal id. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: Required. The Tenant id/name to which the service principal - belongs. Type: string (or Expression with resultType string). - :type tenant: object - :param cluster_resource_group: Required. The resource group where the - cluster belongs. Type: string (or Expression with resultType string). - :type cluster_resource_group: object - :param cluster_name_prefix: The prefix of cluster name, postfix will be - distinct with timestamp. Type: string (or Expression with resultType - string). - :type cluster_name_prefix: object - :param cluster_user_name: The username to access the cluster. Type: string - (or Expression with resultType string). - :type cluster_user_name: object - :param cluster_password: The password to access the cluster. - :type cluster_password: ~azure.mgmt.datafactory.models.SecretBase - :param cluster_ssh_user_name: The username to SSH remotely connect to - cluster’s node (for Linux). Type: string (or Expression with resultType - string). - :type cluster_ssh_user_name: object - :param cluster_ssh_password: The password to SSH remotely connect - cluster’s node (for Linux). 
- :type cluster_ssh_password: ~azure.mgmt.datafactory.models.SecretBase - :param additional_linked_service_names: Specifies additional storage - accounts for the HDInsight linked service so that the Data Factory service - can register them on your behalf. - :type additional_linked_service_names: - list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param hcatalog_linked_service_name: The name of Azure SQL linked service - that point to the HCatalog database. The on-demand HDInsight cluster is - created by using the Azure SQL database as the metastore. - :type hcatalog_linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param cluster_type: The cluster type. Type: string (or Expression with - resultType string). - :type cluster_type: object - :param spark_version: The version of spark if the cluster type is 'spark'. - Type: string (or Expression with resultType string). - :type spark_version: object - :param core_configuration: Specifies the core configuration parameters (as - in core-site.xml) for the HDInsight cluster to be created. - :type core_configuration: object - :param h_base_configuration: Specifies the HBase configuration parameters - (hbase-site.xml) for the HDInsight cluster. - :type h_base_configuration: object - :param hdfs_configuration: Specifies the HDFS configuration parameters - (hdfs-site.xml) for the HDInsight cluster. - :type hdfs_configuration: object - :param hive_configuration: Specifies the hive configuration parameters - (hive-site.xml) for the HDInsight cluster. - :type hive_configuration: object - :param map_reduce_configuration: Specifies the MapReduce configuration - parameters (mapred-site.xml) for the HDInsight cluster. - :type map_reduce_configuration: object - :param oozie_configuration: Specifies the Oozie configuration parameters - (oozie-site.xml) for the HDInsight cluster. - :type oozie_configuration: object - :param storm_configuration: Specifies the Storm configuration parameters - (storm-site.xml) for the HDInsight cluster. - :type storm_configuration: object - :param yarn_configuration: Specifies the Yarn configuration parameters - (yarn-site.xml) for the HDInsight cluster. - :type yarn_configuration: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - :param head_node_size: Specifies the size of the head node for the - HDInsight cluster. - :type head_node_size: object - :param data_node_size: Specifies the size of the data node for the - HDInsight cluster. - :type data_node_size: object - :param zookeeper_node_size: Specifies the size of the Zoo Keeper node for - the HDInsight cluster. - :type zookeeper_node_size: object - :param script_actions: Custom script actions to run on HDI ondemand - cluster once it's up. Please refer to - https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. - :type script_actions: list[~azure.mgmt.datafactory.models.ScriptAction] - :param virtual_network_id: The ARM resource ID for the vNet to which the - cluster should be joined after creation. Type: string (or Expression with - resultType string). - :type virtual_network_id: object - :param subnet_name: The ARM resource ID for the subnet in the vNet. 
If - virtualNetworkId was specified, then this property is required. Type: - string (or Expression with resultType string). - :type subnet_name: object - """ - - _validation = { - 'type': {'required': True}, - 'cluster_size': {'required': True}, - 'time_to_live': {'required': True}, - 'version': {'required': True}, - 'linked_service_name': {'required': True}, - 'host_subscription_id': {'required': True}, - 'tenant': {'required': True}, - 'cluster_resource_group': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'cluster_size': {'key': 'typeProperties.clusterSize', 'type': 'object'}, - 'time_to_live': {'key': 'typeProperties.timeToLive', 'type': 'object'}, - 'version': {'key': 'typeProperties.version', 'type': 'object'}, - 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, - 'host_subscription_id': {'key': 'typeProperties.hostSubscriptionId', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'cluster_resource_group': {'key': 'typeProperties.clusterResourceGroup', 'type': 'object'}, - 'cluster_name_prefix': {'key': 'typeProperties.clusterNamePrefix', 'type': 'object'}, - 'cluster_user_name': {'key': 'typeProperties.clusterUserName', 'type': 'object'}, - 'cluster_password': {'key': 'typeProperties.clusterPassword', 'type': 'SecretBase'}, - 'cluster_ssh_user_name': {'key': 'typeProperties.clusterSshUserName', 'type': 'object'}, - 'cluster_ssh_password': {'key': 'typeProperties.clusterSshPassword', 'type': 'SecretBase'}, - 'additional_linked_service_names': {'key': 'typeProperties.additionalLinkedServiceNames', 'type': '[LinkedServiceReference]'}, - 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, - 'cluster_type': {'key': 'typeProperties.clusterType', 'type': 'object'}, - 'spark_version': {'key': 'typeProperties.sparkVersion', 'type': 'object'}, - 'core_configuration': {'key': 'typeProperties.coreConfiguration', 'type': 'object'}, - 'h_base_configuration': {'key': 'typeProperties.hBaseConfiguration', 'type': 'object'}, - 'hdfs_configuration': {'key': 'typeProperties.hdfsConfiguration', 'type': 'object'}, - 'hive_configuration': {'key': 'typeProperties.hiveConfiguration', 'type': 'object'}, - 'map_reduce_configuration': {'key': 'typeProperties.mapReduceConfiguration', 'type': 'object'}, - 'oozie_configuration': {'key': 'typeProperties.oozieConfiguration', 'type': 'object'}, - 'storm_configuration': {'key': 'typeProperties.stormConfiguration', 'type': 'object'}, - 'yarn_configuration': {'key': 'typeProperties.yarnConfiguration', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'head_node_size': {'key': 'typeProperties.headNodeSize', 'type': 'object'}, - 'data_node_size': {'key': 'typeProperties.dataNodeSize', 'type': 'object'}, - 'zookeeper_node_size': {'key': 'typeProperties.zookeeperNodeSize', 'type': 'object'}, - 
'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'}, - 'virtual_network_id': {'key': 'typeProperties.virtualNetworkId', 'type': 'object'}, - 'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'}, - } - - def __init__(self, *, cluster_size, time_to_live, version, linked_service_name, host_subscription_id, tenant, cluster_resource_group, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, cluster_name_prefix=None, cluster_user_name=None, cluster_password=None, cluster_ssh_user_name=None, cluster_ssh_password=None, additional_linked_service_names=None, hcatalog_linked_service_name=None, cluster_type=None, spark_version=None, core_configuration=None, h_base_configuration=None, hdfs_configuration=None, hive_configuration=None, map_reduce_configuration=None, oozie_configuration=None, storm_configuration=None, yarn_configuration=None, encrypted_credential=None, head_node_size=None, data_node_size=None, zookeeper_node_size=None, script_actions=None, virtual_network_id=None, subnet_name=None, **kwargs) -> None: - super(HDInsightOnDemandLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.cluster_size = cluster_size - self.time_to_live = time_to_live - self.version = version - self.linked_service_name = linked_service_name - self.host_subscription_id = host_subscription_id - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.cluster_resource_group = cluster_resource_group - self.cluster_name_prefix = cluster_name_prefix - self.cluster_user_name = cluster_user_name - self.cluster_password = cluster_password - self.cluster_ssh_user_name = cluster_ssh_user_name - self.cluster_ssh_password = cluster_ssh_password - self.additional_linked_service_names = additional_linked_service_names - self.hcatalog_linked_service_name = hcatalog_linked_service_name - self.cluster_type = cluster_type - self.spark_version = spark_version - self.core_configuration = core_configuration - self.h_base_configuration = h_base_configuration - self.hdfs_configuration = hdfs_configuration - self.hive_configuration = hive_configuration - self.map_reduce_configuration = map_reduce_configuration - self.oozie_configuration = oozie_configuration - self.storm_configuration = storm_configuration - self.yarn_configuration = yarn_configuration - self.encrypted_credential = encrypted_credential - self.head_node_size = head_node_size - self.data_node_size = data_node_size - self.zookeeper_node_size = zookeeper_node_size - self.script_actions = script_actions - self.virtual_network_id = virtual_network_id - self.subnet_name = subnet_name - self.type = 'HDInsightOnDemand' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity.py index 61b939076db6..50fec511ad70 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity.py @@ -15,12 +15,10 @@ class HDInsightPigActivity(ExecutionActivity): """HDInsight Pig activity type. 
- All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Activity name. :type name: str :param description: Activity description. :type description: str @@ -28,7 +26,7 @@ class HDInsightPigActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: @@ -76,12 +74,12 @@ class HDInsightPigActivity(ExecutionActivity): 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, } - def __init__(self, **kwargs): - super(HDInsightPigActivity, self).__init__(**kwargs) - self.storage_linked_services = kwargs.get('storage_linked_services', None) - self.arguments = kwargs.get('arguments', None) - self.get_debug_info = kwargs.get('get_debug_info', None) - self.script_path = kwargs.get('script_path', None) - self.script_linked_service = kwargs.get('script_linked_service', None) - self.defines = kwargs.get('defines', None) + def __init__(self, name, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, script_path=None, script_linked_service=None, defines=None): + super(HDInsightPigActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) + self.storage_linked_services = storage_linked_services + self.arguments = arguments + self.get_debug_info = get_debug_info + self.script_path = script_path + self.script_linked_service = script_linked_service + self.defines = defines self.type = 'HDInsightPig' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity_py3.py deleted file mode 100644 index fb149df91f39..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity_py3.py +++ /dev/null @@ -1,87 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class HDInsightPigActivity(ExecutionActivity): - """HDInsight Pig activity type. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. 
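A minimal sketch of the HDInsightPigActivity constructor regenerated above; the activity name, script path, and linked service names are illustrative placeholders.

from azure.mgmt.datafactory.models import (
    HDInsightPigActivity,
    LinkedServiceReference,
)

pig_activity = HDInsightPigActivity(
    name='RunPigScript',
    linked_service_name=LinkedServiceReference(reference_name='MyHDInsightLinkedService'),
    script_path='adfsample/scripts/transform.pig',  # placeholder script location
    script_linked_service=LinkedServiceReference(reference_name='MyStorageLinkedService'),
    get_debug_info='Failure',                       # fetch debug logs only on failure
    defines={'inputDir': '/example/data/in'},       # defines for the Pig job request
)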
- :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: - list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: 'None', - 'Always', 'Failure' - :type get_debug_info: str or - ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param script_path: Script path. Type: string (or Expression with - resultType string). - :type script_path: object - :param script_linked_service: Script linked service reference. - :type script_linked_service: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param defines: Allows user to specify defines for Pig job request. - :type defines: dict[str, object] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, - 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, - } - - def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, script_path=None, script_linked_service=None, defines=None, **kwargs) -> None: - super(HDInsightPigActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.storage_linked_services = storage_linked_services - self.arguments = arguments - self.get_debug_info = get_debug_info - self.script_path = script_path - self.script_linked_service = script_linked_service - self.defines = defines - self.type = 'HDInsightPig' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity.py index 7822344f012f..e4c28dfb2ee9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity.py @@ -15,12 +15,10 @@ class HDInsightSparkActivity(ExecutionActivity): """HDInsight Spark activity. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Activity name. :type name: str :param description: Activity description. :type description: str @@ -28,19 +26,19 @@ class HDInsightSparkActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param root_path: Required. The root path in 'sparkJobLinkedService' for - all the job’s files. Type: string (or Expression with resultType string). + :param root_path: The root path in 'sparkJobLinkedService' for all the + job’s files. Type: string (or Expression with resultType string). :type root_path: object - :param entry_file_path: Required. The relative path to the root folder of - the code/package to be executed. Type: string (or Expression with - resultType string). + :param entry_file_path: The relative path to the root folder of the + code/package to be executed. Type: string (or Expression with resultType + string). :type entry_file_path: object :param arguments: The user-specified arguments to HDInsightSparkActivity. 
:type arguments: list[object] @@ -87,14 +85,14 @@ class HDInsightSparkActivity(ExecutionActivity): 'spark_config': {'key': 'typeProperties.sparkConfig', 'type': '{object}'}, } - def __init__(self, **kwargs): - super(HDInsightSparkActivity, self).__init__(**kwargs) - self.root_path = kwargs.get('root_path', None) - self.entry_file_path = kwargs.get('entry_file_path', None) - self.arguments = kwargs.get('arguments', None) - self.get_debug_info = kwargs.get('get_debug_info', None) - self.spark_job_linked_service = kwargs.get('spark_job_linked_service', None) - self.class_name = kwargs.get('class_name', None) - self.proxy_user = kwargs.get('proxy_user', None) - self.spark_config = kwargs.get('spark_config', None) + def __init__(self, name, root_path, entry_file_path, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, arguments=None, get_debug_info=None, spark_job_linked_service=None, class_name=None, proxy_user=None, spark_config=None): + super(HDInsightSparkActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) + self.root_path = root_path + self.entry_file_path = entry_file_path + self.arguments = arguments + self.get_debug_info = get_debug_info + self.spark_job_linked_service = spark_job_linked_service + self.class_name = class_name + self.proxy_user = proxy_user + self.spark_config = spark_config self.type = 'HDInsightSpark' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity_py3.py deleted file mode 100644 index 3f305901abb7..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity_py3.py +++ /dev/null @@ -1,100 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class HDInsightSparkActivity(ExecutionActivity): - """HDInsight Spark activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. 
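A minimal sketch of the HDInsightSparkActivity constructor regenerated above, showing the two required type properties; the paths, class name, and linked service names are illustrative placeholders.

from azure.mgmt.datafactory.models import (
    HDInsightSparkActivity,
    LinkedServiceReference,
)

spark_activity = HDInsightSparkActivity(
    name='RunSparkJob',
    linked_service_name=LinkedServiceReference(reference_name='MyHDInsightLinkedService'),
    root_path='adfspark',               # root in 'sparkJobLinkedService' for the job's files
    entry_file_path='jars/my-app.jar',  # relative to root_path
    class_name='com.example.Main',      # the application's Java/Spark main class
    arguments=['--run-date', '2019-06-07'],
    spark_job_linked_service=LinkedServiceReference(reference_name='MyStorageLinkedService'),
)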
- :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param root_path: Required. The root path in 'sparkJobLinkedService' for - all the job’s files. Type: string (or Expression with resultType string). - :type root_path: object - :param entry_file_path: Required. The relative path to the root folder of - the code/package to be executed. Type: string (or Expression with - resultType string). - :type entry_file_path: object - :param arguments: The user-specified arguments to HDInsightSparkActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: 'None', - 'Always', 'Failure' - :type get_debug_info: str or - ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param spark_job_linked_service: The storage linked service for uploading - the entry file and dependencies, and for receiving logs. - :type spark_job_linked_service: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param class_name: The application's Java/Spark main class. - :type class_name: str - :param proxy_user: The user to impersonate that will execute the job. - Type: string (or Expression with resultType string). - :type proxy_user: object - :param spark_config: Spark configuration property. - :type spark_config: dict[str, object] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'root_path': {'required': True}, - 'entry_file_path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'root_path': {'key': 'typeProperties.rootPath', 'type': 'object'}, - 'entry_file_path': {'key': 'typeProperties.entryFilePath', 'type': 'object'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'spark_job_linked_service': {'key': 'typeProperties.sparkJobLinkedService', 'type': 'LinkedServiceReference'}, - 'class_name': {'key': 'typeProperties.className', 'type': 'str'}, - 'proxy_user': {'key': 'typeProperties.proxyUser', 'type': 'object'}, - 'spark_config': {'key': 'typeProperties.sparkConfig', 'type': '{object}'}, - } - - def __init__(self, *, name: str, root_path, entry_file_path, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, arguments=None, get_debug_info=None, spark_job_linked_service=None, class_name: str=None, proxy_user=None, spark_config=None, **kwargs) -> None: - super(HDInsightSparkActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.root_path = root_path - self.entry_file_path = entry_file_path - self.arguments = arguments - self.get_debug_info = get_debug_info - self.spark_job_linked_service = spark_job_linked_service - self.class_name = class_name - self.proxy_user = proxy_user - self.spark_config = spark_config - self.type = 'HDInsightSpark' diff --git 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity.py index 42146a5d6cc6..cd4bca1c142a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity.py @@ -15,12 +15,10 @@ class HDInsightStreamingActivity(ExecutionActivity): """HDInsight streaming activity type. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Activity name. :type name: str :param description: Activity description. :type description: str @@ -28,7 +26,7 @@ class HDInsightStreamingActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: @@ -44,20 +42,19 @@ class HDInsightStreamingActivity(ExecutionActivity): 'Always', 'Failure' :type get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param mapper: Required. Mapper executable name. Type: string (or - Expression with resultType string). + :param mapper: Mapper executable name. Type: string (or Expression with + resultType string). :type mapper: object - :param reducer: Required. Reducer executable name. Type: string (or - Expression with resultType string). - :type reducer: object - :param input: Required. Input blob path. Type: string (or Expression with + :param reducer: Reducer executable name. Type: string (or Expression with resultType string). + :type reducer: object + :param input: Input blob path. Type: string (or Expression with resultType + string). :type input: object - :param output: Required. Output blob path. Type: string (or Expression - with resultType string). + :param output: Output blob path. Type: string (or Expression with + resultType string). :type output: object - :param file_paths: Required. Paths to streaming job files. Can be - directories. + :param file_paths: Paths to streaming job files. Can be directories. :type file_paths: list[object] :param file_linked_service: Linked service reference where the files are located. 
@@ -105,18 +102,18 @@ class HDInsightStreamingActivity(ExecutionActivity): 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, } - def __init__(self, **kwargs): - super(HDInsightStreamingActivity, self).__init__(**kwargs) - self.storage_linked_services = kwargs.get('storage_linked_services', None) - self.arguments = kwargs.get('arguments', None) - self.get_debug_info = kwargs.get('get_debug_info', None) - self.mapper = kwargs.get('mapper', None) - self.reducer = kwargs.get('reducer', None) - self.input = kwargs.get('input', None) - self.output = kwargs.get('output', None) - self.file_paths = kwargs.get('file_paths', None) - self.file_linked_service = kwargs.get('file_linked_service', None) - self.combiner = kwargs.get('combiner', None) - self.command_environment = kwargs.get('command_environment', None) - self.defines = kwargs.get('defines', None) + def __init__(self, name, mapper, reducer, input, output, file_paths, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, file_linked_service=None, combiner=None, command_environment=None, defines=None): + super(HDInsightStreamingActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) + self.storage_linked_services = storage_linked_services + self.arguments = arguments + self.get_debug_info = get_debug_info + self.mapper = mapper + self.reducer = reducer + self.input = input + self.output = output + self.file_paths = file_paths + self.file_linked_service = file_linked_service + self.combiner = combiner + self.command_environment = command_environment + self.defines = defines self.type = 'HDInsightStreaming' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity_py3.py deleted file mode 100644 index 2f5a301ff880..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity_py3.py +++ /dev/null @@ -1,122 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class HDInsightStreamingActivity(ExecutionActivity): - """HDInsight streaming activity type. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. 
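A minimal sketch of the HDInsightStreamingActivity constructor regenerated above, covering the required mapper/reducer/input/output/file_paths properties; the storage account, executables, and paths are illustrative placeholders.

from azure.mgmt.datafactory.models import (
    HDInsightStreamingActivity,
    LinkedServiceReference,
)

streaming_activity = HDInsightStreamingActivity(
    name='RunStreamingJob',
    linked_service_name=LinkedServiceReference(reference_name='MyHDInsightLinkedService'),
    mapper='cat.exe',   # required mapper executable
    reducer='wc.exe',   # required reducer executable
    input='wasb://input@<account>.blob.core.windows.net/example/davinci.txt',
    output='wasb://output@<account>.blob.core.windows.net/example/results',
    file_paths=['example/apps/cat.exe', 'example/apps/wc.exe'],
    file_linked_service=LinkedServiceReference(reference_name='MyStorageLinkedService'),
)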
- :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: - list[~azure.mgmt.datafactory.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: 'None', - 'Always', 'Failure' - :type get_debug_info: str or - ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param mapper: Required. Mapper executable name. Type: string (or - Expression with resultType string). - :type mapper: object - :param reducer: Required. Reducer executable name. Type: string (or - Expression with resultType string). - :type reducer: object - :param input: Required. Input blob path. Type: string (or Expression with - resultType string). - :type input: object - :param output: Required. Output blob path. Type: string (or Expression - with resultType string). - :type output: object - :param file_paths: Required. Paths to streaming job files. Can be - directories. - :type file_paths: list[object] - :param file_linked_service: Linked service reference where the files are - located. - :type file_linked_service: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param combiner: Combiner executable name. Type: string (or Expression - with resultType string). - :type combiner: object - :param command_environment: Command line environment values. - :type command_environment: list[object] - :param defines: Allows user to specify defines for streaming job request. 
- :type defines: dict[str, object] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'mapper': {'required': True}, - 'reducer': {'required': True}, - 'input': {'required': True}, - 'output': {'required': True}, - 'file_paths': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'mapper': {'key': 'typeProperties.mapper', 'type': 'object'}, - 'reducer': {'key': 'typeProperties.reducer', 'type': 'object'}, - 'input': {'key': 'typeProperties.input', 'type': 'object'}, - 'output': {'key': 'typeProperties.output', 'type': 'object'}, - 'file_paths': {'key': 'typeProperties.filePaths', 'type': '[object]'}, - 'file_linked_service': {'key': 'typeProperties.fileLinkedService', 'type': 'LinkedServiceReference'}, - 'combiner': {'key': 'typeProperties.combiner', 'type': 'object'}, - 'command_environment': {'key': 'typeProperties.commandEnvironment', 'type': '[object]'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, - } - - def __init__(self, *, name: str, mapper, reducer, input, output, file_paths, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, file_linked_service=None, combiner=None, command_environment=None, defines=None, **kwargs) -> None: - super(HDInsightStreamingActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.storage_linked_services = storage_linked_services - self.arguments = arguments - self.get_debug_info = get_debug_info - self.mapper = mapper - self.reducer = reducer - self.input = input - self.output = output - self.file_paths = file_paths - self.file_linked_service = file_linked_service - self.combiner = combiner - self.command_environment = command_environment - self.defines = defines - self.type = 'HDInsightStreaming' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service.py index b527f05a7e2f..b403e8108e6f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service.py @@ -15,8 +15,6 @@ class HdfsLinkedService(LinkedService): """Hadoop Distributed File System (HDFS) linked service. - All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,9 +29,9 @@ class HdfsLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param url: Required. The URL of the HDFS service endpoint, e.g. + :param url: The URL of the HDFS service endpoint, e.g. http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with resultType string). :type url: object @@ -71,11 +69,11 @@ class HdfsLinkedService(LinkedService): 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, } - def __init__(self, **kwargs): - super(HdfsLinkedService, self).__init__(**kwargs) - self.url = kwargs.get('url', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) + def __init__(self, url, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, authentication_type=None, encrypted_credential=None, user_name=None, password=None): + super(HdfsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.url = url + self.authentication_type = authentication_type + self.encrypted_credential = encrypted_credential + self.user_name = user_name + self.password = password self.type = 'Hdfs' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service_py3.py deleted file mode 100644 index e004701e1da0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service_py3.py +++ /dev/null @@ -1,81 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class HdfsLinkedService(LinkedService): - """Hadoop Distributed File System (HDFS) linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. 
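A minimal sketch of the HdfsLinkedService constructor regenerated above, using the WebHDFS endpoint form given in the docstring; the host, domain account, and password are illustrative placeholders.

from azure.mgmt.datafactory.models import HdfsLinkedService, SecureString

hdfs_ls = HdfsLinkedService(
    url='http://myhostname:50070/webhdfs/v1',  # required WebHDFS endpoint
    authentication_type='Windows',             # 'Anonymous' or 'Windows'
    user_name='MYDOMAIN\\svc-adf',             # placeholder Windows account
    password=SecureString(value='<password>'),
)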
- :type type: str - :param url: Required. The URL of the HDFS service endpoint, e.g. - http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with - resultType string). - :type url: object - :param authentication_type: Type of authentication used to connect to the - HDFS. Possible values are: Anonymous and Windows. Type: string (or - Expression with resultType string). - :type authentication_type: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - :param user_name: User name for Windows authentication. Type: string (or - Expression with resultType string). - :type user_name: object - :param password: Password for Windows authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - } - - def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, encrypted_credential=None, user_name=None, password=None, **kwargs) -> None: - super(HdfsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.url = url - self.authentication_type = authentication_type - self.encrypted_credential = encrypted_credential - self.user_name = user_name - self.password = password - self.type = 'Hdfs' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location.py index a8f5d1ba332c..3d48a66895f9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location.py @@ -15,12 +15,10 @@ class HdfsLocation(DatasetLocation): """The location of HDFS. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. + :param type: Type of dataset storage location. :type type: str :param folder_path: Specify the folder path of dataset. 
Type: string (or Expression with resultType string) @@ -34,12 +32,5 @@ class HdfsLocation(DatasetLocation): 'type': {'required': True}, } - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(HdfsLocation, self).__init__(**kwargs) + def __init__(self, type, additional_properties=None, folder_path=None, file_name=None): + super(HdfsLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location_py3.py deleted file mode 100644 index 2e07575bef0f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location_py3.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_location_py3 import DatasetLocation - - -class HdfsLocation(DatasetLocation): - """The location of HDFS. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). - :type file_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: - super(HdfsLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings.py index ec4b98c50385..ece39f76e7c7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings.py @@ -15,12 +15,10 @@ class HdfsReadSettings(StoreReadSettings): """HDFS read settings. - All required parameters must be populated in order to send to Azure. 
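A minimal sketch of the HdfsLocation constructor regenerated above; the folder path and file name are illustrative placeholders.

from azure.mgmt.datafactory.models import HdfsLocation

hdfs_location = HdfsLocation(
    type='HdfsLocation',            # dataset storage location discriminator
    folder_path='/data/incoming',   # placeholder HDFS folder
    file_name='events.csv',
)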
- :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. The read setting type. + :param type: The read setting type. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType @@ -66,12 +64,12 @@ class HdfsReadSettings(StoreReadSettings): 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, } - def __init__(self, **kwargs): - super(HdfsReadSettings, self).__init__(**kwargs) - self.recursive = kwargs.get('recursive', None) - self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) - self.wildcard_file_name = kwargs.get('wildcard_file_name', None) - self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) - self.distcp_settings = kwargs.get('distcp_settings', None) + def __init__(self, type, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery=None, modified_datetime_start=None, modified_datetime_end=None, distcp_settings=None): + super(HdfsReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + self.distcp_settings = distcp_settings diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings_py3.py deleted file mode 100644 index c37a045ec93c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings_py3.py +++ /dev/null @@ -1,77 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .store_read_settings_py3 import StoreReadSettings - - -class HdfsReadSettings(StoreReadSettings): - """HDFS read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). 
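The HdfsReadSettings rewrite above keeps the same fields, so wildcard-driven reads are configured as before. A hedged sketch; the 'HdfsReadSettings' type string and the paths are placeholders:

    from azure.mgmt.datafactory.models import HdfsReadSettings

    read_settings = HdfsReadSettings(
        type='HdfsReadSettings',        # assumed discriminator value
        recursive=True,                 # default is true per the docstring
        wildcard_folder_path='logs/2019/*',
        wildcard_file_name='*.log',
        enable_partition_discovery=False,
    )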
- :type recursive: object - :param wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or - Expression with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: HDFS wildcardFileName. Type: string (or - Expression with resultType string). - :type wildcard_file_name: object - :param enable_partition_discovery: Indicates whether to enable partition - discovery. - :type enable_partition_discovery: bool - :param modified_datetime_start: The start of file's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: - string (or Expression with resultType string). - :type modified_datetime_end: object - :param distcp_settings: Specifies Distcp-related settings. - :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, - } - - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, distcp_settings=None, **kwargs) -> None: - super(HdfsReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.recursive = recursive - self.wildcard_folder_path = wildcard_folder_path - self.wildcard_file_name = wildcard_file_name - self.enable_partition_discovery = enable_partition_discovery - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end - self.distcp_settings = distcp_settings diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source.py index be50590f6c32..1a859d544ef2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source.py @@ -15,8 +15,6 @@ class HdfsSource(CopySource): """A copy activity HDFS source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class HdfsSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. 
:type type: str :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType @@ -55,8 +53,8 @@ class HdfsSource(CopySource): 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, } - def __init__(self, **kwargs): - super(HdfsSource, self).__init__(**kwargs) - self.recursive = kwargs.get('recursive', None) - self.distcp_settings = kwargs.get('distcp_settings', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, distcp_settings=None): + super(HdfsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.recursive = recursive + self.distcp_settings = distcp_settings self.type = 'HdfsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source_py3.py deleted file mode 100644 index 3c60cab46289..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source_py3.py +++ /dev/null @@ -1,62 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class HdfsSource(CopySource): - """A copy activity HDFS source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - :param distcp_settings: Specifies Distcp-related settings. 
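HdfsSource pairs with DistcpSettings when the copy should be offloaded to a DistCp job on the cluster. A minimal sketch under stated assumptions: DistcpSettings is only referenced, not defined, in this patch, so its parameter names (resource_manager_endpoint, temp_script_path) and the values below are assumptions:

    from azure.mgmt.datafactory.models import DistcpSettings, HdfsSource

    source = HdfsSource(
        recursive=True,
        distcp_settings=DistcpSettings(
            resource_manager_endpoint='http://resourcemanager:8088',  # assumed name/value
            temp_script_path='/tmp/adf-distcp',                       # assumed name/value
        ),
    )
    # The constructor pins source.type to 'HdfsSource'.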
- :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, distcp_settings=None, **kwargs) -> None: - super(HdfsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.recursive = recursive - self.distcp_settings = distcp_settings - self.type = 'HdfsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service.py index c54c1393d56e..f7d8ff85b98f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service.py @@ -15,8 +15,6 @@ class HiveLinkedService(LinkedService): """Hive Server linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,11 +29,10 @@ class HiveLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param host: Required. IP address or host name of the Hive server, - separated by ';' for multiple hosts (only when serviceDiscoveryMode is - enable). + :param host: IP address or host name of the Hive server, separated by ';' + for multiple hosts (only when serviceDiscoveryMode is enable). :type host: object :param port: The TCP port that the Hive server uses to listen for client connections. @@ -47,8 +44,8 @@ class HiveLinkedService(LinkedService): Thrift layer. Possible values include: 'Binary', 'SASL', 'HTTP ' :type thrift_transport_protocol: str or ~azure.mgmt.datafactory.models.HiveThriftTransportProtocol - :param authentication_type: Required. The authentication method used to - access the Hive server. Possible values include: 'Anonymous', 'Username', + :param authentication_type: The authentication method used to access the + Hive server. 
Possible values include: 'Anonymous', 'Username', 'UsernameAndPassword', 'WindowsAzureHDInsightService' :type authentication_type: str or ~azure.mgmt.datafactory.models.HiveAuthenticationType @@ -125,23 +122,23 @@ class HiveLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(HiveLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.port = kwargs.get('port', None) - self.server_type = kwargs.get('server_type', None) - self.thrift_transport_protocol = kwargs.get('thrift_transport_protocol', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.service_discovery_mode = kwargs.get('service_discovery_mode', None) - self.zoo_keeper_name_space = kwargs.get('zoo_keeper_name_space', None) - self.use_native_query = kwargs.get('use_native_query', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.http_path = kwargs.get('http_path', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, host, authentication_type, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, port=None, server_type=None, thrift_transport_protocol=None, service_discovery_mode=None, zoo_keeper_name_space=None, use_native_query=None, username=None, password=None, http_path=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None): + super(HiveLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.host = host + self.port = port + self.server_type = server_type + self.thrift_transport_protocol = thrift_transport_protocol + self.authentication_type = authentication_type + self.service_discovery_mode = service_discovery_mode + self.zoo_keeper_name_space = zoo_keeper_name_space + self.use_native_query = use_native_query + self.username = username + self.password = password + self.http_path = http_path + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.encrypted_credential = encrypted_credential self.type = 'Hive' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service_py3.py deleted file mode 100644 index 611d30ecb781..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service_py3.py +++ /dev/null @@ -1,147 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class HiveLinkedService(LinkedService): - """Hive Server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. IP address or host name of the Hive server, - separated by ';' for multiple hosts (only when serviceDiscoveryMode is - enable). - :type host: object - :param port: The TCP port that the Hive server uses to listen for client - connections. - :type port: object - :param server_type: The type of Hive server. Possible values include: - 'HiveServer1', 'HiveServer2', 'HiveThriftServer' - :type server_type: str or ~azure.mgmt.datafactory.models.HiveServerType - :param thrift_transport_protocol: The transport protocol to use in the - Thrift layer. Possible values include: 'Binary', 'SASL', 'HTTP ' - :type thrift_transport_protocol: str or - ~azure.mgmt.datafactory.models.HiveThriftTransportProtocol - :param authentication_type: Required. The authentication method used to - access the Hive server. Possible values include: 'Anonymous', 'Username', - 'UsernameAndPassword', 'WindowsAzureHDInsightService' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.HiveAuthenticationType - :param service_discovery_mode: true to indicate using the ZooKeeper - service, false not. - :type service_discovery_mode: object - :param zoo_keeper_name_space: The namespace on ZooKeeper under which Hive - Server 2 nodes are added. - :type zoo_keeper_name_space: object - :param use_native_query: Specifies whether the driver uses native HiveQL - queries,or converts them into an equivalent form in HiveQL. - :type use_native_query: object - :param username: The user name that you use to access Hive Server. - :type username: object - :param password: The password corresponding to the user name that you - provided in the Username field - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param http_path: The partial URL corresponding to the Hive server. - :type http_path: object - :param enable_ssl: Specifies whether the connections to the server are - encrypted using SSL. The default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing - trusted CA certificates for verifying the server when connecting over SSL. - This property can only be set when using SSL on self-hosted IR. The - default value is the cacerts.pem file installed with the IR. 
- :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate - from the system trust store or from a specified PEM file. The default - value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a - CA-issued SSL certificate name to match the host name of the server when - connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow - self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, - 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'service_discovery_mode': {'key': 'typeProperties.serviceDiscoveryMode', 'type': 'object'}, - 'zoo_keeper_name_space': {'key': 'typeProperties.zooKeeperNameSpace', 'type': 'object'}, - 'use_native_query': {'key': 'typeProperties.useNativeQuery', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, server_type=None, thrift_transport_protocol=None, service_discovery_mode=None, zoo_keeper_name_space=None, use_native_query=None, username=None, password=None, http_path=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: - super(HiveLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, 
annotations=annotations, **kwargs) - self.host = host - self.port = port - self.server_type = server_type - self.thrift_transport_protocol = thrift_transport_protocol - self.authentication_type = authentication_type - self.service_discovery_mode = service_discovery_mode - self.zoo_keeper_name_space = zoo_keeper_name_space - self.use_native_query = use_native_query - self.username = username - self.password = password - self.http_path = http_path - self.enable_ssl = enable_ssl - self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch - self.allow_self_signed_server_cert = allow_self_signed_server_cert - self.encrypted_credential = encrypted_credential - self.type = 'Hive' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset.py index 07b6f2b54901..2521f219743a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset.py @@ -15,8 +15,6 @@ class HiveObjectDataset(Dataset): """Hive Server dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class HiveObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class HiveObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. 
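With the rewritten constructor above, HiveLinkedService is built from its two required fields plus optional transport and TLS settings. A minimal sketch assuming a single HiveServer2 host; the host and credentials are placeholders, and 10000 is the conventional HiveServer2 port:

    from azure.mgmt.datafactory.models import HiveLinkedService, SecureString

    hive_ls = HiveLinkedService(
        host='hive.contoso.com',
        authentication_type='UsernameAndPassword',
        port=10000,
        server_type='HiveServer2',
        thrift_transport_protocol='SASL',
        username='analyst',
        password=SecureString(value='<placeholder>'),
        enable_ssl=True,
        use_system_trust_store=True,
    )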
@@ -74,9 +72,9 @@ class HiveObjectDataset(Dataset): 'hive_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, **kwargs): - super(HiveObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.hive_object_dataset_schema = kwargs.get('hive_object_dataset_schema', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, hive_object_dataset_schema=None): + super(HiveObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name + self.table = table + self.hive_object_dataset_schema = hive_object_dataset_schema self.type = 'HiveObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset_py3.py deleted file mode 100644 index 69384bdfa99a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class HiveObjectDataset(Dataset): - """Hive Server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of the Hive. Type: string (or Expression with - resultType string). 
- :type table: object - :param hive_object_dataset_schema: The schema name of the Hive. Type: - string (or Expression with resultType string). - :type hive_object_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'hive_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, hive_object_dataset_schema=None, **kwargs) -> None: - super(HiveObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.table = table - self.hive_object_dataset_schema = hive_object_dataset_schema - self.type = 'HiveObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source.py index 3af88c3280e3..0dc4aa8e5aed 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source.py @@ -15,8 +15,6 @@ class HiveSource(CopySource): """A copy activity Hive Server source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class HiveSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
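The dataset above and the HiveSource documented here combine when a copy activity reads from Hive. A hedged sketch; the linked-service reference name, schema, and table are illustrative, and hive_object_dataset_schema serializes to typeProperties.schema per the attribute map above:

    from azure.mgmt.datafactory.models import (
        HiveObjectDataset,
        HiveSource,
        LinkedServiceReference,
    )

    dataset = HiveObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name='HiveLinkedService1'),
        hive_object_dataset_schema='default',  # emitted as typeProperties.schema
        table='trip_data',                     # preferred over the retiring table_name
    )

    source = HiveSource(query='SELECT * FROM default.trip_data LIMIT 100')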
@@ -51,7 +49,7 @@ class HiveSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(HiveSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'HiveSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source_py3.py deleted file mode 100644 index 6c09191b8c1b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class HiveSource(CopySource): - """A copy activity Hive Server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'HiveSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset.py index f2184dea151f..9df5a098e0c3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset.py @@ -15,8 +15,6 @@ class HttpDataset(Dataset): """A file in an HTTP web server. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class HttpDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class HttpDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. 
:type type: str :param relative_url: The relative URL based on the URL in the HttpLinkedService refers to an HTTP file Type: string (or Expression with @@ -88,12 +86,12 @@ class HttpDataset(Dataset): 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } - def __init__(self, **kwargs): - super(HttpDataset, self).__init__(**kwargs) - self.relative_url = kwargs.get('relative_url', None) - self.request_method = kwargs.get('request_method', None) - self.request_body = kwargs.get('request_body', None) - self.additional_headers = kwargs.get('additional_headers', None) - self.format = kwargs.get('format', None) - self.compression = kwargs.get('compression', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, relative_url=None, request_method=None, request_body=None, additional_headers=None, format=None, compression=None): + super(HttpDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.relative_url = relative_url + self.request_method = request_method + self.request_body = request_body + self.additional_headers = additional_headers + self.format = format + self.compression = compression self.type = 'HttpFile' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset_py3.py deleted file mode 100644 index 09f97a03a95d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset_py3.py +++ /dev/null @@ -1,99 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class HttpDataset(Dataset): - """A file in an HTTP web server. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. 
If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param relative_url: The relative URL based on the URL in the - HttpLinkedService refers to an HTTP file Type: string (or Expression with - resultType string). - :type relative_url: object - :param request_method: The HTTP method for the HTTP request. Type: string - (or Expression with resultType string). - :type request_method: object - :param request_body: The body for the HTTP request. Type: string (or - Expression with resultType string). - :type request_body: object - :param additional_headers: The headers for the HTTP Request. e.g. - request-header-name-1:request-header-value-1 - ... - request-header-name-n:request-header-value-n Type: string (or Expression - with resultType string). - :type additional_headers: object - :param format: The format of files. - :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat - :param compression: The data compression method used on files. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, - 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, - 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, - 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, - 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, relative_url=None, request_method=None, request_body=None, additional_headers=None, format=None, compression=None, **kwargs) -> None: - super(HttpDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.relative_url = relative_url - self.request_method = request_method - self.request_body = request_body - self.additional_headers = additional_headers - self.format = format - self.compression = compression - self.type = 'HttpFile' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service.py index 6232bc45fee4..0459cf8ea792 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service.py +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service.py @@ -15,8 +15,6 @@ class HttpLinkedService(LinkedService): """Linked service for an HTTP source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,9 +29,9 @@ class HttpLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param url: Required. The base URL of the HTTP endpoint, e.g. + :param url: The base URL of the HTTP endpoint, e.g. http://www.microsoft.com. Type: string (or Expression with resultType string). :type url: object @@ -92,14 +90,14 @@ class HttpLinkedService(LinkedService): 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, } - def __init__(self, **kwargs): - super(HttpLinkedService, self).__init__(**kwargs) - self.url = kwargs.get('url', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.embedded_cert_data = kwargs.get('embedded_cert_data', None) - self.cert_thumbprint = kwargs.get('cert_thumbprint', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None) + def __init__(self, url, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, authentication_type=None, user_name=None, password=None, embedded_cert_data=None, cert_thumbprint=None, encrypted_credential=None, enable_server_certificate_validation=None): + super(HttpLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.url = url + self.authentication_type = authentication_type + self.user_name = user_name + self.password = password + self.embedded_cert_data = embedded_cert_data + self.cert_thumbprint = cert_thumbprint + self.encrypted_credential = encrypted_credential + self.enable_server_certificate_validation = enable_server_certificate_validation self.type = 'HttpServer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service_py3.py deleted file mode 100644 index 7f70adb08425..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service_py3.py +++ /dev/null @@ -1,105 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class HttpLinkedService(LinkedService): - """Linked service for an HTTP source. 
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param url: Required. The base URL of the HTTP endpoint, e.g. - http://www.microsoft.com. Type: string (or Expression with resultType - string). - :type url: object - :param authentication_type: The authentication type to be used to connect - to the HTTP server. Possible values include: 'Basic', 'Anonymous', - 'Digest', 'Windows', 'ClientCertificate' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.HttpAuthenticationType - :param user_name: User name for Basic, Digest, or Windows authentication. - Type: string (or Expression with resultType string). - :type user_name: object - :param password: Password for Basic, Digest, Windows, or ClientCertificate - with EmbeddedCertData authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param embedded_cert_data: Base64 encoded certificate data for - ClientCertificate authentication. For on-premises copy with - ClientCertificate authentication, either CertThumbprint or - EmbeddedCertData/Password should be specified. Type: string (or Expression - with resultType string). - :type embedded_cert_data: object - :param cert_thumbprint: Thumbprint of certificate for ClientCertificate - authentication. Only valid for on-premises copy. For on-premises copy with - ClientCertificate authentication, either CertThumbprint or - EmbeddedCertData/Password should be specified. Type: string (or Expression - with resultType string). - :type cert_thumbprint: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - :param enable_server_certificate_validation: If true, validate the HTTPS - server SSL certificate. Default value is true. Type: boolean (or - Expression with resultType boolean). 
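Putting the HTTP models together: the linked service supplies the base URL and credentials, and the HttpDataset shown earlier addresses one file beneath it. A minimal sketch; the names, URLs, and headers are placeholders:

    from azure.mgmt.datafactory.models import (
        HttpDataset,
        HttpLinkedService,
        LinkedServiceReference,
        SecureString,
    )

    http_ls = HttpLinkedService(
        url='https://downloads.contoso.com',
        authentication_type='Basic',
        user_name='reader',
        password=SecureString(value='<placeholder>'),
        enable_server_certificate_validation=True,  # default per the docstring
    )

    http_file = HttpDataset(
        linked_service_name=LinkedServiceReference(reference_name='HttpLinkedService1'),
        relative_url='exports/2019/06/data.csv',
        request_method='GET',
        additional_headers='Accept:text/csv',
    )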
- :type enable_server_certificate_validation: object
- """
-
- _validation = {
- 'type': {'required': True},
- 'url': {'required': True},
- }
-
- _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
- 'description': {'key': 'description', 'type': 'str'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'type': {'key': 'type', 'type': 'str'},
- 'url': {'key': 'typeProperties.url', 'type': 'object'},
- 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
- 'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
- 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
- 'embedded_cert_data': {'key': 'typeProperties.embeddedCertData', 'type': 'object'},
- 'cert_thumbprint': {'key': 'typeProperties.certThumbprint', 'type': 'object'},
- 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
- 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'},
- }
-
- def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, user_name=None, password=None, embedded_cert_data=None, cert_thumbprint=None, encrypted_credential=None, enable_server_certificate_validation=None, **kwargs) -> None:
- super(HttpLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
- self.url = url
- self.authentication_type = authentication_type
- self.user_name = user_name
- self.password = password
- self.embedded_cert_data = embedded_cert_data
- self.cert_thumbprint = cert_thumbprint
- self.encrypted_credential = encrypted_credential
- self.enable_server_certificate_validation = enable_server_certificate_validation
- self.type = 'HttpServer'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings.py
index a7c175da3489..d12ae2718ba1 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings.py
@@ -15,12 +15,10 @@ class HttpReadSettings(StoreReadSettings):
 """HTTP read settings.
- All required parameters must be populated in order to send to Azure.
-
 :param additional_properties: Unmatched properties from the message are
 deserialized this collection
 :type additional_properties: dict[str, object]
- :param type: Required. The read setting type.
+ :param type: The read setting type.
 :type type: str
 :param max_concurrent_connections: The maximum concurrent connection count
 for the source data store.
Type: integer (or Expression with resultType @@ -55,9 +53,9 @@ class HttpReadSettings(StoreReadSettings): 'request_timeout': {'key': 'requestTimeout', 'type': 'object'}, } - def __init__(self, **kwargs): - super(HttpReadSettings, self).__init__(**kwargs) - self.request_method = kwargs.get('request_method', None) - self.request_body = kwargs.get('request_body', None) - self.additional_headers = kwargs.get('additional_headers', None) - self.request_timeout = kwargs.get('request_timeout', None) + def __init__(self, type, additional_properties=None, max_concurrent_connections=None, request_method=None, request_body=None, additional_headers=None, request_timeout=None): + super(HttpReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections) + self.request_method = request_method + self.request_body = request_body + self.additional_headers = additional_headers + self.request_timeout = request_timeout diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings_py3.py deleted file mode 100644 index 7cea9207c996..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings_py3.py +++ /dev/null @@ -1,63 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .store_read_settings_py3 import StoreReadSettings - - -class HttpReadSettings(StoreReadSettings): - """Sftp read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param request_method: The HTTP method used to call the RESTful API. The - default is GET. Type: string (or Expression with resultType string). - :type request_method: object - :param request_body: The HTTP request body to the RESTful API if - requestMethod is POST. Type: string (or Expression with resultType - string). - :type request_body: object - :param additional_headers: The additional HTTP headers in the request to - the RESTful API. Type: string (or Expression with resultType string). - :type additional_headers: object - :param request_timeout: Specifies the timeout for a HTTP client to get - HTTP response from HTTP server. 
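The HttpReadSettings rewrite above mirrors the HDFS variant. A hedged sketch of settings for a POST-based pull; the 'HttpReadSettings' type string, body, headers, and the timespan-style timeout are assumptions:

    from azure.mgmt.datafactory.models import HttpReadSettings

    settings = HttpReadSettings(
        type='HttpReadSettings',                       # assumed discriminator value
        request_method='POST',                         # default is GET per the docstring
        request_body='{"from": "2019-06-01"}',
        additional_headers='Content-Type:application/json',
        request_timeout='00:02:00',                    # assumed timespan format
    )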
- :type request_timeout: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'request_method': {'key': 'requestMethod', 'type': 'object'}, - 'request_body': {'key': 'requestBody', 'type': 'object'}, - 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, - 'request_timeout': {'key': 'requestTimeout', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, request_method=None, request_body=None, additional_headers=None, request_timeout=None, **kwargs) -> None: - super(HttpReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.request_method = request_method - self.request_body = request_body - self.additional_headers = additional_headers - self.request_timeout = request_timeout diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location.py index 94106fae9d15..ece12af73d55 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location.py @@ -15,12 +15,10 @@ class HttpServerLocation(DatasetLocation): """The location of http server. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. + :param type: Type of dataset storage location. :type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string) @@ -45,6 +43,6 @@ class HttpServerLocation(DatasetLocation): 'relative_url': {'key': 'relativeUrl', 'type': 'object'}, } - def __init__(self, **kwargs): - super(HttpServerLocation, self).__init__(**kwargs) - self.relative_url = kwargs.get('relative_url', None) + def __init__(self, type, additional_properties=None, folder_path=None, file_name=None, relative_url=None): + super(HttpServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name) + self.relative_url = relative_url diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location_py3.py deleted file mode 100644 index c52c53dcf357..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location_py3.py +++ /dev/null @@ -1,50 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from .dataset_location_py3 import DatasetLocation - - -class HttpServerLocation(DatasetLocation): - """The location of http server. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). - :type file_name: object - :param relative_url: Specify the relativeUrl of http server. Type: string - (or Expression with resultType string) - :type relative_url: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - 'relative_url': {'key': 'relativeUrl', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, relative_url=None, **kwargs) -> None: - super(HttpServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) - self.relative_url = relative_url diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source.py index ae131aa16c8c..d27f1f9f7008 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source.py @@ -15,8 +15,6 @@ class HttpSource(CopySource): """A copy activity source for an HTTP file. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class HttpSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param http_request_timeout: Specifies the timeout for a HTTP client to get HTTP response from HTTP server. 
The default value is equivalent to @@ -54,7 +52,7 @@ class HttpSource(CopySource): 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } - def __init__(self, **kwargs): - super(HttpSource, self).__init__(**kwargs) - self.http_request_timeout = kwargs.get('http_request_timeout', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, http_request_timeout=None): + super(HttpSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.http_request_timeout = http_request_timeout self.type = 'HttpSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source_py3.py deleted file mode 100644 index df339fc3aef7..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source_py3.py +++ /dev/null @@ -1,60 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class HttpSource(CopySource): - """A copy activity source for an HTTP file. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param http_request_timeout: Specifies the timeout for a HTTP client to - get HTTP response from HTTP server. The default value is equivalent to - System.Net.HttpWebRequest.Timeout. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type http_request_timeout: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, http_request_timeout=None, **kwargs) -> None: - super(HttpSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.http_request_timeout = http_request_timeout - self.type = 'HttpSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service.py index 3d0d6cb3a6f4..be34343b5040 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service.py @@ -15,8 +15,6 @@ class HubspotLinkedService(LinkedService): """Hubspot Service linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,10 +29,9 @@ class HubspotLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param client_id: Required. The client ID associated with your Hubspot - application. + :param client_id: The client ID associated with your Hubspot application. :type client_id: object :param client_secret: The client secret associated with your Hubspot application. 
@@ -83,14 +80,14 @@ class HubspotLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(HubspotLinkedService, self).__init__(**kwargs) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.access_token = kwargs.get('access_token', None) - self.refresh_token = kwargs.get('refresh_token', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, client_id, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, client_secret=None, access_token=None, refresh_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None): + super(HubspotLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.client_id = client_id + self.client_secret = client_secret + self.access_token = access_token + self.refresh_token = refresh_token + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential self.type = 'Hubspot' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service_py3.py deleted file mode 100644 index 272d613e9cd1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service_py3.py +++ /dev/null @@ -1,96 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class HubspotLinkedService(LinkedService): - """Hubspot Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param client_id: Required. The client ID associated with your Hubspot - application. 
- :type client_id: object - :param client_secret: The client secret associated with your Hubspot - application. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param access_token: The access token obtained when initially - authenticating your OAuth integration. - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param refresh_token: The refresh token obtained when initially - authenticating your OAuth integration. - :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, access_token=None, refresh_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(HubspotLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.client_id = client_id - self.client_secret = client_secret - self.access_token = access_token - self.refresh_token = refresh_token - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'Hubspot' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset.py index ce8994b4db4a..4cd69a32be47 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset.py @@ -15,8 +15,6 @@ class HubspotObjectDataset(Dataset): """Hubspot Service dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class HubspotObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class HubspotObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -66,7 +64,7 @@ class HubspotObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(HubspotObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): + super(HubspotObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name self.type = 'HubspotObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset_py3.py deleted file mode 100644 index bd2309101f72..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class HubspotObjectDataset(Dataset): - """Hubspot Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. 
- :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(HubspotObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'HubspotObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source.py index b4b4c618c33e..e72777465725 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source.py @@ -15,8 +15,6 @@ class HubspotSource(CopySource): """A copy activity Hubspot Service source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class HubspotSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
Type: string (or Expression with resultType string). @@ -51,7 +49,7 @@ class HubspotSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(HubspotSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'HubspotSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source_py3.py deleted file mode 100644 index a29811342ce0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class HubspotSource(CopySource): - """A copy activity Hubspot Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'HubspotSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity.py index a8cb1da690e1..9983ad2d4ce6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity.py @@ -17,12 +17,10 @@ class IfConditionActivity(ControlActivity): activities under the ifTrueActivities property or the ifFalseActivities property depending on the result of the expression. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Activity name. :type name: str :param description: Activity description. :type description: str @@ -30,10 +28,10 @@ class IfConditionActivity(ControlActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param expression: Required. An expression that would evaluate to Boolean. - This is used to determine the block of activities (ifTrueActivities or + :param expression: An expression that would evaluate to Boolean. This is + used to determine the block of activities (ifTrueActivities or ifFalseActivities) that will be executed. 
:type expression: ~azure.mgmt.datafactory.models.Expression :param if_true_activities: List of activities to execute if expression is @@ -64,9 +62,9 @@ class IfConditionActivity(ControlActivity): 'if_false_activities': {'key': 'typeProperties.ifFalseActivities', 'type': '[Activity]'}, } - def __init__(self, **kwargs): - super(IfConditionActivity, self).__init__(**kwargs) - self.expression = kwargs.get('expression', None) - self.if_true_activities = kwargs.get('if_true_activities', None) - self.if_false_activities = kwargs.get('if_false_activities', None) + def __init__(self, name, expression, additional_properties=None, description=None, depends_on=None, user_properties=None, if_true_activities=None, if_false_activities=None): + super(IfConditionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties) + self.expression = expression + self.if_true_activities = if_true_activities + self.if_false_activities = if_false_activities self.type = 'IfCondition' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity_py3.py deleted file mode 100644 index 7921a2602807..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .control_activity_py3 import ControlActivity - - -class IfConditionActivity(ControlActivity): - """This activity evaluates a boolean expression and executes either the - activities under the ifTrueActivities property or the ifFalseActivities - property depending on the result of the expression. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param expression: Required. An expression that would evaluate to Boolean. - This is used to determine the block of activities (ifTrueActivities or - ifFalseActivities) that will be executed. - :type expression: ~azure.mgmt.datafactory.models.Expression - :param if_true_activities: List of activities to execute if expression is - evaluated to true. This is an optional property and if not provided, the - activity will exit without any action. 
- :type if_true_activities: list[~azure.mgmt.datafactory.models.Activity] - :param if_false_activities: List of activities to execute if expression is - evaluated to false. This is an optional property and if not provided, the - activity will exit without any action. - :type if_false_activities: list[~azure.mgmt.datafactory.models.Activity] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'expression': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'expression': {'key': 'typeProperties.expression', 'type': 'Expression'}, - 'if_true_activities': {'key': 'typeProperties.ifTrueActivities', 'type': '[Activity]'}, - 'if_false_activities': {'key': 'typeProperties.ifFalseActivities', 'type': '[Activity]'}, - } - - def __init__(self, *, name: str, expression, additional_properties=None, description: str=None, depends_on=None, user_properties=None, if_true_activities=None, if_false_activities=None, **kwargs) -> None: - super(IfConditionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.expression = expression - self.if_true_activities = if_true_activities - self.if_false_activities = if_false_activities - self.type = 'IfCondition' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service.py index a704852652db..14a6a12e1615 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service.py @@ -15,8 +15,6 @@ class ImpalaLinkedService(LinkedService): """Impala server linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,17 +29,16 @@ class ImpalaLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param host: Required. The IP address or host name of the Impala server. - (i.e. 192.168.222.160) + :param host: The IP address or host name of the Impala server. (i.e. + 192.168.222.160) :type host: object :param port: The TCP port that the Impala server uses to listen for client connections. The default value is 21050. :type port: object - :param authentication_type: Required. The authentication type to use. - Possible values include: 'Anonymous', 'SASLUsername', - 'UsernameAndPassword' + :param authentication_type: The authentication type to use. Possible + values include: 'Anonymous', 'SASLUsername', 'UsernameAndPassword' :type authentication_type: str or ~azure.mgmt.datafactory.models.ImpalaAuthenticationType :param username: The user name used to access the Impala server. 
The @@ -101,17 +98,17 @@ class ImpalaLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(ImpalaLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.port = kwargs.get('port', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, host, authentication_type, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, port=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None): + super(ImpalaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.host = host + self.port = port + self.authentication_type = authentication_type + self.username = username + self.password = password + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.encrypted_credential = encrypted_credential self.type = 'Impala' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service_py3.py deleted file mode 100644 index 55b2e0c861d7..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service_py3.py +++ /dev/null @@ -1,117 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class ImpalaLinkedService(LinkedService): - """Impala server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The IP address or host name of the Impala server. - (i.e. 192.168.222.160) - :type host: object - :param port: The TCP port that the Impala server uses to listen for client - connections. The default value is 21050. - :type port: object - :param authentication_type: Required. The authentication type to use. - Possible values include: 'Anonymous', 'SASLUsername', - 'UsernameAndPassword' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.ImpalaAuthenticationType - :param username: The user name used to access the Impala server. The - default value is anonymous when using SASLUsername. - :type username: object - :param password: The password corresponding to the user name when using - UsernameAndPassword. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are - encrypted using SSL. The default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing - trusted CA certificates for verifying the server when connecting over SSL. - This property can only be set when using SSL on self-hosted IR. The - default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate - from the system trust store or from a specified PEM file. The default - value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a - CA-issued SSL certificate name to match the host name of the server when - connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow - self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: - super(ImpalaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.port = port - self.authentication_type = authentication_type - self.username = username - self.password = password - self.enable_ssl = enable_ssl - self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch - self.allow_self_signed_server_cert = allow_self_signed_server_cert - self.encrypted_credential = encrypted_credential - self.type = 'Impala' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset.py index 8faee4f09240..f3033b55b65e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset.py @@ -15,8 +15,6 @@ class ImpalaObjectDataset(Dataset): """Impala server dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class ImpalaObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. 
+ :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class ImpalaObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. @@ -74,9 +72,9 @@ class ImpalaObjectDataset(Dataset): 'impala_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, **kwargs): - super(ImpalaObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.impala_object_dataset_schema = kwargs.get('impala_object_dataset_schema', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, impala_object_dataset_schema=None): + super(ImpalaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name + self.table = table + self.impala_object_dataset_schema = impala_object_dataset_schema self.type = 'ImpalaObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset_py3.py deleted file mode 100644 index 5652b5c9e4b0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class ImpalaObjectDataset(Dataset): - """Impala server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of the Impala. Type: string (or Expression - with resultType string). - :type table: object - :param impala_object_dataset_schema: The schema name of the Impala. Type: - string (or Expression with resultType string). - :type impala_object_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'impala_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, impala_object_dataset_schema=None, **kwargs) -> None: - super(ImpalaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.table = table - self.impala_object_dataset_schema = impala_object_dataset_schema - self.type = 'ImpalaObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source.py index 9e27dbdb6266..903649612c18 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source.py @@ -15,8 +15,6 @@ class ImpalaSource(CopySource): """A copy activity Impala server source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class ImpalaSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
@@ -51,7 +49,7 @@ class ImpalaSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(ImpalaSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'ImpalaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source_py3.py deleted file mode 100644 index f7dc4016d020..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class ImpalaSource(CopySource): - """A copy activity Impala server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'ImpalaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service.py index 2a58e7a0f7d3..7127ff0774e1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service.py @@ -15,8 +15,6 @@ class InformixLinkedService(LinkedService): """Informix linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,10 +29,10 @@ class InformixLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param connection_string: Required. The non-access credential portion of - the connection string as well as an optional encrypted credential. Type: + :param connection_string: The non-access credential portion of the + connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. 
:type connection_string: object :param authentication_type: Type of authentication used to connect to the @@ -75,12 +73,12 @@ class InformixLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(InformixLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.credential = kwargs.get('credential', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, connection_string, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, authentication_type=None, credential=None, user_name=None, password=None, encrypted_credential=None): + super(InformixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.connection_string = connection_string + self.authentication_type = authentication_type + self.credential = credential + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential self.type = 'Informix' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service_py3.py deleted file mode 100644 index 03aadada664d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service_py3.py +++ /dev/null @@ -1,86 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class InformixLinkedService(LinkedService): - """Informix linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The non-access credential portion of - the connection string as well as an optional encrypted credential. Type: - string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param authentication_type: Type of authentication used to connect to the - Informix as ODBC data store. 
Possible values are: Anonymous and Basic. - Type: string (or Expression with resultType string). - :type authentication_type: object - :param credential: The access credential portion of the connection string - specified in driver-specific property-value format. - :type credential: ~azure.mgmt.datafactory.models.SecretBase - :param user_name: User name for Basic authentication. Type: string (or - Expression with resultType string). - :type user_name: object - :param password: Password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, credential=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(InformixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.authentication_type = authentication_type - self.credential = credential - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'Informix' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink.py index c511f4ecc174..a5a6c03d13d5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink.py @@ -15,8 +15,6 @@ class InformixSink(CopySink): """A copy activity Informix sink. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -38,7 +36,7 @@ class InformixSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. 
:type type: str :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). @@ -60,7 +58,7 @@ class InformixSink(CopySink): 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } - def __init__(self, **kwargs): - super(InformixSink, self).__init__(**kwargs) - self.pre_copy_script = kwargs.get('pre_copy_script', None) + def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None): + super(InformixSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.pre_copy_script = pre_copy_script self.type = 'InformixSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink_py3.py deleted file mode 100644 index b0681ec0d423..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink_py3.py +++ /dev/null @@ -1,66 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class InformixSink(CopySink): - """A copy activity Informix sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param pre_copy_script: A query to execute before starting the copy. Type: - string (or Expression with resultType string). 
- :type pre_copy_script: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: - super(InformixSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.pre_copy_script = pre_copy_script - self.type = 'InformixSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source.py index 6cab908c7014..6dfce8fef7b4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source.py @@ -15,8 +15,6 @@ class InformixSource(CopySource): """A copy activity source for Informix. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class InformixSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType string). @@ -51,7 +49,7 @@ class InformixSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(InformixSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'InformixSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source_py3.py deleted file mode 100644 index ed8fb0221239..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class InformixSource(CopySource): - """A copy activity source for Informix. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'InformixSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset.py index 8b7364bff652..7ce066b16b4d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset.py @@ -15,8 +15,6 @@ class InformixTableDataset(Dataset): """The Informix table dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class InformixTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. 
@@ -41,7 +39,7 @@ class InformixTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: The Informix table name. Type: string (or Expression with resultType string). @@ -66,7 +64,7 @@ class InformixTableDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(InformixTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): + super(InformixTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name self.type = 'InformixTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset_py3.py deleted file mode 100644 index 05c458e797b1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class InformixTableDataset(Dataset): - """The Informix table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The Informix table name. 
Type: string (or Expression - with resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(InformixTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'InformixTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime.py index 5dd45d16f76e..69e2792fda46 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime.py @@ -19,14 +19,12 @@ class IntegrationRuntime(Model): You probably want to use the sub-classes and not this class directly. Known sub-classes are: SelfHostedIntegrationRuntime, ManagedIntegrationRuntime - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Integration runtime description. :type description: str - :param type: Required. Constant filled by server. + :param type: Constant filled by server. 
:type type: str """ @@ -44,8 +42,8 @@ class IntegrationRuntime(Model): 'type': {'SelfHosted': 'SelfHostedIntegrationRuntime', 'Managed': 'ManagedIntegrationRuntime'} } - def __init__(self, **kwargs): - super(IntegrationRuntime, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.description = kwargs.get('description', None) + def __init__(self, additional_properties=None, description=None): + super(IntegrationRuntime, self).__init__() + self.additional_properties = additional_properties + self.description = description self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys.py index 12ed6925585e..e0582ea5cdf7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys.py @@ -26,7 +26,7 @@ class IntegrationRuntimeAuthKeys(Model): 'auth_key2': {'key': 'authKey2', 'type': 'str'}, } - def __init__(self, **kwargs): - super(IntegrationRuntimeAuthKeys, self).__init__(**kwargs) - self.auth_key1 = kwargs.get('auth_key1', None) - self.auth_key2 = kwargs.get('auth_key2', None) + def __init__(self, auth_key1=None, auth_key2=None): + super(IntegrationRuntimeAuthKeys, self).__init__() + self.auth_key1 = auth_key1 + self.auth_key2 = auth_key2 diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys_py3.py deleted file mode 100644 index b807d4cd5b55..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys_py3.py +++ /dev/null @@ -1,32 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeAuthKeys(Model): - """The integration runtime authentication keys. - - :param auth_key1: The primary integration runtime authentication key. - :type auth_key1: str - :param auth_key2: The secondary integration runtime authentication key. 
- :type auth_key2: str - """ - - _attribute_map = { - 'auth_key1': {'key': 'authKey1', 'type': 'str'}, - 'auth_key2': {'key': 'authKey2', 'type': 'str'}, - } - - def __init__(self, *, auth_key1: str=None, auth_key2: str=None, **kwargs) -> None: - super(IntegrationRuntimeAuthKeys, self).__init__(**kwargs) - self.auth_key1 = auth_key1 - self.auth_key2 = auth_key2 diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties.py index e387ef4077f2..a88e698ca0ea 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties.py @@ -50,11 +50,11 @@ class IntegrationRuntimeComputeProperties(Model): 'v_net_properties': {'key': 'vNetProperties', 'type': 'IntegrationRuntimeVNetProperties'}, } - def __init__(self, **kwargs): - super(IntegrationRuntimeComputeProperties, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.location = kwargs.get('location', None) - self.node_size = kwargs.get('node_size', None) - self.number_of_nodes = kwargs.get('number_of_nodes', None) - self.max_parallel_executions_per_node = kwargs.get('max_parallel_executions_per_node', None) - self.v_net_properties = kwargs.get('v_net_properties', None) + def __init__(self, additional_properties=None, location=None, node_size=None, number_of_nodes=None, max_parallel_executions_per_node=None, v_net_properties=None): + super(IntegrationRuntimeComputeProperties, self).__init__() + self.additional_properties = additional_properties + self.location = location + self.node_size = node_size + self.number_of_nodes = number_of_nodes + self.max_parallel_executions_per_node = max_parallel_executions_per_node + self.v_net_properties = v_net_properties diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties_py3.py deleted file mode 100644 index f47f339dd067..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties_py3.py +++ /dev/null @@ -1,60 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeComputeProperties(Model): - """The compute resource properties for managed integration runtime. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param location: The location for managed integration runtime. 
The - supported regions could be found on - https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement-activities - :type location: str - :param node_size: The node size requirement to managed integration - runtime. - :type node_size: str - :param number_of_nodes: The required number of nodes for managed - integration runtime. - :type number_of_nodes: int - :param max_parallel_executions_per_node: Maximum parallel executions count - per node for managed integration runtime. - :type max_parallel_executions_per_node: int - :param v_net_properties: VNet properties for managed integration runtime. - :type v_net_properties: - ~azure.mgmt.datafactory.models.IntegrationRuntimeVNetProperties - """ - - _validation = { - 'number_of_nodes': {'minimum': 1}, - 'max_parallel_executions_per_node': {'minimum': 1}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'location': {'key': 'location', 'type': 'str'}, - 'node_size': {'key': 'nodeSize', 'type': 'str'}, - 'number_of_nodes': {'key': 'numberOfNodes', 'type': 'int'}, - 'max_parallel_executions_per_node': {'key': 'maxParallelExecutionsPerNode', 'type': 'int'}, - 'v_net_properties': {'key': 'vNetProperties', 'type': 'IntegrationRuntimeVNetProperties'}, - } - - def __init__(self, *, additional_properties=None, location: str=None, node_size: str=None, number_of_nodes: int=None, max_parallel_executions_per_node: int=None, v_net_properties=None, **kwargs) -> None: - super(IntegrationRuntimeComputeProperties, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.location = location - self.node_size = node_size - self.number_of_nodes = number_of_nodes - self.max_parallel_executions_per_node = max_parallel_executions_per_node - self.v_net_properties = v_net_properties diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info.py index c185f916e8e5..3bdb02304d52 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info.py @@ -59,9 +59,9 @@ class IntegrationRuntimeConnectionInfo(Model): 'is_identity_cert_exprired': {'key': 'isIdentityCertExprired', 'type': 'bool'}, } - def __init__(self, **kwargs): - super(IntegrationRuntimeConnectionInfo, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) + def __init__(self, additional_properties=None): + super(IntegrationRuntimeConnectionInfo, self).__init__() + self.additional_properties = additional_properties self.service_token = None self.identity_cert_thumbprint = None self.host_service_uri = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info_py3.py deleted file mode 100644 index 8cc5aceb16d7..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info_py3.py +++ /dev/null @@ -1,70 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeConnectionInfo(Model): - """Connection information for encrypting the on-premises data source - credentials. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar service_token: The token generated in service. Callers use this - token to authenticate to integration runtime. - :vartype service_token: str - :ivar identity_cert_thumbprint: The integration runtime SSL certificate - thumbprint. Click-Once application uses it to do server validation. - :vartype identity_cert_thumbprint: str - :ivar host_service_uri: The on-premises integration runtime host URL. - :vartype host_service_uri: str - :ivar version: The integration runtime version. - :vartype version: str - :ivar public_key: The public key for encrypting a credential when - transferring the credential to the integration runtime. - :vartype public_key: str - :ivar is_identity_cert_exprired: Whether the identity certificate is - expired. - :vartype is_identity_cert_exprired: bool - """ - - _validation = { - 'service_token': {'readonly': True}, - 'identity_cert_thumbprint': {'readonly': True}, - 'host_service_uri': {'readonly': True}, - 'version': {'readonly': True}, - 'public_key': {'readonly': True}, - 'is_identity_cert_exprired': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'service_token': {'key': 'serviceToken', 'type': 'str'}, - 'identity_cert_thumbprint': {'key': 'identityCertThumbprint', 'type': 'str'}, - 'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'}, - 'version': {'key': 'version', 'type': 'str'}, - 'public_key': {'key': 'publicKey', 'type': 'str'}, - 'is_identity_cert_exprired': {'key': 'isIdentityCertExprired', 'type': 'bool'}, - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(IntegrationRuntimeConnectionInfo, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.service_token = None - self.identity_cert_thumbprint = None - self.host_service_uri = None - self.version = None - self.public_key = None - self.is_identity_cert_exprired = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties.py index 44cd5fe5979b..b76cc5e39078 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties.py @@ -27,7 +27,7 @@ class IntegrationRuntimeCustomSetupScriptProperties(Model): 'sas_token': {'key': 'sasToken', 'type': 'SecureString'}, } - def __init__(self, **kwargs): - super(IntegrationRuntimeCustomSetupScriptProperties, self).__init__(**kwargs) - self.blob_container_uri = kwargs.get('blob_container_uri', None) - self.sas_token = 
kwargs.get('sas_token', None) + def __init__(self, blob_container_uri=None, sas_token=None): + super(IntegrationRuntimeCustomSetupScriptProperties, self).__init__() + self.blob_container_uri = blob_container_uri + self.sas_token = sas_token diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties_py3.py deleted file mode 100644 index 7f3c08c0b339..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties_py3.py +++ /dev/null @@ -1,33 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeCustomSetupScriptProperties(Model): - """Custom setup script properties for a managed dedicated integration runtime. - - :param blob_container_uri: The URI of the Azure blob container that - contains the custom setup script. - :type blob_container_uri: str - :param sas_token: The SAS token of the Azure blob container. - :type sas_token: ~azure.mgmt.datafactory.models.SecureString - """ - - _attribute_map = { - 'blob_container_uri': {'key': 'blobContainerUri', 'type': 'str'}, - 'sas_token': {'key': 'sasToken', 'type': 'SecureString'}, - } - - def __init__(self, *, blob_container_uri: str=None, sas_token=None, **kwargs) -> None: - super(IntegrationRuntimeCustomSetupScriptProperties, self).__init__(**kwargs) - self.blob_container_uri = blob_container_uri - self.sas_token = sas_token diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties.py index ebc0e9b38d6f..cf6f5ee8fb6e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties.py @@ -30,8 +30,8 @@ class IntegrationRuntimeDataProxyProperties(Model): 'path': {'key': 'path', 'type': 'str'}, } - def __init__(self, **kwargs): - super(IntegrationRuntimeDataProxyProperties, self).__init__(**kwargs) - self.connect_via = kwargs.get('connect_via', None) - self.staging_linked_service = kwargs.get('staging_linked_service', None) - self.path = kwargs.get('path', None) + def __init__(self, connect_via=None, staging_linked_service=None, path=None): + super(IntegrationRuntimeDataProxyProperties, self).__init__() + self.connect_via = connect_via + self.staging_linked_service = staging_linked_service + self.path = path diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties_py3.py deleted file mode 100644 index 532b774cad3d..000000000000 --- 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties_py3.py +++ /dev/null @@ -1,37 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeDataProxyProperties(Model): - """Data proxy properties for a managed dedicated integration runtime. - - :param connect_via: The self-hosted integration runtime reference. - :type connect_via: ~azure.mgmt.datafactory.models.EntityReference - :param staging_linked_service: The staging linked service reference. - :type staging_linked_service: - ~azure.mgmt.datafactory.models.EntityReference - :param path: The path to contain the staged data in the Blob storage. - :type path: str - """ - - _attribute_map = { - 'connect_via': {'key': 'connectVia', 'type': 'EntityReference'}, - 'staging_linked_service': {'key': 'stagingLinkedService', 'type': 'EntityReference'}, - 'path': {'key': 'path', 'type': 'str'}, - } - - def __init__(self, *, connect_via=None, staging_linked_service=None, path: str=None, **kwargs) -> None: - super(IntegrationRuntimeDataProxyProperties, self).__init__(**kwargs) - self.connect_via = connect_via - self.staging_linked_service = staging_linked_service - self.path = path diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data.py index f7b695729403..aa1feac333d5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data.py @@ -27,7 +27,7 @@ class IntegrationRuntimeMonitoringData(Model): 'nodes': {'key': 'nodes', 'type': '[IntegrationRuntimeNodeMonitoringData]'}, } - def __init__(self, **kwargs): - super(IntegrationRuntimeMonitoringData, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.nodes = kwargs.get('nodes', None) + def __init__(self, name=None, nodes=None): + super(IntegrationRuntimeMonitoringData, self).__init__() + self.name = name + self.nodes = nodes diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data_py3.py deleted file mode 100644 index 16f3b656c9cc..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data_py3.py +++ /dev/null @@ -1,33 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeMonitoringData(Model): - """Get monitoring data response. - - :param name: Integration runtime name. - :type name: str - :param nodes: Integration runtime node monitoring data. - :type nodes: - list[~azure.mgmt.datafactory.models.IntegrationRuntimeNodeMonitoringData] - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'nodes': {'key': 'nodes', 'type': '[IntegrationRuntimeNodeMonitoringData]'}, - } - - def __init__(self, *, name: str=None, nodes=None, **kwargs) -> None: - super(IntegrationRuntimeMonitoringData, self).__init__(**kwargs) - self.name = name - self.nodes = nodes diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address.py index 2edabd3e2472..a260924f1f16 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address.py @@ -30,6 +30,6 @@ class IntegrationRuntimeNodeIpAddress(Model): 'ip_address': {'key': 'ipAddress', 'type': 'str'}, } - def __init__(self, **kwargs): - super(IntegrationRuntimeNodeIpAddress, self).__init__(**kwargs) + def __init__(self): + super(IntegrationRuntimeNodeIpAddress, self).__init__() self.ip_address = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address_py3.py deleted file mode 100644 index 476be9815984..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address_py3.py +++ /dev/null @@ -1,35 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeNodeIpAddress(Model): - """The IP address of self-hosted integration runtime node. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar ip_address: The IP address of self-hosted integration runtime node. 
- :vartype ip_address: str - """ - - _validation = { - 'ip_address': {'readonly': True}, - } - - _attribute_map = { - 'ip_address': {'key': 'ipAddress', 'type': 'str'}, - } - - def __init__(self, **kwargs) -> None: - super(IntegrationRuntimeNodeIpAddress, self).__init__(**kwargs) - self.ip_address = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data.py index 9d27bedf70aa..9f35cc040884 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data.py @@ -66,9 +66,9 @@ class IntegrationRuntimeNodeMonitoringData(Model): 'received_bytes': {'key': 'receivedBytes', 'type': 'float'}, } - def __init__(self, **kwargs): - super(IntegrationRuntimeNodeMonitoringData, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) + def __init__(self, additional_properties=None): + super(IntegrationRuntimeNodeMonitoringData, self).__init__() + self.additional_properties = additional_properties self.node_name = None self.available_memory_in_mb = None self.cpu_utilization = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data_py3.py deleted file mode 100644 index 35c7e664b2ff..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data_py3.py +++ /dev/null @@ -1,79 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeNodeMonitoringData(Model): - """Monitoring data for integration runtime node. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar node_name: Name of the integration runtime node. - :vartype node_name: str - :ivar available_memory_in_mb: Available memory (MB) on the integration - runtime node. - :vartype available_memory_in_mb: int - :ivar cpu_utilization: CPU percentage on the integration runtime node. - :vartype cpu_utilization: int - :ivar concurrent_jobs_limit: Maximum concurrent jobs on the integration - runtime node. - :vartype concurrent_jobs_limit: int - :ivar concurrent_jobs_running: The number of jobs currently running on the - integration runtime node. - :vartype concurrent_jobs_running: int - :ivar max_concurrent_jobs: The maximum concurrent jobs in this integration - runtime. - :vartype max_concurrent_jobs: int - :ivar sent_bytes: Sent bytes on the integration runtime node. 
- :vartype sent_bytes: float - :ivar received_bytes: Received bytes on the integration runtime node. - :vartype received_bytes: float - """ - - _validation = { - 'node_name': {'readonly': True}, - 'available_memory_in_mb': {'readonly': True}, - 'cpu_utilization': {'readonly': True}, - 'concurrent_jobs_limit': {'readonly': True}, - 'concurrent_jobs_running': {'readonly': True}, - 'max_concurrent_jobs': {'readonly': True}, - 'sent_bytes': {'readonly': True}, - 'received_bytes': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'node_name': {'key': 'nodeName', 'type': 'str'}, - 'available_memory_in_mb': {'key': 'availableMemoryInMB', 'type': 'int'}, - 'cpu_utilization': {'key': 'cpuUtilization', 'type': 'int'}, - 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, - 'concurrent_jobs_running': {'key': 'concurrentJobsRunning', 'type': 'int'}, - 'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'}, - 'sent_bytes': {'key': 'sentBytes', 'type': 'float'}, - 'received_bytes': {'key': 'receivedBytes', 'type': 'float'}, - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(IntegrationRuntimeNodeMonitoringData, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.node_name = None - self.available_memory_in_mb = None - self.cpu_utilization = None - self.concurrent_jobs_limit = None - self.concurrent_jobs_running = None - self.max_concurrent_jobs = None - self.sent_bytes = None - self.received_bytes = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_py3.py deleted file mode 100644 index b4056a07591b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_py3.py +++ /dev/null @@ -1,51 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntime(Model): - """Azure Data Factory nested object which serves as a compute resource for - activities. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SelfHostedIntegrationRuntime, ManagedIntegrationRuntime - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Integration runtime description. - :type description: str - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SelfHosted': 'SelfHostedIntegrationRuntime', 'Managed': 'ManagedIntegrationRuntime'} - } - - def __init__(self, *, additional_properties=None, description: str=None, **kwargs) -> None: - super(IntegrationRuntime, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.description = description - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference.py index 7461d29de284..507b578a2cd8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference.py @@ -18,12 +18,10 @@ class IntegrationRuntimeReference(Model): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Type of integration runtime. Default value: + :ivar type: Type of integration runtime. Default value: "IntegrationRuntimeReference" . :vartype type: str - :param reference_name: Required. Reference integration runtime name. + :param reference_name: Reference integration runtime name. :type reference_name: str :param parameters: Arguments for integration runtime. :type parameters: dict[str, object] @@ -42,7 +40,7 @@ class IntegrationRuntimeReference(Model): type = "IntegrationRuntimeReference" - def __init__(self, **kwargs): - super(IntegrationRuntimeReference, self).__init__(**kwargs) - self.reference_name = kwargs.get('reference_name', None) - self.parameters = kwargs.get('parameters', None) + def __init__(self, reference_name, parameters=None): + super(IntegrationRuntimeReference, self).__init__() + self.reference_name = reference_name + self.parameters = parameters diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference_py3.py deleted file mode 100644 index 56fd3608ba61..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference_py3.py +++ /dev/null @@ -1,48 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeReference(Model): - """Integration runtime reference type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Type of integration runtime. Default value: - "IntegrationRuntimeReference" . 
- :vartype type: str - :param reference_name: Required. Reference integration runtime name. - :type reference_name: str - :param parameters: Arguments for integration runtime. - :type parameters: dict[str, object] - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, - } - - type = "IntegrationRuntimeReference" - - def __init__(self, *, reference_name: str, parameters=None, **kwargs) -> None: - super(IntegrationRuntimeReference, self).__init__(**kwargs) - self.reference_name = reference_name - self.parameters = parameters diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters.py index 3cd91195af1b..f8b4a57d8ff0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters.py @@ -25,6 +25,6 @@ class IntegrationRuntimeRegenerateKeyParameters(Model): 'key_name': {'key': 'keyName', 'type': 'str'}, } - def __init__(self, **kwargs): - super(IntegrationRuntimeRegenerateKeyParameters, self).__init__(**kwargs) - self.key_name = kwargs.get('key_name', None) + def __init__(self, key_name=None): + super(IntegrationRuntimeRegenerateKeyParameters, self).__init__() + self.key_name = key_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters_py3.py deleted file mode 100644 index f3846cf8ec55..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters_py3.py +++ /dev/null @@ -1,30 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeRegenerateKeyParameters(Model): - """Parameters to regenerate the authentication key. - - :param key_name: The name of the authentication key to regenerate. 
- Possible values include: 'authKey1', 'authKey2' - :type key_name: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeyName - """ - - _attribute_map = { - 'key_name': {'key': 'keyName', 'type': 'str'}, - } - - def __init__(self, *, key_name=None, **kwargs) -> None: - super(IntegrationRuntimeRegenerateKeyParameters, self).__init__(**kwargs) - self.key_name = key_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource.py index b18f376d3698..8568ed26cb1e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource.py @@ -18,8 +18,6 @@ class IntegrationRuntimeResource(SubResource): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. :vartype id: str :ivar name: The resource name. @@ -28,7 +26,7 @@ class IntegrationRuntimeResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param properties: Required. Integration runtime properties. + :param properties: Integration runtime properties. :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntime """ @@ -48,6 +46,6 @@ class IntegrationRuntimeResource(SubResource): 'properties': {'key': 'properties', 'type': 'IntegrationRuntime'}, } - def __init__(self, **kwargs): - super(IntegrationRuntimeResource, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) + def __init__(self, properties): + super(IntegrationRuntimeResource, self).__init__() + self.properties = properties diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource_py3.py deleted file mode 100644 index 9239f54166f9..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource_py3.py +++ /dev/null @@ -1,53 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .sub_resource_py3 import SubResource - - -class IntegrationRuntimeResource(SubResource): - """Integration runtime resource type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Integration runtime properties. 
- :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntime - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'IntegrationRuntime'}, - } - - def __init__(self, *, properties, **kwargs) -> None: - super(IntegrationRuntimeResource, self).__init__(**kwargs) - self.properties = properties diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info.py index 3399f8f38300..0c7e9dc74878 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info.py @@ -46,10 +46,10 @@ class IntegrationRuntimeSsisCatalogInfo(Model): 'catalog_pricing_tier': {'key': 'catalogPricingTier', 'type': 'str'}, } - def __init__(self, **kwargs): - super(IntegrationRuntimeSsisCatalogInfo, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.catalog_server_endpoint = kwargs.get('catalog_server_endpoint', None) - self.catalog_admin_user_name = kwargs.get('catalog_admin_user_name', None) - self.catalog_admin_password = kwargs.get('catalog_admin_password', None) - self.catalog_pricing_tier = kwargs.get('catalog_pricing_tier', None) + def __init__(self, additional_properties=None, catalog_server_endpoint=None, catalog_admin_user_name=None, catalog_admin_password=None, catalog_pricing_tier=None): + super(IntegrationRuntimeSsisCatalogInfo, self).__init__() + self.additional_properties = additional_properties + self.catalog_server_endpoint = catalog_server_endpoint + self.catalog_admin_user_name = catalog_admin_user_name + self.catalog_admin_password = catalog_admin_password + self.catalog_pricing_tier = catalog_pricing_tier diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info_py3.py deleted file mode 100644 index 27996bb4aeb5..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info_py3.py +++ /dev/null @@ -1,55 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeSsisCatalogInfo(Model): - """Catalog information for managed dedicated integration runtime. 
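(A short sketch of the catalog model whose docstring follows; the endpoint, user name and password are invented placeholders.)

from azure.mgmt.datafactory.models import (
    IntegrationRuntimeSsisCatalogInfo, SecureString)

catalog = IntegrationRuntimeSsisCatalogInfo(
    catalog_server_endpoint='myserver.database.windows.net',  # placeholder
    catalog_admin_user_name='ssisadmin',                      # placeholder
    catalog_admin_password=SecureString(value='<password>'),
    catalog_pricing_tier='Basic')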
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param catalog_server_endpoint: The catalog database server URL. - :type catalog_server_endpoint: str - :param catalog_admin_user_name: The administrator user name of catalog - database. - :type catalog_admin_user_name: str - :param catalog_admin_password: The password of the administrator user - account of the catalog database. - :type catalog_admin_password: ~azure.mgmt.datafactory.models.SecureString - :param catalog_pricing_tier: The pricing tier for the catalog database. - The valid values could be found in - https://azure.microsoft.com/en-us/pricing/details/sql-database/. Possible - values include: 'Basic', 'Standard', 'Premium', 'PremiumRS' - :type catalog_pricing_tier: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogPricingTier - """ - - _validation = { - 'catalog_admin_user_name': {'max_length': 128, 'min_length': 1}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'catalog_server_endpoint': {'key': 'catalogServerEndpoint', 'type': 'str'}, - 'catalog_admin_user_name': {'key': 'catalogAdminUserName', 'type': 'str'}, - 'catalog_admin_password': {'key': 'catalogAdminPassword', 'type': 'SecureString'}, - 'catalog_pricing_tier': {'key': 'catalogPricingTier', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, catalog_server_endpoint: str=None, catalog_admin_user_name: str=None, catalog_admin_password=None, catalog_pricing_tier=None, **kwargs) -> None: - super(IntegrationRuntimeSsisCatalogInfo, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.catalog_server_endpoint = catalog_server_endpoint - self.catalog_admin_user_name = catalog_admin_user_name - self.catalog_admin_password = catalog_admin_password - self.catalog_pricing_tier = catalog_pricing_tier diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py index 293f071aa0b3..3de1e2b2e9a3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py @@ -49,11 +49,11 @@ class IntegrationRuntimeSsisProperties(Model): 'edition': {'key': 'edition', 'type': 'str'}, } - def __init__(self, **kwargs): - super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.catalog_info = kwargs.get('catalog_info', None) - self.license_type = kwargs.get('license_type', None) - self.custom_setup_script_properties = kwargs.get('custom_setup_script_properties', None) - self.data_proxy_properties = kwargs.get('data_proxy_properties', None) - self.edition = kwargs.get('edition', None) + def __init__(self, additional_properties=None, catalog_info=None, license_type=None, custom_setup_script_properties=None, data_proxy_properties=None, edition=None): + super(IntegrationRuntimeSsisProperties, self).__init__() + self.additional_properties = additional_properties + self.catalog_info = catalog_info + self.license_type = license_type + self.custom_setup_script_properties = custom_setup_script_properties + self.data_proxy_properties = data_proxy_properties + self.edition 
= edition diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py deleted file mode 100644 index f75775e29a7f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py +++ /dev/null @@ -1,59 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeSsisProperties(Model): - """SSIS properties for managed integration runtime. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param catalog_info: Catalog information for managed dedicated integration - runtime. - :type catalog_info: - ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogInfo - :param license_type: License type for bringing your own license scenario. - Possible values include: 'BasePrice', 'LicenseIncluded' - :type license_type: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeLicenseType - :param custom_setup_script_properties: Custom setup script properties for - a managed dedicated integration runtime. - :type custom_setup_script_properties: - ~azure.mgmt.datafactory.models.IntegrationRuntimeCustomSetupScriptProperties - :param data_proxy_properties: Data proxy properties for a managed - dedicated integration runtime. - :type data_proxy_properties: - ~azure.mgmt.datafactory.models.IntegrationRuntimeDataProxyProperties - :param edition: The edition for the SSIS Integration Runtime. 
Possible - values include: 'Standard', 'Enterprise' - :type edition: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeEdition - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'catalog_info': {'key': 'catalogInfo', 'type': 'IntegrationRuntimeSsisCatalogInfo'}, - 'license_type': {'key': 'licenseType', 'type': 'str'}, - 'custom_setup_script_properties': {'key': 'customSetupScriptProperties', 'type': 'IntegrationRuntimeCustomSetupScriptProperties'}, - 'data_proxy_properties': {'key': 'dataProxyProperties', 'type': 'IntegrationRuntimeDataProxyProperties'}, - 'edition': {'key': 'edition', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, catalog_info=None, license_type=None, custom_setup_script_properties=None, data_proxy_properties=None, edition=None, **kwargs) -> None: - super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.catalog_info = catalog_info - self.license_type = license_type - self.custom_setup_script_properties = custom_setup_script_properties - self.data_proxy_properties = data_proxy_properties - self.edition = edition diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status.py index 64da6347f9ed..b2645851fb8f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status.py @@ -22,8 +22,6 @@ class IntegrationRuntimeStatus(Model): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -35,7 +33,7 @@ class IntegrationRuntimeStatus(Model): 'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied' :vartype state: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeState - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str """ @@ -56,9 +54,9 @@ class IntegrationRuntimeStatus(Model): 'type': {'SelfHosted': 'SelfHostedIntegrationRuntimeStatus', 'Managed': 'ManagedIntegrationRuntimeStatus'} } - def __init__(self, **kwargs): - super(IntegrationRuntimeStatus, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) + def __init__(self, additional_properties=None): + super(IntegrationRuntimeStatus, self).__init__() + self.additional_properties = additional_properties self.data_factory_name = None self.state = None self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response.py index 9382b4b08fde..23d5c95fcd28 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response.py @@ -15,9 +15,7 @@ class IntegrationRuntimeStatusListResponse(Model): """A list of integration runtime status. 
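(Composing the SSIS properties above, using only arguments this diff shows; catalog_info would take an IntegrationRuntimeSsisCatalogInfo such as the one sketched earlier.)

from azure.mgmt.datafactory.models import IntegrationRuntimeSsisProperties

ssis = IntegrationRuntimeSsisProperties(
    license_type='LicenseIncluded',  # or 'BasePrice' for bring-your-own-license
    edition='Standard')              # or 'Enterprise'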
- All required parameters must be populated in order to send to Azure. - - :param value: Required. List of integration runtime status. + :param value: List of integration runtime status. :type value: list[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse] :param next_link: The link to the next page of results, if any remaining @@ -34,7 +32,7 @@ class IntegrationRuntimeStatusListResponse(Model): 'next_link': {'key': 'nextLink', 'type': 'str'}, } - def __init__(self, **kwargs): - super(IntegrationRuntimeStatusListResponse, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.next_link = kwargs.get('next_link', None) + def __init__(self, value, next_link=None): + super(IntegrationRuntimeStatusListResponse, self).__init__() + self.value = value + self.next_link = next_link diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response_py3.py deleted file mode 100644 index bed71f74ffc6..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response_py3.py +++ /dev/null @@ -1,40 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeStatusListResponse(Model): - """A list of integration runtime status. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of integration runtime status. - :type value: - list[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse] - :param next_link: The link to the next page of results, if any remaining - results exist. - :type next_link: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[IntegrationRuntimeStatusResponse]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__(self, *, value, next_link: str=None, **kwargs) -> None: - super(IntegrationRuntimeStatusListResponse, self).__init__(**kwargs) - self.value = value - self.next_link = next_link diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_py3.py deleted file mode 100644 index 8541e04dc679..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_py3.py +++ /dev/null @@ -1,64 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeStatus(Model): - """Integration runtime status. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SelfHostedIntegrationRuntimeStatus, - ManagedIntegrationRuntimeStatus - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar data_factory_name: The data factory name which the integration - runtime belong to. - :vartype data_factory_name: str - :ivar state: The state of integration runtime. Possible values include: - 'Initial', 'Stopped', 'Started', 'Starting', 'Stopping', - 'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied' - :vartype state: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeState - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'data_factory_name': {'readonly': True}, - 'state': {'readonly': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, - 'state': {'key': 'state', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SelfHosted': 'SelfHostedIntegrationRuntimeStatus', 'Managed': 'ManagedIntegrationRuntimeStatus'} - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(IntegrationRuntimeStatus, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.data_factory_name = None - self.state = None - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response.py index 901b4d8b7442..89b045642459 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response.py @@ -18,11 +18,9 @@ class IntegrationRuntimeStatusResponse(Model): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :ivar name: The integration runtime name. :vartype name: str - :param properties: Required. Integration runtime properties. + :param properties: Integration runtime properties. 
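(Because of the _subtype_map above, IntegrationRuntimeStatus is never instantiated directly; msrest resolves the concrete class from the 'type' discriminator. A runnable sketch of that resolution, with an invented wire payload:)

from msrest import Deserializer

import azure.mgmt.datafactory.models as models

# Build the class lookup the generated client itself uses for deserialization.
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
deserialize = Deserializer(client_models)

payload = {'type': 'SelfHosted', 'state': 'Online'}  # invented payload
status = deserialize('IntegrationRuntimeStatus', payload)
print(type(status).__name__)  # SelfHostedIntegrationRuntimeStatus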
:type properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatus """ @@ -36,7 +34,7 @@ class IntegrationRuntimeStatusResponse(Model): 'properties': {'key': 'properties', 'type': 'IntegrationRuntimeStatus'}, } - def __init__(self, **kwargs): - super(IntegrationRuntimeStatusResponse, self).__init__(**kwargs) + def __init__(self, properties): + super(IntegrationRuntimeStatusResponse, self).__init__() self.name = None - self.properties = kwargs.get('properties', None) + self.properties = properties diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response_py3.py deleted file mode 100644 index 64d84a1e4f19..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response_py3.py +++ /dev/null @@ -1,42 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeStatusResponse(Model): - """Integration runtime status response. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar name: The integration runtime name. - :vartype name: str - :param properties: Required. Integration runtime properties. 
- :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatus - """ - - _validation = { - 'name': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'IntegrationRuntimeStatus'}, - } - - def __init__(self, *, properties, **kwargs) -> None: - super(IntegrationRuntimeStatusResponse, self).__init__(**kwargs) - self.name = None - self.properties = properties diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties.py index 752b5b99eb60..702723a2f067 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties.py @@ -31,8 +31,8 @@ class IntegrationRuntimeVNetProperties(Model): 'subnet': {'key': 'subnet', 'type': 'str'}, } - def __init__(self, **kwargs): - super(IntegrationRuntimeVNetProperties, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.v_net_id = kwargs.get('v_net_id', None) - self.subnet = kwargs.get('subnet', None) + def __init__(self, additional_properties=None, v_net_id=None, subnet=None): + super(IntegrationRuntimeVNetProperties, self).__init__() + self.additional_properties = additional_properties + self.v_net_id = v_net_id + self.subnet = subnet diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties_py3.py deleted file mode 100644 index 32e8beb31ea1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties_py3.py +++ /dev/null @@ -1,38 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class IntegrationRuntimeVNetProperties(Model): - """VNet properties for managed integration runtime. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param v_net_id: The ID of the VNet that this integration runtime will - join. - :type v_net_id: str - :param subnet: The name of the subnet this integration runtime will join. 
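(A sketch of the VNet join model above; the resource ID and subnet name are placeholders.)

from azure.mgmt.datafactory.models import IntegrationRuntimeVNetProperties

vnet = IntegrationRuntimeVNetProperties(
    v_net_id='/subscriptions/<sub>/resourceGroups/<rg>'
             '/providers/Microsoft.Network/virtualNetworks/<vnet>',
    subnet='default')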
- :type subnet: str - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'v_net_id': {'key': 'vNetId', 'type': 'str'}, - 'subnet': {'key': 'subnet', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, v_net_id: str=None, subnet: str=None, **kwargs) -> None: - super(IntegrationRuntimeVNetProperties, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.v_net_id = v_net_id - self.subnet = subnet diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service.py index 517cdd63caa5..4f6d09869599 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service.py @@ -15,8 +15,6 @@ class JiraLinkedService(LinkedService): """Jira Service linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,17 +29,16 @@ class JiraLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param host: Required. The IP address or host name of the Jira service. - (e.g. jira.example.com) + :param host: The IP address or host name of the Jira service. (e.g. + jira.example.com) :type host: object :param port: The TCP port that the Jira server uses to listen for client connections. The default value is 443 if connecting through HTTPS, or 8080 if connecting through HTTP. :type port: object - :param username: Required. The user name that you use to access Jira - Service. + :param username: The user name that you use to access Jira Service. :type username: object :param password: The password corresponding to the user name that you provided in the username field. 
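(Sketch of the reshaped JiraLinkedService constructor: host and username stay required while everything else is optional; the endpoint and credentials here are invented.)

from azure.mgmt.datafactory.models import JiraLinkedService, SecureString

jira = JiraLinkedService(
    host='jira.example.com',
    username='build-bot',
    password=SecureString(value='<secret>'))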
@@ -85,14 +82,14 @@ class JiraLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(JiraLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.port = kwargs.get('port', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, host, username, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, port=None, password=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None): + super(JiraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.host = host + self.port = port + self.username = username + self.password = password + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential self.type = 'Jira' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service_py3.py deleted file mode 100644 index 82dc8d578da3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service_py3.py +++ /dev/null @@ -1,98 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class JiraLinkedService(LinkedService): - """Jira Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The IP address or host name of the Jira service. - (e.g. jira.example.com) - :type host: object - :param port: The TCP port that the Jira server uses to listen for client - connections. 
The default value is 443 if connecting through HTTPS, or 8080 - if connecting through HTTP. - :type port: object - :param username: Required. The user name that you use to access Jira - Service. - :type username: object - :param password: The password corresponding to the user name that you - provided in the username field. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'username': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, password=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(JiraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.port = port - self.username = username - self.password = password - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'Jira' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset.py index 1c2b12c18e15..d8533fbefd38 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset.py +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset.py @@ -15,8 +15,6 @@ class JiraObjectDataset(Dataset): """Jira Service dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class JiraObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class JiraObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -66,7 +64,7 @@ class JiraObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(JiraObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): + super(JiraObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name self.type = 'JiraObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset_py3.py deleted file mode 100644 index 3c061b238cde..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class JiraObjectDataset(Dataset): - """Jira Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. 
Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(JiraObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'JiraObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source.py index 709da0ce1205..86a0f16222da 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source.py @@ -15,8 +15,6 @@ class JiraSource(CopySource): """A copy activity Jira Service source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class JiraSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
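(A matching dataset sketch; that LinkedServiceReference takes a reference_name is an assumption carried over from the rest of this package, and both names are placeholders.)

from azure.mgmt.datafactory.models import (
    JiraObjectDataset, LinkedServiceReference)

issues = JiraObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='JiraService1'),
    table_name='Issues')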
@@ -51,7 +49,7 @@ class JiraSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(JiraSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'JiraSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source_py3.py deleted file mode 100644 index c958c8351bb3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class JiraSource(CopySource): - """A copy activity Jira Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
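(And the copy-activity source, where only the optional query is supplied; the query text is a placeholder.)

from azure.mgmt.datafactory.models import JiraSource

source = JiraSource(query='select * from Issues')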
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'JiraSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_dataset.py index c1cee8f00b8d..8626e76359c9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_dataset.py @@ -15,8 +15,6 @@ class JsonDataset(Dataset): """Json dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class JsonDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,9 +39,9 @@ class JsonDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param location: Required. The location of the json data storage. + :param location: The location of the json data storage. :type location: ~azure.mgmt.datafactory.models.DatasetLocation :param encoding_name: The code page name of the preferred encoding. 
If not specified, the default value is UTF-8, unless BOM denotes another Unicode @@ -77,9 +75,9 @@ class JsonDataset(Dataset): 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } - def __init__(self, **kwargs): - super(JsonDataset, self).__init__(**kwargs) - self.location = kwargs.get('location', None) - self.encoding_name = kwargs.get('encoding_name', None) - self.compression = kwargs.get('compression', None) + def __init__(self, linked_service_name, location, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, encoding_name=None, compression=None): + super(JsonDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.location = location + self.encoding_name = encoding_name + self.compression = compression self.type = 'Json' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_dataset_py3.py deleted file mode 100644 index 564fe3bebf6b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_dataset_py3.py +++ /dev/null @@ -1,85 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class JsonDataset(Dataset): - """Json dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param location: Required. The location of the json data storage. - :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param encoding_name: The code page name of the preferred encoding. 
If not - specified, the default value is UTF-8, unless BOM denotes another Unicode - encoding. Refer to the name column of the table in the following link to - set supported values: - https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string - (or Expression with resultType string). - :type encoding_name: object - :param compression: The data compression method used for the json dataset. - :type compression: ~azure.mgmt.datafactory.models.DatasetCompression - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'location': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, - } - - def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, encoding_name=None, compression=None, **kwargs) -> None: - super(JsonDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.location = location - self.encoding_name = encoding_name - self.compression = compression - self.type = 'Json' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py index 80f4ff0aaf8b..8a50bef90c81 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py @@ -15,8 +15,6 @@ class JsonFormat(DatasetStorageFormat): """The data stored in JSON format. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -26,7 +24,7 @@ class JsonFormat(DatasetStorageFormat): :param deserializer: Deserializer. Type: string (or Expression with resultType string). :type deserializer: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param file_pattern: File pattern of JSON. To be more specific, the way of separating a collection of JSON objects. 
The default value is @@ -72,11 +70,11 @@ class JsonFormat(DatasetStorageFormat): 'json_path_definition': {'key': 'jsonPathDefinition', 'type': 'object'}, } - def __init__(self, **kwargs): - super(JsonFormat, self).__init__(**kwargs) - self.file_pattern = kwargs.get('file_pattern', None) - self.nesting_separator = kwargs.get('nesting_separator', None) - self.encoding_name = kwargs.get('encoding_name', None) - self.json_node_reference = kwargs.get('json_node_reference', None) - self.json_path_definition = kwargs.get('json_path_definition', None) + def __init__(self, additional_properties=None, serializer=None, deserializer=None, file_pattern=None, nesting_separator=None, encoding_name=None, json_node_reference=None, json_path_definition=None): + super(JsonFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer) + self.file_pattern = file_pattern + self.nesting_separator = nesting_separator + self.encoding_name = encoding_name + self.json_node_reference = json_node_reference + self.json_path_definition = json_path_definition self.type = 'JsonFormat' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py deleted file mode 100644 index 2fdb44cc3b7f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_storage_format_py3 import DatasetStorageFormat - - -class JsonFormat(DatasetStorageFormat): - """The data stored in JSON format. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param serializer: Serializer. Type: string (or Expression with resultType - string). - :type serializer: object - :param deserializer: Deserializer. Type: string (or Expression with - resultType string). - :type deserializer: object - :param type: Required. Constant filled by server. - :type type: str - :param file_pattern: File pattern of JSON. To be more specific, the way of - separating a collection of JSON objects. The default value is - 'setOfObjects'. It is case-sensitive. - :type file_pattern: object - :param nesting_separator: The character used to separate nesting levels. - Default value is '.' (dot). Type: string (or Expression with resultType - string). - :type nesting_separator: object - :param encoding_name: The code page name of the preferred encoding. If not - provided, the default value is 'utf-8', unless the byte order mark (BOM) - denotes another Unicode encoding. The full list of supported values can be - found in the 'Name' column of the table of encodings in the following - reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string - (or Expression with resultType string). 
- :type encoding_name: object - :param json_node_reference: The JSONPath of the JSON array element to be - flattened. Example: "$.ArrayPath". Type: string (or Expression with - resultType string). - :type json_node_reference: object - :param json_path_definition: The JSONPath definition for each column - mapping with a customized column name to extract data from JSON file. For - fields under root object, start with "$"; for fields inside the array - chosen by jsonNodeReference property, start from the array element. - Example: {"Column1": "$.Column1Path", "Column2": "Column2PathInArray"}. - Type: object (or Expression with resultType object). - :type json_path_definition: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'file_pattern': {'key': 'filePattern', 'type': 'object'}, - 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, - 'encoding_name': {'key': 'encodingName', 'type': 'object'}, - 'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'}, - 'json_path_definition': {'key': 'jsonPathDefinition', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, file_pattern=None, nesting_separator=None, encoding_name=None, json_node_reference=None, json_path_definition=None, **kwargs) -> None: - super(JsonFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) - self.file_pattern = file_pattern - self.nesting_separator = nesting_separator - self.encoding_name = encoding_name - self.json_node_reference = json_node_reference - self.json_path_definition = json_path_definition - self.type = 'JsonFormat' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_sink.py index 829344338672..93c9675fce43 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_sink.py @@ -15,8 +15,6 @@ class JsonSink(CopySink): """A copy activity Json sink. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -38,7 +36,7 @@ class JsonSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param store_settings: Json store settings. 
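(The jsonNodeReference / jsonPathDefinition pairing documented above is easiest to see in code; this sketch reuses the docstring's own sample paths.)

from azure.mgmt.datafactory.models import JsonFormat

fmt = JsonFormat(
    file_pattern='setOfObjects',
    json_node_reference='$.ArrayPath',
    json_path_definition={'Column1': '$.Column1Path',
                          'Column2': 'Column2PathInArray'})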
:type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings @@ -62,8 +60,8 @@ class JsonSink(CopySink): 'format_settings': {'key': 'formatSettings', 'type': 'JsonWriteSettings'}, } - def __init__(self, **kwargs): - super(JsonSink, self).__init__(**kwargs) - self.store_settings = kwargs.get('store_settings', None) - self.format_settings = kwargs.get('format_settings', None) + def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None): + super(JsonSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.store_settings = store_settings + self.format_settings = format_settings self.type = 'JsonSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_sink_py3.py deleted file mode 100644 index 3212bb4784d8..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_sink_py3.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class JsonSink(CopySink): - """A copy activity Json sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: Json store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - :param format_settings: Json format settings. 
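# A short sketch of the JsonSink signature above, with plausible values;
# every parameter is an optional keyword now, and the 'type' discriminator
# is pinned to 'JsonSink' by the constructor itself.
from azure.mgmt.datafactory.models import JsonSink

sink = JsonSink(
    write_batch_size=10000,           # Type: integer, minimum 0
    write_batch_timeout='00:05:00',   # matches the documented hh:mm:ss pattern
    max_concurrent_connections=4,
)
assert sink.type == 'JsonSink'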
- :type format_settings: ~azure.mgmt.datafactory.models.JsonWriteSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, - 'format_settings': {'key': 'formatSettings', 'type': 'JsonWriteSettings'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: - super(JsonSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.store_settings = store_settings - self.format_settings = format_settings - self.type = 'JsonSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_source.py index a3349ee1d39e..88babd043c9d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_source.py @@ -15,8 +15,6 @@ class JsonSource(CopySource): """A copy activity Json source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class JsonSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param store_settings: Json store settings. 
:type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings @@ -50,7 +48,7 @@ class JsonSource(CopySource): 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, } - def __init__(self, **kwargs): - super(JsonSource, self).__init__(**kwargs) - self.store_settings = kwargs.get('store_settings', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None): + super(JsonSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.store_settings = store_settings self.type = 'JsonSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_source_py3.py deleted file mode 100644 index 7e5b73662801..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_source_py3.py +++ /dev/null @@ -1,56 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class JsonSource(CopySource): - """A copy activity Json source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: Json store settings. 
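# Likewise for JsonSource, a hedged sketch with illustrative values;
# store_settings would take a concrete StoreReadSettings subclass, which
# this part of the patch does not show, so it is omitted here.
from azure.mgmt.datafactory.models import JsonSource

src = JsonSource(
    source_retry_count=3,
    source_retry_wait='00:00:30',     # documented hh:mm:ss pattern
    max_concurrent_connections=4,
)
assert src.type == 'JsonSource'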
- :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: - super(JsonSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.store_settings = store_settings - self.type = 'JsonSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_write_settings.py index 287da9805170..726307c33016 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_write_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_write_settings.py @@ -15,12 +15,10 @@ class JsonWriteSettings(FormatWriteSettings): """Json write settings. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. The write setting type. + :param type: The write setting type. :type type: str :param file_pattern: File pattern of JSON. This setting controls the way a collection of JSON objects will be treated. The default value is @@ -40,6 +38,6 @@ class JsonWriteSettings(FormatWriteSettings): 'file_pattern': {'key': 'filePattern', 'type': 'str'}, } - def __init__(self, **kwargs): - super(JsonWriteSettings, self).__init__(**kwargs) - self.file_pattern = kwargs.get('file_pattern', None) + def __init__(self, type, additional_properties=None, file_pattern=None): + super(JsonWriteSettings, self).__init__(additional_properties=additional_properties, type=type) + self.file_pattern = file_pattern diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_write_settings_py3.py deleted file mode 100644 index f78f57eb1187..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_write_settings_py3.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .format_write_settings_py3 import FormatWriteSettings - - -class JsonWriteSettings(FormatWriteSettings): - """Json write settings. 
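# JsonWriteSettings now takes its required 'type' first. A sketch under the
# assumption that the discriminator string is 'JsonWriteSettings' (the patch
# itself only says the field is "The write setting type"):
from azure.mgmt.datafactory.models import JsonWriteSettings

settings = JsonWriteSettings(
    'JsonWriteSettings',              # required, positional after this change
    file_pattern='arrayOfObjects',    # case-sensitive: 'setOfObjects' | 'arrayOfObjects'
)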
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str - :param file_pattern: File pattern of JSON. This setting controls the way a - collection of JSON objects will be treated. The default value is - 'setOfObjects'. It is case-sensitive. Possible values include: - 'setOfObjects', 'arrayOfObjects' - :type file_pattern: str or - ~azure.mgmt.datafactory.models.JsonWriteFilePattern - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'file_pattern': {'key': 'filePattern', 'type': 'str'}, - } - - def __init__(self, *, type: str, additional_properties=None, file_pattern=None, **kwargs) -> None: - super(JsonWriteSettings, self).__init__(additional_properties=additional_properties, type=type, **kwargs) - self.file_pattern = file_pattern diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime.py index f4a4e7eb8bf0..1a733032b07e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime.py @@ -49,8 +49,8 @@ class LinkedIntegrationRuntime(Model): 'create_time': {'key': 'createTime', 'type': 'iso-8601'}, } - def __init__(self, **kwargs): - super(LinkedIntegrationRuntime, self).__init__(**kwargs) + def __init__(self): + super(LinkedIntegrationRuntime, self).__init__() self.name = None self.subscription_id = None self.data_factory_name = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization.py index b7be47e8f096..586f4bd348f0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization.py @@ -15,11 +15,9 @@ class LinkedIntegrationRuntimeKeyAuthorization(LinkedIntegrationRuntimeType): """The key authorization type integration runtime. - All required parameters must be populated in order to send to Azure. - - :param authorization_type: Required. Constant filled by server. + :param authorization_type: Constant filled by server. :type authorization_type: str - :param key: Required. The key used for authorization. + :param key: The key used for authorization. 
:type key: ~azure.mgmt.datafactory.models.SecureString """ @@ -33,7 +31,7 @@ class LinkedIntegrationRuntimeKeyAuthorization(LinkedIntegrationRuntimeType): 'key': {'key': 'key', 'type': 'SecureString'}, } - def __init__(self, **kwargs): - super(LinkedIntegrationRuntimeKeyAuthorization, self).__init__(**kwargs) - self.key = kwargs.get('key', None) + def __init__(self, key): + super(LinkedIntegrationRuntimeKeyAuthorization, self).__init__() + self.key = key self.authorization_type = 'Key' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization_py3.py deleted file mode 100644 index 4a2ebd8d1003..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization_py3.py +++ /dev/null @@ -1,39 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_integration_runtime_type_py3 import LinkedIntegrationRuntimeType - - -class LinkedIntegrationRuntimeKeyAuthorization(LinkedIntegrationRuntimeType): - """The key authorization type integration runtime. - - All required parameters must be populated in order to send to Azure. - - :param authorization_type: Required. Constant filled by server. - :type authorization_type: str - :param key: Required. The key used for authorization. - :type key: ~azure.mgmt.datafactory.models.SecureString - """ - - _validation = { - 'authorization_type': {'required': True}, - 'key': {'required': True}, - } - - _attribute_map = { - 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, - 'key': {'key': 'key', 'type': 'SecureString'}, - } - - def __init__(self, *, key, **kwargs) -> None: - super(LinkedIntegrationRuntimeKeyAuthorization, self).__init__(**kwargs) - self.key = key - self.authorization_type = 'Key' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_py3.py deleted file mode 100644 index 6c831ab5f511..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_py3.py +++ /dev/null @@ -1,58 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class LinkedIntegrationRuntime(Model): - """The linked integration runtime information. - - Variables are only populated by the server, and will be ignored when - sending a request. 
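# Sketch of the key-based authorization above; 'key' is now the single
# required, positional argument. SecureString is assumed to take the secret
# via its 'value' parameter, as elsewhere in this SDK.
from azure.mgmt.datafactory.models import (
    LinkedIntegrationRuntimeKeyAuthorization, SecureString)

auth = LinkedIntegrationRuntimeKeyAuthorization(
    SecureString(value='<integration-runtime-auth-key>'))
assert auth.authorization_type == 'Key'   # pinned by the constructor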
- - :ivar name: The name of the linked integration runtime. - :vartype name: str - :ivar subscription_id: The subscription ID for which the linked - integration runtime belong to. - :vartype subscription_id: str - :ivar data_factory_name: The name of the data factory for which the linked - integration runtime belong to. - :vartype data_factory_name: str - :ivar data_factory_location: The location of the data factory for which - the linked integration runtime belong to. - :vartype data_factory_location: str - :ivar create_time: The creating time of the linked integration runtime. - :vartype create_time: datetime - """ - - _validation = { - 'name': {'readonly': True}, - 'subscription_id': {'readonly': True}, - 'data_factory_name': {'readonly': True}, - 'data_factory_location': {'readonly': True}, - 'create_time': {'readonly': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, - 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, - 'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'}, - 'create_time': {'key': 'createTime', 'type': 'iso-8601'}, - } - - def __init__(self, **kwargs) -> None: - super(LinkedIntegrationRuntime, self).__init__(**kwargs) - self.name = None - self.subscription_id = None - self.data_factory_name = None - self.data_factory_location = None - self.create_time = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization.py index 3fbc8dd9cac2..e5b1d30fe428 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization.py @@ -16,12 +16,10 @@ class LinkedIntegrationRuntimeRbacAuthorization(LinkedIntegrationRuntimeType): """The role based access control (RBAC) authorization type integration runtime. - All required parameters must be populated in order to send to Azure. - - :param authorization_type: Required. Constant filled by server. + :param authorization_type: Constant filled by server. :type authorization_type: str - :param resource_id: Required. The resource identifier of the integration - runtime to be shared. + :param resource_id: The resource identifier of the integration runtime to + be shared. 
:type resource_id: str """ @@ -35,7 +33,7 @@ class LinkedIntegrationRuntimeRbacAuthorization(LinkedIntegrationRuntimeType): 'resource_id': {'key': 'resourceId', 'type': 'str'}, } - def __init__(self, **kwargs): - super(LinkedIntegrationRuntimeRbacAuthorization, self).__init__(**kwargs) - self.resource_id = kwargs.get('resource_id', None) + def __init__(self, resource_id): + super(LinkedIntegrationRuntimeRbacAuthorization, self).__init__() + self.resource_id = resource_id self.authorization_type = 'RBAC' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization_py3.py deleted file mode 100644 index 055b64809e18..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization_py3.py +++ /dev/null @@ -1,41 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_integration_runtime_type_py3 import LinkedIntegrationRuntimeType - - -class LinkedIntegrationRuntimeRbacAuthorization(LinkedIntegrationRuntimeType): - """The role based access control (RBAC) authorization type integration - runtime. - - All required parameters must be populated in order to send to Azure. - - :param authorization_type: Required. Constant filled by server. - :type authorization_type: str - :param resource_id: Required. The resource identifier of the integration - runtime to be shared. - :type resource_id: str - """ - - _validation = { - 'authorization_type': {'required': True}, - 'resource_id': {'required': True}, - } - - _attribute_map = { - 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - } - - def __init__(self, *, resource_id: str, **kwargs) -> None: - super(LinkedIntegrationRuntimeRbacAuthorization, self).__init__(**kwargs) - self.resource_id = resource_id - self.authorization_type = 'RBAC' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request.py index 807757332b3e..983352e73d39 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request.py @@ -15,10 +15,8 @@ class LinkedIntegrationRuntimeRequest(Model): """Data factory name for linked integration runtime request. - All required parameters must be populated in order to send to Azure. - - :param linked_factory_name: Required. The data factory name for linked - integration runtime. + :param linked_factory_name: The data factory name for linked integration + runtime. 
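# The RBAC variant mirrors the key-based one, with the required resource_id
# passed positionally; the ARM path below is a hypothetical placeholder for
# the integration runtime being shared.
from azure.mgmt.datafactory.models import (
    LinkedIntegrationRuntimeRbacAuthorization)

auth = LinkedIntegrationRuntimeRbacAuthorization(
    '/subscriptions/<sub-id>/resourceGroups/<rg>/providers/'
    'Microsoft.DataFactory/factories/<factory>/integrationRuntimes/<ir>')
assert auth.authorization_type == 'RBAC'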
:type linked_factory_name: str """ @@ -30,6 +28,6 @@ class LinkedIntegrationRuntimeRequest(Model): 'linked_factory_name': {'key': 'factoryName', 'type': 'str'}, } - def __init__(self, **kwargs): - super(LinkedIntegrationRuntimeRequest, self).__init__(**kwargs) - self.linked_factory_name = kwargs.get('linked_factory_name', None) + def __init__(self, linked_factory_name): + super(LinkedIntegrationRuntimeRequest, self).__init__() + self.linked_factory_name = linked_factory_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request_py3.py deleted file mode 100644 index 45362ab63ba3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request_py3.py +++ /dev/null @@ -1,35 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class LinkedIntegrationRuntimeRequest(Model): - """Data factory name for linked integration runtime request. - - All required parameters must be populated in order to send to Azure. - - :param linked_factory_name: Required. The data factory name for linked - integration runtime. - :type linked_factory_name: str - """ - - _validation = { - 'linked_factory_name': {'required': True}, - } - - _attribute_map = { - 'linked_factory_name': {'key': 'factoryName', 'type': 'str'}, - } - - def __init__(self, *, linked_factory_name: str, **kwargs) -> None: - super(LinkedIntegrationRuntimeRequest, self).__init__(**kwargs) - self.linked_factory_name = linked_factory_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type.py index 446395bb9cbf..a8127e3bbfd3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type.py @@ -19,9 +19,7 @@ class LinkedIntegrationRuntimeType(Model): sub-classes are: LinkedIntegrationRuntimeRbacAuthorization, LinkedIntegrationRuntimeKeyAuthorization - All required parameters must be populated in order to send to Azure. - - :param authorization_type: Required. Constant filled by server. + :param authorization_type: Constant filled by server. 
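# LinkedIntegrationRuntimeRequest keeps a single required argument; note
# from the attribute map above that linked_factory_name serializes to the
# wire key 'factoryName'. Illustrative name below.
from azure.mgmt.datafactory.models import LinkedIntegrationRuntimeRequest

req = LinkedIntegrationRuntimeRequest('<linked-data-factory-name>')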
:type authorization_type: str """ @@ -37,6 +35,6 @@ class LinkedIntegrationRuntimeType(Model): 'authorization_type': {'RBAC': 'LinkedIntegrationRuntimeRbacAuthorization', 'Key': 'LinkedIntegrationRuntimeKeyAuthorization'} } - def __init__(self, **kwargs): - super(LinkedIntegrationRuntimeType, self).__init__(**kwargs) + def __init__(self): + super(LinkedIntegrationRuntimeType, self).__init__() self.authorization_type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type_py3.py deleted file mode 100644 index 79468dc450d2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type_py3.py +++ /dev/null @@ -1,42 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class LinkedIntegrationRuntimeType(Model): - """The base definition of a linked integration runtime. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: LinkedIntegrationRuntimeRbacAuthorization, - LinkedIntegrationRuntimeKeyAuthorization - - All required parameters must be populated in order to send to Azure. - - :param authorization_type: Required. Constant filled by server. - :type authorization_type: str - """ - - _validation = { - 'authorization_type': {'required': True}, - } - - _attribute_map = { - 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, - } - - _subtype_map = { - 'authorization_type': {'RBAC': 'LinkedIntegrationRuntimeRbacAuthorization', 'Key': 'LinkedIntegrationRuntimeKeyAuthorization'} - } - - def __init__(self, **kwargs) -> None: - super(LinkedIntegrationRuntimeType, self).__init__(**kwargs) - self.authorization_type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py index 2778a33fbb5a..55eb7c94bbea 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py @@ -55,8 +55,6 @@ class LinkedService(Model): AzureTableStorageLinkedService, AzureBlobStorageLinkedService, AzureStorageLinkedService - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -71,7 +69,7 @@ class LinkedService(Model): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. 
:type type: str """ @@ -92,11 +90,11 @@ class LinkedService(Model): 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 
'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} } - def __init__(self, **kwargs): - super(LinkedService, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.connect_via = kwargs.get('connect_via', None) - self.description = kwargs.get('description', None) - self.parameters = kwargs.get('parameters', None) - self.annotations = kwargs.get('annotations', None) + def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None): + super(LinkedService, self).__init__() + self.additional_properties = additional_properties + self.connect_via = connect_via + self.description = description + self.parameters = parameters + self.annotations = annotations self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py deleted file mode 100644 index 2b3e475c3075..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py +++ /dev/null @@ -1,102 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class LinkedService(Model): - """The Azure Data Factory nested object which contains the information and - credential which can be used to connect with related store or compute - resource. - - You probably want to use the sub-classes and not this class directly. 
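# LinkedService is the polymorphic base: its constructor now takes only the
# shared keywords and leaves 'type' as None so msrest can dispatch on the
# _subtype_map above during deserialization. Constructing the base directly
# is shown purely to illustrate the signature; in practice a concrete
# subclass (e.g. MagentoLinkedService, later in this patch) pins the
# discriminator.
from azure.mgmt.datafactory.models import LinkedService

base = LinkedService(description='shared linked-service properties only')
assert base.type is None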
Known - sub-classes are: AzureFunctionLinkedService, - AzureDataExplorerLinkedService, SapTableLinkedService, - GoogleAdWordsLinkedService, OracleServiceCloudLinkedService, - DynamicsAXLinkedService, ResponsysLinkedService, - AzureDatabricksLinkedService, AzureDataLakeAnalyticsLinkedService, - HDInsightOnDemandLinkedService, SalesforceMarketingCloudLinkedService, - NetezzaLinkedService, VerticaLinkedService, ZohoLinkedService, - XeroLinkedService, SquareLinkedService, SparkLinkedService, - ShopifyLinkedService, ServiceNowLinkedService, QuickBooksLinkedService, - PrestoLinkedService, PhoenixLinkedService, PaypalLinkedService, - MarketoLinkedService, AzureMariaDBLinkedService, MariaDBLinkedService, - MagentoLinkedService, JiraLinkedService, ImpalaLinkedService, - HubspotLinkedService, HiveLinkedService, HBaseLinkedService, - GreenplumLinkedService, GoogleBigQueryLinkedService, EloquaLinkedService, - DrillLinkedService, CouchbaseLinkedService, ConcurLinkedService, - AzurePostgreSqlLinkedService, AmazonMWSLinkedService, SapHanaLinkedService, - SapBWLinkedService, SftpServerLinkedService, FtpServerLinkedService, - HttpLinkedService, AzureSearchLinkedService, CustomDataSourceLinkedService, - AmazonRedshiftLinkedService, AmazonS3LinkedService, - RestServiceLinkedService, SapOpenHubLinkedService, SapEccLinkedService, - SapCloudForCustomerLinkedService, SalesforceServiceCloudLinkedService, - SalesforceLinkedService, Office365LinkedService, AzureBlobFSLinkedService, - AzureDataLakeStoreLinkedService, CosmosDbMongoDbApiLinkedService, - MongoDbV2LinkedService, MongoDbLinkedService, CassandraLinkedService, - WebLinkedService, ODataLinkedService, HdfsLinkedService, - MicrosoftAccessLinkedService, InformixLinkedService, OdbcLinkedService, - AzureMLLinkedService, TeradataLinkedService, Db2LinkedService, - SybaseLinkedService, PostgreSqlLinkedService, MySqlLinkedService, - AzureMySqlLinkedService, OracleLinkedService, FileServerLinkedService, - HDInsightLinkedService, CommonDataServiceForAppsLinkedService, - DynamicsCrmLinkedService, DynamicsLinkedService, CosmosDbLinkedService, - AzureKeyVaultLinkedService, AzureBatchLinkedService, - AzureSqlMILinkedService, AzureSqlDatabaseLinkedService, - SqlServerLinkedService, AzureSqlDWLinkedService, - AzureTableStorageLinkedService, AzureBlobStorageLinkedService, - AzureStorageLinkedService - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 
'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: - super(LinkedService, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.connect_via = connect_via - self.description = description - self.parameters = parameters - self.annotations = annotations - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference.py index 28ffeda7d01a..bedc0f2d8fe1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference.py @@ -18,12 +18,10 @@ class LinkedServiceReference(Model): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Linked service reference type. Default value: + :ivar type: Linked service reference type. Default value: "LinkedServiceReference" . :vartype type: str - :param reference_name: Required. Reference LinkedService name. + :param reference_name: Reference LinkedService name. :type reference_name: str :param parameters: Arguments for LinkedService. :type parameters: dict[str, object] @@ -42,7 +40,7 @@ class LinkedServiceReference(Model): type = "LinkedServiceReference" - def __init__(self, **kwargs): - super(LinkedServiceReference, self).__init__(**kwargs) - self.reference_name = kwargs.get('reference_name', None) - self.parameters = kwargs.get('parameters', None) + def __init__(self, reference_name, parameters=None): + super(LinkedServiceReference, self).__init__() + self.reference_name = reference_name + self.parameters = parameters diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference_py3.py deleted file mode 100644 index b6238130bdb6..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference_py3.py +++ /dev/null @@ -1,48 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
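# LinkedServiceReference usage after the change above: reference_name is
# required and positional, while 'type' remains a class-level constant.
# The service name is illustrative.
from azure.mgmt.datafactory.models import LinkedServiceReference

ref = LinkedServiceReference('MyStorageLinkedService')
assert LinkedServiceReference.type == 'LinkedServiceReference'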
-# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class LinkedServiceReference(Model): - """Linked service reference type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Linked service reference type. Default value: - "LinkedServiceReference" . - :vartype type: str - :param reference_name: Required. Reference LinkedService name. - :type reference_name: str - :param parameters: Arguments for LinkedService. - :type parameters: dict[str, object] - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, - } - - type = "LinkedServiceReference" - - def __init__(self, *, reference_name: str, parameters=None, **kwargs) -> None: - super(LinkedServiceReference, self).__init__(**kwargs) - self.reference_name = reference_name - self.parameters = parameters diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource.py index 75828718f589..cbbc70cbc1ce 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource.py @@ -18,8 +18,6 @@ class LinkedServiceResource(SubResource): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. :vartype id: str :ivar name: The resource name. @@ -28,7 +26,7 @@ class LinkedServiceResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param properties: Required. Properties of linked service. + :param properties: Properties of linked service. :type properties: ~azure.mgmt.datafactory.models.LinkedService """ @@ -48,6 +46,6 @@ class LinkedServiceResource(SubResource): 'properties': {'key': 'properties', 'type': 'LinkedService'}, } - def __init__(self, **kwargs): - super(LinkedServiceResource, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) + def __init__(self, properties): + super(LinkedServiceResource, self).__init__() + self.properties = properties diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource_py3.py deleted file mode 100644 index 1fa964b51f57..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource_py3.py +++ /dev/null @@ -1,53 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
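# LinkedServiceResource now requires 'properties' positionally; any concrete
# LinkedService subclass fits. MagentoLinkedService (changed later in this
# patch) is borrowed here purely as an illustration.
from azure.mgmt.datafactory.models import (
    LinkedServiceResource, MagentoLinkedService)

resource = LinkedServiceResource(
    MagentoLinkedService(host='192.168.222.110/magento3'))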
-# -------------------------------------------------------------------------- - -from .sub_resource_py3 import SubResource - - -class LinkedServiceResource(SubResource): - """Linked service resource type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Properties of linked service. - :type properties: ~azure.mgmt.datafactory.models.LinkedService - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'LinkedService'}, - } - - def __init__(self, *, properties, **kwargs) -> None: - super(LinkedServiceResource, self).__init__(**kwargs) - self.properties = properties diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings.py index 81b4e7ca619e..b09114e37957 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings.py @@ -15,13 +15,10 @@ class LogStorageSettings(Model): """Log storage settings. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param linked_service_name: Required. Log storage linked service - reference. + :param linked_service_name: Log storage linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param path: The path to storage for storing detailed logs of activity @@ -39,8 +36,8 @@ class LogStorageSettings(Model): 'path': {'key': 'path', 'type': 'object'}, } - def __init__(self, **kwargs): - super(LogStorageSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.linked_service_name = kwargs.get('linked_service_name', None) - self.path = kwargs.get('path', None) + def __init__(self, linked_service_name, additional_properties=None, path=None): + super(LogStorageSettings, self).__init__() + self.additional_properties = additional_properties + self.linked_service_name = linked_service_name + self.path = path diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings_py3.py deleted file mode 100644 index 4850b7adacdf..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings_py3.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. 
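# LogStorageSettings pairs naturally with the reference type above; a sketch
# with an illustrative linked service name and log path:
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, LogStorageSettings)

logs = LogStorageSettings(
    LinkedServiceReference('LogStorageLinkedService'),  # required, positional
    path='container/activity-logs',                     # string or Expression
)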
-# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class LogStorageSettings(Model): - """Log storage settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param linked_service_name: Required. Log storage linked service - reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param path: The path to storage for storing detailed logs of activity - execution. Type: string (or Expression with resultType string). - :type path: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'path': {'key': 'path', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, path=None, **kwargs) -> None: - super(LogStorageSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.linked_service_name = linked_service_name - self.path = path diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity.py index 62584b2f704a..282596c4b291 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity.py @@ -15,12 +15,10 @@ class LookupActivity(ExecutionActivity): """Lookup activity. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Activity name. :type name: str :param description: Activity description. :type description: str @@ -28,17 +26,17 @@ class LookupActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param source: Required. Dataset-specific source properties, same as copy - activity source. + :param source: Dataset-specific source properties, same as copy activity + source. :type source: ~azure.mgmt.datafactory.models.CopySource - :param dataset: Required. Lookup activity dataset reference. + :param dataset: Lookup activity dataset reference. :type dataset: ~azure.mgmt.datafactory.models.DatasetReference :param first_row_only: Whether to return first row or all rows. Default value is true. 
Type: boolean (or Expression with resultType boolean). @@ -66,9 +64,9 @@ class LookupActivity(ExecutionActivity): 'first_row_only': {'key': 'typeProperties.firstRowOnly', 'type': 'object'}, } - def __init__(self, **kwargs): - super(LookupActivity, self).__init__(**kwargs) - self.source = kwargs.get('source', None) - self.dataset = kwargs.get('dataset', None) - self.first_row_only = kwargs.get('first_row_only', None) + def __init__(self, name, source, dataset, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, first_row_only=None): + super(LookupActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) + self.source = source + self.dataset = dataset + self.first_row_only = first_row_only self.type = 'Lookup' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity_py3.py deleted file mode 100644 index 41061675ebbe..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity_py3.py +++ /dev/null @@ -1,74 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class LookupActivity(ExecutionActivity): - """Lookup activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param source: Required. Dataset-specific source properties, same as copy - activity source. - :type source: ~azure.mgmt.datafactory.models.CopySource - :param dataset: Required. Lookup activity dataset reference. - :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - :param first_row_only: Whether to return first row or all rows. Default - value is true. Type: boolean (or Expression with resultType boolean). 
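# A hedged sketch of the LookupActivity signature above. BlobSource and
# DatasetReference are assumed to follow the same conversion as the rest of
# this patch (BlobSource with all-optional keywords, DatasetReference with a
# required reference_name); verify against their own diffs.
from azure.mgmt.datafactory.models import (
    BlobSource, DatasetReference, LookupActivity)

lookup = LookupActivity(
    name='LookupConfig',
    source=BlobSource(),
    dataset=DatasetReference(reference_name='ConfigDataset'),
    first_row_only=True,              # the documented default
)
assert lookup.type == 'Lookup'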
- :type first_row_only: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'source': {'required': True}, - 'dataset': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, - 'first_row_only': {'key': 'typeProperties.firstRowOnly', 'type': 'object'}, - } - - def __init__(self, *, name: str, source, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, first_row_only=None, **kwargs) -> None: - super(LookupActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.source = source - self.dataset = dataset - self.first_row_only = first_row_only - self.type = 'Lookup' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service.py index 9d65437b5daa..c6a79ad1cf28 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service.py @@ -15,8 +15,6 @@ class MagentoLinkedService(LinkedService): """Magento server linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,9 +29,9 @@ class MagentoLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param host: Required. The URL of the Magento instance. (i.e. + :param host: The URL of the Magento instance. (i.e. 192.168.222.110/magento3) :type host: object :param access_token: The access token from Magento. 
@@ -74,12 +72,12 @@ class MagentoLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(MagentoLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.access_token = kwargs.get('access_token', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, host, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, access_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None): + super(MagentoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.host = host + self.access_token = access_token + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential self.type = 'Magento' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service_py3.py deleted file mode 100644 index 74de1573118b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service_py3.py +++ /dev/null @@ -1,85 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class MagentoLinkedService(LinkedService): - """Magento server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The URL of the Magento instance. (i.e. - 192.168.222.110/magento3) - :type host: object - :param access_token: The access token from Magento. - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. 
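MagentoLinkedService follows the same pattern, with the required host moved to the front of the signature. A sketch, assuming SecureString (a SecretBase subtype from the same models package) for the token; the host value mirrors the docstring's example and the token is a placeholder:

from azure.mgmt.datafactory.models import MagentoLinkedService, SecureString

magento_ls = MagentoLinkedService(
    host='192.168.222.110/magento3',                     # URL of the Magento instance
    access_token=SecureString(value='<access-token>'),   # placeholder secret
)
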
- :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(MagentoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.access_token = access_token - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'Magento' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset.py index ad540093ca55..2d8a6ec71705 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset.py @@ -15,8 +15,6 @@ class MagentoObjectDataset(Dataset): """Magento server dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class MagentoObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. 
:type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class MagentoObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -66,7 +64,7 @@ class MagentoObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(MagentoObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): + super(MagentoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name self.type = 'MagentoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset_py3.py deleted file mode 100644 index 481732bb688a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class MagentoObjectDataset(Dataset): - """Magento server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. 
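For the Magento dataset, only linked_service_name remains required. A sketch with a hypothetical linked service name 'MagentoLinkedService1':

from azure.mgmt.datafactory.models import (
    LinkedServiceReference, MagentoObjectDataset)

magento_ds = MagentoObjectDataset(
    linked_service_name=LinkedServiceReference(
        reference_name='MagentoLinkedService1'),  # hypothetical name
    table_name='customers',  # typed as object, so a literal string or an Expression dict
)
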
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(MagentoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'MagentoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source.py index df49fe63a544..f3e5d1c2c385 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source.py @@ -15,8 +15,6 @@ class MagentoSource(CopySource): """A copy activity Magento server source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class MagentoSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
@@ -51,7 +49,7 @@ class MagentoSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(MagentoSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'MagentoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source_py3.py deleted file mode 100644 index 15efcc12a054..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class MagentoSource(CopySource): - """A copy activity Magento server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
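MagentoSource keeps every parameter optional; query narrows what the copy activity reads from the server. A sketch with illustrative values only:

from azure.mgmt.datafactory.models import MagentoSource

source = MagentoSource(
    query='SELECT id, email FROM customers',  # omit to copy the whole object
    max_concurrent_connections=4,             # cap connections to the source store
)
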
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'MagentoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime.py index 9cbc9e94e7c3..9d750318aacd 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime.py @@ -19,14 +19,12 @@ class ManagedIntegrationRuntime(IntegrationRuntime): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Integration runtime description. :type description: str - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :ivar state: Integration runtime state, only valid for managed dedicated integration runtime. 
Possible values include: 'Initial', 'Stopped', @@ -57,9 +55,9 @@ class ManagedIntegrationRuntime(IntegrationRuntime): 'ssis_properties': {'key': 'typeProperties.ssisProperties', 'type': 'IntegrationRuntimeSsisProperties'}, } - def __init__(self, **kwargs): - super(ManagedIntegrationRuntime, self).__init__(**kwargs) + def __init__(self, additional_properties=None, description=None, compute_properties=None, ssis_properties=None): + super(ManagedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description) self.state = None - self.compute_properties = kwargs.get('compute_properties', None) - self.ssis_properties = kwargs.get('ssis_properties', None) + self.compute_properties = compute_properties + self.ssis_properties = ssis_properties self.type = 'Managed' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error.py index c70323697fdf..642975fcf5ef 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error.py @@ -46,9 +46,9 @@ class ManagedIntegrationRuntimeError(Model): 'message': {'key': 'message', 'type': 'str'}, } - def __init__(self, **kwargs): - super(ManagedIntegrationRuntimeError, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) + def __init__(self, additional_properties=None): + super(ManagedIntegrationRuntimeError, self).__init__() + self.additional_properties = additional_properties self.time = None self.code = None self.parameters = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error_py3.py deleted file mode 100644 index 1668c5196537..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error_py3.py +++ /dev/null @@ -1,55 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class ManagedIntegrationRuntimeError(Model): - """Error definition for managed integration runtime. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar time: The time when the error occurred. - :vartype time: datetime - :ivar code: Error code. - :vartype code: str - :ivar parameters: Managed integration runtime error parameters. - :vartype parameters: list[str] - :ivar message: Error message. 
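ManagedIntegrationRuntime is now constructed with explicit keyword arguments only, and state stays None on the client because the server populates it. A sketch; the compute-properties parameter names are taken from the companion IntegrationRuntimeComputeProperties model rather than from this diff, so treat them as assumptions:

from azure.mgmt.datafactory.models import (
    IntegrationRuntimeComputeProperties, ManagedIntegrationRuntime)

managed_ir = ManagedIntegrationRuntime(
    description='Managed IR for SSIS packages',
    compute_properties=IntegrationRuntimeComputeProperties(
        location='WestUS',           # assumed parameter names; see lead-in
        node_size='Standard_D4_v3',
        number_of_nodes=1,
    ),
)
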
- :vartype message: str - """ - - _validation = { - 'time': {'readonly': True}, - 'code': {'readonly': True}, - 'parameters': {'readonly': True}, - 'message': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'time': {'key': 'time', 'type': 'iso-8601'}, - 'code': {'key': 'code', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '[str]'}, - 'message': {'key': 'message', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(ManagedIntegrationRuntimeError, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.time = None - self.code = None - self.parameters = None - self.message = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node.py index e9c0169cf6c5..306b51ec9e45 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node.py @@ -44,9 +44,9 @@ class ManagedIntegrationRuntimeNode(Model): 'errors': {'key': 'errors', 'type': '[ManagedIntegrationRuntimeError]'}, } - def __init__(self, **kwargs): - super(ManagedIntegrationRuntimeNode, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) + def __init__(self, additional_properties=None, errors=None): + super(ManagedIntegrationRuntimeNode, self).__init__() + self.additional_properties = additional_properties self.node_id = None self.status = None - self.errors = kwargs.get('errors', None) + self.errors = errors diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node_py3.py deleted file mode 100644 index 0e8104d0de05..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node_py3.py +++ /dev/null @@ -1,52 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class ManagedIntegrationRuntimeNode(Model): - """Properties of integration runtime node. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar node_id: The managed integration runtime node id. - :vartype node_id: str - :ivar status: The managed integration runtime node status. Possible values - include: 'Starting', 'Available', 'Recycling', 'Unavailable' - :vartype status: str or - ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNodeStatus - :param errors: The errors that occurred on this integration runtime node. 
- :type errors: - list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError] - """ - - _validation = { - 'node_id': {'readonly': True}, - 'status': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'node_id': {'key': 'nodeId', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'errors': {'key': 'errors', 'type': '[ManagedIntegrationRuntimeError]'}, - } - - def __init__(self, *, additional_properties=None, errors=None, **kwargs) -> None: - super(ManagedIntegrationRuntimeNode, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.node_id = None - self.status = None - self.errors = errors diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result.py index 2329f7a2ba36..83dc66fbb496 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result.py @@ -54,9 +54,9 @@ class ManagedIntegrationRuntimeOperationResult(Model): 'activity_id': {'key': 'activityId', 'type': 'str'}, } - def __init__(self, **kwargs): - super(ManagedIntegrationRuntimeOperationResult, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) + def __init__(self, additional_properties=None): + super(ManagedIntegrationRuntimeOperationResult, self).__init__() + self.additional_properties = additional_properties self.type = None self.start_time = None self.result = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result_py3.py deleted file mode 100644 index 58a80c0e600e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result_py3.py +++ /dev/null @@ -1,65 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class ManagedIntegrationRuntimeOperationResult(Model): - """Properties of managed integration runtime operation result. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar type: The operation type. Could be start or stop. - :vartype type: str - :ivar start_time: The start time of the operation. - :vartype start_time: datetime - :ivar result: The operation result. - :vartype result: str - :ivar error_code: The error code. - :vartype error_code: str - :ivar parameters: Managed integration runtime error parameters. 
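ManagedIntegrationRuntimeError, ManagedIntegrationRuntimeNode, and ManagedIntegrationRuntimeOperationResult are response-only models: apart from additional_properties, every field is readonly and filled in by the service, so client code reads them rather than sets them. A sketch under the assumption that ir_status is a ManagedIntegrationRuntimeStatus (its diff appears further down in this patch) obtained earlier, for instance from client.integration_runtimes.get_status(...).properties, which is outside this patch:

# ir_status: a ManagedIntegrationRuntimeStatus fetched from the service.
for node in ir_status.nodes or []:
    print(node.node_id, node.status)
    for error in node.errors or []:            # ManagedIntegrationRuntimeError items
        print(' ', error.time, error.code, error.message)
if ir_status.last_operation is not None:       # ManagedIntegrationRuntimeOperationResult
    print(ir_status.last_operation.type, ir_status.last_operation.result)
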
- :vartype parameters: list[str] - :ivar activity_id: The activity id for the operation request. - :vartype activity_id: str - """ - - _validation = { - 'type': {'readonly': True}, - 'start_time': {'readonly': True}, - 'result': {'readonly': True}, - 'error_code': {'readonly': True}, - 'parameters': {'readonly': True}, - 'activity_id': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, - 'result': {'key': 'result', 'type': 'str'}, - 'error_code': {'key': 'errorCode', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '[str]'}, - 'activity_id': {'key': 'activityId', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(ManagedIntegrationRuntimeOperationResult, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type = None - self.start_time = None - self.result = None - self.error_code = None - self.parameters = None - self.activity_id = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_py3.py deleted file mode 100644 index 0e71d8b09f4e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_py3.py +++ /dev/null @@ -1,65 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .integration_runtime_py3 import IntegrationRuntime - - -class ManagedIntegrationRuntime(IntegrationRuntime): - """Managed integration runtime, including managed elastic and managed - dedicated integration runtimes. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Integration runtime description. - :type description: str - :param type: Required. Constant filled by server. - :type type: str - :ivar state: Integration runtime state, only valid for managed dedicated - integration runtime. Possible values include: 'Initial', 'Stopped', - 'Started', 'Starting', 'Stopping', 'NeedRegistration', 'Online', - 'Limited', 'Offline', 'AccessDenied' - :vartype state: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeState - :param compute_properties: The compute resource for managed integration - runtime. - :type compute_properties: - ~azure.mgmt.datafactory.models.IntegrationRuntimeComputeProperties - :param ssis_properties: SSIS properties for managed integration runtime. 
- :type ssis_properties: - ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisProperties - """ - - _validation = { - 'type': {'required': True}, - 'state': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'state': {'key': 'state', 'type': 'str'}, - 'compute_properties': {'key': 'typeProperties.computeProperties', 'type': 'IntegrationRuntimeComputeProperties'}, - 'ssis_properties': {'key': 'typeProperties.ssisProperties', 'type': 'IntegrationRuntimeSsisProperties'}, - } - - def __init__(self, *, additional_properties=None, description: str=None, compute_properties=None, ssis_properties=None, **kwargs) -> None: - super(ManagedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description, **kwargs) - self.state = None - self.compute_properties = compute_properties - self.ssis_properties = ssis_properties - self.type = 'Managed' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status.py index 17d21775f09f..347948608e2b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status.py @@ -18,8 +18,6 @@ class ManagedIntegrationRuntimeStatus(IntegrationRuntimeStatus): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class ManagedIntegrationRuntimeStatus(IntegrationRuntimeStatus): 'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied' :vartype state: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeState - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :ivar create_time: The time at which the integration runtime was created, in ISO8601 format. @@ -69,8 +67,8 @@ class ManagedIntegrationRuntimeStatus(IntegrationRuntimeStatus): 'last_operation': {'key': 'typeProperties.lastOperation', 'type': 'ManagedIntegrationRuntimeOperationResult'}, } - def __init__(self, **kwargs): - super(ManagedIntegrationRuntimeStatus, self).__init__(**kwargs) + def __init__(self, additional_properties=None): + super(ManagedIntegrationRuntimeStatus, self).__init__(additional_properties=additional_properties) self.create_time = None self.nodes = None self.other_errors = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status_py3.py deleted file mode 100644 index 03d9451045bd..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status_py3.py +++ /dev/null @@ -1,78 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .integration_runtime_status_py3 import IntegrationRuntimeStatus - - -class ManagedIntegrationRuntimeStatus(IntegrationRuntimeStatus): - """Managed integration runtime status. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar data_factory_name: The data factory name which the integration - runtime belong to. - :vartype data_factory_name: str - :ivar state: The state of integration runtime. Possible values include: - 'Initial', 'Stopped', 'Started', 'Starting', 'Stopping', - 'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied' - :vartype state: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeState - :param type: Required. Constant filled by server. - :type type: str - :ivar create_time: The time at which the integration runtime was created, - in ISO8601 format. - :vartype create_time: datetime - :ivar nodes: The list of nodes for managed integration runtime. - :vartype nodes: - list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNode] - :ivar other_errors: The errors that occurred on this integration runtime. - :vartype other_errors: - list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError] - :ivar last_operation: The last operation result that occurred on this - integration runtime. 
- :vartype last_operation: - ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeOperationResult - """ - - _validation = { - 'data_factory_name': {'readonly': True}, - 'state': {'readonly': True}, - 'type': {'required': True}, - 'create_time': {'readonly': True}, - 'nodes': {'readonly': True}, - 'other_errors': {'readonly': True}, - 'last_operation': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, - 'state': {'key': 'state', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'create_time': {'key': 'typeProperties.createTime', 'type': 'iso-8601'}, - 'nodes': {'key': 'typeProperties.nodes', 'type': '[ManagedIntegrationRuntimeNode]'}, - 'other_errors': {'key': 'typeProperties.otherErrors', 'type': '[ManagedIntegrationRuntimeError]'}, - 'last_operation': {'key': 'typeProperties.lastOperation', 'type': 'ManagedIntegrationRuntimeOperationResult'}, - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(ManagedIntegrationRuntimeStatus, self).__init__(additional_properties=additional_properties, **kwargs) - self.create_time = None - self.nodes = None - self.other_errors = None - self.last_operation = None - self.type = 'Managed' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service.py index 3bbe048d4877..39a73e948d06 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service.py @@ -15,8 +15,6 @@ class MariaDBLinkedService(LinkedService): """MariaDB server linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class MariaDBLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. 
@@ -61,9 +59,9 @@ class MariaDBLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(MariaDBLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.pwd = kwargs.get('pwd', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None): + super(MariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.connection_string = connection_string + self.pwd = pwd + self.encrypted_credential = encrypted_credential self.type = 'MariaDB' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service_py3.py deleted file mode 100644 index 475284d56038..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service_py3.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class MariaDBLinkedService(LinkedService): - """MariaDB server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection - string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
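MariaDBLinkedService now has no required type properties at all; the connection string is the piece most callers set, with pwd optionally pointing at an AzureKeyVaultSecretReference as the docstring notes. A sketch with a placeholder connection string:

from azure.mgmt.datafactory.models import MariaDBLinkedService

mariadb_ls = MariaDBLinkedService(
    connection_string='Server=mydb.example.com;Port=3306;Database=sales;UID=reader;',
)
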
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: - super(MariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.pwd = pwd - self.encrypted_credential = encrypted_credential - self.type = 'MariaDB' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source.py index a744c1c5ff8f..279e044aa946 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source.py @@ -15,8 +15,6 @@ class MariaDBSource(CopySource): """A copy activity MariaDB server source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class MariaDBSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
@@ -51,7 +49,7 @@ class MariaDBSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(MariaDBSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(MariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'MariaDBSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source_py3.py deleted file mode 100644 index 472877b8f0bb..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class MariaDBSource(CopySource): - """A copy activity MariaDB server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
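MariaDBSource mirrors MagentoSource; note that source_retry_wait must match the timespan pattern given in the docstring. Illustrative values only:

from azure.mgmt.datafactory.models import MariaDBSource

source = MariaDBSource(
    query='SELECT * FROM orders WHERE order_date >= CURDATE()',
    source_retry_count=3,
    source_retry_wait='00:00:30',  # hh:mm:ss, per the documented pattern
)
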
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(MariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'MariaDBSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset.py index 66dc9c8ea9b7..d20a77832970 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset.py @@ -15,8 +15,6 @@ class MariaDBTableDataset(Dataset): """MariaDB server dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class MariaDBTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class MariaDBTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). 
@@ -66,7 +64,7 @@ class MariaDBTableDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(MariaDBTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): + super(MariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name self.type = 'MariaDBTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset_py3.py deleted file mode 100644 index ac3c8cf2ea72..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class MariaDBTableDataset(Dataset): - """MariaDB server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). 
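As with the Magento dataset, linked_service_name is the only required argument here. A sketch with a hypothetical linked service name:

from azure.mgmt.datafactory.models import (
    LinkedServiceReference, MariaDBTableDataset)

mariadb_ds = MariaDBTableDataset(
    linked_service_name=LinkedServiceReference(
        reference_name='MariaDBLinkedService1'),  # hypothetical name
    table_name='orders',
)
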
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(MariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'MariaDBTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service.py index 2a9e76446122..0834db5587fd 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service.py @@ -15,8 +15,6 @@ class MarketoLinkedService(LinkedService): """Marketo server linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,12 +29,12 @@ class MarketoLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param endpoint: Required. The endpoint of the Marketo server. (i.e. + :param endpoint: The endpoint of the Marketo server. (i.e. 123-ABC-321.mktorest.com) :type endpoint: object - :param client_id: Required. The client Id of your Marketo service. + :param client_id: The client Id of your Marketo service. :type client_id: object :param client_secret: The client secret of your Marketo service. 
:type client_secret: ~azure.mgmt.datafactory.models.SecretBase @@ -78,13 +76,13 @@ class MarketoLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(MarketoLinkedService, self).__init__(**kwargs) - self.endpoint = kwargs.get('endpoint', None) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, endpoint, client_id, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None): + super(MarketoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.endpoint = endpoint + self.client_id = client_id + self.client_secret = client_secret + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential self.type = 'Marketo' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service_py3.py deleted file mode 100644 index dc326f24acd5..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service_py3.py +++ /dev/null @@ -1,90 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class MarketoLinkedService(LinkedService): - """Marketo server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param endpoint: Required. The endpoint of the Marketo server. (i.e. - 123-ABC-321.mktorest.com) - :type endpoint: object - :param client_id: Required. The client Id of your Marketo service. 
- :type client_id: object - :param client_secret: The client secret of your Marketo service. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, endpoint, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(MarketoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.endpoint = endpoint - self.client_id = client_id - self.client_secret = client_secret - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'Marketo' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset.py index 63daa10047b9..281992bf1a05 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset.py @@ -15,8 +15,6 @@ class MarketoObjectDataset(Dataset): """Marketo server dataset. - All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class MarketoObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class MarketoObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -66,7 +64,7 @@ class MarketoObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(MarketoObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): + super(MarketoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name self.type = 'MarketoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset_py3.py deleted file mode 100644 index 7179d5af53dd..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class MarketoObjectDataset(Dataset): - """Marketo server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(MarketoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'MarketoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source.py index 6d2061ef0dee..907fb4a27a8e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source.py @@ -15,8 +15,6 @@ class MarketoSource(CopySource): """A copy activity Marketo server source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class MarketoSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
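The hunk below swaps the **kwargs-based initializer for explicit parameters. As a minimal sketch (not part of the patch, with a placeholder query value), constructing a MarketoSource against the new signature looks like this:

    from azure.mgmt.datafactory.models import MarketoSource

    # Nothing is required; every argument is a plain keyword, and the
    # 'type' discriminator is still assigned by the model itself.
    source = MarketoSource(
        query="<Marketo query expression>",  # placeholder value
        source_retry_count=3,
        source_retry_wait="00:00:30",
        max_concurrent_connections=1,
    )
    assert source.type == 'MarketoSource'
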
@@ -51,7 +49,7 @@ class MarketoSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(MarketoSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'MarketoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source_py3.py deleted file mode 100644 index 573dc0439754..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class MarketoSource(CopySource): - """A copy activity Marketo server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'MarketoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service.py index b53164f6266b..ae9755b667fa 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service.py @@ -15,8 +15,6 @@ class MicrosoftAccessLinkedService(LinkedService): """Microsoft Access linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,10 +29,10 @@ class MicrosoftAccessLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param connection_string: Required. The non-access credential portion of - the connection string as well as an optional encrypted credential. Type: + :param connection_string: The non-access credential portion of the + connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. 
:type connection_string: object :param authentication_type: Type of authentication used to connect to the @@ -75,12 +73,12 @@ class MicrosoftAccessLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(MicrosoftAccessLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.credential = kwargs.get('credential', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, connection_string, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, authentication_type=None, credential=None, user_name=None, password=None, encrypted_credential=None): + super(MicrosoftAccessLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.connection_string = connection_string + self.authentication_type = authentication_type + self.credential = credential + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential self.type = 'MicrosoftAccess' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service_py3.py deleted file mode 100644 index c9f79c24adf3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service_py3.py +++ /dev/null @@ -1,86 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class MicrosoftAccessLinkedService(LinkedService): - """Microsoft Access linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The non-access credential portion of - the connection string as well as an optional encrypted credential. Type: - string, SecureString or AzureKeyVaultSecretReference. 
- :type connection_string: object - :param authentication_type: Type of authentication used to connect to the - Microsoft Access as ODBC data store. Possible values are: Anonymous and - Basic. Type: string (or Expression with resultType string). - :type authentication_type: object - :param credential: The access credential portion of the connection string - specified in driver-specific property-value format. - :type credential: ~azure.mgmt.datafactory.models.SecretBase - :param user_name: User name for Basic authentication. Type: string (or - Expression with resultType string). - :type user_name: object - :param password: Password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, credential=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(MicrosoftAccessLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.authentication_type = authentication_type - self.credential = credential - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'MicrosoftAccess' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink.py index 53406fa25022..c8b127128c15 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink.py @@ -15,8 +15,6 @@ class MicrosoftAccessSink(CopySink): """A copy activity Microsoft Access sink. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -38,7 +36,7 @@ class MicrosoftAccessSink(CopySink): for the sink data store. 
Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). @@ -60,7 +58,7 @@ class MicrosoftAccessSink(CopySink): 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } - def __init__(self, **kwargs): - super(MicrosoftAccessSink, self).__init__(**kwargs) - self.pre_copy_script = kwargs.get('pre_copy_script', None) + def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None): + super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.pre_copy_script = pre_copy_script self.type = 'MicrosoftAccessSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink_py3.py deleted file mode 100644 index 700db840c03d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink_py3.py +++ /dev/null @@ -1,66 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class MicrosoftAccessSink(CopySink): - """A copy activity Microsoft Access sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param pre_copy_script: A query to execute before starting the copy. Type: - string (or Expression with resultType string). 
- :type pre_copy_script: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: - super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.pre_copy_script = pre_copy_script - self.type = 'MicrosoftAccessSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source.py index 73cd3a64184c..e15b6a7dbac3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source.py @@ -15,8 +15,6 @@ class MicrosoftAccessSource(CopySource): """A copy activity source for Microsoft Access. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class MicrosoftAccessSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType string). 
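The same pattern repeats for Microsoft Access in the next hunk: the initializer drops **kwargs in favor of explicit arguments, so a mistyped keyword now raises TypeError instead of being silently ignored. A minimal sketch under that signature, with an illustrative query string:

    from azure.mgmt.datafactory.models import MicrosoftAccessSource

    # All arguments are optional; the type discriminator is set internally.
    source = MicrosoftAccessSource(
        query="SELECT CustomerId, City FROM Customers",  # illustrative only
    )
    assert source.type == 'MicrosoftAccessSource'
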
@@ -51,7 +49,7 @@ class MicrosoftAccessSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(MicrosoftAccessSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(MicrosoftAccessSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'MicrosoftAccessSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source_py3.py deleted file mode 100644 index 1cccd82c8b19..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class MicrosoftAccessSource(CopySource): - """A copy activity source for Microsoft Access. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(MicrosoftAccessSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'MicrosoftAccessSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset.py index f312dae024f5..637534dbd571 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset.py @@ -15,8 +15,6 @@ class MicrosoftAccessTableDataset(Dataset): """The Microsoft Access table dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class MicrosoftAccessTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class MicrosoftAccessTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: The Microsoft Access table name. Type: string (or Expression with resultType string). 
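For the dataset models, the rewritten initializers shown below move the required linked_service_name to the front of the signature while the remaining arguments stay optional keywords. A hedged sketch of the resulting call shape (the reference and table names are placeholders, and LinkedServiceReference is assumed to come from the same models package):

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        MicrosoftAccessTableDataset,
    )

    dataset = MicrosoftAccessTableDataset(
        linked_service_name=LinkedServiceReference(
            reference_name="ExampleAccessLinkedService"  # placeholder
        ),
        table_name="Customers",  # placeholder
    )
    assert dataset.type == 'MicrosoftAccessTable'
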
@@ -66,7 +64,7 @@ class MicrosoftAccessTableDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(MicrosoftAccessTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): + super(MicrosoftAccessTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name self.type = 'MicrosoftAccessTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset_py3.py deleted file mode 100644 index 3fad904ef58b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class MicrosoftAccessTableDataset(Dataset): - """The Microsoft Access table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The Microsoft Access table name. Type: string (or - Expression with resultType string). 
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(MicrosoftAccessTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'MicrosoftAccessTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset.py index 796c5e14eaca..aeb7ceee6dc5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset.py @@ -15,8 +15,6 @@ class MongoDbCollectionDataset(Dataset): """The MongoDB database dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class MongoDbCollectionDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,10 +39,10 @@ class MongoDbCollectionDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param collection_name: Required. The table name of the MongoDB database. - Type: string (or Expression with resultType string). + :param collection_name: The table name of the MongoDB database. Type: + string (or Expression with resultType string). 
:type collection_name: object """ @@ -67,7 +65,7 @@ class MongoDbCollectionDataset(Dataset): 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(MongoDbCollectionDataset, self).__init__(**kwargs) - self.collection_name = kwargs.get('collection_name', None) + def __init__(self, linked_service_name, collection_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None): + super(MongoDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.collection_name = collection_name self.type = 'MongoDbCollection' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset_py3.py deleted file mode 100644 index 68fe2affb0e4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset_py3.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class MongoDbCollectionDataset(Dataset): - """The MongoDB database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param collection_name: Required. The table name of the MongoDB database. - Type: string (or Expression with resultType string). 
- :type collection_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'collection_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, collection_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: - super(MongoDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.collection_name = collection_name - self.type = 'MongoDbCollection' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties.py index a2d2127d1397..a2c1fd77cc23 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties.py @@ -44,10 +44,10 @@ class MongoDbCursorMethodsProperties(Model): 'limit': {'key': 'limit', 'type': 'object'}, } - def __init__(self, **kwargs): - super(MongoDbCursorMethodsProperties, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.project = kwargs.get('project', None) - self.sort = kwargs.get('sort', None) - self.skip = kwargs.get('skip', None) - self.limit = kwargs.get('limit', None) + def __init__(self, additional_properties=None, project=None, sort=None, skip=None, limit=None): + super(MongoDbCursorMethodsProperties, self).__init__() + self.additional_properties = additional_properties + self.project = project + self.sort = sort + self.skip = skip + self.limit = limit diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties_py3.py deleted file mode 100644 index e1e3f50d1539..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties_py3.py +++ /dev/null @@ -1,53 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class MongoDbCursorMethodsProperties(Model): - """Cursor methods for Mongodb query. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param project: Specifies the fields to return in the documents that match - the query filter. To return all fields in the matching documents, omit - this parameter. Type: string (or Expression with resultType string). - :type project: object - :param sort: Specifies the order in which the query returns matching - documents. Type: string (or Expression with resultType string). Type: - string (or Expression with resultType string). - :type sort: object - :param skip: Specifies the how many documents skipped and where MongoDB - begins returning results. This approach may be useful in implementing - paginated results. Type: integer (or Expression with resultType integer). - :type skip: object - :param limit: Specifies the maximum number of documents the server - returns. limit() is analogous to the LIMIT statement in a SQL database. - Type: integer (or Expression with resultType integer). - :type limit: object - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'project': {'key': 'project', 'type': 'object'}, - 'sort': {'key': 'sort', 'type': 'object'}, - 'skip': {'key': 'skip', 'type': 'object'}, - 'limit': {'key': 'limit', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, project=None, sort=None, skip=None, limit=None, **kwargs) -> None: - super(MongoDbCursorMethodsProperties, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.project = project - self.sort = sort - self.skip = skip - self.limit = limit diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service.py index 76d162b0ff70..ce0c34dfdb77 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service.py @@ -15,8 +15,6 @@ class MongoDbLinkedService(LinkedService): """Linked service for MongoDb data source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,17 +29,17 @@ class MongoDbLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param server: Required. The IP address or server name of the MongoDB - server. Type: string (or Expression with resultType string). + :param server: The IP address or server name of the MongoDB server. Type: + string (or Expression with resultType string). :type server: object :param authentication_type: The authentication type to be used to connect to the MongoDB database. Possible values include: 'Basic', 'Anonymous' :type authentication_type: str or ~azure.mgmt.datafactory.models.MongoDbAuthenticationType - :param database_name: Required. 
The name of the MongoDB database that you - want to access. Type: string (or Expression with resultType string). + :param database_name: The name of the MongoDB database that you want to + access. Type: string (or Expression with resultType string). :type database_name: object :param username: Username for authentication. Type: string (or Expression with resultType string). @@ -94,16 +92,16 @@ class MongoDbLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(MongoDbLinkedService, self).__init__(**kwargs) - self.server = kwargs.get('server', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.database_name = kwargs.get('database_name', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.auth_source = kwargs.get('auth_source', None) - self.port = kwargs.get('port', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, server, database_name, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, authentication_type=None, username=None, password=None, auth_source=None, port=None, enable_ssl=None, allow_self_signed_server_cert=None, encrypted_credential=None): + super(MongoDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.server = server + self.authentication_type = authentication_type + self.database_name = database_name + self.username = username + self.password = password + self.auth_source = auth_source + self.port = port + self.enable_ssl = enable_ssl + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.encrypted_credential = encrypted_credential self.type = 'MongoDb' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service_py3.py deleted file mode 100644 index 95308b6ea8f0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service_py3.py +++ /dev/null @@ -1,109 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class MongoDbLinkedService(LinkedService): - """Linked service for MongoDb data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. 
- :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param server: Required. The IP address or server name of the MongoDB - server. Type: string (or Expression with resultType string). - :type server: object - :param authentication_type: The authentication type to be used to connect - to the MongoDB database. Possible values include: 'Basic', 'Anonymous' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.MongoDbAuthenticationType - :param database_name: Required. The name of the MongoDB database that you - want to access. Type: string (or Expression with resultType string). - :type database_name: object - :param username: Username for authentication. Type: string (or Expression - with resultType string). - :type username: object - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param auth_source: Database to verify the username and password. Type: - string (or Expression with resultType string). - :type auth_source: object - :param port: The TCP port number that the MongoDB server uses to listen - for client connections. The default value is 27017. Type: integer (or - Expression with resultType integer), minimum: 0. - :type port: object - :param enable_ssl: Specifies whether the connections to the server are - encrypted using SSL. The default value is false. Type: boolean (or - Expression with resultType boolean). - :type enable_ssl: object - :param allow_self_signed_server_cert: Specifies whether to allow - self-signed certificates from the server. The default value is false. - Type: boolean (or Expression with resultType boolean). - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'database_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'database_name': {'key': 'typeProperties.databaseName', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'auth_source': {'key': 'typeProperties.authSource', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, server, database_name, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, username=None, password=None, auth_source=None, port=None, enable_ssl=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: - super(MongoDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.server = server - self.authentication_type = authentication_type - self.database_name = database_name - self.username = username - self.password = password - self.auth_source = auth_source - self.port = port - self.enable_ssl = enable_ssl - self.allow_self_signed_server_cert = allow_self_signed_server_cert - self.encrypted_credential = encrypted_credential - self.type = 'MongoDb' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source.py index 3da4b931f5e5..f044c74e1e06 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source.py @@ -15,8 +15,6 @@ class MongoDbSource(CopySource): """A copy activity source for a MongoDB database. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class MongoDbSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: Database query. Should be a SQL-92 query expression. Type: string (or Expression with resultType string). 
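The hunk below swaps MongoDbSource's **kwargs constructor for an explicit signature, matching the MongoDbLinkedService change above, where server and database_name become required positional parameters. A minimal usage sketch of the resulting API; the host, database, and credential values are placeholders, not values taken from this patch:

    from azure.mgmt.datafactory.models import (
        MongoDbLinkedService, MongoDbSource, SecureString)

    # server and database_name are now explicit required arguments rather
    # than kwargs.get(...) lookups; the optional fields keep None defaults.
    linked_service = MongoDbLinkedService(
        server='mongo.example.com',                    # placeholder host
        database_name='inventory',                     # placeholder database
        authentication_type='Basic',
        username='reader',
        password=SecureString(value='<placeholder>'),
        port=27017)

    # The copy-activity source stays all-optional; query is SQL-92 text.
    source = MongoDbSource(query='SELECT * FROM items')
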
@@ -51,7 +49,7 @@ class MongoDbSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(MongoDbSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'MongoDbSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source_py3.py deleted file mode 100644 index ab3e5b6e0cc9..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class MongoDbSource(CopySource): - """A copy activity source for a MongoDB database. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Should be a SQL-92 query expression. Type: - string (or Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'MongoDbSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset.py index 17089373d4c5..b50597dad249 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset.py @@ -15,8 +15,6 @@ class MongoDbV2CollectionDataset(Dataset): """The MongoDB database dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class MongoDbV2CollectionDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,10 +39,10 @@ class MongoDbV2CollectionDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param collection: Required. The collection name of the MongoDB database. - Type: string (or Expression with resultType string). + :param collection: The collection name of the MongoDB database. Type: + string (or Expression with resultType string). 
:type collection: object """ @@ -67,7 +65,7 @@ class MongoDbV2CollectionDataset(Dataset): 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, } - def __init__(self, **kwargs): - super(MongoDbV2CollectionDataset, self).__init__(**kwargs) - self.collection = kwargs.get('collection', None) + def __init__(self, linked_service_name, collection, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None): + super(MongoDbV2CollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.collection = collection self.type = 'MongoDbV2Collection' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset_py3.py deleted file mode 100644 index ad1e5c538645..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset_py3.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class MongoDbV2CollectionDataset(Dataset): - """The MongoDB database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param collection: Required. The collection name of the MongoDB database. - Type: string (or Expression with resultType string). 
- :type collection: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'collection': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, collection, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: - super(MongoDbV2CollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.collection = collection - self.type = 'MongoDbV2Collection' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service.py index bb29fc767420..8fd966440047 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service.py @@ -15,8 +15,6 @@ class MongoDbV2LinkedService(LinkedService): """Linked service for MongoDB data source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,14 +29,14 @@ class MongoDbV2LinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param connection_string: Required. The MongoDB connection string. Type: - string, SecureString or AzureKeyVaultSecretReference. Type: string, - SecureString or AzureKeyVaultSecretReference. + :param connection_string: The MongoDB connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. Type: string, SecureString + or AzureKeyVaultSecretReference. :type connection_string: object - :param database: Required. The name of the MongoDB database that you want - to access. Type: string (or Expression with resultType string). + :param database: The name of the MongoDB database that you want to access. + Type: string (or Expression with resultType string). 
:type database: object """ @@ -59,8 +57,8 @@ class MongoDbV2LinkedService(LinkedService): 'database': {'key': 'typeProperties.database', 'type': 'object'}, } - def __init__(self, **kwargs): - super(MongoDbV2LinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.database = kwargs.get('database', None) + def __init__(self, connection_string, database, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None): + super(MongoDbV2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.connection_string = connection_string + self.database = database self.type = 'MongoDbV2' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service_py3.py deleted file mode 100644 index d1388ce797a5..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service_py3.py +++ /dev/null @@ -1,66 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class MongoDbV2LinkedService(LinkedService): - """Linked service for MongoDB data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The MongoDB connection string. Type: - string, SecureString or AzureKeyVaultSecretReference. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param database: Required. The name of the MongoDB database that you want - to access. Type: string (or Expression with resultType string). 
- :type database: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - 'database': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - } - - def __init__(self, *, connection_string, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: - super(MongoDbV2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.database = database - self.type = 'MongoDbV2' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source.py index e951674a8e22..3606e04e1878 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source.py @@ -15,8 +15,6 @@ class MongoDbV2Source(CopySource): """A copy activity source for a MongoDB database. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class MongoDbV2Source(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param filter: Specifies selection filter using query operators. 
To return all documents in a collection, omit this parameter or pass an empty @@ -63,9 +61,9 @@ class MongoDbV2Source(CopySource): 'batch_size': {'key': 'batchSize', 'type': 'object'}, } - def __init__(self, **kwargs): - super(MongoDbV2Source, self).__init__(**kwargs) - self.filter = kwargs.get('filter', None) - self.cursor_methods = kwargs.get('cursor_methods', None) - self.batch_size = kwargs.get('batch_size', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None): + super(MongoDbV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.filter = filter + self.cursor_methods = cursor_methods + self.batch_size = batch_size self.type = 'MongoDbV2Source' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py deleted file mode 100644 index 9b8eec114a06..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py +++ /dev/null @@ -1,71 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class MongoDbV2Source(CopySource): - """A copy activity source for a MongoDB database. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param filter: Specifies selection filter using query operators. To return - all documents in a collection, omit this parameter or pass an empty - document ({}). Type: string (or Expression with resultType string). - :type filter: object - :param cursor_methods: Cursor methods for Mongodb query - :type cursor_methods: - ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties - :param batch_size: Specifies the number of documents to return in each - batch of the response from MongoDB instance. In most cases, modifying the - batch size will not affect the user or the application. This property's - main purpose is to avoid hit the limitation of response size. Type: - integer (or Expression with resultType integer). 
- :type batch_size: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'filter': {'key': 'filter', 'type': 'object'}, - 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, - 'batch_size': {'key': 'batchSize', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None, **kwargs) -> None: - super(MongoDbV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.filter = filter - self.cursor_methods = cursor_methods - self.batch_size = batch_size - self.type = 'MongoDbV2Source' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger.py index 1be28aa1b6ab..c4b0edd8c166 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger.py @@ -22,8 +22,6 @@ class MultiplePipelineTrigger(Trigger): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -37,7 +35,7 @@ class MultiplePipelineTrigger(Trigger): :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param pipelines: Pipelines that need to be started. :type pipelines: @@ -62,7 +60,7 @@ class MultiplePipelineTrigger(Trigger): 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} } - def __init__(self, **kwargs): - super(MultiplePipelineTrigger, self).__init__(**kwargs) - self.pipelines = kwargs.get('pipelines', None) + def __init__(self, additional_properties=None, description=None, annotations=None, pipelines=None): + super(MultiplePipelineTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations) + self.pipelines = pipelines self.type = 'MultiplePipelineTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger_py3.py deleted file mode 100644 index 206ab74ef419..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger_py3.py +++ /dev/null @@ -1,68 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. 
All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .trigger_py3 import Trigger - - -class MultiplePipelineTrigger(Trigger): - """Base class for all triggers that support one to many model for trigger to - pipeline. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: BlobEventsTrigger, BlobTrigger, ScheduleTrigger - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when - Start/Stop APIs are called on the Trigger. Possible values include: - 'Started', 'Stopped', 'Disabled' - :vartype runtime_state: str or - ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the - trigger. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param pipelines: Pipelines that need to be started. - :type pipelines: - list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - """ - - _validation = { - 'runtime_state': {'readonly': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, - } - - _subtype_map = { - 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} - } - - def __init__(self, *, additional_properties=None, description: str=None, annotations=None, pipelines=None, **kwargs) -> None: - super(MultiplePipelineTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) - self.pipelines = pipelines - self.type = 'MultiplePipelineTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service.py index ec85b0136714..cbc8a7dd5768 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service.py @@ -15,8 +15,6 @@ class MySqlLinkedService(LinkedService): """Linked service for MySQL data source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,9 +29,9 @@ class MySqlLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. 
:type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param connection_string: Required. The connection string. + :param connection_string: The connection string. :type connection_string: ~azure.mgmt.datafactory.models.SecretBase :param password: The Azure key vault secret reference of password in connection string. @@ -62,9 +60,9 @@ class MySqlLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(MySqlLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, connection_string, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, password=None, encrypted_credential=None): + super(MySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.connection_string = connection_string + self.password = password + self.encrypted_credential = encrypted_credential self.type = 'MySql' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service_py3.py deleted file mode 100644 index b8038df22fd6..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service_py3.py +++ /dev/null @@ -1,70 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class MySqlLinkedService(LinkedService): - """Linked service for MySQL data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The connection string. - :type connection_string: ~azure.mgmt.datafactory.models.SecretBase - :param password: The Azure key vault secret reference of password in - connection string. - :type password: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. 
Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'SecretBase'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(MySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'MySql' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source.py index c2b0b66eabb1..5c7fe726220f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source.py @@ -15,8 +15,6 @@ class MySqlSource(CopySource): """A copy activity source for MySQL databases. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class MySqlSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType string). 
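The MySQL models follow the same pattern, with one wrinkle worth noting: MySqlLinkedService's required connection_string is typed as a SecretBase, so it is normally a SecureString or an AzureKeyVaultSecretReference rather than a plain string. A hedged sketch with placeholder values:

    from azure.mgmt.datafactory.models import (
        MySqlLinkedService, MySqlSource, SecureString)

    # connection_string is the only required type property and must be a
    # SecretBase subclass such as SecureString.
    linked_service = MySqlLinkedService(
        connection_string=SecureString(
            value='Server=mysql.example.com;Database=sales;Uid=reader'))

    # The source keeps an all-optional constructor.
    source = MySqlSource(query='SELECT id, total FROM orders')
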
@@ -51,7 +49,7 @@ class MySqlSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(MySqlSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'MySqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source_py3.py deleted file mode 100644 index 3a0315d83979..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class MySqlSource(CopySource): - """A copy activity source for MySQL databases. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'MySqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset.py index 3bb1584975d5..52917a98abde 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset.py @@ -15,8 +15,6 @@ class MySqlTableDataset(Dataset): """The MySQL table dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class MySqlTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class MySqlTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: The MySQL table name. Type: string (or Expression with resultType string). 
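For dataset models such as MySqlTableDataset, linked_service_name is the lone required argument and now leads the signature, as the constructor hunk below shows. A short sketch, assuming a linked service named 'MySqlLinkedService' already exists in the factory (the reference name and table are placeholders):

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, MySqlTableDataset)

    dataset = MySqlTableDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='MySqlLinkedService'),  # placeholder reference
        table_name='orders')                       # placeholder table
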
@@ -66,7 +64,7 @@ class MySqlTableDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(MySqlTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): + super(MySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name self.type = 'MySqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset_py3.py deleted file mode 100644 index 33263561dfde..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class MySqlTableDataset(Dataset): - """The MySQL table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The MySQL table name. Type: string (or Expression with - resultType string). 
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(MySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'MySqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service.py index 5d94bdecaf62..9b82dfd4114e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service.py @@ -15,8 +15,6 @@ class NetezzaLinkedService(LinkedService): """Netezza linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class NetezzaLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. 
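NetezzaLinkedService declares no required type properties, so after the constructor change below every argument still defaults to None and only the fields you need have to be supplied. A sketch with a placeholder ODBC string; per the docstring, a SecureString or AzureKeyVaultSecretReference would also be accepted:

    from azure.mgmt.datafactory.models import NetezzaLinkedService

    # A plain ODBC connection string; values are placeholders.
    linked_service = NetezzaLinkedService(
        connection_string='Driver={NetezzaSQL};Server=nz.example.com;'
                          'Database=analytics;UID=reader')
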
@@ -61,9 +59,9 @@ class NetezzaLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(NetezzaLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.pwd = kwargs.get('pwd', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None): + super(NetezzaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.connection_string = connection_string + self.pwd = pwd + self.encrypted_credential = encrypted_credential self.type = 'Netezza' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service_py3.py deleted file mode 100644 index 2fcc288fd5b7..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service_py3.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class NetezzaLinkedService(LinkedService): - """Netezza linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection - string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: - super(NetezzaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.pwd = pwd - self.encrypted_credential = encrypted_credential - self.type = 'Netezza' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings.py index b6c1ca9ba5da..b1845b17e610 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings.py @@ -35,8 +35,8 @@ class NetezzaPartitionSettings(Model): 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, } - def __init__(self, **kwargs): - super(NetezzaPartitionSettings, self).__init__(**kwargs) - self.partition_column_name = kwargs.get('partition_column_name', None) - self.partition_upper_bound = kwargs.get('partition_upper_bound', None) - self.partition_lower_bound = kwargs.get('partition_lower_bound', None) + def __init__(self, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None): + super(NetezzaPartitionSettings, self).__init__() + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings_py3.py deleted file mode 100644 index 9f071eae60ff..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings_py3.py +++ /dev/null @@ -1,42 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class NetezzaPartitionSettings(Model): - """The settings that will be leveraged for Netezza source partitioning. - - :param partition_column_name: The name of the column in integer type that - will be used for proceeding range partitioning. Type: string (or - Expression with resultType string). - :type partition_column_name: object - :param partition_upper_bound: The maximum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_upper_bound: object - :param partition_lower_bound: The minimum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_lower_bound: object - """ - - _attribute_map = { - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, - 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, - 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, - } - - def __init__(self, *, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None: - super(NetezzaPartitionSettings, self).__init__(**kwargs) - self.partition_column_name = partition_column_name - self.partition_upper_bound = partition_upper_bound - self.partition_lower_bound = partition_lower_bound diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py index 3c66032bf48d..bf0d141b4487 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py @@ -15,8 +15,6 @@ class NetezzaSource(CopySource): """A copy activity Netezza source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class NetezzaSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
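The partition settings defined above pair with NetezzaSource's partition_option (per the docstring: 'None', 'DataSlice', or 'DynamicRange') to spread a read across parallel copies. A sketch of dynamic-range partitioning; the column name and bounds are placeholders:

    from azure.mgmt.datafactory.models import (
        NetezzaPartitionSettings, NetezzaSource)

    # Rows are split into parallel ranges over order_id between the bounds.
    settings = NetezzaPartitionSettings(
        partition_column_name='order_id',
        partition_lower_bound='1',
        partition_upper_bound='1000000')

    source = NetezzaSource(
        query='SELECT * FROM orders',
        partition_option='DynamicRange',
        partition_settings=settings)
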
@@ -62,9 +60,9 @@ class NetezzaSource(CopySource): 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'}, } - def __init__(self, **kwargs): - super(NetezzaSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.partition_option = kwargs.get('partition_option', None) - self.partition_settings = kwargs.get('partition_settings', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, partition_option=None, partition_settings=None): + super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query + self.partition_option = partition_option + self.partition_settings = partition_settings self.type = 'NetezzaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py deleted file mode 100644 index f5dcc07e63d8..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py +++ /dev/null @@ -1,70 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class NetezzaSource(CopySource): - """A copy activity Netezza source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - :param partition_option: The partition mechanism that will be used for - Netezza read in parallel. Possible values include: 'None', 'DataSlice', - 'DynamicRange' - :type partition_option: str or - ~azure.mgmt.datafactory.models.NetezzaPartitionOption - :param partition_settings: The settings that will be leveraged for Netezza - source partitioning. 
- :type partition_settings: - ~azure.mgmt.datafactory.models.NetezzaPartitionSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, partition_option=None, partition_settings=None, **kwargs) -> None: - super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.partition_option = partition_option - self.partition_settings = partition_settings - self.type = 'NetezzaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset.py index b7807273262b..65702c04dc05 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset.py @@ -15,8 +15,6 @@ class NetezzaTableDataset(Dataset): """Netezza dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class NetezzaTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class NetezzaTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. 
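The NetezzaTableDataset constructor in the next hunk takes linked_service_name as its only required argument, and the retiring table_name property is split into separate table and schema properties. A minimal sketch, assuming the post-patch models; 'NetezzaLS', 'orders', and 'ADMIN' are hypothetical names:

from azure.mgmt.datafactory.models import (
    LinkedServiceReference,
    NetezzaTableDataset,
)

# Reference an existing Netezza linked service by name, then address the
# table through the split table/schema properties instead of table_name.
dataset = NetezzaTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='NetezzaLS'),
    table='orders',
    netezza_table_dataset_schema='ADMIN',
)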
@@ -74,9 +72,9 @@ class NetezzaTableDataset(Dataset): 'netezza_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, **kwargs): - super(NetezzaTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.netezza_table_dataset_schema = kwargs.get('netezza_table_dataset_schema', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, netezza_table_dataset_schema=None): + super(NetezzaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name + self.table = table + self.netezza_table_dataset_schema = netezza_table_dataset_schema self.type = 'NetezzaTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset_py3.py deleted file mode 100644 index 29dd448ada75..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class NetezzaTableDataset(Dataset): - """Netezza dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of the Netezza. 
Type: string (or Expression - with resultType string). - :type table: object - :param netezza_table_dataset_schema: The schema name of the Netezza. Type: - string (or Expression with resultType string). - :type netezza_table_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'netezza_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, netezza_table_dataset_schema=None, **kwargs) -> None: - super(NetezzaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.table = table - self.netezza_table_dataset_schema = netezza_table_dataset_schema - self.type = 'NetezzaTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service.py index 01db8d71e924..b20a11ad9530 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service.py @@ -15,8 +15,6 @@ class ODataLinkedService(LinkedService): """Open Data Protocol (OData) linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,10 +29,10 @@ class ODataLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param url: Required. The URL of the OData service endpoint. Type: string - (or Expression with resultType string). + :param url: The URL of the OData service endpoint. Type: string (or + Expression with resultType string). :type url: object :param authentication_type: Type of authentication used to connect to the OData service. 
Possible values include: 'Basic', 'Anonymous', 'Windows', @@ -110,18 +108,18 @@ class ODataLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(ODataLinkedService, self).__init__(**kwargs) - self.url = kwargs.get('url', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.tenant = kwargs.get('tenant', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.aad_resource_id = kwargs.get('aad_resource_id', None) - self.aad_service_principal_credential_type = kwargs.get('aad_service_principal_credential_type', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.service_principal_embedded_cert = kwargs.get('service_principal_embedded_cert', None) - self.service_principal_embedded_cert_password = kwargs.get('service_principal_embedded_cert_password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, url, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, authentication_type=None, user_name=None, password=None, tenant=None, service_principal_id=None, aad_resource_id=None, aad_service_principal_credential_type=None, service_principal_key=None, service_principal_embedded_cert=None, service_principal_embedded_cert_password=None, encrypted_credential=None): + super(ODataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.url = url + self.authentication_type = authentication_type + self.user_name = user_name + self.password = password + self.tenant = tenant + self.service_principal_id = service_principal_id + self.aad_resource_id = aad_resource_id + self.aad_service_principal_credential_type = aad_service_principal_credential_type + self.service_principal_key = service_principal_key + self.service_principal_embedded_cert = service_principal_embedded_cert + self.service_principal_embedded_cert_password = service_principal_embedded_cert_password + self.encrypted_credential = encrypted_credential self.type = 'OData' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service_py3.py deleted file mode 100644 index fcf2d8bb9819..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service_py3.py +++ /dev/null @@ -1,127 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class ODataLinkedService(LinkedService): - """Open Data Protocol (OData) linked service. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param url: Required. The URL of the OData service endpoint. Type: string - (or Expression with resultType string). - :type url: object - :param authentication_type: Type of authentication used to connect to the - OData service. Possible values include: 'Basic', 'Anonymous', 'Windows', - 'AadServicePrincipal', 'ManagedServiceIdentity' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.ODataAuthenticationType - :param user_name: User name of the OData service. Type: string (or - Expression with resultType string). - :type user_name: object - :param password: Password of the OData service. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: Specify the tenant information (domain name or tenant ID) - under which your application resides. Type: string (or Expression with - resultType string). - :type tenant: object - :param service_principal_id: Specify the application id of your - application registered in Azure Active Directory. Type: string (or - Expression with resultType string). - :type service_principal_id: object - :param aad_resource_id: Specify the resource you are requesting - authorization to use Directory. Type: string (or Expression with - resultType string). - :type aad_resource_id: object - :param aad_service_principal_credential_type: Specify the credential type - (key or cert) is used for service principal. Possible values include: - 'ServicePrincipalKey', 'ServicePrincipalCert' - :type aad_service_principal_credential_type: str or - ~azure.mgmt.datafactory.models.ODataAadServicePrincipalCredentialType - :param service_principal_key: Specify the secret of your application - registered in Azure Active Directory. Type: string (or Expression with - resultType string). - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param service_principal_embedded_cert: Specify the base64 encoded - certificate of your application registered in Azure Active Directory. - Type: string (or Expression with resultType string). - :type service_principal_embedded_cert: - ~azure.mgmt.datafactory.models.SecretBase - :param service_principal_embedded_cert_password: Specify the password of - your certificate if your certificate has a password and you are using - AadServicePrincipal authentication. Type: string (or Expression with - resultType string). - :type service_principal_embedded_cert_password: - ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, - 'aad_service_principal_credential_type': {'key': 'typeProperties.aadServicePrincipalCredentialType', 'type': 'str'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'service_principal_embedded_cert': {'key': 'typeProperties.servicePrincipalEmbeddedCert', 'type': 'SecretBase'}, - 'service_principal_embedded_cert_password': {'key': 'typeProperties.servicePrincipalEmbeddedCertPassword', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, user_name=None, password=None, tenant=None, service_principal_id=None, aad_resource_id=None, aad_service_principal_credential_type=None, service_principal_key=None, service_principal_embedded_cert=None, service_principal_embedded_cert_password=None, encrypted_credential=None, **kwargs) -> None: - super(ODataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.url = url - self.authentication_type = authentication_type - self.user_name = user_name - self.password = password - self.tenant = tenant - self.service_principal_id = service_principal_id - self.aad_resource_id = aad_resource_id - self.aad_service_principal_credential_type = aad_service_principal_credential_type - self.service_principal_key = service_principal_key - self.service_principal_embedded_cert = service_principal_embedded_cert - self.service_principal_embedded_cert_password = service_principal_embedded_cert_password - self.encrypted_credential = encrypted_credential - self.type = 'OData' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset.py index 658cf40c8d2b..d81ec54fd561 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset.py @@ -15,8 +15,6 @@ class ODataResourceDataset(Dataset): """The Open Data Protocol (OData) resource dataset. - All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class ODataResourceDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class ODataResourceDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param path: The OData resource path. Type: string (or Expression with resultType string). @@ -66,7 +64,7 @@ class ODataResourceDataset(Dataset): 'path': {'key': 'typeProperties.path', 'type': 'object'}, } - def __init__(self, **kwargs): - super(ODataResourceDataset, self).__init__(**kwargs) - self.path = kwargs.get('path', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, path=None): + super(ODataResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.path = path self.type = 'ODataResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset_py3.py deleted file mode 100644 index 5951a2cf6d80..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class ODataResourceDataset(Dataset): - """The Open Data Protocol (OData) resource dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param path: The OData resource path. Type: string (or Expression with - resultType string). - :type path: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, path=None, **kwargs) -> None: - super(ODataResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.path = path - self.type = 'ODataResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source.py index c70f440ff6cb..b9c4e8ea9141 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source.py @@ -15,8 +15,6 @@ class ODataSource(CopySource): """A copy activity source for OData source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class ODataSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: OData query. For example, "$top=1". Type: string (or Expression with resultType string). 
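After this change the ODataSource constructor mirrors the other copy sources: every parameter is optional and **kwargs is gone. A minimal sketch, assuming the post-patch models; the query reuses the "$top=1" example from the docstring:

from azure.mgmt.datafactory.models import ODataSource

# The OData system query option is evaluated against the endpoint that
# the activity's dataset and linked service resolve to at run time.
source = ODataSource(query='$top=1', source_retry_count=2)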
@@ -51,7 +49,7 @@ class ODataSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(ODataSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(ODataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'ODataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source_py3.py deleted file mode 100644 index 83ba9bd7f2af..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class ODataSource(CopySource): - """A copy activity source for OData source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: OData query. For example, "$top=1". Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(ODataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'ODataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service.py index 53d21dee2def..275cb7682dcf 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service.py @@ -15,8 +15,6 @@ class OdbcLinkedService(LinkedService): """Open Database Connectivity (ODBC) linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,10 +29,10 @@ class OdbcLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param connection_string: Required. The non-access credential portion of - the connection string as well as an optional encrypted credential. Type: + :param connection_string: The non-access credential portion of the + connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. 
:type connection_string: object :param authentication_type: Type of authentication used to connect to the @@ -75,12 +73,12 @@ class OdbcLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(OdbcLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.credential = kwargs.get('credential', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, connection_string, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, authentication_type=None, credential=None, user_name=None, password=None, encrypted_credential=None): + super(OdbcLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.connection_string = connection_string + self.authentication_type = authentication_type + self.credential = credential + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential self.type = 'Odbc' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service_py3.py deleted file mode 100644 index 2e376d23c67a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service_py3.py +++ /dev/null @@ -1,86 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class OdbcLinkedService(LinkedService): - """Open Database Connectivity (ODBC) linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The non-access credential portion of - the connection string as well as an optional encrypted credential. Type: - string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param authentication_type: Type of authentication used to connect to the - ODBC data store. Possible values are: Anonymous and Basic. 
Type: string - (or Expression with resultType string). - :type authentication_type: object - :param credential: The access credential portion of the connection string - specified in driver-specific property-value format. - :type credential: ~azure.mgmt.datafactory.models.SecretBase - :param user_name: User name for Basic authentication. Type: string (or - Expression with resultType string). - :type user_name: object - :param password: Password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, credential=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(OdbcLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.authentication_type = authentication_type - self.credential = credential - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'Odbc' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink.py index ced7e1dbd9e4..8c3535545ff8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink.py @@ -15,8 +15,6 @@ class OdbcSink(CopySink): """A copy activity ODBC sink. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -38,7 +36,7 @@ class OdbcSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. 
Type: string (or Expression with resultType string). @@ -60,7 +58,7 @@ class OdbcSink(CopySink): 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } - def __init__(self, **kwargs): - super(OdbcSink, self).__init__(**kwargs) - self.pre_copy_script = kwargs.get('pre_copy_script', None) + def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None): + super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.pre_copy_script = pre_copy_script self.type = 'OdbcSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink_py3.py deleted file mode 100644 index 9a181f8df7e9..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink_py3.py +++ /dev/null @@ -1,66 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class OdbcSink(CopySink): - """A copy activity ODBC sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param pre_copy_script: A query to execute before starting the copy. Type: - string (or Expression with resultType string). 
- :type pre_copy_script: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: - super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.pre_copy_script = pre_copy_script - self.type = 'OdbcSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source.py index 9761d0c0aeb5..c35a8b747ab2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source.py @@ -15,8 +15,6 @@ class OdbcSource(CopySource): """A copy activity source for ODBC databases. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class OdbcSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType string). @@ -51,7 +49,7 @@ class OdbcSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(OdbcSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'OdbcSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source_py3.py deleted file mode 100644 index 52b059a8ad91..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
-# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class OdbcSource(CopySource): - """A copy activity source for ODBC databases. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'OdbcSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset.py index 2f4f4261f4fc..fd2652eebc3d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset.py @@ -15,8 +15,6 @@ class OdbcTableDataset(Dataset): """The ODBC table dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class OdbcTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class OdbcTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: The ODBC table name. Type: string (or Expression with resultType string). @@ -66,7 +64,7 @@ class OdbcTableDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(OdbcTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): + super(OdbcTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name self.type = 'OdbcTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset_py3.py deleted file mode 100644 index 070ddccd180d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class OdbcTableDataset(Dataset): - """The ODBC table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The ODBC table name. Type: string (or Expression with - resultType string). 
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(OdbcTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'OdbcTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset.py index baa90666d669..f8ca4d6bf142 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset.py @@ -15,8 +15,6 @@ class Office365Dataset(Dataset): """The Office365 account. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class Office365Dataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,10 +39,10 @@ class Office365Dataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param table_name: Required. Name of the dataset to extract from Office - 365. Type: string (or Expression with resultType string). + :param table_name: Name of the dataset to extract from Office 365. Type: + string (or Expression with resultType string). :type table_name: object :param predicate: A predicate expression that can be used to filter the specific rows to extract from Office 365. 
Type: string (or Expression with @@ -72,8 +70,8 @@ class Office365Dataset(Dataset): 'predicate': {'key': 'typeProperties.predicate', 'type': 'object'}, } - def __init__(self, **kwargs): - super(Office365Dataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.predicate = kwargs.get('predicate', None) + def __init__(self, linked_service_name, table_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, predicate=None): + super(Office365Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name + self.predicate = predicate self.type = 'Office365Table' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset_py3.py deleted file mode 100644 index 5517f7daf9e3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset_py3.py +++ /dev/null @@ -1,79 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class Office365Dataset(Dataset): - """The Office365 account. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: Required. Name of the dataset to extract from Office - 365. Type: string (or Expression with resultType string). - :type table_name: object - :param predicate: A predicate expression that can be used to filter the - specific rows to extract from Office 365. Type: string (or Expression with - resultType string). 
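A short usage sketch of the two now-required arguments (linked_service_name and table_name); the Office 365 dataset name and predicate below are illustrative values only:

    from azure.mgmt.datafactory.models import LinkedServiceReference, Office365Dataset

    o365_ls = LinkedServiceReference(reference_name='MyOffice365LinkedService')  # placeholder
    dataset = Office365Dataset(
        linked_service_name=o365_ls,
        table_name='BasicDataSet_v0.Message_v0',           # example dataset name
        predicate="CreatedDateTime > '2019-01-01T00:00:00Z'")  # optional row filter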
- :type predicate: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'table_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'predicate': {'key': 'typeProperties.predicate', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, predicate=None, **kwargs) -> None: - super(Office365Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.predicate = predicate - self.type = 'Office365Table' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service.py index 2dc98897482a..417dd66f710c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service.py @@ -15,8 +15,6 @@ class Office365LinkedService(LinkedService): """Office365 linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,19 +29,19 @@ class Office365LinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param office365_tenant_id: Required. Azure tenant ID to which the Office - 365 account belongs. Type: string (or Expression with resultType string). + :param office365_tenant_id: Azure tenant ID to which the Office 365 + account belongs. Type: string (or Expression with resultType string). :type office365_tenant_id: object - :param service_principal_tenant_id: Required. Specify the tenant - information under which your Azure AD web application resides. Type: - string (or Expression with resultType string). + :param service_principal_tenant_id: Specify the tenant information under + which your Azure AD web application resides. Type: string (or Expression + with resultType string). :type service_principal_tenant_id: object - :param service_principal_id: Required. Specify the application's client - ID. Type: string (or Expression with resultType string). + :param service_principal_id: Specify the application's client ID. Type: + string (or Expression with resultType string). 
:type service_principal_id: object - :param service_principal_key: Required. Specify the application's key. + :param service_principal_key: Specify the application's key. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime @@ -73,11 +71,11 @@ class Office365LinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(Office365LinkedService, self).__init__(**kwargs) - self.office365_tenant_id = kwargs.get('office365_tenant_id', None) - self.service_principal_tenant_id = kwargs.get('service_principal_tenant_id', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, office365_tenant_id, service_principal_tenant_id, service_principal_id, service_principal_key, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, encrypted_credential=None): + super(Office365LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.office365_tenant_id = office365_tenant_id + self.service_principal_tenant_id = service_principal_tenant_id + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.encrypted_credential = encrypted_credential self.type = 'Office365' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service_py3.py deleted file mode 100644 index 5a69c0d895fa..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service_py3.py +++ /dev/null @@ -1,83 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class Office365LinkedService(LinkedService): - """Office365 linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. 
- :type type: str - :param office365_tenant_id: Required. Azure tenant ID to which the Office - 365 account belongs. Type: string (or Expression with resultType string). - :type office365_tenant_id: object - :param service_principal_tenant_id: Required. Specify the tenant - information under which your Azure AD web application resides. Type: - string (or Expression with resultType string). - :type service_principal_tenant_id: object - :param service_principal_id: Required. Specify the application's client - ID. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: Required. Specify the application's key. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'office365_tenant_id': {'required': True}, - 'service_principal_tenant_id': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'office365_tenant_id': {'key': 'typeProperties.office365TenantId', 'type': 'object'}, - 'service_principal_tenant_id': {'key': 'typeProperties.servicePrincipalTenantId', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, office365_tenant_id, service_principal_tenant_id, service_principal_id, service_principal_key, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, encrypted_credential=None, **kwargs) -> None: - super(Office365LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.office365_tenant_id = office365_tenant_id - self.service_principal_tenant_id = service_principal_tenant_id - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.encrypted_credential = encrypted_credential - self.type = 'Office365' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source.py index de19818aaa7f..d180e8e5584d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source.py @@ -15,8 +15,6 @@ class Office365Source(CopySource): """A copy activity source for an Office365 service. - All required parameters must be populated in order to send to Azure. 
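The same pattern applies to the linked service itself: a hedged sketch with placeholder tenant and application values, using SecureString as one possible SecretBase implementation for the key:

    from azure.mgmt.datafactory.models import Office365LinkedService, SecureString

    ls = Office365LinkedService(
        office365_tenant_id='<office-365-tenant-guid>',       # placeholder
        service_principal_tenant_id='<aad-tenant-guid>',      # placeholder
        service_principal_id='<application-client-id>',       # placeholder
        service_principal_key=SecureString(value='<application-key>'))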
- :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class Office365Source(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param allowed_groups: The groups containing all the users. Type: array of strings (or Expression with resultType array of strings). @@ -68,11 +66,11 @@ class Office365Source(CopySource): 'end_time': {'key': 'endTime', 'type': 'object'}, } - def __init__(self, **kwargs): - super(Office365Source, self).__init__(**kwargs) - self.allowed_groups = kwargs.get('allowed_groups', None) - self.user_scope_filter_uri = kwargs.get('user_scope_filter_uri', None) - self.date_filter_column = kwargs.get('date_filter_column', None) - self.start_time = kwargs.get('start_time', None) - self.end_time = kwargs.get('end_time', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, allowed_groups=None, user_scope_filter_uri=None, date_filter_column=None, start_time=None, end_time=None): + super(Office365Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.allowed_groups = allowed_groups + self.user_scope_filter_uri = user_scope_filter_uri + self.date_filter_column = date_filter_column + self.start_time = start_time + self.end_time = end_time self.type = 'Office365Source' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source_py3.py deleted file mode 100644 index fc2c4b095904..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source_py3.py +++ /dev/null @@ -1,78 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class Office365Source(CopySource): - """A copy activity source for an Office365 service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. 
Constant filled by server.
-    :type type: str
-    :param allowed_groups: The groups containing all the users. Type: array of
-     strings (or Expression with resultType array of strings).
-    :type allowed_groups: object
-    :param user_scope_filter_uri: The user scope uri. Type: string (or
-     Expression with resultType string).
-    :type user_scope_filter_uri: object
-    :param date_filter_column: The column on which to apply the start and end
-     time filters. Type: string (or Expression with resultType string).
-    :type date_filter_column: object
-    :param start_time: Start time of the requested range for this dataset.
-     Type: string (or Expression with resultType string).
-    :type start_time: object
-    :param end_time: End time of the requested range for this dataset. Type:
-     string (or Expression with resultType string).
-    :type end_time: object
-    """
-
-    _validation = {
-        'type': {'required': True},
-    }
-
-    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
-        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
-        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'type': {'key': 'type', 'type': 'str'},
-        'allowed_groups': {'key': 'allowedGroups', 'type': 'object'},
-        'user_scope_filter_uri': {'key': 'userScopeFilterUri', 'type': 'object'},
-        'date_filter_column': {'key': 'dateFilterColumn', 'type': 'object'},
-        'start_time': {'key': 'startTime', 'type': 'object'},
-        'end_time': {'key': 'endTime', 'type': 'object'},
-    }
-
-    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, allowed_groups=None, user_scope_filter_uri=None, date_filter_column=None, start_time=None, end_time=None, **kwargs) -> None:
-        super(Office365Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
-        self.allowed_groups = allowed_groups
-        self.user_scope_filter_uri = user_scope_filter_uri
-        self.date_filter_column = date_filter_column
-        self.start_time = start_time
-        self.end_time = end_time
-        self.type = 'Office365Source'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation.py
index db8cde8db784..dfaf8d979082 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation.py
@@ -33,9 +33,9 @@ class Operation(Model):
         'service_specification': {'key': 'properties.serviceSpecification', 'type': 'OperationServiceSpecification'},
     }
 
-    def __init__(self, **kwargs):
-        super(Operation, self).__init__(**kwargs)
-        self.name = kwargs.get('name', None)
-        self.origin = kwargs.get('origin', None)
-        self.display = kwargs.get('display', None)
-        self.service_specification = kwargs.get('service_specification', None)
+    def __init__(self, name=None, origin=None, display=None, service_specification=None):
+        super(Operation, self).__init__()
+        self.name = name
+        self.origin = origin
+        self.display = display
+        self.service_specification = service_specification
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display.py index
1d96541c0581..44a481206fb6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display.py @@ -33,9 +33,9 @@ class OperationDisplay(Model): 'operation': {'key': 'operation', 'type': 'str'}, } - def __init__(self, **kwargs): - super(OperationDisplay, self).__init__(**kwargs) - self.description = kwargs.get('description', None) - self.provider = kwargs.get('provider', None) - self.resource = kwargs.get('resource', None) - self.operation = kwargs.get('operation', None) + def __init__(self, description=None, provider=None, resource=None, operation=None): + super(OperationDisplay, self).__init__() + self.description = description + self.provider = provider + self.resource = resource + self.operation = operation diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display_py3.py deleted file mode 100644 index dfbb782627f4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display_py3.py +++ /dev/null @@ -1,41 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class OperationDisplay(Model): - """Metadata associated with the operation. - - :param description: The description of the operation. - :type description: str - :param provider: The name of the provider. - :type provider: str - :param resource: The name of the resource type on which the operation is - performed. - :type resource: str - :param operation: The type of operation: get, read, delete, etc. 
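Operation and OperationDisplay are metadata models that the service normally populates (for example through the client's operations listing); the hand-built sketch below, with an illustrative operation name, exists only to show the new constructors:

    from azure.mgmt.datafactory.models import Operation, OperationDisplay

    op = Operation(
        name='Microsoft.DataFactory/factories/read',  # illustrative operation name
        origin='user,system',
        display=OperationDisplay(
            provider='Microsoft.DataFactory',
            resource='factories',
            operation='read'))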
- :type operation: str - """ - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'provider': {'key': 'provider', 'type': 'str'}, - 'resource': {'key': 'resource', 'type': 'str'}, - 'operation': {'key': 'operation', 'type': 'str'}, - } - - def __init__(self, *, description: str=None, provider: str=None, resource: str=None, operation: str=None, **kwargs) -> None: - super(OperationDisplay, self).__init__(**kwargs) - self.description = description - self.provider = provider - self.resource = resource - self.operation = operation diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification.py index 93bfaf4ed0de..304707a33606 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification.py @@ -30,8 +30,8 @@ class OperationLogSpecification(Model): 'blob_duration': {'key': 'blobDuration', 'type': 'str'}, } - def __init__(self, **kwargs): - super(OperationLogSpecification, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.display_name = kwargs.get('display_name', None) - self.blob_duration = kwargs.get('blob_duration', None) + def __init__(self, name=None, display_name=None, blob_duration=None): + super(OperationLogSpecification, self).__init__() + self.name = name + self.display_name = display_name + self.blob_duration = blob_duration diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification_py3.py deleted file mode 100644 index 2cdd941fab7b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification_py3.py +++ /dev/null @@ -1,37 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class OperationLogSpecification(Model): - """Details about an operation related to logs. - - :param name: The name of the log category. - :type name: str - :param display_name: Localized display name. - :type display_name: str - :param blob_duration: Blobs created in the customer storage account, per - hour. 
- :type blob_duration: str - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'blob_duration': {'key': 'blobDuration', 'type': 'str'}, - } - - def __init__(self, *, name: str=None, display_name: str=None, blob_duration: str=None, **kwargs) -> None: - super(OperationLogSpecification, self).__init__(**kwargs) - self.name = name - self.display_name = display_name - self.blob_duration = blob_duration diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability.py index 974e0cbf4b0b..2e2053aedca7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability.py @@ -27,7 +27,7 @@ class OperationMetricAvailability(Model): 'blob_duration': {'key': 'blobDuration', 'type': 'str'}, } - def __init__(self, **kwargs): - super(OperationMetricAvailability, self).__init__(**kwargs) - self.time_grain = kwargs.get('time_grain', None) - self.blob_duration = kwargs.get('blob_duration', None) + def __init__(self, time_grain=None, blob_duration=None): + super(OperationMetricAvailability, self).__init__() + self.time_grain = time_grain + self.blob_duration = blob_duration diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability_py3.py deleted file mode 100644 index 312b83a23701..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability_py3.py +++ /dev/null @@ -1,33 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class OperationMetricAvailability(Model): - """Defines how often data for a metric becomes available. - - :param time_grain: The granularity for the metric. - :type time_grain: str - :param blob_duration: Blob created in the customer storage account, per - hour. 
- :type blob_duration: str - """ - - _attribute_map = { - 'time_grain': {'key': 'timeGrain', 'type': 'str'}, - 'blob_duration': {'key': 'blobDuration', 'type': 'str'}, - } - - def __init__(self, *, time_grain: str=None, blob_duration: str=None, **kwargs) -> None: - super(OperationMetricAvailability, self).__init__(**kwargs) - self.time_grain = time_grain - self.blob_duration = blob_duration diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension.py index 24232e7b5470..c71da5e1ed38 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension.py @@ -30,8 +30,8 @@ class OperationMetricDimension(Model): 'to_be_exported_for_shoebox': {'key': 'toBeExportedForShoebox', 'type': 'bool'}, } - def __init__(self, **kwargs): - super(OperationMetricDimension, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.display_name = kwargs.get('display_name', None) - self.to_be_exported_for_shoebox = kwargs.get('to_be_exported_for_shoebox', None) + def __init__(self, name=None, display_name=None, to_be_exported_for_shoebox=None): + super(OperationMetricDimension, self).__init__() + self.name = name + self.display_name = display_name + self.to_be_exported_for_shoebox = to_be_exported_for_shoebox diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension_py3.py deleted file mode 100644 index 1d8610b7fab8..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension_py3.py +++ /dev/null @@ -1,37 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class OperationMetricDimension(Model): - """Defines the metric dimension. - - :param name: The name of the dimension for the metric. - :type name: str - :param display_name: The display name of the metric dimension. - :type display_name: str - :param to_be_exported_for_shoebox: Whether the dimension should be - exported to Azure Monitor. 
- :type to_be_exported_for_shoebox: bool - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'to_be_exported_for_shoebox': {'key': 'toBeExportedForShoebox', 'type': 'bool'}, - } - - def __init__(self, *, name: str=None, display_name: str=None, to_be_exported_for_shoebox: bool=None, **kwargs) -> None: - super(OperationMetricDimension, self).__init__(**kwargs) - self.name = name - self.display_name = display_name - self.to_be_exported_for_shoebox = to_be_exported_for_shoebox diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification.py index 77f533fdcebf..dd37143a6ce6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification.py @@ -54,15 +54,15 @@ class OperationMetricSpecification(Model): 'dimensions': {'key': 'dimensions', 'type': '[OperationMetricDimension]'}, } - def __init__(self, **kwargs): - super(OperationMetricSpecification, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.display_name = kwargs.get('display_name', None) - self.display_description = kwargs.get('display_description', None) - self.unit = kwargs.get('unit', None) - self.aggregation_type = kwargs.get('aggregation_type', None) - self.enable_regional_mdm_account = kwargs.get('enable_regional_mdm_account', None) - self.source_mdm_account = kwargs.get('source_mdm_account', None) - self.source_mdm_namespace = kwargs.get('source_mdm_namespace', None) - self.availabilities = kwargs.get('availabilities', None) - self.dimensions = kwargs.get('dimensions', None) + def __init__(self, name=None, display_name=None, display_description=None, unit=None, aggregation_type=None, enable_regional_mdm_account=None, source_mdm_account=None, source_mdm_namespace=None, availabilities=None, dimensions=None): + super(OperationMetricSpecification, self).__init__() + self.name = name + self.display_name = display_name + self.display_description = display_description + self.unit = unit + self.aggregation_type = aggregation_type + self.enable_regional_mdm_account = enable_regional_mdm_account + self.source_mdm_account = source_mdm_account + self.source_mdm_namespace = source_mdm_namespace + self.availabilities = availabilities + self.dimensions = dimensions diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification_py3.py deleted file mode 100644 index c1cc4ad39e72..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification_py3.py +++ /dev/null @@ -1,68 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class OperationMetricSpecification(Model): - """Details about an operation related to metrics. - - :param name: The name of the metric. - :type name: str - :param display_name: Localized display name of the metric. - :type display_name: str - :param display_description: The description of the metric. - :type display_description: str - :param unit: The unit that the metric is measured in. - :type unit: str - :param aggregation_type: The type of metric aggregation. - :type aggregation_type: str - :param enable_regional_mdm_account: Whether or not the service is using - regional MDM accounts. - :type enable_regional_mdm_account: str - :param source_mdm_account: The name of the MDM account. - :type source_mdm_account: str - :param source_mdm_namespace: The name of the MDM namespace. - :type source_mdm_namespace: str - :param availabilities: Defines how often data for metrics becomes - available. - :type availabilities: - list[~azure.mgmt.datafactory.models.OperationMetricAvailability] - :param dimensions: Defines the metric dimension. - :type dimensions: - list[~azure.mgmt.datafactory.models.OperationMetricDimension] - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'display_description': {'key': 'displayDescription', 'type': 'str'}, - 'unit': {'key': 'unit', 'type': 'str'}, - 'aggregation_type': {'key': 'aggregationType', 'type': 'str'}, - 'enable_regional_mdm_account': {'key': 'enableRegionalMdmAccount', 'type': 'str'}, - 'source_mdm_account': {'key': 'sourceMdmAccount', 'type': 'str'}, - 'source_mdm_namespace': {'key': 'sourceMdmNamespace', 'type': 'str'}, - 'availabilities': {'key': 'availabilities', 'type': '[OperationMetricAvailability]'}, - 'dimensions': {'key': 'dimensions', 'type': '[OperationMetricDimension]'}, - } - - def __init__(self, *, name: str=None, display_name: str=None, display_description: str=None, unit: str=None, aggregation_type: str=None, enable_regional_mdm_account: str=None, source_mdm_account: str=None, source_mdm_namespace: str=None, availabilities=None, dimensions=None, **kwargs) -> None: - super(OperationMetricSpecification, self).__init__(**kwargs) - self.name = name - self.display_name = display_name - self.display_description = display_description - self.unit = unit - self.aggregation_type = aggregation_type - self.enable_regional_mdm_account = enable_regional_mdm_account - self.source_mdm_account = source_mdm_account - self.source_mdm_namespace = source_mdm_namespace - self.availabilities = availabilities - self.dimensions = dimensions diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_py3.py deleted file mode 100644 index 23305038a090..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_py3.py +++ /dev/null @@ -1,41 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class Operation(Model): - """Azure Data Factory API operation definition. - - :param name: Operation name: {provider}/{resource}/{operation} - :type name: str - :param origin: The intended executor of the operation. - :type origin: str - :param display: Metadata associated with the operation. - :type display: ~azure.mgmt.datafactory.models.OperationDisplay - :param service_specification: Details about a service operation. - :type service_specification: - ~azure.mgmt.datafactory.models.OperationServiceSpecification - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'origin': {'key': 'origin', 'type': 'str'}, - 'display': {'key': 'display', 'type': 'OperationDisplay'}, - 'service_specification': {'key': 'properties.serviceSpecification', 'type': 'OperationServiceSpecification'}, - } - - def __init__(self, *, name: str=None, origin: str=None, display=None, service_specification=None, **kwargs) -> None: - super(Operation, self).__init__(**kwargs) - self.name = name - self.origin = origin - self.display = display - self.service_specification = service_specification diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification.py index 82622a44af5a..26cac12bec97 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification.py @@ -28,7 +28,7 @@ class OperationServiceSpecification(Model): 'metric_specifications': {'key': 'metricSpecifications', 'type': '[OperationMetricSpecification]'}, } - def __init__(self, **kwargs): - super(OperationServiceSpecification, self).__init__(**kwargs) - self.log_specifications = kwargs.get('log_specifications', None) - self.metric_specifications = kwargs.get('metric_specifications', None) + def __init__(self, log_specifications=None, metric_specifications=None): + super(OperationServiceSpecification, self).__init__() + self.log_specifications = log_specifications + self.metric_specifications = metric_specifications diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification_py3.py deleted file mode 100644 index 4215dac6eb7f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification_py3.py +++ /dev/null @@ -1,34 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class OperationServiceSpecification(Model): - """Details about a service operation. - - :param log_specifications: Details about operations related to logs. 
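A sketch wiring the three operations-metadata models together under the constructors above; the log category and metric names are illustrative, not an authoritative list:

    from azure.mgmt.datafactory.models import (OperationLogSpecification,
                                               OperationMetricSpecification,
                                               OperationServiceSpecification)

    spec = OperationServiceSpecification(
        log_specifications=[OperationLogSpecification(name='ActivityRuns')],  # illustrative category
        metric_specifications=[OperationMetricSpecification(
            name='PipelineSucceededRuns', unit='Count')])                     # illustrative metric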
- :type log_specifications: - list[~azure.mgmt.datafactory.models.OperationLogSpecification] - :param metric_specifications: Details about operations related to metrics. - :type metric_specifications: - list[~azure.mgmt.datafactory.models.OperationMetricSpecification] - """ - - _attribute_map = { - 'log_specifications': {'key': 'logSpecifications', 'type': '[OperationLogSpecification]'}, - 'metric_specifications': {'key': 'metricSpecifications', 'type': '[OperationMetricSpecification]'}, - } - - def __init__(self, *, log_specifications=None, metric_specifications=None, **kwargs) -> None: - super(OperationServiceSpecification, self).__init__(**kwargs) - self.log_specifications = log_specifications - self.metric_specifications = metric_specifications diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service.py index 19f715dfd9e2..8fd0b5ebf775 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service.py @@ -15,8 +15,6 @@ class OracleLinkedService(LinkedService): """Oracle database. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,9 +29,9 @@ class OracleLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param connection_string: Required. The connection string. Type: string, + :param connection_string: The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param password: The Azure key vault secret reference of password in @@ -63,9 +61,9 @@ class OracleLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(OracleLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, connection_string, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, password=None, encrypted_credential=None): + super(OracleLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.connection_string = connection_string + self.password = password + self.encrypted_credential = encrypted_credential self.type = 'Oracle' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service_py3.py deleted file mode 100644 index a46f0463afb5..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service_py3.py +++ /dev/null @@ -1,71 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. 
All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class OracleLinkedService(LinkedService): - """Oracle database. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in - connection string. - :type password: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
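A minimal sketch of the new OracleLinkedService constructor, assuming a Key Vault-backed password; the connection string, vault reference name, and secret name are placeholders:

    from azure.mgmt.datafactory.models import (AzureKeyVaultSecretReference,
                                               LinkedServiceReference,
                                               OracleLinkedService)

    ls = OracleLinkedService(
        connection_string='host=myoracle;port=1521;sid=orcl;user id=scott',  # placeholder
        password=AzureKeyVaultSecretReference(
            store=LinkedServiceReference(reference_name='MyKeyVaultLinkedService'),  # placeholder
            secret_name='oracle-password'))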
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(OracleLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'Oracle' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py index b4e9aa1b92f3..d7b70b724d4f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py @@ -38,9 +38,9 @@ class OraclePartitionSettings(Model): 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, } - def __init__(self, **kwargs): - super(OraclePartitionSettings, self).__init__(**kwargs) - self.partition_names = kwargs.get('partition_names', None) - self.partition_column_name = kwargs.get('partition_column_name', None) - self.partition_upper_bound = kwargs.get('partition_upper_bound', None) - self.partition_lower_bound = kwargs.get('partition_lower_bound', None) + def __init__(self, partition_names=None, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None): + super(OraclePartitionSettings, self).__init__() + self.partition_names = partition_names + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py deleted file mode 100644 index 10641aab7f9f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
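For the partition settings, a sketch of range partitioning against the constructor above; the column name and bounds are placeholders, and values are strings because the service accepts string-or-Expression objects:

    from azure.mgmt.datafactory.models import OraclePartitionSettings

    # Range partitioning on an integer column of the Oracle table.
    settings = OraclePartitionSettings(
        partition_column_name='ORDER_ID',   # placeholder integer column
        partition_lower_bound='1',
        partition_upper_bound='1000000')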
-# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class OraclePartitionSettings(Model): - """The settings that will be leveraged for Oracle source partitioning. - - :param partition_names: Names of the physical partitions of Oracle table. - :type partition_names: object - :param partition_column_name: The name of the column in integer type that - will be used for proceeding range partitioning. Type: string (or - Expression with resultType string). - :type partition_column_name: object - :param partition_upper_bound: The maximum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_upper_bound: object - :param partition_lower_bound: The minimum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_lower_bound: object - """ - - _attribute_map = { - 'partition_names': {'key': 'partitionNames', 'type': 'object'}, - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, - 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, - 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, - } - - def __init__(self, *, partition_names=None, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None: - super(OraclePartitionSettings, self).__init__(**kwargs) - self.partition_names = partition_names - self.partition_column_name = partition_column_name - self.partition_upper_bound = partition_upper_bound - self.partition_lower_bound = partition_lower_bound diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service.py index 44ce000868b7..89921338fc5a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service.py @@ -15,8 +15,6 @@ class OracleServiceCloudLinkedService(LinkedService): """Oracle Service Cloud linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,15 +29,15 @@ class OracleServiceCloudLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param host: Required. The URL of the Oracle Service Cloud instance. + :param host: The URL of the Oracle Service Cloud instance. :type host: object - :param username: Required. The user name that you use to access Oracle - Service Cloud server. + :param username: The user name that you use to access Oracle Service Cloud + server. :type username: object - :param password: Required. The password corresponding to the user name - that you provided in the username key. + :param password: The password corresponding to the user name that you + provided in the username key. 
:type password: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: @@ -83,13 +81,13 @@ class OracleServiceCloudLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(OracleServiceCloudLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, host, username, password, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None): + super(OracleServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.host = host + self.username = username + self.password = password + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential self.type = 'OracleServiceCloud' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service_py3.py deleted file mode 100644 index 8732e2e82ca0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service_py3.py +++ /dev/null @@ -1,95 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class OracleServiceCloudLinkedService(LinkedService): - """Oracle Service Cloud linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. 
The URL of the Oracle Service Cloud instance. - :type host: object - :param username: Required. The user name that you use to access Oracle - Service Cloud server. - :type username: object - :param password: Required. The password corresponding to the user name - that you provided in the username key. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. Type: - boolean (or Expression with resultType boolean). - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. Type: boolean (or - Expression with resultType boolean). - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. Type: - boolean (or Expression with resultType boolean). - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'username': {'required': True}, - 'password': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, username, password, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(OracleServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.username = username - self.password = password - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'OracleServiceCloud' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset.py 
index 35ce3439d8a0..19a2727042e1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset.py @@ -15,8 +15,6 @@ class OracleServiceCloudObjectDataset(Dataset): """Oracle Service Cloud dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class OracleServiceCloudObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class OracleServiceCloudObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -66,7 +64,7 @@ class OracleServiceCloudObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(OracleServiceCloudObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): + super(OracleServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name self.type = 'OracleServiceCloudObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset_py3.py deleted file mode 100644 index a478e1abc828..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class OracleServiceCloudObjectDataset(Dataset): - """Oracle Service Cloud dataset. - - All required parameters must be populated in order to send to Azure. 
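A minimal usage sketch for the two regenerated Oracle Service Cloud models, assuming the SecureString and LinkedServiceReference helpers from the same models package; the host, credentials, reference name, and table name are placeholders:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        OracleServiceCloudLinkedService,
        OracleServiceCloudObjectDataset,
        SecureString,
    )

    # Placeholder values throughout; only the parameter names come from this
    # patch.
    linked_service = OracleServiceCloudLinkedService(
        host='https://mysite.example.com',
        username='integration.user',
        password=SecureString(value='<password>'),
    )
    dataset = OracleServiceCloudObjectDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='OracleServiceCloudLS'),
        table_name='Contacts',
    )
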
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(OracleServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'OracleServiceCloudObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source.py index f42291941393..32097516792e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source.py @@ -15,8 +15,6 @@ class OracleServiceCloudSource(CopySource): """A copy activity Oracle Service Cloud source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class OracleServiceCloudSource(CopySource): for the source data store. 
Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). @@ -51,7 +49,7 @@ class OracleServiceCloudSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(OracleServiceCloudSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'OracleServiceCloudSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source_py3.py deleted file mode 100644 index 1fa5d6eb3748..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class OracleServiceCloudSource(CopySource): - """A copy activity Oracle Service Cloud source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'OracleServiceCloudSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink.py index 1f6c747c49db..879a6a86e771 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink.py @@ -15,8 +15,6 @@ class OracleSink(CopySink): """A copy activity Oracle sink. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -38,7 +36,7 @@ class OracleSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). @@ -60,7 +58,7 @@ class OracleSink(CopySink): 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } - def __init__(self, **kwargs): - super(OracleSink, self).__init__(**kwargs) - self.pre_copy_script = kwargs.get('pre_copy_script', None) + def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None): + super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.pre_copy_script = pre_copy_script self.type = 'OracleSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink_py3.py deleted file mode 100644 index 3a571c66732a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink_py3.py +++ /dev/null @@ -1,66 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. 
-# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class OracleSink(CopySink): - """A copy activity Oracle sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression - with resultType string). - :type pre_copy_script: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: - super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.pre_copy_script = pre_copy_script - self.type = 'OracleSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py index db436192eca1..9ba74b694dc4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py @@ -15,8 +15,6 @@ class OracleSource(CopySource): """A copy activity Oracle source. - All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class OracleSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param oracle_reader_query: Oracle reader query. Type: string (or Expression with resultType string). @@ -67,10 +65,10 @@ class OracleSource(CopySource): 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, } - def __init__(self, **kwargs): - super(OracleSource, self).__init__(**kwargs) - self.oracle_reader_query = kwargs.get('oracle_reader_query', None) - self.query_timeout = kwargs.get('query_timeout', None) - self.partition_option = kwargs.get('partition_option', None) - self.partition_settings = kwargs.get('partition_settings', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, oracle_reader_query=None, query_timeout=None, partition_option=None, partition_settings=None): + super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.oracle_reader_query = oracle_reader_query + self.query_timeout = query_timeout + self.partition_option = partition_option + self.partition_settings = partition_settings self.type = 'OracleSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py deleted file mode 100644 index 0a871809896e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py +++ /dev/null @@ -1,76 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class OracleSource(CopySource): - """A copy activity Oracle source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param oracle_reader_query: Oracle reader query. 
Type: string (or - Expression with resultType string). - :type oracle_reader_query: object - :param query_timeout: Query timeout. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param partition_option: The partition mechanism that will be used for - Oracle read in parallel. Possible values include: 'None', - 'PhysicalPartitionsOfTable', 'DynamicRange' - :type partition_option: str or - ~azure.mgmt.datafactory.models.OraclePartitionOption - :param partition_settings: The settings that will be leveraged for Oracle - source partitioning. - :type partition_settings: - ~azure.mgmt.datafactory.models.OraclePartitionSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, oracle_reader_query=None, query_timeout=None, partition_option=None, partition_settings=None, **kwargs) -> None: - super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.oracle_reader_query = oracle_reader_query - self.query_timeout = query_timeout - self.partition_option = partition_option - self.partition_settings = partition_settings - self.type = 'OracleSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py index c76b5ced3e5c..867e01cd5174 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py @@ -15,8 +15,6 @@ class OracleTableDataset(Dataset): """The on-premises Oracle database dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class OracleTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class OracleTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. 
Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. @@ -74,9 +72,9 @@ class OracleTableDataset(Dataset): 'table': {'key': 'typeProperties.table', 'type': 'object'}, } - def __init__(self, **kwargs): - super(OracleTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.oracle_table_dataset_schema = kwargs.get('oracle_table_dataset_schema', None) - self.table = kwargs.get('table', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, oracle_table_dataset_schema=None, table=None): + super(OracleTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name + self.oracle_table_dataset_schema = oracle_table_dataset_schema + self.table = table self.type = 'OracleTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py deleted file mode 100644 index b588fbac5244..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class OracleTableDataset(Dataset): - """The on-premises Oracle database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. 
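The partition options above pair with the OraclePartitionSettings class from earlier in this patch; a sketch of a parallel Oracle read under the regenerated constructors, with the schema, table, column, and bounds as illustrative placeholders:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        OraclePartitionSettings,
        OracleSource,
        OracleTableDataset,
    )

    # 'DynamicRange' is one of the documented partition options; the names
    # and bounds are illustrative placeholders.
    source = OracleSource(
        partition_option='DynamicRange',
        partition_settings=OraclePartitionSettings(
            partition_column_name='ORDER_ID',
            partition_lower_bound='1',
            partition_upper_bound='1000000',
        ),
        query_timeout='02:00:00',  # matches the documented hh:mm:ss pattern
    )
    dataset = OracleTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='OracleDbLS'),
        oracle_table_dataset_schema='SALES',
        table='ORDERS',
    )
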
- :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param oracle_table_dataset_schema: The schema name of the on-premises - Oracle database. Type: string (or Expression with resultType string). - :type oracle_table_dataset_schema: object - :param table: The table name of the on-premises Oracle database. Type: - string (or Expression with resultType string). - :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'oracle_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, oracle_table_dataset_schema=None, table=None, **kwargs) -> None: - super(OracleTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.oracle_table_dataset_schema = oracle_table_dataset_schema - self.table = table - self.type = 'OracleTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format.py index 8f0a0322062c..b91f4d5952f7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format.py @@ -15,8 +15,6 @@ class OrcFormat(DatasetStorageFormat): """The data stored in Optimized Row Columnar (ORC) format. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -26,7 +24,7 @@ class OrcFormat(DatasetStorageFormat): :param deserializer: Deserializer. Type: string (or Expression with resultType string). :type deserializer: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. 
:type type: str """ @@ -34,13 +32,6 @@ class OrcFormat(DatasetStorageFormat): 'type': {'required': True}, } - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(OrcFormat, self).__init__(**kwargs) + def __init__(self, additional_properties=None, serializer=None, deserializer=None): + super(OrcFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer) self.type = 'OrcFormat' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format_py3.py deleted file mode 100644 index 40a0e389ccc3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format_py3.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_storage_format_py3 import DatasetStorageFormat - - -class OrcFormat(DatasetStorageFormat): - """The data stored in Optimized Row Columnar (ORC) format. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param serializer: Serializer. Type: string (or Expression with resultType - string). - :type serializer: object - :param deserializer: Deserializer. Type: string (or Expression with - resultType string). - :type deserializer: object - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, **kwargs) -> None: - super(OrcFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) - self.type = 'OrcFormat' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification.py index aef855d955f0..5747dc8efedf 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification.py @@ -15,10 +15,8 @@ class ParameterSpecification(Model): """Definition of a single parameter for an entity. - All required parameters must be populated in order to send to Azure. - - :param type: Required. Parameter type. 
Possible values include: 'Object', - 'String', 'Int', 'Float', 'Bool', 'Array', 'SecureString' + :param type: Parameter type. Possible values include: 'Object', 'String', + 'Int', 'Float', 'Bool', 'Array', 'SecureString' :type type: str or ~azure.mgmt.datafactory.models.ParameterType :param default_value: Default value of parameter. :type default_value: object @@ -33,7 +31,7 @@ class ParameterSpecification(Model): 'default_value': {'key': 'defaultValue', 'type': 'object'}, } - def __init__(self, **kwargs): - super(ParameterSpecification, self).__init__(**kwargs) - self.type = kwargs.get('type', None) - self.default_value = kwargs.get('default_value', None) + def __init__(self, type, default_value=None): + super(ParameterSpecification, self).__init__() + self.type = type + self.default_value = default_value diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification_py3.py deleted file mode 100644 index d5b6f981d365..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification_py3.py +++ /dev/null @@ -1,39 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class ParameterSpecification(Model): - """Definition of a single parameter for an entity. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Parameter type. Possible values include: 'Object', - 'String', 'Int', 'Float', 'Bool', 'Array', 'SecureString' - :type type: str or ~azure.mgmt.datafactory.models.ParameterType - :param default_value: Default value of parameter. - :type default_value: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'default_value': {'key': 'defaultValue', 'type': 'object'}, - } - - def __init__(self, *, type, default_value=None, **kwargs) -> None: - super(ParameterSpecification, self).__init__(**kwargs) - self.type = type - self.default_value = default_value diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset.py index ffaf8e1f6d93..44f0516308be 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset.py @@ -15,8 +15,6 @@ class ParquetDataset(Dataset): """Parquet dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class ParquetDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. 
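A short sketch of the simplified ParameterSpecification constructor, as a dataset would consume it in its parameters dict; the parameter key and default value are placeholders:

    from azure.mgmt.datafactory.models import ParameterSpecification

    # 'String' comes from the documented ParameterType values; the key and
    # default below are illustrative.
    parameters = {
        'TableName': ParameterSpecification(type='String',
                                            default_value='Contacts'),
    }
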
+ :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,9 +39,9 @@ class ParquetDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param location: Required. The location of the parquet storage. + :param location: The location of the parquet storage. :type location: ~azure.mgmt.datafactory.models.DatasetLocation :param compression_codec: :type compression_codec: object @@ -69,8 +67,8 @@ class ParquetDataset(Dataset): 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, } - def __init__(self, **kwargs): - super(ParquetDataset, self).__init__(**kwargs) - self.location = kwargs.get('location', None) - self.compression_codec = kwargs.get('compression_codec', None) + def __init__(self, linked_service_name, location, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, compression_codec=None): + super(ParquetDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.location = location + self.compression_codec = compression_codec self.type = 'Parquet' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset_py3.py deleted file mode 100644 index 4d754450ce15..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset_py3.py +++ /dev/null @@ -1,76 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class ParquetDataset(Dataset): - """Parquet dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param location: Required. The location of the parquet storage. - :type location: ~azure.mgmt.datafactory.models.DatasetLocation - :param compression_codec: - :type compression_codec: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'location': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, compression_codec=None, **kwargs) -> None: - super(ParquetDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.location = location - self.compression_codec = compression_codec - self.type = 'Parquet' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format.py index d742ff24b522..4d953b3a4e78 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format.py @@ -15,8 +15,6 @@ class ParquetFormat(DatasetStorageFormat): """The data stored in Parquet format. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -26,7 +24,7 @@ class ParquetFormat(DatasetStorageFormat): :param deserializer: Deserializer. Type: string (or Expression with resultType string). :type deserializer: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. 
:type type: str """ @@ -34,13 +32,6 @@ class ParquetFormat(DatasetStorageFormat): 'type': {'required': True}, } - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(ParquetFormat, self).__init__(**kwargs) + def __init__(self, additional_properties=None, serializer=None, deserializer=None): + super(ParquetFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer) self.type = 'ParquetFormat' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format_py3.py deleted file mode 100644 index 36a6f5c88c4d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format_py3.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_storage_format_py3 import DatasetStorageFormat - - -class ParquetFormat(DatasetStorageFormat): - """The data stored in Parquet format. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param serializer: Serializer. Type: string (or Expression with resultType - string). - :type serializer: object - :param deserializer: Deserializer. Type: string (or Expression with - resultType string). - :type deserializer: object - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, **kwargs) -> None: - super(ParquetFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) - self.type = 'ParquetFormat' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py index dea3e0f8fc52..fc6f7a5ac0fb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py @@ -15,8 +15,6 @@ class ParquetSink(CopySink): """A copy activity Parquet sink. - All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -38,7 +36,7 @@ class ParquetSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param store_settings: Parquet store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings @@ -59,7 +57,7 @@ class ParquetSink(CopySink): 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, } - def __init__(self, **kwargs): - super(ParquetSink, self).__init__(**kwargs) - self.store_settings = kwargs.get('store_settings', None) + def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None): + super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.store_settings = store_settings self.type = 'ParquetSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py deleted file mode 100644 index 463044fef83f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py +++ /dev/null @@ -1,65 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class ParquetSink(CopySink): - """A copy activity Parquet sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. 
- :type type: str - :param store_settings: Parquet store settings. - :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: - super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.store_settings = store_settings - self.type = 'ParquetSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py index ab888c7361a2..02f7d1dea135 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py @@ -15,8 +15,6 @@ class ParquetSource(CopySource): """A copy activity Parquet source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class ParquetSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param store_settings: Parquet store settings. 
:type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings @@ -50,7 +48,7 @@ class ParquetSource(CopySource): 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, } - def __init__(self, **kwargs): - super(ParquetSource, self).__init__(**kwargs) - self.store_settings = kwargs.get('store_settings', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None): + super(ParquetSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.store_settings = store_settings self.type = 'ParquetSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py deleted file mode 100644 index 332a7b9b8c5e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py +++ /dev/null @@ -1,56 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class ParquetSource(CopySource): - """A copy activity Parquet source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param store_settings: Parquet store settings. 
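Since every parameter on the regenerated sink and source constructors is optional, a copy pair can be sketched with only the fields of interest; the pre-copy script and batch size are placeholders, and wiring these into a copy activity happens elsewhere in the SDK:

    from azure.mgmt.datafactory.models import OracleSink, ParquetSource

    # Read Parquet, load Oracle; only the parameter names come from this
    # patch, the values are illustrative.
    source = ParquetSource(max_concurrent_connections=4)
    sink = OracleSink(
        pre_copy_script='TRUNCATE TABLE STAGING.EVENTS',
        write_batch_size=10000,
    )
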
- :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: - super(ParquetSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.store_settings = store_settings - self.type = 'ParquetSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service.py index d7ae0bc075e7..1f9ea0741517 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service.py @@ -15,8 +15,6 @@ class PaypalLinkedService(LinkedService): """Paypal Service linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,13 +29,11 @@ class PaypalLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param host: Required. The URL of the PayPal instance. (i.e. - api.sandbox.paypal.com) + :param host: The URL of the PayPal instance. (i.e. api.sandbox.paypal.com) :type host: object - :param client_id: Required. The client ID associated with your PayPal - application. + :param client_id: The client ID associated with your PayPal application. :type client_id: object :param client_secret: The client secret associated with your PayPal application. 
@@ -80,13 +76,13 @@ class PaypalLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(PaypalLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, host, client_id, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None): + super(PaypalLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.host = host + self.client_id = client_id + self.client_secret = client_secret + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential self.type = 'Paypal' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service_py3.py deleted file mode 100644 index c11cda7a52f3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service_py3.py +++ /dev/null @@ -1,92 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class PaypalLinkedService(LinkedService): - """Paypal Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The URL of the PayPal instance. (i.e. - api.sandbox.paypal.com) - :type host: object - :param client_id: Required. The client ID associated with your PayPal - application. 
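
Under the new signature, host and client_id are required positional parameters rather than optional kwargs lookups. A sketch of constructing the linked service, assuming SecureString (a SecretBase subtype in this package) for the secret; the endpoint and credentials are placeholders:

from azure.mgmt.datafactory.models import PaypalLinkedService, SecureString

paypal_ls = PaypalLinkedService(
    'api.sandbox.paypal.com',    # host, now positional
    'example-client-id',         # client_id, now positional
    client_secret=SecureString(value='example-client-secret'),
    use_encrypted_endpoints=True,
)
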
- :type client_id: object - :param client_secret: The client secret associated with your PayPal - application. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(PaypalLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.client_id = client_id - self.client_secret = client_secret - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'Paypal' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset.py index d0fdc678841b..a7f1ef9964d6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset.py @@ -15,8 +15,6 @@ class PaypalObjectDataset(Dataset): """Paypal Service dataset. - All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class PaypalObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class PaypalObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -66,7 +64,7 @@ class PaypalObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(PaypalObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): + super(PaypalObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name self.type = 'PaypalObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset_py3.py deleted file mode 100644 index 55df7c97166d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class PaypalObjectDataset(Dataset): - """Paypal Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
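
The dataset follows the same pattern, with linked_service_name promoted to the leading positional parameter. A sketch; the referenced linked service name is a placeholder for one assumed to already exist in the factory:

from azure.mgmt.datafactory.models import LinkedServiceReference, PaypalObjectDataset

dataset = PaypalObjectDataset(
    LinkedServiceReference(reference_name='PaypalLinkedService'),  # now positional
    table_name='transactions',
)
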
- :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(PaypalObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'PaypalObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source.py index 94cdbccae6ee..cd1770c81c9e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source.py @@ -15,8 +15,6 @@ class PaypalSource(CopySource): """A copy activity Paypal Service source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class PaypalSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
@@ -51,7 +49,7 @@ class PaypalSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(PaypalSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'PaypalSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source_py3.py deleted file mode 100644 index 05730d0ae067..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class PaypalSource(CopySource): - """A copy activity Paypal Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
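
The source change is mechanical: query simply becomes a named parameter, and PhoenixSource further down changes identically. For example, with a placeholder query:

from azure.mgmt.datafactory.models import PaypalSource

source = PaypalSource(query='SELECT * FROM Payment_Experience')
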
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'PaypalSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service.py index 308a8e4cf592..da390e8f0d34 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service.py @@ -15,8 +15,6 @@ class PhoenixLinkedService(LinkedService): """Phoenix server linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,10 +29,10 @@ class PhoenixLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param host: Required. The IP address or host name of the Phoenix server. - (i.e. 192.168.222.160) + :param host: The IP address or host name of the Phoenix server. (i.e. + 192.168.222.160) :type host: object :param port: The TCP port that the Phoenix server uses to listen for client connections. The default value is 8765. @@ -43,8 +41,8 @@ class PhoenixLinkedService(LinkedService): (i.e. /gateway/sandbox/phoenix/version). The default value is hbasephoenix if using WindowsAzureHDInsightService. :type http_path: object - :param authentication_type: Required. The authentication mechanism used to - connect to the Phoenix server. Possible values include: 'Anonymous', + :param authentication_type: The authentication mechanism used to connect + to the Phoenix server. 
Possible values include: 'Anonymous', 'UsernameAndPassword', 'WindowsAzureHDInsightService' :type authentication_type: str or ~azure.mgmt.datafactory.models.PhoenixAuthenticationType @@ -104,18 +102,18 @@ class PhoenixLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(PhoenixLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.port = kwargs.get('port', None) - self.http_path = kwargs.get('http_path', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, host, authentication_type, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, port=None, http_path=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None): + super(PhoenixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.host = host + self.port = port + self.http_path = http_path + self.authentication_type = authentication_type + self.username = username + self.password = password + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.encrypted_credential = encrypted_credential self.type = 'Phoenix' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service_py3.py deleted file mode 100644 index de8210c2cc89..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service_py3.py +++ /dev/null @@ -1,121 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class PhoenixLinkedService(LinkedService): - """Phoenix server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. 
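
Here host and authentication_type become the two leading positional parameters. A sketch using the UsernameAndPassword mechanism (msrest also accepts the enum's string form); host, port, and credentials are placeholders:

from azure.mgmt.datafactory.models import PhoenixLinkedService, SecureString

phoenix_ls = PhoenixLinkedService(
    '192.168.222.160',        # host, now positional
    'UsernameAndPassword',    # authentication_type, now positional
    port=8765,
    username='phoenix-user',
    password=SecureString(value='example-password'),
    enable_ssl=True,
)
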
- :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The IP address or host name of the Phoenix server. - (i.e. 192.168.222.160) - :type host: object - :param port: The TCP port that the Phoenix server uses to listen for - client connections. The default value is 8765. - :type port: object - :param http_path: The partial URL corresponding to the Phoenix server. - (i.e. /gateway/sandbox/phoenix/version). The default value is hbasephoenix - if using WindowsAzureHDInsightService. - :type http_path: object - :param authentication_type: Required. The authentication mechanism used to - connect to the Phoenix server. Possible values include: 'Anonymous', - 'UsernameAndPassword', 'WindowsAzureHDInsightService' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.PhoenixAuthenticationType - :param username: The user name used to connect to the Phoenix server. - :type username: object - :param password: The password corresponding to the user name. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are - encrypted using SSL. The default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing - trusted CA certificates for verifying the server when connecting over SSL. - This property can only be set when using SSL on self-hosted IR. The - default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate - from the system trust store or from a specified PEM file. The default - value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a - CA-issued SSL certificate name to match the host name of the server when - connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow - self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, http_path=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: - super(PhoenixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.port = port - self.http_path = http_path - self.authentication_type = authentication_type - self.username = username - self.password = password - self.enable_ssl = enable_ssl - self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch - self.allow_self_signed_server_cert = allow_self_signed_server_cert - self.encrypted_credential = encrypted_credential - self.type = 'Phoenix' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset.py index ccaa2eb49abd..463d37b3c932 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset.py @@ -15,8 +15,6 @@ class PhoenixObjectDataset(Dataset): """Phoenix server dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class PhoenixObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. 
:type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class PhoenixObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. @@ -74,9 +72,9 @@ class PhoenixObjectDataset(Dataset): 'phoenix_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, **kwargs): - super(PhoenixObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.phoenix_object_dataset_schema = kwargs.get('phoenix_object_dataset_schema', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, phoenix_object_dataset_schema=None): + super(PhoenixObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name + self.table = table + self.phoenix_object_dataset_schema = phoenix_object_dataset_schema self.type = 'PhoenixObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset_py3.py deleted file mode 100644 index cda4dc41dc22..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class PhoenixObjectDataset(Dataset): - """Phoenix server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
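
Because table_name is being retired in favor of the split schema and table properties, a new-style dataset would be built as below; note that phoenix_object_dataset_schema is the Python-side name that serializes to typeProperties.schema. All names are placeholders:

from azure.mgmt.datafactory.models import LinkedServiceReference, PhoenixObjectDataset

dataset = PhoenixObjectDataset(
    LinkedServiceReference(reference_name='PhoenixLinkedService'),  # now positional
    table='WEB_EVENTS',
    phoenix_object_dataset_schema='ANALYTICS',
)
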
- :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of the Phoenix. Type: string (or Expression - with resultType string). - :type table: object - :param phoenix_object_dataset_schema: The schema name of the Phoenix. - Type: string (or Expression with resultType string). - :type phoenix_object_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'phoenix_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, phoenix_object_dataset_schema=None, **kwargs) -> None: - super(PhoenixObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.table = table - self.phoenix_object_dataset_schema = phoenix_object_dataset_schema - self.type = 'PhoenixObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source.py index 30171c6177ff..5728762329ac 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source.py @@ -15,8 +15,6 @@ class PhoenixSource(CopySource): """A copy activity Phoenix server source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class PhoenixSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. 
+ :param type: Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). @@ -51,7 +49,7 @@ class PhoenixSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(PhoenixSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'PhoenixSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source_py3.py deleted file mode 100644 index 1384f59e1aa4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class PhoenixSource(CopySource): - """A copy activity Phoenix server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'PhoenixSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder.py index bebc05cb1824..3e66bad233e0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder.py @@ -24,6 +24,6 @@ class PipelineFolder(Model): 'name': {'key': 'name', 'type': 'str'}, } - def __init__(self, **kwargs): - super(PipelineFolder, self).__init__(**kwargs) - self.name = kwargs.get('name', None) + def __init__(self, name=None): + super(PipelineFolder, self).__init__() + self.name = name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder_py3.py deleted file mode 100644 index 02c9b8dbbff1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder_py3.py +++ /dev/null @@ -1,29 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class PipelineFolder(Model): - """The folder that this Pipeline is in. If not specified, Pipeline will appear - at the root level. - - :param name: The name of the folder that this Pipeline is in. - :type name: str - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - } - - def __init__(self, *, name: str=None, **kwargs) -> None: - super(PipelineFolder, self).__init__(**kwargs) - self.name = name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference.py index aa8b23e62932..1d39beea8145 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference.py @@ -18,12 +18,9 @@ class PipelineReference(Model): Variables are only populated by the server, and will be ignored when sending a request. 
- All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Pipeline reference type. Default value: - "PipelineReference" . + :ivar type: Pipeline reference type. Default value: "PipelineReference" . :vartype type: str - :param reference_name: Required. Reference pipeline name. + :param reference_name: Reference pipeline name. :type reference_name: str :param name: Reference name. :type name: str @@ -42,7 +39,7 @@ class PipelineReference(Model): type = "PipelineReference" - def __init__(self, **kwargs): - super(PipelineReference, self).__init__(**kwargs) - self.reference_name = kwargs.get('reference_name', None) - self.name = kwargs.get('name', None) + def __init__(self, reference_name, name=None): + super(PipelineReference, self).__init__() + self.reference_name = reference_name + self.name = name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference_py3.py deleted file mode 100644 index ce63f06092d1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference_py3.py +++ /dev/null @@ -1,48 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class PipelineReference(Model): - """Pipeline reference type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Pipeline reference type. Default value: - "PipelineReference" . - :vartype type: str - :param reference_name: Required. Reference pipeline name. - :type reference_name: str - :param name: Reference name. 
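
reference_name is the only caller-supplied required argument and now leads the signature, while type remains a class-level constant. For example:

from azure.mgmt.datafactory.models import PipelineReference

ref = PipelineReference('MyPipeline')  # reference_name, now positional
assert ref.type == 'PipelineReference'
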
- :type name: str - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - } - - type = "PipelineReference" - - def __init__(self, *, reference_name: str, name: str=None, **kwargs) -> None: - super(PipelineReference, self).__init__(**kwargs) - self.reference_name = reference_name - self.name = name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource.py index a39deaccc87b..e59567d8037d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource.py @@ -72,13 +72,13 @@ class PipelineResource(SubResource): 'folder': {'key': 'properties.folder', 'type': 'PipelineFolder'}, } - def __init__(self, **kwargs): - super(PipelineResource, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.description = kwargs.get('description', None) - self.activities = kwargs.get('activities', None) - self.parameters = kwargs.get('parameters', None) - self.variables = kwargs.get('variables', None) - self.concurrency = kwargs.get('concurrency', None) - self.annotations = kwargs.get('annotations', None) - self.folder = kwargs.get('folder', None) + def __init__(self, additional_properties=None, description=None, activities=None, parameters=None, variables=None, concurrency=None, annotations=None, folder=None): + super(PipelineResource, self).__init__() + self.additional_properties = additional_properties + self.description = description + self.activities = activities + self.parameters = parameters + self.variables = variables + self.concurrency = concurrency + self.annotations = annotations + self.folder = folder diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource_py3.py deleted file mode 100644 index 8299cdb73887..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource_py3.py +++ /dev/null @@ -1,84 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .sub_resource_py3 import SubResource - - -class PipelineResource(SubResource): - """Pipeline resource type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. 
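
PipelineResource keeps an all-optional signature; only the kwargs plumbing is dropped. A sketch of assembling one and, assuming an authenticated DataFactoryManagementClient named client, submitting it; resource names are placeholders:

from azure.mgmt.datafactory.models import PipelineFolder, PipelineResource

pipeline = PipelineResource(
    description='Ingests PayPal transactions',
    concurrency=1,
    folder=PipelineFolder(name='ingestion'),
)
client.pipelines.create_or_update('example-rg', 'example-factory', 'IngestPaypal', pipeline)
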
- :vartype etag: str - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: The description of the pipeline. - :type description: str - :param activities: List of activities in pipeline. - :type activities: list[~azure.mgmt.datafactory.models.Activity] - :param parameters: List of parameters for pipeline. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param variables: List of variables for pipeline. - :type variables: dict[str, - ~azure.mgmt.datafactory.models.VariableSpecification] - :param concurrency: The max number of concurrent runs for the pipeline. - :type concurrency: int - :param annotations: List of tags that can be used for describing the - Pipeline. - :type annotations: list[object] - :param folder: The folder that this Pipeline is in. If not specified, - Pipeline will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.PipelineFolder - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'concurrency': {'minimum': 1}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'properties.description', 'type': 'str'}, - 'activities': {'key': 'properties.activities', 'type': '[Activity]'}, - 'parameters': {'key': 'properties.parameters', 'type': '{ParameterSpecification}'}, - 'variables': {'key': 'properties.variables', 'type': '{VariableSpecification}'}, - 'concurrency': {'key': 'properties.concurrency', 'type': 'int'}, - 'annotations': {'key': 'properties.annotations', 'type': '[object]'}, - 'folder': {'key': 'properties.folder', 'type': 'PipelineFolder'}, - } - - def __init__(self, *, additional_properties=None, description: str=None, activities=None, parameters=None, variables=None, concurrency: int=None, annotations=None, folder=None, **kwargs) -> None: - super(PipelineResource, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.description = description - self.activities = activities - self.parameters = parameters - self.variables = variables - self.concurrency = concurrency - self.annotations = annotations - self.folder = folder diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run.py index a2407bd9835f..bb28998cf999 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run.py @@ -82,9 +82,9 @@ class PipelineRun(Model): 'message': {'key': 'message', 'type': 'str'}, } - def __init__(self, **kwargs): - super(PipelineRun, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) + def __init__(self, additional_properties=None): + super(PipelineRun, self).__init__() + self.additional_properties = additional_properties self.run_id = None self.run_group_id = None self.is_latest = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by.py index acefb80fd078..dc8d3ae847c4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by.py @@ -38,8 +38,8 @@ class PipelineRunInvokedBy(Model): 'invoked_by_type': {'key': 'invokedByType', 'type': 'str'}, } - def __init__(self, **kwargs): - super(PipelineRunInvokedBy, self).__init__(**kwargs) + def __init__(self): + super(PipelineRunInvokedBy, self).__init__() self.name = None self.id = None self.invoked_by_type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by_py3.py deleted file mode 100644 index c954a18b8a67..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by_py3.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class PipelineRunInvokedBy(Model): - """Provides entity name and id that started the pipeline run. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar name: Name of the entity that started the pipeline run. - :vartype name: str - :ivar id: The ID of the entity that started the run. - :vartype id: str - :ivar invoked_by_type: The type of the entity that started the run. - :vartype invoked_by_type: str - """ - - _validation = { - 'name': {'readonly': True}, - 'id': {'readonly': True}, - 'invoked_by_type': {'readonly': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'id': {'key': 'id', 'type': 'str'}, - 'invoked_by_type': {'key': 'invokedByType', 'type': 'str'}, - } - - def __init__(self, **kwargs) -> None: - super(PipelineRunInvokedBy, self).__init__(**kwargs) - self.name = None - self.id = None - self.invoked_by_type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_py3.py deleted file mode 100644 index 33e0f23f24ac..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_py3.py +++ /dev/null @@ -1,99 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class PipelineRun(Model): - """Information about a pipeline run. 
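
Every field on PipelineRun and PipelineRunInvokedBy is readonly, so these models are only ever received from the service, never sent. A sketch of reading one back, assuming an authenticated DataFactoryManagementClient named client and a known run_id; resource names are placeholders:

run = client.pipeline_runs.get('example-rg', 'example-factory', run_id)
print(run.status, run.duration_in_ms)
print(run.invoked_by.name, run.invoked_by.invoked_by_type)
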
- - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar run_id: Identifier of a run. - :vartype run_id: str - :ivar run_group_id: Identifier that correlates all the recovery runs of a - pipeline run. - :vartype run_group_id: str - :ivar is_latest: Indicates if the recovered pipeline run is the latest in - its group. - :vartype is_latest: bool - :ivar pipeline_name: The pipeline name. - :vartype pipeline_name: str - :ivar parameters: The full or partial list of parameter name, value pair - used in the pipeline run. - :vartype parameters: dict[str, str] - :ivar invoked_by: Entity that started the pipeline run. - :vartype invoked_by: ~azure.mgmt.datafactory.models.PipelineRunInvokedBy - :ivar last_updated: The last updated timestamp for the pipeline run event - in ISO8601 format. - :vartype last_updated: datetime - :ivar run_start: The start time of a pipeline run in ISO8601 format. - :vartype run_start: datetime - :ivar run_end: The end time of a pipeline run in ISO8601 format. - :vartype run_end: datetime - :ivar duration_in_ms: The duration of a pipeline run. - :vartype duration_in_ms: int - :ivar status: The status of a pipeline run. - :vartype status: str - :ivar message: The message from a pipeline run. - :vartype message: str - """ - - _validation = { - 'run_id': {'readonly': True}, - 'run_group_id': {'readonly': True}, - 'is_latest': {'readonly': True}, - 'pipeline_name': {'readonly': True}, - 'parameters': {'readonly': True}, - 'invoked_by': {'readonly': True}, - 'last_updated': {'readonly': True}, - 'run_start': {'readonly': True}, - 'run_end': {'readonly': True}, - 'duration_in_ms': {'readonly': True}, - 'status': {'readonly': True}, - 'message': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'run_id': {'key': 'runId', 'type': 'str'}, - 'run_group_id': {'key': 'runGroupId', 'type': 'str'}, - 'is_latest': {'key': 'isLatest', 'type': 'bool'}, - 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{str}'}, - 'invoked_by': {'key': 'invokedBy', 'type': 'PipelineRunInvokedBy'}, - 'last_updated': {'key': 'lastUpdated', 'type': 'iso-8601'}, - 'run_start': {'key': 'runStart', 'type': 'iso-8601'}, - 'run_end': {'key': 'runEnd', 'type': 'iso-8601'}, - 'duration_in_ms': {'key': 'durationInMs', 'type': 'int'}, - 'status': {'key': 'status', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(PipelineRun, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.run_id = None - self.run_group_id = None - self.is_latest = None - self.pipeline_name = None - self.parameters = None - self.invoked_by = None - self.last_updated = None - self.run_start = None - self.run_end = None - self.duration_in_ms = None - self.status = None - self.message = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response.py index c4591c5467ba..382ca47e7c41 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response.py +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response.py @@ -15,9 +15,7 @@ class PipelineRunsQueryResponse(Model): """A list pipeline runs. - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of pipeline runs. + :param value: List of pipeline runs. :type value: list[~azure.mgmt.datafactory.models.PipelineRun] :param continuation_token: The continuation token for getting the next page of results, if any remaining results exist, null otherwise. @@ -33,7 +31,7 @@ class PipelineRunsQueryResponse(Model): 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, } - def __init__(self, **kwargs): - super(PipelineRunsQueryResponse, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.continuation_token = kwargs.get('continuation_token', None) + def __init__(self, value, continuation_token=None): + super(PipelineRunsQueryResponse, self).__init__() + self.value = value + self.continuation_token = continuation_token diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response_py3.py deleted file mode 100644 index fbc689ec1632..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response_py3.py +++ /dev/null @@ -1,39 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class PipelineRunsQueryResponse(Model): - """A list pipeline runs. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of pipeline runs. - :type value: list[~azure.mgmt.datafactory.models.PipelineRun] - :param continuation_token: The continuation token for getting the next - page of results, if any remaining results exist, null otherwise. 
- :type continuation_token: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[PipelineRun]'}, - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, - } - - def __init__(self, *, value, continuation_token: str=None, **kwargs) -> None: - super(PipelineRunsQueryResponse, self).__init__(**kwargs) - self.value = value - self.continuation_token = continuation_token diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings.py index 5a261d8fea84..0055d320b598 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings.py @@ -44,10 +44,10 @@ class PolybaseSettings(Model): 'use_type_default': {'key': 'useTypeDefault', 'type': 'object'}, } - def __init__(self, **kwargs): - super(PolybaseSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.reject_type = kwargs.get('reject_type', None) - self.reject_value = kwargs.get('reject_value', None) - self.reject_sample_value = kwargs.get('reject_sample_value', None) - self.use_type_default = kwargs.get('use_type_default', None) + def __init__(self, additional_properties=None, reject_type=None, reject_value=None, reject_sample_value=None, use_type_default=None): + super(PolybaseSettings, self).__init__() + self.additional_properties = additional_properties + self.reject_type = reject_type + self.reject_value = reject_value + self.reject_sample_value = reject_sample_value + self.use_type_default = use_type_default diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings_py3.py deleted file mode 100644 index baae78b14c5f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings_py3.py +++ /dev/null @@ -1,53 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class PolybaseSettings(Model): - """PolyBase settings. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param reject_type: Reject type. Possible values include: 'value', - 'percentage' - :type reject_type: str or - ~azure.mgmt.datafactory.models.PolybaseSettingsRejectType - :param reject_value: Specifies the value or the percentage of rows that - can be rejected before the query fails. Type: number (or Expression with - resultType number), minimum: 0. - :type reject_value: object - :param reject_sample_value: Determines the number of rows to attempt to - retrieve before the PolyBase recalculates the percentage of rejected rows. - Type: integer (or Expression with resultType integer), minimum: 0. 
- :type reject_sample_value: object - :param use_type_default: Specifies how to handle missing values in - delimited text files when PolyBase retrieves data from the text file. - Type: boolean (or Expression with resultType boolean). - :type use_type_default: object - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'reject_type': {'key': 'rejectType', 'type': 'str'}, - 'reject_value': {'key': 'rejectValue', 'type': 'object'}, - 'reject_sample_value': {'key': 'rejectSampleValue', 'type': 'object'}, - 'use_type_default': {'key': 'useTypeDefault', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, reject_type=None, reject_value=None, reject_sample_value=None, use_type_default=None, **kwargs) -> None: - super(PolybaseSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.reject_type = reject_type - self.reject_value = reject_value - self.reject_sample_value = reject_sample_value - self.use_type_default = use_type_default diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service.py index f8ce5bd0803e..3203c1f12658 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service.py @@ -15,8 +15,6 @@ class PostgreSqlLinkedService(LinkedService): """Linked service for PostgreSQL data source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,9 +29,9 @@ class PostgreSqlLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param connection_string: Required. The connection string. + :param connection_string: The connection string. :type connection_string: ~azure.mgmt.datafactory.models.SecretBase :param password: The Azure key vault secret reference of password in connection string. 
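For reference, a minimal sketch of calling the reworked PostgreSqlLinkedService constructor above once the patch is applied. The connection values are placeholders, and SecureString is assumed here as the concrete SecretBase implementation (the password could likewise be an AzureKeyVaultSecretReference, per the docstring):

    from azure.mgmt.datafactory.models import PostgreSqlLinkedService, SecureString

    # connection_string becomes the only required argument; the remaining
    # parameters are optional and forwarded to the LinkedService base class.
    postgre_sql_ls = PostgreSqlLinkedService(
        connection_string=SecureString(value='host=<server>;port=5432;database=<db>;uid=<user>'),
        description='PostgreSQL linked service',
    )
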
@@ -62,9 +60,9 @@ class PostgreSqlLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(PostgreSqlLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, connection_string, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, password=None, encrypted_credential=None): + super(PostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.connection_string = connection_string + self.password = password + self.encrypted_credential = encrypted_credential self.type = 'PostgreSql' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service_py3.py deleted file mode 100644 index 0221aa620064..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service_py3.py +++ /dev/null @@ -1,70 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class PostgreSqlLinkedService(LinkedService): - """Linked service for PostgreSQL data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The connection string. - :type connection_string: ~azure.mgmt.datafactory.models.SecretBase - :param password: The Azure key vault secret reference of password in - connection string. - :type password: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'SecretBase'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(PostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'PostgreSql' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source.py index 51dd25b25c60..d0d7bbf3d398 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source.py @@ -15,8 +15,6 @@ class PostgreSqlSource(CopySource): """A copy activity source for PostgreSQL databases. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class PostgreSqlSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType string). 
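A short usage sketch of the simplified PostgreSqlSource constructor above; the query text is an arbitrary example:

    from azure.mgmt.datafactory.models import PostgreSqlSource

    # Every parameter is optional; query accepts a literal string or an
    # ADF Expression with resultType string.
    source = PostgreSqlSource(query='SELECT * FROM public.orders')
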
@@ -51,7 +49,7 @@ class PostgreSqlSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(PostgreSqlSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'PostgreSqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source_py3.py deleted file mode 100644 index 8aa12e4bdf8d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class PostgreSqlSource(CopySource): - """A copy activity source for PostgreSQL databases. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'PostgreSqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset.py index 031a2479815b..03716ef39975 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset.py @@ -15,8 +15,6 @@ class PostgreSqlTableDataset(Dataset): """The PostgreSQL table dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class PostgreSqlTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class PostgreSqlTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. 
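A minimal sketch of the updated PostgreSqlTableDataset constructor, assuming a linked service named 'PostgreSqlLS' already exists in the factory. The PostgreSQL schema is passed as postgre_sql_table_dataset_schema because, per the attribute map above, the plain schema argument is already taken by the dataset's physical type schema:

    from azure.mgmt.datafactory.models import (LinkedServiceReference,
                                               PostgreSqlTableDataset)

    # linked_service_name is the single required argument in the new signature.
    dataset = PostgreSqlTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='PostgreSqlLS'),
        table='orders',                             # typeProperties.table
        postgre_sql_table_dataset_schema='public',  # typeProperties.schema
    )
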
@@ -74,9 +72,9 @@ class PostgreSqlTableDataset(Dataset): 'postgre_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, **kwargs): - super(PostgreSqlTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.postgre_sql_table_dataset_schema = kwargs.get('postgre_sql_table_dataset_schema', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, postgre_sql_table_dataset_schema=None): + super(PostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name + self.table = table + self.postgre_sql_table_dataset_schema = postgre_sql_table_dataset_schema self.type = 'PostgreSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset_py3.py deleted file mode 100644 index 8adb7bd409ea..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class PostgreSqlTableDataset(Dataset): - """The PostgreSQL table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. 
- :type table_name: object - :param table: The PostgreSQL table name. Type: string (or Expression with - resultType string). - :type table: object - :param postgre_sql_table_dataset_schema: The PostgreSQL schema name. Type: - string (or Expression with resultType string). - :type postgre_sql_table_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'postgre_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, postgre_sql_table_dataset_schema=None, **kwargs) -> None: - super(PostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.table = table - self.postgre_sql_table_dataset_schema = postgre_sql_table_dataset_schema - self.type = 'PostgreSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service.py index 21f18f07b262..b209ac5cc3c1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service.py @@ -15,8 +15,6 @@ class PrestoLinkedService(LinkedService): """Presto server linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,22 +29,20 @@ class PrestoLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param host: Required. The IP address or host name of the Presto server. - (i.e. 192.168.222.160) + :param host: The IP address or host name of the Presto server. (i.e. + 192.168.222.160) :type host: object - :param server_version: Required. The version of the Presto server. (i.e. - 0.148-t) + :param server_version: The version of the Presto server. (i.e. 0.148-t) :type server_version: object - :param catalog: Required. The catalog context for all request against the - server. + :param catalog: The catalog context for all request against the server. 
:type catalog: object :param port: The TCP port that the Presto server uses to listen for client connections. The default value is 8080. :type port: object - :param authentication_type: Required. The authentication mechanism used to - connect to the Presto server. Possible values include: 'Anonymous', 'LDAP' + :param authentication_type: The authentication mechanism used to connect + to the Presto server. Possible values include: 'Anonymous', 'LDAP' :type authentication_type: str or ~azure.mgmt.datafactory.models.PrestoAuthenticationType :param username: The user name used to connect to the Presto server. @@ -113,20 +109,20 @@ class PrestoLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(PrestoLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.server_version = kwargs.get('server_version', None) - self.catalog = kwargs.get('catalog', None) - self.port = kwargs.get('port', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) - self.time_zone_id = kwargs.get('time_zone_id', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, host, server_version, catalog, authentication_type, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, port=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, time_zone_id=None, encrypted_credential=None): + super(PrestoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.host = host + self.server_version = server_version + self.catalog = catalog + self.port = port + self.authentication_type = authentication_type + self.username = username + self.password = password + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.time_zone_id = time_zone_id + self.encrypted_credential = encrypted_credential self.type = 'Presto' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service_py3.py deleted file mode 100644 index 75ab99d5a58f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service_py3.py +++ /dev/null @@ -1,132 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
-# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class PrestoLinkedService(LinkedService): - """Presto server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The IP address or host name of the Presto server. - (i.e. 192.168.222.160) - :type host: object - :param server_version: Required. The version of the Presto server. (i.e. - 0.148-t) - :type server_version: object - :param catalog: Required. The catalog context for all request against the - server. - :type catalog: object - :param port: The TCP port that the Presto server uses to listen for client - connections. The default value is 8080. - :type port: object - :param authentication_type: Required. The authentication mechanism used to - connect to the Presto server. Possible values include: 'Anonymous', 'LDAP' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.PrestoAuthenticationType - :param username: The user name used to connect to the Presto server. - :type username: object - :param password: The password corresponding to the user name. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are - encrypted using SSL. The default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing - trusted CA certificates for verifying the server when connecting over SSL. - This property can only be set when using SSL on self-hosted IR. The - default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate - from the system trust store or from a specified PEM file. The default - value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a - CA-issued SSL certificate name to match the host name of the server when - connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow - self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object - :param time_zone_id: The local time zone used by the connection. Valid - values for this option are specified in the IANA Time Zone Database. The - default value is the system time zone. - :type time_zone_id: object - :param encrypted_credential: The encrypted credential used for - authentication. 
Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'server_version': {'required': True}, - 'catalog': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'server_version': {'key': 'typeProperties.serverVersion', 'type': 'object'}, - 'catalog': {'key': 'typeProperties.catalog', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'time_zone_id': {'key': 'typeProperties.timeZoneID', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, server_version, catalog, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, time_zone_id=None, encrypted_credential=None, **kwargs) -> None: - super(PrestoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.server_version = server_version - self.catalog = catalog - self.port = port - self.authentication_type = authentication_type - self.username = username - self.password = password - self.enable_ssl = enable_ssl - self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch - self.allow_self_signed_server_cert = allow_self_signed_server_cert - self.time_zone_id = time_zone_id - self.encrypted_credential = encrypted_credential - self.type = 'Presto' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset.py index eb80e1a97750..ca98e4488fb9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset.py @@ -15,8 +15,6 @@ 
class PrestoObjectDataset(Dataset): """Presto server dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class PrestoObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class PrestoObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. @@ -74,9 +72,9 @@ class PrestoObjectDataset(Dataset): 'presto_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, **kwargs): - super(PrestoObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.presto_object_dataset_schema = kwargs.get('presto_object_dataset_schema', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, presto_object_dataset_schema=None): + super(PrestoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name + self.table = table + self.presto_object_dataset_schema = presto_object_dataset_schema self.type = 'PrestoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset_py3.py deleted file mode 100644 index e3bd2f7e36aa..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class PrestoObjectDataset(Dataset): - """Presto server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. 
Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of the Presto. Type: string (or Expression - with resultType string). - :type table: object - :param presto_object_dataset_schema: The schema name of the Presto. Type: - string (or Expression with resultType string). - :type presto_object_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'presto_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, presto_object_dataset_schema=None, **kwargs) -> None: - super(PrestoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.table = table - self.presto_object_dataset_schema = presto_object_dataset_schema - self.type = 'PrestoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source.py index 9b7274011265..0c35009501f0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source.py @@ -15,8 +15,6 @@ class PrestoSource(CopySource): """A copy activity Presto server source. - All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class PrestoSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). @@ -51,7 +49,7 @@ class PrestoSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(PrestoSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'PrestoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source_py3.py deleted file mode 100644 index 47fe3eb5f790..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class PrestoSource(CopySource): - """A copy activity Presto server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'PrestoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service.py index 6353c1cda96a..a5d68a8db088 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service.py @@ -15,8 +15,6 @@ class QuickBooksLinkedService(LinkedService): """QuickBooks server linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,25 +29,21 @@ class QuickBooksLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param endpoint: Required. The endpoint of the QuickBooks server. (i.e. + :param endpoint: The endpoint of the QuickBooks server. (i.e. quickbooks.api.intuit.com) :type endpoint: object - :param company_id: Required. The company ID of the QuickBooks company to - authorize. + :param company_id: The company ID of the QuickBooks company to authorize. :type company_id: object - :param consumer_key: Required. The consumer key for OAuth 1.0 - authentication. + :param consumer_key: The consumer key for OAuth 1.0 authentication. :type consumer_key: object - :param consumer_secret: Required. The consumer secret for OAuth 1.0 - authentication. + :param consumer_secret: The consumer secret for OAuth 1.0 authentication. :type consumer_secret: ~azure.mgmt.datafactory.models.SecretBase - :param access_token: Required. The access token for OAuth 1.0 - authentication. + :param access_token: The access token for OAuth 1.0 authentication. :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param access_token_secret: Required. The access token secret for OAuth - 1.0 authentication. + :param access_token_secret: The access token secret for OAuth 1.0 + authentication. :type access_token_secret: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. 
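A sketch of the new QuickBooksLinkedService constructor with placeholder credentials; the six connection arguments are positional in the new signature, and SecureString is assumed here for the SecretBase-typed fields:

    from azure.mgmt.datafactory.models import QuickBooksLinkedService, SecureString

    # endpoint, company_id and consumer_key are plain values; the secrets
    # must be SecretBase instances such as SecureString.
    quick_books_ls = QuickBooksLinkedService(
        endpoint='quickbooks.api.intuit.com',
        company_id='<company-id>',
        consumer_key='<consumer-key>',
        consumer_secret=SecureString(value='<consumer-secret>'),
        access_token=SecureString(value='<access-token>'),
        access_token_secret=SecureString(value='<access-token-secret>'),
    )
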
@@ -87,14 +81,14 @@ class QuickBooksLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(QuickBooksLinkedService, self).__init__(**kwargs) - self.endpoint = kwargs.get('endpoint', None) - self.company_id = kwargs.get('company_id', None) - self.consumer_key = kwargs.get('consumer_key', None) - self.consumer_secret = kwargs.get('consumer_secret', None) - self.access_token = kwargs.get('access_token', None) - self.access_token_secret = kwargs.get('access_token_secret', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, endpoint, company_id, consumer_key, consumer_secret, access_token, access_token_secret, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, use_encrypted_endpoints=None, encrypted_credential=None): + super(QuickBooksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.endpoint = endpoint + self.company_id = company_id + self.consumer_key = consumer_key + self.consumer_secret = consumer_secret + self.access_token = access_token + self.access_token_secret = access_token_secret + self.use_encrypted_endpoints = use_encrypted_endpoints + self.encrypted_credential = encrypted_credential self.type = 'QuickBooks' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service_py3.py deleted file mode 100644 index be12fc5cfba5..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service_py3.py +++ /dev/null @@ -1,100 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class QuickBooksLinkedService(LinkedService): - """QuickBooks server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param endpoint: Required. The endpoint of the QuickBooks server. (i.e. - quickbooks.api.intuit.com) - :type endpoint: object - :param company_id: Required. 
The company ID of the QuickBooks company to - authorize. - :type company_id: object - :param consumer_key: Required. The consumer key for OAuth 1.0 - authentication. - :type consumer_key: object - :param consumer_secret: Required. The consumer secret for OAuth 1.0 - authentication. - :type consumer_secret: ~azure.mgmt.datafactory.models.SecretBase - :param access_token: Required. The access token for OAuth 1.0 - authentication. - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param access_token_secret: Required. The access token secret for OAuth - 1.0 authentication. - :type access_token_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'company_id': {'required': True}, - 'consumer_key': {'required': True}, - 'consumer_secret': {'required': True}, - 'access_token': {'required': True}, - 'access_token_secret': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'company_id': {'key': 'typeProperties.companyId', 'type': 'object'}, - 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'object'}, - 'consumer_secret': {'key': 'typeProperties.consumerSecret', 'type': 'SecretBase'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'access_token_secret': {'key': 'typeProperties.accessTokenSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, endpoint, company_id, consumer_key, consumer_secret, access_token, access_token_secret, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, use_encrypted_endpoints=None, encrypted_credential=None, **kwargs) -> None: - super(QuickBooksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.endpoint = endpoint - self.company_id = company_id - self.consumer_key = consumer_key - self.consumer_secret = consumer_secret - self.access_token = access_token - self.access_token_secret = access_token_secret - self.use_encrypted_endpoints = use_encrypted_endpoints - self.encrypted_credential = encrypted_credential - self.type = 'QuickBooks' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset.py index 73446d0ed938..156255115fe7 100644 --- 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset.py @@ -15,8 +15,6 @@ class QuickBooksObjectDataset(Dataset): """QuickBooks server dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class QuickBooksObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class QuickBooksObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -66,7 +64,7 @@ class QuickBooksObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(QuickBooksObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): + super(QuickBooksObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name self.type = 'QuickBooksObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset_py3.py deleted file mode 100644 index 65f67d2b20af..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class QuickBooksObjectDataset(Dataset): - """QuickBooks server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. 
Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(QuickBooksObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'QuickBooksObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source.py index cce0a026ae5a..cce839ce513f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source.py @@ -15,8 +15,6 @@ class QuickBooksSource(CopySource): """A copy activity QuickBooks server source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class QuickBooksSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
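# --- Illustrative usage sketch (editorial note, not part of the patch). Copy
# sources such as QuickBooksSource have no required parameters, so the merged
# constructor in the next hunk is still callable with keywords only. The query
# text is a hypothetical example.
from azure.mgmt.datafactory.models import QuickBooksSource

qb_source = QuickBooksSource(
    query="select * from Customers",  # Type: string (or Expression)
    source_retry_count=3,
)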
@@ -51,7 +49,7 @@ class QuickBooksSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(QuickBooksSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'QuickBooksSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source_py3.py deleted file mode 100644 index a00f35d4e1c1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class QuickBooksSource(CopySource): - """A copy activity QuickBooks server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'QuickBooksSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule.py index f23d452392b0..a36216b08620 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule.py @@ -40,11 +40,11 @@ class RecurrenceSchedule(Model): 'monthly_occurrences': {'key': 'monthlyOccurrences', 'type': '[RecurrenceScheduleOccurrence]'}, } - def __init__(self, **kwargs): - super(RecurrenceSchedule, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.minutes = kwargs.get('minutes', None) - self.hours = kwargs.get('hours', None) - self.week_days = kwargs.get('week_days', None) - self.month_days = kwargs.get('month_days', None) - self.monthly_occurrences = kwargs.get('monthly_occurrences', None) + def __init__(self, additional_properties=None, minutes=None, hours=None, week_days=None, month_days=None, monthly_occurrences=None): + super(RecurrenceSchedule, self).__init__() + self.additional_properties = additional_properties + self.minutes = minutes + self.hours = hours + self.week_days = week_days + self.month_days = month_days + self.monthly_occurrences = monthly_occurrences diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence.py index bbbe1fa28f17..ef87c271fe7a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence.py @@ -31,8 +31,8 @@ class RecurrenceScheduleOccurrence(Model): 'occurrence': {'key': 'occurrence', 'type': 'int'}, } - def __init__(self, **kwargs): - super(RecurrenceScheduleOccurrence, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.day = kwargs.get('day', None) - self.occurrence = kwargs.get('occurrence', None) + def __init__(self, additional_properties=None, day=None, occurrence=None): + super(RecurrenceScheduleOccurrence, self).__init__() + self.additional_properties = additional_properties + self.day = day + self.occurrence = occurrence diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence_py3.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence_py3.py deleted file mode 100644 index 10aea1f00163..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence_py3.py +++ /dev/null @@ -1,38 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class RecurrenceScheduleOccurrence(Model): - """The recurrence schedule occurrence. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param day: The day of the week. Possible values include: 'Sunday', - 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday' - :type day: str or ~azure.mgmt.datafactory.models.DayOfWeek - :param occurrence: The occurrence. - :type occurrence: int - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'day': {'key': 'day', 'type': 'DayOfWeek'}, - 'occurrence': {'key': 'occurrence', 'type': 'int'}, - } - - def __init__(self, *, additional_properties=None, day=None, occurrence: int=None, **kwargs) -> None: - super(RecurrenceScheduleOccurrence, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.day = day - self.occurrence = occurrence diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_py3.py deleted file mode 100644 index fbe44fa3f021..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_py3.py +++ /dev/null @@ -1,50 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class RecurrenceSchedule(Model): - """The recurrence schedule. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param minutes: The minutes. - :type minutes: list[int] - :param hours: The hours. - :type hours: list[int] - :param week_days: The days of the week. - :type week_days: list[str or ~azure.mgmt.datafactory.models.DaysOfWeek] - :param month_days: The month days. - :type month_days: list[int] - :param monthly_occurrences: The monthly occurrences. 
- :type monthly_occurrences: - list[~azure.mgmt.datafactory.models.RecurrenceScheduleOccurrence] - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'minutes': {'key': 'minutes', 'type': '[int]'}, - 'hours': {'key': 'hours', 'type': '[int]'}, - 'week_days': {'key': 'weekDays', 'type': '[DaysOfWeek]'}, - 'month_days': {'key': 'monthDays', 'type': '[int]'}, - 'monthly_occurrences': {'key': 'monthlyOccurrences', 'type': '[RecurrenceScheduleOccurrence]'}, - } - - def __init__(self, *, additional_properties=None, minutes=None, hours=None, week_days=None, month_days=None, monthly_occurrences=None, **kwargs) -> None: - super(RecurrenceSchedule, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.minutes = minutes - self.hours = hours - self.week_days = week_days - self.month_days = month_days - self.monthly_occurrences = monthly_occurrences diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings.py index a2e3bddb9425..8d5aba7e3ed0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings.py @@ -15,15 +15,13 @@ class RedirectIncompatibleRowSettings(Model): """Redirect incompatible row settings. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param linked_service_name: Required. Name of the Azure Storage, Storage - SAS, or Azure Data Lake Store linked service used for redirecting - incompatible row. Must be specified if redirectIncompatibleRowSettings is - specified. Type: string (or Expression with resultType string). + :param linked_service_name: Name of the Azure Storage, Storage SAS, or + Azure Data Lake Store linked service used for redirecting incompatible + row. Must be specified if redirectIncompatibleRowSettings is specified. + Type: string (or Expression with resultType string). :type linked_service_name: object :param path: The path for storing the redirect incompatible row data. Type: string (or Expression with resultType string). 
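# --- Illustrative usage sketch (editorial note, not part of the patch).
# linked_service_name stays a leading positional parameter in the merged
# constructor below because _validation still marks it required; only the
# docstring loses the "Required." marker. Names are hypothetical.
from azure.mgmt.datafactory.models import RedirectIncompatibleRowSettings

redirect_settings = RedirectIncompatibleRowSettings(
    'AzureStorageLinkedService',      # name of the redirect linked service
    path='redirect/incompatible-rows',
)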
@@ -40,8 +38,8 @@ class RedirectIncompatibleRowSettings(Model): 'path': {'key': 'path', 'type': 'object'}, } - def __init__(self, **kwargs): - super(RedirectIncompatibleRowSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.linked_service_name = kwargs.get('linked_service_name', None) - self.path = kwargs.get('path', None) + def __init__(self, linked_service_name, additional_properties=None, path=None): + super(RedirectIncompatibleRowSettings, self).__init__() + self.additional_properties = additional_properties + self.linked_service_name = linked_service_name + self.path = path diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings_py3.py deleted file mode 100644 index b47878ef4354..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings_py3.py +++ /dev/null @@ -1,47 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class RedirectIncompatibleRowSettings(Model): - """Redirect incompatible row settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param linked_service_name: Required. Name of the Azure Storage, Storage - SAS, or Azure Data Lake Store linked service used for redirecting - incompatible row. Must be specified if redirectIncompatibleRowSettings is - specified. Type: string (or Expression with resultType string). - :type linked_service_name: object - :param path: The path for storing the redirect incompatible row data. - Type: string (or Expression with resultType string). 
- :type path: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'object'}, - 'path': {'key': 'path', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, path=None, **kwargs) -> None: - super(RedirectIncompatibleRowSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.linked_service_name = linked_service_name - self.path = path diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings.py index 7114b85e10db..46552265701d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings.py @@ -18,17 +18,15 @@ class RedshiftUnloadSettings(Model): will be unloaded into S3 first and then copied into the targeted sink from the interim S3. - All required parameters must be populated in order to send to Azure. - - :param s3_linked_service_name: Required. The name of the Amazon S3 linked - service which will be used for the unload operation when copying from the - Amazon Redshift source. + :param s3_linked_service_name: The name of the Amazon S3 linked service + which will be used for the unload operation when copying from the Amazon + Redshift source. :type s3_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param bucket_name: Required. The bucket of the interim Amazon S3 which - will be used to store the unloaded data from Amazon Redshift source. The - bucket must be in the same region as the Amazon Redshift source. Type: - string (or Expression with resultType string). + :param bucket_name: The bucket of the interim Amazon S3 which will be used + to store the unloaded data from Amazon Redshift source. The bucket must be + in the same region as the Amazon Redshift source. Type: string (or + Expression with resultType string). :type bucket_name: object """ @@ -42,7 +40,7 @@ class RedshiftUnloadSettings(Model): 'bucket_name': {'key': 'bucketName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(RedshiftUnloadSettings, self).__init__(**kwargs) - self.s3_linked_service_name = kwargs.get('s3_linked_service_name', None) - self.bucket_name = kwargs.get('bucket_name', None) + def __init__(self, s3_linked_service_name, bucket_name): + super(RedshiftUnloadSettings, self).__init__() + self.s3_linked_service_name = s3_linked_service_name + self.bucket_name = bucket_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings_py3.py deleted file mode 100644 index a40d014a32f9..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings_py3.py +++ /dev/null @@ -1,48 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. 
-# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class RedshiftUnloadSettings(Model): - """The Amazon S3 settings needed for the interim Amazon S3 when copying from - Amazon Redshift with unload. With this, data from Amazon Redshift source - will be unloaded into S3 first and then copied into the targeted sink from - the interim S3. - - All required parameters must be populated in order to send to Azure. - - :param s3_linked_service_name: Required. The name of the Amazon S3 linked - service which will be used for the unload operation when copying from the - Amazon Redshift source. - :type s3_linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param bucket_name: Required. The bucket of the interim Amazon S3 which - will be used to store the unloaded data from Amazon Redshift source. The - bucket must be in the same region as the Amazon Redshift source. Type: - string (or Expression with resultType string). - :type bucket_name: object - """ - - _validation = { - 's3_linked_service_name': {'required': True}, - 'bucket_name': {'required': True}, - } - - _attribute_map = { - 's3_linked_service_name': {'key': 's3LinkedServiceName', 'type': 'LinkedServiceReference'}, - 'bucket_name': {'key': 'bucketName', 'type': 'object'}, - } - - def __init__(self, *, s3_linked_service_name, bucket_name, **kwargs) -> None: - super(RedshiftUnloadSettings, self).__init__(**kwargs) - self.s3_linked_service_name = s3_linked_service_name - self.bucket_name = bucket_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source.py index 2450f31222df..aa33cb06da39 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source.py @@ -15,8 +15,6 @@ class RelationalSource(CopySource): """A copy activity source for various relational databases. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class RelationalSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType string). 
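# --- Illustrative usage sketch (editorial note, not part of the patch).
# RelationalSource follows the same all-optional pattern as the other copy
# sources after the constructor merge in the next hunk. The query text is a
# hypothetical example.
from azure.mgmt.datafactory.models import RelationalSource

relational_source = RelationalSource(query="select * from dbo.Orders")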
@@ -51,7 +49,7 @@ class RelationalSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(RelationalSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'RelationalSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source_py3.py deleted file mode 100644 index f88383cbd729..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class RelationalSource(CopySource): - """A copy activity source for various relational databases. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'RelationalSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset.py index e5dd2e0786c8..ea0c2bc9d744 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset.py @@ -15,8 +15,6 @@ class RelationalTableDataset(Dataset): """The relational table dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class RelationalTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class RelationalTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: The relational table name. Type: string (or Expression with resultType string). 
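# --- Illustrative usage sketch (editorial note, not part of the patch).
# Datasets keep linked_service_name as their sole required, leading positional
# argument after the merge below. The LinkedServiceReference keyword and the
# reference name are assumptions for illustration.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference,
    RelationalTableDataset,
)

relational_dataset = RelationalTableDataset(
    LinkedServiceReference(reference_name='MyRelationalLinkedService'),
    table_name='dbo.Orders',
)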
@@ -66,7 +64,7 @@ class RelationalTableDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(RelationalTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): + super(RelationalTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name self.type = 'RelationalTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset_py3.py deleted file mode 100644 index 3c85d95f8033..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class RelationalTableDataset(Dataset): - """The relational table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The relational table name. Type: string (or Expression - with resultType string). 
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(RelationalTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'RelationalTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource.py index 8de6a70ecc99..3b5bf3bde215 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource.py @@ -18,8 +18,6 @@ class RerunTriggerResource(SubResource): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. :vartype id: str :ivar name: The resource name. @@ -28,7 +26,7 @@ class RerunTriggerResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param properties: Required. Properties of the rerun trigger. + :param properties: Properties of the rerun trigger. :type properties: ~azure.mgmt.datafactory.models.RerunTumblingWindowTrigger """ @@ -49,6 +47,6 @@ class RerunTriggerResource(SubResource): 'properties': {'key': 'properties', 'type': 'RerunTumblingWindowTrigger'}, } - def __init__(self, **kwargs): - super(RerunTriggerResource, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) + def __init__(self, properties): + super(RerunTriggerResource, self).__init__() + self.properties = properties diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_py3.py deleted file mode 100644 index 19814ad0d76f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_py3.py +++ /dev/null @@ -1,54 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. 
-# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .sub_resource_py3 import SubResource - - -class RerunTriggerResource(SubResource): - """RerunTrigger resource type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Properties of the rerun trigger. - :type properties: - ~azure.mgmt.datafactory.models.RerunTumblingWindowTrigger - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'RerunTumblingWindowTrigger'}, - } - - def __init__(self, *, properties, **kwargs) -> None: - super(RerunTriggerResource, self).__init__(**kwargs) - self.properties = properties diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger.py index 8c5ca2d67f3c..0e62cee1c262 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger.py @@ -19,8 +19,6 @@ class RerunTumblingWindowTrigger(Trigger): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -34,18 +32,18 @@ class RerunTumblingWindowTrigger(Trigger): :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param parent_trigger: The parent trigger reference. :type parent_trigger: object - :param requested_start_time: Required. The start time for the time period - for which restatement is initiated. Only UTC time is currently supported. + :param requested_start_time: The start time for the time period for which + restatement is initiated. Only UTC time is currently supported. :type requested_start_time: datetime - :param requested_end_time: Required. The end time for the time period for - which restatement is initiated. Only UTC time is currently supported. + :param requested_end_time: The end time for the time period for which + restatement is initiated. Only UTC time is currently supported. :type requested_end_time: datetime - :param max_concurrency: Required. The max number of parallel time windows - (ready for execution) for which a rerun is triggered. + :param max_concurrency: The max number of parallel time windows (ready for + execution) for which a rerun is triggered. 
:type max_concurrency: int """ @@ -69,10 +67,10 @@ class RerunTumblingWindowTrigger(Trigger): 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, } - def __init__(self, **kwargs): - super(RerunTumblingWindowTrigger, self).__init__(**kwargs) - self.parent_trigger = kwargs.get('parent_trigger', None) - self.requested_start_time = kwargs.get('requested_start_time', None) - self.requested_end_time = kwargs.get('requested_end_time', None) - self.max_concurrency = kwargs.get('max_concurrency', None) + def __init__(self, requested_start_time, requested_end_time, max_concurrency, additional_properties=None, description=None, annotations=None, parent_trigger=None): + super(RerunTumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations) + self.parent_trigger = parent_trigger + self.requested_start_time = requested_start_time + self.requested_end_time = requested_end_time + self.max_concurrency = max_concurrency self.type = 'RerunTumblingWindowTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters.py index 4b87f070b6be..50d6a0b91966 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters.py @@ -15,16 +15,14 @@ class RerunTumblingWindowTriggerActionParameters(Model): """Rerun tumbling window trigger Parameters. - All required parameters must be populated in order to send to Azure. - - :param start_time: Required. The start time for the time period for which + :param start_time: The start time for the time period for which restatement is initiated. Only UTC time is currently supported. :type start_time: datetime - :param end_time: Required. The end time for the time period for which - restatement is initiated. Only UTC time is currently supported. + :param end_time: The end time for the time period for which restatement is + initiated. Only UTC time is currently supported. :type end_time: datetime - :param max_concurrency: Required. The max number of parallel time windows - (ready for execution) for which a rerun is triggered. + :param max_concurrency: The max number of parallel time windows (ready for + execution) for which a rerun is triggered. 
:type max_concurrency: int """ @@ -40,8 +38,8 @@ class RerunTumblingWindowTriggerActionParameters(Model): 'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'}, } - def __init__(self, **kwargs): - super(RerunTumblingWindowTriggerActionParameters, self).__init__(**kwargs) - self.start_time = kwargs.get('start_time', None) - self.end_time = kwargs.get('end_time', None) - self.max_concurrency = kwargs.get('max_concurrency', None) + def __init__(self, start_time, end_time, max_concurrency): + super(RerunTumblingWindowTriggerActionParameters, self).__init__() + self.start_time = start_time + self.end_time = end_time + self.max_concurrency = max_concurrency diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters_py3.py deleted file mode 100644 index 6fadecca588b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters_py3.py +++ /dev/null @@ -1,47 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class RerunTumblingWindowTriggerActionParameters(Model): - """Rerun tumbling window trigger Parameters. - - All required parameters must be populated in order to send to Azure. - - :param start_time: Required. The start time for the time period for which - restatement is initiated. Only UTC time is currently supported. - :type start_time: datetime - :param end_time: Required. The end time for the time period for which - restatement is initiated. Only UTC time is currently supported. - :type end_time: datetime - :param max_concurrency: Required. The max number of parallel time windows - (ready for execution) for which a rerun is triggered. - :type max_concurrency: int - """ - - _validation = { - 'start_time': {'required': True}, - 'end_time': {'required': True}, - 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, - } - - _attribute_map = { - 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, - 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, - 'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'}, - } - - def __init__(self, *, start_time, end_time, max_concurrency: int, **kwargs) -> None: - super(RerunTumblingWindowTriggerActionParameters, self).__init__(**kwargs) - self.start_time = start_time - self.end_time = end_time - self.max_concurrency = max_concurrency diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_py3.py deleted file mode 100644 index 4a7a20759c1b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_py3.py +++ /dev/null @@ -1,78 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. 
All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .trigger_py3 import Trigger - - -class RerunTumblingWindowTrigger(Trigger): - """Trigger that schedules pipeline reruns for all fixed time interval windows - from a requested start time to requested end time. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when - Start/Stop APIs are called on the Trigger. Possible values include: - 'Started', 'Stopped', 'Disabled' - :vartype runtime_state: str or - ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the - trigger. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param parent_trigger: The parent trigger reference. - :type parent_trigger: object - :param requested_start_time: Required. The start time for the time period - for which restatement is initiated. Only UTC time is currently supported. - :type requested_start_time: datetime - :param requested_end_time: Required. The end time for the time period for - which restatement is initiated. Only UTC time is currently supported. - :type requested_end_time: datetime - :param max_concurrency: Required. The max number of parallel time windows - (ready for execution) for which a rerun is triggered. 
- :type max_concurrency: int - """ - - _validation = { - 'runtime_state': {'readonly': True}, - 'type': {'required': True}, - 'requested_start_time': {'required': True}, - 'requested_end_time': {'required': True}, - 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, - 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, - 'requested_end_time': {'key': 'typeProperties.requestedEndTime', 'type': 'iso-8601'}, - 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, - } - - def __init__(self, *, requested_start_time, requested_end_time, max_concurrency: int, additional_properties=None, description: str=None, annotations=None, parent_trigger=None, **kwargs) -> None: - super(RerunTumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) - self.parent_trigger = parent_trigger - self.requested_start_time = requested_start_time - self.requested_end_time = requested_end_time - self.max_concurrency = max_concurrency - self.type = 'RerunTumblingWindowTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource.py index f6b2d7d3b512..f68a72080d53 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource.py @@ -48,11 +48,11 @@ class Resource(Model): 'e_tag': {'key': 'eTag', 'type': 'str'}, } - def __init__(self, **kwargs): - super(Resource, self).__init__(**kwargs) + def __init__(self, location=None, tags=None): + super(Resource, self).__init__() self.id = None self.name = None self.type = None - self.location = kwargs.get('location', None) - self.tags = kwargs.get('tags', None) + self.location = location + self.tags = tags self.e_tag = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource_py3.py deleted file mode 100644 index cfc0e4b09aa5..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource_py3.py +++ /dev/null @@ -1,58 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class Resource(Model): - """Azure Data Factory top-level resource. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. 
- :vartype type: str - :param location: The resource location. - :type location: str - :param tags: The resource tags. - :type tags: dict[str, str] - :ivar e_tag: Etag identifies change in the resource. - :vartype e_tag: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'e_tag': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'e_tag': {'key': 'eTag', 'type': 'str'}, - } - - def __init__(self, *, location: str=None, tags=None, **kwargs) -> None: - super(Resource, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.location = location - self.tags = tags - self.e_tag = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service.py index 16d1af502787..658fc1d32f96 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service.py @@ -15,8 +15,6 @@ class ResponsysLinkedService(LinkedService): """Responsys linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,12 +29,12 @@ class ResponsysLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param endpoint: Required. The endpoint of the Responsys server. + :param endpoint: The endpoint of the Responsys server. :type endpoint: object - :param client_id: Required. The client ID associated with the Responsys - application. Type: string (or Expression with resultType string). + :param client_id: The client ID associated with the Responsys application. + Type: string (or Expression with resultType string). :type client_id: object :param client_secret: The client secret associated with the Responsys application. Type: string (or Expression with resultType string). 
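With the constructor change in the next hunk, ResponsysLinkedService no longer swallows **kwargs: endpoint and client_id become explicit positional parameters and the remaining properties stay keyword-style. A minimal construction sketch under that assumption (the endpoint, client ID, and secret values are placeholders, and SecureString is assumed to be the inline SecretBase implementation):

    from azure.mgmt.datafactory.models import ResponsysLinkedService, SecureString

    responsys_ls = ResponsysLinkedService(
        endpoint='https://api.responsys.example.com',       # placeholder endpoint
        client_id='example-client-id',                       # placeholder client ID
        client_secret=SecureString(value='example-secret'),  # placeholder secret
    )
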
@@ -82,13 +80,13 @@ class ResponsysLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(ResponsysLinkedService, self).__init__(**kwargs) - self.endpoint = kwargs.get('endpoint', None) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, endpoint, client_id, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None): + super(ResponsysLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.endpoint = endpoint + self.client_id = client_id + self.client_secret = client_secret + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential self.type = 'Responsys' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service_py3.py deleted file mode 100644 index 6d8a74a0a34b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service_py3.py +++ /dev/null @@ -1,94 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class ResponsysLinkedService(LinkedService): - """Responsys linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param endpoint: Required. The endpoint of the Responsys server. - :type endpoint: object - :param client_id: Required. The client ID associated with the Responsys - application. Type: string (or Expression with resultType string). 
- :type client_id: object - :param client_secret: The client secret associated with the Responsys - application. Type: string (or Expression with resultType string). - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. Type: - boolean (or Expression with resultType boolean). - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. Type: boolean (or - Expression with resultType boolean). - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. Type: - boolean (or Expression with resultType boolean). - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, endpoint, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(ResponsysLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.endpoint = endpoint - self.client_id = client_id - self.client_secret = client_secret - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'Responsys' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset.py index f459e69113a1..d9563d4750cd 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset.py +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset.py @@ -15,8 +15,6 @@ class ResponsysObjectDataset(Dataset): """Responsys dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class ResponsysObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class ResponsysObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -66,7 +64,7 @@ class ResponsysObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(ResponsysObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): + super(ResponsysObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name self.type = 'ResponsysObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset_py3.py deleted file mode 100644 index c5f375910aaf..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class ResponsysObjectDataset(Dataset): - """Responsys dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. 
- :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(ResponsysObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'ResponsysObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source.py index fd25b8e71377..7f641db163f6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source.py @@ -15,8 +15,6 @@ class ResponsysSource(CopySource): """A copy activity Responsys source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class ResponsysSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
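The next hunk applies the same positional-constructor rewrite to ResponsysSource; all of its properties remain optional. A usage sketch (the query text is a placeholder, and the retry wait follows the HH:MM:SS pattern quoted in the docstring above):

    from azure.mgmt.datafactory.models import ResponsysSource

    responsys_source = ResponsysSource(
        query='SELECT * FROM recipients',  # placeholder Responsys query
        source_retry_count=3,
        source_retry_wait='00:00:30',      # matches the documented timespan pattern
    )
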
@@ -51,7 +49,7 @@ class ResponsysSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(ResponsysSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'ResponsysSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source_py3.py deleted file mode 100644 index 8d5e4ac091f7..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class ResponsysSource(CopySource): - """A copy activity Responsys source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'ResponsysSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset.py index 9a5d41858e54..602e0419b5ab 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset.py @@ -15,8 +15,6 @@ class RestResourceDataset(Dataset): """A Rest service dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class RestResourceDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class RestResourceDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param relative_url: The relative URL to the resource that the RESTful API provides. Type: string (or Expression with resultType string). 
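For RestResourceDataset, the hunk below makes linked_service_name the only required constructor argument. A sketch of constructing one (the reference name and relative URL are placeholders; LinkedServiceReference takes the referenced linked service's name as reference_name):

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        RestResourceDataset,
    )

    rest_dataset = RestResourceDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='ExampleRestService'),  # placeholder linked service name
        relative_url='api/v1/items',               # placeholder resource path
        request_method='GET',
    )
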
@@ -83,11 +81,11 @@ class RestResourceDataset(Dataset): 'pagination_rules': {'key': 'typeProperties.paginationRules', 'type': 'object'}, } - def __init__(self, **kwargs): - super(RestResourceDataset, self).__init__(**kwargs) - self.relative_url = kwargs.get('relative_url', None) - self.request_method = kwargs.get('request_method', None) - self.request_body = kwargs.get('request_body', None) - self.additional_headers = kwargs.get('additional_headers', None) - self.pagination_rules = kwargs.get('pagination_rules', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, relative_url=None, request_method=None, request_body=None, additional_headers=None, pagination_rules=None): + super(RestResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.relative_url = relative_url + self.request_method = request_method + self.request_body = request_body + self.additional_headers = additional_headers + self.pagination_rules = pagination_rules self.type = 'RestResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset_py3.py deleted file mode 100644 index 99f39c97f373..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset_py3.py +++ /dev/null @@ -1,93 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class RestResourceDataset(Dataset): - """A Rest service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. 
Constant filled by server. - :type type: str - :param relative_url: The relative URL to the resource that the RESTful API - provides. Type: string (or Expression with resultType string). - :type relative_url: object - :param request_method: The HTTP method used to call the RESTful API. The - default is GET. Type: string (or Expression with resultType string). - :type request_method: object - :param request_body: The HTTP request body to the RESTful API if - requestMethod is POST. Type: string (or Expression with resultType - string). - :type request_body: object - :param additional_headers: The additional HTTP headers in the request to - the RESTful API. Type: string (or Expression with resultType string). - :type additional_headers: object - :param pagination_rules: The pagination rules to compose next page - requests. Type: string (or Expression with resultType string). - :type pagination_rules: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, - 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, - 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, - 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, - 'pagination_rules': {'key': 'typeProperties.paginationRules', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, relative_url=None, request_method=None, request_body=None, additional_headers=None, pagination_rules=None, **kwargs) -> None: - super(RestResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.relative_url = relative_url - self.request_method = request_method - self.request_body = request_body - self.additional_headers = additional_headers - self.pagination_rules = pagination_rules - self.type = 'RestResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service.py index 0fbb15654438..7b4c8784a345 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service.py @@ -15,8 +15,6 @@ class RestServiceLinkedService(LinkedService): """Rest Service linked service. - All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,17 +29,17 @@ class RestServiceLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param url: Required. The base URL of the REST service. + :param url: The base URL of the REST service. :type url: object :param enable_server_certificate_validation: Whether to validate server side SSL certificate when connecting to the endpoint.The default value is true. Type: boolean (or Expression with resultType boolean). :type enable_server_certificate_validation: object - :param authentication_type: Required. Type of authentication used to - connect to the REST service. Possible values include: 'Anonymous', - 'Basic', 'AadServicePrincipal', 'ManagedServiceIdentity' + :param authentication_type: Type of authentication used to connect to the + REST service. Possible values include: 'Anonymous', 'Basic', + 'AadServicePrincipal', 'ManagedServiceIdentity' :type authentication_type: str or ~azure.mgmt.datafactory.models.RestServiceAuthenticationType :param user_name: The user name used in Basic authentication type. @@ -92,16 +90,16 @@ class RestServiceLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(RestServiceLinkedService, self).__init__(**kwargs) - self.url = kwargs.get('url', None) - self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.aad_resource_id = kwargs.get('aad_resource_id', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, url, authentication_type, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, enable_server_certificate_validation=None, user_name=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, aad_resource_id=None, encrypted_credential=None): + super(RestServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.url = url + self.enable_server_certificate_validation = enable_server_certificate_validation + self.authentication_type = authentication_type + self.user_name = user_name + self.password = password + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.aad_resource_id = aad_resource_id + self.encrypted_credential = encrypted_credential self.type = 'RestService' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service_py3.py deleted file mode 100644 index 9af9f609e52b..000000000000 --- 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service_py3.py +++ /dev/null @@ -1,107 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class RestServiceLinkedService(LinkedService): - """Rest Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param url: Required. The base URL of the REST service. - :type url: object - :param enable_server_certificate_validation: Whether to validate server - side SSL certificate when connecting to the endpoint.The default value is - true. Type: boolean (or Expression with resultType boolean). - :type enable_server_certificate_validation: object - :param authentication_type: Required. Type of authentication used to - connect to the REST service. Possible values include: 'Anonymous', - 'Basic', 'AadServicePrincipal', 'ManagedServiceIdentity' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.RestServiceAuthenticationType - :param user_name: The user name used in Basic authentication type. - :type user_name: object - :param password: The password used in Basic authentication type. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param service_principal_id: The application's client ID used in - AadServicePrincipal authentication type. - :type service_principal_id: object - :param service_principal_key: The application's key used in - AadServicePrincipal authentication type. - :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The tenant information (domain name or tenant ID) used in - AadServicePrincipal authentication type under which your application - resides. - :type tenant: object - :param aad_resource_id: The resource you are requesting authorization to - use. - :type aad_resource_id: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, url, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, enable_server_certificate_validation=None, user_name=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, aad_resource_id=None, encrypted_credential=None, **kwargs) -> None: - super(RestServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.url = url - self.enable_server_certificate_validation = enable_server_certificate_validation - self.authentication_type = authentication_type - self.user_name = user_name - self.password = password - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.aad_resource_id = aad_resource_id - self.encrypted_credential = encrypted_credential - self.type = 'RestService' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source.py index f32d4d67e427..dfcaa1b03730 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source.py @@ -15,8 +15,6 @@ class RestSource(CopySource): """A copy activity Rest service source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class RestSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). 
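RestSource gets the same treatment in the next hunk, with every property optional. A sketch combining the header, pagination, and timeout properties (all values are placeholders, and the dict shape for pagination_rules follows the REST connector's AbsoluteUrl-style rules, which is an assumption here, since the model types it only as object):

    from azure.mgmt.datafactory.models import RestSource

    rest_source = RestSource(
        request_method='GET',
        additional_headers='x-api-key: example-key',        # placeholder header
        pagination_rules={'AbsoluteUrl': '$.paging.next'},  # assumed rule shape
        http_request_timeout='00:02:00',                    # pattern-conformant timespan
    )
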
@@ -75,12 +73,12 @@ class RestSource(CopySource): 'request_interval': {'key': 'requestInterval', 'type': 'object'}, } - def __init__(self, **kwargs): - super(RestSource, self).__init__(**kwargs) - self.request_method = kwargs.get('request_method', None) - self.request_body = kwargs.get('request_body', None) - self.additional_headers = kwargs.get('additional_headers', None) - self.pagination_rules = kwargs.get('pagination_rules', None) - self.http_request_timeout = kwargs.get('http_request_timeout', None) - self.request_interval = kwargs.get('request_interval', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, request_method=None, request_body=None, additional_headers=None, pagination_rules=None, http_request_timeout=None, request_interval=None): + super(RestSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.request_method = request_method + self.request_body = request_body + self.additional_headers = additional_headers + self.pagination_rules = pagination_rules + self.http_request_timeout = http_request_timeout + self.request_interval = request_interval self.type = 'RestSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source_py3.py deleted file mode 100644 index 5fcbb2f7a76d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source_py3.py +++ /dev/null @@ -1,86 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class RestSource(CopySource): - """A copy activity Rest service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param request_method: The HTTP method used to call the RESTful API. The - default is GET. Type: string (or Expression with resultType string). - :type request_method: object - :param request_body: The HTTP request body to the RESTful API if - requestMethod is POST. Type: string (or Expression with resultType - string). 
- :type request_body: object - :param additional_headers: The additional HTTP headers in the request to - the RESTful API. Type: string (or Expression with resultType string). - :type additional_headers: object - :param pagination_rules: The pagination rules to compose next page - requests. Type: string (or Expression with resultType string). - :type pagination_rules: object - :param http_request_timeout: The timeout (TimeSpan) to get an HTTP - response. It is the timeout to get a response, not the timeout to read - response data. Default value: 00:01:40. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type http_request_timeout: object - :param request_interval: The time to await before sending next page - request. - :type request_interval: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'request_method': {'key': 'requestMethod', 'type': 'object'}, - 'request_body': {'key': 'requestBody', 'type': 'object'}, - 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, - 'pagination_rules': {'key': 'paginationRules', 'type': 'object'}, - 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, - 'request_interval': {'key': 'requestInterval', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, request_method=None, request_body=None, additional_headers=None, pagination_rules=None, http_request_timeout=None, request_interval=None, **kwargs) -> None: - super(RestSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.request_method = request_method - self.request_body = request_body - self.additional_headers = additional_headers - self.pagination_rules = pagination_rules - self.http_request_timeout = http_request_timeout - self.request_interval = request_interval - self.type = 'RestSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy.py index e6f5b1876259..dd6aa0d11d76 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy.py @@ -32,7 +32,7 @@ class RetryPolicy(Model): 'interval_in_seconds': {'key': 'intervalInSeconds', 'type': 'int'}, } - def __init__(self, **kwargs): - super(RetryPolicy, self).__init__(**kwargs) - self.count = kwargs.get('count', None) - self.interval_in_seconds = kwargs.get('interval_in_seconds', None) + def __init__(self, count=None, interval_in_seconds=None): + super(RetryPolicy, self).__init__() + self.count = count + self.interval_in_seconds = interval_in_seconds diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy_py3.py deleted file mode 100644 
index b51b87a49938..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy_py3.py +++ /dev/null @@ -1,38 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class RetryPolicy(Model): - """Execution policy for an activity. - - :param count: Maximum ordinary retry attempts. Default is 0. Type: integer - (or Expression with resultType integer), minimum: 0. - :type count: object - :param interval_in_seconds: Interval between retries in seconds. Default - is 30. - :type interval_in_seconds: int - """ - - _validation = { - 'interval_in_seconds': {'maximum': 86400, 'minimum': 30}, - } - - _attribute_map = { - 'count': {'key': 'count', 'type': 'object'}, - 'interval_in_seconds': {'key': 'intervalInSeconds', 'type': 'int'}, - } - - def __init__(self, *, count=None, interval_in_seconds: int=None, **kwargs) -> None: - super(RetryPolicy, self).__init__(**kwargs) - self.count = count - self.interval_in_seconds = interval_in_seconds diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters.py index 9271f7adf029..150161e78a05 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters.py @@ -15,16 +15,14 @@ class RunFilterParameters(Model): """Query parameters for listing runs. - All required parameters must be populated in order to send to Azure. - :param continuation_token: The continuation token for getting the next page of results. Null for first page. :type continuation_token: str - :param last_updated_after: Required. The time at or after which the run - event was updated in 'ISO 8601' format. + :param last_updated_after: The time at or after which the run event was + updated in 'ISO 8601' format. :type last_updated_after: datetime - :param last_updated_before: Required. The time at or before which the run - event was updated in 'ISO 8601' format. + :param last_updated_before: The time at or before which the run event was + updated in 'ISO 8601' format. :type last_updated_before: datetime :param filters: List of filters. 
:type filters: list[~azure.mgmt.datafactory.models.RunQueryFilter] @@ -45,10 +43,10 @@ class RunFilterParameters(Model): 'order_by': {'key': 'orderBy', 'type': '[RunQueryOrderBy]'}, } - def __init__(self, **kwargs): - super(RunFilterParameters, self).__init__(**kwargs) - self.continuation_token = kwargs.get('continuation_token', None) - self.last_updated_after = kwargs.get('last_updated_after', None) - self.last_updated_before = kwargs.get('last_updated_before', None) - self.filters = kwargs.get('filters', None) - self.order_by = kwargs.get('order_by', None) + def __init__(self, last_updated_after, last_updated_before, continuation_token=None, filters=None, order_by=None): + super(RunFilterParameters, self).__init__() + self.continuation_token = continuation_token + self.last_updated_after = last_updated_after + self.last_updated_before = last_updated_before + self.filters = filters + self.order_by = order_by diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters_py3.py deleted file mode 100644 index c96e64eb63b3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters_py3.py +++ /dev/null @@ -1,54 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class RunFilterParameters(Model): - """Query parameters for listing runs. - - All required parameters must be populated in order to send to Azure. - - :param continuation_token: The continuation token for getting the next - page of results. Null for first page. - :type continuation_token: str - :param last_updated_after: Required. The time at or after which the run - event was updated in 'ISO 8601' format. - :type last_updated_after: datetime - :param last_updated_before: Required. The time at or before which the run - event was updated in 'ISO 8601' format. - :type last_updated_before: datetime - :param filters: List of filters. - :type filters: list[~azure.mgmt.datafactory.models.RunQueryFilter] - :param order_by: List of OrderBy option. 
- :type order_by: list[~azure.mgmt.datafactory.models.RunQueryOrderBy] - """ - - _validation = { - 'last_updated_after': {'required': True}, - 'last_updated_before': {'required': True}, - } - - _attribute_map = { - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, - 'last_updated_after': {'key': 'lastUpdatedAfter', 'type': 'iso-8601'}, - 'last_updated_before': {'key': 'lastUpdatedBefore', 'type': 'iso-8601'}, - 'filters': {'key': 'filters', 'type': '[RunQueryFilter]'}, - 'order_by': {'key': 'orderBy', 'type': '[RunQueryOrderBy]'}, - } - - def __init__(self, *, last_updated_after, last_updated_before, continuation_token: str=None, filters=None, order_by=None, **kwargs) -> None: - super(RunFilterParameters, self).__init__(**kwargs) - self.continuation_token = continuation_token - self.last_updated_after = last_updated_after - self.last_updated_before = last_updated_before - self.filters = filters - self.order_by = order_by diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter.py index 7d54150a6815..063002c27fdd 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter.py @@ -15,22 +15,20 @@ class RunQueryFilter(Model): """Query filter option for listing runs. - All required parameters must be populated in order to send to Azure. - - :param operand: Required. Parameter name to be used for filter. The - allowed operands to query pipeline runs are PipelineName, RunStart, RunEnd - and Status; to query activity runs are ActivityName, ActivityRunStart, - ActivityRunEnd, ActivityType and Status, and to query trigger runs are - TriggerName, TriggerRunTimestamp and Status. Possible values include: - 'PipelineName', 'Status', 'RunStart', 'RunEnd', 'ActivityName', - 'ActivityRunStart', 'ActivityRunEnd', 'ActivityType', 'TriggerName', - 'TriggerRunTimestamp', 'RunGroupId', 'LatestOnly' + :param operand: Parameter name to be used for filter. The allowed operands + to query pipeline runs are PipelineName, RunStart, RunEnd and Status; to + query activity runs are ActivityName, ActivityRunStart, ActivityRunEnd, + ActivityType and Status, and to query trigger runs are TriggerName, + TriggerRunTimestamp and Status. Possible values include: 'PipelineName', + 'Status', 'RunStart', 'RunEnd', 'ActivityName', 'ActivityRunStart', + 'ActivityRunEnd', 'ActivityType', 'TriggerName', 'TriggerRunTimestamp', + 'RunGroupId', 'LatestOnly' :type operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand - :param operator: Required. Operator to be used for filter. Possible values - include: 'Equals', 'NotEquals', 'In', 'NotIn' + :param operator: Operator to be used for filter. Possible values include: + 'Equals', 'NotEquals', 'In', 'NotIn' :type operator: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperator - :param values: Required. List of filter values. + :param values: List of filter values. 
:type values: list[str] """ @@ -46,8 +44,8 @@ class RunQueryFilter(Model): 'values': {'key': 'values', 'type': '[str]'}, } - def __init__(self, **kwargs): - super(RunQueryFilter, self).__init__(**kwargs) - self.operand = kwargs.get('operand', None) - self.operator = kwargs.get('operator', None) - self.values = kwargs.get('values', None) + def __init__(self, operand, operator, values): + super(RunQueryFilter, self).__init__() + self.operand = operand + self.operator = operator + self.values = values diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter_py3.py deleted file mode 100644 index 814e7a4b499b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter_py3.py +++ /dev/null @@ -1,53 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class RunQueryFilter(Model): - """Query filter option for listing runs. - - All required parameters must be populated in order to send to Azure. - - :param operand: Required. Parameter name to be used for filter. The - allowed operands to query pipeline runs are PipelineName, RunStart, RunEnd - and Status; to query activity runs are ActivityName, ActivityRunStart, - ActivityRunEnd, ActivityType and Status, and to query trigger runs are - TriggerName, TriggerRunTimestamp and Status. Possible values include: - 'PipelineName', 'Status', 'RunStart', 'RunEnd', 'ActivityName', - 'ActivityRunStart', 'ActivityRunEnd', 'ActivityType', 'TriggerName', - 'TriggerRunTimestamp', 'RunGroupId', 'LatestOnly' - :type operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand - :param operator: Required. Operator to be used for filter. Possible values - include: 'Equals', 'NotEquals', 'In', 'NotIn' - :type operator: str or - ~azure.mgmt.datafactory.models.RunQueryFilterOperator - :param values: Required. List of filter values. - :type values: list[str] - """ - - _validation = { - 'operand': {'required': True}, - 'operator': {'required': True}, - 'values': {'required': True}, - } - - _attribute_map = { - 'operand': {'key': 'operand', 'type': 'str'}, - 'operator': {'key': 'operator', 'type': 'str'}, - 'values': {'key': 'values', 'type': '[str]'}, - } - - def __init__(self, *, operand, operator, values, **kwargs) -> None: - super(RunQueryFilter, self).__init__(**kwargs) - self.operand = operand - self.operator = operator - self.values = values diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by.py index 21afabcf215f..596b68dfdac4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by.py @@ -15,18 +15,16 @@ class RunQueryOrderBy(Model): """An object to provide order by options for listing runs. 
- All required parameters must be populated in order to send to Azure. - - :param order_by: Required. Parameter name to be used for order by. The - allowed parameters to order by for pipeline runs are PipelineName, - RunStart, RunEnd and Status; for activity runs are ActivityName, - ActivityRunStart, ActivityRunEnd and Status; for trigger runs are - TriggerName, TriggerRunTimestamp and Status. Possible values include: - 'RunStart', 'RunEnd', 'PipelineName', 'Status', 'ActivityName', - 'ActivityRunStart', 'ActivityRunEnd', 'TriggerName', 'TriggerRunTimestamp' + :param order_by: Parameter name to be used for order by. The allowed + parameters to order by for pipeline runs are PipelineName, RunStart, + RunEnd and Status; for activity runs are ActivityName, ActivityRunStart, + ActivityRunEnd and Status; for trigger runs are TriggerName, + TriggerRunTimestamp and Status. Possible values include: 'RunStart', + 'RunEnd', 'PipelineName', 'Status', 'ActivityName', 'ActivityRunStart', + 'ActivityRunEnd', 'TriggerName', 'TriggerRunTimestamp' :type order_by: str or ~azure.mgmt.datafactory.models.RunQueryOrderByField - :param order: Required. Sorting order of the parameter. Possible values - include: 'ASC', 'DESC' + :param order: Sorting order of the parameter. Possible values include: + 'ASC', 'DESC' :type order: str or ~azure.mgmt.datafactory.models.RunQueryOrder """ @@ -40,7 +38,7 @@ class RunQueryOrderBy(Model): 'order': {'key': 'order', 'type': 'str'}, } - def __init__(self, **kwargs): - super(RunQueryOrderBy, self).__init__(**kwargs) - self.order_by = kwargs.get('order_by', None) - self.order = kwargs.get('order', None) + def __init__(self, order_by, order): + super(RunQueryOrderBy, self).__init__() + self.order_by = order_by + self.order = order diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by_py3.py deleted file mode 100644 index a3ddc8854d47..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by_py3.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class RunQueryOrderBy(Model): - """An object to provide order by options for listing runs. - - All required parameters must be populated in order to send to Azure. - - :param order_by: Required. Parameter name to be used for order by. The - allowed parameters to order by for pipeline runs are PipelineName, - RunStart, RunEnd and Status; for activity runs are ActivityName, - ActivityRunStart, ActivityRunEnd and Status; for trigger runs are - TriggerName, TriggerRunTimestamp and Status. Possible values include: - 'RunStart', 'RunEnd', 'PipelineName', 'Status', 'ActivityName', - 'ActivityRunStart', 'ActivityRunEnd', 'TriggerName', 'TriggerRunTimestamp' - :type order_by: str or ~azure.mgmt.datafactory.models.RunQueryOrderByField - :param order: Required. Sorting order of the parameter. 
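
As a usage note: after this regeneration the run-query models take their required arguments positionally rather than keyword-only. A minimal sketch of querying pipeline runs with them, assuming RunFilterParameters follows the same regeneration pattern and that `client` is an already-configured DataFactoryManagementClient; all names and values are illustrative:

    from datetime import datetime, timedelta
    from azure.mgmt.datafactory.models import (
        RunFilterParameters, RunQueryFilter, RunQueryOrderBy)

    # Restrict to runs of one pipeline, newest first.
    filters = [RunQueryFilter('PipelineName', 'Equals', ['myPipeline'])]
    order_by = [RunQueryOrderBy('RunStart', 'DESC')]
    params = RunFilterParameters(
        last_updated_after=datetime.utcnow() - timedelta(days=1),
        last_updated_before=datetime.utcnow(),
        filters=filters,
        order_by=order_by)
    # runs = client.pipeline_runs.query_by_factory(
    #     'myResourceGroup', 'myFactory', params)
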
Possible values - include: 'ASC', 'DESC' - :type order: str or ~azure.mgmt.datafactory.models.RunQueryOrder - """ - - _validation = { - 'order_by': {'required': True}, - 'order': {'required': True}, - } - - _attribute_map = { - 'order_by': {'key': 'orderBy', 'type': 'str'}, - 'order': {'key': 'order', 'type': 'str'}, - } - - def __init__(self, *, order_by, order, **kwargs) -> None: - super(RunQueryOrderBy, self).__init__(**kwargs) - self.order_by = order_by - self.order = order diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service.py index c644ac664831..00674e19d285 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service.py @@ -15,8 +15,6 @@ class SalesforceLinkedService(LinkedService): """Linked service for Salesforce. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class SalesforceLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param environment_url: The URL of Salesforce instance. Default is 'https://login.salesforce.com'. To copy data from sandbox, specify @@ -72,11 +70,11 @@ class SalesforceLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SalesforceLinkedService, self).__init__(**kwargs) - self.environment_url = kwargs.get('environment_url', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.security_token = kwargs.get('security_token', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, environment_url=None, username=None, password=None, security_token=None, encrypted_credential=None): + super(SalesforceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.environment_url = environment_url + self.username = username + self.password = password + self.security_token = security_token + self.encrypted_credential = encrypted_credential self.type = 'Salesforce' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service_py3.py deleted file mode 100644 index 05fcea7a3990..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
-# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class SalesforceLinkedService(LinkedService): - """Linked service for Salesforce. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param environment_url: The URL of Salesforce instance. Default is - 'https://login.salesforce.com'. To copy data from sandbox, specify - 'https://test.salesforce.com'. To copy data from custom domain, specify, - for example, 'https://[domain].my.salesforce.com'. Type: string (or - Expression with resultType string). - :type environment_url: object - :param username: The username for Basic authentication of the Salesforce - instance. Type: string (or Expression with resultType string). - :type username: object - :param password: The password for Basic authentication of the Salesforce - instance. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param security_token: The security token is required to remotely access - Salesforce instance. - :type security_token: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
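
For comparison, constructing the Salesforce linked service against the regenerated signature shown above (ordinary keyword arguments, no longer keyword-only). A minimal sketch assuming SecureString for the SecretBase fields; the URL, user name, and secret values are illustrative placeholders:

    from azure.mgmt.datafactory.models import (
        SalesforceLinkedService, SecureString)

    # Every type-property is optional; sandbox URL shown for illustration.
    salesforce_ls = SalesforceLinkedService(
        environment_url='https://test.salesforce.com',
        username='user@example.com',
        password=SecureString(value='<password>'),
        security_token=SecureString(value='<security token>'))
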
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, environment_url=None, username=None, password=None, security_token=None, encrypted_credential=None, **kwargs) -> None: - super(SalesforceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.environment_url = environment_url - self.username = username - self.password = password - self.security_token = security_token - self.encrypted_credential = encrypted_credential - self.type = 'Salesforce' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service.py index 93b4fcdb3d1f..88373e4cfef8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service.py @@ -15,8 +15,6 @@ class SalesforceMarketingCloudLinkedService(LinkedService): """Salesforce Marketing Cloud linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,11 +29,10 @@ class SalesforceMarketingCloudLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param client_id: Required. The client ID associated with the Salesforce - Marketing Cloud application. Type: string (or Expression with resultType - string). + :param client_id: The client ID associated with the Salesforce Marketing + Cloud application. Type: string (or Expression with resultType string). :type client_id: object :param client_secret: The client secret associated with the Salesforce Marketing Cloud application. 
Type: string (or Expression with resultType @@ -80,12 +77,12 @@ class SalesforceMarketingCloudLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SalesforceMarketingCloudLinkedService, self).__init__(**kwargs) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, client_id, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None): + super(SalesforceMarketingCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.client_id = client_id + self.client_secret = client_secret + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential self.type = 'SalesforceMarketingCloud' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service_py3.py deleted file mode 100644 index d7e09e27a43f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service_py3.py +++ /dev/null @@ -1,91 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class SalesforceMarketingCloudLinkedService(LinkedService): - """Salesforce Marketing Cloud linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param client_id: Required. The client ID associated with the Salesforce - Marketing Cloud application. Type: string (or Expression with resultType - string). 
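
Under the regenerated signature, client_id is the single required type-property and moves to the front of the argument list. A minimal sketch; the IDs and secret are illustrative placeholders:

    from azure.mgmt.datafactory.models import (
        SalesforceMarketingCloudLinkedService, SecureString)

    sfmc_ls = SalesforceMarketingCloudLinkedService(
        client_id='<app client id>',  # required
        client_secret=SecureString(value='<app client secret>'),
        use_encrypted_endpoints=True)
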
- :type client_id: object - :param client_secret: The client secret associated with the Salesforce - Marketing Cloud application. Type: string (or Expression with resultType - string). - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. Type: - boolean (or Expression with resultType boolean). - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. Type: boolean (or - Expression with resultType boolean). - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. Type: - boolean (or Expression with resultType boolean). - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(SalesforceMarketingCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.client_id = client_id - self.client_secret = client_secret - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'SalesforceMarketingCloud' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset.py index 20f581ce1c50..cf0998c76fcc 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset.py +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset.py @@ -15,8 +15,6 @@ class SalesforceMarketingCloudObjectDataset(Dataset): """Salesforce Marketing Cloud dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class SalesforceMarketingCloudObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class SalesforceMarketingCloudObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -66,7 +64,7 @@ class SalesforceMarketingCloudObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SalesforceMarketingCloudObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): + super(SalesforceMarketingCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name self.type = 'SalesforceMarketingCloudObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset_py3.py deleted file mode 100644 index 526ac806649f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class SalesforceMarketingCloudObjectDataset(Dataset): - """Salesforce Marketing Cloud dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. 
- :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(SalesforceMarketingCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'SalesforceMarketingCloudObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source.py index 09a0eca1758e..fd375ee3cb7f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source.py @@ -15,8 +15,6 @@ class SalesforceMarketingCloudSource(CopySource): """A copy activity Salesforce Marketing Cloud source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class SalesforceMarketingCloudSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. 
+ :param type: Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). @@ -51,7 +49,7 @@ class SalesforceMarketingCloudSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SalesforceMarketingCloudSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'SalesforceMarketingCloudSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source_py3.py deleted file mode 100644 index 9b898af0c3a1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class SalesforceMarketingCloudSource(CopySource): - """A copy activity Salesforce Marketing Cloud source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'SalesforceMarketingCloudSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset.py index 10cfce97fe0f..53ebb3201608 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset.py @@ -15,8 +15,6 @@ class SalesforceObjectDataset(Dataset): """The Salesforce object dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class SalesforceObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class SalesforceObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param object_api_name: The Salesforce object API name. Type: string (or Expression with resultType string). 
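
The regenerated dataset constructors follow the same pattern, with linked_service_name promoted to the leading positional argument (see the __init__ hunk that follows). A minimal sketch wiring a Salesforce object dataset to a linked service reference; the reference name and object API name are illustrative:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, SalesforceObjectDataset)

    ds = SalesforceObjectDataset(
        LinkedServiceReference(reference_name='SalesforceLinkedService'),
        object_api_name='Account')
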
@@ -66,7 +64,7 @@ class SalesforceObjectDataset(Dataset): 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SalesforceObjectDataset, self).__init__(**kwargs) - self.object_api_name = kwargs.get('object_api_name', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, object_api_name=None): + super(SalesforceObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.object_api_name = object_api_name self.type = 'SalesforceObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset_py3.py deleted file mode 100644 index 3c3f75d6059e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class SalesforceObjectDataset(Dataset): - """The Salesforce object dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param object_api_name: The Salesforce object API name. Type: string (or - Expression with resultType string). 
- :type object_api_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, object_api_name=None, **kwargs) -> None: - super(SalesforceObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.object_api_name = object_api_name - self.type = 'SalesforceObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service.py index fb6476ac9a30..e96d6c64c36b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service.py @@ -15,8 +15,6 @@ class SalesforceServiceCloudLinkedService(LinkedService): """Linked service for Salesforce Service Cloud. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class SalesforceServiceCloudLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param environment_url: The URL of Salesforce Service Cloud instance. Default is 'https://login.salesforce.com'. 
To copy data from sandbox, @@ -76,12 +74,12 @@ class SalesforceServiceCloudLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SalesforceServiceCloudLinkedService, self).__init__(**kwargs) - self.environment_url = kwargs.get('environment_url', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.security_token = kwargs.get('security_token', None) - self.extended_properties = kwargs.get('extended_properties', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, environment_url=None, username=None, password=None, security_token=None, extended_properties=None, encrypted_credential=None): + super(SalesforceServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.environment_url = environment_url + self.username = username + self.password = password + self.security_token = security_token + self.extended_properties = extended_properties + self.encrypted_credential = encrypted_credential self.type = 'SalesforceServiceCloud' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service_py3.py deleted file mode 100644 index 3f0b3cc64d91..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service_py3.py +++ /dev/null @@ -1,87 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class SalesforceServiceCloudLinkedService(LinkedService): - """Linked service for Salesforce Service Cloud. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param environment_url: The URL of Salesforce Service Cloud instance. - Default is 'https://login.salesforce.com'. To copy data from sandbox, - specify 'https://test.salesforce.com'. To copy data from custom domain, - specify, for example, 'https://[domain].my.salesforce.com'. 
Type: string - (or Expression with resultType string). - :type environment_url: object - :param username: The username for Basic authentication of the Salesforce - instance. Type: string (or Expression with resultType string). - :type username: object - :param password: The password for Basic authentication of the Salesforce - instance. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param security_token: The security token is required to remotely access - Salesforce instance. - :type security_token: ~azure.mgmt.datafactory.models.SecretBase - :param extended_properties: Extended properties appended to the connection - string. Type: string (or Expression with resultType string). - :type extended_properties: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, - 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, environment_url=None, username=None, password=None, security_token=None, extended_properties=None, encrypted_credential=None, **kwargs) -> None: - super(SalesforceServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.environment_url = environment_url - self.username = username - self.password = password - self.security_token = security_token - self.extended_properties = extended_properties - self.encrypted_credential = encrypted_credential - self.type = 'SalesforceServiceCloud' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset.py index 1f5cb3bb5bf1..2bd8b554c0a4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset.py @@ -15,8 +15,6 @@ class SalesforceServiceCloudObjectDataset(Dataset): """The Salesforce Service Cloud object dataset. - All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class SalesforceServiceCloudObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class SalesforceServiceCloudObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param object_api_name: The Salesforce Service Cloud object API name. Type: string (or Expression with resultType string). @@ -66,7 +64,7 @@ class SalesforceServiceCloudObjectDataset(Dataset): 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SalesforceServiceCloudObjectDataset, self).__init__(**kwargs) - self.object_api_name = kwargs.get('object_api_name', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, object_api_name=None): + super(SalesforceServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.object_api_name = object_api_name self.type = 'SalesforceServiceCloudObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset_py3.py deleted file mode 100644 index d215f5f0084d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class SalesforceServiceCloudObjectDataset(Dataset): - """The Salesforce Service Cloud object dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. 
Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param object_api_name: The Salesforce Service Cloud object API name. - Type: string (or Expression with resultType string). - :type object_api_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, object_api_name=None, **kwargs) -> None: - super(SalesforceServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.object_api_name = object_api_name - self.type = 'SalesforceServiceCloudObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink.py index 99e2b1a2c924..f7ea135bf5d0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink.py @@ -15,8 +15,6 @@ class SalesforceServiceCloudSink(CopySink): """A copy activity Salesforce Service Cloud sink. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -38,7 +36,7 @@ class SalesforceServiceCloudSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. Default is Insert. 
Possible values include: 'Insert', 'Upsert' @@ -76,9 +74,9 @@ class SalesforceServiceCloudSink(CopySink): 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SalesforceServiceCloudSink, self).__init__(**kwargs) - self.write_behavior = kwargs.get('write_behavior', None) - self.external_id_field_name = kwargs.get('external_id_field_name', None) - self.ignore_null_values = kwargs.get('ignore_null_values', None) + def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None): + super(SalesforceServiceCloudSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.write_behavior = write_behavior + self.external_id_field_name = external_id_field_name + self.ignore_null_values = ignore_null_values self.type = 'SalesforceServiceCloudSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink_py3.py deleted file mode 100644 index 2abfaa12d0e7..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink_py3.py +++ /dev/null @@ -1,84 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class SalesforceServiceCloudSink(CopySink): - """A copy activity Salesforce Service Cloud sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param write_behavior: The write behavior for the operation. Default is - Insert. 
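
The upsert path of this sink is driven by write_behavior together with external_id_field_name, while ignore_null_values decides whether source NULLs overwrite destination values. A minimal sketch against the regenerated signature; the external ID field name is an illustrative custom field:

    from azure.mgmt.datafactory.models import SalesforceServiceCloudSink

    sink = SalesforceServiceCloudSink(
        write_behavior='Upsert',
        external_id_field_name='External_Id__c',  # custom external ID field
        ignore_null_values=False)  # False: source NULLs overwrite destination values
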
Possible values include: 'Insert', 'Upsert' - :type write_behavior: str or - ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior - :param external_id_field_name: The name of the external ID field for - upsert operation. Default value is 'Id' column. Type: string (or - Expression with resultType string). - :type external_id_field_name: object - :param ignore_null_values: The flag indicating whether or not to ignore - null values from input dataset (except key fields) during write operation. - Default value is false. If set it to true, it means ADF will leave the - data in the destination object unchanged when doing upsert/update - operation and insert defined default value when doing insert operation, - versus ADF will update the data in the destination object to NULL when - doing upsert/update operation and insert NULL value when doing insert - operation. Type: boolean (or Expression with resultType boolean). - :type ignore_null_values: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, - 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None: - super(SalesforceServiceCloudSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.write_behavior = write_behavior - self.external_id_field_name = external_id_field_name - self.ignore_null_values = ignore_null_values - self.type = 'SalesforceServiceCloudSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source.py index 255bfab477bc..378a3d06e58e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source.py @@ -15,8 +15,6 @@ class SalesforceServiceCloudSource(CopySource): """A copy activity Salesforce Service Cloud source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class SalesforceServiceCloudSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. 
+ :param type: Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType string). @@ -56,8 +54,8 @@ class SalesforceServiceCloudSource(CopySource): 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, } - def __init__(self, **kwargs): - super(SalesforceServiceCloudSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.read_behavior = kwargs.get('read_behavior', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, read_behavior=None): + super(SalesforceServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query + self.read_behavior = read_behavior self.type = 'SalesforceServiceCloudSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source_py3.py deleted file mode 100644 index 77bb267f5a47..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source_py3.py +++ /dev/null @@ -1,63 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class SalesforceServiceCloudSource(CopySource): - """A copy activity Salesforce Service Cloud source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - :param read_behavior: The read behavior for the operation. Default is - Query. 
Possible values include: 'Query', 'QueryAll' - :type read_behavior: str or - ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, read_behavior=None, **kwargs) -> None: - super(SalesforceServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.read_behavior = read_behavior - self.type = 'SalesforceServiceCloudSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py index 9a1291bd4bfe..2ffe781bacad 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py @@ -15,8 +15,6 @@ class SalesforceSink(CopySink): """A copy activity Salesforce sink. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -38,7 +36,7 @@ class SalesforceSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. Default is Insert. 
Possible values include: 'Insert', 'Upsert' @@ -76,9 +74,9 @@ class SalesforceSink(CopySink): 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SalesforceSink, self).__init__(**kwargs) - self.write_behavior = kwargs.get('write_behavior', None) - self.external_id_field_name = kwargs.get('external_id_field_name', None) - self.ignore_null_values = kwargs.get('ignore_null_values', None) + def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None): + super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.write_behavior = write_behavior + self.external_id_field_name = external_id_field_name + self.ignore_null_values = ignore_null_values self.type = 'SalesforceSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py deleted file mode 100644 index 54a56618d01e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py +++ /dev/null @@ -1,84 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class SalesforceSink(CopySink): - """A copy activity Salesforce sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param write_behavior: The write behavior for the operation. Default is - Insert. 
Possible values include: 'Insert', 'Upsert' - :type write_behavior: str or - ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior - :param external_id_field_name: The name of the external ID field for - upsert operation. Default value is 'Id' column. Type: string (or - Expression with resultType string). - :type external_id_field_name: object - :param ignore_null_values: The flag indicating whether or not to ignore - null values from input dataset (except key fields) during write operation. - Default value is false. If set it to true, it means ADF will leave the - data in the destination object unchanged when doing upsert/update - operation and insert defined default value when doing insert operation, - versus ADF will update the data in the destination object to NULL when - doing upsert/update operation and insert NULL value when doing insert - operation. Type: boolean (or Expression with resultType boolean). - :type ignore_null_values: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, - 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None: - super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.write_behavior = write_behavior - self.external_id_field_name = external_id_field_name - self.ignore_null_values = ignore_null_values - self.type = 'SalesforceSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py index 4f2590c3ab9d..3d076f304a4b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py @@ -15,8 +15,6 @@ class SalesforceSource(CopySource): """A copy activity Salesforce source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class SalesforceSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: Database query. 
Type: string (or Expression with resultType string). @@ -56,8 +54,8 @@ class SalesforceSource(CopySource): 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, } - def __init__(self, **kwargs): - super(SalesforceSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.read_behavior = kwargs.get('read_behavior', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, read_behavior=None): + super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query + self.read_behavior = read_behavior self.type = 'SalesforceSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py deleted file mode 100644 index 4441e92eaff3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py +++ /dev/null @@ -1,63 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class SalesforceSource(CopySource): - """A copy activity Salesforce source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - :param read_behavior: The read behavior for the operation. Default is - Query. 
Possible values include: 'Query', 'QueryAll' - :type read_behavior: str or - ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, read_behavior=None, **kwargs) -> None: - super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.read_behavior = read_behavior - self.type = 'SalesforceSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset.py index 048d26f85696..42d8ffe89f77 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset.py @@ -15,8 +15,6 @@ class SapBwCubeDataset(Dataset): """The SAP BW cube dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class SapBwCubeDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class SapBwCubeDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. 
:type type: str """ @@ -50,18 +48,6 @@ class SapBwCubeDataset(Dataset): 'type': {'required': True}, } - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(SapBwCubeDataset, self).__init__(**kwargs) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None): + super(SapBwCubeDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) self.type = 'SapBwCube' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset_py3.py deleted file mode 100644 index 08334a824ba4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset_py3.py +++ /dev/null @@ -1,67 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class SapBwCubeDataset(Dataset): - """The SAP BW cube dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: - super(SapBwCubeDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'SapBwCube' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service.py index a57164c7215d..cc11ab6d60b9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service.py @@ -15,8 +15,6 @@ class SapBWLinkedService(LinkedService): """SAP Business Warehouse Linked Service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,18 +29,18 @@ class SapBWLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param server: Required. Host name of the SAP BW instance. Type: string - (or Expression with resultType string). - :type server: object - :param system_number: Required. System number of the BW system. (Usually a - two-digit decimal number represented as a string.) Type: string (or + :param server: Host name of the SAP BW instance. Type: string (or Expression with resultType string). + :type server: object + :param system_number: System number of the BW system. (Usually a two-digit + decimal number represented as a string.) Type: string (or Expression with + resultType string). :type system_number: object - :param client_id: Required. Client ID of the client on the BW system. - (Usually a three-digit decimal number represented as a string) Type: - string (or Expression with resultType string). + :param client_id: Client ID of the client on the BW system. (Usually a + three-digit decimal number represented as a string) Type: string (or + Expression with resultType string). :type client_id: object :param user_name: Username to access the SAP BW server. Type: string (or Expression with resultType string). 
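# Usage sketch (illustrative; host name, client values, and the secret are
# placeholders): with this change SapBWLinkedService's required properties
# (server, systemNumber, clientId) become leading positional parameters rather
# than keyword-only ones, while still being enforced through _validation.
from azure.mgmt.datafactory.models import SapBWLinkedService, SecureString

bw_linked_service = SapBWLinkedService(
    server='mybwhost.example.com',   # hypothetical SAP BW host
    system_number='00',              # two-digit system number, passed as a string
    client_id='100',                 # three-digit client ID, passed as a string
    user_name='bw_user',
    password=SecureString(value='<placeholder>'),
)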
@@ -77,12 +75,12 @@ class SapBWLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SapBWLinkedService, self).__init__(**kwargs) - self.server = kwargs.get('server', None) - self.system_number = kwargs.get('system_number', None) - self.client_id = kwargs.get('client_id', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, server, system_number, client_id, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, user_name=None, password=None, encrypted_credential=None): + super(SapBWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.server = server + self.system_number = system_number + self.client_id = client_id + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential self.type = 'SapBW' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service_py3.py deleted file mode 100644 index 92aef25dc215..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service_py3.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class SapBWLinkedService(LinkedService): - """SAP Business Warehouse Linked Service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param server: Required. Host name of the SAP BW instance. Type: string - (or Expression with resultType string). - :type server: object - :param system_number: Required. System number of the BW system. (Usually a - two-digit decimal number represented as a string.) Type: string (or - Expression with resultType string). - :type system_number: object - :param client_id: Required. Client ID of the client on the BW system. - (Usually a three-digit decimal number represented as a string) Type: - string (or Expression with resultType string). 
- :type client_id: object - :param user_name: Username to access the SAP BW server. Type: string (or - Expression with resultType string). - :type user_name: object - :param password: Password to access the SAP BW server. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'system_number': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, server, system_number, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(SapBWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.server = server - self.system_number = system_number - self.client_id = client_id - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'SapBW' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source.py index e3762d8e694e..eedd7d2dd4c0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source.py @@ -15,8 +15,6 @@ class SapBwSource(CopySource): """A copy activity source for SapBW server via MDX. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class SapBwSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: MDX query. Type: string (or Expression with resultType string). 
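# Usage sketch (illustrative, with a placeholder MDX statement): every
# parameter of the converted SapBwSource constructor is optional, so a source
# can be built from just the MDX query.
from azure.mgmt.datafactory.models import SapBwSource

bw_source = SapBwSource(
    query='SELECT [Measures].MEMBERS ON COLUMNS FROM [SomeCube]',  # placeholder MDX
)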
@@ -51,7 +49,7 @@ class SapBwSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SapBwSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'SapBwSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source_py3.py deleted file mode 100644 index ed6ff734742d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class SapBwSource(CopySource): - """A copy activity source for SapBW server via MDX. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: MDX query. Type: string (or Expression with resultType - string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'SapBwSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service.py index 53d47ab8ae41..92f6867357ee 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service.py @@ -15,8 +15,6 @@ class SapCloudForCustomerLinkedService(LinkedService): """Linked service for SAP Cloud for Customer. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,11 +29,11 @@ class SapCloudForCustomerLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param url: Required. The URL of SAP Cloud for Customer OData API. For - example, '[https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1]'. Type: - string (or Expression with resultType string). + :param url: The URL of SAP Cloud for Customer OData API. For example, + '[https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1]'. Type: string + (or Expression with resultType string). :type url: object :param username: The username for Basic authentication. Type: string (or Expression with resultType string). 
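# Usage sketch (illustrative; the tenant URL and credentials are placeholders):
# url is now the single leading positional parameter of
# SapCloudForCustomerLinkedService, matching its required status in _validation.
from azure.mgmt.datafactory.models import (
    SapCloudForCustomerLinkedService,
    SecureString,
)

c4c_linked_service = SapCloudForCustomerLinkedService(
    url='https://mytenant.crm.ondemand.com/sap/c4c/odata/v1',  # placeholder tenant URL
    username='c4c_user',
    password=SecureString(value='<placeholder>'),
)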
@@ -67,10 +65,10 @@ class SapCloudForCustomerLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SapCloudForCustomerLinkedService, self).__init__(**kwargs) - self.url = kwargs.get('url', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, url, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, username=None, password=None, encrypted_credential=None): + super(SapCloudForCustomerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.url = url + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential self.type = 'SapCloudForCustomer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service_py3.py deleted file mode 100644 index 9e47fd696503..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service_py3.py +++ /dev/null @@ -1,76 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class SapCloudForCustomerLinkedService(LinkedService): - """Linked service for SAP Cloud for Customer. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param url: Required. The URL of SAP Cloud for Customer OData API. For - example, '[https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1]'. Type: - string (or Expression with resultType string). - :type url: object - :param username: The username for Basic authentication. Type: string (or - Expression with resultType string). - :type username: object - :param password: The password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. 
Either encryptedCredential or username/password must - be provided. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(SapCloudForCustomerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.url = url - self.username = username - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'SapCloudForCustomer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset.py index 436b251207a4..842721ddf1e9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset.py @@ -15,8 +15,6 @@ class SapCloudForCustomerResourceDataset(Dataset): """The path of the SAP Cloud for Customer OData entity. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class SapCloudForCustomerResourceDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,10 +39,10 @@ class SapCloudForCustomerResourceDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param path: Required. The path of the SAP Cloud for Customer OData - entity. Type: string (or Expression with resultType string). + :param path: The path of the SAP Cloud for Customer OData entity. Type: + string (or Expression with resultType string). 
:type path: object """ @@ -67,7 +65,7 @@ class SapCloudForCustomerResourceDataset(Dataset): 'path': {'key': 'typeProperties.path', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SapCloudForCustomerResourceDataset, self).__init__(**kwargs) - self.path = kwargs.get('path', None) + def __init__(self, linked_service_name, path, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None): + super(SapCloudForCustomerResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.path = path self.type = 'SapCloudForCustomerResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset_py3.py deleted file mode 100644 index 455bad7c9095..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset_py3.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class SapCloudForCustomerResourceDataset(Dataset): - """The path of the SAP Cloud for Customer OData entity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param path: Required. The path of the SAP Cloud for Customer OData - entity. Type: string (or Expression with resultType string). 
- :type path: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, path, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: - super(SapCloudForCustomerResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.path = path - self.type = 'SapCloudForCustomerResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py index e5a37858abb5..f7e8379fabea 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py @@ -15,8 +15,6 @@ class SapCloudForCustomerSink(CopySink): """A copy activity SAP Cloud for Customer sink. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -38,7 +36,7 @@ class SapCloudForCustomerSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. Default is 'Insert'. 
Possible values include: 'Insert', 'Update' @@ -61,7 +59,7 @@ class SapCloudForCustomerSink(CopySink): 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } - def __init__(self, **kwargs): - super(SapCloudForCustomerSink, self).__init__(**kwargs) - self.write_behavior = kwargs.get('write_behavior', None) + def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None): + super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.write_behavior = write_behavior self.type = 'SapCloudForCustomerSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py deleted file mode 100644 index 29f01fdd6891..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py +++ /dev/null @@ -1,67 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class SapCloudForCustomerSink(CopySink): - """A copy activity SAP Cloud for Customer sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param write_behavior: The write behavior for the operation. Default is - 'Insert'. 
Possible values include: 'Insert', 'Update' - :type write_behavior: str or - ~azure.mgmt.datafactory.models.SapCloudForCustomerSinkWriteBehavior - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: - super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.write_behavior = write_behavior - self.type = 'SapCloudForCustomerSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source.py index 561c1b342f93..8a21540f5e4d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source.py @@ -15,8 +15,6 @@ class SapCloudForCustomerSource(CopySource): """A copy activity source for SAP Cloud for Customer source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class SapCloudForCustomerSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or Expression with resultType string). 
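# Usage sketch (illustrative): all SapCloudForCustomerSource constructor
# parameters remain optional after the conversion; '$top=1' mirrors the OData
# query example given in the docstring above.
from azure.mgmt.datafactory.models import SapCloudForCustomerSource

c4c_source = SapCloudForCustomerSource(query='$top=1')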
@@ -51,7 +49,7 @@ class SapCloudForCustomerSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SapCloudForCustomerSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'SapCloudForCustomerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source_py3.py deleted file mode 100644 index e9dab6ad1899..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class SapCloudForCustomerSource(CopySource): - """A copy activity source for SAP Cloud for Customer source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: SAP Cloud for Customer OData query. For example, "$top=1". - Type: string (or Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'SapCloudForCustomerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service.py index 0ca69242055f..7fbe29c25b15 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service.py @@ -15,8 +15,6 @@ class SapEccLinkedService(LinkedService): """Linked service for SAP ERP Central Component(SAP ECC). - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,9 +29,9 @@ class SapEccLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param url: Required. The URL of SAP ECC OData API. For example, + :param url: The URL of SAP ECC OData API. For example, '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: string (or Expression with resultType string). 
:type url: str @@ -67,10 +65,10 @@ class SapEccLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, } - def __init__(self, **kwargs): - super(SapEccLinkedService, self).__init__(**kwargs) - self.url = kwargs.get('url', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, url, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, username=None, password=None, encrypted_credential=None): + super(SapEccLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.url = url + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential self.type = 'SapEcc' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service_py3.py deleted file mode 100644 index 7afd76b8fe09..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service_py3.py +++ /dev/null @@ -1,76 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class SapEccLinkedService(LinkedService): - """Linked service for SAP ERP Central Component(SAP ECC). - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param url: Required. The URL of SAP ECC OData API. For example, - '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: string (or - Expression with resultType string). - :type url: str - :param username: The username for Basic authentication. Type: string (or - Expression with resultType string). - :type username: str - :param password: The password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Either encryptedCredential or username/password must - be provided. Type: string (or Expression with resultType string). 
- :type encrypted_credential: str - """ - - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'str'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, - } - - def __init__(self, *, url: str, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, username: str=None, password=None, encrypted_credential: str=None, **kwargs) -> None: - super(SapEccLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.url = url - self.username = username - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'SapEcc' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py index f79367f49b3d..d893907b4205 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py @@ -15,8 +15,6 @@ class SapEccResourceDataset(Dataset): """The path of the SAP ECC OData entity. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class SapEccResourceDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,10 +39,10 @@ class SapEccResourceDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param path: Required. The path of the SAP ECC OData entity. Type: string - (or Expression with resultType string). + :param path: The path of the SAP ECC OData entity. Type: string (or + Expression with resultType string). 
:type path: object """ @@ -67,7 +65,7 @@ class SapEccResourceDataset(Dataset): 'path': {'key': 'typeProperties.path', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SapEccResourceDataset, self).__init__(**kwargs) - self.path = kwargs.get('path', None) + def __init__(self, linked_service_name, path, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None): + super(SapEccResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.path = path self.type = 'SapEccResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py deleted file mode 100644 index 76aaeb9bb9f2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class SapEccResourceDataset(Dataset): - """The path of the SAP ECC OData entity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param path: Required. The path of the SAP ECC OData entity. Type: string - (or Expression with resultType string). 
- :type path: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, path, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: - super(SapEccResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.path = path - self.type = 'SapEccResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py index 6379c33713d4..3a609576d56b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py @@ -15,8 +15,6 @@ class SapEccSource(CopySource): """A copy activity source for SAP ECC source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class SapEccSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). 
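# Sketch of how the reworked SAP ECC models compose (illustrative names and
# values; LinkedServiceReference is assumed to follow the same explicit
# py2-style signature as the classes in this patch):
from azure.mgmt.datafactory.models import (
    LinkedServiceReference,
    SapEccLinkedService,
    SapEccResourceDataset,
    SapEccSource,
)

# 'url' is now the leading positional argument of the linked service.
ecc_ls = SapEccLinkedService(
    url='https://hostname:port/sap/opu/odata/sap/servicename/',
    username='user',
)

# The dataset takes the linked service reference and entity path up front.
ecc_ds = SapEccResourceDataset(
    linked_service_name=LinkedServiceReference(reference_name='SapEccLS'),
    path='MyEntitySet',
)

# The copy source keeps only optional settings, as in the hunk below.
ecc_src = SapEccSource(query='$top=1')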
@@ -51,7 +49,7 @@ class SapEccSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SapEccSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'SapEccSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py deleted file mode 100644 index 4412cac39960..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class SapEccSource(CopySource): - """A copy activity source for SAP ECC source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: SAP ECC OData query. For example, "$top=1". Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'SapEccSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py index 14eda87b7be6..ad8c49f859be 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py @@ -15,8 +15,6 @@ class SapHanaLinkedService(LinkedService): """SAP HANA Linked Service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,13 +29,13 @@ class SapHanaLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param connection_string: SAP HANA ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object - :param server: Required. Host name of the SAP HANA server. Type: string - (or Expression with resultType string). + :param server: Host name of the SAP HANA server. Type: string (or + Expression with resultType string). :type server: object :param authentication_type: The authentication type to be used to connect to the SAP HANA server. 
Possible values include: 'Basic', 'Windows' @@ -74,12 +72,12 @@ class SapHanaLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SapHanaLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.server = kwargs.get('server', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, server, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, connection_string=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None): + super(SapHanaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.connection_string = connection_string + self.server = server + self.authentication_type = authentication_type + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential self.type = 'SapHana' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py deleted file mode 100644 index de378a5b2bf3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py +++ /dev/null @@ -1,85 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class SapHanaLinkedService(LinkedService): - """SAP HANA Linked Service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: SAP HANA ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param server: Required. Host name of the SAP HANA server. Type: string - (or Expression with resultType string). - :type server: object - :param authentication_type: The authentication type to be used to connect - to the SAP HANA server. 
Possible values include: 'Basic', 'Windows' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.SapHanaAuthenticationType - :param user_name: Username to access the SAP HANA server. Type: string (or - Expression with resultType string). - :type user_name: object - :param password: Password to access the SAP HANA server. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, server, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(SapHanaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.server = server - self.authentication_type = authentication_type - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'SapHana' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source.py index e946dbcd9a50..d4ee824fcd4a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source.py @@ -15,8 +15,6 @@ class SapHanaSource(CopySource): """A copy activity source for SAP HANA source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class SapHanaSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: SAP HANA Sql query. Type: string (or Expression with resultType string). 
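# Sketch: building the HANA linked service with its new explicit signature.
# 'server' is the only required argument; the credential values are
# placeholders, and SecureString stands in for any SecretBase implementation.
from azure.mgmt.datafactory.models import SapHanaLinkedService, SecureString

hana_ls = SapHanaLinkedService(
    server='myhanaserver:30015',
    authentication_type='Basic',   # or 'Windows'
    user_name='hana_user',
    password=SecureString(value='placeholder'),
)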
@@ -55,8 +53,8 @@ class SapHanaSource(CopySource): 'packet_size': {'key': 'packetSize', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SapHanaSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.packet_size = kwargs.get('packet_size', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, packet_size=None): + super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query + self.packet_size = packet_size self.type = 'SapHanaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source_py3.py deleted file mode 100644 index 730326c19183..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source_py3.py +++ /dev/null @@ -1,62 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class SapHanaSource(CopySource): - """A copy activity source for SAP HANA source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: SAP HANA Sql query. Type: string (or Expression with - resultType string). - :type query: object - :param packet_size: The packet size of data read from SAP HANA. Type: - integer(or Expression with resultType integer). 
- :type packet_size: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'packet_size': {'key': 'packetSize', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, packet_size=None, **kwargs) -> None: - super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.packet_size = packet_size - self.type = 'SapHanaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset.py index 6ff1ae31cd22..8568b4e98459 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset.py @@ -15,8 +15,6 @@ class SapHanaTableDataset(Dataset): """SAP HANA Table properties. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class SapHanaTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class SapHanaTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param sap_hana_table_dataset_schema: The schema name of SAP HANA. Type: string (or Expression with resultType string). 
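# Sketch of the table dataset (illustrative names): note in the attribute
# map below that sap_hana_table_dataset_schema serializes to
# typeProperties.schema, so it does not collide with the dataset's top-level
# 'schema' (physical columns) property.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference,
    SapHanaTableDataset,
)

hana_ds = SapHanaTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='SapHanaLS'),
    sap_hana_table_dataset_schema='MYSCHEMA',
    table='MYTABLE',
)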
@@ -70,8 +68,8 @@ class SapHanaTableDataset(Dataset): 'table': {'key': 'typeProperties.table', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SapHanaTableDataset, self).__init__(**kwargs) - self.sap_hana_table_dataset_schema = kwargs.get('sap_hana_table_dataset_schema', None) - self.table = kwargs.get('table', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, sap_hana_table_dataset_schema=None, table=None): + super(SapHanaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.sap_hana_table_dataset_schema = sap_hana_table_dataset_schema + self.table = table self.type = 'SapHanaTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset_py3.py deleted file mode 100644 index 6dc5c48ba21d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset_py3.py +++ /dev/null @@ -1,77 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class SapHanaTableDataset(Dataset): - """SAP HANA Table properties. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param sap_hana_table_dataset_schema: The schema name of SAP HANA. Type: - string (or Expression with resultType string). - :type sap_hana_table_dataset_schema: object - :param table: The table name of SAP HANA. Type: string (or Expression with - resultType string). 
- :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sap_hana_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, sap_hana_table_dataset_schema=None, table=None, **kwargs) -> None: - super(SapHanaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.sap_hana_table_dataset_schema = sap_hana_table_dataset_schema - self.table = table - self.type = 'SapHanaTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service.py index bfe9c323d302..c133b84f7118 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service.py @@ -15,8 +15,6 @@ class SapOpenHubLinkedService(LinkedService): """SAP Business Warehouse Open Hub Destination Linked Service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,19 +29,18 @@ class SapOpenHubLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param server: Required. Host name of the SAP BW instance where the open - hub destination is located. Type: string (or Expression with resultType + :param server: Host name of the SAP BW instance where the open hub + destination is located. Type: string (or Expression with resultType string). :type server: object - :param system_number: Required. System number of the BW system where the - open hub destination is located. (Usually a two-digit decimal number - represented as a string.) Type: string (or Expression with resultType - string). + :param system_number: System number of the BW system where the open hub + destination is located. (Usually a two-digit decimal number represented as + a string.) Type: string (or Expression with resultType string). :type system_number: object - :param client_id: Required. Client ID of the client on the BW system where - the open hub destination is located. 
(Usually a three-digit decimal number + :param client_id: Client ID of the client on the BW system where the open + hub destination is located. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). :type client_id: object @@ -87,13 +84,13 @@ class SapOpenHubLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SapOpenHubLinkedService, self).__init__(**kwargs) - self.server = kwargs.get('server', None) - self.system_number = kwargs.get('system_number', None) - self.client_id = kwargs.get('client_id', None) - self.language = kwargs.get('language', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, server, system_number, client_id, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, language=None, user_name=None, password=None, encrypted_credential=None): + super(SapOpenHubLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.server = server + self.system_number = system_number + self.client_id = client_id + self.language = language + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential self.type = 'SapOpenHub' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service_py3.py deleted file mode 100644 index eddc50b0f1c5..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service_py3.py +++ /dev/null @@ -1,99 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class SapOpenHubLinkedService(LinkedService): - """SAP Business Warehouse Open Hub Destination Linked Service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param server: Required. Host name of the SAP BW instance where the open - hub destination is located. 
Type: string (or Expression with resultType - string). - :type server: object - :param system_number: Required. System number of the BW system where the - open hub destination is located. (Usually a two-digit decimal number - represented as a string.) Type: string (or Expression with resultType - string). - :type system_number: object - :param client_id: Required. Client ID of the client on the BW system where - the open hub destination is located. (Usually a three-digit decimal number - represented as a string) Type: string (or Expression with resultType - string). - :type client_id: object - :param language: Language of the BW system where the open hub destination - is located. The default value is EN. Type: string (or Expression with - resultType string). - :type language: object - :param user_name: Username to access the SAP BW server where the open hub - destination is located. Type: string (or Expression with resultType - string). - :type user_name: object - :param password: Password to access the SAP BW server where the open hub - destination is located. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'system_number': {'required': True}, - 'client_id': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'language': {'key': 'typeProperties.language', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, server, system_number, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, language=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(SapOpenHubLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.server = server - self.system_number = system_number - self.client_id = client_id - self.language = language - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'SapOpenHub' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source.py index d6dcbda60e36..8178a134edae 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source.py +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source.py @@ -16,8 +16,6 @@ class SapOpenHubSource(CopySource): """A copy activity source for SAP Business Warehouse Open Hub Destination source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -32,7 +30,7 @@ class SapOpenHubSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param exclude_last_request: Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with @@ -59,8 +57,8 @@ class SapOpenHubSource(CopySource): 'base_request_id': {'key': 'baseRequestId', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SapOpenHubSource, self).__init__(**kwargs) - self.exclude_last_request = kwargs.get('exclude_last_request', None) - self.base_request_id = kwargs.get('base_request_id', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, exclude_last_request=None, base_request_id=None): + super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.exclude_last_request = exclude_last_request + self.base_request_id = base_request_id self.type = 'SapOpenHubSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source_py3.py deleted file mode 100644 index 752ffd8554b0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source_py3.py +++ /dev/null @@ -1,66 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class SapOpenHubSource(CopySource): - """A copy activity source for SAP Business Warehouse Open Hub Destination - source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). 
- :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param exclude_last_request: Whether to exclude the records of the last - request. The default value is true. Type: boolean (or Expression with - resultType boolean). - :type exclude_last_request: object - :param base_request_id: The ID of request for delta loading. Once it is - set, only data with requestId larger than the value of this property will - be retrieved. The default value is 0. Type: integer (or Expression with - resultType integer ). - :type base_request_id: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, - 'base_request_id': {'key': 'baseRequestId', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, exclude_last_request=None, base_request_id=None, **kwargs) -> None: - super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.exclude_last_request = exclude_last_request - self.base_request_id = base_request_id - self.type = 'SapOpenHubSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset.py index 2682969c5016..0f5c97b8ec6b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset.py @@ -15,8 +15,6 @@ class SapOpenHubTableDataset(Dataset): """Sap Business Warehouse Open Hub Destination Table properties. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class SapOpenHubTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,11 +39,11 @@ class SapOpenHubTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param open_hub_destination_name: Required. The name of the Open Hub - Destination with destination type as Database Table. Type: string (or - Expression with resultType string). 
+ :param open_hub_destination_name: The name of the Open Hub Destination + with destination type as Database Table. Type: string (or Expression with + resultType string). :type open_hub_destination_name: object :param exclude_last_request: Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with @@ -79,9 +77,9 @@ class SapOpenHubTableDataset(Dataset): 'base_request_id': {'key': 'typeProperties.baseRequestId', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SapOpenHubTableDataset, self).__init__(**kwargs) - self.open_hub_destination_name = kwargs.get('open_hub_destination_name', None) - self.exclude_last_request = kwargs.get('exclude_last_request', None) - self.base_request_id = kwargs.get('base_request_id', None) + def __init__(self, linked_service_name, open_hub_destination_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, exclude_last_request=None, base_request_id=None): + super(SapOpenHubTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.open_hub_destination_name = open_hub_destination_name + self.exclude_last_request = exclude_last_request + self.base_request_id = base_request_id self.type = 'SapOpenHubTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset_py3.py deleted file mode 100644 index b06a53c10db3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset_py3.py +++ /dev/null @@ -1,87 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class SapOpenHubTableDataset(Dataset): - """Sap Business Warehouse Open Hub Destination Table properties. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param open_hub_destination_name: Required. The name of the Open Hub - Destination with destination type as Database Table. Type: string (or - Expression with resultType string). - :type open_hub_destination_name: object - :param exclude_last_request: Whether to exclude the records of the last - request. The default value is true. Type: boolean (or Expression with - resultType boolean). - :type exclude_last_request: object - :param base_request_id: The ID of request for delta loading. Once it is - set, only data with requestId larger than the value of this property will - be retrieved. The default value is 0. Type: integer (or Expression with - resultType integer ). - :type base_request_id: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'open_hub_destination_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'open_hub_destination_name': {'key': 'typeProperties.openHubDestinationName', 'type': 'object'}, - 'exclude_last_request': {'key': 'typeProperties.excludeLastRequest', 'type': 'object'}, - 'base_request_id': {'key': 'typeProperties.baseRequestId', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, open_hub_destination_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, exclude_last_request=None, base_request_id=None, **kwargs) -> None: - super(SapOpenHubTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.open_hub_destination_name = open_hub_destination_name - self.exclude_last_request = exclude_last_request - self.base_request_id = base_request_id - self.type = 'SapOpenHubTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service.py index 83b76d0a4fdd..3a33f5c8e614 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service.py @@ -15,8 +15,6 @@ class SapTableLinkedService(LinkedService): """SAP Table Linked Service. - All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class SapTableLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param server: Host name of the SAP instance where the table is located. Type: string (or Expression with resultType string). @@ -119,22 +117,22 @@ class SapTableLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SapTableLinkedService, self).__init__(**kwargs) - self.server = kwargs.get('server', None) - self.system_number = kwargs.get('system_number', None) - self.client_id = kwargs.get('client_id', None) - self.language = kwargs.get('language', None) - self.system_id = kwargs.get('system_id', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.message_server = kwargs.get('message_server', None) - self.message_server_service = kwargs.get('message_server_service', None) - self.snc_mode = kwargs.get('snc_mode', None) - self.snc_my_name = kwargs.get('snc_my_name', None) - self.snc_partner_name = kwargs.get('snc_partner_name', None) - self.snc_library_path = kwargs.get('snc_library_path', None) - self.snc_qop = kwargs.get('snc_qop', None) - self.logon_group = kwargs.get('logon_group', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, server=None, system_number=None, client_id=None, language=None, system_id=None, user_name=None, password=None, message_server=None, message_server_service=None, snc_mode=None, snc_my_name=None, snc_partner_name=None, snc_library_path=None, snc_qop=None, logon_group=None, encrypted_credential=None): + super(SapTableLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.server = server + self.system_number = system_number + self.client_id = client_id + self.language = language + self.system_id = system_id + self.user_name = user_name + self.password = password + self.message_server = message_server + self.message_server_service = message_server_service + self.snc_mode = snc_mode + self.snc_my_name = snc_my_name + self.snc_partner_name = snc_partner_name + self.snc_library_path = snc_library_path + self.snc_qop = snc_qop + self.logon_group = logon_group + self.encrypted_credential = encrypted_credential self.type = 'SapTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service_py3.py deleted file mode 100644 index d098acc1bbda..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service_py3.py +++ /dev/null @@ -1,140 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
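Reviewer note: SapTableLinkedService gets the same treatment; every typeProperties field becomes an explicit keyword argument on the one remaining constructor. A minimal sketch of a basic-credential configuration (host and login values are illustrative):

    from azure.mgmt.datafactory.models import SapTableLinkedService, SecureString

    sap_ls = SapTableLinkedService(
        server='sapserver.contoso.com',  # hypothetical host
        system_number='00',
        client_id='100',
        language='EN',
        user_name='sapuser',
        password=SecureString('<secret>'))  # SecureString also loses its py3 overload below
    # sap_ls.type is set to 'SapTable' by the constructor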
-# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class SapTableLinkedService(LinkedService): - """SAP Table Linked Service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param server: Host name of the SAP instance where the table is located. - Type: string (or Expression with resultType string). - :type server: object - :param system_number: System number of the SAP system where the table is - located. (Usually a two-digit decimal number represented as a string.) - Type: string (or Expression with resultType string). - :type system_number: object - :param client_id: Client ID of the client on the SAP system where the - table is located. (Usually a three-digit decimal number represented as a - string) Type: string (or Expression with resultType string). - :type client_id: object - :param language: Language of the SAP system where the table is located. - The default value is EN. Type: string (or Expression with resultType - string). - :type language: object - :param system_id: SystemID of the SAP system where the table is located. - Type: string (or Expression with resultType string). - :type system_id: object - :param user_name: Username to access the SAP server where the table is - located. Type: string (or Expression with resultType string). - :type user_name: object - :param password: Password to access the SAP server where the table is - located. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param message_server: The hostname of the SAP Message Server. Type: - string (or Expression with resultType string). - :type message_server: object - :param message_server_service: The service name or port number of the - Message Server. Type: string (or Expression with resultType string). - :type message_server_service: object - :param snc_mode: SNC activation indicator to access the SAP server where - the table is located. Must be either 0 (off) or 1 (on). Type: string (or - Expression with resultType string). - :type snc_mode: object - :param snc_my_name: Initiator's SNC name to access the SAP server where - the table is located. Type: string (or Expression with resultType string). - :type snc_my_name: object - :param snc_partner_name: Communication partner's SNC name to access the - SAP server where the table is located. Type: string (or Expression with - resultType string). - :type snc_partner_name: object - :param snc_library_path: External security product's library to access the - SAP server where the table is located. Type: string (or Expression with - resultType string). 
- :type snc_library_path: object - :param snc_qop: SNC Quality of Protection. Allowed value include: 1, 2, 3, - 8, 9. Type: string (or Expression with resultType string). - :type snc_qop: object - :param logon_group: The Logon Group for the SAP System. Type: string (or - Expression with resultType string). - :type logon_group: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'language': {'key': 'typeProperties.language', 'type': 'object'}, - 'system_id': {'key': 'typeProperties.systemId', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'message_server': {'key': 'typeProperties.messageServer', 'type': 'object'}, - 'message_server_service': {'key': 'typeProperties.messageServerService', 'type': 'object'}, - 'snc_mode': {'key': 'typeProperties.sncMode', 'type': 'object'}, - 'snc_my_name': {'key': 'typeProperties.sncMyName', 'type': 'object'}, - 'snc_partner_name': {'key': 'typeProperties.sncPartnerName', 'type': 'object'}, - 'snc_library_path': {'key': 'typeProperties.sncLibraryPath', 'type': 'object'}, - 'snc_qop': {'key': 'typeProperties.sncQop', 'type': 'object'}, - 'logon_group': {'key': 'typeProperties.logonGroup', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, server=None, system_number=None, client_id=None, language=None, system_id=None, user_name=None, password=None, message_server=None, message_server_service=None, snc_mode=None, snc_my_name=None, snc_partner_name=None, snc_library_path=None, snc_qop=None, logon_group=None, encrypted_credential=None, **kwargs) -> None: - super(SapTableLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.server = server - self.system_number = system_number - self.client_id = client_id - self.language = language - self.system_id = system_id - self.user_name = user_name - self.password = password - self.message_server = message_server - self.message_server_service = message_server_service - self.snc_mode = snc_mode - self.snc_my_name = snc_my_name - self.snc_partner_name = snc_partner_name - self.snc_library_path = snc_library_path - self.snc_qop = snc_qop - self.logon_group = logon_group - self.encrypted_credential = encrypted_credential - self.type = 'SapTable' diff --git 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings.py index b688fe16683b..febdbc581f07 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings.py @@ -39,9 +39,9 @@ class SapTablePartitionSettings(Model): 'max_partitions_number': {'key': 'maxPartitionsNumber', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SapTablePartitionSettings, self).__init__(**kwargs) - self.partition_column_name = kwargs.get('partition_column_name', None) - self.partition_upper_bound = kwargs.get('partition_upper_bound', None) - self.partition_lower_bound = kwargs.get('partition_lower_bound', None) - self.max_partitions_number = kwargs.get('max_partitions_number', None) + def __init__(self, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, max_partitions_number=None): + super(SapTablePartitionSettings, self).__init__() + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound + self.max_partitions_number = max_partitions_number diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings_py3.py deleted file mode 100644 index 37bdf610ab35..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings_py3.py +++ /dev/null @@ -1,47 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SapTablePartitionSettings(Model): - """The settings that will be leveraged for SAP table source partitioning. - - :param partition_column_name: The name of the column that will be used for - proceeding range partitioning. Type: string (or Expression with resultType - string). - :type partition_column_name: object - :param partition_upper_bound: The maximum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_upper_bound: object - :param partition_lower_bound: The minimum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_lower_bound: object - :param max_partitions_number: The maximum value of partitions the table - will be split into. Type: integer (or Expression with resultType string). 
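Reviewer note: SapTablePartitionSettings keeps all four fields optional after the merge. A minimal sketch of a range partition over a hypothetical date column:

    from azure.mgmt.datafactory.models import SapTablePartitionSettings

    settings = SapTablePartitionSettings(
        partition_column_name='BUDAT',   # illustrative posting-date column
        partition_lower_bound='20190101',
        partition_upper_bound='20191231',
        max_partitions_number=10)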
- :type max_partitions_number: object - """ - - _attribute_map = { - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, - 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, - 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, - 'max_partitions_number': {'key': 'maxPartitionsNumber', 'type': 'object'}, - } - - def __init__(self, *, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, max_partitions_number=None, **kwargs) -> None: - super(SapTablePartitionSettings, self).__init__(**kwargs) - self.partition_column_name = partition_column_name - self.partition_upper_bound = partition_upper_bound - self.partition_lower_bound = partition_lower_bound - self.max_partitions_number = max_partitions_number diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset.py index 24601ba6b793..1fe2274850ad 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset.py @@ -15,8 +15,6 @@ class SapTableResourceDataset(Dataset): """SAP Table Resource properties. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class SapTableResourceDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,10 +39,10 @@ class SapTableResourceDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param table_name: Required. The name of the SAP Table. Type: string (or - Expression with resultType string). + :param table_name: The name of the SAP Table. Type: string (or Expression + with resultType string). 
:type table_name: object """ @@ -67,7 +65,7 @@ class SapTableResourceDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SapTableResourceDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) + def __init__(self, linked_service_name, table_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None): + super(SapTableResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name self.type = 'SapTableResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset_py3.py deleted file mode 100644 index 7b034ccd3a91..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset_py3.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class SapTableResourceDataset(Dataset): - """SAP Table Resource properties. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: Required. The name of the SAP Table. Type: string (or - Expression with resultType string). 
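Reviewer note: SapTableResourceDataset now takes linked_service_name and table_name positionally, mirroring the dataset change above. A minimal sketch (names are illustrative):

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, SapTableResourceDataset)

    ls_ref = LinkedServiceReference(reference_name='SapTableLinkedService')  # hypothetical
    dataset = SapTableResourceDataset(ls_ref, 'MARA')  # 'MARA' is an illustrative table name
    # dataset.type is set to 'SapTableResource' by the constructor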
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'table_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: - super(SapTableResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'SapTableResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source.py index 35799515440e..79173bd615ae 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source.py @@ -15,8 +15,6 @@ class SapTableSource(CopySource): """A copy activity source for SAP Table source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class SapTableSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param row_count: The number of rows to be retrieved. Type: integer(or Expression with resultType integer). 
@@ -87,14 +85,14 @@ class SapTableSource(CopySource): 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, } - def __init__(self, **kwargs): - super(SapTableSource, self).__init__(**kwargs) - self.row_count = kwargs.get('row_count', None) - self.row_skips = kwargs.get('row_skips', None) - self.rfc_table_fields = kwargs.get('rfc_table_fields', None) - self.rfc_table_options = kwargs.get('rfc_table_options', None) - self.batch_size = kwargs.get('batch_size', None) - self.custom_rfc_read_table_function_module = kwargs.get('custom_rfc_read_table_function_module', None) - self.partition_option = kwargs.get('partition_option', None) - self.partition_settings = kwargs.get('partition_settings', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, row_count=None, row_skips=None, rfc_table_fields=None, rfc_table_options=None, batch_size=None, custom_rfc_read_table_function_module=None, partition_option=None, partition_settings=None): + super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.row_count = row_count + self.row_skips = row_skips + self.rfc_table_fields = rfc_table_fields + self.rfc_table_options = rfc_table_options + self.batch_size = batch_size + self.custom_rfc_read_table_function_module = custom_rfc_read_table_function_module + self.partition_option = partition_option + self.partition_settings = partition_settings self.type = 'SapTableSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source_py3.py deleted file mode 100644 index bed7bbb93932..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source_py3.py +++ /dev/null @@ -1,100 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class SapTableSource(CopySource): - """A copy activity source for SAP Table source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. 
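Reviewer note: SapTableSource exposes every read option as an optional keyword on the merged constructor and composes with the partition settings above. A minimal sketch (filter and bounds are illustrative):

    from azure.mgmt.datafactory.models import (
        SapTablePartitionSettings, SapTableSource)

    source = SapTableSource(
        row_count=1000,
        rfc_table_options="MTART EQ 'FERT'",  # illustrative row filter
        partition_option='PartitionOnCalendarDate',
        partition_settings=SapTablePartitionSettings(
            partition_column_name='BUDAT',
            partition_lower_bound='20190101',
            partition_upper_bound='20191231'))
    # source.type is set to 'SapTableSource' by the constructor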
- :type type: str - :param row_count: The number of rows to be retrieved. Type: integer(or - Expression with resultType integer). - :type row_count: object - :param row_skips: The number of rows that will be skipped. Type: integer - (or Expression with resultType integer). - :type row_skips: object - :param rfc_table_fields: The fields of the SAP table that will be - retrieved. For example, column0, column1. Type: string (or Expression with - resultType string). - :type rfc_table_fields: object - :param rfc_table_options: The options for the filtering of the SAP Table. - For example, COLUMN0 EQ SOME VALUE. Type: string (or Expression with - resultType string). - :type rfc_table_options: object - :param batch_size: Specifies the maximum number of rows that will be - retrieved at a time when retrieving data from SAP Table. Type: integer (or - Expression with resultType integer). - :type batch_size: object - :param custom_rfc_read_table_function_module: Specifies the custom RFC - function module that will be used to read data from SAP Table. Type: - string (or Expression with resultType string). - :type custom_rfc_read_table_function_module: object - :param partition_option: The partition mechanism that will be used for SAP - table read in parallel. Possible values include: 'None', 'PartitionOnInt', - 'PartitionOnCalendarYear', 'PartitionOnCalendarMonth', - 'PartitionOnCalendarDate', 'PartitionOnTime' - :type partition_option: str or - ~azure.mgmt.datafactory.models.SapTablePartitionOption - :param partition_settings: The settings that will be leveraged for SAP - table source partitioning. - :type partition_settings: - ~azure.mgmt.datafactory.models.SapTablePartitionSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'row_count': {'key': 'rowCount', 'type': 'object'}, - 'row_skips': {'key': 'rowSkips', 'type': 'object'}, - 'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'}, - 'rfc_table_options': {'key': 'rfcTableOptions', 'type': 'object'}, - 'batch_size': {'key': 'batchSize', 'type': 'object'}, - 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, row_count=None, row_skips=None, rfc_table_fields=None, rfc_table_options=None, batch_size=None, custom_rfc_read_table_function_module=None, partition_option=None, partition_settings=None, **kwargs) -> None: - super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.row_count = row_count - self.row_skips = row_skips - self.rfc_table_fields = rfc_table_fields - self.rfc_table_options = rfc_table_options - self.batch_size = batch_size - self.custom_rfc_read_table_function_module = custom_rfc_read_table_function_module - self.partition_option = partition_option - 
self.partition_settings = partition_settings - self.type = 'SapTableSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger.py index b9ea331b8c6e..2c4ef68ae485 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger.py @@ -18,8 +18,6 @@ class ScheduleTrigger(MultiplePipelineTrigger): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -33,12 +31,12 @@ class ScheduleTrigger(MultiplePipelineTrigger): :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param pipelines: Pipelines that need to be started. :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - :param recurrence: Required. Recurrence schedule configuration. + :param recurrence: Recurrence schedule configuration. :type recurrence: ~azure.mgmt.datafactory.models.ScheduleTriggerRecurrence """ @@ -58,7 +56,7 @@ class ScheduleTrigger(MultiplePipelineTrigger): 'recurrence': {'key': 'typeProperties.recurrence', 'type': 'ScheduleTriggerRecurrence'}, } - def __init__(self, **kwargs): - super(ScheduleTrigger, self).__init__(**kwargs) - self.recurrence = kwargs.get('recurrence', None) + def __init__(self, recurrence, additional_properties=None, description=None, annotations=None, pipelines=None): + super(ScheduleTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines) + self.recurrence = recurrence self.type = 'ScheduleTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_py3.py deleted file mode 100644 index f13f01c7fa13..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_py3.py +++ /dev/null @@ -1,64 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .multiple_pipeline_trigger_py3 import MultiplePipelineTrigger - - -class ScheduleTrigger(MultiplePipelineTrigger): - """Trigger that creates pipeline runs periodically, on schedule. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Trigger description. 
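Reviewer note: ScheduleTrigger now takes its recurrence as a required positional argument; ScheduleTriggerRecurrence itself is merged a few hunks below and stays all-optional. A minimal hourly-trigger sketch (pipeline name is illustrative, and the usual PipelineReference/TriggerPipelineReference wrappers are assumed):

    from datetime import datetime
    from azure.mgmt.datafactory.models import (
        PipelineReference, ScheduleTrigger, ScheduleTriggerRecurrence,
        TriggerPipelineReference)

    recurrence = ScheduleTriggerRecurrence(
        frequency='Hour', interval=1, start_time=datetime(2019, 6, 7))
    trigger = ScheduleTrigger(
        recurrence,  # required, positional after this change
        pipelines=[TriggerPipelineReference(
            pipeline_reference=PipelineReference(reference_name='MyPipeline'))])
    # trigger.type is set to 'ScheduleTrigger' by the constructor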
- :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when - Start/Stop APIs are called on the Trigger. Possible values include: - 'Started', 'Stopped', 'Disabled' - :vartype runtime_state: str or - ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the - trigger. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param pipelines: Pipelines that need to be started. - :type pipelines: - list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - :param recurrence: Required. Recurrence schedule configuration. - :type recurrence: ~azure.mgmt.datafactory.models.ScheduleTriggerRecurrence - """ - - _validation = { - 'runtime_state': {'readonly': True}, - 'type': {'required': True}, - 'recurrence': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, - 'recurrence': {'key': 'typeProperties.recurrence', 'type': 'ScheduleTriggerRecurrence'}, - } - - def __init__(self, *, recurrence, additional_properties=None, description: str=None, annotations=None, pipelines=None, **kwargs) -> None: - super(ScheduleTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) - self.recurrence = recurrence - self.type = 'ScheduleTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence.py index 85408c45547b..021ad0afeb80 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence.py @@ -43,12 +43,12 @@ class ScheduleTriggerRecurrence(Model): 'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'}, } - def __init__(self, **kwargs): - super(ScheduleTriggerRecurrence, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.frequency = kwargs.get('frequency', None) - self.interval = kwargs.get('interval', None) - self.start_time = kwargs.get('start_time', None) - self.end_time = kwargs.get('end_time', None) - self.time_zone = kwargs.get('time_zone', None) - self.schedule = kwargs.get('schedule', None) + def __init__(self, additional_properties=None, frequency=None, interval=None, start_time=None, end_time=None, time_zone=None, schedule=None): + super(ScheduleTriggerRecurrence, self).__init__() + self.additional_properties = additional_properties + self.frequency = frequency + self.interval = interval + self.start_time = start_time + self.end_time = end_time + self.time_zone = time_zone + self.schedule = schedule diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence_py3.py deleted file mode 100644 index a9b6eded7b96..000000000000 --- 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence_py3.py +++ /dev/null @@ -1,54 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class ScheduleTriggerRecurrence(Model): - """The workflow trigger recurrence. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param frequency: The frequency. Possible values include: 'NotSpecified', - 'Minute', 'Hour', 'Day', 'Week', 'Month', 'Year' - :type frequency: str or ~azure.mgmt.datafactory.models.RecurrenceFrequency - :param interval: The interval. - :type interval: int - :param start_time: The start time. - :type start_time: datetime - :param end_time: The end time. - :type end_time: datetime - :param time_zone: The time zone. - :type time_zone: str - :param schedule: The recurrence schedule. - :type schedule: ~azure.mgmt.datafactory.models.RecurrenceSchedule - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'frequency': {'key': 'frequency', 'type': 'str'}, - 'interval': {'key': 'interval', 'type': 'int'}, - 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, - 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, - 'time_zone': {'key': 'timeZone', 'type': 'str'}, - 'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'}, - } - - def __init__(self, *, additional_properties=None, frequency=None, interval: int=None, start_time=None, end_time=None, time_zone: str=None, schedule=None, **kwargs) -> None: - super(ScheduleTriggerRecurrence, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.frequency = frequency - self.interval = interval - self.start_time = start_time - self.end_time = end_time - self.time_zone = time_zone - self.schedule = schedule diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action.py index 50bc0131a5cf..b4f0a8291f33 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action.py @@ -15,14 +15,12 @@ class ScriptAction(Model): """Custom script action to run on HDI ondemand cluster once it's up. - All required parameters must be populated in order to send to Azure. - - :param name: Required. The user provided name of the script action. + :param name: The user provided name of the script action. :type name: str - :param uri: Required. The URI for the script action. + :param uri: The URI for the script action. :type uri: str - :param roles: Required. The node types on which the script action should - be executed. + :param roles: The node types on which the script action should be + executed. :type roles: object :param parameters: The parameters for the script action. 
:type parameters: str @@ -41,9 +39,9 @@ class ScriptAction(Model): 'parameters': {'key': 'parameters', 'type': 'str'}, } - def __init__(self, **kwargs): - super(ScriptAction, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.uri = kwargs.get('uri', None) - self.roles = kwargs.get('roles', None) - self.parameters = kwargs.get('parameters', None) + def __init__(self, name, uri, roles, parameters=None): + super(ScriptAction, self).__init__() + self.name = name + self.uri = uri + self.roles = roles + self.parameters = parameters diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action_py3.py deleted file mode 100644 index c0e278073219..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action_py3.py +++ /dev/null @@ -1,49 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class ScriptAction(Model): - """Custom script action to run on HDI ondemand cluster once it's up. - - All required parameters must be populated in order to send to Azure. - - :param name: Required. The user provided name of the script action. - :type name: str - :param uri: Required. The URI for the script action. - :type uri: str - :param roles: Required. The node types on which the script action should - be executed. - :type roles: object - :param parameters: The parameters for the script action. - :type parameters: str - """ - - _validation = { - 'name': {'required': True}, - 'uri': {'required': True}, - 'roles': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'roles': {'key': 'roles', 'type': 'object'}, - 'parameters': {'key': 'parameters', 'type': 'str'}, - } - - def __init__(self, *, name: str, uri: str, roles, parameters: str=None, **kwargs) -> None: - super(ScriptAction, self).__init__(**kwargs) - self.name = name - self.uri = uri - self.roles = roles - self.parameters = parameters diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base.py index 3d9475dd4382..e7875d601f55 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base.py @@ -18,9 +18,7 @@ class SecretBase(Model): You probably want to use the sub-classes and not this class directly. Known sub-classes are: SecureString, AzureKeyVaultSecretReference - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. + :param type: Constant filled by server. 
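Reviewer note: ScriptAction promotes name, uri, and roles to required positional arguments, matching their validation entries. A minimal sketch for an HDI on-demand cluster (URI and role value are illustrative):

    from azure.mgmt.datafactory.models import ScriptAction

    action = ScriptAction(
        'installDeps',                                                # name (required)
        'https://contoso.blob.core.windows.net/scripts/install.sh',  # uri (required)
        'workernode',                                                # roles (required)
        parameters='--quiet')                                        # optional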
:type type: str """ @@ -36,6 +34,6 @@ class SecretBase(Model): 'type': {'SecureString': 'SecureString', 'AzureKeyVaultSecret': 'AzureKeyVaultSecretReference'} } - def __init__(self, **kwargs): - super(SecretBase, self).__init__(**kwargs) + def __init__(self): + super(SecretBase, self).__init__() self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base_py3.py deleted file mode 100644 index 29403e61b245..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base_py3.py +++ /dev/null @@ -1,41 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SecretBase(Model): - """The base definition of a secret type. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SecureString, AzureKeyVaultSecretReference - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SecureString': 'SecureString', 'AzureKeyVaultSecret': 'AzureKeyVaultSecretReference'} - } - - def __init__(self, **kwargs) -> None: - super(SecretBase, self).__init__(**kwargs) - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string.py index bec430fdf8a4..3cc6e7630fca 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string.py @@ -16,11 +16,9 @@ class SecureString(SecretBase): """Azure Data Factory secure string definition. The string value will be masked with asterisks '*' during Get or List API calls. - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param value: Required. Value of secure string. + :param value: Value of secure string. 
:type value: str """ @@ -34,7 +32,7 @@ class SecureString(SecretBase): 'value': {'key': 'value', 'type': 'str'}, } - def __init__(self, **kwargs): - super(SecureString, self).__init__(**kwargs) - self.value = kwargs.get('value', None) + def __init__(self, value): + super(SecureString, self).__init__() + self.value = value self.type = 'SecureString' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string_py3.py deleted file mode 100644 index d7ebd5e13e78..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string_py3.py +++ /dev/null @@ -1,40 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .secret_base_py3 import SecretBase - - -class SecureString(SecretBase): - """Azure Data Factory secure string definition. The string value will be - masked with asterisks '*' during Get or List API calls. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. - :type type: str - :param value: Required. Value of secure string. - :type value: str - """ - - _validation = { - 'type': {'required': True}, - 'value': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, - } - - def __init__(self, *, value: str, **kwargs) -> None: - super(SecureString, self).__init__(**kwargs) - self.value = value - self.type = 'SecureString' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference.py index fc56f8e8a799..b4126035d4f3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference.py @@ -15,12 +15,10 @@ class SelfDependencyTumblingWindowTriggerReference(DependencyReference): """Self referenced tumbling window trigger dependency. - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param offset: Required. Timespan applied to the start time of a tumbling - window when evaluating dependency. + :param offset: Timespan applied to the start time of a tumbling window + when evaluating dependency. :type offset: str :param size: The size of the window when evaluating the dependency. If undefined the frequency of the tumbling window will be used. 
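Reviewer note: SecureString reduces to a single required value argument, and SecretBase's _subtype_map still dispatches deserialization between SecureString and AzureKeyVaultSecretReference. A minimal sketch:

    from azure.mgmt.datafactory.models import SecureString

    secret = SecureString('<password-or-connection-string>')
    # secret.type is set to 'SecureString'; the value is masked with '*'
    # on Get/List API calls, per the class docstring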
@@ -39,8 +37,8 @@ class SelfDependencyTumblingWindowTriggerReference(DependencyReference): 'size': {'key': 'size', 'type': 'str'}, } - def __init__(self, **kwargs): - super(SelfDependencyTumblingWindowTriggerReference, self).__init__(**kwargs) - self.offset = kwargs.get('offset', None) - self.size = kwargs.get('size', None) + def __init__(self, offset, size=None): + super(SelfDependencyTumblingWindowTriggerReference, self).__init__() + self.offset = offset + self.size = size self.type = 'SelfDependencyTumblingWindowTriggerReference' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference_py3.py deleted file mode 100644 index 1dd1e575c2e8..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference_py3.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dependency_reference_py3 import DependencyReference - - -class SelfDependencyTumblingWindowTriggerReference(DependencyReference): - """Self referenced tumbling window trigger dependency. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. - :type type: str - :param offset: Required. Timespan applied to the start time of a tumbling - window when evaluating dependency. - :type offset: str - :param size: The size of the window when evaluating the dependency. If - undefined the frequency of the tumbling window will be used. - :type size: str - """ - - _validation = { - 'type': {'required': True}, - 'offset': {'required': True, 'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, - 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'offset': {'key': 'offset', 'type': 'str'}, - 'size': {'key': 'size', 'type': 'str'}, - } - - def __init__(self, *, offset: str, size: str=None, **kwargs) -> None: - super(SelfDependencyTumblingWindowTriggerReference, self).__init__(**kwargs) - self.offset = offset - self.size = size - self.type = 'SelfDependencyTumblingWindowTriggerReference' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime.py index 20744f02306d..13a81ce89a6f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime.py @@ -15,14 +15,12 @@ class SelfHostedIntegrationRuntime(IntegrationRuntime): """Self-hosted integration runtime. - All required parameters must be populated in order to send to Azure. 
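Reviewer note: SelfDependencyTumblingWindowTriggerReference makes offset positional; both offset and size must still satisfy the (d.)hh:mm:ss timespan pattern in the validation table. A minimal sketch:

    from azure.mgmt.datafactory.models import (
        SelfDependencyTumblingWindowTriggerReference)

    dependency = SelfDependencyTumblingWindowTriggerReference(
        '01:00:00',       # offset (required), timespan format
        size='02:00:00')  # optional window size; defaults to the trigger frequency
    # dependency.type is set to 'SelfDependencyTumblingWindowTriggerReference'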
- :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Integration runtime description. :type description: str - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param linked_info: :type linked_info: @@ -40,7 +38,7 @@ class SelfHostedIntegrationRuntime(IntegrationRuntime): 'linked_info': {'key': 'typeProperties.linkedInfo', 'type': 'LinkedIntegrationRuntimeType'}, } - def __init__(self, **kwargs): - super(SelfHostedIntegrationRuntime, self).__init__(**kwargs) - self.linked_info = kwargs.get('linked_info', None) + def __init__(self, additional_properties=None, description=None, linked_info=None): + super(SelfHostedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description) + self.linked_info = linked_info self.type = 'SelfHosted' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node.py index 1491a80dc19a..c0f9705bd12a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node.py @@ -116,9 +116,9 @@ class SelfHostedIntegrationRuntimeNode(Model): 'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'}, } - def __init__(self, **kwargs): - super(SelfHostedIntegrationRuntimeNode, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) + def __init__(self, additional_properties=None): + super(SelfHostedIntegrationRuntimeNode, self).__init__() + self.additional_properties = additional_properties self.node_name = None self.machine_name = None self.host_service_uri = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node_py3.py deleted file mode 100644 index 59b703737a5d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node_py3.py +++ /dev/null @@ -1,139 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SelfHostedIntegrationRuntimeNode(Model): - """Properties of Self-hosted integration runtime node. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar node_name: Name of the integration runtime node. - :vartype node_name: str - :ivar machine_name: Machine name of the integration runtime node. 
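Reviewer note: the self-hosted integration runtime models follow the same pattern; SelfHostedIntegrationRuntime keeps everything optional, and SelfHostedIntegrationRuntimeNode accepts only additional_properties because its remaining fields are read-only and server-populated. A minimal sketch (description is illustrative):

    from azure.mgmt.datafactory.models import SelfHostedIntegrationRuntime

    runtime = SelfHostedIntegrationRuntime(
        description='On-premises runtime for the SAP sources above')
    # runtime.type is set to 'SelfHosted'; linked_info stays None unless this
    # runtime is shared from another factory via LinkedIntegrationRuntimeType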
- :vartype machine_name: str - :ivar host_service_uri: URI for the host machine of the integration - runtime. - :vartype host_service_uri: str - :ivar status: Status of the integration runtime node. Possible values - include: 'NeedRegistration', 'Online', 'Limited', 'Offline', 'Upgrading', - 'Initializing', 'InitializeFailed' - :vartype status: str or - ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNodeStatus - :ivar capabilities: The integration runtime capabilities dictionary - :vartype capabilities: dict[str, str] - :ivar version_status: Status of the integration runtime node version. - :vartype version_status: str - :ivar version: Version of the integration runtime node. - :vartype version: str - :ivar register_time: The time at which the integration runtime node was - registered in ISO8601 format. - :vartype register_time: datetime - :ivar last_connect_time: The most recent time at which the integration - runtime was connected in ISO8601 format. - :vartype last_connect_time: datetime - :ivar expiry_time: The time at which the integration runtime will expire - in ISO8601 format. - :vartype expiry_time: datetime - :ivar last_start_time: The time the node last started up. - :vartype last_start_time: datetime - :ivar last_stop_time: The integration runtime node last stop time. - :vartype last_stop_time: datetime - :ivar last_update_result: The result of the last integration runtime node - update. Possible values include: 'None', 'Succeed', 'Fail' - :vartype last_update_result: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeUpdateResult - :ivar last_start_update_time: The last time for the integration runtime - node update start. - :vartype last_start_update_time: datetime - :ivar last_end_update_time: The last time for the integration runtime node - update end. - :vartype last_end_update_time: datetime - :ivar is_active_dispatcher: Indicates whether this node is the active - dispatcher for integration runtime requests. - :vartype is_active_dispatcher: bool - :ivar concurrent_jobs_limit: Maximum concurrent jobs on the integration - runtime node. - :vartype concurrent_jobs_limit: int - :ivar max_concurrent_jobs: The maximum concurrent jobs in this integration - runtime. 
- :vartype max_concurrent_jobs: int - """ - - _validation = { - 'node_name': {'readonly': True}, - 'machine_name': {'readonly': True}, - 'host_service_uri': {'readonly': True}, - 'status': {'readonly': True}, - 'capabilities': {'readonly': True}, - 'version_status': {'readonly': True}, - 'version': {'readonly': True}, - 'register_time': {'readonly': True}, - 'last_connect_time': {'readonly': True}, - 'expiry_time': {'readonly': True}, - 'last_start_time': {'readonly': True}, - 'last_stop_time': {'readonly': True}, - 'last_update_result': {'readonly': True}, - 'last_start_update_time': {'readonly': True}, - 'last_end_update_time': {'readonly': True}, - 'is_active_dispatcher': {'readonly': True}, - 'concurrent_jobs_limit': {'readonly': True}, - 'max_concurrent_jobs': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'node_name': {'key': 'nodeName', 'type': 'str'}, - 'machine_name': {'key': 'machineName', 'type': 'str'}, - 'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'capabilities': {'key': 'capabilities', 'type': '{str}'}, - 'version_status': {'key': 'versionStatus', 'type': 'str'}, - 'version': {'key': 'version', 'type': 'str'}, - 'register_time': {'key': 'registerTime', 'type': 'iso-8601'}, - 'last_connect_time': {'key': 'lastConnectTime', 'type': 'iso-8601'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'last_start_time': {'key': 'lastStartTime', 'type': 'iso-8601'}, - 'last_stop_time': {'key': 'lastStopTime', 'type': 'iso-8601'}, - 'last_update_result': {'key': 'lastUpdateResult', 'type': 'str'}, - 'last_start_update_time': {'key': 'lastStartUpdateTime', 'type': 'iso-8601'}, - 'last_end_update_time': {'key': 'lastEndUpdateTime', 'type': 'iso-8601'}, - 'is_active_dispatcher': {'key': 'isActiveDispatcher', 'type': 'bool'}, - 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, - 'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'}, - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(SelfHostedIntegrationRuntimeNode, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.node_name = None - self.machine_name = None - self.host_service_uri = None - self.status = None - self.capabilities = None - self.version_status = None - self.version = None - self.register_time = None - self.last_connect_time = None - self.expiry_time = None - self.last_start_time = None - self.last_stop_time = None - self.last_update_result = None - self.last_start_update_time = None - self.last_end_update_time = None - self.is_active_dispatcher = None - self.concurrent_jobs_limit = None - self.max_concurrent_jobs = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_py3.py deleted file mode 100644 index a25d04373849..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_py3.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. 
-# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .integration_runtime_py3 import IntegrationRuntime - - -class SelfHostedIntegrationRuntime(IntegrationRuntime): - """Self-hosted integration runtime. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Integration runtime description. - :type description: str - :param type: Required. Constant filled by server. - :type type: str - :param linked_info: - :type linked_info: - ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeType - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_info': {'key': 'typeProperties.linkedInfo', 'type': 'LinkedIntegrationRuntimeType'}, - } - - def __init__(self, *, additional_properties=None, description: str=None, linked_info=None, **kwargs) -> None: - super(SelfHostedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description, **kwargs) - self.linked_info = linked_info - self.type = 'SelfHosted' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status.py index 5dd9995987d9..25226a256900 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status.py @@ -18,8 +18,6 @@ class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): 'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied' :vartype state: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeState - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :ivar create_time: The time at which the integration runtime was created, in ISO8601 format. 
@@ -125,13 +123,13 @@ class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): 'auto_update_eta': {'key': 'typeProperties.autoUpdateETA', 'type': 'iso-8601'}, } - def __init__(self, **kwargs): - super(SelfHostedIntegrationRuntimeStatus, self).__init__(**kwargs) + def __init__(self, additional_properties=None, nodes=None, links=None): + super(SelfHostedIntegrationRuntimeStatus, self).__init__(additional_properties=additional_properties) self.create_time = None self.task_queue_id = None self.internal_channel_encryption = None self.version = None - self.nodes = kwargs.get('nodes', None) + self.nodes = nodes self.scheduled_update_date = None self.update_delay_offset = None self.local_time_zone_offset = None @@ -139,7 +137,7 @@ def __init__(self, **kwargs): self.service_urls = None self.auto_update = None self.version_status = None - self.links = kwargs.get('links', None) + self.links = links self.pushed_version = None self.latest_version = None self.auto_update_eta = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status_py3.py deleted file mode 100644 index acad7661fc15..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status_py3.py +++ /dev/null @@ -1,146 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .integration_runtime_status_py3 import IntegrationRuntimeStatus - - -class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): - """Self-hosted integration runtime status. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar data_factory_name: The data factory name which the integration - runtime belong to. - :vartype data_factory_name: str - :ivar state: The state of integration runtime. Possible values include: - 'Initial', 'Stopped', 'Started', 'Starting', 'Stopping', - 'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied' - :vartype state: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeState - :param type: Required. Constant filled by server. - :type type: str - :ivar create_time: The time at which the integration runtime was created, - in ISO8601 format. - :vartype create_time: datetime - :ivar task_queue_id: The task queue id of the integration runtime. - :vartype task_queue_id: str - :ivar internal_channel_encryption: It is used to set the encryption mode - for node-node communication channel (when more than 2 self-hosted - integration runtime nodes exist). 
Possible values include: 'NotSet', - 'SslEncrypted', 'NotEncrypted' - :vartype internal_channel_encryption: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeInternalChannelEncryptionMode - :ivar version: Version of the integration runtime. - :vartype version: str - :param nodes: The list of nodes for this integration runtime. - :type nodes: - list[~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode] - :ivar scheduled_update_date: The date at which the integration runtime - will be scheduled to update, in ISO8601 format. - :vartype scheduled_update_date: datetime - :ivar update_delay_offset: The time in the date scheduled by service to - update the integration runtime, e.g., PT03H is 3 hours - :vartype update_delay_offset: str - :ivar local_time_zone_offset: The local time zone offset in hours. - :vartype local_time_zone_offset: str - :ivar capabilities: Object with additional information about integration - runtime capabilities. - :vartype capabilities: dict[str, str] - :ivar service_urls: The URLs for the services used in integration runtime - backend service. - :vartype service_urls: list[str] - :ivar auto_update: Whether Self-hosted integration runtime auto update has - been turned on. Possible values include: 'On', 'Off' - :vartype auto_update: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate - :ivar version_status: Status of the integration runtime version. - :vartype version_status: str - :param links: The list of linked integration runtimes that are created to - share with this integration runtime. - :type links: list[~azure.mgmt.datafactory.models.LinkedIntegrationRuntime] - :ivar pushed_version: The version that the integration runtime is going to - update to. - :vartype pushed_version: str - :ivar latest_version: The latest version on download center. - :vartype latest_version: str - :ivar auto_update_eta: The estimated time when the self-hosted integration - runtime will be updated. 
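
Everything marked :ivar on SelfHostedIntegrationRuntimeStatus is populated by the service, so after this change only additional_properties, nodes, and links remain as constructor arguments. A sketch of what a freshly built instance looks like before any service response fills it in:

    from azure.mgmt.datafactory.models import (
        SelfHostedIntegrationRuntimeNode,
        SelfHostedIntegrationRuntimeStatus,
    )

    # Read-only fields start as None; they are only filled in when msrest
    # deserializes a service response into this model.
    status = SelfHostedIntegrationRuntimeStatus(
        nodes=[SelfHostedIntegrationRuntimeNode()],
    )
    print(status.type)         # 'SelfHosted', the constant discriminator
    print(status.create_time)  # None until set from a response
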
- :vartype auto_update_eta: datetime - """ - - _validation = { - 'data_factory_name': {'readonly': True}, - 'state': {'readonly': True}, - 'type': {'required': True}, - 'create_time': {'readonly': True}, - 'task_queue_id': {'readonly': True}, - 'internal_channel_encryption': {'readonly': True}, - 'version': {'readonly': True}, - 'scheduled_update_date': {'readonly': True}, - 'update_delay_offset': {'readonly': True}, - 'local_time_zone_offset': {'readonly': True}, - 'capabilities': {'readonly': True}, - 'service_urls': {'readonly': True}, - 'auto_update': {'readonly': True}, - 'version_status': {'readonly': True}, - 'pushed_version': {'readonly': True}, - 'latest_version': {'readonly': True}, - 'auto_update_eta': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, - 'state': {'key': 'state', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'create_time': {'key': 'typeProperties.createTime', 'type': 'iso-8601'}, - 'task_queue_id': {'key': 'typeProperties.taskQueueId', 'type': 'str'}, - 'internal_channel_encryption': {'key': 'typeProperties.internalChannelEncryption', 'type': 'str'}, - 'version': {'key': 'typeProperties.version', 'type': 'str'}, - 'nodes': {'key': 'typeProperties.nodes', 'type': '[SelfHostedIntegrationRuntimeNode]'}, - 'scheduled_update_date': {'key': 'typeProperties.scheduledUpdateDate', 'type': 'iso-8601'}, - 'update_delay_offset': {'key': 'typeProperties.updateDelayOffset', 'type': 'str'}, - 'local_time_zone_offset': {'key': 'typeProperties.localTimeZoneOffset', 'type': 'str'}, - 'capabilities': {'key': 'typeProperties.capabilities', 'type': '{str}'}, - 'service_urls': {'key': 'typeProperties.serviceUrls', 'type': '[str]'}, - 'auto_update': {'key': 'typeProperties.autoUpdate', 'type': 'str'}, - 'version_status': {'key': 'typeProperties.versionStatus', 'type': 'str'}, - 'links': {'key': 'typeProperties.links', 'type': '[LinkedIntegrationRuntime]'}, - 'pushed_version': {'key': 'typeProperties.pushedVersion', 'type': 'str'}, - 'latest_version': {'key': 'typeProperties.latestVersion', 'type': 'str'}, - 'auto_update_eta': {'key': 'typeProperties.autoUpdateETA', 'type': 'iso-8601'}, - } - - def __init__(self, *, additional_properties=None, nodes=None, links=None, **kwargs) -> None: - super(SelfHostedIntegrationRuntimeStatus, self).__init__(additional_properties=additional_properties, **kwargs) - self.create_time = None - self.task_queue_id = None - self.internal_channel_encryption = None - self.version = None - self.nodes = nodes - self.scheduled_update_date = None - self.update_delay_offset = None - self.local_time_zone_offset = None - self.capabilities = None - self.service_urls = None - self.auto_update = None - self.version_status = None - self.links = links - self.pushed_version = None - self.latest_version = None - self.auto_update_eta = None - self.type = 'SelfHosted' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service.py index 4d42f575e769..ed4f6b5f4e1d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service.py @@ -15,8 +15,6 @@ class ServiceNowLinkedService(LinkedService): """ServiceNow server linked service. 
- All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,13 +29,13 @@ class ServiceNowLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param endpoint: Required. The endpoint of the ServiceNow server. (i.e. + :param endpoint: The endpoint of the ServiceNow server. (i.e. .service-now.com) :type endpoint: object - :param authentication_type: Required. The authentication type to use. - Possible values include: 'Basic', 'OAuth2' + :param authentication_type: The authentication type to use. Possible + values include: 'Basic', 'OAuth2' :type authentication_type: str or ~azure.mgmt.datafactory.models.ServiceNowAuthenticationType :param username: The user name used to connect to the ServiceNow server @@ -91,16 +89,16 @@ class ServiceNowLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(ServiceNowLinkedService, self).__init__(**kwargs) - self.endpoint = kwargs.get('endpoint', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, endpoint, authentication_type, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, username=None, password=None, client_id=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None): + super(ServiceNowLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.endpoint = endpoint + self.authentication_type = authentication_type + self.username = username + self.password = password + self.client_id = client_id + self.client_secret = client_secret + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential self.type = 'ServiceNow' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service_py3.py deleted file mode 100644 index b9d166f241d6..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service_py3.py +++ /dev/null @@ -1,106 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class ServiceNowLinkedService(LinkedService): - """ServiceNow server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param endpoint: Required. The endpoint of the ServiceNow server. (i.e. - .service-now.com) - :type endpoint: object - :param authentication_type: Required. The authentication type to use. - Possible values include: 'Basic', 'OAuth2' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.ServiceNowAuthenticationType - :param username: The user name used to connect to the ServiceNow server - for Basic and OAuth2 authentication. - :type username: object - :param password: The password corresponding to the user name for Basic and - OAuth2 authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param client_id: The client id for OAuth2 authentication. - :type client_id: object - :param client_secret: The client secret for OAuth2 authentication. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, endpoint, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, username=None, password=None, client_id=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(ServiceNowLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.endpoint = endpoint - self.authentication_type = authentication_type - self.username = username - self.password = password - self.client_id = client_id - self.client_secret = client_secret - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'ServiceNow' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset.py index a9821ba0fd10..eb2785201321 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset.py @@ -15,8 +15,6 @@ class ServiceNowObjectDataset(Dataset): """ServiceNow server dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class ServiceNowObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. 
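
With the py3 variant gone, ServiceNowLinkedService keeps endpoint and authentication_type at the front of the signature, since the swagger still marks them required. A construction sketch under Basic authentication (instance URL and credentials are placeholders):

    from azure.mgmt.datafactory.models import SecureString, ServiceNowLinkedService

    # 'endpoint' and 'authentication_type' lead the new signature; the
    # remaining fields are optional keyword arguments.
    service_now = ServiceNowLinkedService(
        endpoint='https://instance.service-now.com',  # placeholder URL
        authentication_type='Basic',
        username='integration.user',
        password=SecureString(value='<password>'),
    )
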
:type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class ServiceNowObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -66,7 +64,7 @@ class ServiceNowObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(ServiceNowObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): + super(ServiceNowObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name self.type = 'ServiceNowObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset_py3.py deleted file mode 100644 index fcd2fd537a31..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class ServiceNowObjectDataset(Dataset): - """ServiceNow server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. 
- :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(ServiceNowObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'ServiceNowObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source.py index 16b10bb8de5e..d00397e49404 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source.py @@ -15,8 +15,6 @@ class ServiceNowSource(CopySource): """A copy activity ServiceNow server source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class ServiceNowSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
@@ -51,7 +49,7 @@ class ServiceNowSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(ServiceNowSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'ServiceNowSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source_py3.py deleted file mode 100644 index 20d1a64d04d3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class ServiceNowSource(CopySource): - """A copy activity ServiceNow server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'ServiceNowSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity.py index e8dd1690862d..05adf0210720 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity.py @@ -15,12 +15,10 @@ class SetVariableActivity(ControlActivity): """Set value for a Variable. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Activity name. :type name: str :param description: Activity description. :type description: str @@ -28,7 +26,7 @@ class SetVariableActivity(ControlActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param variable_name: Name of the variable whose value needs to be set. 
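
The dataset and copy-source pair follow the same pattern: linked_service_name is the dataset's only required argument, and the source takes an optional query. A sketch wiring the two together (the linked service name, table, and query are placeholders):

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        ServiceNowObjectDataset,
        ServiceNowSource,
    )

    # The reference name must match a linked service already defined in the
    # factory; 'incident' and the query below are illustrative.
    dataset = ServiceNowObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name='ServiceNowLS'),
        table_name='incident',
    )
    source = ServiceNowSource(query='SELECT sys_id, number FROM incident')
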
:type variable_name: str @@ -52,8 +50,8 @@ class SetVariableActivity(ControlActivity): 'value': {'key': 'typeProperties.value', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SetVariableActivity, self).__init__(**kwargs) - self.variable_name = kwargs.get('variable_name', None) - self.value = kwargs.get('value', None) + def __init__(self, name, additional_properties=None, description=None, depends_on=None, user_properties=None, variable_name=None, value=None): + super(SetVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties) + self.variable_name = variable_name + self.value = value self.type = 'SetVariable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity_py3.py deleted file mode 100644 index e045abee3dfb..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity_py3.py +++ /dev/null @@ -1,59 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .control_activity_py3 import ControlActivity - - -class SetVariableActivity(ControlActivity): - """Set value for a Variable. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param variable_name: Name of the variable whose value needs to be set. - :type variable_name: str - :param value: Value to be set. 
Could be a static value or Expression - :type value: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, - 'value': {'key': 'typeProperties.value', 'type': 'object'}, - } - - def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, variable_name: str=None, value=None, **kwargs) -> None: - super(SetVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.variable_name = variable_name - self.value = value - self.type = 'SetVariable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location.py index 5b8fd4e42ba2..49a19e5398f1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location.py @@ -15,12 +15,10 @@ class SftpLocation(DatasetLocation): """The location of SFTP dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. + :param type: Type of dataset storage location. :type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string) @@ -34,12 +32,5 @@ class SftpLocation(DatasetLocation): 'type': {'required': True}, } - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(SftpLocation, self).__init__(**kwargs) + def __init__(self, type, additional_properties=None, folder_path=None, file_name=None): + super(SftpLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location_py3.py deleted file mode 100644 index c5e2feafa971..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location_py3.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
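
After the rewrite, SetVariableActivity requires only the activity name; the variable name and value stay optional. A sketch setting a pipeline variable from an expression (the variable and expression are illustrative):

    from azure.mgmt.datafactory.models import SetVariableActivity

    # 'value' accepts either a static value or an ADF expression string.
    set_run_date = SetVariableActivity(
        name='SetRunDate',
        variable_name='runDate',
        value='@utcnow()',
    )
    assert set_run_date.type == 'SetVariable'
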
-# -------------------------------------------------------------------------- - -from .dataset_location_py3 import DatasetLocation - - -class SftpLocation(DatasetLocation): - """The location of SFTP dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). - :type file_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: - super(SftpLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings.py index 5e7b4faf77ad..8e2df93c4d31 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings.py @@ -15,12 +15,10 @@ class SftpReadSettings(StoreReadSettings): """Sftp read settings. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. The read setting type. + :param type: The read setting type. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. 
Type: integer (or Expression with resultType @@ -59,10 +57,10 @@ class SftpReadSettings(StoreReadSettings): 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SftpReadSettings, self).__init__(**kwargs) - self.recursive = kwargs.get('recursive', None) - self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) - self.wildcard_file_name = kwargs.get('wildcard_file_name', None) - self.modified_datetime_start = kwargs.get('modified_datetime_start', None) - self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + def __init__(self, type, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, modified_datetime_start=None, modified_datetime_end=None): + super(SftpReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings_py3.py deleted file mode 100644 index e6c27e3ad08a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings_py3.py +++ /dev/null @@ -1,68 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .store_read_settings_py3 import StoreReadSettings - - -class SftpReadSettings(StoreReadSettings): - """Sftp read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param recursive: If true, files under the folder path will be read - recursively. Default is true. Type: boolean (or Expression with resultType - boolean). - :type recursive: object - :param wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or - Expression with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: Sftp wildcardFileName. Type: string (or - Expression with resultType string). - :type wildcard_file_name: object - :param modified_datetime_start: The start of file's modified datetime. - Type: string (or Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: - string (or Expression with resultType string). 
- :type modified_datetime_end: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: - super(SftpReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.recursive = recursive - self.wildcard_folder_path = wildcard_folder_path - self.wildcard_file_name = wildcard_file_name - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service.py index aa4c535fc514..cc7839f000a7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service.py @@ -15,8 +15,6 @@ class SftpServerLinkedService(LinkedService): """A linked service for an SSH File Transfer Protocol (SFTP) server. . - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,10 +29,10 @@ class SftpServerLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param host: Required. The SFTP server host name. Type: string (or - Expression with resultType string). + :param host: The SFTP server host name. Type: string (or Expression with + resultType string). :type host: object :param port: The TCP port number that the SFTP server uses to listen for client connections. Default value is 22. 
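
SftpLocation and SftpReadSettings both keep type as their one required argument, because the storage-location and read-settings hierarchies are discriminated by that string. A sketch assuming the discriminator values follow the class names, as the ADF JSON payloads do; the folder path and wildcard are placeholders:

    from azure.mgmt.datafactory.models import SftpLocation, SftpReadSettings

    # Per the docstring above, 'recursive' defaults to true on the service
    # side when omitted.
    location = SftpLocation(type='SftpLocation', folder_path='/incoming')
    read_settings = SftpReadSettings(
        type='SftpReadSettings',
        recursive=True,
        wildcard_file_name='*.csv',
    )
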
Type: integer (or Expression with @@ -103,17 +101,17 @@ class SftpServerLinkedService(LinkedService): 'host_key_fingerprint': {'key': 'typeProperties.hostKeyFingerprint', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SftpServerLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.port = kwargs.get('port', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.private_key_path = kwargs.get('private_key_path', None) - self.private_key_content = kwargs.get('private_key_content', None) - self.pass_phrase = kwargs.get('pass_phrase', None) - self.skip_host_key_validation = kwargs.get('skip_host_key_validation', None) - self.host_key_fingerprint = kwargs.get('host_key_fingerprint', None) + def __init__(self, host, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, port=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, private_key_path=None, private_key_content=None, pass_phrase=None, skip_host_key_validation=None, host_key_fingerprint=None): + super(SftpServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.host = host + self.port = port + self.authentication_type = authentication_type + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.private_key_path = private_key_path + self.private_key_content = private_key_content + self.pass_phrase = pass_phrase + self.skip_host_key_validation = skip_host_key_validation + self.host_key_fingerprint = host_key_fingerprint self.type = 'Sftp' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service_py3.py deleted file mode 100644 index 7decd7781348..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service_py3.py +++ /dev/null @@ -1,119 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class SftpServerLinkedService(LinkedService): - """A linked service for an SSH File Transfer Protocol (SFTP) server. . - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The SFTP server host name. Type: string (or - Expression with resultType string). - :type host: object - :param port: The TCP port number that the SFTP server uses to listen for - client connections. Default value is 22. Type: integer (or Expression with - resultType integer), minimum: 0. - :type port: object - :param authentication_type: The authentication type to be used to connect - to the FTP server. Possible values include: 'Basic', 'SshPublicKey' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.SftpAuthenticationType - :param user_name: The username used to log on to the SFTP server. Type: - string (or Expression with resultType string). - :type user_name: object - :param password: Password to logon the SFTP server for Basic - authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - :param private_key_path: The SSH private key file path for SshPublicKey - authentication. Only valid for on-premises copy. For on-premises copy with - SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent - should be specified. SSH private key should be OpenSSH format. Type: - string (or Expression with resultType string). - :type private_key_path: object - :param private_key_content: Base64 encoded SSH private key content for - SshPublicKey authentication. For on-premises copy with SshPublicKey - authentication, either PrivateKeyPath or PrivateKeyContent should be - specified. SSH private key should be OpenSSH format. - :type private_key_content: ~azure.mgmt.datafactory.models.SecretBase - :param pass_phrase: The password to decrypt the SSH private key if the SSH - private key is encrypted. - :type pass_phrase: ~azure.mgmt.datafactory.models.SecretBase - :param skip_host_key_validation: If true, skip the SSH host key - validation. Default value is false. Type: boolean (or Expression with - resultType boolean). - :type skip_host_key_validation: object - :param host_key_fingerprint: The host key finger-print of the SFTP server. - When SkipHostKeyValidation is false, HostKeyFingerprint should be - specified. Type: string (or Expression with resultType string). 
- :type host_key_fingerprint: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'private_key_path': {'key': 'typeProperties.privateKeyPath', 'type': 'object'}, - 'private_key_content': {'key': 'typeProperties.privateKeyContent', 'type': 'SecretBase'}, - 'pass_phrase': {'key': 'typeProperties.passPhrase', 'type': 'SecretBase'}, - 'skip_host_key_validation': {'key': 'typeProperties.skipHostKeyValidation', 'type': 'object'}, - 'host_key_fingerprint': {'key': 'typeProperties.hostKeyFingerprint', 'type': 'object'}, - } - - def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, private_key_path=None, private_key_content=None, pass_phrase=None, skip_host_key_validation=None, host_key_fingerprint=None, **kwargs) -> None: - super(SftpServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.port = port - self.authentication_type = authentication_type - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential - self.private_key_path = private_key_path - self.private_key_content = private_key_content - self.pass_phrase = pass_phrase - self.skip_host_key_validation = skip_host_key_validation - self.host_key_fingerprint = host_key_fingerprint - self.type = 'Sftp' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service.py index ee5311dceb7a..d5f4a5fd2e26 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service.py @@ -15,8 +15,6 @@ class ShopifyLinkedService(LinkedService): """Shopify Service linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,9 +29,9 @@ class ShopifyLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param host: Required. The endpoint of the Shopify server. (i.e. 
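
SftpServerLinkedService now leads with host, its only required field; everything else, including the authentication choice, is keyword-optional. A Basic-authentication sketch (host, user, and fingerprint values are placeholders):

    from azure.mgmt.datafactory.models import SecureString, SftpServerLinkedService

    # With skip_host_key_validation left false, the docstring above says a
    # host key fingerprint should be supplied.
    sftp = SftpServerLinkedService(
        host='sftp.example.com',
        port=22,
        authentication_type='Basic',
        user_name='loader',
        password=SecureString(value='<password>'),
        skip_host_key_validation=False,
        host_key_fingerprint='ssh-rsa 2048 00:11:22:33:44:55:66:77',
    )
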
+ :param host: The endpoint of the Shopify server. (i.e. mystore.myshopify.com) :type host: object :param access_token: The API access token that can be used to access @@ -75,12 +73,12 @@ class ShopifyLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(ShopifyLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.access_token = kwargs.get('access_token', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, host, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, access_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None): + super(ShopifyLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.host = host + self.access_token = access_token + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential self.type = 'Shopify' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service_py3.py deleted file mode 100644 index ea6189277552..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service_py3.py +++ /dev/null @@ -1,86 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class ShopifyLinkedService(LinkedService): - """Shopify Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The endpoint of the Shopify server. (i.e. - mystore.myshopify.com) - :type host: object - :param access_token: The API access token that can be used to access - Shopify’s data. 
The token won't expire if it is offline mode. - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(ShopifyLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.access_token = access_token - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'Shopify' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset.py index ab3e475b9c97..16b4d8caba37 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset.py @@ -15,8 +15,6 @@ class ShopifyObjectDataset(Dataset): """Shopify Service dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class ShopifyObjectDataset(Dataset): dataset. 
Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class ShopifyObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -66,7 +64,7 @@ class ShopifyObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(ShopifyObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): + super(ShopifyObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name self.type = 'ShopifyObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset_py3.py deleted file mode 100644 index 98b9c43c21e8..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class ShopifyObjectDataset(Dataset): - """Shopify Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(ShopifyObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'ShopifyObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source.py index d4596976d459..c5a05de69491 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source.py @@ -15,8 +15,6 @@ class ShopifySource(CopySource): """A copy activity Shopify Service source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class ShopifySource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
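For orientation, the ShopifySource surface above reduces to the four inherited copy-source knobs plus `query`. A minimal construction sketch, assuming the models import from azure.mgmt.datafactory.models as elsewhere in this package (the query text is a placeholder, not a documented example):

    from azure.mgmt.datafactory.models import ShopifySource

    # All parameters are optional; keyword arguments bind by name, so the
    # call shape is the same for the kwargs-based and positional __init__.
    source = ShopifySource(
        query="orders",                # placeholder query expression
        source_retry_count=3,          # integer, or an ADF expression
        source_retry_wait="00:00:30",  # hh:mm:ss, per the documented pattern
        max_concurrent_connections=1,
    )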
@@ -51,7 +49,7 @@ class ShopifySource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(ShopifySource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'ShopifySource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source_py3.py deleted file mode 100644 index 6b56edd62904..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class ShopifySource(CopySource): - """A copy activity Shopify Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'ShopifySource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service.py index 4f9ab49a7bba..9eeea5cf6481 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service.py @@ -15,8 +15,6 @@ class SparkLinkedService(LinkedService): """Spark Server linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,12 +29,12 @@ class SparkLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param host: Required. IP address or host name of the Spark server + :param host: IP address or host name of the Spark server :type host: object - :param port: Required. The TCP port that the Spark server uses to listen - for client connections. + :param port: The TCP port that the Spark server uses to listen for client + connections. :type port: object :param server_type: The type of Spark server. Possible values include: 'SharkServer', 'SharkServer2', 'SparkThriftServer' @@ -45,8 +43,8 @@ class SparkLinkedService(LinkedService): Thrift layer. Possible values include: 'Binary', 'SASL', 'HTTP ' :type thrift_transport_protocol: str or ~azure.mgmt.datafactory.models.SparkThriftTransportProtocol - :param authentication_type: Required. The authentication method used to - access the Spark server. Possible values include: 'Anonymous', 'Username', + :param authentication_type: The authentication method used to access the + Spark server. 
Possible values include: 'Anonymous', 'Username', 'UsernameAndPassword', 'WindowsAzureHDInsightService' :type authentication_type: str or ~azure.mgmt.datafactory.models.SparkAuthenticationType @@ -112,20 +110,20 @@ class SparkLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SparkLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.port = kwargs.get('port', None) - self.server_type = kwargs.get('server_type', None) - self.thrift_transport_protocol = kwargs.get('thrift_transport_protocol', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.http_path = kwargs.get('http_path', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, host, port, authentication_type, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, server_type=None, thrift_transport_protocol=None, username=None, password=None, http_path=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None): + super(SparkLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.host = host + self.port = port + self.server_type = server_type + self.thrift_transport_protocol = thrift_transport_protocol + self.authentication_type = authentication_type + self.username = username + self.password = password + self.http_path = http_path + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.encrypted_credential = encrypted_credential self.type = 'Spark' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service_py3.py deleted file mode 100644 index f6433b6ab187..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service_py3.py +++ /dev/null @@ -1,131 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class SparkLinkedService(LinkedService): - """Spark Server linked service. 
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. IP address or host name of the Spark server - :type host: object - :param port: Required. The TCP port that the Spark server uses to listen - for client connections. - :type port: object - :param server_type: The type of Spark server. Possible values include: - 'SharkServer', 'SharkServer2', 'SparkThriftServer' - :type server_type: str or ~azure.mgmt.datafactory.models.SparkServerType - :param thrift_transport_protocol: The transport protocol to use in the - Thrift layer. Possible values include: 'Binary', 'SASL', 'HTTP ' - :type thrift_transport_protocol: str or - ~azure.mgmt.datafactory.models.SparkThriftTransportProtocol - :param authentication_type: Required. The authentication method used to - access the Spark server. Possible values include: 'Anonymous', 'Username', - 'UsernameAndPassword', 'WindowsAzureHDInsightService' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.SparkAuthenticationType - :param username: The user name that you use to access Spark Server. - :type username: object - :param password: The password corresponding to the user name that you - provided in the Username field - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param http_path: The partial URL corresponding to the Spark server. - :type http_path: object - :param enable_ssl: Specifies whether the connections to the server are - encrypted using SSL. The default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing - trusted CA certificates for verifying the server when connecting over SSL. - This property can only be set when using SSL on self-hosted IR. The - default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate - from the system trust store or from a specified PEM file. The default - value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a - CA-issued SSL certificate name to match the host name of the server when - connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow - self-signed certificates from the server. The default value is false. - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'port': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, - 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, port, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, server_type=None, thrift_transport_protocol=None, username=None, password=None, http_path=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: - super(SparkLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.port = port - self.server_type = server_type - self.thrift_transport_protocol = thrift_transport_protocol - self.authentication_type = authentication_type - self.username = username - self.password = password - self.http_path = http_path - self.enable_ssl = enable_ssl - self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch - self.allow_self_signed_server_cert = allow_self_signed_server_cert - self.encrypted_credential = encrypted_credential - self.type = 'Spark' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset.py index bdbdf067e1ea..d270a75921f3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset.py @@ -15,8 +15,6 @@ class SparkObjectDataset(Dataset): """Spark Server dataset. 
- All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class SparkObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class SparkObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. @@ -74,9 +72,9 @@ class SparkObjectDataset(Dataset): 'spark_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SparkObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.spark_object_dataset_schema = kwargs.get('spark_object_dataset_schema', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, spark_object_dataset_schema=None): + super(SparkObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name + self.table = table + self.spark_object_dataset_schema = spark_object_dataset_schema self.type = 'SparkObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset_py3.py deleted file mode 100644 index afe383951f1c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class SparkObjectDataset(Dataset): - """Spark Server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. 
- :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of the Spark. Type: string (or Expression - with resultType string). - :type table: object - :param spark_object_dataset_schema: The schema name of the Spark. Type: - string (or Expression with resultType string). - :type spark_object_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'spark_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, spark_object_dataset_schema=None, **kwargs) -> None: - super(SparkObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.table = table - self.spark_object_dataset_schema = spark_object_dataset_schema - self.type = 'SparkObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source.py index 6d670c1c6b2a..ed271ea59c92 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source.py @@ -15,8 +15,6 @@ class SparkSource(CopySource): """A copy activity Spark Server source. - All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class SparkSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). @@ -51,7 +49,7 @@ class SparkSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SparkSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'SparkSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source_py3.py deleted file mode 100644 index 8da01b0cd823..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class SparkSource(CopySource): - """A copy activity Spark Server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'SparkSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink.py index 8fe57eaa3595..3631e11983d7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink.py @@ -15,8 +15,6 @@ class SqlDWSink(CopySink): """A copy activity SQL Data Warehouse sink. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -38,7 +36,7 @@ class SqlDWSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). 
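Since this class mainly adds the PolyBase switches, here is a hedged sketch of a SqlDWSink that opts in; the pre-copy script and the PolybaseSettings value are assumptions for illustration, not defaults:

    from azure.mgmt.datafactory.models import PolybaseSettings, SqlDWSink

    sink = SqlDWSink(
        pre_copy_script="TRUNCATE TABLE stg.Sales",  # hypothetical staging step
        allow_poly_base=True,
        poly_base_settings=PolybaseSettings(use_type_default=True),
        table_option="autoCreate",  # the only value documented as supported
    )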
@@ -74,10 +72,10 @@ class SqlDWSink(CopySink): 'table_option': {'key': 'tableOption', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SqlDWSink, self).__init__(**kwargs) - self.pre_copy_script = kwargs.get('pre_copy_script', None) - self.allow_poly_base = kwargs.get('allow_poly_base', None) - self.poly_base_settings = kwargs.get('poly_base_settings', None) - self.table_option = kwargs.get('table_option', None) + def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, allow_poly_base=None, poly_base_settings=None, table_option=None): + super(SqlDWSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.pre_copy_script = pre_copy_script + self.allow_poly_base = allow_poly_base + self.poly_base_settings = poly_base_settings + self.table_option = table_option self.type = 'SqlDWSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink_py3.py deleted file mode 100644 index 6f9241560e59..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink_py3.py +++ /dev/null @@ -1,83 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class SqlDWSink(CopySink): - """A copy activity SQL Data Warehouse sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression - with resultType string). 
- :type pre_copy_script: object - :param allow_poly_base: Indicates to use PolyBase to copy data into SQL - Data Warehouse when applicable. Type: boolean (or Expression with - resultType boolean). - :type allow_poly_base: object - :param poly_base_settings: Specifies PolyBase-related settings when - allowPolyBase is true. - :type poly_base_settings: ~azure.mgmt.datafactory.models.PolybaseSettings - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, - 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, allow_poly_base=None, poly_base_settings=None, table_option=None, **kwargs) -> None: - super(SqlDWSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.pre_copy_script = pre_copy_script - self.allow_poly_base = allow_poly_base - self.poly_base_settings = poly_base_settings - self.table_option = table_option - self.type = 'SqlDWSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source.py index 1a020672f7c2..d3e7690a5264 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source.py @@ -15,8 +15,6 @@ class SqlDWSource(CopySource): """A copy activity SQL Data Warehouse source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class SqlDWSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or Expression with resultType string). 
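The matching source side follows; per the docstring, sql_reader_query and sql_reader_stored_procedure_name cannot be used together, and stored_procedure_parameters takes the plain-dict shape the docstring's own example shows (the procedure name here is a placeholder):

    from azure.mgmt.datafactory.models import SqlDWSource

    source = SqlDWSource(
        # Either a reader query or a stored procedure, never both:
        sql_reader_stored_procedure_name="dbo.usp_GetFactSales",  # placeholder
        stored_procedure_parameters={"Parameter1": {"value": "1", "type": "int"}},
    )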
@@ -62,9 +60,9 @@ class SqlDWSource(CopySource): 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SqlDWSource, self).__init__(**kwargs) - self.sql_reader_query = kwargs.get('sql_reader_query', None) - self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None): + super(SqlDWSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters self.type = 'SqlDWSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source_py3.py deleted file mode 100644 index ae8fe605024f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source_py3.py +++ /dev/null @@ -1,70 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class SqlDWSource(CopySource): - """A copy activity SQL Data Warehouse source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or - Expression with resultType string). - :type sql_reader_query: object - :param sql_reader_stored_procedure_name: Name of the stored procedure for - a SQL Data Warehouse source. This cannot be used at the same time as - SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: object - :param stored_procedure_parameters: Value and type setting for stored - procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". 
- Type: object (or Expression with resultType object), itemType: - StoredProcedureParameter. - :type stored_procedure_parameters: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, - 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: - super(SqlDWSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.sql_reader_query = sql_reader_query - self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name - self.stored_procedure_parameters = stored_procedure_parameters - self.type = 'SqlDWSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink.py index 6a11990fc720..cfed228321d6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink.py @@ -15,8 +15,6 @@ class SqlMISink(CopySink): """A copy activity Azure SQL Managed Instance sink. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -38,7 +36,7 @@ class SqlMISink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). 
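For the stored-procedure write path these parameters describe, a sketch with hypothetical procedure and table-type names; note that on this sink, stored_procedure_parameters is a typed map of StoredProcedureParameter rather than a plain dict:

    from azure.mgmt.datafactory.models import SqlMISink, StoredProcedureParameter

    sink = SqlMISink(
        sql_writer_stored_procedure_name="dbo.usp_UpsertSales",  # hypothetical
        sql_writer_table_type="SalesTableType",                  # hypothetical
        stored_procedure_table_type_parameter_name="sales",
        stored_procedure_parameters={
            "batchId": StoredProcedureParameter(value="1", type="Int"),
        },
        table_option="autoCreate",
    )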
@@ -82,12 +80,12 @@ class SqlMISink(CopySink): 'table_option': {'key': 'tableOption', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SqlMISink, self).__init__(**kwargs) - self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) - self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) - self.pre_copy_script = kwargs.get('pre_copy_script', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) - self.table_option = kwargs.get('table_option', None) + def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None): + super(SqlMISink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name + self.sql_writer_table_type = sql_writer_table_type + self.pre_copy_script = pre_copy_script + self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.table_option = table_option self.type = 'SqlMISink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink_py3.py deleted file mode 100644 index 16fe41cf47f7..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink_py3.py +++ /dev/null @@ -1,93 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class SqlMISink(CopySink): - """A copy activity Azure SQL Managed Instance sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. 
Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_writer_stored_procedure_name: SQL writer stored procedure name. - Type: string (or Expression with resultType string). - :type sql_writer_stored_procedure_name: object - :param sql_writer_table_type: SQL writer table type. Type: string (or - Expression with resultType string). - :type sql_writer_table_type: object - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression - with resultType string). - :type pre_copy_script: object - :param stored_procedure_parameters: SQL stored procedure parameters. - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param stored_procedure_table_type_parameter_name: The stored procedure - parameter name of the table type. Type: string (or Expression with - resultType string). - :type stored_procedure_table_type_parameter_name: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, - 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None, **kwargs) -> None: - super(SqlMISink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name - self.sql_writer_table_type = sql_writer_table_type - self.pre_copy_script = pre_copy_script - self.stored_procedure_parameters = stored_procedure_parameters - self.stored_procedure_table_type_parameter_name = 
stored_procedure_table_type_parameter_name - self.table_option = table_option - self.type = 'SqlMISink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source.py index 4d4db9b09281..2a1c93867a30 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source.py @@ -15,8 +15,6 @@ class SqlMISource(CopySource): """A copy activity Azure SQL Managed Instance source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class SqlMISource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). @@ -64,10 +62,10 @@ class SqlMISource(CopySource): 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SqlMISource, self).__init__(**kwargs) - self.sql_reader_query = kwargs.get('sql_reader_query', None) - self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - self.produce_additional_types = kwargs.get('produce_additional_types', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None): + super(SqlMISource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.produce_additional_types = produce_additional_types self.type = 'SqlMISource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source_py3.py deleted file mode 100644 index 952bc7b4da4f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source_py3.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class SqlMISource(CopySource): - """A copy activity Azure SQL Managed Instance source. 
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_reader_query: SQL reader query. Type: string (or Expression - with resultType string). - :type sql_reader_query: object - :param sql_reader_stored_procedure_name: Name of the stored procedure for - a Azure SQL Managed Instance source. This cannot be used at the same time - as SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: object - :param stored_procedure_parameters: Value and type setting for stored - procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param produce_additional_types: Which additional types to produce. - :type produce_additional_types: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, - 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: - super(SqlMISource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.sql_reader_query = sql_reader_query - self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name - self.stored_procedure_parameters = stored_procedure_parameters - self.produce_additional_types = produce_additional_types - self.type = 'SqlMISource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service.py index 45d342212ea4..f894634427fe 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service.py +++ 
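# Illustrative sketch: pairing the SQL Managed Instance source and sink above
# inside a copy activity. CopyActivity and DatasetReference are assumed to come
# from the same models package; the activity and dataset names here are
# hypothetical placeholders, and only the 'autoCreate' table_option value is
# supported per the docstring above.
from azure.mgmt.datafactory.models import (
    CopyActivity, DatasetReference, SqlMISink, SqlMISource)

mi_source = SqlMISource(sql_reader_query="SELECT * FROM dbo.Orders")
mi_sink = SqlMISink(
    pre_copy_script="TRUNCATE TABLE dbo.OrdersStaging",
    table_option="autoCreate")

copy_to_mi = CopyActivity(
    name="CopyOrdersToManagedInstance",
    source=mi_source,
    sink=mi_sink,
    inputs=[DatasetReference(reference_name="SourceOrdersDataset")],
    outputs=[DatasetReference(reference_name="ManagedInstanceOrdersDataset")])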
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service.py @@ -15,8 +15,6 @@ class SqlServerLinkedService(LinkedService): """SQL Server linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,9 +29,9 @@ class SqlServerLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param connection_string: Required. The connection string. Type: string, + :param connection_string: The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param user_name: The on-premises Windows authentication user name. Type: @@ -65,10 +63,10 @@ class SqlServerLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SqlServerLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, connection_string, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, user_name=None, password=None, encrypted_credential=None): + super(SqlServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.connection_string = connection_string + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential self.type = 'SqlServer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service_py3.py deleted file mode 100644 index 3eb8c5063dc1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service_py3.py +++ /dev/null @@ -1,74 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class SqlServerLinkedService(LinkedService): - """SQL Server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. 
- :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Required. The connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param user_name: The on-premises Windows authentication user name. Type: - string (or Expression with resultType string). - :type user_name: object - :param password: The on-premises Windows authentication password. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(SqlServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'SqlServer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink.py index b3cbe492bbf2..0cdda8b343a3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink.py @@ -15,8 +15,6 @@ class SqlServerSink(CopySink): """A copy activity SQL server sink. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -38,7 +36,7 @@ class SqlServerSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param sql_writer_stored_procedure_name: SQL writer stored procedure name. 
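# Illustrative sketch: constructing the SQL Server linked service defined
# above, assuming the companion SecureString secret model from the same
# package; the server, database, and credential values are placeholders.
from azure.mgmt.datafactory.models import SecureString, SqlServerLinkedService

sql_server_ls = SqlServerLinkedService(
    connection_string="Server=myserver;Database=mydb;Integrated Security=True;",
    user_name="CONTOSO\\etl_user",
    password=SecureString(value="<password>"))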
Type: string (or Expression with resultType string). @@ -82,12 +80,12 @@ class SqlServerSink(CopySink): 'table_option': {'key': 'tableOption', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SqlServerSink, self).__init__(**kwargs) - self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) - self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) - self.pre_copy_script = kwargs.get('pre_copy_script', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) - self.table_option = kwargs.get('table_option', None) + def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None): + super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name + self.sql_writer_table_type = sql_writer_table_type + self.pre_copy_script = pre_copy_script + self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.table_option = table_option self.type = 'SqlServerSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink_py3.py deleted file mode 100644 index dd5daf2c5660..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink_py3.py +++ /dev/null @@ -1,93 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class SqlServerSink(CopySink): - """A copy activity SQL server sink. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. 
Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_writer_stored_procedure_name: SQL writer stored procedure name. - Type: string (or Expression with resultType string). - :type sql_writer_stored_procedure_name: object - :param sql_writer_table_type: SQL writer table type. Type: string (or - Expression with resultType string). - :type sql_writer_table_type: object - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression - with resultType string). - :type pre_copy_script: object - :param stored_procedure_parameters: SQL stored procedure parameters. - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param stored_procedure_table_type_parameter_name: The stored procedure - parameter name of the table type. Type: string (or Expression with - resultType string). - :type stored_procedure_table_type_parameter_name: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, - 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None, **kwargs) -> None: - super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name - self.sql_writer_table_type = sql_writer_table_type - self.pre_copy_script = pre_copy_script - self.stored_procedure_parameters = stored_procedure_parameters - self.stored_procedure_table_type_parameter_name = 
stored_procedure_table_type_parameter_name - self.table_option = table_option - self.type = 'SqlServerSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source.py index f9aa011047ea..a8f6984d95a5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source.py @@ -15,8 +15,6 @@ class SqlServerSource(CopySource): """A copy activity SQL server source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class SqlServerSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). @@ -64,10 +62,10 @@ class SqlServerSource(CopySource): 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SqlServerSource, self).__init__(**kwargs) - self.sql_reader_query = kwargs.get('sql_reader_query', None) - self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - self.produce_additional_types = kwargs.get('produce_additional_types', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None): + super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.produce_additional_types = produce_additional_types self.type = 'SqlServerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source_py3.py deleted file mode 100644 index 27d12985e595..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source_py3.py +++ /dev/null @@ -1,73 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class SqlServerSource(CopySource): - """A copy activity SQL server source. 
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_reader_query: SQL reader query. Type: string (or Expression - with resultType string). - :type sql_reader_query: object - :param sql_reader_stored_procedure_name: Name of the stored procedure for - a SQL Database source. This cannot be used at the same time as - SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: object - :param stored_procedure_parameters: Value and type setting for stored - procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param produce_additional_types: Which additional types to produce. - :type produce_additional_types: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, - 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: - super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.sql_reader_query = sql_reader_query - self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name - self.stored_procedure_parameters = stored_procedure_parameters - self.produce_additional_types = produce_additional_types - self.type = 'SqlServerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity.py index 6f31002f32d1..bcf259485df2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity.py 
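# Illustrative sketch: reading through a stored procedure with the
# SqlServerSource above. Per the docstring, sql_reader_stored_procedure_name
# cannot be combined with sql_reader_query, and each StoredProcedureParameter
# carries the value/type pair shown in the docstring example; the procedure
# and parameter names are hypothetical, and "Int" is assumed to be a valid
# StoredProcedureParameterType value.
from azure.mgmt.datafactory.models import (
    SqlServerSource, StoredProcedureParameter)

sql_server_source = SqlServerSource(
    sql_reader_stored_procedure_name="dbo.GetChangedRows",
    stored_procedure_parameters={
        "SinceId": StoredProcedureParameter(value="1", type="Int")})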
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity.py @@ -15,12 +15,10 @@ class SqlServerStoredProcedureActivity(ExecutionActivity): """SQL stored procedure activity type. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Activity name. :type name: str :param description: Activity description. :type description: str @@ -28,15 +26,15 @@ class SqlServerStoredProcedureActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param stored_procedure_name: Required. Stored procedure name. Type: - string (or Expression with resultType string). + :param stored_procedure_name: Stored procedure name. Type: string (or + Expression with resultType string). :type stored_procedure_name: object :param stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". @@ -63,8 +61,8 @@ class SqlServerStoredProcedureActivity(ExecutionActivity): 'stored_procedure_parameters': {'key': 'typeProperties.storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, } - def __init__(self, **kwargs): - super(SqlServerStoredProcedureActivity, self).__init__(**kwargs) - self.stored_procedure_name = kwargs.get('stored_procedure_name', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + def __init__(self, name, stored_procedure_name, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, stored_procedure_parameters=None): + super(SqlServerStoredProcedureActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) + self.stored_procedure_name = stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters self.type = 'SqlServerStoredProcedure' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity_py3.py deleted file mode 100644 index 477f0c6c775c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity_py3.py +++ /dev/null @@ -1,70 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. 
-# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class SqlServerStoredProcedureActivity(ExecutionActivity): - """SQL stored procedure activity type. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param stored_procedure_name: Required. Stored procedure name. Type: - string (or Expression with resultType string). - :type stored_procedure_name: object - :param stored_procedure_parameters: Value and type setting for stored - procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'stored_procedure_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'stored_procedure_name': {'key': 'typeProperties.storedProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'typeProperties.storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - } - - def __init__(self, *, name: str, stored_procedure_name, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, stored_procedure_parameters=None, **kwargs) -> None: - super(SqlServerStoredProcedureActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.stored_procedure_name = stored_procedure_name - self.stored_procedure_parameters = stored_procedure_parameters - self.type = 'SqlServerStoredProcedure' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py index 3998671ee8ae..f8cb18e56be1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py +++ 
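# Illustrative sketch: invoking a stored procedure with the activity defined
# above; name and stored_procedure_name are the two required arguments in the
# new signature. The linked service reference, procedure name, and the "Date"
# parameter type are hypothetical placeholders.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, SqlServerStoredProcedureActivity,
    StoredProcedureParameter)

refresh_activity = SqlServerStoredProcedureActivity(
    name="RefreshAggregates",
    stored_procedure_name="dbo.usp_RefreshAggregates",
    linked_service_name=LinkedServiceReference(reference_name="SqlServerLS"),
    stored_procedure_parameters={
        "RunDate": StoredProcedureParameter(value="2019-06-07", type="Date")})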
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py @@ -15,8 +15,6 @@ class SqlServerTableDataset(Dataset): """The on-premises SQL Server dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class SqlServerTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class SqlServerTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. @@ -74,9 +72,9 @@ class SqlServerTableDataset(Dataset): 'table': {'key': 'typeProperties.table', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SqlServerTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.sql_server_table_dataset_schema = kwargs.get('sql_server_table_dataset_schema', None) - self.table = kwargs.get('table', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, sql_server_table_dataset_schema=None, table=None): + super(SqlServerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name + self.sql_server_table_dataset_schema = sql_server_table_dataset_schema + self.table = table self.type = 'SqlServerTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py deleted file mode 100644 index 989780c9bfda..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class SqlServerTableDataset(Dataset): - """The on-premises SQL Server dataset. - - All required parameters must be populated in order to send to Azure. 
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param sql_server_table_dataset_schema: The schema name of the SQL Server - dataset. Type: string (or Expression with resultType string). - :type sql_server_table_dataset_schema: object - :param table: The table name of the SQL Server dataset. Type: string (or - Expression with resultType string). - :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'sql_server_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, sql_server_table_dataset_schema=None, table=None, **kwargs) -> None: - super(SqlServerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.sql_server_table_dataset_schema = sql_server_table_dataset_schema - self.table = table - self.type = 'SqlServerTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py index 3a81c5f7ea2f..e35d38558224 100644 --- 
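# Illustrative sketch: defining the dataset above with the newer schema and
# table properties instead of the retired table_name; the linked service
# reference name is a hypothetical placeholder. Note that the schema name is
# exposed as sql_server_table_dataset_schema but serialized to
# typeProperties.schema, per the attribute map above.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, SqlServerTableDataset)

orders_dataset = SqlServerTableDataset(
    linked_service_name=LinkedServiceReference(reference_name="SqlServerLS"),
    sql_server_table_dataset_schema="dbo",
    table="Orders")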
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py @@ -15,8 +15,6 @@ class SqlSink(CopySink): """A copy activity SQL sink. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -38,7 +36,7 @@ class SqlSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). @@ -82,12 +80,12 @@ class SqlSink(CopySink): 'table_option': {'key': 'tableOption', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SqlSink, self).__init__(**kwargs) - self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) - self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) - self.pre_copy_script = kwargs.get('pre_copy_script', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) - self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) - self.table_option = kwargs.get('table_option', None) + def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None): + super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name + self.sql_writer_table_type = sql_writer_table_type + self.pre_copy_script = pre_copy_script + self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.table_option = table_option self.type = 'SqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py deleted file mode 100644 index d33810d9abef..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py +++ /dev/null @@ -1,93 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_sink_py3 import CopySink - - -class SqlSink(CopySink): - """A copy activity SQL sink. 
- - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param write_batch_size: Write batch size. Type: integer (or Expression - with resultType integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or - Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression - with resultType integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with - resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the sink data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_writer_stored_procedure_name: SQL writer stored procedure name. - Type: string (or Expression with resultType string). - :type sql_writer_stored_procedure_name: object - :param sql_writer_table_type: SQL writer table type. Type: string (or - Expression with resultType string). - :type sql_writer_table_type: object - :param pre_copy_script: SQL pre-copy script. Type: string (or Expression - with resultType string). - :type pre_copy_script: object - :param stored_procedure_parameters: SQL stored procedure parameters. - :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - :param stored_procedure_table_type_parameter_name: The stored procedure - parameter name of the table type. Type: string (or Expression with - resultType string). - :type stored_procedure_table_type_parameter_name: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). 
- :type table_option: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, - 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, - 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None, **kwargs) -> None: - super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name - self.sql_writer_table_type = sql_writer_table_type - self.pre_copy_script = pre_copy_script - self.stored_procedure_parameters = stored_procedure_parameters - self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name - self.table_option = table_option - self.type = 'SqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source.py index bb31474b1f7c..3c723a28cb39 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source.py @@ -15,8 +15,6 @@ class SqlSource(CopySource): """A copy activity SQL source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class SqlSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). 
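# Illustrative sketch: a SqlSink that writes through a stored procedure taking
# a table-valued parameter, using the sql_writer_* properties documented
# above; the procedure, table type, and parameter names are hypothetical.
from azure.mgmt.datafactory.models import SqlSink

upsert_sink = SqlSink(
    sql_writer_stored_procedure_name="dbo.usp_UpsertOrders",
    sql_writer_table_type="OrdersTableType",
    stored_procedure_table_type_parameter_name="Orders")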
@@ -61,9 +59,9 @@ class SqlSource(CopySource): 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, } - def __init__(self, **kwargs): - super(SqlSource, self).__init__(**kwargs) - self.sql_reader_query = kwargs.get('sql_reader_query', None) - self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) - self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None): + super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters self.type = 'SqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source_py3.py deleted file mode 100644 index dcad458fd4a6..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source_py3.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class SqlSource(CopySource): - """A copy activity SQL source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param sql_reader_query: SQL reader query. Type: string (or Expression - with resultType string). - :type sql_reader_query: object - :param sql_reader_stored_procedure_name: Name of the stored procedure for - a SQL Database source. This cannot be used at the same time as - SqlReaderQuery. Type: string (or Expression with resultType string). - :type sql_reader_stored_procedure_name: object - :param stored_procedure_parameters: Value and type setting for stored - procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". 
- :type stored_procedure_parameters: dict[str, - ~azure.mgmt.datafactory.models.StoredProcedureParameter] - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, - 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: - super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.sql_reader_query = sql_reader_query - self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name - self.stored_procedure_parameters = stored_procedure_parameters - self.type = 'SqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service.py index 4edfc8b211f7..cbfc41bf357d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service.py @@ -15,8 +15,6 @@ class SquareLinkedService(LinkedService): """Square Service linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,19 +29,17 @@ class SquareLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param host: Required. The URL of the Square instance. (i.e. - mystore.mysquare.com) + :param host: The URL of the Square instance. (i.e. mystore.mysquare.com) :type host: object - :param client_id: Required. The client ID associated with your Square - application. + :param client_id: The client ID associated with your Square application. :type client_id: object :param client_secret: The client secret associated with your Square application. :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param redirect_uri: Required. The redirect URL assigned in the Square - application dashboard. (i.e. http://localhost:2500) + :param redirect_uri: The redirect URL assigned in the Square application + dashboard. (i.e. http://localhost:2500) :type redirect_uri: object :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. 
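Although the docstring above no longer flags them as required, the constructor rewritten in the next hunk keeps host, client_id and redirect_uri as positional parameters. A hedged sketch of constructing the linked service (all values are placeholders; SecureString is the models class used elsewhere in this patch for secrets):

```python
# Sketch only: placeholder values throughout.
from azure.mgmt.datafactory.models import SecureString, SquareLinkedService

square_ls = SquareLinkedService(
    host="mystore.mysquare.com",
    client_id="<client-id>",
    redirect_uri="http://localhost:2500",
    client_secret=SecureString(value="<client-secret>"),
    use_encrypted_endpoints=True,
)
```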
@@ -85,14 +81,14 @@ class SquareLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SquareLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.redirect_uri = kwargs.get('redirect_uri', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, host, client_id, redirect_uri, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None): + super(SquareLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.host = host + self.client_id = client_id + self.client_secret = client_secret + self.redirect_uri = redirect_uri + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential self.type = 'Square' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service_py3.py deleted file mode 100644 index 40719f600a18..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service_py3.py +++ /dev/null @@ -1,98 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class SquareLinkedService(LinkedService): - """Square Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The URL of the Square instance. (i.e. - mystore.mysquare.com) - :type host: object - :param client_id: Required. The client ID associated with your Square - application. 
- :type client_id: object - :param client_secret: The client secret associated with your Square - application. - :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param redirect_uri: Required. The redirect URL assigned in the Square - application dashboard. (i.e. http://localhost:2500) - :type redirect_uri: object - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'client_id': {'required': True}, - 'redirect_uri': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'redirect_uri': {'key': 'typeProperties.redirectUri', 'type': 'object'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, client_id, redirect_uri, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(SquareLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.client_id = client_id - self.client_secret = client_secret - self.redirect_uri = redirect_uri - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'Square' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset.py index 3903382d2e3a..8292782f5fdf 100644 --- 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset.py @@ -15,8 +15,6 @@ class SquareObjectDataset(Dataset): """Square Service dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class SquareObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class SquareObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -66,7 +64,7 @@ class SquareObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SquareObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): + super(SquareObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name self.type = 'SquareObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset_py3.py deleted file mode 100644 index 6d624dc6feef..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class SquareObjectDataset(Dataset): - """Square Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. 
- :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(SquareObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'SquareObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source.py index f083df43f13a..7ef5e5912ff0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source.py @@ -15,8 +15,6 @@ class SquareSource(CopySource): """A copy activity Square Service source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class SquareSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
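Taken together, the Square dataset and source changes pair up as follows; a sketch assuming the patched models (the linked-service name, table name and query are hypothetical):

```python
# Sketch only: names are placeholders.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, SquareObjectDataset, SquareSource)

dataset = SquareObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name="SquareService"),
    table_name="Payments",
)
source = SquareSource(query="SELECT * FROM Payments")
```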
@@ -51,7 +49,7 @@ class SquareSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SquareSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'SquareSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source_py3.py deleted file mode 100644 index ec8a741d564c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class SquareSource(CopySource): - """A copy activity Square Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'SquareSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential.py index 63512fdec4d8..072c579366c4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential.py @@ -15,13 +15,11 @@ class SSISAccessCredential(Model): """SSIS access credential. - All required parameters must be populated in order to send to Azure. - - :param domain: Required. Domain for windows authentication. + :param domain: Domain for Windows authentication. :type domain: object - :param user_name: Required. UseName for windows authentication. + :param user_name: UserName for Windows authentication. :type user_name: object - :param password: Required. Password for windows authentication. + :param password: Password for Windows authentication. :type password: ~azure.mgmt.datafactory.models.SecureString """ @@ -37,8 +35,8 @@ class SSISAccessCredential(Model): 'password': {'key': 'password', 'type': 'SecureString'}, } - def __init__(self, **kwargs): - super(SSISAccessCredential, self).__init__(**kwargs) - self.domain = kwargs.get('domain', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) + def __init__(self, domain, user_name, password): + super(SSISAccessCredential, self).__init__() + self.domain = domain + self.user_name = user_name + self.password = password diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential_py3.py deleted file mode 100644 index 5df0fc8941da..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential_py3.py +++ /dev/null @@ -1,44 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SSISAccessCredential(Model): - """SSIS access credential.
- - All required parameters must be populated in order to send to Azure. - - :param domain: Required. Domain for windows authentication. - :type domain: object - :param user_name: Required. UseName for windows authentication. - :type user_name: object - :param password: Required. Password for windows authentication. - :type password: ~azure.mgmt.datafactory.models.SecureString - """ - - _validation = { - 'domain': {'required': True}, - 'user_name': {'required': True}, - 'password': {'required': True}, - } - - _attribute_map = { - 'domain': {'key': 'domain', 'type': 'object'}, - 'user_name': {'key': 'userName', 'type': 'object'}, - 'password': {'key': 'password', 'type': 'SecureString'}, - } - - def __init__(self, *, domain, user_name, password, **kwargs) -> None: - super(SSISAccessCredential, self).__init__(**kwargs) - self.domain = domain - self.user_name = user_name - self.password = password diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment.py index 5dff9764e2a2..95d2e838ec9b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment.py @@ -15,15 +15,13 @@ class SsisEnvironment(SsisObjectMetadata): """Ssis environment. - All required parameters must be populated in order to send to Azure. - :param id: Metadata id. :type id: long :param name: Metadata name. :type name: str :param description: Metadata description. :type description: str - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param folder_id: Folder id which contains environment. :type folder_id: long @@ -44,8 +42,8 @@ class SsisEnvironment(SsisObjectMetadata): 'variables': {'key': 'variables', 'type': '[SsisVariable]'}, } - def __init__(self, **kwargs): - super(SsisEnvironment, self).__init__(**kwargs) - self.folder_id = kwargs.get('folder_id', None) - self.variables = kwargs.get('variables', None) + def __init__(self, id=None, name=None, description=None, folder_id=None, variables=None): + super(SsisEnvironment, self).__init__(id=id, name=name, description=description) + self.folder_id = folder_id + self.variables = variables self.type = 'Environment' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_py3.py deleted file mode 100644 index 43697ba62146..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_py3.py +++ /dev/null @@ -1,51 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .ssis_object_metadata_py3 import SsisObjectMetadata - - -class SsisEnvironment(SsisObjectMetadata): - """Ssis environment. - - All required parameters must be populated in order to send to Azure. - - :param id: Metadata id. - :type id: long - :param name: Metadata name. 
- :type name: str - :param description: Metadata description. - :type description: str - :param type: Required. Constant filled by server. - :type type: str - :param folder_id: Folder id which contains environment. - :type folder_id: long - :param variables: Variable in environment - :type variables: list[~azure.mgmt.datafactory.models.SsisVariable] - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_id': {'key': 'folderId', 'type': 'long'}, - 'variables': {'key': 'variables', 'type': '[SsisVariable]'}, - } - - def __init__(self, *, id: int=None, name: str=None, description: str=None, folder_id: int=None, variables=None, **kwargs) -> None: - super(SsisEnvironment, self).__init__(id=id, name=name, description=description, **kwargs) - self.folder_id = folder_id - self.variables = variables - self.type = 'Environment' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference.py index e7d31d369392..9a035fa35f35 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference.py @@ -32,9 +32,9 @@ class SsisEnvironmentReference(Model): 'reference_type': {'key': 'referenceType', 'type': 'str'}, } - def __init__(self, **kwargs): - super(SsisEnvironmentReference, self).__init__(**kwargs) - self.id = kwargs.get('id', None) - self.environment_folder_name = kwargs.get('environment_folder_name', None) - self.environment_name = kwargs.get('environment_name', None) - self.reference_type = kwargs.get('reference_type', None) + def __init__(self, id=None, environment_folder_name=None, environment_name=None, reference_type=None): + super(SsisEnvironmentReference, self).__init__() + self.id = id + self.environment_folder_name = environment_folder_name + self.environment_name = environment_name + self.reference_type = reference_type diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference_py3.py deleted file mode 100644 index 14cbfca99d4f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference_py3.py +++ /dev/null @@ -1,40 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SsisEnvironmentReference(Model): - """Ssis environment reference. - - :param id: Environment reference id. - :type id: long - :param environment_folder_name: Environment folder name. - :type environment_folder_name: str - :param environment_name: Environment name. 
- :type environment_name: str - :param reference_type: Reference type - :type reference_type: str - """ - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'environment_folder_name': {'key': 'environmentFolderName', 'type': 'str'}, - 'environment_name': {'key': 'environmentName', 'type': 'str'}, - 'reference_type': {'key': 'referenceType', 'type': 'str'}, - } - - def __init__(self, *, id: int=None, environment_folder_name: str=None, environment_name: str=None, reference_type: str=None, **kwargs) -> None: - super(SsisEnvironmentReference, self).__init__(**kwargs) - self.id = id - self.environment_folder_name = environment_folder_name - self.environment_name = environment_name - self.reference_type = reference_type diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential.py index c090694416a9..3c56344ca7aa 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential.py @@ -15,13 +15,11 @@ class SSISExecutionCredential(Model): """SSIS package execution credential. - All required parameters must be populated in order to send to Azure. - - :param domain: Required. Domain for windows authentication. + :param domain: Domain for Windows authentication. :type domain: object - :param user_name: Required. UseName for windows authentication. + :param user_name: UserName for Windows authentication. :type user_name: object - :param password: Required. Password for windows authentication. + :param password: Password for Windows authentication. :type password: ~azure.mgmt.datafactory.models.SecureString """ @@ -37,8 +35,8 @@ class SSISExecutionCredential(Model): 'password': {'key': 'password', 'type': 'SecureString'}, } - def __init__(self, **kwargs): - super(SSISExecutionCredential, self).__init__(**kwargs) - self.domain = kwargs.get('domain', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) + def __init__(self, domain, user_name, password): + super(SSISExecutionCredential, self).__init__() + self.domain = domain + self.user_name = user_name + self.password = password diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential_py3.py deleted file mode 100644 index 051eaffa2bf2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential_py3.py +++ /dev/null @@ -1,44 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SSISExecutionCredential(Model): - """SSIS package execution credential. - - All required parameters must be populated in order to send to Azure. - - :param domain: Required. Domain for windows authentication.
- :type domain: object - :param user_name: Required. UseName for windows authentication. - :type user_name: object - :param password: Required. Password for windows authentication. - :type password: ~azure.mgmt.datafactory.models.SecureString - """ - - _validation = { - 'domain': {'required': True}, - 'user_name': {'required': True}, - 'password': {'required': True}, - } - - _attribute_map = { - 'domain': {'key': 'domain', 'type': 'object'}, - 'user_name': {'key': 'userName', 'type': 'object'}, - 'password': {'key': 'password', 'type': 'SecureString'}, - } - - def __init__(self, *, domain, user_name, password, **kwargs) -> None: - super(SSISExecutionCredential, self).__init__(**kwargs) - self.domain = domain - self.user_name = user_name - self.password = password diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter.py index 36f295c5a4aa..e57afb5b5798 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter.py @@ -15,10 +15,8 @@ class SSISExecutionParameter(Model): """SSIS execution parameter. - All required parameters must be populated in order to send to Azure. - - :param value: Required. SSIS package execution parameter value. Type: - string (or Expression with resultType string). + :param value: SSIS package execution parameter value. Type: string (or + Expression with resultType string). :type value: object """ @@ -30,6 +28,6 @@ class SSISExecutionParameter(Model): 'value': {'key': 'value', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SSISExecutionParameter, self).__init__(**kwargs) - self.value = kwargs.get('value', None) + def __init__(self, value): + super(SSISExecutionParameter, self).__init__() + self.value = value diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter_py3.py deleted file mode 100644 index cd10dd457a42..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter_py3.py +++ /dev/null @@ -1,35 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SSISExecutionParameter(Model): - """SSIS execution parameter. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. SSIS package execution parameter value. Type: - string (or Expression with resultType string). 
- :type value: object - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': 'object'}, - } - - def __init__(self, *, value, **kwargs) -> None: - super(SSISExecutionParameter, self).__init__(**kwargs) - self.value = value diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder.py index 350b0d92852b..c2fdf4ce689f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder.py @@ -15,15 +15,13 @@ class SsisFolder(SsisObjectMetadata): """Ssis folder. - All required parameters must be populated in order to send to Azure. - :param id: Metadata id. :type id: long :param name: Metadata name. :type name: str :param description: Metadata description. :type description: str - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str """ @@ -31,13 +29,6 @@ class SsisFolder(SsisObjectMetadata): 'type': {'required': True}, } - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(SsisFolder, self).__init__(**kwargs) + def __init__(self, id=None, name=None, description=None): + super(SsisFolder, self).__init__(id=id, name=name, description=description) self.type = 'Folder' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder_py3.py deleted file mode 100644 index d6483fda2c08..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder_py3.py +++ /dev/null @@ -1,43 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .ssis_object_metadata_py3 import SsisObjectMetadata - - -class SsisFolder(SsisObjectMetadata): - """Ssis folder. - - All required parameters must be populated in order to send to Azure. - - :param id: Metadata id. - :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. - :type description: str - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, *, id: int=None, name: str=None, description: str=None, **kwargs) -> None: - super(SsisFolder, self).__init__(id=id, name=name, description=description, **kwargs) - self.type = 'Folder' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location.py index cfdebe717541..96e3bdc053c4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location.py @@ -18,13 +18,10 @@ class SSISLogLocation(Model): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - - :param log_path: Required. The SSIS package execution log path. Type: - string (or Expression with resultType string). + :param log_path: The SSIS package execution log path. Type: string (or + Expression with resultType string). :type log_path: object - :ivar type: Required. The type of SSIS log location. Default value: "File" - . + :ivar type: The type of SSIS log location. Default value: "File". :vartype type: str :param access_credential: The package execution log access credential. :type access_credential: @@ -50,8 +47,8 @@ class SSISLogLocation(Model): type = "File" - def __init__(self, **kwargs): - super(SSISLogLocation, self).__init__(**kwargs) - self.log_path = kwargs.get('log_path', None) - self.access_credential = kwargs.get('access_credential', None) - self.log_refresh_interval = kwargs.get('log_refresh_interval', None) + def __init__(self, log_path, access_credential=None, log_refresh_interval=None): + super(SSISLogLocation, self).__init__() + self.log_path = log_path + self.access_credential = access_credential + self.log_refresh_interval = log_refresh_interval diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location_py3.py deleted file mode 100644 index de4fbe35dcb3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SSISLogLocation(Model): - """SSIS package execution log location. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param log_path: Required. The SSIS package execution log path. Type: - string (or Expression with resultType string).
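Note that SSISLogLocation keeps type as the class-level constant "File", so callers pass only log_path plus the optional credential and refresh interval. A sketch with placeholder values (the UNC path, domain, account and password are hypothetical):

```python
# Sketch only: all values are placeholders.
from azure.mgmt.datafactory.models import (
    SecureString, SSISAccessCredential, SSISLogLocation)

log_location = SSISLogLocation(
    log_path="\\\\fileserver\\ssislogs",  # escaped UNC path
    access_credential=SSISAccessCredential(
        domain="CORP",
        user_name="svc_ssis",
        password=SecureString(value="<password>"),
    ),
    log_refresh_interval="00:05:00",  # matches the documented timespan pattern
)
```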
- :type log_path: object - :ivar type: Required. The type of SSIS log location. Default value: "File" - . - :vartype type: str - :param access_credential: The package execution log access credential. - :type access_credential: - ~azure.mgmt.datafactory.models.SSISAccessCredential - :param log_refresh_interval: Specifies the interval to refresh log. The - default interval is 5 minutes. Type: string (or Expression with resultType - string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type log_refresh_interval: object - """ - - _validation = { - 'log_path': {'required': True}, - 'type': {'required': True, 'constant': True}, - } - - _attribute_map = { - 'log_path': {'key': 'logPath', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, - 'log_refresh_interval': {'key': 'typeProperties.logRefreshInterval', 'type': 'object'}, - } - - type = "File" - - def __init__(self, *, log_path, access_credential=None, log_refresh_interval=None, **kwargs) -> None: - super(SSISLogLocation, self).__init__(**kwargs) - self.log_path = log_path - self.access_credential = access_credential - self.log_refresh_interval = log_refresh_interval diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata.py index 811075137f41..b14732944663 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata.py @@ -18,15 +18,13 @@ class SsisObjectMetadata(Model): You probably want to use the sub-classes and not this class directly. Known sub-classes are: SsisEnvironment, SsisPackage, SsisProject, SsisFolder - All required parameters must be populated in order to send to Azure. - :param id: Metadata id. :type id: long :param name: Metadata name. :type name: str :param description: Metadata description. :type description: str - :param type: Required. Constant filled by server. + :param type: Constant filled by server. 
:type type: str """ @@ -45,9 +43,9 @@ class SsisObjectMetadata(Model): 'type': {'Environment': 'SsisEnvironment', 'Package': 'SsisPackage', 'Project': 'SsisProject', 'Folder': 'SsisFolder'} } - def __init__(self, **kwargs): - super(SsisObjectMetadata, self).__init__(**kwargs) - self.id = kwargs.get('id', None) - self.name = kwargs.get('name', None) - self.description = kwargs.get('description', None) + def __init__(self, id=None, name=None, description=None): + super(SsisObjectMetadata, self).__init__() + self.id = id + self.name = name + self.description = description self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response.py index a029c9f7ebc4..2d07080418d8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response.py @@ -27,7 +27,7 @@ class SsisObjectMetadataListResponse(Model): 'next_link': {'key': 'nextLink', 'type': 'str'}, } - def __init__(self, **kwargs): - super(SsisObjectMetadataListResponse, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.next_link = kwargs.get('next_link', None) + def __init__(self, value=None, next_link=None): + super(SsisObjectMetadataListResponse, self).__init__() + self.value = value + self.next_link = next_link diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response_py3.py deleted file mode 100644 index 79931e1ceaf7..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response_py3.py +++ /dev/null @@ -1,33 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SsisObjectMetadataListResponse(Model): - """A list of SSIS object metadata. - - :param value: List of SSIS object metadata. - :type value: list[~azure.mgmt.datafactory.models.SsisObjectMetadata] - :param next_link: The link to the next page of results, if any remaining - results exist. 
- :type next_link: str - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[SsisObjectMetadata]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__(self, *, value=None, next_link: str=None, **kwargs) -> None: - super(SsisObjectMetadataListResponse, self).__init__(**kwargs) - self.value = value - self.next_link = next_link diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_py3.py deleted file mode 100644 index 45f7e15af4fa..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_py3.py +++ /dev/null @@ -1,53 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SsisObjectMetadata(Model): - """SSIS object metadata. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SsisEnvironment, SsisPackage, SsisProject, SsisFolder - - All required parameters must be populated in order to send to Azure. - - :param id: Metadata id. - :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. - :type description: str - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'Environment': 'SsisEnvironment', 'Package': 'SsisPackage', 'Project': 'SsisProject', 'Folder': 'SsisFolder'} - } - - def __init__(self, *, id: int=None, name: str=None, description: str=None, **kwargs) -> None: - super(SsisObjectMetadata, self).__init__(**kwargs) - self.id = id - self.name = name - self.description = description - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response.py index 9b782613ee08..51424c6aefcc 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response.py @@ -32,9 +32,9 @@ class SsisObjectMetadataStatusResponse(Model): 'error': {'key': 'error', 'type': 'str'}, } - def __init__(self, **kwargs): - super(SsisObjectMetadataStatusResponse, self).__init__(**kwargs) - self.status = kwargs.get('status', None) - self.name = kwargs.get('name', None) - self.properties = kwargs.get('properties', None) - self.error = kwargs.get('error', None) + def __init__(self, status=None, name=None, properties=None, error=None): + super(SsisObjectMetadataStatusResponse, self).__init__() + self.status = status + self.name = name + self.properties = properties + self.error = error diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response_py3.py deleted file mode 100644 index a4b82b8f6bcd..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response_py3.py +++ /dev/null @@ -1,40 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SsisObjectMetadataStatusResponse(Model): - """The status of the operation. - - :param status: The status of the operation. - :type status: str - :param name: The operation name. - :type name: str - :param properties: The operation properties. - :type properties: str - :param error: The operation error message. 
- :type error: str - """ - - _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'str'}, - } - - def __init__(self, *, status: str=None, name: str=None, properties: str=None, error: str=None, **kwargs) -> None: - super(SsisObjectMetadataStatusResponse, self).__init__(**kwargs) - self.status = status - self.name = name - self.properties = properties - self.error = error diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package.py index b04fc1138797..9b1d18ce1ee6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package.py @@ -15,15 +15,13 @@ class SsisPackage(SsisObjectMetadata): """Ssis Package. - All required parameters must be populated in order to send to Azure. - :param id: Metadata id. :type id: long :param name: Metadata name. :type name: str :param description: Metadata description. :type description: str - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param folder_id: Folder id which contains package. :type folder_id: long @@ -50,10 +48,10 @@ class SsisPackage(SsisObjectMetadata): 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, } - def __init__(self, **kwargs): - super(SsisPackage, self).__init__(**kwargs) - self.folder_id = kwargs.get('folder_id', None) - self.project_version = kwargs.get('project_version', None) - self.project_id = kwargs.get('project_id', None) - self.parameters = kwargs.get('parameters', None) + def __init__(self, id=None, name=None, description=None, folder_id=None, project_version=None, project_id=None, parameters=None): + super(SsisPackage, self).__init__(id=id, name=name, description=description) + self.folder_id = folder_id + self.project_version = project_version + self.project_id = project_id + self.parameters = parameters self.type = 'Package' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location.py index 248d0aa9b8ae..3e44834cdfb0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location.py @@ -15,10 +15,8 @@ class SSISPackageLocation(Model): """SSIS package location. - All required parameters must be populated in order to send to Azure. - - :param package_path: Required. The SSIS package path. Type: string (or - Expression with resultType string). + :param package_path: The SSIS package path. Type: string (or Expression + with resultType string). :type package_path: object :param type: The type of SSIS package location. 
Possible values include: 'SSISDB', 'File' @@ -45,10 +43,10 @@ class SSISPackageLocation(Model): 'configuration_path': {'key': 'typeProperties.configurationPath', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SSISPackageLocation, self).__init__(**kwargs) - self.package_path = kwargs.get('package_path', None) - self.type = kwargs.get('type', None) - self.package_password = kwargs.get('package_password', None) - self.access_credential = kwargs.get('access_credential', None) - self.configuration_path = kwargs.get('configuration_path', None) + def __init__(self, package_path, type=None, package_password=None, access_credential=None, configuration_path=None): + super(SSISPackageLocation, self).__init__() + self.package_path = package_path + self.type = type + self.package_password = package_password + self.access_credential = access_credential + self.configuration_path = configuration_path diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location_py3.py deleted file mode 100644 index cc442d8d35b8..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location_py3.py +++ /dev/null @@ -1,54 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SSISPackageLocation(Model): - """SSIS package location. - - All required parameters must be populated in order to send to Azure. - - :param package_path: Required. The SSIS package path. Type: string (or - Expression with resultType string). - :type package_path: object - :param type: The type of SSIS package location. Possible values include: - 'SSISDB', 'File' - :type type: str or ~azure.mgmt.datafactory.models.SsisPackageLocationType - :param package_password: Password of the package. - :type package_password: ~azure.mgmt.datafactory.models.SecureString - :param access_credential: The package access credential. - :type access_credential: - ~azure.mgmt.datafactory.models.SSISAccessCredential - :param configuration_path: The configuration file of the package - execution. Type: string (or Expression with resultType string). 
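For completeness, a package location built against the patched constructor, together with an execution parameter of the kind consumed by the SSIS execution activity, might look like this (a sketch; the path, password and parameter values are placeholders):

```python
# Sketch only: placeholder values; 'SSISDB' is one of the documented
# SsisPackageLocationType values.
from azure.mgmt.datafactory.models import (
    SecureString, SSISExecutionParameter, SSISPackageLocation)

location = SSISPackageLocation(
    package_path="Folder/Project/Package.dtsx",
    type="SSISDB",
    package_password=SecureString(value="<package-password>"),
)
parameters = {"MaxRows": SSISExecutionParameter(value="1000")}
```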
- :type configuration_path: object - """ - - _validation = { - 'package_path': {'required': True}, - } - - _attribute_map = { - 'package_path': {'key': 'packagePath', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'package_password': {'key': 'typeProperties.packagePassword', 'type': 'SecureString'}, - 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, - 'configuration_path': {'key': 'typeProperties.configurationPath', 'type': 'object'}, - } - - def __init__(self, *, package_path, type=None, package_password=None, access_credential=None, configuration_path=None, **kwargs) -> None: - super(SSISPackageLocation, self).__init__(**kwargs) - self.package_path = package_path - self.type = type - self.package_password = package_password - self.access_credential = access_credential - self.configuration_path = configuration_path diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_py3.py deleted file mode 100644 index e1e932e97ae6..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_py3.py +++ /dev/null @@ -1,59 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .ssis_object_metadata_py3 import SsisObjectMetadata - - -class SsisPackage(SsisObjectMetadata): - """Ssis Package. - - All required parameters must be populated in order to send to Azure. - - :param id: Metadata id. - :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. - :type description: str - :param type: Required. Constant filled by server. - :type type: str - :param folder_id: Folder id which contains package. - :type folder_id: long - :param project_version: Project version which contains package. - :type project_version: long - :param project_id: Project id which contains package. 
- :type project_id: long - :param parameters: Parameters in package - :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_id': {'key': 'folderId', 'type': 'long'}, - 'project_version': {'key': 'projectVersion', 'type': 'long'}, - 'project_id': {'key': 'projectId', 'type': 'long'}, - 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, - } - - def __init__(self, *, id: int=None, name: str=None, description: str=None, folder_id: int=None, project_version: int=None, project_id: int=None, parameters=None, **kwargs) -> None: - super(SsisPackage, self).__init__(id=id, name=name, description=description, **kwargs) - self.folder_id = folder_id - self.project_version = project_version - self.project_id = project_id - self.parameters = parameters - self.type = 'Package' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter.py index c456af0bab48..c75b2cf9a1f0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter.py @@ -56,17 +56,17 @@ class SsisParameter(Model): 'variable': {'key': 'variable', 'type': 'str'}, } - def __init__(self, **kwargs): - super(SsisParameter, self).__init__(**kwargs) - self.id = kwargs.get('id', None) - self.name = kwargs.get('name', None) - self.description = kwargs.get('description', None) - self.data_type = kwargs.get('data_type', None) - self.required = kwargs.get('required', None) - self.sensitive = kwargs.get('sensitive', None) - self.design_default_value = kwargs.get('design_default_value', None) - self.default_value = kwargs.get('default_value', None) - self.sensitive_default_value = kwargs.get('sensitive_default_value', None) - self.value_type = kwargs.get('value_type', None) - self.value_set = kwargs.get('value_set', None) - self.variable = kwargs.get('variable', None) + def __init__(self, id=None, name=None, description=None, data_type=None, required=None, sensitive=None, design_default_value=None, default_value=None, sensitive_default_value=None, value_type=None, value_set=None, variable=None): + super(SsisParameter, self).__init__() + self.id = id + self.name = name + self.description = description + self.data_type = data_type + self.required = required + self.sensitive = sensitive + self.design_default_value = design_default_value + self.default_value = default_value + self.sensitive_default_value = sensitive_default_value + self.value_type = value_type + self.value_set = value_set + self.variable = variable diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter_py3.py deleted file mode 100644 index 6a4ff73768f0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SsisParameter(Model): - """Ssis parameter. - - :param id: Parameter id. - :type id: long - :param name: Parameter name. - :type name: str - :param description: Parameter description. - :type description: str - :param data_type: Parameter type. - :type data_type: str - :param required: Whether parameter is required. - :type required: bool - :param sensitive: Whether parameter is sensitive. - :type sensitive: bool - :param design_default_value: Design default value of parameter. - :type design_default_value: str - :param default_value: Default value of parameter. - :type default_value: str - :param sensitive_default_value: Default sensitive value of parameter. - :type sensitive_default_value: str - :param value_type: Parameter value type. - :type value_type: str - :param value_set: Parameter value set. - :type value_set: bool - :param variable: Parameter reference variable. - :type variable: str - """ - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'required': {'key': 'required', 'type': 'bool'}, - 'sensitive': {'key': 'sensitive', 'type': 'bool'}, - 'design_default_value': {'key': 'designDefaultValue', 'type': 'str'}, - 'default_value': {'key': 'defaultValue', 'type': 'str'}, - 'sensitive_default_value': {'key': 'sensitiveDefaultValue', 'type': 'str'}, - 'value_type': {'key': 'valueType', 'type': 'str'}, - 'value_set': {'key': 'valueSet', 'type': 'bool'}, - 'variable': {'key': 'variable', 'type': 'str'}, - } - - def __init__(self, *, id: int=None, name: str=None, description: str=None, data_type: str=None, required: bool=None, sensitive: bool=None, design_default_value: str=None, default_value: str=None, sensitive_default_value: str=None, value_type: str=None, value_set: bool=None, variable: str=None, **kwargs) -> None: - super(SsisParameter, self).__init__(**kwargs) - self.id = id - self.name = name - self.description = description - self.data_type = data_type - self.required = required - self.sensitive = sensitive - self.design_default_value = design_default_value - self.default_value = default_value - self.sensitive_default_value = sensitive_default_value - self.value_type = value_type - self.value_set = value_set - self.variable = variable diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project.py index c29a36fb628e..979a07bb1e80 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project.py @@ -15,15 +15,13 @@ class SsisProject(SsisObjectMetadata): """Ssis project. - All required parameters must be populated in order to send to Azure. - :param id: Metadata id. :type id: long :param name: Metadata name. :type name: str :param description: Metadata description. :type description: str - :param type: Required. Constant filled by server. + :param type: Constant filled by server. 
:type type: str :param folder_id: Folder id which contains project. :type folder_id: long @@ -51,10 +49,10 @@ class SsisProject(SsisObjectMetadata): 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, } - def __init__(self, **kwargs): - super(SsisProject, self).__init__(**kwargs) - self.folder_id = kwargs.get('folder_id', None) - self.version = kwargs.get('version', None) - self.environment_refs = kwargs.get('environment_refs', None) - self.parameters = kwargs.get('parameters', None) + def __init__(self, id=None, name=None, description=None, folder_id=None, version=None, environment_refs=None, parameters=None): + super(SsisProject, self).__init__(id=id, name=name, description=description) + self.folder_id = folder_id + self.version = version + self.environment_refs = environment_refs + self.parameters = parameters self.type = 'Project' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project_py3.py deleted file mode 100644 index 11b95a644e2f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project_py3.py +++ /dev/null @@ -1,60 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .ssis_object_metadata_py3 import SsisObjectMetadata - - -class SsisProject(SsisObjectMetadata): - """Ssis project. - - All required parameters must be populated in order to send to Azure. - - :param id: Metadata id. - :type id: long - :param name: Metadata name. - :type name: str - :param description: Metadata description. - :type description: str - :param type: Required. Constant filled by server. - :type type: str - :param folder_id: Folder id which contains project. - :type folder_id: long - :param version: Project version. 
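SsisProject, like the other SSIS metadata models in this patch, is normally deserialized from a service response rather than constructed by callers; the relaxed constructor mainly eases testing. A hedged sketch of reading its parameters, using a hand-built instance as a stand-in for a real response:

    from azure.mgmt.datafactory.models import SsisParameter, SsisProject

    # Hypothetical stand-in for a project returned by the service.
    project = SsisProject(id=1, name='ETL', parameters=[
        SsisParameter(name='BatchSize', data_type='Int32', required=True),
    ])
    for param in project.parameters or []:   # parameters may be None
        print(param.name, param.data_type,
              'required' if param.required else 'optional')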
- :type version: long - :param environment_refs: Environment reference in project - :type environment_refs: - list[~azure.mgmt.datafactory.models.SsisEnvironmentReference] - :param parameters: Parameters in project - :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_id': {'key': 'folderId', 'type': 'long'}, - 'version': {'key': 'version', 'type': 'long'}, - 'environment_refs': {'key': 'environmentRefs', 'type': '[SsisEnvironmentReference]'}, - 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, - } - - def __init__(self, *, id: int=None, name: str=None, description: str=None, folder_id: int=None, version: int=None, environment_refs=None, parameters=None, **kwargs) -> None: - super(SsisProject, self).__init__(id=id, name=name, description=description, **kwargs) - self.folder_id = folder_id - self.version = version - self.environment_refs = environment_refs - self.parameters = parameters - self.type = 'Project' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override.py index 30b78594e6ab..85e6d4645b9a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override.py @@ -15,10 +15,8 @@ class SSISPropertyOverride(Model): """SSIS property override. - All required parameters must be populated in order to send to Azure. - - :param value: Required. SSIS package property override value. Type: string - (or Expression with resultType string). + :param value: SSIS package property override value. Type: string (or + Expression with resultType string). :type value: object :param is_sensitive: Whether SSIS package property override value is sensitive data. Value will be encrypted in SSISDB if it is true @@ -34,7 +32,7 @@ class SSISPropertyOverride(Model): 'is_sensitive': {'key': 'isSensitive', 'type': 'bool'}, } - def __init__(self, **kwargs): - super(SSISPropertyOverride, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.is_sensitive = kwargs.get('is_sensitive', None) + def __init__(self, value, is_sensitive=None): + super(SSISPropertyOverride, self).__init__() + self.value = value + self.is_sensitive = is_sensitive diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override_py3.py deleted file mode 100644 index b425a19adc7e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override_py3.py +++ /dev/null @@ -1,40 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
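SSISPropertyOverride keeps value as its only required argument. These overrides are typically supplied as a dict keyed by SSIS property path, for example on the Execute SSIS Package activity; the property path and secret below are illustrative:

    from azure.mgmt.datafactory.models import SSISPropertyOverride

    overrides = {
        '\\Package.Variables[User::ApiKey].Value': SSISPropertyOverride(
            value='<secret>',
            is_sensitive=True,   # ask SSISDB to encrypt the stored value
        ),
    }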
-# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SSISPropertyOverride(Model): - """SSIS property override. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. SSIS package property override value. Type: string - (or Expression with resultType string). - :type value: object - :param is_sensitive: Whether SSIS package property override value is - sensitive data. Value will be encrypted in SSISDB if it is true - :type is_sensitive: bool - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': 'object'}, - 'is_sensitive': {'key': 'isSensitive', 'type': 'bool'}, - } - - def __init__(self, *, value, is_sensitive: bool=None, **kwargs) -> None: - super(SSISPropertyOverride, self).__init__(**kwargs) - self.value = value - self.is_sensitive = is_sensitive diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable.py index 73fda3b27967..8e12fda20149 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable.py @@ -41,12 +41,12 @@ class SsisVariable(Model): 'sensitive_value': {'key': 'sensitiveValue', 'type': 'str'}, } - def __init__(self, **kwargs): - super(SsisVariable, self).__init__(**kwargs) - self.id = kwargs.get('id', None) - self.name = kwargs.get('name', None) - self.description = kwargs.get('description', None) - self.data_type = kwargs.get('data_type', None) - self.sensitive = kwargs.get('sensitive', None) - self.value = kwargs.get('value', None) - self.sensitive_value = kwargs.get('sensitive_value', None) + def __init__(self, id=None, name=None, description=None, data_type=None, sensitive=None, value=None, sensitive_value=None): + super(SsisVariable, self).__init__() + self.id = id + self.name = name + self.description = description + self.data_type = data_type + self.sensitive = sensitive + self.value = value + self.sensitive_value = sensitive_value diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable_py3.py deleted file mode 100644 index e709842ff465..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable_py3.py +++ /dev/null @@ -1,52 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SsisVariable(Model): - """Ssis variable. - - :param id: Variable id. - :type id: long - :param name: Variable name. - :type name: str - :param description: Variable description. - :type description: str - :param data_type: Variable type. - :type data_type: str - :param sensitive: Whether variable is sensitive. - :type sensitive: bool - :param value: Variable value. 
- :type value: str - :param sensitive_value: Variable sensitive value. - :type sensitive_value: str - """ - - _attribute_map = { - 'id': {'key': 'id', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'sensitive': {'key': 'sensitive', 'type': 'bool'}, - 'value': {'key': 'value', 'type': 'str'}, - 'sensitive_value': {'key': 'sensitiveValue', 'type': 'str'}, - } - - def __init__(self, *, id: int=None, name: str=None, description: str=None, data_type: str=None, sensitive: bool=None, value: str=None, sensitive_value: str=None, **kwargs) -> None: - super(SsisVariable, self).__init__(**kwargs) - self.id = id - self.name = name - self.description = description - self.data_type = data_type - self.sensitive = sensitive - self.value = value - self.sensitive_value = sensitive_value diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings.py index 05ca8dff2c52..61efe881513e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings.py @@ -15,12 +15,10 @@ class StagingSettings(Model): """Staging settings. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param linked_service_name: Required. Staging linked service reference. + :param linked_service_name: Staging linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param path: The path to storage for storing the interim data. Type: @@ -43,9 +41,9 @@ class StagingSettings(Model): 'enable_compression': {'key': 'enableCompression', 'type': 'object'}, } - def __init__(self, **kwargs): - super(StagingSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.linked_service_name = kwargs.get('linked_service_name', None) - self.path = kwargs.get('path', None) - self.enable_compression = kwargs.get('enable_compression', None) + def __init__(self, linked_service_name, additional_properties=None, path=None, enable_compression=None): + super(StagingSettings, self).__init__() + self.additional_properties = additional_properties + self.linked_service_name = linked_service_name + self.path = path + self.enable_compression = enable_compression diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings_py3.py deleted file mode 100644 index 13b4353963a3..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings_py3.py +++ /dev/null @@ -1,51 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
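For StagingSettings, linked_service_name remains the single required argument. A minimal sketch of building staged-copy settings (the linked service name and path are illustrative); a copy activity would then reference this object through its staging settings:

    from azure.mgmt.datafactory.models import LinkedServiceReference, StagingSettings

    staging = StagingSettings(
        linked_service_name=LinkedServiceReference(reference_name='StagingBlobStorage'),
        path='staging-container/interim',   # where interim data is written
        enable_compression=True,
    )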
-# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class StagingSettings(Model): - """Staging settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param linked_service_name: Required. Staging linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param path: The path to storage for storing the interim data. Type: - string (or Expression with resultType string). - :type path: object - :param enable_compression: Specifies whether to use compression when - copying data via an interim staging. Default value is false. Type: boolean - (or Expression with resultType boolean). - :type enable_compression: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'path': {'key': 'path', 'type': 'object'}, - 'enable_compression': {'key': 'enableCompression', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, path=None, enable_compression=None, **kwargs) -> None: - super(StagingSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.linked_service_name = linked_service_name - self.path = path - self.enable_compression = enable_compression diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings.py index c12c0ce8860d..0f70fa4c1339 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings.py @@ -15,12 +15,10 @@ class StoreReadSettings(Model): """Connector read setting. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. The read setting type. + :param type: The read setting type. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. 
Type: integer (or Expression with resultType @@ -38,8 +36,8 @@ class StoreReadSettings(Model): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, } - def __init__(self, **kwargs): - super(StoreReadSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = kwargs.get('type', None) - self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + def __init__(self, type, additional_properties=None, max_concurrent_connections=None): + super(StoreReadSettings, self).__init__() + self.additional_properties = additional_properties + self.type = type + self.max_concurrent_connections = max_concurrent_connections diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings_py3.py deleted file mode 100644 index e2026fd52b93..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings_py3.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class StoreReadSettings(Model): - """Connector read setting. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. The read setting type. - :type type: str - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - } - - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, **kwargs) -> None: - super(StoreReadSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type = type - self.max_concurrent_connections = max_concurrent_connections diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings.py index 728b8cdd8c89..74810b8e51b6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings.py @@ -19,8 +19,6 @@ class StoreWriteSettings(Model): sub-classes are: FileServerWriteSettings, AzureDataLakeStoreWriteSettings, AzureBlobFSWriteSettings, AzureBlobStorageWriteSettings - All required parameters must be populated in order to send to Azure. 
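StoreReadSettings keeps type as a required plain string (it is not polymorphic here, unlike StoreWriteSettings below), and connector-specific options travel in additional_properties, which msrest serializes at the top level of the payload. A sketch with illustrative values:

    from azure.mgmt.datafactory.models import StoreReadSettings

    read_settings = StoreReadSettings(
        type='AzureBlobStorageReadSettings',        # illustrative setting type
        max_concurrent_connections=4,
        additional_properties={'recursive': True},  # illustrative connector option
    )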
- :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -30,7 +28,7 @@ class StoreWriteSettings(Model): :type max_concurrent_connections: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str """ @@ -49,9 +47,9 @@ class StoreWriteSettings(Model): 'type': {'FileServerWriteSettings': 'FileServerWriteSettings', 'AzureDataLakeStoreWriteSettings': 'AzureDataLakeStoreWriteSettings', 'AzureBlobFSWriteSettings': 'AzureBlobFSWriteSettings', 'AzureBlobStorageWriteSettings': 'AzureBlobStorageWriteSettings'} } - def __init__(self, **kwargs): - super(StoreWriteSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) - self.copy_behavior = kwargs.get('copy_behavior', None) + def __init__(self, additional_properties=None, max_concurrent_connections=None, copy_behavior=None): + super(StoreWriteSettings, self).__init__() + self.additional_properties = additional_properties + self.max_concurrent_connections = max_concurrent_connections + self.copy_behavior = copy_behavior self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings_py3.py deleted file mode 100644 index 7cce5d205541..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class StoreWriteSettings(Model): - """Connector write settings. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: FileServerWriteSettings, AzureDataLakeStoreWriteSettings, - AzureBlobFSWriteSettings, AzureBlobStorageWriteSettings - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param copy_behavior: The type of copy behavior for copy sink. - :type copy_behavior: object - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'FileServerWriteSettings': 'FileServerWriteSettings', 'AzureDataLakeStoreWriteSettings': 'AzureDataLakeStoreWriteSettings', 'AzureBlobFSWriteSettings': 'AzureBlobFSWriteSettings', 'AzureBlobStorageWriteSettings': 'AzureBlobStorageWriteSettings'} - } - - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: - super(StoreWriteSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.max_concurrent_connections = max_concurrent_connections - self.copy_behavior = copy_behavior - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py index ff16595aa8c7..d480dc7f0931 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py @@ -29,7 +29,7 @@ class StoredProcedureParameter(Model): 'type': {'key': 'type', 'type': 'str'}, } - def __init__(self, **kwargs): - super(StoredProcedureParameter, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.type = kwargs.get('type', None) + def __init__(self, value=None, type=None): + super(StoredProcedureParameter, self).__init__() + self.value = value + self.type = type diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py deleted file mode 100644 index 2842ef9ae35c..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py +++ /dev/null @@ -1,35 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class StoredProcedureParameter(Model): - """SQL stored procedure parameter. - - :param value: Stored procedure parameter value. Type: string (or - Expression with resultType string). - :type value: object - :param type: Stored procedure parameter type. 
Possible values include: - 'String', 'Int', 'Int64', 'Decimal', 'Guid', 'Boolean', 'Date' - :type type: str or - ~azure.mgmt.datafactory.models.StoredProcedureParameterType - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, *, value=None, type=None, **kwargs) -> None: - super(StoredProcedureParameter, self).__init__(**kwargs) - self.value = value - self.type = type diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource.py index c80b531db7d1..89a27ab2a0c3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource.py @@ -42,8 +42,8 @@ class SubResource(Model): 'etag': {'key': 'etag', 'type': 'str'}, } - def __init__(self, **kwargs): - super(SubResource, self).__init__(**kwargs) + def __init__(self): + super(SubResource, self).__init__() self.id = None self.name = None self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource_py3.py deleted file mode 100644 index 3b2d9ec62366..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource_py3.py +++ /dev/null @@ -1,50 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class SubResource(Model): - """Azure Data Factory nested resource, which belongs to a factory. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - } - - def __init__(self, **kwargs) -> None: - super(SubResource, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.etag = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service.py index 83de0e6f61f2..6fef081a9dbc 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service.py @@ -15,8 +15,6 @@ class SybaseLinkedService(LinkedService): """Linked service for Sybase data source. 
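StoredProcedureParameter stays fully optional: both value and type default to None, with type drawn from the documented strings ('String', 'Int', 'Int64', 'Decimal', 'Guid', 'Boolean', 'Date'). These objects are usually passed as a dict of stored-procedure parameters on a SQL copy source or sink; the names and values here are illustrative:

    from azure.mgmt.datafactory.models import StoredProcedureParameter

    sproc_params = {
        'StartDate': StoredProcedureParameter(value='2019-06-01', type='Date'),
        'BatchId': StoredProcedureParameter(value='42', type='Int'),
    }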
- All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,13 +29,13 @@ class SybaseLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param server: Required. Server name for connection. Type: string (or - Expression with resultType string). + :param server: Server name for connection. Type: string (or Expression + with resultType string). :type server: object - :param database: Required. Database name for connection. Type: string (or - Expression with resultType string). + :param database: Database name for connection. Type: string (or Expression + with resultType string). :type database: object :param schema: Schema name for connection. Type: string (or Expression with resultType string). @@ -79,13 +77,13 @@ class SybaseLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SybaseLinkedService, self).__init__(**kwargs) - self.server = kwargs.get('server', None) - self.database = kwargs.get('database', None) - self.schema = kwargs.get('schema', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, server, database, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, schema=None, authentication_type=None, username=None, password=None, encrypted_credential=None): + super(SybaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.server = server + self.database = database + self.schema = schema + self.authentication_type = authentication_type + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential self.type = 'Sybase' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service_py3.py deleted file mode 100644 index 5b6cc0ce6ded..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service_py3.py +++ /dev/null @@ -1,91 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class SybaseLinkedService(LinkedService): - """Linked service for Sybase data source. - - All required parameters must be populated in order to send to Azure. 
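After this hunk, server and database lead the SybaseLinkedService signature as its two required arguments. A minimal sketch (host, database, and credential values are illustrative):

    from azure.mgmt.datafactory.models import SecureString, SybaseLinkedService

    sybase_ls = SybaseLinkedService(
        server='sybase01.contoso.com',
        database='SalesDb',
        authentication_type='Basic',   # or 'Windows'
        username='etl_user',
        password=SecureString(value='<password>'),
    )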
- - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param server: Required. Server name for connection. Type: string (or - Expression with resultType string). - :type server: object - :param database: Required. Database name for connection. Type: string (or - Expression with resultType string). - :type database: object - :param schema: Schema name for connection. Type: string (or Expression - with resultType string). - :type schema: object - :param authentication_type: AuthenticationType to be used for connection. - Possible values include: 'Basic', 'Windows' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.SybaseAuthenticationType - :param username: Username for authentication. Type: string (or Expression - with resultType string). - :type username: object - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'database': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, server, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, schema=None, authentication_type=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(SybaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.server = server - self.database = database - self.schema = schema - self.authentication_type = authentication_type - self.username = username - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'Sybase' diff 
--git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source.py index 02f89a8fca25..9fd5a579917c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source.py @@ -15,8 +15,6 @@ class SybaseSource(CopySource): """A copy activity source for Sybase databases. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class SybaseSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType string). @@ -51,7 +49,7 @@ class SybaseSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SybaseSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'SybaseSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source_py3.py deleted file mode 100644 index c11e96174349..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class SybaseSource(CopySource): - """A copy activity source for Sybase databases. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. 
- :type type: str - :param query: Database query. Type: string (or Expression with resultType - string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'SybaseSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset.py index ff2dfd5471fb..a1aab1b6cdb2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset.py @@ -15,8 +15,6 @@ class SybaseTableDataset(Dataset): """The Sybase table dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class SybaseTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class SybaseTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: The Sybase table name. Type: string (or Expression with resultType string). 
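Every argument on SybaseSource is now optional, so a source is usually just a query, given as a literal string or an ADF expression. An illustrative sketch:

    from azure.mgmt.datafactory.models import SybaseSource

    source = SybaseSource(
        query='SELECT * FROM dbo.Orders',   # illustrative query
        source_retry_count=2,
    )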
@@ -66,7 +64,7 @@ class SybaseTableDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(SybaseTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): + super(SybaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name self.type = 'SybaseTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset_py3.py deleted file mode 100644 index 88e9d3c287fe..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class SybaseTableDataset(Dataset): - """The Sybase table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The Sybase table name. Type: string (or Expression with - resultType string). 
- :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(SybaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'SybaseTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py index 6e02b0d389ab..a1da6b219d03 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py @@ -15,8 +15,6 @@ class TeradataLinkedService(LinkedService): """Linked service for Teradata data source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class TeradataLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param connection_string: Teradata ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. 
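SybaseTableDataset keeps linked_service_name as its only required argument; table_name and the rest are optional. A sketch pairing it with a linked service reference (both names are illustrative):

    from azure.mgmt.datafactory.models import LinkedServiceReference, SybaseTableDataset

    dataset = SybaseTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='SybaseLS'),
        table_name='dbo.Orders',
    )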
@@ -73,12 +71,12 @@ class TeradataLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(TeradataLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.server = kwargs.get('server', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, connection_string=None, server=None, authentication_type=None, username=None, password=None, encrypted_credential=None): + super(TeradataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.connection_string = connection_string + self.server = server + self.authentication_type = authentication_type + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential self.type = 'Teradata' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py deleted file mode 100644 index aac40efe69e0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py +++ /dev/null @@ -1,84 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class TeradataLinkedService(LinkedService): - """Linked service for Teradata data source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: Teradata ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param server: Server name for connection. Type: string (or Expression - with resultType string). - :type server: object - :param authentication_type: AuthenticationType to be used for connection. 
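TeradataLinkedService leaves every payload property optional, so it can be configured either from an ODBC connection_string or from the server and authentication properties shown above. A hedged sketch of the latter style (values illustrative):

    from azure.mgmt.datafactory.models import SecureString, TeradataLinkedService

    teradata_ls = TeradataLinkedService(
        server='teradata01.contoso.com',
        authentication_type='Basic',   # or 'Windows'
        username='etl_user',
        password=SecureString(value='<password>'),
    )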
- Possible values include: 'Basic', 'Windows' - :type authentication_type: str or - ~azure.mgmt.datafactory.models.TeradataAuthenticationType - :param username: Username for authentication. Type: string (or Expression - with resultType string). - :type username: object - :param password: Password for authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, server=None, authentication_type=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: - super(TeradataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.server = server - self.authentication_type = authentication_type - self.username = username - self.password = password - self.encrypted_credential = encrypted_credential - self.type = 'Teradata' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings.py index 0f9c023f9553..b093db33f980 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings.py @@ -35,8 +35,8 @@ class TeradataPartitionSettings(Model): 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, } - def __init__(self, **kwargs): - super(TeradataPartitionSettings, self).__init__(**kwargs) - self.partition_column_name = kwargs.get('partition_column_name', None) - self.partition_upper_bound = kwargs.get('partition_upper_bound', None) - self.partition_lower_bound = kwargs.get('partition_lower_bound', None) + def __init__(self, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None): + super(TeradataPartitionSettings, self).__init__() + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound diff --git 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings_py3.py deleted file mode 100644 index 04824e614ff2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings_py3.py +++ /dev/null @@ -1,42 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class TeradataPartitionSettings(Model): - """The settings that will be leveraged for teradata source partitioning. - - :param partition_column_name: The name of the column that will be used for - proceeding range or hash partitioning. Type: string (or Expression with - resultType string). - :type partition_column_name: object - :param partition_upper_bound: The maximum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_upper_bound: object - :param partition_lower_bound: The minimum value of column specified in - partitionColumnName that will be used for proceeding range partitioning. - Type: string (or Expression with resultType string). - :type partition_lower_bound: object - """ - - _attribute_map = { - 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, - 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, - 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, - } - - def __init__(self, *, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None: - super(TeradataPartitionSettings, self).__init__(**kwargs) - self.partition_column_name = partition_column_name - self.partition_upper_bound = partition_upper_bound - self.partition_lower_bound = partition_lower_bound diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source.py index 81d1c8e202c1..9255ab18e746 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source.py @@ -15,8 +15,6 @@ class TeradataSource(CopySource): """A copy activity Teradata source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class TeradataSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: Teradata query. Type: string (or Expression with resultType string). 
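The partition settings model above pairs with the Teradata source whose constructor hunk follows; a hedged sketch of a range-partitioned read, with the column name and bounds invented for illustration ('DynamicRange' is one of the partition options documented in this patch, alongside 'None' and 'Hash'):

from azure.mgmt.datafactory.models import (
    TeradataPartitionSettings,
    TeradataSource,
)

# Illustrative only: split the read on an integer column between two bounds.
source = TeradataSource(
    partition_option='DynamicRange',
    partition_settings=TeradataPartitionSettings(
        partition_column_name='order_id',
        partition_upper_bound='1000000',
        partition_lower_bound='1',
    ),
)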
@@ -62,9 +60,9 @@ class TeradataSource(CopySource): 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, } - def __init__(self, **kwargs): - super(TeradataSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) - self.partition_option = kwargs.get('partition_option', None) - self.partition_settings = kwargs.get('partition_settings', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, partition_option=None, partition_settings=None): + super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query + self.partition_option = partition_option + self.partition_settings = partition_settings self.type = 'TeradataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source_py3.py deleted file mode 100644 index 79d8ccb01f14..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source_py3.py +++ /dev/null @@ -1,70 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class TeradataSource(CopySource): - """A copy activity Teradata source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: Teradata query. Type: string (or Expression with resultType - string). - :type query: object - :param partition_option: The partition mechanism that will be used for - teradata read in parallel. Possible values include: 'None', 'Hash', - 'DynamicRange' - :type partition_option: str or - ~azure.mgmt.datafactory.models.TeradataPartitionOption - :param partition_settings: The settings that will be leveraged for - teradata source partitioning. 
- :type partition_settings: - ~azure.mgmt.datafactory.models.TeradataPartitionSettings - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, partition_option=None, partition_settings=None, **kwargs) -> None: - super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.partition_option = partition_option - self.partition_settings = partition_settings - self.type = 'TeradataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset.py index e396bfd6fb15..8aceb5aeeaeb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset.py @@ -15,8 +15,6 @@ class TeradataTableDataset(Dataset): """The Teradata database dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class TeradataTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class TeradataTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param database: The database name of Teradata. Type: string (or Expression with resultType string). 
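A usage sketch for the dataset whose constructor hunk follows; linked_service_name stays required, the reference name and table identifiers are placeholders, and LinkedServiceReference is assumed from the same models package:

from azure.mgmt.datafactory.models import (
    LinkedServiceReference,
    TeradataTableDataset,
)

dataset = TeradataTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='TeradataLS'),
    database='sales_dw',  # hypothetical Teradata database
    table='orders',       # hypothetical table
)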
@@ -70,8 +68,8 @@ class TeradataTableDataset(Dataset): 'table': {'key': 'typeProperties.table', 'type': 'object'}, } - def __init__(self, **kwargs): - super(TeradataTableDataset, self).__init__(**kwargs) - self.database = kwargs.get('database', None) - self.table = kwargs.get('table', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, database=None, table=None): + super(TeradataTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.database = database + self.table = table self.type = 'TeradataTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset_py3.py deleted file mode 100644 index 892707b7f133..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset_py3.py +++ /dev/null @@ -1,77 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class TeradataTableDataset(Dataset): - """The Teradata database dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param database: The database name of Teradata. Type: string (or - Expression with resultType string). - :type database: object - :param table: The table name of Teradata. Type: string (or Expression with - resultType string). 
- :type table: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, database=None, table=None, **kwargs) -> None: - super(TeradataTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.database = database - self.table = table - self.type = 'TeradataTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format.py index 48f32bf10133..60d963930e56 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format.py @@ -15,8 +15,6 @@ class TextFormat(DatasetStorageFormat): """The data stored in text format. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -26,7 +24,7 @@ class TextFormat(DatasetStorageFormat): :param deserializer: Deserializer. Type: string (or Expression with resultType string). :type deserializer: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param column_delimiter: The column delimiter. Type: string (or Expression with resultType string). 
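To make the delimiter and header parameters concrete, a small sketch of the format object whose constructor hunk follows; the values are illustrative choices, not taken from the diff:

from azure.mgmt.datafactory.models import TextFormat

text_format = TextFormat(
    column_delimiter=',',
    row_delimiter='\n',
    first_row_as_header=True,  # documented default is false
    treat_empty_as_null=True,  # documented default is true
)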
@@ -85,15 +83,15 @@ class TextFormat(DatasetStorageFormat): 'first_row_as_header': {'key': 'firstRowAsHeader', 'type': 'object'}, } - def __init__(self, **kwargs): - super(TextFormat, self).__init__(**kwargs) - self.column_delimiter = kwargs.get('column_delimiter', None) - self.row_delimiter = kwargs.get('row_delimiter', None) - self.escape_char = kwargs.get('escape_char', None) - self.quote_char = kwargs.get('quote_char', None) - self.null_value = kwargs.get('null_value', None) - self.encoding_name = kwargs.get('encoding_name', None) - self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None) - self.skip_line_count = kwargs.get('skip_line_count', None) - self.first_row_as_header = kwargs.get('first_row_as_header', None) + def __init__(self, additional_properties=None, serializer=None, deserializer=None, column_delimiter=None, row_delimiter=None, escape_char=None, quote_char=None, null_value=None, encoding_name=None, treat_empty_as_null=None, skip_line_count=None, first_row_as_header=None): + super(TextFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer) + self.column_delimiter = column_delimiter + self.row_delimiter = row_delimiter + self.escape_char = escape_char + self.quote_char = quote_char + self.null_value = null_value + self.encoding_name = encoding_name + self.treat_empty_as_null = treat_empty_as_null + self.skip_line_count = skip_line_count + self.first_row_as_header = first_row_as_header self.type = 'TextFormat' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format_py3.py deleted file mode 100644 index 0d876f62b112..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format_py3.py +++ /dev/null @@ -1,99 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_storage_format_py3 import DatasetStorageFormat - - -class TextFormat(DatasetStorageFormat): - """The data stored in text format. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param serializer: Serializer. Type: string (or Expression with resultType - string). - :type serializer: object - :param deserializer: Deserializer. Type: string (or Expression with - resultType string). - :type deserializer: object - :param type: Required. Constant filled by server. - :type type: str - :param column_delimiter: The column delimiter. Type: string (or Expression - with resultType string). - :type column_delimiter: object - :param row_delimiter: The row delimiter. Type: string (or Expression with - resultType string). - :type row_delimiter: object - :param escape_char: The escape character. Type: string (or Expression with - resultType string). - :type escape_char: object - :param quote_char: The quote character. Type: string (or Expression with - resultType string). 
- :type quote_char: object - :param null_value: The null value string. Type: string (or Expression with - resultType string). - :type null_value: object - :param encoding_name: The code page name of the preferred encoding. If - missing, the default value is "utf-8", unless BOM denotes another Unicode - encoding. Refer to the "Name" column of the table in the following - link to set supported values: - https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string - (or Expression with resultType string). - :type encoding_name: object - :param treat_empty_as_null: Treat empty column values in the text file as - null. The default value is true. Type: boolean (or Expression with - resultType boolean). - :type treat_empty_as_null: object - :param skip_line_count: The number of lines/rows to be skipped when - parsing text files. The default value is 0. Type: integer (or Expression - with resultType integer). - :type skip_line_count: object - :param first_row_as_header: When used as input, treat the first row of - data as headers. When used as output, write the headers into the output as - the first row of data. The default value is false. Type: boolean (or - Expression with resultType boolean). - :type first_row_as_header: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'serializer': {'key': 'serializer', 'type': 'object'}, - 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'column_delimiter': {'key': 'columnDelimiter', 'type': 'object'}, - 'row_delimiter': {'key': 'rowDelimiter', 'type': 'object'}, - 'escape_char': {'key': 'escapeChar', 'type': 'object'}, - 'quote_char': {'key': 'quoteChar', 'type': 'object'}, - 'null_value': {'key': 'nullValue', 'type': 'object'}, - 'encoding_name': {'key': 'encodingName', 'type': 'object'}, - 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, - 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, - 'first_row_as_header': {'key': 'firstRowAsHeader', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, column_delimiter=None, row_delimiter=None, escape_char=None, quote_char=None, null_value=None, encoding_name=None, treat_empty_as_null=None, skip_line_count=None, first_row_as_header=None, **kwargs) -> None: - super(TextFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) - self.column_delimiter = column_delimiter - self.row_delimiter = row_delimiter - self.escape_char = escape_char - self.quote_char = quote_char - self.null_value = null_value - self.encoding_name = encoding_name - self.treat_empty_as_null = treat_empty_as_null - self.skip_line_count = skip_line_count - self.first_row_as_header = first_row_as_header - self.type = 'TextFormat' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py index 728ffc32bcb5..3350057fbeae 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py @@ -23,8 +23,6 @@ class Trigger(Model): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure.
- :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -38,7 +36,7 @@ class Trigger(Model): :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str """ @@ -59,10 +57,10 @@ class Trigger(Model): 'type': {'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} } - def __init__(self, **kwargs): - super(Trigger, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.description = kwargs.get('description', None) + def __init__(self, additional_properties=None, description=None, annotations=None): + super(Trigger, self).__init__() + self.additional_properties = additional_properties + self.description = description self.runtime_state = None - self.annotations = kwargs.get('annotations', None) + self.annotations = annotations self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference.py index 089aa9a3e5fc..fb87ceffcc45 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference.py @@ -18,11 +18,9 @@ class TriggerDependencyReference(DependencyReference): You probably want to use the sub-classes and not this class directly. Known sub-classes are: TumblingWindowTriggerDependencyReference - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param reference_trigger: Required. Referenced trigger. + :param reference_trigger: Referenced trigger. :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference """ @@ -40,7 +38,7 @@ class TriggerDependencyReference(DependencyReference): 'type': {'TumblingWindowTriggerDependencyReference': 'TumblingWindowTriggerDependencyReference'} } - def __init__(self, **kwargs): - super(TriggerDependencyReference, self).__init__(**kwargs) - self.reference_trigger = kwargs.get('reference_trigger', None) + def __init__(self, reference_trigger): + super(TriggerDependencyReference, self).__init__() + self.reference_trigger = reference_trigger self.type = 'TriggerDependencyReference' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference_py3.py deleted file mode 100644 index 716a0d926f8b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference_py3.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from .dependency_reference_py3 import DependencyReference - - -class TriggerDependencyReference(DependencyReference): - """Trigger referenced dependency. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: TumblingWindowTriggerDependencyReference - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. - :type type: str - :param reference_trigger: Required. Referenced trigger. - :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference - """ - - _validation = { - 'type': {'required': True}, - 'reference_trigger': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'}, - } - - _subtype_map = { - 'type': {'TumblingWindowTriggerDependencyReference': 'TumblingWindowTriggerDependencyReference'} - } - - def __init__(self, *, reference_trigger, **kwargs) -> None: - super(TriggerDependencyReference, self).__init__(**kwargs) - self.reference_trigger = reference_trigger - self.type = 'TriggerDependencyReference' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference.py index 70c9f2904347..52d0dc2609bf 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference.py @@ -26,7 +26,7 @@ class TriggerPipelineReference(Model): 'parameters': {'key': 'parameters', 'type': '{object}'}, } - def __init__(self, **kwargs): - super(TriggerPipelineReference, self).__init__(**kwargs) - self.pipeline_reference = kwargs.get('pipeline_reference', None) - self.parameters = kwargs.get('parameters', None) + def __init__(self, pipeline_reference=None, parameters=None): + super(TriggerPipelineReference, self).__init__() + self.pipeline_reference = pipeline_reference + self.parameters = parameters diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference_py3.py deleted file mode 100644 index e32af8006326..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference_py3.py +++ /dev/null @@ -1,32 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class TriggerPipelineReference(Model): - """Pipeline that needs to be triggered with the given parameters. - - :param pipeline_reference: Pipeline reference. - :type pipeline_reference: ~azure.mgmt.datafactory.models.PipelineReference - :param parameters: Pipeline parameters. 
- :type parameters: dict[str, object] - """ - - _attribute_map = { - 'pipeline_reference': {'key': 'pipelineReference', 'type': 'PipelineReference'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, - } - - def __init__(self, *, pipeline_reference=None, parameters=None, **kwargs) -> None: - super(TriggerPipelineReference, self).__init__(**kwargs) - self.pipeline_reference = pipeline_reference - self.parameters = parameters diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py deleted file mode 100644 index 862973544ab4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py +++ /dev/null @@ -1,68 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class Trigger(Model): - """Azure data factory nested object which contains information about creating - pipeline run. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: RerunTumblingWindowTrigger, TumblingWindowTrigger, - MultiplePipelineTrigger - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when - Start/Stop APIs are called on the Trigger. Possible values include: - 'Started', 'Stopped', 'Disabled' - :vartype runtime_state: str or - ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the - trigger. - :type annotations: list[object] - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'runtime_state': {'readonly': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} - } - - def __init__(self, *, additional_properties=None, description: str=None, annotations=None, **kwargs) -> None: - super(Trigger, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.description = description - self.runtime_state = None - self.annotations = annotations - self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference.py index a4f952dac85f..bc87f79c1f24 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference.py @@ -18,12 +18,9 @@ class TriggerReference(Model): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Trigger reference type. Default value: - "TriggerReference" . + :ivar type: Trigger reference type. Default value: "TriggerReference" . :vartype type: str - :param reference_name: Required. Reference trigger name. + :param reference_name: Reference trigger name. :type reference_name: str """ @@ -39,6 +36,6 @@ class TriggerReference(Model): type = "TriggerReference" - def __init__(self, **kwargs): - super(TriggerReference, self).__init__(**kwargs) - self.reference_name = kwargs.get('reference_name', None) + def __init__(self, reference_name): + super(TriggerReference, self).__init__() + self.reference_name = reference_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference_py3.py deleted file mode 100644 index 805e407e80a7..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference_py3.py +++ /dev/null @@ -1,44 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class TriggerReference(Model): - """Trigger reference type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Trigger reference type. Default value: - "TriggerReference" . - :vartype type: str - :param reference_name: Required. Reference trigger name. 
- :type reference_name: str - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - } - - type = "TriggerReference" - - def __init__(self, *, reference_name: str, **kwargs) -> None: - super(TriggerReference, self).__init__(**kwargs) - self.reference_name = reference_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource.py index 539ac4775350..ea35206bbed7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource.py @@ -18,8 +18,6 @@ class TriggerResource(SubResource): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. :vartype id: str :ivar name: The resource name. @@ -28,7 +26,7 @@ class TriggerResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param properties: Required. Properties of the trigger. + :param properties: Properties of the trigger. :type properties: ~azure.mgmt.datafactory.models.Trigger """ @@ -48,6 +46,6 @@ class TriggerResource(SubResource): 'properties': {'key': 'properties', 'type': 'Trigger'}, } - def __init__(self, **kwargs): - super(TriggerResource, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) + def __init__(self, properties): + super(TriggerResource, self).__init__() + self.properties = properties diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource_py3.py deleted file mode 100644 index ae6a04ac3128..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource_py3.py +++ /dev/null @@ -1,53 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .sub_resource_py3 import SubResource - - -class TriggerResource(SubResource): - """Trigger resource type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Properties of the trigger. 
- :type properties: ~azure.mgmt.datafactory.models.Trigger - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'Trigger'}, - } - - def __init__(self, *, properties, **kwargs) -> None: - super(TriggerResource, self).__init__(**kwargs) - self.properties = properties diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run.py index 9fad7bbfd9fa..2fb74b9dcbab 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run.py @@ -65,9 +65,9 @@ class TriggerRun(Model): 'triggered_pipelines': {'key': 'triggeredPipelines', 'type': '{str}'}, } - def __init__(self, **kwargs): - super(TriggerRun, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) + def __init__(self, additional_properties=None): + super(TriggerRun, self).__init__() + self.additional_properties = additional_properties self.trigger_run_id = None self.trigger_name = None self.trigger_type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run_py3.py deleted file mode 100644 index 5a9fe50f6894..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run_py3.py +++ /dev/null @@ -1,78 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class TriggerRun(Model): - """Trigger runs. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :ivar trigger_run_id: Trigger run id. - :vartype trigger_run_id: str - :ivar trigger_name: Trigger name. - :vartype trigger_name: str - :ivar trigger_type: Trigger type. - :vartype trigger_type: str - :ivar trigger_run_timestamp: Trigger run start time. - :vartype trigger_run_timestamp: datetime - :ivar status: Trigger run status. Possible values include: 'Succeeded', - 'Failed', 'Inprogress' - :vartype status: str or ~azure.mgmt.datafactory.models.TriggerRunStatus - :ivar message: Trigger error message. - :vartype message: str - :ivar properties: List of property name and value related to trigger run. - Name, value pair depends on type of trigger. - :vartype properties: dict[str, str] - :ivar triggered_pipelines: List of pipeline name and run Id triggered by - the trigger run. 
- :vartype triggered_pipelines: dict[str, str] - """ - - _validation = { - 'trigger_run_id': {'readonly': True}, - 'trigger_name': {'readonly': True}, - 'trigger_type': {'readonly': True}, - 'trigger_run_timestamp': {'readonly': True}, - 'status': {'readonly': True}, - 'message': {'readonly': True}, - 'properties': {'readonly': True}, - 'triggered_pipelines': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'trigger_run_id': {'key': 'triggerRunId', 'type': 'str'}, - 'trigger_name': {'key': 'triggerName', 'type': 'str'}, - 'trigger_type': {'key': 'triggerType', 'type': 'str'}, - 'trigger_run_timestamp': {'key': 'triggerRunTimestamp', 'type': 'iso-8601'}, - 'status': {'key': 'status', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'triggered_pipelines': {'key': 'triggeredPipelines', 'type': '{str}'}, - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(TriggerRun, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.trigger_run_id = None - self.trigger_name = None - self.trigger_type = None - self.trigger_run_timestamp = None - self.status = None - self.message = None - self.properties = None - self.triggered_pipelines = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response.py index 7684fe7eb7dc..a6dbecd8a524 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response.py @@ -15,9 +15,7 @@ class TriggerRunsQueryResponse(Model): """A list of trigger runs. - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of trigger runs. + :param value: List of trigger runs. :type value: list[~azure.mgmt.datafactory.models.TriggerRun] :param continuation_token: The continuation token for getting the next page of results, if any remaining results exist, null otherwise. @@ -33,7 +31,7 @@ class TriggerRunsQueryResponse(Model): 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, } - def __init__(self, **kwargs): - super(TriggerRunsQueryResponse, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.continuation_token = kwargs.get('continuation_token', None) + def __init__(self, value, continuation_token=None): + super(TriggerRunsQueryResponse, self).__init__() + self.value = value + self.continuation_token = continuation_token diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response_py3.py deleted file mode 100644 index 391a2441b3d1..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response_py3.py +++ /dev/null @@ -1,39 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. 
-# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class TriggerRunsQueryResponse(Model): - """A list of trigger runs. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of trigger runs. - :type value: list[~azure.mgmt.datafactory.models.TriggerRun] - :param continuation_token: The continuation token for getting the next - page of results, if any remaining results exist, null otherwise. - :type continuation_token: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[TriggerRun]'}, - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, - } - - def __init__(self, *, value, continuation_token: str=None, **kwargs) -> None: - super(TriggerRunsQueryResponse, self).__init__(**kwargs) - self.value = value - self.continuation_token = continuation_token diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_subscription_operation_status.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_subscription_operation_status.py index 6a581e757840..58125f8327c3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_subscription_operation_status.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_subscription_operation_status.py @@ -36,7 +36,7 @@ class TriggerSubscriptionOperationStatus(Model): 'status': {'key': 'status', 'type': 'str'}, } - def __init__(self, **kwargs): - super(TriggerSubscriptionOperationStatus, self).__init__(**kwargs) + def __init__(self): + super(TriggerSubscriptionOperationStatus, self).__init__() self.trigger_name = None self.status = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_subscription_operation_status_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_subscription_operation_status_py3.py deleted file mode 100644 index 40ead4c50fe4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_subscription_operation_status_py3.py +++ /dev/null @@ -1,42 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class TriggerSubscriptionOperationStatus(Model): - """Defines the response of a trigger subscription operation. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar trigger_name: Trigger name. - :vartype trigger_name: str - :ivar status: Event Subscription Status. 
Possible values include: - 'Enabled', 'Provisioning', 'Deprovisioning', 'Disabled', 'Unknown' - :vartype status: str or - ~azure.mgmt.datafactory.models.EventSubscriptionStatus - """ - - _validation = { - 'trigger_name': {'readonly': True}, - 'status': {'readonly': True}, - } - - _attribute_map = { - 'trigger_name': {'key': 'triggerName', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - } - - def __init__(self, **kwargs) -> None: - super(TriggerSubscriptionOperationStatus, self).__init__(**kwargs) - self.trigger_name = None - self.status = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger.py index 939624ae5042..f69967dcc186 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger.py @@ -20,8 +20,6 @@ class TumblingWindowTrigger(Trigger): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -35,21 +33,21 @@ class TumblingWindowTrigger(Trigger): :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param pipeline: Required. Pipeline for which runs are created when an - event is fired for trigger window that is ready. + :param pipeline: Pipeline for which runs are created when an event is + fired for trigger window that is ready. :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference - :param frequency: Required. The frequency of the time windows. Possible - values include: 'Minute', 'Hour' + :param frequency: The frequency of the time windows. Possible values + include: 'Minute', 'Hour' :type frequency: str or ~azure.mgmt.datafactory.models.TumblingWindowFrequency - :param interval: Required. The interval of the time windows. The minimum - interval allowed is 15 Minutes. + :param interval: The interval of the time windows. The minimum interval + allowed is 15 Minutes. :type interval: int - :param start_time: Required. The start time for the time period for the - trigger during which events are fired for windows that are ready. Only UTC - time is currently supported. + :param start_time: The start time for the time period for the trigger + during which events are fired for windows that are ready. Only UTC time is + currently supported. :type start_time: datetime :param end_time: The end time for the time period for the trigger during which events are fired for windows that are ready. Only UTC time is @@ -60,8 +58,8 @@ class TumblingWindowTrigger(Trigger): default is 0. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type delay: object - :param max_concurrency: Required. The max number of parallel time windows - (ready for execution) for which a new run is triggered. + :param max_concurrency: The max number of parallel time windows (ready for + execution) for which a new run is triggered. 
:type max_concurrency: int :param retry_policy: Retry policy that will be applied for failed pipeline runs. @@ -98,15 +96,15 @@ class TumblingWindowTrigger(Trigger): 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[DependencyReference]'}, } - def __init__(self, **kwargs): - super(TumblingWindowTrigger, self).__init__(**kwargs) - self.pipeline = kwargs.get('pipeline', None) - self.frequency = kwargs.get('frequency', None) - self.interval = kwargs.get('interval', None) - self.start_time = kwargs.get('start_time', None) - self.end_time = kwargs.get('end_time', None) - self.delay = kwargs.get('delay', None) - self.max_concurrency = kwargs.get('max_concurrency', None) - self.retry_policy = kwargs.get('retry_policy', None) - self.depends_on = kwargs.get('depends_on', None) + def __init__(self, pipeline, frequency, interval, start_time, max_concurrency, additional_properties=None, description=None, annotations=None, end_time=None, delay=None, retry_policy=None, depends_on=None): + super(TumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations) + self.pipeline = pipeline + self.frequency = frequency + self.interval = interval + self.start_time = start_time + self.end_time = end_time + self.delay = delay + self.max_concurrency = max_concurrency + self.retry_policy = retry_policy + self.depends_on = depends_on self.type = 'TumblingWindowTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference.py index 89dcefbc8c09..67f5592f3cdb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference.py @@ -15,11 +15,9 @@ class TumblingWindowTriggerDependencyReference(TriggerDependencyReference): """Referenced tumbling window trigger dependency. - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param reference_trigger: Required. Referenced trigger. + :param reference_trigger: Referenced trigger. :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference :param offset: Timespan applied to the start time of a tumbling window when evaluating dependency. 
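With the tumbling window trigger's required parameters now explicit in the signature above, a hedged construction sketch; the pipeline name, window size, and start time are invented, and PipelineReference/TriggerPipelineReference are assumed from the same models package:

from datetime import datetime

from azure.mgmt.datafactory.models import (
    PipelineReference,
    TriggerPipelineReference,
    TumblingWindowTrigger,
)

trigger = TumblingWindowTrigger(
    pipeline=TriggerPipelineReference(
        pipeline_reference=PipelineReference(reference_name='CopyPipeline'),
        parameters={'windowStart': '@trigger().outputs.windowStartTime'},
    ),
    frequency='Hour',
    interval=1,  # one-hour windows; the documented minimum is 15 minutes
    start_time=datetime(2019, 6, 1),  # only UTC time is currently supported
    max_concurrency=10,
)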
@@ -43,8 +41,8 @@ class TumblingWindowTriggerDependencyReference(TriggerDependencyReference): 'size': {'key': 'size', 'type': 'str'}, } - def __init__(self, **kwargs): - super(TumblingWindowTriggerDependencyReference, self).__init__(**kwargs) - self.offset = kwargs.get('offset', None) - self.size = kwargs.get('size', None) + def __init__(self, reference_trigger, offset=None, size=None): + super(TumblingWindowTriggerDependencyReference, self).__init__(reference_trigger=reference_trigger) + self.offset = offset + self.size = size self.type = 'TumblingWindowTriggerDependencyReference' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference_py3.py deleted file mode 100644 index 648f25e59937..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference_py3.py +++ /dev/null @@ -1,50 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .trigger_dependency_reference_py3 import TriggerDependencyReference - - -class TumblingWindowTriggerDependencyReference(TriggerDependencyReference): - """Referenced tumbling window trigger dependency. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Constant filled by server. - :type type: str - :param reference_trigger: Required. Referenced trigger. - :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference - :param offset: Timespan applied to the start time of a tumbling window - when evaluating dependency. - :type offset: str - :param size: The size of the window when evaluating the dependency. If - undefined the frequency of the tumbling window will be used. 
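TumblingWindowTriggerDependencyReference gets the same treatment, with reference_trigger as its only required argument; offset and size must still satisfy the 8-to-15-character timespan pattern kept in _validation. A sketch with an assumed upstream trigger name:

from azure.mgmt.datafactory.models import (
    TriggerReference, TumblingWindowTriggerDependencyReference)

dependency = TumblingWindowTriggerDependencyReference(
    reference_trigger=TriggerReference(reference_name='upstreamTrigger'),
    offset='00:15:00',  # shift the dependent window by 15 minutes
    size='01:00:00',    # evaluate the dependency against a one-hour window
)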
- :type size: str - """ - - _validation = { - 'type': {'required': True}, - 'reference_trigger': {'required': True}, - 'offset': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, - 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'}, - 'offset': {'key': 'offset', 'type': 'str'}, - 'size': {'key': 'size', 'type': 'str'}, - } - - def __init__(self, *, reference_trigger, offset: str=None, size: str=None, **kwargs) -> None: - super(TumblingWindowTriggerDependencyReference, self).__init__(reference_trigger=reference_trigger, **kwargs) - self.offset = offset - self.size = size - self.type = 'TumblingWindowTriggerDependencyReference' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_py3.py deleted file mode 100644 index 6856629c8b91..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_py3.py +++ /dev/null @@ -1,112 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .trigger_py3 import Trigger - - -class TumblingWindowTrigger(Trigger): - """Trigger that schedules pipeline runs for all fixed time interval windows - from a start time without gaps and also supports backfill scenarios (when - start time is in the past). - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when - Start/Stop APIs are called on the Trigger. Possible values include: - 'Started', 'Stopped', 'Disabled' - :vartype runtime_state: str or - ~azure.mgmt.datafactory.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the - trigger. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param pipeline: Required. Pipeline for which runs are created when an - event is fired for trigger window that is ready. - :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference - :param frequency: Required. The frequency of the time windows. Possible - values include: 'Minute', 'Hour' - :type frequency: str or - ~azure.mgmt.datafactory.models.TumblingWindowFrequency - :param interval: Required. The interval of the time windows. The minimum - interval allowed is 15 Minutes. - :type interval: int - :param start_time: Required. 
The start time for the time period for the - trigger during which events are fired for windows that are ready. Only UTC - time is currently supported. - :type start_time: datetime - :param end_time: The end time for the time period for the trigger during - which events are fired for windows that are ready. Only UTC time is - currently supported. - :type end_time: datetime - :param delay: Specifies how long the trigger waits past due time before - triggering new run. It doesn't alter window start and end time. The - default is 0. Type: string (or Expression with resultType string), - pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type delay: object - :param max_concurrency: Required. The max number of parallel time windows - (ready for execution) for which a new run is triggered. - :type max_concurrency: int - :param retry_policy: Retry policy that will be applied for failed pipeline - runs. - :type retry_policy: ~azure.mgmt.datafactory.models.RetryPolicy - :param depends_on: Triggers that this trigger depends on. Only tumbling - window triggers are supported. - :type depends_on: list[~azure.mgmt.datafactory.models.DependencyReference] - """ - - _validation = { - 'runtime_state': {'readonly': True}, - 'type': {'required': True}, - 'pipeline': {'required': True}, - 'frequency': {'required': True}, - 'interval': {'required': True}, - 'start_time': {'required': True}, - 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, - 'frequency': {'key': 'typeProperties.frequency', 'type': 'str'}, - 'interval': {'key': 'typeProperties.interval', 'type': 'int'}, - 'start_time': {'key': 'typeProperties.startTime', 'type': 'iso-8601'}, - 'end_time': {'key': 'typeProperties.endTime', 'type': 'iso-8601'}, - 'delay': {'key': 'typeProperties.delay', 'type': 'object'}, - 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, - 'retry_policy': {'key': 'typeProperties.retryPolicy', 'type': 'RetryPolicy'}, - 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[DependencyReference]'}, - } - - def __init__(self, *, pipeline, frequency, interval: int, start_time, max_concurrency: int, additional_properties=None, description: str=None, annotations=None, end_time=None, delay=None, retry_policy=None, depends_on=None, **kwargs) -> None: - super(TumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) - self.pipeline = pipeline - self.frequency = frequency - self.interval = interval - self.start_time = start_time - self.end_time = end_time - self.delay = delay - self.max_concurrency = max_concurrency - self.retry_policy = retry_policy - self.depends_on = depends_on - self.type = 'TumblingWindowTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity.py index eede36501d6c..402d3f737131 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity.py +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity.py @@ -16,12 +16,10 @@ class UntilActivity(ControlActivity): """This activity executes inner activities until the specified boolean expression results to true or timeout is reached, whichever is earlier. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Activity name. :type name: str :param description: Activity description. :type description: str @@ -29,10 +27,10 @@ class UntilActivity(ControlActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param expression: Required. An expression that would evaluate to Boolean. - The loop will continue until this expression evaluates to true + :param expression: An expression that would evaluate to Boolean. The loop + will continue until this expression evaluates to true :type expression: ~azure.mgmt.datafactory.models.Expression :param timeout: Specifies the timeout for the activity to run. If there is no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 @@ -41,7 +39,7 @@ class UntilActivity(ControlActivity): string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type timeout: object - :param activities: Required. List of activities to execute. + :param activities: List of activities to execute. :type activities: list[~azure.mgmt.datafactory.models.Activity] """ @@ -64,9 +62,9 @@ class UntilActivity(ControlActivity): 'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'}, } - def __init__(self, **kwargs): - super(UntilActivity, self).__init__(**kwargs) - self.expression = kwargs.get('expression', None) - self.timeout = kwargs.get('timeout', None) - self.activities = kwargs.get('activities', None) + def __init__(self, name, expression, activities, additional_properties=None, description=None, depends_on=None, user_properties=None, timeout=None): + super(UntilActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties) + self.expression = expression + self.timeout = timeout + self.activities = activities self.type = 'Until' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity_py3.py deleted file mode 100644 index 40c03ce18591..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
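UntilActivity now requires name, expression and activities up front. A minimal sketch, assuming a pipeline variable named 'done' and using a WaitActivity as the loop body:

from azure.mgmt.datafactory.models import Expression, UntilActivity, WaitActivity

until = UntilActivity(
    name='PollUntilDone',
    # The loop keeps running until this expression evaluates to true.
    expression=Expression(value="@equals(variables('done'), true)"),
    activities=[WaitActivity(name='Backoff', wait_time_in_seconds=30)],
    timeout='0.00:30:00',  # optional; matches the ((d+).)?(hh):(mm):(ss) pattern
)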
-# -------------------------------------------------------------------------- - -from .control_activity_py3 import ControlActivity - - -class UntilActivity(ControlActivity): - """This activity executes inner activities until the specified boolean - expression results to true or timeout is reached, whichever is earlier. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param expression: Required. An expression that would evaluate to Boolean. - The loop will continue until this expression evaluates to true - :type expression: ~azure.mgmt.datafactory.models.Expression - :param timeout: Specifies the timeout for the activity to run. If there is - no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 - week as default. Type: string (or Expression with resultType string), - pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). Type: - string (or Expression with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type timeout: object - :param activities: Required. List of activities to execute. - :type activities: list[~azure.mgmt.datafactory.models.Activity] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'expression': {'required': True}, - 'activities': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'expression': {'key': 'typeProperties.expression', 'type': 'Expression'}, - 'timeout': {'key': 'typeProperties.timeout', 'type': 'object'}, - 'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'}, - } - - def __init__(self, *, name: str, expression, activities, additional_properties=None, description: str=None, depends_on=None, user_properties=None, timeout=None, **kwargs) -> None: - super(UntilActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.expression = expression - self.timeout = timeout - self.activities = activities - self.type = 'Until' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request.py index c6460310225a..6bb70551971d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request.py @@ -29,6 +29,6 @@ class 
UpdateIntegrationRuntimeNodeRequest(Model): 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, } - def __init__(self, **kwargs): - super(UpdateIntegrationRuntimeNodeRequest, self).__init__(**kwargs) - self.concurrent_jobs_limit = kwargs.get('concurrent_jobs_limit', None) + def __init__(self, concurrent_jobs_limit=None): + super(UpdateIntegrationRuntimeNodeRequest, self).__init__() + self.concurrent_jobs_limit = concurrent_jobs_limit diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request_py3.py deleted file mode 100644 index de1605885139..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request_py3.py +++ /dev/null @@ -1,34 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class UpdateIntegrationRuntimeNodeRequest(Model): - """Update integration runtime node request. - - :param concurrent_jobs_limit: The number of concurrent jobs permitted to - run on the integration runtime node. Values between 1 and - maxConcurrentJobs(inclusive) are allowed. - :type concurrent_jobs_limit: int - """ - - _validation = { - 'concurrent_jobs_limit': {'minimum': 1}, - } - - _attribute_map = { - 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, - } - - def __init__(self, *, concurrent_jobs_limit: int=None, **kwargs) -> None: - super(UpdateIntegrationRuntimeNodeRequest, self).__init__(**kwargs) - self.concurrent_jobs_limit = concurrent_jobs_limit diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request.py index bd5e332b50f5..f4e02900146a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request.py @@ -32,7 +32,7 @@ class UpdateIntegrationRuntimeRequest(Model): 'update_delay_offset': {'key': 'updateDelayOffset', 'type': 'str'}, } - def __init__(self, **kwargs): - super(UpdateIntegrationRuntimeRequest, self).__init__(**kwargs) - self.auto_update = kwargs.get('auto_update', None) - self.update_delay_offset = kwargs.get('update_delay_offset', None) + def __init__(self, auto_update=None, update_delay_offset=None): + super(UpdateIntegrationRuntimeRequest, self).__init__() + self.auto_update = auto_update + self.update_delay_offset = update_delay_offset diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request_py3.py deleted file mode 100644 index 731cb942b472..000000000000 --- 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request_py3.py +++ /dev/null @@ -1,38 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class UpdateIntegrationRuntimeRequest(Model): - """Update integration runtime request. - - :param auto_update: Enables or disables the auto-update feature of the - self-hosted integration runtime. See - https://go.microsoft.com/fwlink/?linkid=854189. Possible values include: - 'On', 'Off' - :type auto_update: str or - ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate - :param update_delay_offset: The time offset (in hours) in the day, e.g., - PT03H is 3 hours. The integration runtime auto update will happen on that - time. - :type update_delay_offset: str - """ - - _attribute_map = { - 'auto_update': {'key': 'autoUpdate', 'type': 'str'}, - 'update_delay_offset': {'key': 'updateDelayOffset', 'type': 'str'}, - } - - def __init__(self, *, auto_update=None, update_delay_offset: str=None, **kwargs) -> None: - super(UpdateIntegrationRuntimeRequest, self).__init__(**kwargs) - self.auto_update = auto_update - self.update_delay_offset = update_delay_offset diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy.py index b51e313b6f0c..d640439f57e4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy.py @@ -42,10 +42,10 @@ class UserAccessPolicy(Model): 'expire_time': {'key': 'expireTime', 'type': 'str'}, } - def __init__(self, **kwargs): - super(UserAccessPolicy, self).__init__(**kwargs) - self.permissions = kwargs.get('permissions', None) - self.access_resource_path = kwargs.get('access_resource_path', None) - self.profile_name = kwargs.get('profile_name', None) - self.start_time = kwargs.get('start_time', None) - self.expire_time = kwargs.get('expire_time', None) + def __init__(self, permissions=None, access_resource_path=None, profile_name=None, start_time=None, expire_time=None): + super(UserAccessPolicy, self).__init__() + self.permissions = permissions + self.access_resource_path = access_resource_path + self.profile_name = profile_name + self.start_time = start_time + self.expire_time = expire_time diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy_py3.py deleted file mode 100644 index 26e2a7639a09..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy_py3.py +++ /dev/null @@ -1,51 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
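The two integration runtime update requests and UserAccessPolicy are plain client-side payload models, so the signature change here is mechanical. A combined sketch with illustrative values drawn from the docstrings above:

from azure.mgmt.datafactory.models import (
    UpdateIntegrationRuntimeNodeRequest, UpdateIntegrationRuntimeRequest,
    UserAccessPolicy)

node_update = UpdateIntegrationRuntimeNodeRequest(concurrent_jobs_limit=4)
ir_update = UpdateIntegrationRuntimeRequest(
    auto_update='On',             # IntegrationRuntimeAutoUpdate: 'On' or 'Off'
    update_delay_offset='PT03H',  # apply auto-updates three hours into the day
)
policy = UserAccessPolicy(
    permissions='r',              # only read access is currently supported
    access_resource_path='',      # empty string targets the factory resource
    expire_time='2019-06-07T08:00:00Z',  # illustrative; maximum duration is 8 hours
)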
-# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class UserAccessPolicy(Model): - """Get Data Plane read only token request definition. - - :param permissions: The string with permissions for Data Plane access. - Currently only 'r' is supported which grants read only access. - :type permissions: str - :param access_resource_path: The resource path to get access relative to - factory. Currently only empty string is supported which corresponds to the - factory resource. - :type access_resource_path: str - :param profile_name: The name of the profile. Currently only the default - is supported. The default value is DefaultProfile. - :type profile_name: str - :param start_time: Start time for the token. If not specified the current - time will be used. - :type start_time: str - :param expire_time: Expiration time for the token. Maximum duration for - the token is eight hours and by default the token will expire in eight - hours. - :type expire_time: str - """ - - _attribute_map = { - 'permissions': {'key': 'permissions', 'type': 'str'}, - 'access_resource_path': {'key': 'accessResourcePath', 'type': 'str'}, - 'profile_name': {'key': 'profileName', 'type': 'str'}, - 'start_time': {'key': 'startTime', 'type': 'str'}, - 'expire_time': {'key': 'expireTime', 'type': 'str'}, - } - - def __init__(self, *, permissions: str=None, access_resource_path: str=None, profile_name: str=None, start_time: str=None, expire_time: str=None, **kwargs) -> None: - super(UserAccessPolicy, self).__init__(**kwargs) - self.permissions = permissions - self.access_resource_path = access_resource_path - self.profile_name = profile_name - self.start_time = start_time - self.expire_time = expire_time diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property.py index 30692d2960ec..4356304f3f08 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property.py @@ -15,12 +15,10 @@ class UserProperty(Model): """User property. - All required parameters must be populated in order to send to Azure. - - :param name: Required. User property name. + :param name: User property name. :type name: str - :param value: Required. User property value. Type: string (or Expression - with resultType string). + :param value: User property value. Type: string (or Expression with + resultType string). 
:type value: object """ @@ -34,7 +32,7 @@ class UserProperty(Model): 'value': {'key': 'value', 'type': 'object'}, } - def __init__(self, **kwargs): - super(UserProperty, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.value = kwargs.get('value', None) + def __init__(self, name, value): + super(UserProperty, self).__init__() + self.name = name + self.value = value diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property_py3.py deleted file mode 100644 index 7b4f3beb0195..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property_py3.py +++ /dev/null @@ -1,40 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class UserProperty(Model): - """User property. - - All required parameters must be populated in order to send to Azure. - - :param name: Required. User property name. - :type name: str - :param value: Required. User property value. Type: string (or Expression - with resultType string). - :type value: object - """ - - _validation = { - 'name': {'required': True}, - 'value': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'object'}, - } - - def __init__(self, *, name: str, value, **kwargs) -> None: - super(UserProperty, self).__init__(**kwargs) - self.name = name - self.value = value diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity.py index 0d92d32c12b0..6642382c3fd8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity.py @@ -15,12 +15,10 @@ class ValidationActivity(ControlActivity): """This activity verifies that an external resource exists. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Activity name. :type name: str :param description: Activity description. :type description: str @@ -28,7 +26,7 @@ class ValidationActivity(ControlActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param timeout: Specifies the timeout for the activity to run. If there is no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 @@ -47,7 +45,7 @@ class ValidationActivity(ControlActivity): true, the folder must have at least one file. 
If set to false, the folder must be empty. Type: boolean (or Expression with resultType boolean). :type child_items: object - :param dataset: Required. Validation activity dataset reference. + :param dataset: Validation activity dataset reference. :type dataset: ~azure.mgmt.datafactory.models.DatasetReference """ @@ -71,11 +69,11 @@ class ValidationActivity(ControlActivity): 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, } - def __init__(self, **kwargs): - super(ValidationActivity, self).__init__(**kwargs) - self.timeout = kwargs.get('timeout', None) - self.sleep = kwargs.get('sleep', None) - self.minimum_size = kwargs.get('minimum_size', None) - self.child_items = kwargs.get('child_items', None) - self.dataset = kwargs.get('dataset', None) + def __init__(self, name, dataset, additional_properties=None, description=None, depends_on=None, user_properties=None, timeout=None, sleep=None, minimum_size=None, child_items=None): + super(ValidationActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties) + self.timeout = timeout + self.sleep = sleep + self.minimum_size = minimum_size + self.child_items = child_items + self.dataset = dataset self.type = 'Validation' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity_py3.py deleted file mode 100644 index f4680400b447..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity_py3.py +++ /dev/null @@ -1,81 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .control_activity_py3 import ControlActivity - - -class ValidationActivity(ControlActivity): - """This activity verifies that an external resource exists. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param timeout: Specifies the timeout for the activity to run. If there is - no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 - week as default. Type: string (or Expression with resultType string), - pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type timeout: object - :param sleep: A delay in seconds between validation attempts. If no value - is specified, 10 seconds will be used as the default. Type: integer (or - Expression with resultType integer). 
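ValidationActivity keeps name and dataset as the required pair. A sketch that blocks until an assumed folder dataset contains at least one file:

from azure.mgmt.datafactory.models import DatasetReference, ValidationActivity

validation = ValidationActivity(
    name='WaitForLandingFolder',
    dataset=DatasetReference(reference_name='landingFolderDataset'),
    sleep=30,              # seconds between validation attempts (default is 10)
    child_items=True,      # folder must contain at least one file
    timeout='0.01:00:00',  # give up after one hour
)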
- :type sleep: object - :param minimum_size: Can be used if dataset points to a file. The file - must be greater than or equal in size to the value specified. Type: - integer (or Expression with resultType integer). - :type minimum_size: object - :param child_items: Can be used if dataset points to a folder. If set to - true, the folder must have at least one file. If set to false, the folder - must be empty. Type: boolean (or Expression with resultType boolean). - :type child_items: object - :param dataset: Required. Validation activity dataset reference. - :type dataset: ~azure.mgmt.datafactory.models.DatasetReference - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'dataset': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'timeout': {'key': 'typeProperties.timeout', 'type': 'object'}, - 'sleep': {'key': 'typeProperties.sleep', 'type': 'object'}, - 'minimum_size': {'key': 'typeProperties.minimumSize', 'type': 'object'}, - 'child_items': {'key': 'typeProperties.childItems', 'type': 'object'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, - } - - def __init__(self, *, name: str, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, timeout=None, sleep=None, minimum_size=None, child_items=None, **kwargs) -> None: - super(ValidationActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.timeout = timeout - self.sleep = sleep - self.minimum_size = minimum_size - self.child_items = child_items - self.dataset = dataset - self.type = 'Validation' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification.py index 6d7fd808fa44..aac68001fe4c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification.py @@ -15,10 +15,8 @@ class VariableSpecification(Model): """Definition of a single variable for a Pipeline. - All required parameters must be populated in order to send to Azure. - - :param type: Required. Variable type. Possible values include: 'String', - 'Bool', 'Array' + :param type: Variable type. Possible values include: 'String', 'Bool', + 'Array' :type type: str or ~azure.mgmt.datafactory.models.VariableType :param default_value: Default value of variable. 
:type default_value: object @@ -33,7 +31,7 @@ class VariableSpecification(Model): 'default_value': {'key': 'defaultValue', 'type': 'object'}, } - def __init__(self, **kwargs): - super(VariableSpecification, self).__init__(**kwargs) - self.type = kwargs.get('type', None) - self.default_value = kwargs.get('default_value', None) + def __init__(self, type, default_value=None): + super(VariableSpecification, self).__init__() + self.type = type + self.default_value = default_value diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification_py3.py deleted file mode 100644 index d60b3b4b1591..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification_py3.py +++ /dev/null @@ -1,39 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class VariableSpecification(Model): - """Definition of a single variable for a Pipeline. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Variable type. Possible values include: 'String', - 'Bool', 'Array' - :type type: str or ~azure.mgmt.datafactory.models.VariableType - :param default_value: Default value of variable. - :type default_value: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'default_value': {'key': 'defaultValue', 'type': 'object'}, - } - - def __init__(self, *, type, default_value=None, **kwargs) -> None: - super(VariableSpecification, self).__init__(**kwargs) - self.type = type - self.default_value = default_value diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service.py index 6b5e8d0103f5..c7898ef92a77 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service.py @@ -15,8 +15,6 @@ class VerticaLinkedService(LinkedService): """Vertica linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class VerticaLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. 
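The VariableSpecification constructor shown just above the Vertica diff now takes the variable type positionally. A short sketch declaring two illustrative pipeline variables:

from azure.mgmt.datafactory.models import VariableSpecification

variables = {
    'done': VariableSpecification(type='Bool', default_value=False),
    'attempts': VariableSpecification(type='String', default_value='0'),
}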
@@ -61,9 +59,9 @@ class VerticaLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(VerticaLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.pwd = kwargs.get('pwd', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None): + super(VerticaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.connection_string = connection_string + self.pwd = pwd + self.encrypted_credential = encrypted_credential self.type = 'Vertica' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service_py3.py deleted file mode 100644 index 3aee3a5ae0f6..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service_py3.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class VerticaLinkedService(LinkedService): - """Vertica linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection - string. - :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: - super(VerticaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.pwd = pwd - self.encrypted_credential = encrypted_credential - self.type = 'Vertica' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source.py index d0b642f15d38..8a9d03f58a3d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source.py @@ -15,8 +15,6 @@ class VerticaSource(CopySource): """A copy activity Vertica source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class VerticaSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). @@ -51,7 +49,7 @@ class VerticaSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(VerticaSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'VerticaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source_py3.py deleted file mode 100644 index a1c4d755f2b4..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. 
All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class VerticaSource(CopySource): - """A copy activity Vertica source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). - :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'VerticaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset.py index 151a0d000e3f..6463481f2e9d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset.py @@ -15,8 +15,6 @@ class VerticaTableDataset(Dataset): """Vertica dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class VerticaTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. 
@@ -41,7 +39,7 @@ class VerticaTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. @@ -74,9 +72,9 @@ class VerticaTableDataset(Dataset): 'vertica_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, **kwargs): - super(VerticaTableDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.vertica_table_dataset_schema = kwargs.get('vertica_table_dataset_schema', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, vertica_table_dataset_schema=None): + super(VerticaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name + self.table = table + self.vertica_table_dataset_schema = vertica_table_dataset_schema self.type = 'VerticaTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset_py3.py deleted file mode 100644 index 4c2fc8da32ad..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset_py3.py +++ /dev/null @@ -1,82 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class VerticaTableDataset(Dataset): - """Vertica dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. 
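The three Vertica models in this patch compose in the usual linked service, dataset, copy source chain. A minimal end-to-end sketch; the server, table and reference names are assumptions, and a real connection string would normally be an AzureKeyVaultSecretReference rather than an inline value:

from azure.mgmt.datafactory.models import (
    LinkedServiceReference, VerticaLinkedService, VerticaSource,
    VerticaTableDataset)

linked_service = VerticaLinkedService(
    connection_string='Server=vertica.example.com;Port=5433;Database=mydb')
dataset = VerticaTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='VerticaLS'),
    table='sales',                          # table name
    vertica_table_dataset_schema='public',  # schema name (typeProperties.schema)
)
source = VerticaSource(query='SELECT * FROM public.sales')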
- :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: This property will be retired. Please consider using - schema + table properties instead. - :type table_name: object - :param table: The table name of the Vertica. Type: string (or Expression - with resultType string). - :type table: object - :param vertica_table_dataset_schema: The schema name of the Vertica. Type: - string (or Expression with resultType string). - :type vertica_table_dataset_schema: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'vertica_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, vertica_table_dataset_schema=None, **kwargs) -> None: - super(VerticaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.table = table - self.vertica_table_dataset_schema = vertica_table_dataset_schema - self.type = 'VerticaTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity.py index 91f3decc7473..88b4ec622a20 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity.py @@ -15,12 +15,10 @@ class WaitActivity(ControlActivity): """This activity suspends pipeline execution for the specified interval. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Activity name. :type name: str :param description: Activity description. :type description: str @@ -28,9 +26,9 @@ class WaitActivity(ControlActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. 
:type type: str - :param wait_time_in_seconds: Required. Duration in seconds. + :param wait_time_in_seconds: Duration in seconds. :type wait_time_in_seconds: int """ @@ -50,7 +48,7 @@ class WaitActivity(ControlActivity): 'wait_time_in_seconds': {'key': 'typeProperties.waitTimeInSeconds', 'type': 'int'}, } - def __init__(self, **kwargs): - super(WaitActivity, self).__init__(**kwargs) - self.wait_time_in_seconds = kwargs.get('wait_time_in_seconds', None) + def __init__(self, name, wait_time_in_seconds, additional_properties=None, description=None, depends_on=None, user_properties=None): + super(WaitActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties) + self.wait_time_in_seconds = wait_time_in_seconds self.type = 'Wait' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity_py3.py deleted file mode 100644 index ff85c9d16733..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity_py3.py +++ /dev/null @@ -1,56 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .control_activity_py3 import ControlActivity - - -class WaitActivity(ControlActivity): - """This activity suspends pipeline execution for the specified interval. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param wait_time_in_seconds: Required. Duration in seconds. 
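WaitActivity reduces to name plus wait_time_in_seconds. A sketch that also attaches a UserProperty, whose constructor was simplified the same way earlier in this patch:

from azure.mgmt.datafactory.models import UserProperty, WaitActivity

wait = WaitActivity(
    name='CoolDown',
    wait_time_in_seconds=60,
    user_properties=[UserProperty(name='stage', value='throttle')],
)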
- :type wait_time_in_seconds: int - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'wait_time_in_seconds': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'wait_time_in_seconds': {'key': 'typeProperties.waitTimeInSeconds', 'type': 'int'}, - } - - def __init__(self, *, name: str, wait_time_in_seconds: int, additional_properties=None, description: str=None, depends_on=None, user_properties=None, **kwargs) -> None: - super(WaitActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.wait_time_in_seconds = wait_time_in_seconds - self.type = 'Wait' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity.py index 70264719d52e..cf7b9d40df3d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity.py @@ -15,12 +15,10 @@ class WebActivity(ExecutionActivity): """Web activity. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Activity name. :type name: str :param description: Activity description. :type description: str @@ -28,18 +26,18 @@ class WebActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param method: Required. Rest API method for target endpoint. Possible - values include: 'GET', 'POST', 'PUT', 'DELETE' + :param method: Rest API method for target endpoint. Possible values + include: 'GET', 'POST', 'PUT', 'DELETE' :type method: str or ~azure.mgmt.datafactory.models.WebActivityMethod - :param url: Required. Web activity target endpoint and path. Type: string - (or Expression with resultType string). + :param url: Web activity target endpoint and path. Type: string (or + Expression with resultType string). :type url: object :param headers: Represents the headers that will be sent to the request. 
For example, to set the language and type on a request: "headers" : { @@ -86,13 +84,13 @@ class WebActivity(ExecutionActivity): 'linked_services': {'key': 'typeProperties.linkedServices', 'type': '[LinkedServiceReference]'}, } - def __init__(self, **kwargs): - super(WebActivity, self).__init__(**kwargs) - self.method = kwargs.get('method', None) - self.url = kwargs.get('url', None) - self.headers = kwargs.get('headers', None) - self.body = kwargs.get('body', None) - self.authentication = kwargs.get('authentication', None) - self.datasets = kwargs.get('datasets', None) - self.linked_services = kwargs.get('linked_services', None) + def __init__(self, name, method, url, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, headers=None, body=None, authentication=None, datasets=None, linked_services=None): + super(WebActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) + self.method = method + self.url = url + self.headers = headers + self.body = body + self.authentication = authentication + self.datasets = datasets + self.linked_services = linked_services self.type = 'WebActivity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication.py index 6ebb193ae5e9..3f2d35b9b62e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication.py @@ -15,10 +15,7 @@ class WebActivityAuthentication(Model): """Web activity authentication properties. - All required parameters must be populated in order to send to Azure. - - :param type: Required. Web activity authentication - (Basic/ClientCertificate/MSI) + :param type: Web activity authentication (Basic/ClientCertificate/MSI) :type type: str :param pfx: Base64-encoded contents of a PFX file. :type pfx: ~azure.mgmt.datafactory.models.SecureString @@ -44,10 +41,10 @@ class WebActivityAuthentication(Model): 'resource': {'key': 'resource', 'type': 'str'}, } - def __init__(self, **kwargs): - super(WebActivityAuthentication, self).__init__(**kwargs) - self.type = kwargs.get('type', None) - self.pfx = kwargs.get('pfx', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.resource = kwargs.get('resource', None) + def __init__(self, type, pfx=None, username=None, password=None, resource=None): + super(WebActivityAuthentication, self).__init__() + self.type = type + self.pfx = pfx + self.username = username + self.password = password + self.resource = resource diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication_py3.py deleted file mode 100644 index 4c2b68ba7161..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication_py3.py +++ /dev/null @@ -1,53 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class WebActivityAuthentication(Model): - """Web activity authentication properties. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Web activity authentication - (Basic/ClientCertificate/MSI) - :type type: str - :param pfx: Base64-encoded contents of a PFX file. - :type pfx: ~azure.mgmt.datafactory.models.SecureString - :param username: Web activity authentication user name for basic - authentication. - :type username: str - :param password: Password for the PFX file or basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecureString - :param resource: Resource for which Azure Auth token will be requested - when using MSI Authentication. - :type resource: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'pfx': {'key': 'pfx', 'type': 'SecureString'}, - 'username': {'key': 'username', 'type': 'str'}, - 'password': {'key': 'password', 'type': 'SecureString'}, - 'resource': {'key': 'resource', 'type': 'str'}, - } - - def __init__(self, *, type: str, pfx=None, username: str=None, password=None, resource: str=None, **kwargs) -> None: - super(WebActivityAuthentication, self).__init__(**kwargs) - self.type = type - self.pfx = pfx - self.username = username - self.password = password - self.resource = resource diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_py3.py deleted file mode 100644 index 9a64114a00c6..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_py3.py +++ /dev/null @@ -1,98 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .execution_activity_py3 import ExecutionActivity - - -class WebActivity(ExecutionActivity): - """Web activity. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. 
- :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param method: Required. Rest API method for target endpoint. Possible - values include: 'GET', 'POST', 'PUT', 'DELETE' - :type method: str or ~azure.mgmt.datafactory.models.WebActivityMethod - :param url: Required. Web activity target endpoint and path. Type: string - (or Expression with resultType string). - :type url: object - :param headers: Represents the headers that will be sent to the request. - For example, to set the language and type on a request: "headers" : { - "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: - string (or Expression with resultType string). - :type headers: object - :param body: Represents the payload that will be sent to the endpoint. - Required for POST/PUT method, not allowed for GET method Type: string (or - Expression with resultType string). - :type body: object - :param authentication: Authentication method used for calling the - endpoint. - :type authentication: - ~azure.mgmt.datafactory.models.WebActivityAuthentication - :param datasets: List of datasets passed to web endpoint. - :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference] - :param linked_services: List of linked services passed to web endpoint. - :type linked_services: - list[~azure.mgmt.datafactory.models.LinkedServiceReference] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'method': {'required': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'method': {'key': 'typeProperties.method', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, - 'body': {'key': 'typeProperties.body', 'type': 'object'}, - 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'}, - 'datasets': {'key': 'typeProperties.datasets', 'type': '[DatasetReference]'}, - 'linked_services': {'key': 'typeProperties.linkedServices', 'type': '[LinkedServiceReference]'}, - } - - def __init__(self, *, name: str, method, url, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, headers=None, body=None, authentication=None, datasets=None, linked_services=None, **kwargs) -> None: - super(WebActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.method = method - self.url = url - self.headers = headers - self.body = body - self.authentication = authentication - self.datasets = datasets - self.linked_services = linked_services - self.type = 'WebActivity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication.py index d3bd2f2594ab..4b95d5b488ff 100644 --- 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication.py @@ -16,13 +16,11 @@ class WebAnonymousAuthentication(WebLinkedServiceTypeProperties): """A WebLinkedService that uses anonymous authentication to communicate with an HTTP endpoint. - All required parameters must be populated in order to send to Azure. - - :param url: Required. The URL of the web service endpoint, e.g. + :param url: The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: string (or Expression with resultType string). :type url: object - :param authentication_type: Required. Constant filled by server. + :param authentication_type: Constant filled by server. :type authentication_type: str """ @@ -31,11 +29,6 @@ class WebAnonymousAuthentication(WebLinkedServiceTypeProperties): 'authentication_type': {'required': True}, } - _attribute_map = { - 'url': {'key': 'url', 'type': 'object'}, - 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(WebAnonymousAuthentication, self).__init__(**kwargs) + def __init__(self, url): + super(WebAnonymousAuthentication, self).__init__(url=url) self.authentication_type = 'Anonymous' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication_py3.py deleted file mode 100644 index ee7a4e780a1f..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication_py3.py +++ /dev/null @@ -1,41 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .web_linked_service_type_properties_py3 import WebLinkedServiceTypeProperties - - -class WebAnonymousAuthentication(WebLinkedServiceTypeProperties): - """A WebLinkedService that uses anonymous authentication to communicate with - an HTTP endpoint. - - All required parameters must be populated in order to send to Azure. - - :param url: Required. The URL of the web service endpoint, e.g. - http://www.microsoft.com . Type: string (or Expression with resultType - string). - :type url: object - :param authentication_type: Required. Constant filled by server. 
- :type authentication_type: str - """ - - _validation = { - 'url': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'url': {'key': 'url', 'type': 'object'}, - 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, - } - - def __init__(self, *, url, **kwargs) -> None: - super(WebAnonymousAuthentication, self).__init__(url=url, **kwargs) - self.authentication_type = 'Anonymous' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication.py index 90050f7dae28..f32f30499a14 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication.py @@ -16,18 +16,16 @@ class WebBasicAuthentication(WebLinkedServiceTypeProperties): """A WebLinkedService that uses basic authentication to communicate with an HTTP endpoint. - All required parameters must be populated in order to send to Azure. - - :param url: Required. The URL of the web service endpoint, e.g. + :param url: The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: string (or Expression with resultType string). :type url: object - :param authentication_type: Required. Constant filled by server. + :param authentication_type: Constant filled by server. :type authentication_type: str - :param username: Required. User name for Basic authentication. Type: - string (or Expression with resultType string). + :param username: User name for Basic authentication. Type: string (or + Expression with resultType string). :type username: object - :param password: Required. The password for Basic authentication. + :param password: The password for Basic authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase """ @@ -45,8 +43,8 @@ class WebBasicAuthentication(WebLinkedServiceTypeProperties): 'password': {'key': 'password', 'type': 'SecretBase'}, } - def __init__(self, **kwargs): - super(WebBasicAuthentication, self).__init__(**kwargs) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) + def __init__(self, url, username, password): + super(WebBasicAuthentication, self).__init__(url=url) + self.username = username + self.password = password self.authentication_type = 'Basic' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication_py3.py deleted file mode 100644 index 71577ec86565..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication_py3.py +++ /dev/null @@ -1,52 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from .web_linked_service_type_properties_py3 import WebLinkedServiceTypeProperties - - -class WebBasicAuthentication(WebLinkedServiceTypeProperties): - """A WebLinkedService that uses basic authentication to communicate with an - HTTP endpoint. - - All required parameters must be populated in order to send to Azure. - - :param url: Required. The URL of the web service endpoint, e.g. - http://www.microsoft.com . Type: string (or Expression with resultType - string). - :type url: object - :param authentication_type: Required. Constant filled by server. - :type authentication_type: str - :param username: Required. User name for Basic authentication. Type: - string (or Expression with resultType string). - :type username: object - :param password: Required. The password for Basic authentication. - :type password: ~azure.mgmt.datafactory.models.SecretBase - """ - - _validation = { - 'url': {'required': True}, - 'authentication_type': {'required': True}, - 'username': {'required': True}, - 'password': {'required': True}, - } - - _attribute_map = { - 'url': {'key': 'url', 'type': 'object'}, - 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, - 'username': {'key': 'username', 'type': 'object'}, - 'password': {'key': 'password', 'type': 'SecretBase'}, - } - - def __init__(self, *, url, username, password, **kwargs) -> None: - super(WebBasicAuthentication, self).__init__(url=url, **kwargs) - self.username = username - self.password = password - self.authentication_type = 'Basic' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication.py index 671808ca85d1..2f4103a772ca 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication.py @@ -18,17 +18,15 @@ class WebClientCertificateAuthentication(WebLinkedServiceTypeProperties): authentication; the server must also provide valid credentials to the client. - All required parameters must be populated in order to send to Azure. - - :param url: Required. The URL of the web service endpoint, e.g. + :param url: The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: string (or Expression with resultType string). :type url: object - :param authentication_type: Required. Constant filled by server. + :param authentication_type: Constant filled by server. :type authentication_type: str - :param pfx: Required. Base64-encoded contents of a PFX file. + :param pfx: Base64-encoded contents of a PFX file. :type pfx: ~azure.mgmt.datafactory.models.SecretBase - :param password: Required. Password for the PFX file. + :param password: Password for the PFX file. 
:type password: ~azure.mgmt.datafactory.models.SecretBase """ @@ -46,8 +44,8 @@ class WebClientCertificateAuthentication(WebLinkedServiceTypeProperties): 'password': {'key': 'password', 'type': 'SecretBase'}, } - def __init__(self, **kwargs): - super(WebClientCertificateAuthentication, self).__init__(**kwargs) - self.pfx = kwargs.get('pfx', None) - self.password = kwargs.get('password', None) + def __init__(self, url, pfx, password): + super(WebClientCertificateAuthentication, self).__init__(url=url) + self.pfx = pfx + self.password = password self.authentication_type = 'ClientCertificate' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication_py3.py deleted file mode 100644 index 7ac859b677a8..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication_py3.py +++ /dev/null @@ -1,53 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .web_linked_service_type_properties_py3 import WebLinkedServiceTypeProperties - - -class WebClientCertificateAuthentication(WebLinkedServiceTypeProperties): - """A WebLinkedService that uses client certificate based authentication to - communicate with an HTTP endpoint. This scheme follows mutual - authentication; the server must also provide valid credentials to the - client. - - All required parameters must be populated in order to send to Azure. - - :param url: Required. The URL of the web service endpoint, e.g. - http://www.microsoft.com . Type: string (or Expression with resultType - string). - :type url: object - :param authentication_type: Required. Constant filled by server. - :type authentication_type: str - :param pfx: Required. Base64-encoded contents of a PFX file. - :type pfx: ~azure.mgmt.datafactory.models.SecretBase - :param password: Required. Password for the PFX file. 
- :type password: ~azure.mgmt.datafactory.models.SecretBase - """ - - _validation = { - 'url': {'required': True}, - 'authentication_type': {'required': True}, - 'pfx': {'required': True}, - 'password': {'required': True}, - } - - _attribute_map = { - 'url': {'key': 'url', 'type': 'object'}, - 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, - 'pfx': {'key': 'pfx', 'type': 'SecretBase'}, - 'password': {'key': 'password', 'type': 'SecretBase'}, - } - - def __init__(self, *, url, pfx, password, **kwargs) -> None: - super(WebClientCertificateAuthentication, self).__init__(url=url, **kwargs) - self.pfx = pfx - self.password = password - self.authentication_type = 'ClientCertificate' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity.py index 1c648c42c3e2..738430128169 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity.py @@ -18,12 +18,10 @@ class WebHookActivity(ControlActivity): Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Activity name. :type name: str :param description: Activity description. :type description: str @@ -31,13 +29,12 @@ class WebHookActivity(ControlActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :ivar method: Required. Rest API method for target endpoint. Default - value: "POST" . + :ivar method: Rest API method for target endpoint. Default value: "POST" . :vartype method: str - :param url: Required. WebHook activity target endpoint and path. Type: - string (or Expression with resultType string). + :param url: WebHook activity target endpoint and path. Type: string (or + Expression with resultType string). :type url: object :param timeout: The timeout within which the webhook should be called back. If there is no value specified, it defaults to 10 minutes. 
Type: @@ -82,11 +79,11 @@ class WebHookActivity(ControlActivity): method = "POST" - def __init__(self, **kwargs): - super(WebHookActivity, self).__init__(**kwargs) - self.url = kwargs.get('url', None) - self.timeout = kwargs.get('timeout', None) - self.headers = kwargs.get('headers', None) - self.body = kwargs.get('body', None) - self.authentication = kwargs.get('authentication', None) + def __init__(self, name, url, additional_properties=None, description=None, depends_on=None, user_properties=None, timeout=None, headers=None, body=None, authentication=None): + super(WebHookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties) + self.url = url + self.timeout = timeout + self.headers = headers + self.body = body + self.authentication = authentication self.type = 'WebHook' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity_py3.py deleted file mode 100644 index 40cdc6f732da..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity_py3.py +++ /dev/null @@ -1,92 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .control_activity_py3 import ControlActivity - - -class WebHookActivity(ControlActivity): - """WebHook activity. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :ivar method: Required. Rest API method for target endpoint. Default - value: "POST" . - :vartype method: str - :param url: Required. WebHook activity target endpoint and path. Type: - string (or Expression with resultType string). - :type url: object - :param timeout: The timeout within which the webhook should be called - back. If there is no value specified, it defaults to 10 minutes. Type: - string. Pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type timeout: str - :param headers: Represents the headers that will be sent to the request. - For example, to set the language and type on a request: "headers" : { - "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: - string (or Expression with resultType string). - :type headers: object - :param body: Represents the payload that will be sent to the endpoint. 
- Required for POST/PUT method, not allowed for GET method Type: string (or - Expression with resultType string). - :type body: object - :param authentication: Authentication method used for calling the - endpoint. - :type authentication: - ~azure.mgmt.datafactory.models.WebActivityAuthentication - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'method': {'required': True, 'constant': True}, - 'url': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'method': {'key': 'typeProperties.method', 'type': 'str'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'timeout': {'key': 'typeProperties.timeout', 'type': 'str'}, - 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, - 'body': {'key': 'typeProperties.body', 'type': 'object'}, - 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'}, - } - - method = "POST" - - def __init__(self, *, name: str, url, additional_properties=None, description: str=None, depends_on=None, user_properties=None, timeout: str=None, headers=None, body=None, authentication=None, **kwargs) -> None: - super(WebHookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.url = url - self.timeout = timeout - self.headers = headers - self.body = body - self.authentication = authentication - self.type = 'WebHook' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service.py index 18fadba3f3ee..990c613e5b0a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service.py @@ -15,8 +15,6 @@ class WebLinkedService(LinkedService): """Web linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,9 +29,9 @@ class WebLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param type_properties: Required. Web linked service properties. + :param type_properties: Web linked service properties. 
:type type_properties: ~azure.mgmt.datafactory.models.WebLinkedServiceTypeProperties """ @@ -53,7 +51,7 @@ class WebLinkedService(LinkedService): 'type_properties': {'key': 'typeProperties', 'type': 'WebLinkedServiceTypeProperties'}, } - def __init__(self, **kwargs): - super(WebLinkedService, self).__init__(**kwargs) - self.type_properties = kwargs.get('type_properties', None) + def __init__(self, type_properties, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None): + super(WebLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.type_properties = type_properties self.type = 'Web' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_py3.py deleted file mode 100644 index 3e491b0fac4d..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_py3.py +++ /dev/null @@ -1,59 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class WebLinkedService(LinkedService): - """Web linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param type_properties: Required. Web linked service properties. 
- :type type_properties: - ~azure.mgmt.datafactory.models.WebLinkedServiceTypeProperties - """ - - _validation = { - 'type': {'required': True}, - 'type_properties': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'type_properties': {'key': 'typeProperties', 'type': 'WebLinkedServiceTypeProperties'}, - } - - def __init__(self, *, type_properties, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: - super(WebLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type_properties = type_properties - self.type = 'Web' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties.py index 22290e80b19f..684401273413 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties.py @@ -20,13 +20,11 @@ class WebLinkedServiceTypeProperties(Model): sub-classes are: WebClientCertificateAuthentication, WebBasicAuthentication, WebAnonymousAuthentication - All required parameters must be populated in order to send to Azure. - - :param url: Required. The URL of the web service endpoint, e.g. + :param url: The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: string (or Expression with resultType string). :type url: object - :param authentication_type: Required. Constant filled by server. + :param authentication_type: Constant filled by server. :type authentication_type: str """ @@ -44,7 +42,7 @@ class WebLinkedServiceTypeProperties(Model): 'authentication_type': {'ClientCertificate': 'WebClientCertificateAuthentication', 'Basic': 'WebBasicAuthentication', 'Anonymous': 'WebAnonymousAuthentication'} } - def __init__(self, **kwargs): - super(WebLinkedServiceTypeProperties, self).__init__(**kwargs) - self.url = kwargs.get('url', None) + def __init__(self, url): + super(WebLinkedServiceTypeProperties, self).__init__() + self.url = url self.authentication_type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties_py3.py deleted file mode 100644 index 1c162c2f1004..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties_py3.py +++ /dev/null @@ -1,50 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. 
-# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class WebLinkedServiceTypeProperties(Model): - """Base definition of WebLinkedServiceTypeProperties, this typeProperties is - polymorphic based on authenticationType, so not flattened in SDK models. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: WebClientCertificateAuthentication, - WebBasicAuthentication, WebAnonymousAuthentication - - All required parameters must be populated in order to send to Azure. - - :param url: Required. The URL of the web service endpoint, e.g. - http://www.microsoft.com . Type: string (or Expression with resultType - string). - :type url: object - :param authentication_type: Required. Constant filled by server. - :type authentication_type: str - """ - - _validation = { - 'url': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'url': {'key': 'url', 'type': 'object'}, - 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, - } - - _subtype_map = { - 'authentication_type': {'ClientCertificate': 'WebClientCertificateAuthentication', 'Basic': 'WebBasicAuthentication', 'Anonymous': 'WebAnonymousAuthentication'} - } - - def __init__(self, *, url, **kwargs) -> None: - super(WebLinkedServiceTypeProperties, self).__init__(**kwargs) - self.url = url - self.authentication_type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source.py index c5d3a2a8f00a..9e8a62e52127 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source.py @@ -15,8 +15,6 @@ class WebSource(CopySource): """A copy activity source for web page table. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class WebSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. 
:type type: str """ @@ -39,14 +37,6 @@ class WebSource(CopySource): 'type': {'required': True}, } - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(WebSource, self).__init__(**kwargs) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None): + super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) self.type = 'WebSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source_py3.py deleted file mode 100644 index 684e1d4233cc..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source_py3.py +++ /dev/null @@ -1,52 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class WebSource(CopySource): - """A copy activity source for web page table. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: - super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type = 'WebSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset.py index 3980fe3d885a..4933b4aa17c8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset.py @@ -15,8 +15,6 @@ class WebTableDataset(Dataset): """The dataset points to a HTML table in the web page. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class WebTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,10 +39,10 @@ class WebTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param index: Required. The zero-based index of the table in the web page. - Type: integer (or Expression with resultType integer), minimum: 0. + :param index: The zero-based index of the table in the web page. Type: + integer (or Expression with resultType integer), minimum: 0. :type index: object :param path: The relative URL to the web page from the linked service URL. Type: string (or Expression with resultType string). 
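The hunk below applies the same rewrite to WebTableDataset: the py3 variant is deleted and the surviving constructor declares linked_service_name and index explicitly instead of reading them from **kwargs. A minimal sketch of what instantiation looks like after this change (the dataset values are hypothetical, and LinkedServiceReference(reference_name=...) is assumed to be the usual way to build the reference):

from azure.mgmt.datafactory.models import LinkedServiceReference, WebTableDataset

# Required parameters are now part of the signature, so omitting
# linked_service_name or index fails with a TypeError at call time
# rather than with a client-side validation error at serialization time.
dataset = WebTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='WebLinkedService1'),
    index=0,
)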
@@ -71,8 +69,8 @@ class WebTableDataset(Dataset): 'path': {'key': 'typeProperties.path', 'type': 'object'}, } - def __init__(self, **kwargs): - super(WebTableDataset, self).__init__(**kwargs) - self.index = kwargs.get('index', None) - self.path = kwargs.get('path', None) + def __init__(self, linked_service_name, index, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, path=None): + super(WebTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.index = index + self.path = path self.type = 'WebTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset_py3.py deleted file mode 100644 index edb2344c35d2..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset_py3.py +++ /dev/null @@ -1,78 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class WebTableDataset(Dataset): - """The dataset points to a HTML table in the web page. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param index: Required. The zero-based index of the table in the web page. - Type: integer (or Expression with resultType integer), minimum: 0. - :type index: object - :param path: The relative URL to the web page from the linked service URL. - Type: string (or Expression with resultType string). 
- :type path: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - 'index': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'index': {'key': 'typeProperties.index', 'type': 'object'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, index, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, path=None, **kwargs) -> None: - super(WebTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.index = index - self.path = path - self.type = 'WebTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service.py index 24973f577133..afdac985e31f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service.py @@ -15,8 +15,6 @@ class XeroLinkedService(LinkedService): """Xero Service linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,10 +29,9 @@ class XeroLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param host: Required. The endpoint of the Xero server. (i.e. - api.xero.com) + :param host: The endpoint of the Xero server. (i.e. api.xero.com) :type host: object :param consumer_key: The consumer key associated with the Xero application. 
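The constructor hunk that follows gives XeroLinkedService the same treatment: host becomes an explicit required parameter while the secret-bearing fields stay optional keywords. A sketch of the resulting call, assuming SecureString (a SecretBase subtype from the same models package) carries the secrets; the host and key values here are placeholders:

from azure.mgmt.datafactory.models import SecureString, XeroLinkedService

# host is the only required type property; consumer_key and private_key
# accept any SecretBase, SecureString being the simplest concrete choice.
xero = XeroLinkedService(
    host='api.xero.com',
    consumer_key=SecureString(value='<consumer-key>'),
    private_key=SecureString(value='<contents-of-private-key.pem>'),
)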
@@ -81,13 +78,13 @@ class XeroLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(XeroLinkedService, self).__init__(**kwargs) - self.host = kwargs.get('host', None) - self.consumer_key = kwargs.get('consumer_key', None) - self.private_key = kwargs.get('private_key', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, host, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, consumer_key=None, private_key=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None): + super(XeroLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.host = host + self.consumer_key = consumer_key + self.private_key = private_key + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential self.type = 'Xero' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service_py3.py deleted file mode 100644 index 433c65ade739..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service_py3.py +++ /dev/null @@ -1,93 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class XeroLinkedService(LinkedService): - """Xero Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param host: Required. The endpoint of the Xero server. (i.e. - api.xero.com) - :type host: object - :param consumer_key: The consumer key associated with the Xero - application. 
- :type consumer_key: ~azure.mgmt.datafactory.models.SecretBase - :param private_key: The private key from the .pem file that was generated - for your Xero private application. You must include all the text from the - .pem file, including the Unix line endings( - ). - :type private_key: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'SecretBase'}, - 'private_key': {'key': 'typeProperties.privateKey', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, consumer_key=None, private_key=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(XeroLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.host = host - self.consumer_key = consumer_key - self.private_key = private_key - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'Xero' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset.py index 53c5edd44cec..d303efa94b46 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset.py @@ -15,8 +15,6 @@ class 
XeroObjectDataset(Dataset): """Xero Service dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class XeroObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class XeroObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -66,7 +64,7 @@ class XeroObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(XeroObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): + super(XeroObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name self.type = 'XeroObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset_py3.py deleted file mode 100644 index 673d41e1771e..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class XeroObjectDataset(Dataset): - """Xero Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. 
Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(XeroObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'XeroObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source.py index a37852a5b419..2fa6679fda05 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source.py @@ -15,8 +15,6 @@ class XeroSource(CopySource): """A copy activity Xero Service source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class XeroSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
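On the dataset side the same reshaping applies: XeroObjectDataset now takes linked_service_name as an explicit required argument instead of reading it from **kwargs. A hedged sketch pairing it with the linked service above; the reference name and table name are assumptions.

    from azure.mgmt.datafactory.models import LinkedServiceReference, XeroObjectDataset

    xero_ds = XeroObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name='XeroLinkedService'),  # hypothetical name
        table_name='Contacts',  # illustrative Xero table
    )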
@@ -51,7 +49,7 @@ class XeroSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(XeroSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'XeroSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source_py3.py deleted file mode 100644 index bbee6c6fa1f0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class XeroSource(CopySource): - """A copy activity Xero Service source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'XeroSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service.py index fe34dff77ea9..e4497be6cb19 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service.py @@ -15,8 +15,6 @@ class ZohoLinkedService(LinkedService): """Zoho server linked service. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,9 +29,9 @@ class ZohoLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str - :param endpoint: Required. The endpoint of the Zoho server. (i.e. + :param endpoint: The endpoint of the Zoho server. (i.e. crm.zoho.com/crm/private) :type endpoint: object :param access_token: The access token for Zoho authentication. 
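ZohoLinkedService follows the same pattern in the hunk below: endpoint moves into the explicit signature while the docstring drops its "Required." marker, though _validation still enforces it. A minimal sketch, assuming a SecureString token and placeholder values:

    from azure.mgmt.datafactory.models import SecureString, ZohoLinkedService

    zoho_ls = ZohoLinkedService(
        endpoint='crm.zoho.com/crm/private',  # still required by _validation
        access_token=SecureString(value='<zoho-access-token>'),  # placeholder secret
        use_encrypted_endpoints=True,
    )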
@@ -74,12 +72,12 @@ class ZohoLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, **kwargs): - super(ZohoLinkedService, self).__init__(**kwargs) - self.endpoint = kwargs.get('endpoint', None) - self.access_token = kwargs.get('access_token', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + def __init__(self, endpoint, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, access_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None): + super(ZohoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) + self.endpoint = endpoint + self.access_token = access_token + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential self.type = 'Zoho' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service_py3.py deleted file mode 100644 index f82f6221592b..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service_py3.py +++ /dev/null @@ -1,85 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .linked_service_py3 import LinkedService - - -class ZohoLinkedService(LinkedService): - """Zoho server linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - linked service. - :type annotations: list[object] - :param type: Required. Constant filled by server. - :type type: str - :param endpoint: Required. The endpoint of the Zoho server. (i.e. - crm.zoho.com/crm/private) - :type endpoint: object - :param access_token: The access token for Zoho authentication. - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source - endpoints are encrypted using HTTPS. The default value is true. 
- :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name - in the server's certificate to match the host name of the server when - connecting over SSL. The default value is true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of - the server when connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__(self, *, endpoint, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: - super(ZohoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.endpoint = endpoint - self.access_token = access_token - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - self.type = 'Zoho' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset.py index 062d508860a6..531272808f3c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset.py @@ -15,8 +15,6 @@ class ZohoObjectDataset(Dataset): """Zoho server dataset. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +27,7 @@ class ZohoObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Required. Linked service reference. + :param linked_service_name: Linked service reference. 
:type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -41,7 +39,7 @@ class ZohoObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -66,7 +64,7 @@ class ZohoObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, **kwargs): - super(ZohoObjectDataset, self).__init__(**kwargs) - self.table_name = kwargs.get('table_name', None) + def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): + super(ZohoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + self.table_name = table_name self.type = 'ZohoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset_py3.py deleted file mode 100644 index ef5a67d4fe35..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset_py3.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .dataset_py3 import Dataset - - -class ZohoObjectDataset(Dataset): - """Zoho server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: - DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, - ~azure.mgmt.datafactory.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the - Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, - Dataset will appear at the root level. - :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Required. 
Constant filled by server. - :type type: str - :param table_name: The table name. Type: string (or Expression with - resultType string). - :type table_name: object - """ - - _validation = { - 'linked_service_name': {'required': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'type': {'key': 'type', 'type': 'str'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: - super(ZohoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.table_name = table_name - self.type = 'ZohoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source.py index 274c6fc09f19..763b95296d1a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source.py @@ -15,8 +15,6 @@ class ZohoSource(CopySource): """A copy activity Zoho server source. - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +29,7 @@ class ZohoSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Required. Constant filled by server. + :param type: Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
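The source-model changes are mechanical: ZohoSource keeps its optional retry and query parameters, only spelled out in the signature instead of pulled from **kwargs. A short sketch with assumed values:

    from azure.mgmt.datafactory.models import ZohoSource

    zoho_source = ZohoSource(
        query='SELECT * FROM Accounts',  # illustrative query expression
        source_retry_count=3,
        source_retry_wait='00:00:30',  # matches the documented ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])) pattern
    )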
@@ -51,7 +49,7 @@ class ZohoSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, **kwargs): - super(ZohoSource, self).__init__(**kwargs) - self.query = kwargs.get('query', None) + def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): + super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + self.query = query self.type = 'ZohoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source_py3.py deleted file mode 100644 index 6d7dc29bdf8a..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_source_py3 import CopySource - - -class ZohoSource(CopySource): - """A copy activity Zoho server source. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param source_retry_count: Source retry count. Type: integer (or - Expression with resultType integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression - with resultType string), pattern: - ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). 
- :type query: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.query = query - self.type = 'ZohoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/activity_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/activity_runs_operations.py index f338a1a9c835..ec8288ad562a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/activity_runs_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/activity_runs_operations.py @@ -76,7 +76,6 @@ def query_by_pipeline_run( # Construct headers header_parameters = {} - header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) @@ -89,8 +88,9 @@ def query_by_pipeline_run( body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') # Construct and send request - request = self._client.post(url, query_parameters, header_parameters, body_content) - response = self._client.send(request, stream=False, **operation_config) + request = self._client.post(url, query_parameters) + response = self._client.send( + request, header_parameters, body_content, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/datasets_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/datasets_operations.py index 278815d03479..d2768372d196 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/datasets_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/datasets_operations.py @@ -77,7 +77,7 @@ def internal_paging(next_link=None, raw=False): # Construct headers header_parameters = {} - header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: @@ -86,8 +86,9 @@ def internal_paging(next_link=None, raw=False): header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request - request = self._client.get(url, query_parameters, header_parameters) - response = self._client.send(request, stream=False, **operation_config) + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, stream=False, 
**operation_config) if response.status_code not in [200]: exp = CloudError(response) @@ -151,7 +152,6 @@ def create_or_update( # Construct headers header_parameters = {} - header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) @@ -166,8 +166,9 @@ def create_or_update( body_content = self._serialize.body(dataset, 'DatasetResource') # Construct and send request - request = self._client.put(url, query_parameters, header_parameters, body_content) - response = self._client.send(request, stream=False, **operation_config) + request = self._client.put(url, query_parameters) + response = self._client.send( + request, header_parameters, body_content, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) @@ -226,7 +227,7 @@ def get( # Construct headers header_parameters = {} - header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: @@ -237,8 +238,8 @@ def get( header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request - request = self._client.get(url, query_parameters, header_parameters) - response = self._client.send(request, stream=False, **operation_config) + request = self._client.get(url, query_parameters) + response = self._client.send(request, header_parameters, stream=False, **operation_config) if response.status_code not in [200, 304]: exp = CloudError(response) @@ -292,6 +293,7 @@ def delete( # Construct headers header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: @@ -300,8 +302,8 @@ def delete( header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request - request = self._client.delete(url, query_parameters, header_parameters) - response = self._client.send(request, stream=False, **operation_config) + request = self._client.delete(url, query_parameters) + response = self._client.send(request, header_parameters, stream=False, **operation_config) if response.status_code not in [200, 204]: exp = CloudError(response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/exposure_control_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/exposure_control_operations.py index 4a648d96586c..bacdfbf2d591 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/exposure_control_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/exposure_control_operations.py @@ -73,7 +73,6 @@ def get_feature_value( # Construct headers header_parameters = {} - header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) @@ -86,8 +85,9 @@ def get_feature_value( body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest') # Construct 
and send request - request = self._client.post(url, query_parameters, header_parameters, body_content) - response = self._client.send(request, stream=False, **operation_config) + request = self._client.post(url, query_parameters) + response = self._client.send( + request, header_parameters, body_content, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) @@ -145,7 +145,6 @@ def get_feature_value_by_factory( # Construct headers header_parameters = {} - header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) @@ -158,8 +157,9 @@ def get_feature_value_by_factory( body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest') # Construct and send request - request = self._client.post(url, query_parameters, header_parameters, body_content) - response = self._client.send(request, stream=False, **operation_config) + request = self._client.post(url, query_parameters) + response = self._client.send( + request, header_parameters, body_content, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/factories_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/factories_operations.py index b06c12f3e8c5..938fded17c2e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/factories_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/factories_operations.py @@ -71,7 +71,7 @@ def internal_paging(next_link=None, raw=False): # Construct headers header_parameters = {} - header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: @@ -80,8 +80,9 @@ def internal_paging(next_link=None, raw=False): header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request - request = self._client.get(url, query_parameters, header_parameters) - response = self._client.send(request, stream=False, **operation_config) + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) @@ -138,7 +139,6 @@ def configure_factory_repo( # Construct headers header_parameters = {} - header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) @@ -151,8 +151,9 @@ def configure_factory_repo( body_content = self._serialize.body(factory_repo_update, 'FactoryRepoUpdate') # Construct and send request - request = self._client.post(url, query_parameters, header_parameters, body_content) - response = self._client.send(request, stream=False, **operation_config) + request = self._client.post(url, query_parameters) + response = self._client.send( + request, header_parameters, body_content, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) @@ 
-208,7 +209,7 @@ def internal_paging(next_link=None, raw=False): # Construct headers header_parameters = {} - header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: @@ -217,8 +218,9 @@ def internal_paging(next_link=None, raw=False): header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request - request = self._client.get(url, query_parameters, header_parameters) - response = self._client.send(request, stream=False, **operation_config) + request = self._client.get(url, query_parameters) + response = self._client.send( + request, header_parameters, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) @@ -277,7 +279,6 @@ def create_or_update( # Construct headers header_parameters = {} - header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) @@ -292,8 +293,9 @@ def create_or_update( body_content = self._serialize.body(factory, 'Factory') # Construct and send request - request = self._client.put(url, query_parameters, header_parameters, body_content) - response = self._client.send(request, stream=False, **operation_config) + request = self._client.put(url, query_parameters) + response = self._client.send( + request, header_parameters, body_content, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) @@ -351,7 +353,6 @@ def update( # Construct headers header_parameters = {} - header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) @@ -364,8 +365,9 @@ def update( body_content = self._serialize.body(factory_update_parameters, 'FactoryUpdateParameters') # Construct and send request - request = self._client.patch(url, query_parameters, header_parameters, body_content) - response = self._client.send(request, stream=False, **operation_config) + request = self._client.patch(url, query_parameters) + response = self._client.send( + request, header_parameters, body_content, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) @@ -421,7 +423,7 @@ def get( # Construct headers header_parameters = {} - header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: @@ -432,8 +434,8 @@ def get( header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request - request = self._client.get(url, query_parameters, header_parameters) - response = self._client.send(request, stream=False, **operation_config) + request = self._client.get(url, query_parameters) + response = self._client.send(request, header_parameters, stream=False, **operation_config) if response.status_code not in [200, 304]: exp = CloudError(response) @@ -484,6 +486,7 @@ def delete( # Construct headers header_parameters = {} 
+ header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: @@ -492,8 +495,8 @@ def delete( header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request - request = self._client.delete(url, query_parameters, header_parameters) - response = self._client.send(request, stream=False, **operation_config) + request = self._client.delete(url, query_parameters) + response = self._client.send(request, header_parameters, stream=False, **operation_config) if response.status_code not in [200, 204]: exp = CloudError(response) @@ -542,7 +545,6 @@ def get_git_hub_access_token( # Construct headers header_parameters = {} - header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) @@ -555,8 +557,9 @@ def get_git_hub_access_token( body_content = self._serialize.body(git_hub_access_token_request, 'GitHubAccessTokenRequest') # Construct and send request - request = self._client.post(url, query_parameters, header_parameters, body_content) - response = self._client.send(request, stream=False, **operation_config) + request = self._client.post(url, query_parameters) + response = self._client.send( + request, header_parameters, body_content, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) @@ -610,7 +613,6 @@ def get_data_plane_access( # Construct headers header_parameters = {} - header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) @@ -623,8 +625,9 @@ def get_data_plane_access( body_content = self._serialize.body(policy, 'UserAccessPolicy') # Construct and send request - request = self._client.post(url, query_parameters, header_parameters, body_content) - response = self._client.send(request, stream=False, **operation_config) + request = self._client.post(url, query_parameters) + response = self._client.send( + request, header_parameters, body_content, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_nodes_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_nodes_operations.py index 81467b9e3385..c5609f8e3f57 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_nodes_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_nodes_operations.py @@ -78,7 +78,7 @@ def get( # Construct headers header_parameters = {} - header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: @@ -87,8 +87,8 @@ def get( header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request - request = self._client.get(url, 
query_parameters, header_parameters) - response = self._client.send(request, stream=False, **operation_config) + request = self._client.get(url, query_parameters) + response = self._client.send(request, header_parameters, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) @@ -145,6 +145,7 @@ def delete( # Construct headers header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: @@ -153,8 +154,8 @@ def delete( header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request - request = self._client.delete(url, query_parameters, header_parameters) - response = self._client.send(request, stream=False, **operation_config) + request = self._client.delete(url, query_parameters) + response = self._client.send(request, header_parameters, stream=False, **operation_config) if response.status_code not in [200, 204]: exp = CloudError(response) @@ -213,7 +214,6 @@ def update( # Construct headers header_parameters = {} - header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) @@ -226,8 +226,9 @@ def update( body_content = self._serialize.body(update_integration_runtime_node_request, 'UpdateIntegrationRuntimeNodeRequest') # Construct and send request - request = self._client.patch(url, query_parameters, header_parameters, body_content) - response = self._client.send(request, stream=False, **operation_config) + request = self._client.patch(url, query_parameters) + response = self._client.send( + request, header_parameters, body_content, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) @@ -286,7 +287,7 @@ def get_ip_address( # Construct headers header_parameters = {} - header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: @@ -295,8 +296,8 @@ def get_ip_address( header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request - request = self._client.post(url, query_parameters, header_parameters) - response = self._client.send(request, stream=False, **operation_config) + request = self._client.post(url, query_parameters) + response = self._client.send(request, header_parameters, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_object_metadata_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_object_metadata_operations.py index 230f12d023c3..b997792f6800 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_object_metadata_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_object_metadata_operations.py @@ -12,8 +12,8 @@ import uuid from msrest.pipeline import ClientRawResponse from 
msrestazure.azure_exceptions import CloudError -from msrest.polling import LROPoller, NoPolling -from msrestazure.polling.arm_polling import ARMPolling +from msrest.exceptions import DeserializationError +from msrestazure.azure_operation import AzureOperationPoller from .. import models @@ -58,7 +58,7 @@ def _refresh_initial( # Construct headers header_parameters = {} - header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: @@ -67,8 +67,8 @@ def _refresh_initial( header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request - request = self._client.post(url, query_parameters, header_parameters) - response = self._client.send(request, stream=False, **operation_config) + request = self._client.post(url, query_parameters) + response = self._client.send(request, header_parameters, stream=False, **operation_config) if response.status_code not in [200, 202]: exp = CloudError(response) @@ -87,7 +87,7 @@ def _refresh_initial( return deserialized def refresh( - self, resource_group_name, factory_name, integration_runtime_name, custom_headers=None, raw=False, polling=True, **operation_config): + self, resource_group_name, factory_name, integration_runtime_name, custom_headers=None, raw=False, **operation_config): """Refresh a SSIS integration runtime object metadata. :param resource_group_name: The resource group name. @@ -97,17 +97,13 @@ def refresh( :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str :param dict custom_headers: headers that will be added to the request - :param bool raw: The poller return type is ClientRawResponse, the - direct response alongside the deserialized response - :param polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :return: An instance of LROPoller that returns - SsisObjectMetadataStatusResponse or - ClientRawResponse if raw==True + :param bool raw: returns the direct response alongside the + deserialized response + :return: An instance of AzureOperationPoller that returns + SsisObjectMetadataStatusResponse or ClientRawResponse if raw=true :rtype: ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.datafactory.models.SsisObjectMetadataStatusResponse] - or - ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.datafactory.models.SsisObjectMetadataStatusResponse]] + or ~msrest.pipeline.ClientRawResponse :raises: :class:`CloudError` """ raw_result = self._refresh_initial( @@ -118,8 +114,30 @@ def refresh( raw=True, **operation_config ) + if raw: + return raw_result + + # Construct and send request + def long_running_send(): + return raw_result.response + + def get_long_running_status(status_link, headers=None): + + request = self._client.get(status_link) + if headers: + request.headers.update(headers) + header_parameters = {} + header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id'] + return self._client.send( + request, header_parameters, stream=False, **operation_config) def get_long_running_output(response): + + if response.status_code not in [200, 202]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + deserialized = 
self._deserialize('SsisObjectMetadataStatusResponse', response) if raw: @@ -128,13 +146,12 @@ def get_long_running_output(response): return deserialized - lro_delay = operation_config.get( + long_running_operation_timeout = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) - if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AzureOperationPoller( + long_running_send, get_long_running_output, + get_long_running_status, long_running_operation_timeout) refresh.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/refreshObjectMetadata'} def get( @@ -181,7 +198,6 @@ def get( # Construct headers header_parameters = {} - header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) @@ -197,8 +213,9 @@ def get( body_content = None # Construct and send request - request = self._client.post(url, query_parameters, header_parameters, body_content) - response = self._client.send(request, stream=False, **operation_config) + request = self._client.post(url, query_parameters) + response = self._client.send( + request, header_parameters, body_content, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtimes_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtimes_operations.py index 0a64be3b1441..1a2ab3eb168b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtimes_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtimes_operations.py @@ -12,8 +12,8 @@ import uuid from msrest.pipeline import ClientRawResponse from msrestazure.azure_exceptions import CloudError -from msrest.polling import LROPoller, NoPolling -from msrestazure.polling.arm_polling import ARMPolling +from msrest.exceptions import DeserializationError +from msrestazure.azure_operation import AzureOperationPoller from .. 
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtimes_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtimes_operations.py
index 0a64be3b1441..1a2ab3eb168b 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtimes_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtimes_operations.py
@@ -12,8 +12,8 @@
 import uuid
 from msrest.pipeline import ClientRawResponse
 from msrestazure.azure_exceptions import CloudError
-from msrest.polling import LROPoller, NoPolling
-from msrestazure.polling.arm_polling import ARMPolling
+from msrest.exceptions import DeserializationError
+from msrestazure.azure_operation import AzureOperationPoller
 
 from .. import models
 
@@ -79,7 +79,7 @@ def internal_paging(next_link=None, raw=False):
 
             # Construct headers
             header_parameters = {}
-            header_parameters['Accept'] = 'application/json'
+            header_parameters['Content-Type'] = 'application/json; charset=utf-8'
             if self.config.generate_client_request_id:
                 header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
             if custom_headers:
@@ -88,8 +88,9 @@ def internal_paging(next_link=None, raw=False):
                 header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
 
             # Construct and send request
-            request = self._client.get(url, query_parameters, header_parameters)
-            response = self._client.send(request, stream=False, **operation_config)
+            request = self._client.get(url, query_parameters)
+            response = self._client.send(
+                request, header_parameters, stream=False, **operation_config)
 
             if response.status_code not in [200]:
                 exp = CloudError(response)
@@ -153,7 +154,6 @@ def create_or_update(
 
         # Construct headers
         header_parameters = {}
-        header_parameters['Accept'] = 'application/json'
         header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
@@ -168,8 +168,9 @@ def create_or_update(
         body_content = self._serialize.body(integration_runtime, 'IntegrationRuntimeResource')
 
         # Construct and send request
-        request = self._client.put(url, query_parameters, header_parameters, body_content)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.put(url, query_parameters)
+        response = self._client.send(
+            request, header_parameters, body_content, stream=False, **operation_config)
 
         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -228,7 +229,7 @@ def get(
 
         # Construct headers
         header_parameters = {}
-        header_parameters['Accept'] = 'application/json'
+        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -239,8 +240,8 @@ def get(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
 
         # Construct and send request
-        request = self._client.get(url, query_parameters, header_parameters)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.get(url, query_parameters)
+        response = self._client.send(request, header_parameters, stream=False, **operation_config)
 
         if response.status_code not in [200, 304]:
             exp = CloudError(response)
@@ -307,7 +308,6 @@ def update(
 
         # Construct headers
         header_parameters = {}
-        header_parameters['Accept'] = 'application/json'
         header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
@@ -320,8 +320,9 @@ def update(
         body_content = self._serialize.body(update_integration_runtime_request, 'UpdateIntegrationRuntimeRequest')
 
         # Construct and send request
-        request = self._client.patch(url, query_parameters, header_parameters, body_content)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.patch(url, query_parameters)
+        response = self._client.send(
+            request, header_parameters, body_content, stream=False, **operation_config)
 
         if response.status_code not in [200]:
             exp = CloudError(response)
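Usage sketch (illustrative): the paging hunks above only reroute header_parameters into _client.send(), so callers keep iterating the paged result unchanged; placeholder names:

    # internal_paging() fetches pages lazily as the loop advances.
    for runtime in client.integration_runtimes.list_by_factory('my-rg', 'my-factory'):
        print(runtime.name)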
@@ -375,6 +376,7 @@ def delete(
 
         # Construct headers
         header_parameters = {}
+        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -383,8 +385,8 @@ def delete(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
 
         # Construct and send request
-        request = self._client.delete(url, query_parameters, header_parameters)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.delete(url, query_parameters)
+        response = self._client.send(request, header_parameters, stream=False, **operation_config)
 
         if response.status_code not in [200, 204]:
             exp = CloudError(response)
@@ -434,7 +436,7 @@ def get_status(
 
         # Construct headers
         header_parameters = {}
-        header_parameters['Accept'] = 'application/json'
+        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -443,8 +445,8 @@ def get_status(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
 
         # Construct and send request
-        request = self._client.post(url, query_parameters, header_parameters)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters)
+        response = self._client.send(request, header_parameters, stream=False, **operation_config)
 
         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -502,7 +504,7 @@ def get_connection_info(
 
         # Construct headers
         header_parameters = {}
-        header_parameters['Accept'] = 'application/json'
+        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -511,8 +513,8 @@ def get_connection_info(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
 
         # Construct and send request
-        request = self._client.post(url, query_parameters, header_parameters)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters)
+        response = self._client.send(request, header_parameters, stream=False, **operation_config)
 
         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -573,7 +575,6 @@ def regenerate_auth_key(
 
         # Construct headers
         header_parameters = {}
-        header_parameters['Accept'] = 'application/json'
         header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
@@ -586,8 +587,9 @@ def regenerate_auth_key(
         body_content = self._serialize.body(regenerate_key_parameters, 'IntegrationRuntimeRegenerateKeyParameters')
 
         # Construct and send request
-        request = self._client.post(url, query_parameters, header_parameters, body_content)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters)
+        response = self._client.send(
+            request, header_parameters, body_content, stream=False, **operation_config)
 
         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -642,7 +644,7 @@ def list_auth_keys(
 
         # Construct headers
         header_parameters = {}
-        header_parameters['Accept'] = 'application/json'
+        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -651,8 +653,8 @@ def list_auth_keys(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
 
         # Construct and send request
-        request = self._client.post(url, query_parameters, header_parameters)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters)
+        response = self._client.send(request, header_parameters, stream=False, **operation_config)
 
         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -690,7 +692,7 @@ def _start_initial(
 
         # Construct headers
         header_parameters = {}
-        header_parameters['Accept'] = 'application/json'
+        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -699,8 +701,8 @@ def _start_initial(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
 
         # Construct and send request
-        request = self._client.post(url, query_parameters, header_parameters)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters)
+        response = self._client.send(request, header_parameters, stream=False, **operation_config)
 
         if response.status_code not in [200, 202]:
             exp = CloudError(response)
@@ -719,7 +721,7 @@ def _start_initial(
 
         return deserialized
 
     def start(
-            self, resource_group_name, factory_name, integration_runtime_name, custom_headers=None, raw=False, polling=True, **operation_config):
+            self, resource_group_name, factory_name, integration_runtime_name, custom_headers=None, raw=False, **operation_config):
         """Starts a ManagedReserved type integration runtime.
 
         :param resource_group_name: The resource group name.
@@ -729,17 +731,13 @@ def start(
         :param integration_runtime_name: The integration runtime name.
         :type integration_runtime_name: str
         :param dict custom_headers: headers that will be added to the request
-        :param bool raw: The poller return type is ClientRawResponse, the
-         direct response alongside the deserialized response
-        :param polling: True for ARMPolling, False for no polling, or a
-         polling object for personal polling strategy
-        :return: An instance of LROPoller that returns
-         IntegrationRuntimeStatusResponse or
-         ClientRawResponse if raw==True
+        :param bool raw: returns the direct response alongside the
+         deserialized response
+        :return: An instance of AzureOperationPoller that returns
+         IntegrationRuntimeStatusResponse or ClientRawResponse if raw=true
         :rtype:
         ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse]
-         or
-         ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse]]
+         or ~msrest.pipeline.ClientRawResponse
         :raises: :class:`CloudError`
         """
         raw_result = self._start_initial(
@@ -750,8 +748,30 @@ def start(
             raw=True,
             **operation_config
         )
+        if raw:
+            return raw_result
+
+        # Construct and send request
+        def long_running_send():
+            return raw_result.response
+
+        def get_long_running_status(status_link, headers=None):
+
+            request = self._client.get(status_link)
+            if headers:
+                request.headers.update(headers)
+            header_parameters = {}
+            header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id']
+            return self._client.send(
+                request, header_parameters, stream=False, **operation_config)
 
         def get_long_running_output(response):
+
+            if response.status_code not in [200, 202]:
+                exp = CloudError(response)
+                exp.request_id = response.headers.get('x-ms-request-id')
+                raise exp
+
             deserialized = self._deserialize('IntegrationRuntimeStatusResponse', response)
 
             if raw:
@@ -760,13 +780,12 @@ def get_long_running_output(response):
 
             return deserialized
 
-        lro_delay = operation_config.get(
+        long_running_operation_timeout = operation_config.get(
             'long_running_operation_timeout',
             self.config.long_running_operation_timeout)
-        if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
-        elif polling is False: polling_method = NoPolling()
-        else: polling_method = polling
-        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+        return AzureOperationPoller(
+            long_running_send, get_long_running_output,
+            get_long_running_status, long_running_operation_timeout)
     start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'}
 
@@ -788,6 +807,7 @@ def _stop_initial(
 
         # Construct headers
         header_parameters = {}
+        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -796,8 +816,8 @@ def _stop_initial(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
 
         # Construct and send request
-        request = self._client.post(url, query_parameters, header_parameters)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters)
+        response = self._client.send(request, header_parameters, stream=False, **operation_config)
 
         if response.status_code not in [200, 202]:
             exp = CloudError(response)
@@ -809,7 +829,7 @@ def _stop_initial(
 
             return client_raw_response
 
     def stop(
-            self, resource_group_name, factory_name, integration_runtime_name, custom_headers=None, raw=False, polling=True, **operation_config):
+            self, resource_group_name, factory_name, integration_runtime_name, custom_headers=None, raw=False, **operation_config):
         """Stops a ManagedReserved type integration runtime.
 
         :param resource_group_name: The resource group name.
@@ -819,14 +839,12 @@ def stop(
         :param integration_runtime_name: The integration runtime name.
         :type integration_runtime_name: str
         :param dict custom_headers: headers that will be added to the request
-        :param bool raw: The poller return type is ClientRawResponse, the
-         direct response alongside the deserialized response
-        :param polling: True for ARMPolling, False for no polling, or a
-         polling object for personal polling strategy
-        :return: An instance of LROPoller that returns None or
-         ClientRawResponse if raw==True
+        :param bool raw: returns the direct response alongside the
+         deserialized response
+        :return: An instance of AzureOperationPoller that returns None or
+         ClientRawResponse if raw=true
         :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
-         ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
+         ~msrest.pipeline.ClientRawResponse
         :raises: :class:`CloudError`
         """
         raw_result = self._stop_initial(
@@ -837,19 +855,40 @@ def stop(
             raw=True,
             **operation_config
         )
+        if raw:
+            return raw_result
+
+        # Construct and send request
+        def long_running_send():
+            return raw_result.response
+
+        def get_long_running_status(status_link, headers=None):
+
+            request = self._client.get(status_link)
+            if headers:
+                request.headers.update(headers)
+            header_parameters = {}
+            header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id']
+            return self._client.send(
+                request, header_parameters, stream=False, **operation_config)
 
         def get_long_running_output(response):
+
+            if response.status_code not in [200, 202]:
+                exp = CloudError(response)
+                exp.request_id = response.headers.get('x-ms-request-id')
+                raise exp
+
             if raw:
                 client_raw_response = ClientRawResponse(None, response)
                 return client_raw_response
 
-        lro_delay = operation_config.get(
+        long_running_operation_timeout = operation_config.get(
             'long_running_operation_timeout',
             self.config.long_running_operation_timeout)
-        if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
-        elif polling is False: polling_method = NoPolling()
-        else: polling_method = polling
-        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+        return AzureOperationPoller(
+            long_running_send, get_long_running_output,
+            get_long_running_status, long_running_operation_timeout)
     stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'}
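Usage sketch (illustrative): start() deserializes an IntegrationRuntimeStatusResponse while stop() carries no payload, so waiting suffices for the latter; placeholder names throughout:

    poller = client.integration_runtimes.start('my-rg', 'my-factory', 'my-ir')
    status = poller.result()  # IntegrationRuntimeStatusResponse
    print(status.name)
    client.integration_runtimes.stop('my-rg', 'my-factory', 'my-ir').wait()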
 
     def sync_credentials(
@@ -892,6 +931,7 @@
 
         # Construct headers
         header_parameters = {}
+        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -900,8 +940,8 @@
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
 
         # Construct and send request
-        request = self._client.post(url, query_parameters, header_parameters)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters)
+        response = self._client.send(request, header_parameters, stream=False, **operation_config)
 
         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -952,7 +992,7 @@ def get_monitoring_data(
 
         # Construct headers
         header_parameters = {}
-        header_parameters['Accept'] = 'application/json'
+        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -961,8 +1001,8 @@ def get_monitoring_data(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
 
         # Construct and send request
-        request = self._client.post(url, query_parameters, header_parameters)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters)
+        response = self._client.send(request, header_parameters, stream=False, **operation_config)
 
         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -1017,6 +1057,7 @@ def upgrade(
 
         # Construct headers
         header_parameters = {}
+        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -1025,8 +1066,8 @@ def upgrade(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
 
         # Construct and send request
-        request = self._client.post(url, query_parameters, header_parameters)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters)
+        response = self._client.send(request, header_parameters, stream=False, **operation_config)
 
         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -1091,8 +1132,9 @@ def remove_links(
         body_content = self._serialize.body(linked_integration_runtime_request, 'LinkedIntegrationRuntimeRequest')
 
         # Construct and send request
-        request = self._client.post(url, query_parameters, header_parameters, body_content)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters)
+        response = self._client.send(
+            request, header_parameters, body_content, stream=False, **operation_config)
 
         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -1147,7 +1189,6 @@ def create_linked_integration_runtime(
 
         # Construct headers
         header_parameters = {}
-        header_parameters['Accept'] = 'application/json'
         header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
@@ -1160,8 +1201,9 @@ def create_linked_integration_runtime(
         body_content = self._serialize.body(create_linked_integration_runtime_request, 'CreateLinkedIntegrationRuntimeRequest')
 
         # Construct and send request
-        request = self._client.post(url, query_parameters, header_parameters, body_content)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters)
+        response = self._client.send(
+            request, header_parameters, body_content, stream=False, **operation_config)
 
         if response.status_code not in [200]:
             exp = CloudError(response)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/linked_services_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/linked_services_operations.py
index e6878336df91..98ef184787ba 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/linked_services_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/linked_services_operations.py
@@ -77,7 +77,7 @@ def internal_paging(next_link=None, raw=False):
 
             # Construct headers
             header_parameters = {}
-            header_parameters['Accept'] = 'application/json'
+            header_parameters['Content-Type'] = 'application/json; charset=utf-8'
             if self.config.generate_client_request_id:
                 header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
             if custom_headers:
@@ -86,8 +86,9 @@ def internal_paging(next_link=None, raw=False):
                 header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
 
             # Construct and send request
-            request = self._client.get(url, query_parameters, header_parameters)
-            response = self._client.send(request, stream=False, **operation_config)
+            request = self._client.get(url, query_parameters)
+            response = self._client.send(
+                request, header_parameters, stream=False, **operation_config)
 
             if response.status_code not in [200]:
                 exp = CloudError(response)
@@ -151,7 +152,6 @@ def create_or_update(
 
         # Construct headers
         header_parameters = {}
-        header_parameters['Accept'] = 'application/json'
         header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
@@ -166,8 +166,9 @@ def create_or_update(
         body_content = self._serialize.body(linked_service, 'LinkedServiceResource')
 
         # Construct and send request
-        request = self._client.put(url, query_parameters, header_parameters, body_content)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.put(url, query_parameters)
+        response = self._client.send(
+            request, header_parameters, body_content, stream=False, **operation_config)
 
         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -226,7 +227,7 @@ def get(
 
         # Construct headers
         header_parameters = {}
-        header_parameters['Accept'] = 'application/json'
+        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -237,8 +238,8 @@ def get(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
 
         # Construct and send request
-        request = self._client.get(url, query_parameters, header_parameters)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.get(url, query_parameters)
+        response = self._client.send(request, header_parameters, stream=False, **operation_config)
 
         if response.status_code not in [200, 304]:
             exp = CloudError(response)
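Usage sketch (illustrative, and assuming this generation's flattened signature where the caller passes the linked-service properties model that create_or_update() wraps into the LinkedServiceResource serialized above; all values are placeholders):

    from azure.mgmt.datafactory import models

    storage = models.AzureStorageLinkedService(
        connection_string=models.SecureString(value='DefaultEndpointsProtocol=https;AccountName=...'))
    client.linked_services.create_or_update('my-rg', 'my-factory', 'my-storage-ls', storage)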
@@ -292,6 +293,7 @@ def delete(
 
         # Construct headers
         header_parameters = {}
+        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -300,8 +302,8 @@
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
 
         # Construct and send request
-        request = self._client.delete(url, query_parameters, header_parameters)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.delete(url, query_parameters)
+        response = self._client.send(request, header_parameters, stream=False, **operation_config)
 
         if response.status_code not in [200, 204]:
             exp = CloudError(response)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/operations.py
index 2273e12d5ada..8af242714177 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/operations.py
@@ -67,7 +67,7 @@ def internal_paging(next_link=None, raw=False):
 
             # Construct headers
             header_parameters = {}
-            header_parameters['Accept'] = 'application/json'
+            header_parameters['Content-Type'] = 'application/json; charset=utf-8'
             if self.config.generate_client_request_id:
                 header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
             if custom_headers:
@@ -76,8 +76,9 @@ def internal_paging(next_link=None, raw=False):
                 header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
 
             # Construct and send request
-            request = self._client.get(url, query_parameters, header_parameters)
-            response = self._client.send(request, stream=False, **operation_config)
+            request = self._client.get(url, query_parameters)
+            response = self._client.send(
+                request, header_parameters, stream=False, **operation_config)
 
             if response.status_code not in [200]:
                 exp = CloudError(response)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipeline_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipeline_runs_operations.py
index de8744612d20..3ce268e54664 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipeline_runs_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipeline_runs_operations.py
@@ -73,7 +73,6 @@ def query_by_factory(
 
         # Construct headers
         header_parameters = {}
-        header_parameters['Accept'] = 'application/json'
         header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
@@ -86,8 +85,9 @@ def query_by_factory(
         body_content = self._serialize.body(filter_parameters, 'RunFilterParameters')
 
         # Construct and send request
-        request = self._client.post(url, query_parameters, header_parameters, body_content)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters)
+        response = self._client.send(
+            request, header_parameters, body_content, stream=False, **operation_config)
 
         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -142,7 +142,7 @@ def get(
 
         # Construct headers
         header_parameters = {}
-        header_parameters['Accept'] = 'application/json'
+        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -151,8 +151,8 @@ def get(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
 
         # Construct and send request
-        request = self._client.get(url, query_parameters, header_parameters)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.get(url, query_parameters)
+        response = self._client.send(request, header_parameters, stream=False, **operation_config)
 
         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -211,6 +211,7 @@ def cancel(
 
         # Construct headers
         header_parameters = {}
+        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -219,8 +220,8 @@ def cancel(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
 
         # Construct and send request
-        request = self._client.post(url, query_parameters, header_parameters)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters)
+        response = self._client.send(request, header_parameters, stream=False, **operation_config)
 
         if response.status_code not in [200]:
             exp = CloudError(response)
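Usage sketch (illustrative): query_by_factory() posts the RunFilterParameters body serialized above; the filter requires the two last-updated timestamps, everything else is optional:

    from datetime import datetime, timedelta
    from azure.mgmt.datafactory import models

    filters = models.RunFilterParameters(
        last_updated_after=datetime.utcnow() - timedelta(days=1),
        last_updated_before=datetime.utcnow())
    response = client.pipeline_runs.query_by_factory('my-rg', 'my-factory', filters)
    for run in response.value:
        print(run.run_id, run.status)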
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipelines_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipelines_operations.py
index 343396e705ac..b0f0cb0ec42f 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipelines_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipelines_operations.py
@@ -77,7 +77,7 @@ def internal_paging(next_link=None, raw=False):
 
             # Construct headers
             header_parameters = {}
-            header_parameters['Accept'] = 'application/json'
+            header_parameters['Content-Type'] = 'application/json; charset=utf-8'
             if self.config.generate_client_request_id:
                 header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
             if custom_headers:
@@ -86,8 +86,9 @@ def internal_paging(next_link=None, raw=False):
                 header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
 
             # Construct and send request
-            request = self._client.get(url, query_parameters, header_parameters)
-            response = self._client.send(request, stream=False, **operation_config)
+            request = self._client.get(url, query_parameters)
+            response = self._client.send(
+                request, header_parameters, stream=False, **operation_config)
 
             if response.status_code not in [200]:
                 exp = CloudError(response)
@@ -149,7 +150,6 @@ def create_or_update(
 
         # Construct headers
         header_parameters = {}
-        header_parameters['Accept'] = 'application/json'
         header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
@@ -164,8 +164,9 @@ def create_or_update(
         body_content = self._serialize.body(pipeline, 'PipelineResource')
 
         # Construct and send request
-        request = self._client.put(url, query_parameters, header_parameters, body_content)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.put(url, query_parameters)
+        response = self._client.send(
+            request, header_parameters, body_content, stream=False, **operation_config)
 
         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -224,7 +225,7 @@ def get(
 
         # Construct headers
         header_parameters = {}
-        header_parameters['Accept'] = 'application/json'
+        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -235,8 +236,8 @@ def get(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
 
         # Construct and send request
-        request = self._client.get(url, query_parameters, header_parameters)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.get(url, query_parameters)
+        response = self._client.send(request, header_parameters, stream=False, **operation_config)
 
         if response.status_code not in [200, 304]:
             exp = CloudError(response)
@@ -290,6 +291,7 @@ def delete(
 
         # Construct headers
         header_parameters = {}
+        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -298,8 +300,8 @@ def delete(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
 
         # Construct and send request
-        request = self._client.delete(url, query_parameters, header_parameters)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.delete(url, query_parameters)
+        response = self._client.send(request, header_parameters, stream=False, **operation_config)
 
         if response.status_code not in [200, 204]:
             exp = CloudError(response)
@@ -367,7 +369,6 @@ def create_run(
 
         # Construct headers
         header_parameters = {}
-        header_parameters['Accept'] = 'application/json'
         header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
@@ -383,8 +384,9 @@ def create_run(
             body_content = None
 
         # Construct and send request
-        request = self._client.post(url, query_parameters, header_parameters, body_content)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters)
+        response = self._client.send(
+            request, header_parameters, body_content, stream=False, **operation_config)
 
         if response.status_code not in [200]:
             exp = CloudError(response)
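Usage sketch (illustrative): create_run() sends the optional parameters dict as the body_content built above and returns the identifier of the new run; placeholders throughout:

    run = client.pipelines.create_run(
        'my-rg', 'my-factory', 'my-pipeline',
        parameters={'inputPath': 'raw/2019/06/07'})
    print(run.run_id)  # usable with pipeline_runs.get() or pipeline_runs.cancel()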
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/rerun_triggers_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/rerun_triggers_operations.py
index 58e0066a60dd..d907454f4d93 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/rerun_triggers_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/rerun_triggers_operations.py
@@ -12,8 +12,8 @@
 import uuid
 from msrest.pipeline import ClientRawResponse
 from msrestazure.azure_exceptions import CloudError
-from msrest.polling import LROPoller, NoPolling
-from msrestazure.polling.arm_polling import ARMPolling
+from msrest.exceptions import DeserializationError
+from msrestazure.azure_operation import AzureOperationPoller
 
 from .. import models
 
@@ -82,7 +82,6 @@ def create(
 
         # Construct headers
         header_parameters = {}
-        header_parameters['Accept'] = 'application/json'
         header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
@@ -95,8 +94,9 @@ def create(
         body_content = self._serialize.body(rerun_tumbling_window_trigger_action_parameters, 'RerunTumblingWindowTriggerActionParameters')
 
         # Construct and send request
-        request = self._client.put(url, query_parameters, header_parameters, body_content)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.put(url, query_parameters)
+        response = self._client.send(
+            request, header_parameters, body_content, stream=False, **operation_config)
 
         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -135,6 +135,7 @@ def _start_initial(
 
         # Construct headers
         header_parameters = {}
+        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -143,8 +144,8 @@ def _start_initial(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
 
         # Construct and send request
-        request = self._client.post(url, query_parameters, header_parameters)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters)
+        response = self._client.send(request, header_parameters, stream=False, **operation_config)
 
         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -156,7 +157,7 @@ def _start_initial(
 
         return client_raw_response
 
     def start(
-            self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, polling=True, **operation_config):
+            self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, **operation_config):
         """Starts a trigger.
 
         :param resource_group_name: The resource group name.
@@ -168,14 +169,12 @@ def start(
         :param rerun_trigger_name: The rerun trigger name.
         :type rerun_trigger_name: str
         :param dict custom_headers: headers that will be added to the request
-        :param bool raw: The poller return type is ClientRawResponse, the
-         direct response alongside the deserialized response
-        :param polling: True for ARMPolling, False for no polling, or a
-         polling object for personal polling strategy
-        :return: An instance of LROPoller that returns None or
-         ClientRawResponse if raw==True
+        :param bool raw: returns the direct response alongside the
+         deserialized response
+        :return: An instance of AzureOperationPoller that returns None or
+         ClientRawResponse if raw=true
         :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
-         ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
+         ~msrest.pipeline.ClientRawResponse
         :raises: :class:`CloudError`
         """
         raw_result = self._start_initial(
@@ -187,19 +186,40 @@ def start(
             raw=True,
             **operation_config
         )
+        if raw:
+            return raw_result
+
+        # Construct and send request
+        def long_running_send():
+            return raw_result.response
+
+        def get_long_running_status(status_link, headers=None):
+
+            request = self._client.get(status_link)
+            if headers:
+                request.headers.update(headers)
+            header_parameters = {}
+            header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id']
+            return self._client.send(
+                request, header_parameters, stream=False, **operation_config)
 
         def get_long_running_output(response):
+
+            if response.status_code not in [200]:
+                exp = CloudError(response)
+                exp.request_id = response.headers.get('x-ms-request-id')
+                raise exp
+
             if raw:
                 client_raw_response = ClientRawResponse(None, response)
                 return client_raw_response
 
-        lro_delay = operation_config.get(
+        long_running_operation_timeout = operation_config.get(
             'long_running_operation_timeout',
             self.config.long_running_operation_timeout)
-        if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
-        elif polling is False: polling_method = NoPolling()
-        else: polling_method = polling
-        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+        return AzureOperationPoller(
+            long_running_send, get_long_running_output,
+            get_long_running_status, long_running_operation_timeout)
     start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/rerunTriggers/{rerunTriggerName}/start'}
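Usage sketch (illustrative): the rerun-trigger start() poller deserializes nothing, so get_long_running_output() above only validates the status code; placeholder names:

    poller = client.rerun_triggers.start('my-rg', 'my-factory', 'my-trigger', 'my-rerun-trigger')
    poller.result()  # returns None; raises CloudError if the operation failed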
 
@@ -222,6 +242,7 @@ def _stop_initial(
 
         # Construct headers
         header_parameters = {}
+        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -230,8 +251,8 @@
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
 
         # Construct and send request
-        request = self._client.post(url, query_parameters, header_parameters)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters)
+        response = self._client.send(request, header_parameters, stream=False, **operation_config)
 
         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -243,7 +264,7 @@ def _stop_initial(
 
         return client_raw_response
 
     def stop(
-            self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, polling=True, **operation_config):
+            self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, **operation_config):
         """Stops a trigger.
 
         :param resource_group_name: The resource group name.
@@ -255,14 +276,12 @@ def stop(
         :param rerun_trigger_name: The rerun trigger name.
         :type rerun_trigger_name: str
         :param dict custom_headers: headers that will be added to the request
-        :param bool raw: The poller return type is ClientRawResponse, the
-         direct response alongside the deserialized response
-        :param polling: True for ARMPolling, False for no polling, or a
-         polling object for personal polling strategy
-        :return: An instance of LROPoller that returns None or
-         ClientRawResponse if raw==True
+        :param bool raw: returns the direct response alongside the
+         deserialized response
+        :return: An instance of AzureOperationPoller that returns None or
+         ClientRawResponse if raw=true
         :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
-         ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
+         ~msrest.pipeline.ClientRawResponse
         :raises: :class:`CloudError`
         """
         raw_result = self._stop_initial(
@@ -274,19 +293,40 @@ def stop(
             raw=True,
             **operation_config
         )
+        if raw:
+            return raw_result
+
+        # Construct and send request
+        def long_running_send():
+            return raw_result.response
+
+        def get_long_running_status(status_link, headers=None):
+
+            request = self._client.get(status_link)
+            if headers:
+                request.headers.update(headers)
+            header_parameters = {}
+            header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id']
+            return self._client.send(
+                request, header_parameters, stream=False, **operation_config)
 
         def get_long_running_output(response):
+
+            if response.status_code not in [200]:
+                exp = CloudError(response)
+                exp.request_id = response.headers.get('x-ms-request-id')
+                raise exp
+
             if raw:
                 client_raw_response = ClientRawResponse(None, response)
                 return client_raw_response
 
-        lro_delay = operation_config.get(
+        long_running_operation_timeout = operation_config.get(
             'long_running_operation_timeout',
             self.config.long_running_operation_timeout)
-        if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
-        elif polling is False: polling_method = NoPolling()
-        else: polling_method = polling
-        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+        return AzureOperationPoller(
+            long_running_send, get_long_running_output,
+            get_long_running_status, long_running_operation_timeout)
     stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/rerunTriggers/{rerunTriggerName}/stop'}
 
@@ -309,6 +349,7 @@ def _cancel_initial(
 
         # Construct headers
         header_parameters = {}
+        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -317,8 +358,8 @@ def _cancel_initial(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
 
         # Construct and send request
-        request = self._client.post(url, query_parameters, header_parameters)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters)
+        response = self._client.send(request, header_parameters, stream=False, **operation_config)
 
         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -330,7 +371,7 @@ def _cancel_initial(
 
         return client_raw_response
 
     def cancel(
-            self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, polling=True, **operation_config):
+            self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, **operation_config):
         """Cancels a trigger.
 
         :param resource_group_name: The resource group name.
@@ -342,14 +383,12 @@ def cancel(
         :param rerun_trigger_name: The rerun trigger name.
         :type rerun_trigger_name: str
         :param dict custom_headers: headers that will be added to the request
-        :param bool raw: The poller return type is ClientRawResponse, the
-         direct response alongside the deserialized response
-        :param polling: True for ARMPolling, False for no polling, or a
-         polling object for personal polling strategy
-        :return: An instance of LROPoller that returns None or
-         ClientRawResponse if raw==True
+        :param bool raw: returns the direct response alongside the
+         deserialized response
+        :return: An instance of AzureOperationPoller that returns None or
+         ClientRawResponse if raw=true
         :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
-         ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
+         ~msrest.pipeline.ClientRawResponse
         :raises: :class:`CloudError`
         """
         raw_result = self._cancel_initial(
@@ -361,19 +400,40 @@ def cancel(
             raw=True,
             **operation_config
         )
+        if raw:
+            return raw_result
+
+        # Construct and send request
+        def long_running_send():
+            return raw_result.response
+
+        def get_long_running_status(status_link, headers=None):
+
+            request = self._client.get(status_link)
+            if headers:
+                request.headers.update(headers)
+            header_parameters = {}
+            header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id']
+            return self._client.send(
+                request, header_parameters, stream=False, **operation_config)
 
         def get_long_running_output(response):
+
+            if response.status_code not in [200]:
+                exp = CloudError(response)
+                exp.request_id = response.headers.get('x-ms-request-id')
+                raise exp
+
             if raw:
                 client_raw_response = ClientRawResponse(None, response)
                 return client_raw_response
 
-        lro_delay = operation_config.get(
+        long_running_operation_timeout = operation_config.get(
             'long_running_operation_timeout',
             self.config.long_running_operation_timeout)
-        if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
-        elif polling is False: polling_method = NoPolling()
-        else: polling_method = polling
-        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+        return AzureOperationPoller(
+            long_running_send, get_long_running_output,
+            get_long_running_status, long_running_operation_timeout)
     cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/rerunTriggers/{rerunTriggerName}/cancel'}
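Usage sketch (illustrative): cancel() follows the same None-payload pattern; wait() accepts an optional timeout in seconds when the caller does not need the outcome inline:

    poller = client.rerun_triggers.cancel('my-rg', 'my-factory', 'my-trigger', 'my-rerun-trigger')
    poller.wait(timeout=600)  # returns once polling finishes or the timeout elapses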
 
     def list_by_trigger(
@@ -419,7 +479,7 @@ def internal_paging(next_link=None, raw=False):
 
             # Construct headers
             header_parameters = {}
-            header_parameters['Accept'] = 'application/json'
+            header_parameters['Content-Type'] = 'application/json; charset=utf-8'
             if self.config.generate_client_request_id:
                 header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
             if custom_headers:
@@ -428,8 +488,9 @@
                 header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
 
             # Construct and send request
-            request = self._client.get(url, query_parameters, header_parameters)
-            response = self._client.send(request, stream=False, **operation_config)
+            request = self._client.get(url, query_parameters)
+            response = self._client.send(
+                request, header_parameters, stream=False, **operation_config)
 
             if response.status_code not in [200]:
                 exp = CloudError(response)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/trigger_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/trigger_runs_operations.py
index e4e4774ae3bc..c0e9560f0a6e 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/trigger_runs_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/trigger_runs_operations.py
@@ -75,6 +75,7 @@ def rerun(
 
         # Construct headers
         header_parameters = {}
+        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -83,8 +84,8 @@ def rerun(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
 
         # Construct and send request
-        request = self._client.post(url, query_parameters, header_parameters)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters)
+        response = self._client.send(request, header_parameters, stream=False, **operation_config)
 
         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -132,7 +133,6 @@ def query_by_factory(
 
         # Construct headers
         header_parameters = {}
-        header_parameters['Accept'] = 'application/json'
         header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
@@ -145,8 +145,9 @@ def query_by_factory(
         body_content = self._serialize.body(filter_parameters, 'RunFilterParameters')
 
         # Construct and send request
-        request = self._client.post(url, query_parameters, header_parameters, body_content)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters)
+        response = self._client.send(
+            request, header_parameters, body_content, stream=False, **operation_config)
 
         if response.status_code not in [200]:
             exp = CloudError(response)
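Usage sketch (illustrative): rerun() is a plain synchronous POST; the run id would normally come from a trigger-runs query and is a placeholder here:

    client.trigger_runs.rerun('my-rg', 'my-factory', 'my-trigger', '<trigger-run-id>')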
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/triggers_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/triggers_operations.py
index d6a2d51cf85a..dc6791d33b40 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/triggers_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/triggers_operations.py
@@ -12,8 +12,8 @@
 import uuid
 from msrest.pipeline import ClientRawResponse
 from msrestazure.azure_exceptions import CloudError
-from msrest.polling import LROPoller, NoPolling
-from msrestazure.polling.arm_polling import ARMPolling
+from msrest.exceptions import DeserializationError
+from msrestazure.azure_operation import AzureOperationPoller
 
 from .. import models
 
@@ -79,7 +79,7 @@ def internal_paging(next_link=None, raw=False):
 
             # Construct headers
             header_parameters = {}
-            header_parameters['Accept'] = 'application/json'
+            header_parameters['Content-Type'] = 'application/json; charset=utf-8'
             if self.config.generate_client_request_id:
                 header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
             if custom_headers:
@@ -88,8 +88,9 @@ def internal_paging(next_link=None, raw=False):
                 header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
 
             # Construct and send request
-            request = self._client.get(url, query_parameters, header_parameters)
-            response = self._client.send(request, stream=False, **operation_config)
+            request = self._client.get(url, query_parameters)
+            response = self._client.send(
+                request, header_parameters, stream=False, **operation_config)
 
             if response.status_code not in [200]:
                 exp = CloudError(response)
@@ -153,7 +154,6 @@ def create_or_update(
 
         # Construct headers
         header_parameters = {}
-        header_parameters['Accept'] = 'application/json'
         header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
@@ -168,8 +168,9 @@ def create_or_update(
         body_content = self._serialize.body(trigger, 'TriggerResource')
 
         # Construct and send request
-        request = self._client.put(url, query_parameters, header_parameters, body_content)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.put(url, query_parameters)
+        response = self._client.send(
+            request, header_parameters, body_content, stream=False, **operation_config)
 
         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -228,7 +229,7 @@ def get(
 
         # Construct headers
         header_parameters = {}
-        header_parameters['Accept'] = 'application/json'
+        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -239,8 +240,8 @@ def get(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
 
         # Construct and send request
-        request = self._client.get(url, query_parameters, header_parameters)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.get(url, query_parameters)
+        response = self._client.send(request, header_parameters, stream=False, **operation_config)
 
         if response.status_code not in [200, 304]:
             exp = CloudError(response)
@@ -294,6 +295,7 @@ def delete(
 
         # Construct headers
         header_parameters = {}
+        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -302,8 +304,8 @@ def delete(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
 
         # Construct and send request
-        request = self._client.delete(url, query_parameters, header_parameters)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.delete(url, query_parameters)
+        response = self._client.send(request, header_parameters, stream=False, **operation_config)
 
         if response.status_code not in [200, 204]:
             exp = CloudError(response)
@@ -334,7 +336,7 @@ def _subscribe_to_events_initial(
 
         # Construct headers
         header_parameters = {}
-        header_parameters['Accept'] = 'application/json'
+        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -343,8 +345,8 @@ def _subscribe_to_events_initial(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
 
         # Construct and send request
-        request = self._client.post(url, query_parameters, header_parameters)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters)
+        response = self._client.send(request, header_parameters, stream=False, **operation_config)
 
         if response.status_code not in [200, 202]:
             exp = CloudError(response)
@@ -363,7 +365,7 @@ def _subscribe_to_events_initial(
 
         return deserialized
 
     def subscribe_to_events(
-            self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, polling=True, **operation_config):
+            self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, **operation_config):
         """Subscribe event trigger to events.
 
         :param resource_group_name: The resource group name.
@@ -373,17 +375,13 @@ def subscribe_to_events(
         :param trigger_name: The trigger name.
         :type trigger_name: str
         :param dict custom_headers: headers that will be added to the request
-        :param bool raw: The poller return type is ClientRawResponse, the
-         direct response alongside the deserialized response
-        :param polling: True for ARMPolling, False for no polling, or a
-         polling object for personal polling strategy
-        :return: An instance of LROPoller that returns
-         TriggerSubscriptionOperationStatus or
-         ClientRawResponse if raw==True
+        :param bool raw: returns the direct response alongside the
+         deserialized response
+        :return: An instance of AzureOperationPoller that returns
+         TriggerSubscriptionOperationStatus or ClientRawResponse if raw=true
         :rtype:
         ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus]
-         or
-         ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus]]
+         or ~msrest.pipeline.ClientRawResponse
         :raises: :class:`CloudError`
         """
         raw_result = self._subscribe_to_events_initial(
@@ -394,8 +392,30 @@ def subscribe_to_events(
             raw=True,
             **operation_config
         )
+        if raw:
+            return raw_result
+
+        # Construct and send request
+        def long_running_send():
+            return raw_result.response
+
+        def get_long_running_status(status_link, headers=None):
+
+            request = self._client.get(status_link)
+            if headers:
+                request.headers.update(headers)
+            header_parameters = {}
+            header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id']
+            return self._client.send(
+                request, header_parameters, stream=False, **operation_config)
 
         def get_long_running_output(response):
+
+            if response.status_code not in [200, 202]:
+                exp = CloudError(response)
+                exp.request_id = response.headers.get('x-ms-request-id')
+                raise exp
+
             deserialized = self._deserialize('TriggerSubscriptionOperationStatus', response)
 
             if raw:
@@ -404,13 +424,12 @@ def get_long_running_output(response):
 
             return deserialized
 
-        lro_delay = operation_config.get(
+        long_running_operation_timeout = operation_config.get(
             'long_running_operation_timeout',
             self.config.long_running_operation_timeout)

     def get_event_subscription_status(
@@ -451,7 +470,7 @@ def get_event_subscription_status(

         # Construct headers
         header_parameters = {}
-        header_parameters['Accept'] = 'application/json'
+        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -460,8 +479,8 @@ def get_event_subscription_status(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

         # Construct and send request
-        request = self._client.post(url, query_parameters, header_parameters)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters)
+        response = self._client.send(request, header_parameters, stream=False, **operation_config)

         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -499,7 +518,7 @@ def _unsubscribe_from_events_initial(

         # Construct headers
         header_parameters = {}
-        header_parameters['Accept'] = 'application/json'
+        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -508,8 +527,8 @@ def _unsubscribe_from_events_initial(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

         # Construct and send request
-        request = self._client.post(url, query_parameters, header_parameters)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters)
+        response = self._client.send(request, header_parameters, stream=False, **operation_config)

         if response.status_code not in [200, 202]:
             exp = CloudError(response)
@@ -528,7 +547,7 @@ def _unsubscribe_from_events_initial(

         return deserialized

     def unsubscribe_from_events(
-            self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, polling=True, **operation_config):
+            self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, **operation_config):
         """Unsubscribe event trigger from events.

         :param resource_group_name: The resource group name.
@@ -538,17 +557,13 @@ def unsubscribe_from_events(
         :param trigger_name: The trigger name.
         :type trigger_name: str
         :param dict custom_headers: headers that will be added to the request
-        :param bool raw: The poller return type is ClientRawResponse, the
-         direct response alongside the deserialized response
-        :param polling: True for ARMPolling, False for no polling, or a
-         polling object for personal polling strategy
-        :return: An instance of LROPoller that returns
-         TriggerSubscriptionOperationStatus or
-         ClientRawResponse if raw==True
+        :param bool raw: returns the direct response alongside the
+         deserialized response
+        :return: An instance of AzureOperationPoller that returns
+         TriggerSubscriptionOperationStatus or ClientRawResponse if raw=true
         :rtype:
         ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus]
-         or
-         ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus]]
+         or ~msrest.pipeline.ClientRawResponse
         :raises: :class:`CloudError`
         """
         raw_result = self._unsubscribe_from_events_initial(
@@ -559,8 +574,30 @@ def unsubscribe_from_events(
             raw=True,
             **operation_config
         )
+        if raw:
+            return raw_result
+
+        # Construct and send request
+        def long_running_send():
+            return raw_result.response
+
+        def get_long_running_status(status_link, headers=None):
+
+            request = self._client.get(status_link)
+            if headers:
+                request.headers.update(headers)
+            header_parameters = {}
+            header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id']
+            return self._client.send(
+                request, header_parameters, stream=False, **operation_config)

         def get_long_running_output(response):
+
+            if response.status_code not in [200, 202]:
+                exp = CloudError(response)
+                exp.request_id = response.headers.get('x-ms-request-id')
+                raise exp
+
             deserialized = self._deserialize('TriggerSubscriptionOperationStatus', response)

             if raw:
@@ -569,13 +606,12 @@ def get_long_running_output(response):

             return deserialized

-        lro_delay = operation_config.get(
+        long_running_operation_timeout = operation_config.get(
             'long_running_operation_timeout',
             self.config.long_running_operation_timeout)
-        if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
-        elif polling is False: polling_method = NoPolling()
-        else: polling_method = polling
-        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+        return AzureOperationPoller(
+            long_running_send, get_long_running_output,
+            get_long_running_status, long_running_operation_timeout)
     unsubscribe_from_events.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'}

@@ -597,6 +633,7 @@ def _start_initial(

         # Construct headers
         header_parameters = {}
+        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -605,8 +642,8 @@ def _start_initial(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

         # Construct and send request
-        request = self._client.post(url, query_parameters, header_parameters)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters)
+        response = self._client.send(request, header_parameters, stream=False, **operation_config)

         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -618,7 +655,7 @@ def _start_initial(

         return client_raw_response

     def start(
-            self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, polling=True, **operation_config):
+            self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, **operation_config):
         """Starts a trigger.

         :param resource_group_name: The resource group name.
@@ -628,14 +665,12 @@ def start(
         :param trigger_name: The trigger name.
         :type trigger_name: str
         :param dict custom_headers: headers that will be added to the request
-        :param bool raw: The poller return type is ClientRawResponse, the
-         direct response alongside the deserialized response
-        :param polling: True for ARMPolling, False for no polling, or a
-         polling object for personal polling strategy
-        :return: An instance of LROPoller that returns None or
-         ClientRawResponse if raw==True
+        :param bool raw: returns the direct response alongside the
+         deserialized response
+        :return: An instance of AzureOperationPoller that returns None or
+         ClientRawResponse if raw=true
         :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
-         ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
+         ~msrest.pipeline.ClientRawResponse
         :raises: :class:`CloudError`
         """
         raw_result = self._start_initial(
@@ -646,19 +681,40 @@ def start(
             raw=True,
             **operation_config
         )
+        if raw:
+            return raw_result
+
+        # Construct and send request
+        def long_running_send():
+            return raw_result.response
+
+        def get_long_running_status(status_link, headers=None):
+
+            request = self._client.get(status_link)
+            if headers:
+                request.headers.update(headers)
+            header_parameters = {}
+            header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id']
+            return self._client.send(
+                request, header_parameters, stream=False, **operation_config)

         def get_long_running_output(response):
+
+            if response.status_code not in [200]:
+                exp = CloudError(response)
+                exp.request_id = response.headers.get('x-ms-request-id')
+                raise exp
+
             if raw:
                 client_raw_response = ClientRawResponse(None, response)
                 return client_raw_response

-        lro_delay = operation_config.get(
+        long_running_operation_timeout = operation_config.get(
             'long_running_operation_timeout',
             self.config.long_running_operation_timeout)
-        if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
-        elif polling is False: polling_method = NoPolling()
-        else: polling_method = polling
-        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+        return AzureOperationPoller(
+            long_running_send, get_long_running_output,
+            get_long_running_status, long_running_operation_timeout)
     start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'}

@@ -680,6 +736,7 @@ def _stop_initial(

         # Construct headers
         header_parameters = {}
+        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -688,8 +745,8 @@ def _stop_initial(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

         # Construct and send request
-        request = self._client.post(url, query_parameters, header_parameters)
-        response = self._client.send(request, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters)
+        response = self._client.send(request, header_parameters, stream=False, **operation_config)

         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -701,7 +758,7 @@ def _stop_initial(

         return client_raw_response

     def stop(
-            self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, polling=True, **operation_config):
+            self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, **operation_config):
         """Stops a trigger.

         :param resource_group_name: The resource group name.
@@ -711,14 +768,12 @@ def stop(
         :param trigger_name: The trigger name.
         :type trigger_name: str
         :param dict custom_headers: headers that will be added to the request
-        :param bool raw: The poller return type is ClientRawResponse, the
-         direct response alongside the deserialized response
-        :param polling: True for ARMPolling, False for no polling, or a
-         polling object for personal polling strategy
-        :return: An instance of LROPoller that returns None or
-         ClientRawResponse if raw==True
+        :param bool raw: returns the direct response alongside the
+         deserialized response
+        :return: An instance of AzureOperationPoller that returns None or
+         ClientRawResponse if raw=true
         :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
-         ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
+         ~msrest.pipeline.ClientRawResponse
         :raises: :class:`CloudError`
         """
         raw_result = self._stop_initial(
@@ -729,17 +784,38 @@ def stop(
             raw=True,
             **operation_config
         )
+        if raw:
+            return raw_result
+
+        # Construct and send request
+        def long_running_send():
+            return raw_result.response
+
+        def get_long_running_status(status_link, headers=None):
+
+            request = self._client.get(status_link)
+            if headers:
+                request.headers.update(headers)
+            header_parameters = {}
+            header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id']
+            return self._client.send(
+                request, header_parameters, stream=False, **operation_config)

         def get_long_running_output(response):
+
+            if response.status_code not in [200]:
+                exp = CloudError(response)
+                exp.request_id = response.headers.get('x-ms-request-id')
+                raise exp
+
             if raw:
                 client_raw_response = ClientRawResponse(None, response)
                 return client_raw_response

-        lro_delay = operation_config.get(
+        long_running_operation_timeout = operation_config.get(
             'long_running_operation_timeout',
             self.config.long_running_operation_timeout)
-        if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
-        elif polling is False: polling_method = NoPolling()
-        else: polling_method = polling
-        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+        return AzureOperationPoller(
+            long_running_send, get_long_running_output,
+            get_long_running_status, long_running_operation_timeout)
     stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'}
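Both start and stop build the same AzureOperationPoller but deserialize nothing, so a caller typically just blocks until the operation finishes; a minimal sketch under the same assumptions as above (hypothetical names, msrestazure poller API):

    # wait() blocks until the trigger is started/stopped; the result is None
    client.triggers.start('myRg', 'myFactory', 'myTrigger').wait()
    client.triggers.stop('myRg', 'myFactory', 'myTrigger').wait()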
"update", "sdkrel:python-sdks-folder": "./sdk/.", From d3ecd625781df08b15d493fe3d3f3d5db1270704 Mon Sep 17 00:00:00 2001 From: Zim Kalinowski Date: Fri, 30 Aug 2019 12:35:19 +0000 Subject: [PATCH 27/30] v3.0.52 --- .../data_factory_management_client.py | 6 +- .../azure/mgmt/datafactory/models/__init__.py | 1547 +++++++++++------ .../models/access_policy_response.py | 10 +- .../models/access_policy_response_py3.py | 36 + .../azure/mgmt/datafactory/models/activity.py | 20 +- .../datafactory/models/activity_dependency.py | 17 +- .../models/activity_dependency_py3.py | 46 + .../datafactory/models/activity_policy.py | 16 +- .../datafactory/models/activity_policy_py3.py | 59 + .../mgmt/datafactory/models/activity_py3.py | 63 + .../mgmt/datafactory/models/activity_run.py | 6 +- .../datafactory/models/activity_run_py3.py | 102 ++ .../models/activity_runs_query_response.py | 12 +- .../activity_runs_query_response_py3.py | 39 + .../models/amazon_mws_linked_service.py | 40 +- .../models/amazon_mws_linked_service_py3.py | 106 ++ .../models/amazon_mws_object_dataset.py | 12 +- .../models/amazon_mws_object_dataset_py3.py | 72 + .../datafactory/models/amazon_mws_source.py | 10 +- .../models/amazon_mws_source_py3.py | 57 + .../models/amazon_redshift_linked_service.py | 28 +- .../amazon_redshift_linked_service_py3.py | 86 + .../models/amazon_redshift_source.py | 12 +- .../models/amazon_redshift_source_py3.py | 65 + .../models/amazon_redshift_table_dataset.py | 16 +- .../amazon_redshift_table_dataset_py3.py | 82 + .../datafactory/models/amazon_s3_dataset.py | 30 +- .../models/amazon_s3_dataset_py3.py | 107 ++ .../models/amazon_s3_linked_service.py | 16 +- .../models/amazon_s3_linked_service_py3.py | 77 + .../datafactory/models/amazon_s3_location.py | 12 +- .../models/amazon_s3_location_py3.py | 55 + .../models/amazon_s3_read_settings.py | 22 +- .../models/amazon_s3_read_settings_py3.py | 78 + .../models/append_variable_activity.py | 14 +- .../models/append_variable_activity_py3.py | 60 + .../mgmt/datafactory/models/avro_dataset.py | 18 +- .../datafactory/models/avro_dataset_py3.py | 83 + .../mgmt/datafactory/models/avro_format.py | 15 +- .../datafactory/models/avro_format_py3.py | 46 + .../mgmt/datafactory/models/avro_sink.py | 12 +- .../mgmt/datafactory/models/avro_sink_py3.py | 69 + .../mgmt/datafactory/models/avro_source.py | 10 +- .../datafactory/models/avro_source_py3.py | 56 + .../datafactory/models/avro_write_settings.py | 12 +- .../models/avro_write_settings_py3.py | 46 + .../models/azure_batch_linked_service.py | 35 +- .../models/azure_batch_linked_service_py3.py | 88 + .../datafactory/models/azure_blob_dataset.py | 24 +- .../models/azure_blob_dataset_py3.py | 100 ++ .../models/azure_blob_fs_dataset.py | 18 +- .../models/azure_blob_fs_dataset_py3.py | 85 + .../models/azure_blob_fs_linked_service.py | 24 +- .../azure_blob_fs_linked_service_py3.py | 86 + .../models/azure_blob_fs_location.py | 10 +- .../models/azure_blob_fs_location_py3.py | 50 + .../models/azure_blob_fs_read_settings.py | 20 +- .../models/azure_blob_fs_read_settings_py3.py | 73 + .../datafactory/models/azure_blob_fs_sink.py | 10 +- .../models/azure_blob_fs_sink_py3.py | 65 + .../models/azure_blob_fs_source.py | 14 +- .../models/azure_blob_fs_source_py3.py | 68 + .../models/azure_blob_fs_write_settings.py | 10 +- .../azure_blob_fs_write_settings_py3.py | 51 + .../azure_blob_storage_linked_service.py | 26 +- .../azure_blob_storage_linked_service_py3.py | 104 ++ .../models/azure_blob_storage_location.py | 10 +- 
.../models/azure_blob_storage_location_py3.py | 50 + .../azure_blob_storage_read_settings.py | 20 +- .../azure_blob_storage_read_settings_py3.py | 73 + .../azure_blob_storage_write_settings.py | 10 +- .../azure_blob_storage_write_settings_py3.py | 51 + .../azure_data_explorer_command_activity.py | 19 +- ...zure_data_explorer_command_activity_py3.py | 71 + .../azure_data_explorer_linked_service.py | 40 +- .../azure_data_explorer_linked_service_py3.py | 86 + .../models/azure_data_explorer_sink.py | 14 +- .../models/azure_data_explorer_sink_py3.py | 76 + .../models/azure_data_explorer_source.py | 18 +- .../models/azure_data_explorer_source_py3.py | 70 + .../azure_data_explorer_table_dataset.py | 12 +- .../azure_data_explorer_table_dataset_py3.py | 72 + ...zure_data_lake_analytics_linked_service.py | 32 +- ..._data_lake_analytics_linked_service_py3.py | 99 ++ .../models/azure_data_lake_store_dataset.py | 18 +- .../azure_data_lake_store_dataset_py3.py | 86 + .../azure_data_lake_store_linked_service.py | 28 +- ...zure_data_lake_store_linked_service_py3.py | 98 ++ .../models/azure_data_lake_store_location.py | 15 +- .../azure_data_lake_store_location_py3.py | 45 + .../azure_data_lake_store_read_settings.py | 20 +- ...azure_data_lake_store_read_settings_py3.py | 73 + .../models/azure_data_lake_store_sink.py | 12 +- .../models/azure_data_lake_store_sink_py3.py | 69 + .../models/azure_data_lake_store_source.py | 10 +- .../azure_data_lake_store_source_py3.py | 58 + .../azure_data_lake_store_write_settings.py | 15 +- ...zure_data_lake_store_write_settings_py3.py | 46 + .../models/azure_databricks_linked_service.py | 40 +- .../azure_databricks_linked_service_py3.py | 126 ++ .../models/azure_function_activity.py | 27 +- .../models/azure_function_activity_py3.py | 85 + .../models/azure_function_linked_service.py | 18 +- .../azure_function_linked_service_py3.py | 69 + .../models/azure_key_vault_linked_service.py | 12 +- .../azure_key_vault_linked_service_py3.py | 60 + .../azure_key_vault_secret_reference.py | 20 +- .../azure_key_vault_secret_reference_py3.py | 51 + .../models/azure_maria_db_linked_service.py | 14 +- .../azure_maria_db_linked_service_py3.py | 69 + .../models/azure_maria_db_source.py | 10 +- .../models/azure_maria_db_source_py3.py | 57 + .../models/azure_maria_db_table_dataset.py | 12 +- .../azure_maria_db_table_dataset_py3.py | 72 + .../azure_ml_batch_execution_activity.py | 16 +- .../azure_ml_batch_execution_activity_py3.py | 82 + .../models/azure_ml_linked_service.py | 29 +- .../models/azure_ml_linked_service_py3.py | 94 + .../azure_ml_update_resource_activity.py | 28 +- .../azure_ml_update_resource_activity_py3.py | 81 + .../models/azure_ml_web_service_file.py | 20 +- .../models/azure_ml_web_service_file_py3.py | 43 + .../models/azure_my_sql_linked_service.py | 16 +- .../models/azure_my_sql_linked_service_py3.py | 71 + .../datafactory/models/azure_my_sql_sink.py | 10 +- .../models/azure_my_sql_sink_py3.py | 66 + .../datafactory/models/azure_my_sql_source.py | 10 +- .../models/azure_my_sql_source_py3.py | 57 + .../models/azure_my_sql_table_dataset.py | 12 +- .../models/azure_my_sql_table_dataset_py3.py | 72 + .../azure_postgre_sql_linked_service.py | 14 +- .../azure_postgre_sql_linked_service_py3.py | 70 + .../models/azure_postgre_sql_sink.py | 10 +- .../models/azure_postgre_sql_sink_py3.py | 66 + .../models/azure_postgre_sql_source.py | 10 +- .../models/azure_postgre_sql_source_py3.py | 57 + .../models/azure_postgre_sql_table_dataset.py | 16 +- .../azure_postgre_sql_table_dataset_py3.py 
| 84 + .../datafactory/models/azure_queue_sink.py | 18 +- .../models/azure_queue_sink_py3.py | 61 + .../models/azure_search_index_dataset.py | 16 +- .../models/azure_search_index_dataset_py3.py | 73 + .../models/azure_search_index_sink.py | 10 +- .../models/azure_search_index_sink_py3.py | 67 + .../models/azure_search_linked_service.py | 18 +- .../models/azure_search_linked_service_py3.py | 69 + .../azure_sql_database_linked_service.py | 22 +- .../azure_sql_database_linked_service_py3.py | 87 + .../models/azure_sql_dw_linked_service.py | 22 +- .../models/azure_sql_dw_linked_service_py3.py | 88 + .../models/azure_sql_dw_table_dataset.py | 16 +- .../models/azure_sql_dw_table_dataset_py3.py | 82 + .../models/azure_sql_mi_linked_service.py | 22 +- .../models/azure_sql_mi_linked_service_py3.py | 87 + .../models/azure_sql_mi_table_dataset.py | 16 +- .../models/azure_sql_mi_table_dataset_py3.py | 82 + .../mgmt/datafactory/models/azure_sql_sink.py | 20 +- .../datafactory/models/azure_sql_sink_py3.py | 93 + .../datafactory/models/azure_sql_source.py | 16 +- .../models/azure_sql_source_py3.py | 73 + .../models/azure_sql_table_dataset.py | 16 +- .../models/azure_sql_table_dataset_py3.py | 82 + .../models/azure_storage_linked_service.py | 18 +- .../azure_storage_linked_service_py3.py | 83 + .../datafactory/models/azure_table_dataset.py | 16 +- .../models/azure_table_dataset_py3.py | 73 + .../datafactory/models/azure_table_sink.py | 16 +- .../models/azure_table_sink_py3.py | 81 + .../datafactory/models/azure_table_source.py | 12 +- .../models/azure_table_source_py3.py | 63 + .../azure_table_storage_linked_service.py | 18 +- .../azure_table_storage_linked_service_py3.py | 83 + .../mgmt/datafactory/models/binary_dataset.py | 16 +- .../datafactory/models/binary_dataset_py3.py | 77 + .../mgmt/datafactory/models/binary_sink.py | 10 +- .../datafactory/models/binary_sink_py3.py | 65 + .../mgmt/datafactory/models/binary_source.py | 10 +- .../datafactory/models/binary_source_py3.py | 56 + .../datafactory/models/blob_events_trigger.py | 21 +- .../models/blob_events_trigger_py3.py | 85 + .../mgmt/datafactory/models/blob_sink.py | 16 +- .../mgmt/datafactory/models/blob_sink_py3.py | 80 + .../mgmt/datafactory/models/blob_source.py | 14 +- .../datafactory/models/blob_source_py3.py | 68 + .../mgmt/datafactory/models/blob_trigger.py | 25 +- .../datafactory/models/blob_trigger_py3.py | 78 + .../models/cassandra_linked_service.py | 24 +- .../models/cassandra_linked_service_py3.py | 84 + .../datafactory/models/cassandra_source.py | 12 +- .../models/cassandra_source_py3.py | 70 + .../models/cassandra_table_dataset.py | 14 +- .../models/cassandra_table_dataset_py3.py | 77 + ...on_data_service_for_apps_entity_dataset.py | 12 +- ...ata_service_for_apps_entity_dataset_py3.py | 72 + ...on_data_service_for_apps_linked_service.py | 48 +- ...ata_service_for_apps_linked_service_py3.py | 115 ++ .../common_data_service_for_apps_sink.py | 14 +- .../common_data_service_for_apps_sink_py3.py | 77 + .../common_data_service_for_apps_source.py | 10 +- ...common_data_service_for_apps_source_py3.py | 58 + .../models/concur_linked_service.py | 28 +- .../models/concur_linked_service_py3.py | 92 + .../models/concur_object_dataset.py | 12 +- .../models/concur_object_dataset_py3.py | 72 + .../mgmt/datafactory/models/concur_source.py | 10 +- .../datafactory/models/concur_source_py3.py | 57 + .../datafactory/models/control_activity.py | 19 +- .../models/control_activity_py3.py | 60 + .../mgmt/datafactory/models/copy_activity.py | 40 +- 
.../datafactory/models/copy_activity_py3.py | 124 ++ .../mgmt/datafactory/models/copy_sink.py | 20 +- .../mgmt/datafactory/models/copy_sink_py3.py | 82 + .../mgmt/datafactory/models/copy_source.py | 16 +- .../datafactory/models/copy_source_py3.py | 83 + .../models/cosmos_db_linked_service.py | 16 +- .../models/cosmos_db_linked_service_py3.py | 71 + ...smos_db_mongo_db_api_collection_dataset.py | 16 +- ..._db_mongo_db_api_collection_dataset_py3.py | 73 + .../cosmos_db_mongo_db_api_linked_service.py | 23 +- ...smos_db_mongo_db_api_linked_service_py3.py | 67 + .../models/cosmos_db_mongo_db_api_sink.py | 10 +- .../models/cosmos_db_mongo_db_api_sink_py3.py | 68 + .../models/cosmos_db_mongo_db_api_source.py | 14 +- .../cosmos_db_mongo_db_api_source_py3.py | 71 + .../models/couchbase_linked_service.py | 14 +- .../models/couchbase_linked_service_py3.py | 70 + .../datafactory/models/couchbase_source.py | 10 +- .../models/couchbase_source_py3.py | 57 + .../models/couchbase_table_dataset.py | 12 +- .../models/couchbase_table_dataset_py3.py | 72 + ...eate_linked_integration_runtime_request.py | 12 +- ..._linked_integration_runtime_request_py3.py | 43 + .../datafactory/models/create_run_response.py | 10 +- .../models/create_run_response_py3.py | 34 + .../datafactory/models/custom_activity.py | 26 +- .../datafactory/models/custom_activity_py3.py | 91 + .../custom_activity_reference_object.py | 8 +- .../custom_activity_reference_object_py3.py | 33 + .../custom_data_source_linked_service.py | 12 +- .../custom_data_source_linked_service_py3.py | 58 + .../mgmt/datafactory/models/custom_dataset.py | 12 +- .../datafactory/models/custom_dataset_py3.py | 71 + .../data_factory_management_client_enums.py | 144 +- .../data_lake_analytics_usql_activity.py | 30 +- .../data_lake_analytics_usql_activity_py3.py | 98 ++ .../models/databricks_notebook_activity.py | 22 +- .../databricks_notebook_activity_py3.py | 76 + .../models/databricks_spark_jar_activity.py | 23 +- .../databricks_spark_jar_activity_py3.py | 75 + .../databricks_spark_python_activity.py | 21 +- .../databricks_spark_python_activity_py3.py | 75 + .../azure/mgmt/datafactory/models/dataset.py | 26 +- .../models/dataset_bzip2_compression.py | 13 +- .../models/dataset_bzip2_compression_py3.py | 38 + .../datafactory/models/dataset_compression.py | 10 +- .../models/dataset_compression_py3.py | 47 + .../models/dataset_deflate_compression.py | 10 +- .../models/dataset_deflate_compression_py3.py | 42 + .../mgmt/datafactory/models/dataset_folder.py | 6 +- .../datafactory/models/dataset_folder_py3.py | 29 + .../models/dataset_gzip_compression.py | 10 +- .../models/dataset_gzip_compression_py3.py | 42 + .../datafactory/models/dataset_location.py | 16 +- .../models/dataset_location_py3.py | 49 + .../mgmt/datafactory/models/dataset_py3.py | 113 ++ .../datafactory/models/dataset_reference.py | 15 +- .../models/dataset_reference_py3.py | 48 + .../datafactory/models/dataset_resource.py | 10 +- .../models/dataset_resource_py3.py | 53 + .../models/dataset_storage_format.py | 14 +- .../models/dataset_storage_format_py3.py | 57 + .../models/dataset_zip_deflate_compression.py | 10 +- .../dataset_zip_deflate_compression_py3.py | 42 + .../datafactory/models/db2_linked_service.py | 28 +- .../models/db2_linked_service_py3.py | 86 + .../mgmt/datafactory/models/db2_source.py | 10 +- .../mgmt/datafactory/models/db2_source_py3.py | 57 + .../datafactory/models/db2_table_dataset.py | 16 +- .../models/db2_table_dataset_py3.py | 82 + .../datafactory/models/delete_activity.py | 22 +- 
.../datafactory/models/delete_activity_py3.py | 87 + .../models/delimited_text_dataset.py | 32 +- .../models/delimited_text_dataset_py3.py | 122 ++ .../models/delimited_text_read_settings.py | 10 +- .../delimited_text_read_settings_py3.py | 43 + .../datafactory/models/delimited_text_sink.py | 12 +- .../models/delimited_text_sink_py3.py | 70 + .../models/delimited_text_source.py | 12 +- .../models/delimited_text_source_py3.py | 61 + .../models/delimited_text_write_settings.py | 16 +- .../delimited_text_write_settings_py3.py | 49 + .../models/dependency_reference.py | 8 +- .../models/dependency_reference_py3.py | 42 + .../datafactory/models/distcp_settings.py | 25 +- .../datafactory/models/distcp_settings_py3.py | 49 + .../models/document_db_collection_dataset.py | 16 +- .../document_db_collection_dataset_py3.py | 73 + .../models/document_db_collection_sink.py | 12 +- .../models/document_db_collection_sink_py3.py | 71 + .../models/document_db_collection_source.py | 12 +- .../document_db_collection_source_py3.py | 62 + .../models/drill_linked_service.py | 14 +- .../models/drill_linked_service_py3.py | 69 + .../mgmt/datafactory/models/drill_source.py | 10 +- .../datafactory/models/drill_source_py3.py | 57 + .../datafactory/models/drill_table_dataset.py | 16 +- .../models/drill_table_dataset_py3.py | 82 + .../models/dynamics_ax_linked_service.py | 46 +- .../models/dynamics_ax_linked_service_py3.py | 93 + .../models/dynamics_ax_resource_dataset.py | 16 +- .../dynamics_ax_resource_dataset_py3.py | 73 + .../datafactory/models/dynamics_ax_source.py | 10 +- .../models/dynamics_ax_source_py3.py | 57 + .../models/dynamics_crm_entity_dataset.py | 12 +- .../models/dynamics_crm_entity_dataset_py3.py | 72 + .../models/dynamics_crm_linked_service.py | 46 +- .../models/dynamics_crm_linked_service_py3.py | 112 ++ .../datafactory/models/dynamics_crm_sink.py | 14 +- .../models/dynamics_crm_sink_py3.py | 77 + .../datafactory/models/dynamics_crm_source.py | 10 +- .../models/dynamics_crm_source_py3.py | 58 + .../models/dynamics_entity_dataset.py | 12 +- .../models/dynamics_entity_dataset_py3.py | 72 + .../models/dynamics_linked_service.py | 43 +- .../models/dynamics_linked_service_py3.py | 109 ++ .../mgmt/datafactory/models/dynamics_sink.py | 14 +- .../datafactory/models/dynamics_sink_py3.py | 77 + .../datafactory/models/dynamics_source.py | 10 +- .../datafactory/models/dynamics_source_py3.py | 58 + .../models/eloqua_linked_service.py | 28 +- .../models/eloqua_linked_service_py3.py | 91 + .../models/eloqua_object_dataset.py | 12 +- .../models/eloqua_object_dataset_py3.py | 72 + .../mgmt/datafactory/models/eloqua_source.py | 10 +- .../datafactory/models/eloqua_source_py3.py | 57 + .../datafactory/models/entity_reference.py | 8 +- .../models/entity_reference_py3.py | 34 + .../models/execute_pipeline_activity.py | 18 +- .../models/execute_pipeline_activity_py3.py | 65 + .../models/execute_ssis_package_activity.py | 38 +- .../execute_ssis_package_activity_py3.py | 124 ++ .../datafactory/models/execution_activity.py | 14 +- .../models/execution_activity_py3.py | 75 + .../models/exposure_control_request.py | 8 +- .../models/exposure_control_request_py3.py | 32 + .../models/exposure_control_response.py | 4 +- .../models/exposure_control_response_py3.py | 40 + .../mgmt/datafactory/models/expression.py | 12 +- .../mgmt/datafactory/models/expression_py3.py | 43 + .../azure/mgmt/datafactory/models/factory.py | 10 +- .../models/factory_git_hub_configuration.py | 18 +- .../factory_git_hub_configuration_py3.py | 58 + 
.../datafactory/models/factory_identity.py | 10 +- .../models/factory_identity_py3.py | 49 + .../mgmt/datafactory/models/factory_py3.py | 81 + .../models/factory_repo_configuration.py | 26 +- .../models/factory_repo_configuration_py3.py | 65 + .../datafactory/models/factory_repo_update.py | 8 +- .../models/factory_repo_update_py3.py | 33 + .../models/factory_update_parameters.py | 8 +- .../models/factory_update_parameters_py3.py | 32 + .../models/factory_vsts_configuration.py | 22 +- .../models/factory_vsts_configuration_py3.py | 62 + .../models/file_server_linked_service.py | 20 +- .../models/file_server_linked_service_py3.py | 74 + .../models/file_server_location.py | 15 +- .../models/file_server_location_py3.py | 45 + .../models/file_server_read_settings.py | 20 +- .../models/file_server_read_settings_py3.py | 73 + .../models/file_server_write_settings.py | 15 +- .../models/file_server_write_settings_py3.py | 46 + .../datafactory/models/file_share_dataset.py | 24 +- .../models/file_share_dataset_py3.py | 101 ++ .../datafactory/models/file_system_sink.py | 10 +- .../models/file_system_sink_py3.py | 65 + .../datafactory/models/file_system_source.py | 10 +- .../models/file_system_source_py3.py | 58 + .../datafactory/models/filter_activity.py | 18 +- .../datafactory/models/filter_activity_py3.py | 61 + .../datafactory/models/for_each_activity.py | 22 +- .../models/for_each_activity_py3.py | 73 + .../models/format_read_settings.py | 12 +- .../models/format_read_settings_py3.py | 39 + .../models/format_write_settings.py | 12 +- .../models/format_write_settings_py3.py | 39 + .../datafactory/models/ftp_read_settings.py | 16 +- .../models/ftp_read_settings_py3.py | 63 + .../models/ftp_server_linked_service.py | 28 +- .../models/ftp_server_linked_service_py3.py | 98 ++ .../datafactory/models/ftp_server_location.py | 15 +- .../models/ftp_server_location_py3.py | 45 + .../models/get_metadata_activity.py | 16 +- .../models/get_metadata_activity_py3.py | 67 + .../get_ssis_object_metadata_request.py | 6 +- .../get_ssis_object_metadata_request_py3.py | 28 + .../models/git_hub_access_token_request.py | 17 +- .../git_hub_access_token_request_py3.py | 44 + .../models/git_hub_access_token_response.py | 6 +- .../git_hub_access_token_response_py3.py | 28 + .../models/google_ad_words_linked_service.py | 45 +- .../google_ad_words_linked_service_py3.py | 119 ++ .../models/google_ad_words_object_dataset.py | 12 +- .../google_ad_words_object_dataset_py3.py | 72 + .../models/google_ad_words_source.py | 10 +- .../models/google_ad_words_source_py3.py | 57 + .../models/google_big_query_linked_service.py | 41 +- .../google_big_query_linked_service_py3.py | 124 ++ .../models/google_big_query_object_dataset.py | 16 +- .../google_big_query_object_dataset_py3.py | 82 + .../models/google_big_query_source.py | 10 +- .../models/google_big_query_source_py3.py | 57 + .../models/greenplum_linked_service.py | 14 +- .../models/greenplum_linked_service_py3.py | 69 + .../datafactory/models/greenplum_source.py | 10 +- .../models/greenplum_source_py3.py | 57 + .../models/greenplum_table_dataset.py | 16 +- .../models/greenplum_table_dataset_py3.py | 82 + .../models/hbase_linked_service.py | 39 +- .../models/hbase_linked_service_py3.py | 114 ++ .../models/hbase_object_dataset.py | 12 +- .../models/hbase_object_dataset_py3.py | 72 + .../mgmt/datafactory/models/hbase_source.py | 10 +- .../datafactory/models/hbase_source_py3.py | 57 + .../models/hd_insight_hive_activity.py | 26 +- .../models/hd_insight_hive_activity_py3.py | 96 + 
.../models/hd_insight_linked_service.py | 28 +- .../models/hd_insight_linked_service_py3.py | 96 + .../models/hd_insight_map_reduce_activity.py | 32 +- .../hd_insight_map_reduce_activity_py3.py | 99 ++ .../hd_insight_on_demand_linked_service.py | 106 +- ...hd_insight_on_demand_linked_service_py3.py | 237 +++ .../models/hd_insight_pig_activity.py | 22 +- .../models/hd_insight_pig_activity_py3.py | 87 + .../models/hd_insight_spark_activity.py | 36 +- .../models/hd_insight_spark_activity_py3.py | 100 ++ .../models/hd_insight_streaming_activity.py | 53 +- .../hd_insight_streaming_activity_py3.py | 122 ++ .../datafactory/models/hdfs_linked_service.py | 20 +- .../models/hdfs_linked_service_py3.py | 81 + .../mgmt/datafactory/models/hdfs_location.py | 15 +- .../datafactory/models/hdfs_location_py3.py | 45 + .../datafactory/models/hdfs_read_settings.py | 22 +- .../models/hdfs_read_settings_py3.py | 77 + .../mgmt/datafactory/models/hdfs_source.py | 12 +- .../datafactory/models/hdfs_source_py3.py | 62 + .../datafactory/models/hive_linked_service.py | 51 +- .../models/hive_linked_service_py3.py | 147 ++ .../datafactory/models/hive_object_dataset.py | 16 +- .../models/hive_object_dataset_py3.py | 82 + .../mgmt/datafactory/models/hive_source.py | 10 +- .../datafactory/models/hive_source_py3.py | 57 + .../mgmt/datafactory/models/http_dataset.py | 22 +- .../datafactory/models/http_dataset_py3.py | 99 ++ .../datafactory/models/http_linked_service.py | 26 +- .../models/http_linked_service_py3.py | 105 ++ .../datafactory/models/http_read_settings.py | 16 +- .../models/http_read_settings_py3.py | 63 + .../models/http_server_location.py | 10 +- .../models/http_server_location_py3.py | 50 + .../mgmt/datafactory/models/http_source.py | 10 +- .../datafactory/models/http_source_py3.py | 60 + .../models/hubspot_linked_service.py | 27 +- .../models/hubspot_linked_service_py3.py | 96 + .../models/hubspot_object_dataset.py | 12 +- .../models/hubspot_object_dataset_py3.py | 72 + .../mgmt/datafactory/models/hubspot_source.py | 10 +- .../datafactory/models/hubspot_source_py3.py | 57 + .../models/if_condition_activity.py | 20 +- .../models/if_condition_activity_py3.py | 72 + .../models/impala_linked_service.py | 39 +- .../models/impala_linked_service_py3.py | 117 ++ .../models/impala_object_dataset.py | 16 +- .../models/impala_object_dataset_py3.py | 82 + .../mgmt/datafactory/models/impala_source.py | 10 +- .../datafactory/models/impala_source_py3.py | 57 + .../models/informix_linked_service.py | 24 +- .../models/informix_linked_service_py3.py | 86 + .../mgmt/datafactory/models/informix_sink.py | 10 +- .../datafactory/models/informix_sink_py3.py | 66 + .../datafactory/models/informix_source.py | 10 +- .../datafactory/models/informix_source_py3.py | 57 + .../models/informix_table_dataset.py | 12 +- .../models/informix_table_dataset_py3.py | 72 + .../datafactory/models/integration_runtime.py | 12 +- .../models/integration_runtime_auth_keys.py | 8 +- .../integration_runtime_auth_keys_py3.py | 32 + .../integration_runtime_compute_properties.py | 16 +- ...egration_runtime_compute_properties_py3.py | 60 + .../integration_runtime_connection_info.py | 6 +- ...integration_runtime_connection_info_py3.py | 70 + ..._runtime_custom_setup_script_properties.py | 8 +- ...time_custom_setup_script_properties_py3.py | 33 + ...tegration_runtime_data_proxy_properties.py | 10 +- ...ation_runtime_data_proxy_properties_py3.py | 37 + .../integration_runtime_monitoring_data.py | 8 +- ...integration_runtime_monitoring_data_py3.py | 33 + 
.../integration_runtime_node_ip_address.py | 4 +- ...integration_runtime_node_ip_address_py3.py | 35 + ...ntegration_runtime_node_monitoring_data.py | 6 +- ...ration_runtime_node_monitoring_data_py3.py | 79 + .../models/integration_runtime_py3.py | 51 + .../models/integration_runtime_reference.py | 14 +- .../integration_runtime_reference_py3.py | 48 + ...ation_runtime_regenerate_key_parameters.py | 6 +- ...n_runtime_regenerate_key_parameters_py3.py | 30 + .../models/integration_runtime_resource.py | 10 +- .../integration_runtime_resource_py3.py | 53 + .../integration_runtime_ssis_catalog_info.py | 14 +- ...tegration_runtime_ssis_catalog_info_py3.py | 55 + .../integration_runtime_ssis_properties.py | 16 +- ...integration_runtime_ssis_properties_py3.py | 59 + .../models/integration_runtime_status.py | 10 +- ...ntegration_runtime_status_list_response.py | 12 +- ...ration_runtime_status_list_response_py3.py | 40 + .../models/integration_runtime_status_py3.py | 64 + .../integration_runtime_status_response.py | 10 +- ...integration_runtime_status_response_py3.py | 42 + .../integration_runtime_vnet_properties.py | 10 +- ...integration_runtime_vnet_properties_py3.py | 38 + .../datafactory/models/jira_linked_service.py | 31 +- .../models/jira_linked_service_py3.py | 98 ++ .../datafactory/models/jira_object_dataset.py | 12 +- .../models/jira_object_dataset_py3.py | 72 + .../mgmt/datafactory/models/jira_source.py | 10 +- .../datafactory/models/jira_source_py3.py | 57 + .../mgmt/datafactory/models/json_dataset.py | 18 +- .../datafactory/models/json_dataset_py3.py | 85 + .../mgmt/datafactory/models/json_format.py | 18 +- .../datafactory/models/json_format_py3.py | 82 + .../mgmt/datafactory/models/json_sink.py | 12 +- .../mgmt/datafactory/models/json_sink_py3.py | 69 + .../mgmt/datafactory/models/json_source.py | 10 +- .../datafactory/models/json_source_py3.py | 56 + .../datafactory/models/json_write_settings.py | 10 +- .../models/json_write_settings_py3.py | 45 + .../models/linked_integration_runtime.py | 4 +- ...d_integration_runtime_key_authorization.py | 12 +- ...tegration_runtime_key_authorization_py3.py | 39 + .../models/linked_integration_runtime_py3.py | 58 + ..._integration_runtime_rbac_authorization.py | 14 +- ...egration_runtime_rbac_authorization_py3.py | 41 + .../linked_integration_runtime_request.py | 12 +- .../linked_integration_runtime_request_py3.py | 35 + .../models/linked_integration_runtime_type.py | 8 +- .../linked_integration_runtime_type_py3.py | 42 + .../mgmt/datafactory/models/linked_service.py | 18 +- .../datafactory/models/linked_service_py3.py | 102 ++ .../models/linked_service_reference.py | 14 +- .../models/linked_service_reference_py3.py | 48 + .../models/linked_service_resource.py | 10 +- .../models/linked_service_resource_py3.py | 53 + .../models/log_storage_settings.py | 15 +- .../models/log_storage_settings_py3.py | 46 + .../datafactory/models/lookup_activity.py | 22 +- .../datafactory/models/lookup_activity_py3.py | 74 + .../models/magento_linked_service.py | 22 +- .../models/magento_linked_service_py3.py | 85 + .../models/magento_object_dataset.py | 12 +- .../models/magento_object_dataset_py3.py | 72 + .../mgmt/datafactory/models/magento_source.py | 10 +- .../datafactory/models/magento_source_py3.py | 57 + .../models/managed_integration_runtime.py | 12 +- .../managed_integration_runtime_error.py | 6 +- .../managed_integration_runtime_error_py3.py | 55 + .../managed_integration_runtime_node.py | 8 +- .../managed_integration_runtime_node_py3.py | 52 + 
...ed_integration_runtime_operation_result.py | 6 +- ...ntegration_runtime_operation_result_py3.py | 65 + .../models/managed_integration_runtime_py3.py | 65 + .../managed_integration_runtime_status.py | 8 +- .../managed_integration_runtime_status_py3.py | 78 + .../models/maria_db_linked_service.py | 14 +- .../models/maria_db_linked_service_py3.py | 69 + .../datafactory/models/maria_db_source.py | 10 +- .../datafactory/models/maria_db_source_py3.py | 57 + .../models/maria_db_table_dataset.py | 12 +- .../models/maria_db_table_dataset_py3.py | 72 + .../models/marketo_linked_service.py | 26 +- .../models/marketo_linked_service_py3.py | 90 + .../models/marketo_object_dataset.py | 12 +- .../models/marketo_object_dataset_py3.py | 72 + .../mgmt/datafactory/models/marketo_source.py | 10 +- .../datafactory/models/marketo_source_py3.py | 57 + .../models/microsoft_access_linked_service.py | 24 +- .../microsoft_access_linked_service_py3.py | 86 + .../models/microsoft_access_sink.py | 10 +- .../models/microsoft_access_sink_py3.py | 66 + .../models/microsoft_access_source.py | 10 +- .../models/microsoft_access_source_py3.py | 57 + .../models/microsoft_access_table_dataset.py | 12 +- .../microsoft_access_table_dataset_py3.py | 72 + .../models/mongo_db_collection_dataset.py | 16 +- .../models/mongo_db_collection_dataset_py3.py | 73 + .../mongo_db_cursor_methods_properties.py | 14 +- .../mongo_db_cursor_methods_properties_py3.py | 53 + .../models/mongo_db_linked_service.py | 36 +- .../models/mongo_db_linked_service_py3.py | 109 ++ .../datafactory/models/mongo_db_source.py | 10 +- .../datafactory/models/mongo_db_source_py3.py | 57 + .../models/mongo_db_v2_collection_dataset.py | 16 +- .../mongo_db_v2_collection_dataset_py3.py | 73 + .../models/mongo_db_v2_linked_service.py | 22 +- .../models/mongo_db_v2_linked_service_py3.py | 66 + .../datafactory/models/mongo_db_v2_source.py | 14 +- .../models/mongo_db_v2_source_py3.py | 71 + .../models/multiple_pipeline_trigger.py | 10 +- .../models/multiple_pipeline_trigger_py3.py | 68 + .../models/my_sql_linked_service.py | 16 +- .../models/my_sql_linked_service_py3.py | 70 + .../mgmt/datafactory/models/my_sql_source.py | 10 +- .../datafactory/models/my_sql_source_py3.py | 57 + .../models/my_sql_table_dataset.py | 12 +- .../models/my_sql_table_dataset_py3.py | 72 + .../models/netezza_linked_service.py | 14 +- .../models/netezza_linked_service_py3.py | 69 + .../models/netezza_partition_settings.py | 10 +- .../models/netezza_partition_settings_py3.py | 42 + .../mgmt/datafactory/models/netezza_source.py | 14 +- .../datafactory/models/netezza_source_py3.py | 70 + .../models/netezza_table_dataset.py | 16 +- .../models/netezza_table_dataset_py3.py | 82 + .../models/odata_linked_service.py | 36 +- .../models/odata_linked_service_py3.py | 127 ++ .../models/odata_resource_dataset.py | 12 +- .../models/odata_resource_dataset_py3.py | 72 + .../mgmt/datafactory/models/odata_source.py | 10 +- .../datafactory/models/odata_source_py3.py | 57 + .../datafactory/models/odbc_linked_service.py | 24 +- .../models/odbc_linked_service_py3.py | 86 + .../mgmt/datafactory/models/odbc_sink.py | 10 +- .../mgmt/datafactory/models/odbc_sink_py3.py | 66 + .../mgmt/datafactory/models/odbc_source.py | 10 +- .../datafactory/models/odbc_source_py3.py | 57 + .../datafactory/models/odbc_table_dataset.py | 12 +- .../models/odbc_table_dataset_py3.py | 72 + .../datafactory/models/office365_dataset.py | 18 +- .../models/office365_dataset_py3.py | 79 + .../models/office365_linked_service.py | 34 +- 
.../models/office365_linked_service_py3.py | 83 + .../datafactory/models/office365_source.py | 18 +- .../models/office365_source_py3.py | 78 + .../mgmt/datafactory/models/operation.py | 12 +- .../datafactory/models/operation_display.py | 12 +- .../models/operation_display_py3.py | 41 + .../models/operation_log_specification.py | 10 +- .../models/operation_log_specification_py3.py | 37 + .../models/operation_metric_availability.py | 8 +- .../operation_metric_availability_py3.py | 33 + .../models/operation_metric_dimension.py | 10 +- .../models/operation_metric_dimension_py3.py | 37 + .../models/operation_metric_specification.py | 24 +- .../operation_metric_specification_py3.py | 68 + .../mgmt/datafactory/models/operation_py3.py | 41 + .../models/operation_service_specification.py | 8 +- .../operation_service_specification_py3.py | 34 + .../models/oracle_linked_service.py | 16 +- .../models/oracle_linked_service_py3.py | 71 + .../models/oracle_partition_settings.py | 12 +- .../models/oracle_partition_settings_py3.py | 46 + .../oracle_service_cloud_linked_service.py | 32 +- ...oracle_service_cloud_linked_service_py3.py | 95 + .../oracle_service_cloud_object_dataset.py | 12 +- ...oracle_service_cloud_object_dataset_py3.py | 72 + .../models/oracle_service_cloud_source.py | 10 +- .../models/oracle_service_cloud_source_py3.py | 57 + .../mgmt/datafactory/models/oracle_sink.py | 10 +- .../datafactory/models/oracle_sink_py3.py | 66 + .../mgmt/datafactory/models/oracle_source.py | 16 +- .../datafactory/models/oracle_source_py3.py | 76 + .../models/oracle_table_dataset.py | 16 +- .../models/oracle_table_dataset_py3.py | 82 + .../mgmt/datafactory/models/orc_format.py | 15 +- .../mgmt/datafactory/models/orc_format_py3.py | 46 + .../models/parameter_specification.py | 14 +- .../models/parameter_specification_py3.py | 39 + .../datafactory/models/parquet_dataset.py | 16 +- .../datafactory/models/parquet_dataset_py3.py | 76 + .../mgmt/datafactory/models/parquet_format.py | 15 +- .../datafactory/models/parquet_format_py3.py | 46 + .../mgmt/datafactory/models/parquet_sink.py | 10 +- .../datafactory/models/parquet_sink_py3.py | 65 + .../mgmt/datafactory/models/parquet_source.py | 10 +- .../datafactory/models/parquet_source_py3.py | 56 + .../models/paypal_linked_service.py | 28 +- .../models/paypal_linked_service_py3.py | 92 + .../models/paypal_object_dataset.py | 12 +- .../models/paypal_object_dataset_py3.py | 72 + .../mgmt/datafactory/models/paypal_source.py | 10 +- .../datafactory/models/paypal_source_py3.py | 57 + .../models/phoenix_linked_service.py | 40 +- .../models/phoenix_linked_service_py3.py | 121 ++ .../models/phoenix_object_dataset.py | 16 +- .../models/phoenix_object_dataset_py3.py | 82 + .../mgmt/datafactory/models/phoenix_source.py | 10 +- .../datafactory/models/phoenix_source_py3.py | 57 + .../datafactory/models/pipeline_folder.py | 6 +- .../datafactory/models/pipeline_folder_py3.py | 29 + .../datafactory/models/pipeline_reference.py | 15 +- .../models/pipeline_reference_py3.py | 48 + .../datafactory/models/pipeline_resource.py | 20 +- .../models/pipeline_resource_py3.py | 84 + .../mgmt/datafactory/models/pipeline_run.py | 6 +- .../models/pipeline_run_invoked_by.py | 4 +- .../models/pipeline_run_invoked_by_py3.py | 45 + .../datafactory/models/pipeline_run_py3.py | 99 ++ .../models/pipeline_runs_query_response.py | 12 +- .../pipeline_runs_query_response_py3.py | 39 + .../datafactory/models/polybase_settings.py | 14 +- .../models/polybase_settings_py3.py | 53 + 
.../models/postgre_sql_linked_service.py | 16 +- .../models/postgre_sql_linked_service_py3.py | 70 + .../datafactory/models/postgre_sql_source.py | 10 +- .../models/postgre_sql_source_py3.py | 57 + .../models/postgre_sql_table_dataset.py | 16 +- .../models/postgre_sql_table_dataset_py3.py | 82 + .../models/presto_linked_service.py | 50 +- .../models/presto_linked_service_py3.py | 132 ++ .../models/presto_object_dataset.py | 16 +- .../models/presto_object_dataset_py3.py | 82 + .../mgmt/datafactory/models/presto_source.py | 10 +- .../datafactory/models/presto_source_py3.py | 57 + .../models/quick_books_linked_service.py | 42 +- .../models/quick_books_linked_service_py3.py | 100 ++ .../models/quick_books_object_dataset.py | 12 +- .../models/quick_books_object_dataset_py3.py | 72 + .../datafactory/models/quick_books_source.py | 10 +- .../models/quick_books_source_py3.py | 57 + .../datafactory/models/recurrence_schedule.py | 16 +- .../models/recurrence_schedule_occurrence.py | 10 +- .../recurrence_schedule_occurrence_py3.py | 38 + .../models/recurrence_schedule_py3.py | 50 + .../redirect_incompatible_row_settings.py | 20 +- .../redirect_incompatible_row_settings_py3.py | 47 + .../models/redshift_unload_settings.py | 24 +- .../models/redshift_unload_settings_py3.py | 48 + .../datafactory/models/relational_source.py | 10 +- .../models/relational_source_py3.py | 57 + .../models/relational_table_dataset.py | 12 +- .../models/relational_table_dataset_py3.py | 72 + .../models/rerun_trigger_resource.py | 10 +- .../models/rerun_trigger_resource_py3.py | 54 + .../models/rerun_tumbling_window_trigger.py | 28 +- ...mbling_window_trigger_action_parameters.py | 22 +- ...ng_window_trigger_action_parameters_py3.py | 47 + .../rerun_tumbling_window_trigger_py3.py | 78 + .../azure/mgmt/datafactory/models/resource.py | 8 +- .../mgmt/datafactory/models/resource_py3.py | 58 + .../models/responsys_linked_service.py | 28 +- .../models/responsys_linked_service_py3.py | 94 + .../models/responsys_object_dataset.py | 12 +- .../models/responsys_object_dataset_py3.py | 72 + .../datafactory/models/responsys_source.py | 10 +- .../models/responsys_source_py3.py | 57 + .../models/rest_resource_dataset.py | 20 +- .../models/rest_resource_dataset_py3.py | 93 + .../models/rest_service_linked_service.py | 36 +- .../models/rest_service_linked_service_py3.py | 107 ++ .../mgmt/datafactory/models/rest_source.py | 20 +- .../datafactory/models/rest_source_py3.py | 86 + .../mgmt/datafactory/models/retry_policy.py | 8 +- .../datafactory/models/retry_policy_py3.py | 38 + .../models/run_filter_parameters.py | 24 +- .../models/run_filter_parameters_py3.py | 54 + .../datafactory/models/run_query_filter.py | 34 +- .../models/run_query_filter_py3.py | 53 + .../datafactory/models/run_query_order_by.py | 28 +- .../models/run_query_order_by_py3.py | 46 + .../models/salesforce_linked_service.py | 18 +- .../models/salesforce_linked_service_py3.py | 82 + ...lesforce_marketing_cloud_linked_service.py | 25 +- ...orce_marketing_cloud_linked_service_py3.py | 91 + ...lesforce_marketing_cloud_object_dataset.py | 12 +- ...orce_marketing_cloud_object_dataset_py3.py | 72 + .../salesforce_marketing_cloud_source.py | 10 +- .../salesforce_marketing_cloud_source_py3.py | 57 + .../models/salesforce_object_dataset.py | 12 +- .../models/salesforce_object_dataset_py3.py | 72 + ...salesforce_service_cloud_linked_service.py | 20 +- ...sforce_service_cloud_linked_service_py3.py | 87 + ...salesforce_service_cloud_object_dataset.py | 12 +- 
...sforce_service_cloud_object_dataset_py3.py | 72 + .../models/salesforce_service_cloud_sink.py | 14 +- .../salesforce_service_cloud_sink_py3.py | 84 + .../models/salesforce_service_cloud_source.py | 12 +- .../salesforce_service_cloud_source_py3.py | 63 + .../datafactory/models/salesforce_sink.py | 14 +- .../datafactory/models/salesforce_sink_py3.py | 84 + .../datafactory/models/salesforce_source.py | 12 +- .../models/salesforce_source_py3.py | 63 + .../datafactory/models/sap_bw_cube_dataset.py | 22 +- .../models/sap_bw_cube_dataset_py3.py | 67 + .../models/sap_bw_linked_service.py | 36 +- .../models/sap_bw_linked_service_py3.py | 88 + .../mgmt/datafactory/models/sap_bw_source.py | 10 +- .../datafactory/models/sap_bw_source_py3.py | 57 + .../sap_cloud_for_customer_linked_service.py | 22 +- ...p_cloud_for_customer_linked_service_py3.py | 76 + ...sap_cloud_for_customer_resource_dataset.py | 16 +- ...cloud_for_customer_resource_dataset_py3.py | 73 + .../models/sap_cloud_for_customer_sink.py | 10 +- .../models/sap_cloud_for_customer_sink_py3.py | 67 + .../models/sap_cloud_for_customer_source.py | 10 +- .../sap_cloud_for_customer_source_py3.py | 57 + .../models/sap_ecc_linked_service.py | 18 +- .../models/sap_ecc_linked_service_py3.py | 76 + .../models/sap_ecc_resource_dataset.py | 16 +- .../models/sap_ecc_resource_dataset_py3.py | 73 + .../mgmt/datafactory/models/sap_ecc_source.py | 10 +- .../datafactory/models/sap_ecc_source_py3.py | 57 + .../models/sap_hana_linked_service.py | 24 +- .../models/sap_hana_linked_service_py3.py | 85 + .../datafactory/models/sap_hana_source.py | 12 +- .../datafactory/models/sap_hana_source_py3.py | 62 + .../models/sap_hana_table_dataset.py | 14 +- .../models/sap_hana_table_dataset_py3.py | 77 + .../models/sap_open_hub_linked_service.py | 37 +- .../models/sap_open_hub_linked_service_py3.py | 99 ++ .../datafactory/models/sap_open_hub_source.py | 12 +- .../models/sap_open_hub_source_py3.py | 66 + .../models/sap_open_hub_table_dataset.py | 22 +- .../models/sap_open_hub_table_dataset_py3.py | 87 + .../models/sap_table_linked_service.py | 40 +- .../models/sap_table_linked_service_py3.py | 140 ++ .../models/sap_table_partition_settings.py | 12 +- .../sap_table_partition_settings_py3.py | 47 + .../models/sap_table_resource_dataset.py | 16 +- .../models/sap_table_resource_dataset_py3.py | 73 + .../datafactory/models/sap_table_source.py | 24 +- .../models/sap_table_source_py3.py | 100 ++ .../datafactory/models/schedule_trigger.py | 12 +- .../models/schedule_trigger_py3.py | 64 + .../models/schedule_trigger_recurrence.py | 18 +- .../models/schedule_trigger_recurrence_py3.py | 54 + .../mgmt/datafactory/models/script_action.py | 22 +- .../datafactory/models/script_action_py3.py | 49 + .../mgmt/datafactory/models/secret_base.py | 8 +- .../datafactory/models/secret_base_py3.py | 41 + .../mgmt/datafactory/models/secure_string.py | 12 +- .../datafactory/models/secure_string_py3.py | 40 + ...dency_tumbling_window_trigger_reference.py | 16 +- ...y_tumbling_window_trigger_reference_py3.py | 46 + .../models/self_hosted_integration_runtime.py | 10 +- .../self_hosted_integration_runtime_node.py | 6 +- ...elf_hosted_integration_runtime_node_py3.py | 139 ++ .../self_hosted_integration_runtime_py3.py | 46 + .../self_hosted_integration_runtime_status.py | 12 +- ...f_hosted_integration_runtime_status_py3.py | 146 ++ .../models/service_now_linked_service.py | 34 +- .../models/service_now_linked_service_py3.py | 106 ++ .../models/service_now_object_dataset.py | 12 +- 
.../models/service_now_object_dataset_py3.py | 72 + .../datafactory/models/service_now_source.py | 10 +- .../models/service_now_source_py3.py | 57 + .../models/set_variable_activity.py | 14 +- .../models/set_variable_activity_py3.py | 59 + .../mgmt/datafactory/models/sftp_location.py | 15 +- .../datafactory/models/sftp_location_py3.py | 45 + .../datafactory/models/sftp_read_settings.py | 18 +- .../models/sftp_read_settings_py3.py | 68 + .../models/sftp_server_linked_service.py | 34 +- .../models/sftp_server_linked_service_py3.py | 119 ++ .../models/shopify_linked_service.py | 22 +- .../models/shopify_linked_service_py3.py | 86 + .../models/shopify_object_dataset.py | 12 +- .../models/shopify_object_dataset_py3.py | 72 + .../mgmt/datafactory/models/shopify_source.py | 10 +- .../datafactory/models/shopify_source_py3.py | 57 + .../models/spark_linked_service.py | 46 +- .../models/spark_linked_service_py3.py | 131 ++ .../models/spark_object_dataset.py | 16 +- .../models/spark_object_dataset_py3.py | 82 + .../mgmt/datafactory/models/spark_source.py | 10 +- .../datafactory/models/spark_source_py3.py | 57 + .../mgmt/datafactory/models/sql_dw_sink.py | 16 +- .../datafactory/models/sql_dw_sink_py3.py | 83 + .../mgmt/datafactory/models/sql_dw_source.py | 14 +- .../datafactory/models/sql_dw_source_py3.py | 70 + .../mgmt/datafactory/models/sql_mi_sink.py | 20 +- .../datafactory/models/sql_mi_sink_py3.py | 93 + .../mgmt/datafactory/models/sql_mi_source.py | 16 +- .../datafactory/models/sql_mi_source_py3.py | 73 + .../models/sql_server_linked_service.py | 18 +- .../models/sql_server_linked_service_py3.py | 74 + .../datafactory/models/sql_server_sink.py | 20 +- .../datafactory/models/sql_server_sink_py3.py | 93 + .../datafactory/models/sql_server_source.py | 16 +- .../models/sql_server_source_py3.py | 73 + .../sql_server_stored_procedure_activity.py | 18 +- ...ql_server_stored_procedure_activity_py3.py | 70 + .../models/sql_server_table_dataset.py | 16 +- .../models/sql_server_table_dataset_py3.py | 82 + .../azure/mgmt/datafactory/models/sql_sink.py | 20 +- .../mgmt/datafactory/models/sql_sink_py3.py | 93 + .../mgmt/datafactory/models/sql_source.py | 14 +- .../mgmt/datafactory/models/sql_source_py3.py | 69 + .../models/square_linked_service.py | 34 +- .../models/square_linked_service_py3.py | 98 ++ .../models/square_object_dataset.py | 12 +- .../models/square_object_dataset_py3.py | 72 + .../mgmt/datafactory/models/square_source.py | 10 +- .../datafactory/models/square_source_py3.py | 57 + .../models/ssis_access_credential.py | 18 +- .../models/ssis_access_credential_py3.py | 44 + .../datafactory/models/ssis_environment.py | 12 +- .../models/ssis_environment_py3.py | 51 + .../models/ssis_environment_reference.py | 12 +- .../models/ssis_environment_reference_py3.py | 40 + .../models/ssis_execution_credential.py | 18 +- .../models/ssis_execution_credential_py3.py | 44 + .../models/ssis_execution_parameter.py | 12 +- .../models/ssis_execution_parameter_py3.py | 35 + .../mgmt/datafactory/models/ssis_folder.py | 15 +- .../datafactory/models/ssis_folder_py3.py | 43 + .../datafactory/models/ssis_log_location.py | 19 +- .../models/ssis_log_location_py3.py | 57 + .../models/ssis_object_metadata.py | 14 +- .../ssis_object_metadata_list_response.py | 8 +- .../ssis_object_metadata_list_response_py3.py | 33 + .../models/ssis_object_metadata_py3.py | 53 + .../ssis_object_metadata_status_response.py | 12 +- ...sis_object_metadata_status_response_py3.py | 40 + .../mgmt/datafactory/models/ssis_package.py | 16 +- 
 .../models/ssis_package_location.py | 20 +-
 .../models/ssis_package_location_py3.py | 54 +
 .../datafactory/models/ssis_package_py3.py | 59 +
 .../mgmt/datafactory/models/ssis_parameter.py | 28 +-
 .../datafactory/models/ssis_parameter_py3.py | 72 +
 .../mgmt/datafactory/models/ssis_project.py | 16 +-
 .../datafactory/models/ssis_project_py3.py | 60 +
 .../models/ssis_property_override.py | 14 +-
 .../models/ssis_property_override_py3.py | 40 +
 .../mgmt/datafactory/models/ssis_variable.py | 18 +-
 .../datafactory/models/ssis_variable_py3.py | 52 +
 .../datafactory/models/staging_settings.py | 16 +-
 .../models/staging_settings_py3.py | 51 +
 .../datafactory/models/store_read_settings.py | 14 +-
 .../models/store_read_settings_py3.py | 45 +
 .../models/store_write_settings.py | 14 +-
 .../models/store_write_settings_py3.py | 57 +
 .../models/stored_procedure_parameter.py | 8 +-
 .../models/stored_procedure_parameter_py3.py | 35 +
 .../mgmt/datafactory/models/sub_resource.py | 4 +-
 .../datafactory/models/sub_resource_py3.py | 50 +
 .../models/sybase_linked_service.py | 30 +-
 .../models/sybase_linked_service_py3.py | 91 +
 .../mgmt/datafactory/models/sybase_source.py | 10 +-
 .../datafactory/models/sybase_source_py3.py | 57 +
 .../models/sybase_table_dataset.py | 12 +-
 .../models/sybase_table_dataset_py3.py | 72 +
 .../models/teradata_linked_service.py | 20 +-
 .../models/teradata_linked_service_py3.py | 84 +
 .../models/teradata_partition_settings.py | 10 +-
 .../models/teradata_partition_settings_py3.py | 42 +
 .../datafactory/models/teradata_source.py | 14 +-
 .../datafactory/models/teradata_source_py3.py | 70 +
 .../models/teradata_table_dataset.py | 14 +-
 .../models/teradata_table_dataset_py3.py | 77 +
 .../mgmt/datafactory/models/text_format.py | 26 +-
 .../datafactory/models/text_format_py3.py | 99 ++
 .../azure/mgmt/datafactory/models/trigger.py | 14 +-
 .../models/trigger_dependency_reference.py | 12 +-
 .../trigger_dependency_reference_py3.py | 46 +
 .../models/trigger_pipeline_reference.py | 8 +-
 .../models/trigger_pipeline_reference_py3.py | 32 +
 .../mgmt/datafactory/models/trigger_py3.py | 68 +
 .../datafactory/models/trigger_reference.py | 13 +-
 .../models/trigger_reference_py3.py | 44 +
 .../datafactory/models/trigger_resource.py | 10 +-
 .../models/trigger_resource_py3.py | 53 +
 .../mgmt/datafactory/models/trigger_run.py | 6 +-
 .../datafactory/models/trigger_run_py3.py | 78 +
 .../models/trigger_runs_query_response.py | 12 +-
 .../models/trigger_runs_query_response_py3.py | 39 +
 .../trigger_subscription_operation_status.py | 4 +-
 ...igger_subscription_operation_status_py3.py | 42 +
 .../models/tumbling_window_trigger.py | 48 +-
 ...ing_window_trigger_dependency_reference.py | 14 +-
 ...window_trigger_dependency_reference_py3.py | 50 +
 .../models/tumbling_window_trigger_py3.py | 112 ++
 .../mgmt/datafactory/models/until_activity.py | 22 +-
 .../datafactory/models/until_activity_py3.py | 72 +
 ...update_integration_runtime_node_request.py | 6 +-
 ...te_integration_runtime_node_request_py3.py | 34 +
 .../update_integration_runtime_request.py | 8 +-
 .../update_integration_runtime_request_py3.py | 38 +
 .../datafactory/models/user_access_policy.py | 14 +-
 .../models/user_access_policy_py3.py | 51 +
 .../mgmt/datafactory/models/user_property.py | 16 +-
 .../datafactory/models/user_property_py3.py | 40 +
 .../datafactory/models/validation_activity.py | 22 +-
 .../models/validation_activity_py3.py | 81 +
 .../models/variable_specification.py | 14 +-
 .../models/variable_specification_py3.py | 39 +
 .../models/vertica_linked_service.py | 14 +-
 .../models/vertica_linked_service_py3.py | 69 +
 .../mgmt/datafactory/models/vertica_source.py | 10 +-
 .../datafactory/models/vertica_source_py3.py | 57 +
 .../models/vertica_table_dataset.py | 16 +-
 .../models/vertica_table_dataset_py3.py | 82 +
 .../mgmt/datafactory/models/wait_activity.py | 14 +-
 .../datafactory/models/wait_activity_py3.py | 56 +
 .../mgmt/datafactory/models/web_activity.py | 32 +-
 .../models/web_activity_authentication.py | 19 +-
 .../models/web_activity_authentication_py3.py | 53 +
 .../datafactory/models/web_activity_py3.py | 98 ++
 .../models/web_anonymous_authentication.py | 15 +-
 .../web_anonymous_authentication_py3.py | 41 +
 .../models/web_basic_authentication.py | 20 +-
 .../models/web_basic_authentication_py3.py | 52 +
 .../web_client_certificate_authentication.py | 18 +-
 ...b_client_certificate_authentication_py3.py | 53 +
 .../datafactory/models/web_hook_activity.py | 27 +-
 .../models/web_hook_activity_py3.py | 92 +
 .../datafactory/models/web_linked_service.py | 12 +-
 .../models/web_linked_service_py3.py | 59 +
 .../web_linked_service_type_properties.py | 12 +-
 .../web_linked_service_type_properties_py3.py | 50 +
 .../mgmt/datafactory/models/web_source.py | 16 +-
 .../mgmt/datafactory/models/web_source_py3.py | 52 +
 .../datafactory/models/web_table_dataset.py | 18 +-
 .../models/web_table_dataset_py3.py | 78 +
 .../datafactory/models/xero_linked_service.py | 25 +-
 .../models/xero_linked_service_py3.py | 93 +
 .../datafactory/models/xero_object_dataset.py | 12 +-
 .../models/xero_object_dataset_py3.py | 72 +
 .../mgmt/datafactory/models/xero_source.py | 10 +-
 .../datafactory/models/xero_source_py3.py | 57 +
 .../datafactory/models/zoho_linked_service.py | 22 +-
 .../models/zoho_linked_service_py3.py | 85 +
 .../datafactory/models/zoho_object_dataset.py | 12 +-
 .../models/zoho_object_dataset_py3.py | 72 +
 .../mgmt/datafactory/models/zoho_source.py | 10 +-
 .../datafactory/models/zoho_source_py3.py | 57 +
 .../operations/activity_runs_operations.py | 6 +-
 .../operations/datasets_operations.py | 24 +-
 .../operations/exposure_control_operations.py | 12 +-
 .../operations/factories_operations.py | 55 +-
 .../integration_runtime_nodes_operations.py | 23 +-
 ...tion_runtime_object_metadata_operations.py | 63 +-
 .../integration_runtimes_operations.py | 188 +-
 .../operations/linked_services_operations.py | 24 +-
 .../mgmt/datafactory/operations/operations.py | 7 +-
 .../operations/pipeline_runs_operations.py | 17 +-
 .../operations/pipelines_operations.py | 30 +-
 .../operations/rerun_triggers_operations.py | 167 +-
 .../operations/trigger_runs_operations.py | 11 +-
 .../operations/triggers_operations.py | 240 +--
 swagger_to_sdk_config.json | 2 +-
 1048 files changed, 41404 insertions(+), 4890 deletions(-)
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_write_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_subscription_operation_status_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source_py3.py
 create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset_py3.py
 create mode 100644
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source_py3.py diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/data_factory_management_client.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/data_factory_management_client.py index 40e1e7c37322..bb8a2a22fd77 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/data_factory_management_client.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/data_factory_management_client.py @@ -9,7 +9,7 @@ # regenerated. # -------------------------------------------------------------------------- -from msrest.service_client import ServiceClient +from msrest.service_client import SDKClient from msrest import Serializer, Deserializer from msrestazure import AzureConfiguration from .version import VERSION @@ -62,7 +62,7 @@ def __init__( self.subscription_id = subscription_id -class DataFactoryManagementClient(object): +class DataFactoryManagementClient(SDKClient): """The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory V2 services. :ivar config: Configuration for client. @@ -109,7 +109,7 @@ def __init__( self, credentials, subscription_id, base_url=None): self.config = DataFactoryManagementClientConfiguration(credentials, subscription_id, base_url) - self._client = ServiceClient(self.config.credentials, self.config) + super(DataFactoryManagementClient, self).__init__(self.config.credentials, self.config) client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self.api_version = '2018-06-01' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index cb75267dd073..395f7908afbd 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -9,521 +9,1038 @@ # regenerated. 
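Note on the hunk above: the generated client now derives from msrest's SDKClient instead of holding a bare ServiceClient. A minimal sketch of what that buys callers, assuming any credential object the client already accepts (the function name and listing call below are illustrative, not part of this patch):

```python
# Minimal sketch, not part of this patch: SDKClient supplies close() and the
# __enter__/__exit__ context-manager protocol, so after this change the
# generated client can be used in a `with` block. `credentials` is a
# placeholder for any credential object the client constructor accepts.
from azure.mgmt.datafactory import DataFactoryManagementClient

def list_factory_names(credentials, subscription_id):
    # SDKClient.__exit__ closes the underlying HTTP session on exit.
    with DataFactoryManagementClient(credentials, subscription_id) as client:
        return [factory.name for factory in client.factories.list()]
```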
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
index cb75267dd073..395f7908afbd 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
@@ -9,521 +9,1038 @@
 # regenerated.
 # --------------------------------------------------------------------------
 
-from .resource import Resource
-from .sub_resource import SubResource
-from .expression import Expression
-from .secure_string import SecureString
-from .linked_service_reference import LinkedServiceReference
-from .azure_key_vault_secret_reference import AzureKeyVaultSecretReference
-from .secret_base import SecretBase
-from .factory_identity import FactoryIdentity
-from .factory_repo_configuration import FactoryRepoConfiguration
-from .factory import Factory
-from .integration_runtime import IntegrationRuntime
-from .integration_runtime_resource import IntegrationRuntimeResource
-from .integration_runtime_reference import IntegrationRuntimeReference
-from .integration_runtime_status import IntegrationRuntimeStatus
-from .integration_runtime_status_response import IntegrationRuntimeStatusResponse
-from .integration_runtime_status_list_response import IntegrationRuntimeStatusListResponse
-from .update_integration_runtime_request import UpdateIntegrationRuntimeRequest
-from .update_integration_runtime_node_request import UpdateIntegrationRuntimeNodeRequest
-from .linked_integration_runtime_request import LinkedIntegrationRuntimeRequest
-from .create_linked_integration_runtime_request import CreateLinkedIntegrationRuntimeRequest
-from .parameter_specification import ParameterSpecification
-from .linked_service import LinkedService
-from .linked_service_resource import LinkedServiceResource
-from .dataset_folder import DatasetFolder
-from .dataset import Dataset
-from .dataset_resource import DatasetResource
-from .activity_dependency import ActivityDependency
-from .user_property import UserProperty
-from .activity import Activity
-from .variable_specification import VariableSpecification
-from .pipeline_folder import PipelineFolder
-from .pipeline_resource import PipelineResource
-from .trigger import Trigger
-from .trigger_resource import TriggerResource
-from .create_run_response import CreateRunResponse
-from .trigger_subscription_operation_status import TriggerSubscriptionOperationStatus
-from .factory_vsts_configuration import FactoryVSTSConfiguration
-from .factory_git_hub_configuration import FactoryGitHubConfiguration
-from .factory_repo_update import FactoryRepoUpdate
-from .git_hub_access_token_request import GitHubAccessTokenRequest
-from .git_hub_access_token_response import GitHubAccessTokenResponse
-from .user_access_policy import UserAccessPolicy
-from .access_policy_response import AccessPolicyResponse
-from .pipeline_reference import PipelineReference
-from .trigger_pipeline_reference import TriggerPipelineReference
-from .factory_update_parameters import FactoryUpdateParameters
-from .dataset_reference import DatasetReference
-from .run_query_filter import RunQueryFilter
-from .run_query_order_by import RunQueryOrderBy
-from .run_filter_parameters import RunFilterParameters
-from .pipeline_run_invoked_by import PipelineRunInvokedBy
-from .pipeline_run import PipelineRun
-from .pipeline_runs_query_response import PipelineRunsQueryResponse
-from .activity_run import ActivityRun
-from .activity_runs_query_response import ActivityRunsQueryResponse
-from .trigger_run import TriggerRun
-from .trigger_runs_query_response import TriggerRunsQueryResponse
-from .rerun_tumbling_window_trigger_action_parameters import RerunTumblingWindowTriggerActionParameters
-from .rerun_tumbling_window_trigger import RerunTumblingWindowTrigger
-from .rerun_trigger_resource import RerunTriggerResource
-from .operation_display import OperationDisplay
-from .operation_log_specification import OperationLogSpecification
-from .operation_metric_availability import OperationMetricAvailability
-from .operation_metric_dimension import OperationMetricDimension
-from .operation_metric_specification import OperationMetricSpecification
-from .operation_service_specification import OperationServiceSpecification
-from .operation import Operation
-from .get_ssis_object_metadata_request import GetSsisObjectMetadataRequest
-from .ssis_object_metadata_status_response import SsisObjectMetadataStatusResponse
-from .exposure_control_request import ExposureControlRequest
-from .exposure_control_response import ExposureControlResponse
-from .self_dependency_tumbling_window_trigger_reference import SelfDependencyTumblingWindowTriggerReference
-from .trigger_reference import TriggerReference
-from .tumbling_window_trigger_dependency_reference import TumblingWindowTriggerDependencyReference
-from .trigger_dependency_reference import TriggerDependencyReference
-from .dependency_reference import DependencyReference
-from .retry_policy import RetryPolicy
-from .tumbling_window_trigger import TumblingWindowTrigger
-from .blob_events_trigger import BlobEventsTrigger
-from .blob_trigger import BlobTrigger
-from .recurrence_schedule_occurrence import RecurrenceScheduleOccurrence
-from .recurrence_schedule import RecurrenceSchedule
-from .schedule_trigger_recurrence import ScheduleTriggerRecurrence
-from .schedule_trigger import ScheduleTrigger
-from .multiple_pipeline_trigger import MultiplePipelineTrigger
-from .azure_function_linked_service import AzureFunctionLinkedService
-from .azure_data_explorer_linked_service import AzureDataExplorerLinkedService
-from .sap_table_linked_service import SapTableLinkedService
-from .google_ad_words_linked_service import GoogleAdWordsLinkedService
-from .oracle_service_cloud_linked_service import OracleServiceCloudLinkedService
-from .dynamics_ax_linked_service import DynamicsAXLinkedService
-from .responsys_linked_service import ResponsysLinkedService
-from .azure_databricks_linked_service import AzureDatabricksLinkedService
-from .azure_data_lake_analytics_linked_service import AzureDataLakeAnalyticsLinkedService
-from .script_action import ScriptAction
-from .hd_insight_on_demand_linked_service import HDInsightOnDemandLinkedService
-from .salesforce_marketing_cloud_linked_service import SalesforceMarketingCloudLinkedService
-from .netezza_linked_service import NetezzaLinkedService
-from .vertica_linked_service import VerticaLinkedService
-from .zoho_linked_service import ZohoLinkedService
-from .xero_linked_service import XeroLinkedService
-from .square_linked_service import SquareLinkedService
-from .spark_linked_service import SparkLinkedService
-from .shopify_linked_service import ShopifyLinkedService
-from .service_now_linked_service import ServiceNowLinkedService
-from .quick_books_linked_service import QuickBooksLinkedService
-from .presto_linked_service import PrestoLinkedService
-from .phoenix_linked_service import PhoenixLinkedService
-from .paypal_linked_service import PaypalLinkedService
-from .marketo_linked_service import MarketoLinkedService
-from .azure_maria_db_linked_service import AzureMariaDBLinkedService
-from .maria_db_linked_service import MariaDBLinkedService
-from .magento_linked_service import MagentoLinkedService
-from .jira_linked_service import JiraLinkedService
-from .impala_linked_service import ImpalaLinkedService
-from .hubspot_linked_service import HubspotLinkedService
-from .hive_linked_service import HiveLinkedService
-from .hbase_linked_service import HBaseLinkedService
-from .greenplum_linked_service import GreenplumLinkedService
-from .google_big_query_linked_service import GoogleBigQueryLinkedService
-from .eloqua_linked_service import EloquaLinkedService
-from .drill_linked_service import DrillLinkedService
-from .couchbase_linked_service import CouchbaseLinkedService
-from .concur_linked_service import ConcurLinkedService
-from .azure_postgre_sql_linked_service import AzurePostgreSqlLinkedService
-from .amazon_mws_linked_service import AmazonMWSLinkedService
-from .sap_hana_linked_service import SapHanaLinkedService
-from .sap_bw_linked_service import SapBWLinkedService
-from .sftp_server_linked_service import SftpServerLinkedService
-from .ftp_server_linked_service import FtpServerLinkedService
-from .http_linked_service import HttpLinkedService
-from .azure_search_linked_service import AzureSearchLinkedService
-from .custom_data_source_linked_service import CustomDataSourceLinkedService
-from .amazon_redshift_linked_service import AmazonRedshiftLinkedService
-from .amazon_s3_linked_service import AmazonS3LinkedService
-from .rest_service_linked_service import RestServiceLinkedService
-from .sap_open_hub_linked_service import SapOpenHubLinkedService
-from .sap_ecc_linked_service import SapEccLinkedService
-from .sap_cloud_for_customer_linked_service import SapCloudForCustomerLinkedService
-from .salesforce_service_cloud_linked_service import SalesforceServiceCloudLinkedService
-from .salesforce_linked_service import SalesforceLinkedService
-from .office365_linked_service import Office365LinkedService
-from .azure_blob_fs_linked_service import AzureBlobFSLinkedService
-from .azure_data_lake_store_linked_service import AzureDataLakeStoreLinkedService
-from .cosmos_db_mongo_db_api_linked_service import CosmosDbMongoDbApiLinkedService
-from .mongo_db_v2_linked_service import MongoDbV2LinkedService
-from .mongo_db_linked_service import MongoDbLinkedService
-from .cassandra_linked_service import CassandraLinkedService
-from .web_client_certificate_authentication import WebClientCertificateAuthentication
-from .web_basic_authentication import WebBasicAuthentication
-from .web_anonymous_authentication import WebAnonymousAuthentication
-from .web_linked_service_type_properties import WebLinkedServiceTypeProperties
-from .web_linked_service import WebLinkedService
-from .odata_linked_service import ODataLinkedService
-from .hdfs_linked_service import HdfsLinkedService
-from .microsoft_access_linked_service import MicrosoftAccessLinkedService
-from .informix_linked_service import InformixLinkedService
-from .odbc_linked_service import OdbcLinkedService
-from .azure_ml_linked_service import AzureMLLinkedService
-from .teradata_linked_service import TeradataLinkedService
-from .db2_linked_service import Db2LinkedService
-from .sybase_linked_service import SybaseLinkedService
-from .postgre_sql_linked_service import PostgreSqlLinkedService
-from .my_sql_linked_service import MySqlLinkedService
-from .azure_my_sql_linked_service import AzureMySqlLinkedService
-from .oracle_linked_service import OracleLinkedService
-from .file_server_linked_service import FileServerLinkedService
-from .hd_insight_linked_service import HDInsightLinkedService
-from .common_data_service_for_apps_linked_service import CommonDataServiceForAppsLinkedService
-from .dynamics_crm_linked_service import DynamicsCrmLinkedService
-from .dynamics_linked_service import DynamicsLinkedService
-from .cosmos_db_linked_service import CosmosDbLinkedService
-from .azure_key_vault_linked_service import AzureKeyVaultLinkedService
-from .azure_batch_linked_service import AzureBatchLinkedService
-from .azure_sql_mi_linked_service import AzureSqlMILinkedService
-from .azure_sql_database_linked_service import AzureSqlDatabaseLinkedService
-from .sql_server_linked_service import SqlServerLinkedService
-from .azure_sql_dw_linked_service import AzureSqlDWLinkedService
-from .azure_table_storage_linked_service import AzureTableStorageLinkedService
-from .azure_blob_storage_linked_service import AzureBlobStorageLinkedService
-from .azure_storage_linked_service import AzureStorageLinkedService
-from .google_ad_words_object_dataset import GoogleAdWordsObjectDataset
-from .azure_data_explorer_table_dataset import AzureDataExplorerTableDataset
-from .oracle_service_cloud_object_dataset import OracleServiceCloudObjectDataset
-from .dynamics_ax_resource_dataset import DynamicsAXResourceDataset
-from .responsys_object_dataset import ResponsysObjectDataset
-from .salesforce_marketing_cloud_object_dataset import SalesforceMarketingCloudObjectDataset
-from .vertica_table_dataset import VerticaTableDataset
-from .netezza_table_dataset import NetezzaTableDataset
-from .zoho_object_dataset import ZohoObjectDataset
-from .xero_object_dataset import XeroObjectDataset
-from .square_object_dataset import SquareObjectDataset
-from .spark_object_dataset import SparkObjectDataset
-from .shopify_object_dataset import ShopifyObjectDataset
-from .service_now_object_dataset import ServiceNowObjectDataset
-from .quick_books_object_dataset import QuickBooksObjectDataset
-from .presto_object_dataset import PrestoObjectDataset
-from .phoenix_object_dataset import PhoenixObjectDataset
-from .paypal_object_dataset import PaypalObjectDataset
-from .marketo_object_dataset import MarketoObjectDataset
-from .azure_maria_db_table_dataset import AzureMariaDBTableDataset
-from .maria_db_table_dataset import MariaDBTableDataset
-from .magento_object_dataset import MagentoObjectDataset
-from .jira_object_dataset import JiraObjectDataset
-from .impala_object_dataset import ImpalaObjectDataset
-from .hubspot_object_dataset import HubspotObjectDataset
-from .hive_object_dataset import HiveObjectDataset
-from .hbase_object_dataset import HBaseObjectDataset
-from .greenplum_table_dataset import GreenplumTableDataset
-from .google_big_query_object_dataset import GoogleBigQueryObjectDataset
-from .eloqua_object_dataset import EloquaObjectDataset
-from .drill_table_dataset import DrillTableDataset
-from .couchbase_table_dataset import CouchbaseTableDataset
-from .concur_object_dataset import ConcurObjectDataset
-from .azure_postgre_sql_table_dataset import AzurePostgreSqlTableDataset
-from .amazon_mws_object_dataset import AmazonMWSObjectDataset
-from .dataset_zip_deflate_compression import DatasetZipDeflateCompression
-from .dataset_deflate_compression import DatasetDeflateCompression
-from .dataset_gzip_compression import DatasetGZipCompression
-from .dataset_bzip2_compression import DatasetBZip2Compression
-from .dataset_compression import DatasetCompression
-from .parquet_format import ParquetFormat
-from .orc_format import OrcFormat
-from .avro_format import AvroFormat
-from .json_format import JsonFormat
-from .text_format import TextFormat
-from .dataset_storage_format import DatasetStorageFormat
-from .http_dataset import HttpDataset
-from .azure_search_index_dataset import AzureSearchIndexDataset
-from .web_table_dataset import WebTableDataset
-from .sap_table_resource_dataset import SapTableResourceDataset
-from .rest_resource_dataset import RestResourceDataset
-from .sql_server_table_dataset import SqlServerTableDataset
-from .sap_open_hub_table_dataset import SapOpenHubTableDataset
-from .sap_hana_table_dataset import SapHanaTableDataset
-from .sap_ecc_resource_dataset import SapEccResourceDataset
-from .sap_cloud_for_customer_resource_dataset import SapCloudForCustomerResourceDataset
-from .sap_bw_cube_dataset import SapBwCubeDataset
-from .sybase_table_dataset import SybaseTableDataset
-from .salesforce_service_cloud_object_dataset import SalesforceServiceCloudObjectDataset
-from .salesforce_object_dataset import SalesforceObjectDataset
-from .microsoft_access_table_dataset import MicrosoftAccessTableDataset
-from .postgre_sql_table_dataset import PostgreSqlTableDataset
-from .my_sql_table_dataset import MySqlTableDataset
-from .odbc_table_dataset import OdbcTableDataset
-from .informix_table_dataset import InformixTableDataset
-from .relational_table_dataset import RelationalTableDataset
-from .db2_table_dataset import Db2TableDataset
-from .amazon_redshift_table_dataset import AmazonRedshiftTableDataset
-from .azure_my_sql_table_dataset import AzureMySqlTableDataset
-from .teradata_table_dataset import TeradataTableDataset
-from .oracle_table_dataset import OracleTableDataset
-from .odata_resource_dataset import ODataResourceDataset
-from .cosmos_db_mongo_db_api_collection_dataset import CosmosDbMongoDbApiCollectionDataset
-from .mongo_db_v2_collection_dataset import MongoDbV2CollectionDataset
-from .mongo_db_collection_dataset import MongoDbCollectionDataset
-from .file_share_dataset import FileShareDataset
-from .office365_dataset import Office365Dataset
-from .azure_blob_fs_dataset import AzureBlobFSDataset
-from .azure_data_lake_store_dataset import AzureDataLakeStoreDataset
-from .common_data_service_for_apps_entity_dataset import CommonDataServiceForAppsEntityDataset
-from .dynamics_crm_entity_dataset import DynamicsCrmEntityDataset
-from .dynamics_entity_dataset import DynamicsEntityDataset
-from .document_db_collection_dataset import DocumentDbCollectionDataset
-from .custom_dataset import CustomDataset
-from .cassandra_table_dataset import CassandraTableDataset
-from .azure_sql_dw_table_dataset import AzureSqlDWTableDataset
-from .azure_sql_mi_table_dataset import AzureSqlMITableDataset
-from .azure_sql_table_dataset import AzureSqlTableDataset
-from .azure_table_dataset import AzureTableDataset
-from .azure_blob_dataset import AzureBlobDataset
-from .hdfs_location import HdfsLocation
-from .http_server_location import HttpServerLocation
-from .sftp_location import SftpLocation
-from .ftp_server_location import FtpServerLocation
-from .file_server_location import FileServerLocation
-from .amazon_s3_location import AmazonS3Location
-from .azure_data_lake_store_location import AzureDataLakeStoreLocation
-from .azure_blob_fs_location import AzureBlobFSLocation
-from .azure_blob_storage_location import AzureBlobStorageLocation
-from .dataset_location import DatasetLocation
-from .binary_dataset import BinaryDataset
-from .json_dataset import JsonDataset
-from .delimited_text_dataset import DelimitedTextDataset
-from .parquet_dataset import ParquetDataset
-from .avro_dataset import AvroDataset
-from .amazon_s3_dataset import AmazonS3Dataset
-from .activity_policy import ActivityPolicy
-from .azure_function_activity import AzureFunctionActivity
-from .databricks_spark_python_activity import DatabricksSparkPythonActivity
-from .databricks_spark_jar_activity import DatabricksSparkJarActivity
-from .databricks_notebook_activity import DatabricksNotebookActivity
-from .data_lake_analytics_usql_activity import DataLakeAnalyticsUSQLActivity
-from .azure_ml_update_resource_activity import AzureMLUpdateResourceActivity
-from .azure_ml_web_service_file import AzureMLWebServiceFile
-from .azure_ml_batch_execution_activity import AzureMLBatchExecutionActivity
-from .get_metadata_activity import GetMetadataActivity
-from .web_activity_authentication import WebActivityAuthentication
-from .web_activity import WebActivity
-from .redshift_unload_settings import RedshiftUnloadSettings
-from .amazon_redshift_source import AmazonRedshiftSource
-from .google_ad_words_source import GoogleAdWordsSource
-from .oracle_service_cloud_source import OracleServiceCloudSource
-from .dynamics_ax_source import DynamicsAXSource
-from .responsys_source import ResponsysSource
-from .salesforce_marketing_cloud_source import SalesforceMarketingCloudSource
-from .vertica_source import VerticaSource
-from .netezza_partition_settings import NetezzaPartitionSettings
-from .netezza_source import NetezzaSource
-from .zoho_source import ZohoSource
-from .xero_source import XeroSource
-from .square_source import SquareSource
-from .spark_source import SparkSource
-from .shopify_source import ShopifySource
-from .service_now_source import ServiceNowSource
-from .quick_books_source import QuickBooksSource
-from .presto_source import PrestoSource
-from .phoenix_source import PhoenixSource
-from .paypal_source import PaypalSource
-from .marketo_source import MarketoSource
-from .azure_maria_db_source import AzureMariaDBSource
-from .maria_db_source import MariaDBSource
-from .magento_source import MagentoSource
-from .jira_source import JiraSource
-from .impala_source import ImpalaSource
-from .hubspot_source import HubspotSource
-from .hive_source import HiveSource
-from .hbase_source import HBaseSource
-from .greenplum_source import GreenplumSource
-from .google_big_query_source import GoogleBigQuerySource
-from .eloqua_source import EloquaSource
-from .drill_source import DrillSource
-from .couchbase_source import CouchbaseSource
-from .concur_source import ConcurSource
-from .azure_postgre_sql_source import AzurePostgreSqlSource
-from .amazon_mws_source import AmazonMWSSource
-from .http_source import HttpSource
-from .azure_blob_fs_source import AzureBlobFSSource
-from .azure_data_lake_store_source import AzureDataLakeStoreSource
-from .office365_source import Office365Source
-from .mongo_db_cursor_methods_properties import MongoDbCursorMethodsProperties
-from .cosmos_db_mongo_db_api_source import CosmosDbMongoDbApiSource
-from .mongo_db_v2_source import MongoDbV2Source
-from .mongo_db_source import MongoDbSource
-from .cassandra_source import CassandraSource
-from .web_source import WebSource
-from .teradata_partition_settings import TeradataPartitionSettings
-from .teradata_source import TeradataSource
-from .oracle_partition_settings import OraclePartitionSettings
-from .oracle_source import OracleSource
-from .azure_data_explorer_source import AzureDataExplorerSource
-from .azure_my_sql_source import AzureMySqlSource
-from .distcp_settings import DistcpSettings
-from .hdfs_source import HdfsSource
-from .file_system_source import FileSystemSource
-from .sql_dw_source import SqlDWSource
-from .stored_procedure_parameter import StoredProcedureParameter
-from .sql_mi_source import SqlMISource
-from .azure_sql_source import AzureSqlSource
-from .sql_server_source import SqlServerSource
-from .sql_source import SqlSource
-from .rest_source import RestSource
-from .sap_table_partition_settings import SapTablePartitionSettings
-from .sap_table_source import SapTableSource
-from .sap_open_hub_source import SapOpenHubSource
-from .sap_hana_source import SapHanaSource
-from .sap_ecc_source import SapEccSource
-from .sap_cloud_for_customer_source import SapCloudForCustomerSource
-from .salesforce_service_cloud_source import SalesforceServiceCloudSource
-from .salesforce_source import SalesforceSource
-from .odata_source import ODataSource
-from .sap_bw_source import SapBwSource
-from .sybase_source import SybaseSource
-from .postgre_sql_source import PostgreSqlSource
-from .my_sql_source import MySqlSource
-from .odbc_source import OdbcSource
-from .db2_source import Db2Source
-from .microsoft_access_source import MicrosoftAccessSource
-from .informix_source import InformixSource
-from .relational_source import RelationalSource
-from .common_data_service_for_apps_source import CommonDataServiceForAppsSource
-from .dynamics_crm_source import DynamicsCrmSource
-from .dynamics_source import DynamicsSource
-from .document_db_collection_source import DocumentDbCollectionSource
-from .blob_source import BlobSource
-from .azure_table_source import AzureTableSource
-from .hdfs_read_settings import HdfsReadSettings
-from .http_read_settings import HttpReadSettings
-from .sftp_read_settings import SftpReadSettings
-from .ftp_read_settings import FtpReadSettings
-from .file_server_read_settings import FileServerReadSettings
-from .amazon_s3_read_settings import AmazonS3ReadSettings
-from .azure_data_lake_store_read_settings import AzureDataLakeStoreReadSettings
-from .azure_blob_fs_read_settings import AzureBlobFSReadSettings
-from .azure_blob_storage_read_settings import AzureBlobStorageReadSettings
-from .store_read_settings import StoreReadSettings
-from .binary_source import BinarySource
-from .json_source import JsonSource
-from .format_read_settings import FormatReadSettings
-from .delimited_text_read_settings import DelimitedTextReadSettings
-from .delimited_text_source import DelimitedTextSource
-from .parquet_source import ParquetSource
-from .avro_source import AvroSource
-from .copy_source import CopySource
-from .lookup_activity import LookupActivity
-from .azure_data_explorer_command_activity import AzureDataExplorerCommandActivity
-from .log_storage_settings import LogStorageSettings
-from .delete_activity import DeleteActivity
-from .sql_server_stored_procedure_activity import SqlServerStoredProcedureActivity
-from .custom_activity_reference_object import CustomActivityReferenceObject
-from .custom_activity import CustomActivity
-from .ssis_access_credential import SSISAccessCredential
-from .ssis_log_location import SSISLogLocation
-from .ssis_property_override import SSISPropertyOverride
-from .ssis_execution_parameter import SSISExecutionParameter
-from .ssis_execution_credential import SSISExecutionCredential
-from .ssis_package_location import SSISPackageLocation
-from .execute_ssis_package_activity import ExecuteSSISPackageActivity
-from .hd_insight_spark_activity import HDInsightSparkActivity
-from .hd_insight_streaming_activity import HDInsightStreamingActivity
-from .hd_insight_map_reduce_activity import HDInsightMapReduceActivity
-from .hd_insight_pig_activity import HDInsightPigActivity
-from .hd_insight_hive_activity import HDInsightHiveActivity
-from .redirect_incompatible_row_settings import RedirectIncompatibleRowSettings
-from .staging_settings import StagingSettings
-from .cosmos_db_mongo_db_api_sink import CosmosDbMongoDbApiSink
-from .salesforce_service_cloud_sink import SalesforceServiceCloudSink
-from .salesforce_sink import SalesforceSink
-from .azure_data_explorer_sink import AzureDataExplorerSink
-from .common_data_service_for_apps_sink import CommonDataServiceForAppsSink
-from .dynamics_crm_sink import DynamicsCrmSink
-from .dynamics_sink import DynamicsSink
-from .microsoft_access_sink import MicrosoftAccessSink
-from .informix_sink import InformixSink
-from .odbc_sink import OdbcSink
-from .azure_search_index_sink import AzureSearchIndexSink
-from .azure_blob_fs_sink import AzureBlobFSSink
-from .azure_data_lake_store_sink import AzureDataLakeStoreSink
-from .oracle_sink import OracleSink
-from .polybase_settings import PolybaseSettings
-from .sql_dw_sink import SqlDWSink
-from .sql_mi_sink import SqlMISink
-from .azure_sql_sink import AzureSqlSink
-from .sql_server_sink import SqlServerSink
-from .sql_sink import SqlSink
-from .document_db_collection_sink import DocumentDbCollectionSink
-from .file_system_sink import FileSystemSink
-from .blob_sink import BlobSink
-from .file_server_write_settings import FileServerWriteSettings
-from .azure_data_lake_store_write_settings import AzureDataLakeStoreWriteSettings
-from .azure_blob_fs_write_settings import AzureBlobFSWriteSettings
-from .azure_blob_storage_write_settings import AzureBlobStorageWriteSettings
-from .store_write_settings import StoreWriteSettings
-from .binary_sink import BinarySink
-from .parquet_sink import ParquetSink
-from .json_write_settings import JsonWriteSettings
-from .delimited_text_write_settings import DelimitedTextWriteSettings
-from .format_write_settings import FormatWriteSettings
-from .avro_write_settings import AvroWriteSettings
-from .avro_sink import AvroSink
-from .azure_table_sink import AzureTableSink
-from .azure_queue_sink import AzureQueueSink
-from .sap_cloud_for_customer_sink import SapCloudForCustomerSink
-from .azure_my_sql_sink import AzureMySqlSink
-from .azure_postgre_sql_sink import AzurePostgreSqlSink
-from .json_sink import JsonSink
-from .delimited_text_sink import DelimitedTextSink
-from .copy_sink import CopySink
-from .copy_activity import CopyActivity
-from .execution_activity import ExecutionActivity
-from .web_hook_activity import WebHookActivity
-from .append_variable_activity import AppendVariableActivity
-from .set_variable_activity import SetVariableActivity
-from .filter_activity import FilterActivity
-from .validation_activity import ValidationActivity
-from .until_activity import UntilActivity
-from .wait_activity import WaitActivity
-from .for_each_activity import ForEachActivity
-from .if_condition_activity import IfConditionActivity
-from .execute_pipeline_activity import ExecutePipelineActivity
-from .control_activity import ControlActivity
-from .linked_integration_runtime import LinkedIntegrationRuntime
-from .self_hosted_integration_runtime_node import SelfHostedIntegrationRuntimeNode
-from .self_hosted_integration_runtime_status import SelfHostedIntegrationRuntimeStatus
-from .managed_integration_runtime_operation_result import ManagedIntegrationRuntimeOperationResult
-from .managed_integration_runtime_error import ManagedIntegrationRuntimeError
-from .managed_integration_runtime_node import ManagedIntegrationRuntimeNode
-from .managed_integration_runtime_status import ManagedIntegrationRuntimeStatus
-from .linked_integration_runtime_rbac_authorization import LinkedIntegrationRuntimeRbacAuthorization
-from .linked_integration_runtime_key_authorization import LinkedIntegrationRuntimeKeyAuthorization
-from .linked_integration_runtime_type import LinkedIntegrationRuntimeType
-from .self_hosted_integration_runtime import SelfHostedIntegrationRuntime
-from .entity_reference import EntityReference
-from .integration_runtime_data_proxy_properties import IntegrationRuntimeDataProxyProperties
-from .integration_runtime_custom_setup_script_properties import IntegrationRuntimeCustomSetupScriptProperties
-from .integration_runtime_ssis_catalog_info import IntegrationRuntimeSsisCatalogInfo
-from .integration_runtime_ssis_properties import IntegrationRuntimeSsisProperties
-from .integration_runtime_vnet_properties import IntegrationRuntimeVNetProperties
-from .integration_runtime_compute_properties import IntegrationRuntimeComputeProperties
-from .managed_integration_runtime import ManagedIntegrationRuntime
-from .integration_runtime_node_ip_address import IntegrationRuntimeNodeIpAddress
-from .ssis_variable import SsisVariable
-from .ssis_environment import SsisEnvironment
-from .ssis_parameter import SsisParameter
-from .ssis_package import SsisPackage
-from .ssis_environment_reference import SsisEnvironmentReference
-from .ssis_project import SsisProject
-from .ssis_folder import SsisFolder
-from .ssis_object_metadata import SsisObjectMetadata
-from .ssis_object_metadata_list_response import SsisObjectMetadataListResponse
-from .integration_runtime_node_monitoring_data import IntegrationRuntimeNodeMonitoringData
-from .integration_runtime_monitoring_data import IntegrationRuntimeMonitoringData
-from .integration_runtime_auth_keys import IntegrationRuntimeAuthKeys
-from .integration_runtime_regenerate_key_parameters import IntegrationRuntimeRegenerateKeyParameters
-from .integration_runtime_connection_info import IntegrationRuntimeConnectionInfo
+try:
+    from .resource_py3 import Resource
+    from .sub_resource_py3 import SubResource
+    from .expression_py3 import Expression
+    from .secure_string_py3 import SecureString
+    from .linked_service_reference_py3 import LinkedServiceReference
+    from .azure_key_vault_secret_reference_py3 import AzureKeyVaultSecretReference
+    from .secret_base_py3 import SecretBase
+    from .factory_identity_py3 import FactoryIdentity
+    from .factory_repo_configuration_py3 import FactoryRepoConfiguration
+    from .factory_py3 import Factory
+    from .integration_runtime_py3 import IntegrationRuntime
+    from .integration_runtime_resource_py3 import IntegrationRuntimeResource
+    from .integration_runtime_reference_py3 import IntegrationRuntimeReference
+    from .integration_runtime_status_py3 import IntegrationRuntimeStatus
+    from .integration_runtime_status_response_py3 import IntegrationRuntimeStatusResponse
+    from .integration_runtime_status_list_response_py3 import IntegrationRuntimeStatusListResponse
+    from .update_integration_runtime_request_py3 import UpdateIntegrationRuntimeRequest
+    from .update_integration_runtime_node_request_py3 import UpdateIntegrationRuntimeNodeRequest
+    from .linked_integration_runtime_request_py3 import LinkedIntegrationRuntimeRequest
+    from .create_linked_integration_runtime_request_py3 import CreateLinkedIntegrationRuntimeRequest
+    from .parameter_specification_py3 import ParameterSpecification
+    from .linked_service_py3 import LinkedService
+    from .linked_service_resource_py3 import LinkedServiceResource
+    from .dataset_folder_py3 import DatasetFolder
+    from .dataset_py3 import Dataset
+    from .dataset_resource_py3 import DatasetResource
+    from .activity_dependency_py3 import ActivityDependency
+    from .user_property_py3 import UserProperty
+    from .activity_py3 import Activity
+    from .variable_specification_py3 import VariableSpecification
+    from .pipeline_folder_py3 import PipelineFolder
+    from .pipeline_resource_py3 import PipelineResource
+    from .trigger_py3 import Trigger
+    from .trigger_resource_py3 import TriggerResource
+    from .create_run_response_py3 import CreateRunResponse
+    from .trigger_subscription_operation_status_py3 import TriggerSubscriptionOperationStatus
+    from .factory_vsts_configuration_py3 import FactoryVSTSConfiguration
+    from .factory_git_hub_configuration_py3 import FactoryGitHubConfiguration
+    from .factory_repo_update_py3 import FactoryRepoUpdate
+    from .git_hub_access_token_request_py3 import GitHubAccessTokenRequest
+    from .git_hub_access_token_response_py3 import GitHubAccessTokenResponse
+    from .user_access_policy_py3 import UserAccessPolicy
+    from .access_policy_response_py3 import AccessPolicyResponse
+    from .pipeline_reference_py3 import PipelineReference
+    from .trigger_pipeline_reference_py3 import TriggerPipelineReference
+    from .factory_update_parameters_py3 import FactoryUpdateParameters
+    from .dataset_reference_py3 import DatasetReference
+    from .run_query_filter_py3 import RunQueryFilter
+    from .run_query_order_by_py3 import RunQueryOrderBy
+    from .run_filter_parameters_py3 import RunFilterParameters
+    from .pipeline_run_invoked_by_py3 import PipelineRunInvokedBy
+    from .pipeline_run_py3 import PipelineRun
+    from .pipeline_runs_query_response_py3 import PipelineRunsQueryResponse
+    from .activity_run_py3 import ActivityRun
+    from .activity_runs_query_response_py3 import ActivityRunsQueryResponse
+    from .trigger_run_py3 import TriggerRun
+    from .trigger_runs_query_response_py3 import TriggerRunsQueryResponse
+    from .rerun_tumbling_window_trigger_action_parameters_py3 import RerunTumblingWindowTriggerActionParameters
+    from .rerun_tumbling_window_trigger_py3 import RerunTumblingWindowTrigger
+    from .rerun_trigger_resource_py3 import RerunTriggerResource
+    from .operation_display_py3 import OperationDisplay
+    from .operation_log_specification_py3 import OperationLogSpecification
+    from .operation_metric_availability_py3 import OperationMetricAvailability
+    from .operation_metric_dimension_py3 import OperationMetricDimension
+    from .operation_metric_specification_py3 import OperationMetricSpecification
+    from .operation_service_specification_py3 import OperationServiceSpecification
+    from .operation_py3 import Operation
+    from .get_ssis_object_metadata_request_py3 import GetSsisObjectMetadataRequest
+    from .ssis_object_metadata_status_response_py3 import SsisObjectMetadataStatusResponse
+    from .exposure_control_request_py3 import ExposureControlRequest
+    from .exposure_control_response_py3 import ExposureControlResponse
+    from .self_dependency_tumbling_window_trigger_reference_py3 import SelfDependencyTumblingWindowTriggerReference
+    from .trigger_reference_py3 import TriggerReference
+    from .tumbling_window_trigger_dependency_reference_py3 import TumblingWindowTriggerDependencyReference
+    from .trigger_dependency_reference_py3 import TriggerDependencyReference
+    from .dependency_reference_py3 import DependencyReference
+    from .retry_policy_py3 import RetryPolicy
+    from .tumbling_window_trigger_py3 import TumblingWindowTrigger
+    from .blob_events_trigger_py3 import BlobEventsTrigger
+    from .blob_trigger_py3 import BlobTrigger
+    from .recurrence_schedule_occurrence_py3 import RecurrenceScheduleOccurrence
+    from .recurrence_schedule_py3 import RecurrenceSchedule
+    from .schedule_trigger_recurrence_py3 import ScheduleTriggerRecurrence
+    from .schedule_trigger_py3 import ScheduleTrigger
+    from .multiple_pipeline_trigger_py3 import MultiplePipelineTrigger
+    from .azure_function_linked_service_py3 import AzureFunctionLinkedService
+    from .azure_data_explorer_linked_service_py3 import AzureDataExplorerLinkedService
+    from .sap_table_linked_service_py3 import SapTableLinkedService
+    from .google_ad_words_linked_service_py3 import GoogleAdWordsLinkedService
+    from .oracle_service_cloud_linked_service_py3 import OracleServiceCloudLinkedService
+    from .dynamics_ax_linked_service_py3 import DynamicsAXLinkedService
+    from .responsys_linked_service_py3 import ResponsysLinkedService
+    from .azure_databricks_linked_service_py3 import AzureDatabricksLinkedService
+    from .azure_data_lake_analytics_linked_service_py3 import AzureDataLakeAnalyticsLinkedService
+    from .script_action_py3 import ScriptAction
+    from .hd_insight_on_demand_linked_service_py3 import HDInsightOnDemandLinkedService
+    from .salesforce_marketing_cloud_linked_service_py3 import SalesforceMarketingCloudLinkedService
+    from .netezza_linked_service_py3 import NetezzaLinkedService
+    from .vertica_linked_service_py3 import VerticaLinkedService
+    from .zoho_linked_service_py3 import ZohoLinkedService
+    from .xero_linked_service_py3 import XeroLinkedService
+    from .square_linked_service_py3 import SquareLinkedService
+    from .spark_linked_service_py3 import SparkLinkedService
+    from .shopify_linked_service_py3 import ShopifyLinkedService
+    from .service_now_linked_service_py3 import ServiceNowLinkedService
+    from .quick_books_linked_service_py3 import QuickBooksLinkedService
+    from .presto_linked_service_py3 import PrestoLinkedService
+    from .phoenix_linked_service_py3 import PhoenixLinkedService
+    from .paypal_linked_service_py3 import PaypalLinkedService
+    from .marketo_linked_service_py3 import MarketoLinkedService
+    from .azure_maria_db_linked_service_py3 import AzureMariaDBLinkedService
+    from .maria_db_linked_service_py3 import MariaDBLinkedService
+    from .magento_linked_service_py3 import MagentoLinkedService
+    from .jira_linked_service_py3 import JiraLinkedService
+    from .impala_linked_service_py3 import ImpalaLinkedService
+    from .hubspot_linked_service_py3 import HubspotLinkedService
+    from .hive_linked_service_py3 import HiveLinkedService
+    from .hbase_linked_service_py3 import HBaseLinkedService
+    from .greenplum_linked_service_py3 import GreenplumLinkedService
+    from .google_big_query_linked_service_py3 import GoogleBigQueryLinkedService
+    from .eloqua_linked_service_py3 import EloquaLinkedService
+    from .drill_linked_service_py3 import DrillLinkedService
+    from .couchbase_linked_service_py3 import CouchbaseLinkedService
+    from .concur_linked_service_py3 import ConcurLinkedService
+    from .azure_postgre_sql_linked_service_py3 import AzurePostgreSqlLinkedService
+    from .amazon_mws_linked_service_py3 import AmazonMWSLinkedService
+    from .sap_hana_linked_service_py3 import SapHanaLinkedService
+    from .sap_bw_linked_service_py3 import SapBWLinkedService
+    from .sftp_server_linked_service_py3 import SftpServerLinkedService
+    from .ftp_server_linked_service_py3 import FtpServerLinkedService
+    from .http_linked_service_py3 import HttpLinkedService
+    from .azure_search_linked_service_py3 import AzureSearchLinkedService
+    from .custom_data_source_linked_service_py3 import CustomDataSourceLinkedService
+    from .amazon_redshift_linked_service_py3 import AmazonRedshiftLinkedService
+    from .amazon_s3_linked_service_py3 import AmazonS3LinkedService
+    from .rest_service_linked_service_py3 import RestServiceLinkedService
+    from .sap_open_hub_linked_service_py3 import SapOpenHubLinkedService
+    from .sap_ecc_linked_service_py3 import SapEccLinkedService
+    from .sap_cloud_for_customer_linked_service_py3 import SapCloudForCustomerLinkedService
+    from .salesforce_service_cloud_linked_service_py3 import SalesforceServiceCloudLinkedService
+    from .salesforce_linked_service_py3 import SalesforceLinkedService
+    from .office365_linked_service_py3 import Office365LinkedService
+    from .azure_blob_fs_linked_service_py3 import AzureBlobFSLinkedService
+    from .azure_data_lake_store_linked_service_py3 import AzureDataLakeStoreLinkedService
+    from .cosmos_db_mongo_db_api_linked_service_py3 import CosmosDbMongoDbApiLinkedService
+    from .mongo_db_v2_linked_service_py3 import MongoDbV2LinkedService
+    from .mongo_db_linked_service_py3 import MongoDbLinkedService
+    from .cassandra_linked_service_py3 import CassandraLinkedService
+    from .web_client_certificate_authentication_py3 import WebClientCertificateAuthentication
+    from .web_basic_authentication_py3 import WebBasicAuthentication
+    from .web_anonymous_authentication_py3 import WebAnonymousAuthentication
+    from .web_linked_service_type_properties_py3 import WebLinkedServiceTypeProperties
+    from .web_linked_service_py3 import WebLinkedService
+    from .odata_linked_service_py3 import ODataLinkedService
+    from .hdfs_linked_service_py3 import HdfsLinkedService
+    from .microsoft_access_linked_service_py3 import MicrosoftAccessLinkedService
+    from .informix_linked_service_py3 import InformixLinkedService
+    from .odbc_linked_service_py3 import OdbcLinkedService
+    from .azure_ml_linked_service_py3 import AzureMLLinkedService
+    from .teradata_linked_service_py3 import TeradataLinkedService
+    from .db2_linked_service_py3 import Db2LinkedService
+    from .sybase_linked_service_py3 import SybaseLinkedService
+    from .postgre_sql_linked_service_py3 import PostgreSqlLinkedService
+    from .my_sql_linked_service_py3 import MySqlLinkedService
+    from .azure_my_sql_linked_service_py3 import AzureMySqlLinkedService
+    from .oracle_linked_service_py3 import OracleLinkedService
+    from .file_server_linked_service_py3 import FileServerLinkedService
+    from .hd_insight_linked_service_py3 import HDInsightLinkedService
+    from .common_data_service_for_apps_linked_service_py3 import CommonDataServiceForAppsLinkedService
+    from .dynamics_crm_linked_service_py3 import DynamicsCrmLinkedService
+    from .dynamics_linked_service_py3 import DynamicsLinkedService
+    from .cosmos_db_linked_service_py3 import CosmosDbLinkedService
+    from .azure_key_vault_linked_service_py3 import AzureKeyVaultLinkedService
+    from .azure_batch_linked_service_py3 import AzureBatchLinkedService
+    from .azure_sql_mi_linked_service_py3 import AzureSqlMILinkedService
+    from .azure_sql_database_linked_service_py3 import AzureSqlDatabaseLinkedService
+    from .sql_server_linked_service_py3 import SqlServerLinkedService
+    from .azure_sql_dw_linked_service_py3 import AzureSqlDWLinkedService
+    from .azure_table_storage_linked_service_py3 import AzureTableStorageLinkedService
+    from .azure_blob_storage_linked_service_py3 import AzureBlobStorageLinkedService
+    from .azure_storage_linked_service_py3 import AzureStorageLinkedService
+    from .google_ad_words_object_dataset_py3 import GoogleAdWordsObjectDataset
+    from .azure_data_explorer_table_dataset_py3 import AzureDataExplorerTableDataset
+    from .oracle_service_cloud_object_dataset_py3 import OracleServiceCloudObjectDataset
+    from .dynamics_ax_resource_dataset_py3 import DynamicsAXResourceDataset
+    from .responsys_object_dataset_py3 import ResponsysObjectDataset
+    from .salesforce_marketing_cloud_object_dataset_py3 import SalesforceMarketingCloudObjectDataset
+    from .vertica_table_dataset_py3 import VerticaTableDataset
+    from .netezza_table_dataset_py3 import NetezzaTableDataset
+    from .zoho_object_dataset_py3 import ZohoObjectDataset
+    from .xero_object_dataset_py3 import XeroObjectDataset
+    from .square_object_dataset_py3 import SquareObjectDataset
+    from .spark_object_dataset_py3 import SparkObjectDataset
+    from .shopify_object_dataset_py3 import ShopifyObjectDataset
+    from .service_now_object_dataset_py3 import ServiceNowObjectDataset
+    from .quick_books_object_dataset_py3 import QuickBooksObjectDataset
+    from .presto_object_dataset_py3 import PrestoObjectDataset
+    from .phoenix_object_dataset_py3 import PhoenixObjectDataset
+    from .paypal_object_dataset_py3 import PaypalObjectDataset
+    from .marketo_object_dataset_py3 import MarketoObjectDataset
+    from .azure_maria_db_table_dataset_py3 import AzureMariaDBTableDataset
+    from .maria_db_table_dataset_py3 import MariaDBTableDataset
+    from .magento_object_dataset_py3 import MagentoObjectDataset
+    from .jira_object_dataset_py3 import JiraObjectDataset
+    from .impala_object_dataset_py3 import ImpalaObjectDataset
+    from .hubspot_object_dataset_py3 import HubspotObjectDataset
+    from .hive_object_dataset_py3 import HiveObjectDataset
+    from .hbase_object_dataset_py3 import HBaseObjectDataset
+    from .greenplum_table_dataset_py3 import GreenplumTableDataset
+    from .google_big_query_object_dataset_py3 import GoogleBigQueryObjectDataset
+    from .eloqua_object_dataset_py3 import EloquaObjectDataset
+    from .drill_table_dataset_py3 import DrillTableDataset
+    from .couchbase_table_dataset_py3 import CouchbaseTableDataset
+    from .concur_object_dataset_py3 import ConcurObjectDataset
+    from .azure_postgre_sql_table_dataset_py3 import AzurePostgreSqlTableDataset
+    from .amazon_mws_object_dataset_py3 import AmazonMWSObjectDataset
+    from .dataset_zip_deflate_compression_py3 import DatasetZipDeflateCompression
+    from .dataset_deflate_compression_py3 import DatasetDeflateCompression
+    from .dataset_gzip_compression_py3 import DatasetGZipCompression
+    from .dataset_bzip2_compression_py3 import DatasetBZip2Compression
+    from .dataset_compression_py3 import DatasetCompression
+    from .parquet_format_py3 import ParquetFormat
+    from .orc_format_py3 import OrcFormat
+    from .avro_format_py3 import AvroFormat
+    from .json_format_py3 import JsonFormat
+    from .text_format_py3 import TextFormat
+    from .dataset_storage_format_py3 import DatasetStorageFormat
+    from .http_dataset_py3 import HttpDataset
+    from .azure_search_index_dataset_py3 import AzureSearchIndexDataset
+    from .web_table_dataset_py3 import WebTableDataset
+    from .sap_table_resource_dataset_py3 import SapTableResourceDataset
+    from .rest_resource_dataset_py3 import RestResourceDataset
+    from .sql_server_table_dataset_py3 import SqlServerTableDataset
+    from .sap_open_hub_table_dataset_py3 import SapOpenHubTableDataset
+    from .sap_hana_table_dataset_py3 import SapHanaTableDataset
+    from .sap_ecc_resource_dataset_py3 import SapEccResourceDataset
+    from .sap_cloud_for_customer_resource_dataset_py3 import SapCloudForCustomerResourceDataset
+    from .sap_bw_cube_dataset_py3 import SapBwCubeDataset
+    from .sybase_table_dataset_py3 import SybaseTableDataset
+    from .salesforce_service_cloud_object_dataset_py3 import SalesforceServiceCloudObjectDataset
+    from .salesforce_object_dataset_py3 import SalesforceObjectDataset
+    from .microsoft_access_table_dataset_py3 import MicrosoftAccessTableDataset
+    from .postgre_sql_table_dataset_py3 import PostgreSqlTableDataset
+    from .my_sql_table_dataset_py3 import MySqlTableDataset
+    from .odbc_table_dataset_py3 import OdbcTableDataset
+    from .informix_table_dataset_py3 import InformixTableDataset
+    from .relational_table_dataset_py3 import RelationalTableDataset
+    from .db2_table_dataset_py3 import Db2TableDataset
+    from .amazon_redshift_table_dataset_py3 import AmazonRedshiftTableDataset
+    from .azure_my_sql_table_dataset_py3 import AzureMySqlTableDataset
+    from .teradata_table_dataset_py3 import TeradataTableDataset
+    from .oracle_table_dataset_py3 import OracleTableDataset
+    from .odata_resource_dataset_py3 import ODataResourceDataset
+    from .cosmos_db_mongo_db_api_collection_dataset_py3 import CosmosDbMongoDbApiCollectionDataset
+    from .mongo_db_v2_collection_dataset_py3 import MongoDbV2CollectionDataset
+    from .mongo_db_collection_dataset_py3 import MongoDbCollectionDataset
+    from .file_share_dataset_py3 import FileShareDataset
+    from .office365_dataset_py3 import Office365Dataset
+    from .azure_blob_fs_dataset_py3 import AzureBlobFSDataset
+    from .azure_data_lake_store_dataset_py3 import AzureDataLakeStoreDataset
+    from .common_data_service_for_apps_entity_dataset_py3 import CommonDataServiceForAppsEntityDataset
+    from .dynamics_crm_entity_dataset_py3 import DynamicsCrmEntityDataset
+    from .dynamics_entity_dataset_py3 import DynamicsEntityDataset
+    from .document_db_collection_dataset_py3 import DocumentDbCollectionDataset
+    from .custom_dataset_py3 import CustomDataset
+    from .cassandra_table_dataset_py3 import CassandraTableDataset
+    from .azure_sql_dw_table_dataset_py3 import AzureSqlDWTableDataset
+    from .azure_sql_mi_table_dataset_py3 import AzureSqlMITableDataset
+    from .azure_sql_table_dataset_py3 import AzureSqlTableDataset
+    from .azure_table_dataset_py3 import AzureTableDataset
+    from .azure_blob_dataset_py3 import AzureBlobDataset
+    from .hdfs_location_py3 import HdfsLocation
+    from .http_server_location_py3 import HttpServerLocation
+    from .sftp_location_py3 import SftpLocation
+    from .ftp_server_location_py3 import FtpServerLocation
+    from .file_server_location_py3 import FileServerLocation
+    from .amazon_s3_location_py3 import AmazonS3Location
+    from .azure_data_lake_store_location_py3 import AzureDataLakeStoreLocation
+    from .azure_blob_fs_location_py3 import AzureBlobFSLocation
+    from .azure_blob_storage_location_py3 import AzureBlobStorageLocation
+    from .dataset_location_py3 import DatasetLocation
+    from .binary_dataset_py3 import BinaryDataset
+    from .json_dataset_py3 import JsonDataset
+    from .delimited_text_dataset_py3 import DelimitedTextDataset
+    from .parquet_dataset_py3 import ParquetDataset
+    from .avro_dataset_py3 import AvroDataset
+    from .amazon_s3_dataset_py3 import AmazonS3Dataset
+    from .activity_policy_py3 import ActivityPolicy
+    from .azure_function_activity_py3 import AzureFunctionActivity
+    from .databricks_spark_python_activity_py3 import DatabricksSparkPythonActivity
+    from .databricks_spark_jar_activity_py3 import DatabricksSparkJarActivity
+    from .databricks_notebook_activity_py3 import DatabricksNotebookActivity
+    from .data_lake_analytics_usql_activity_py3 import DataLakeAnalyticsUSQLActivity
+    from .azure_ml_update_resource_activity_py3 import AzureMLUpdateResourceActivity
+    from .azure_ml_web_service_file_py3 import AzureMLWebServiceFile
+    from .azure_ml_batch_execution_activity_py3 import AzureMLBatchExecutionActivity
+    from .get_metadata_activity_py3 import GetMetadataActivity
+    from .web_activity_authentication_py3 import WebActivityAuthentication
+    from .web_activity_py3 import WebActivity
+    from .redshift_unload_settings_py3 import RedshiftUnloadSettings
+    from .amazon_redshift_source_py3 import AmazonRedshiftSource
+    from .google_ad_words_source_py3 import GoogleAdWordsSource
+    from .oracle_service_cloud_source_py3 import OracleServiceCloudSource
+    from .dynamics_ax_source_py3 import DynamicsAXSource
+    from .responsys_source_py3 import ResponsysSource
+    from .salesforce_marketing_cloud_source_py3 import SalesforceMarketingCloudSource
+    from .vertica_source_py3 import VerticaSource
+    from .netezza_partition_settings_py3 import NetezzaPartitionSettings
+    from .netezza_source_py3 import NetezzaSource
+    from .zoho_source_py3 import ZohoSource
+    from .xero_source_py3 import XeroSource
+    from .square_source_py3 import SquareSource
+    from .spark_source_py3 import SparkSource
+    from .shopify_source_py3 import ShopifySource
+    from .service_now_source_py3 import ServiceNowSource
+    from .quick_books_source_py3 import QuickBooksSource
+    from .presto_source_py3 import PrestoSource
+    from .phoenix_source_py3 import PhoenixSource
+    from .paypal_source_py3 import PaypalSource
+    from .marketo_source_py3 import MarketoSource
+    from .azure_maria_db_source_py3 import AzureMariaDBSource
+    from .maria_db_source_py3 import MariaDBSource
+    from .magento_source_py3 import MagentoSource
+    from .jira_source_py3 import JiraSource
+    from .impala_source_py3 import ImpalaSource
+    from .hubspot_source_py3 import HubspotSource
+    from .hive_source_py3 import HiveSource
+    from .hbase_source_py3 import HBaseSource
+    from .greenplum_source_py3 import GreenplumSource
+    from .google_big_query_source_py3 import GoogleBigQuerySource
+    from .eloqua_source_py3 import EloquaSource
+    from .drill_source_py3 import DrillSource
+    from .couchbase_source_py3 import CouchbaseSource
+    from .concur_source_py3 import ConcurSource
+    from .azure_postgre_sql_source_py3 import AzurePostgreSqlSource
+    from .amazon_mws_source_py3 import AmazonMWSSource
+    from .http_source_py3 import HttpSource
+    from .azure_blob_fs_source_py3 import AzureBlobFSSource
+    from .azure_data_lake_store_source_py3 import AzureDataLakeStoreSource
+    from .office365_source_py3 import Office365Source
+    from .mongo_db_cursor_methods_properties_py3 import MongoDbCursorMethodsProperties
+    from .cosmos_db_mongo_db_api_source_py3 import CosmosDbMongoDbApiSource
+    from .mongo_db_v2_source_py3 import MongoDbV2Source
+    from .mongo_db_source_py3 import MongoDbSource
+    from .cassandra_source_py3 import CassandraSource
+    from .web_source_py3 import WebSource
+    from .teradata_partition_settings_py3 import TeradataPartitionSettings
+    from .teradata_source_py3 import TeradataSource
+    from .oracle_partition_settings_py3 import OraclePartitionSettings
+    from .oracle_source_py3 import OracleSource
+    from .azure_data_explorer_source_py3 import AzureDataExplorerSource
+    from .azure_my_sql_source_py3 import AzureMySqlSource
+    from .distcp_settings_py3 import DistcpSettings
+    from .hdfs_source_py3 import HdfsSource
+    from .file_system_source_py3 import FileSystemSource
+ from .sql_dw_source_py3 import SqlDWSource + from .stored_procedure_parameter_py3 import StoredProcedureParameter + from .sql_mi_source_py3 import SqlMISource + from .azure_sql_source_py3 import AzureSqlSource + from .sql_server_source_py3 import SqlServerSource + from .sql_source_py3 import SqlSource + from .rest_source_py3 import RestSource + from .sap_table_partition_settings_py3 import SapTablePartitionSettings + from .sap_table_source_py3 import SapTableSource + from .sap_open_hub_source_py3 import SapOpenHubSource + from .sap_hana_source_py3 import SapHanaSource + from .sap_ecc_source_py3 import SapEccSource + from .sap_cloud_for_customer_source_py3 import SapCloudForCustomerSource + from .salesforce_service_cloud_source_py3 import SalesforceServiceCloudSource + from .salesforce_source_py3 import SalesforceSource + from .odata_source_py3 import ODataSource + from .sap_bw_source_py3 import SapBwSource + from .sybase_source_py3 import SybaseSource + from .postgre_sql_source_py3 import PostgreSqlSource + from .my_sql_source_py3 import MySqlSource + from .odbc_source_py3 import OdbcSource + from .db2_source_py3 import Db2Source + from .microsoft_access_source_py3 import MicrosoftAccessSource + from .informix_source_py3 import InformixSource + from .relational_source_py3 import RelationalSource + from .common_data_service_for_apps_source_py3 import CommonDataServiceForAppsSource + from .dynamics_crm_source_py3 import DynamicsCrmSource + from .dynamics_source_py3 import DynamicsSource + from .document_db_collection_source_py3 import DocumentDbCollectionSource + from .blob_source_py3 import BlobSource + from .azure_table_source_py3 import AzureTableSource + from .hdfs_read_settings_py3 import HdfsReadSettings + from .http_read_settings_py3 import HttpReadSettings + from .sftp_read_settings_py3 import SftpReadSettings + from .ftp_read_settings_py3 import FtpReadSettings + from .file_server_read_settings_py3 import FileServerReadSettings + from .amazon_s3_read_settings_py3 import AmazonS3ReadSettings + from .azure_data_lake_store_read_settings_py3 import AzureDataLakeStoreReadSettings + from .azure_blob_fs_read_settings_py3 import AzureBlobFSReadSettings + from .azure_blob_storage_read_settings_py3 import AzureBlobStorageReadSettings + from .store_read_settings_py3 import StoreReadSettings + from .binary_source_py3 import BinarySource + from .json_source_py3 import JsonSource + from .format_read_settings_py3 import FormatReadSettings + from .delimited_text_read_settings_py3 import DelimitedTextReadSettings + from .delimited_text_source_py3 import DelimitedTextSource + from .parquet_source_py3 import ParquetSource + from .avro_source_py3 import AvroSource + from .copy_source_py3 import CopySource + from .lookup_activity_py3 import LookupActivity + from .azure_data_explorer_command_activity_py3 import AzureDataExplorerCommandActivity + from .log_storage_settings_py3 import LogStorageSettings + from .delete_activity_py3 import DeleteActivity + from .sql_server_stored_procedure_activity_py3 import SqlServerStoredProcedureActivity + from .custom_activity_reference_object_py3 import CustomActivityReferenceObject + from .custom_activity_py3 import CustomActivity + from .ssis_access_credential_py3 import SSISAccessCredential + from .ssis_log_location_py3 import SSISLogLocation + from .ssis_property_override_py3 import SSISPropertyOverride + from .ssis_execution_parameter_py3 import SSISExecutionParameter + from .ssis_execution_credential_py3 import SSISExecutionCredential + from 
.ssis_package_location_py3 import SSISPackageLocation + from .execute_ssis_package_activity_py3 import ExecuteSSISPackageActivity + from .hd_insight_spark_activity_py3 import HDInsightSparkActivity + from .hd_insight_streaming_activity_py3 import HDInsightStreamingActivity + from .hd_insight_map_reduce_activity_py3 import HDInsightMapReduceActivity + from .hd_insight_pig_activity_py3 import HDInsightPigActivity + from .hd_insight_hive_activity_py3 import HDInsightHiveActivity + from .redirect_incompatible_row_settings_py3 import RedirectIncompatibleRowSettings + from .staging_settings_py3 import StagingSettings + from .cosmos_db_mongo_db_api_sink_py3 import CosmosDbMongoDbApiSink + from .salesforce_service_cloud_sink_py3 import SalesforceServiceCloudSink + from .salesforce_sink_py3 import SalesforceSink + from .azure_data_explorer_sink_py3 import AzureDataExplorerSink + from .common_data_service_for_apps_sink_py3 import CommonDataServiceForAppsSink + from .dynamics_crm_sink_py3 import DynamicsCrmSink + from .dynamics_sink_py3 import DynamicsSink + from .microsoft_access_sink_py3 import MicrosoftAccessSink + from .informix_sink_py3 import InformixSink + from .odbc_sink_py3 import OdbcSink + from .azure_search_index_sink_py3 import AzureSearchIndexSink + from .azure_blob_fs_sink_py3 import AzureBlobFSSink + from .azure_data_lake_store_sink_py3 import AzureDataLakeStoreSink + from .oracle_sink_py3 import OracleSink + from .polybase_settings_py3 import PolybaseSettings + from .sql_dw_sink_py3 import SqlDWSink + from .sql_mi_sink_py3 import SqlMISink + from .azure_sql_sink_py3 import AzureSqlSink + from .sql_server_sink_py3 import SqlServerSink + from .sql_sink_py3 import SqlSink + from .document_db_collection_sink_py3 import DocumentDbCollectionSink + from .file_system_sink_py3 import FileSystemSink + from .blob_sink_py3 import BlobSink + from .file_server_write_settings_py3 import FileServerWriteSettings + from .azure_data_lake_store_write_settings_py3 import AzureDataLakeStoreWriteSettings + from .azure_blob_fs_write_settings_py3 import AzureBlobFSWriteSettings + from .azure_blob_storage_write_settings_py3 import AzureBlobStorageWriteSettings + from .store_write_settings_py3 import StoreWriteSettings + from .binary_sink_py3 import BinarySink + from .parquet_sink_py3 import ParquetSink + from .json_write_settings_py3 import JsonWriteSettings + from .delimited_text_write_settings_py3 import DelimitedTextWriteSettings + from .format_write_settings_py3 import FormatWriteSettings + from .avro_write_settings_py3 import AvroWriteSettings + from .avro_sink_py3 import AvroSink + from .azure_table_sink_py3 import AzureTableSink + from .azure_queue_sink_py3 import AzureQueueSink + from .sap_cloud_for_customer_sink_py3 import SapCloudForCustomerSink + from .azure_my_sql_sink_py3 import AzureMySqlSink + from .azure_postgre_sql_sink_py3 import AzurePostgreSqlSink + from .json_sink_py3 import JsonSink + from .delimited_text_sink_py3 import DelimitedTextSink + from .copy_sink_py3 import CopySink + from .copy_activity_py3 import CopyActivity + from .execution_activity_py3 import ExecutionActivity + from .web_hook_activity_py3 import WebHookActivity + from .append_variable_activity_py3 import AppendVariableActivity + from .set_variable_activity_py3 import SetVariableActivity + from .filter_activity_py3 import FilterActivity + from .validation_activity_py3 import ValidationActivity + from .until_activity_py3 import UntilActivity + from .wait_activity_py3 import WaitActivity + from .for_each_activity_py3 import 
ForEachActivity + from .if_condition_activity_py3 import IfConditionActivity + from .execute_pipeline_activity_py3 import ExecutePipelineActivity + from .control_activity_py3 import ControlActivity + from .linked_integration_runtime_py3 import LinkedIntegrationRuntime + from .self_hosted_integration_runtime_node_py3 import SelfHostedIntegrationRuntimeNode + from .self_hosted_integration_runtime_status_py3 import SelfHostedIntegrationRuntimeStatus + from .managed_integration_runtime_operation_result_py3 import ManagedIntegrationRuntimeOperationResult + from .managed_integration_runtime_error_py3 import ManagedIntegrationRuntimeError + from .managed_integration_runtime_node_py3 import ManagedIntegrationRuntimeNode + from .managed_integration_runtime_status_py3 import ManagedIntegrationRuntimeStatus + from .linked_integration_runtime_rbac_authorization_py3 import LinkedIntegrationRuntimeRbacAuthorization + from .linked_integration_runtime_key_authorization_py3 import LinkedIntegrationRuntimeKeyAuthorization + from .linked_integration_runtime_type_py3 import LinkedIntegrationRuntimeType + from .self_hosted_integration_runtime_py3 import SelfHostedIntegrationRuntime + from .entity_reference_py3 import EntityReference + from .integration_runtime_data_proxy_properties_py3 import IntegrationRuntimeDataProxyProperties + from .integration_runtime_custom_setup_script_properties_py3 import IntegrationRuntimeCustomSetupScriptProperties + from .integration_runtime_ssis_catalog_info_py3 import IntegrationRuntimeSsisCatalogInfo + from .integration_runtime_ssis_properties_py3 import IntegrationRuntimeSsisProperties + from .integration_runtime_vnet_properties_py3 import IntegrationRuntimeVNetProperties + from .integration_runtime_compute_properties_py3 import IntegrationRuntimeComputeProperties + from .managed_integration_runtime_py3 import ManagedIntegrationRuntime + from .integration_runtime_node_ip_address_py3 import IntegrationRuntimeNodeIpAddress + from .ssis_variable_py3 import SsisVariable + from .ssis_environment_py3 import SsisEnvironment + from .ssis_parameter_py3 import SsisParameter + from .ssis_package_py3 import SsisPackage + from .ssis_environment_reference_py3 import SsisEnvironmentReference + from .ssis_project_py3 import SsisProject + from .ssis_folder_py3 import SsisFolder + from .ssis_object_metadata_py3 import SsisObjectMetadata + from .ssis_object_metadata_list_response_py3 import SsisObjectMetadataListResponse + from .integration_runtime_node_monitoring_data_py3 import IntegrationRuntimeNodeMonitoringData + from .integration_runtime_monitoring_data_py3 import IntegrationRuntimeMonitoringData + from .integration_runtime_auth_keys_py3 import IntegrationRuntimeAuthKeys + from .integration_runtime_regenerate_key_parameters_py3 import IntegrationRuntimeRegenerateKeyParameters + from .integration_runtime_connection_info_py3 import IntegrationRuntimeConnectionInfo +except (SyntaxError, ImportError): + from .resource import Resource + from .sub_resource import SubResource + from .expression import Expression + from .secure_string import SecureString + from .linked_service_reference import LinkedServiceReference + from .azure_key_vault_secret_reference import AzureKeyVaultSecretReference + from .secret_base import SecretBase + from .factory_identity import FactoryIdentity + from .factory_repo_configuration import FactoryRepoConfiguration + from .factory import Factory + from .integration_runtime import IntegrationRuntime + from .integration_runtime_resource import IntegrationRuntimeResource + 
from .integration_runtime_reference import IntegrationRuntimeReference + from .integration_runtime_status import IntegrationRuntimeStatus + from .integration_runtime_status_response import IntegrationRuntimeStatusResponse + from .integration_runtime_status_list_response import IntegrationRuntimeStatusListResponse + from .update_integration_runtime_request import UpdateIntegrationRuntimeRequest + from .update_integration_runtime_node_request import UpdateIntegrationRuntimeNodeRequest + from .linked_integration_runtime_request import LinkedIntegrationRuntimeRequest + from .create_linked_integration_runtime_request import CreateLinkedIntegrationRuntimeRequest + from .parameter_specification import ParameterSpecification + from .linked_service import LinkedService + from .linked_service_resource import LinkedServiceResource + from .dataset_folder import DatasetFolder + from .dataset import Dataset + from .dataset_resource import DatasetResource + from .activity_dependency import ActivityDependency + from .user_property import UserProperty + from .activity import Activity + from .variable_specification import VariableSpecification + from .pipeline_folder import PipelineFolder + from .pipeline_resource import PipelineResource + from .trigger import Trigger + from .trigger_resource import TriggerResource + from .create_run_response import CreateRunResponse + from .trigger_subscription_operation_status import TriggerSubscriptionOperationStatus + from .factory_vsts_configuration import FactoryVSTSConfiguration + from .factory_git_hub_configuration import FactoryGitHubConfiguration + from .factory_repo_update import FactoryRepoUpdate + from .git_hub_access_token_request import GitHubAccessTokenRequest + from .git_hub_access_token_response import GitHubAccessTokenResponse + from .user_access_policy import UserAccessPolicy + from .access_policy_response import AccessPolicyResponse + from .pipeline_reference import PipelineReference + from .trigger_pipeline_reference import TriggerPipelineReference + from .factory_update_parameters import FactoryUpdateParameters + from .dataset_reference import DatasetReference + from .run_query_filter import RunQueryFilter + from .run_query_order_by import RunQueryOrderBy + from .run_filter_parameters import RunFilterParameters + from .pipeline_run_invoked_by import PipelineRunInvokedBy + from .pipeline_run import PipelineRun + from .pipeline_runs_query_response import PipelineRunsQueryResponse + from .activity_run import ActivityRun + from .activity_runs_query_response import ActivityRunsQueryResponse + from .trigger_run import TriggerRun + from .trigger_runs_query_response import TriggerRunsQueryResponse + from .rerun_tumbling_window_trigger_action_parameters import RerunTumblingWindowTriggerActionParameters + from .rerun_tumbling_window_trigger import RerunTumblingWindowTrigger + from .rerun_trigger_resource import RerunTriggerResource + from .operation_display import OperationDisplay + from .operation_log_specification import OperationLogSpecification + from .operation_metric_availability import OperationMetricAvailability + from .operation_metric_dimension import OperationMetricDimension + from .operation_metric_specification import OperationMetricSpecification + from .operation_service_specification import OperationServiceSpecification + from .operation import Operation + from .get_ssis_object_metadata_request import GetSsisObjectMetadataRequest + from .ssis_object_metadata_status_response import SsisObjectMetadataStatusResponse + from .exposure_control_request 
import ExposureControlRequest + from .exposure_control_response import ExposureControlResponse + from .self_dependency_tumbling_window_trigger_reference import SelfDependencyTumblingWindowTriggerReference + from .trigger_reference import TriggerReference + from .tumbling_window_trigger_dependency_reference import TumblingWindowTriggerDependencyReference + from .trigger_dependency_reference import TriggerDependencyReference + from .dependency_reference import DependencyReference + from .retry_policy import RetryPolicy + from .tumbling_window_trigger import TumblingWindowTrigger + from .blob_events_trigger import BlobEventsTrigger + from .blob_trigger import BlobTrigger + from .recurrence_schedule_occurrence import RecurrenceScheduleOccurrence + from .recurrence_schedule import RecurrenceSchedule + from .schedule_trigger_recurrence import ScheduleTriggerRecurrence + from .schedule_trigger import ScheduleTrigger + from .multiple_pipeline_trigger import MultiplePipelineTrigger + from .azure_function_linked_service import AzureFunctionLinkedService + from .azure_data_explorer_linked_service import AzureDataExplorerLinkedService + from .sap_table_linked_service import SapTableLinkedService + from .google_ad_words_linked_service import GoogleAdWordsLinkedService + from .oracle_service_cloud_linked_service import OracleServiceCloudLinkedService + from .dynamics_ax_linked_service import DynamicsAXLinkedService + from .responsys_linked_service import ResponsysLinkedService + from .azure_databricks_linked_service import AzureDatabricksLinkedService + from .azure_data_lake_analytics_linked_service import AzureDataLakeAnalyticsLinkedService + from .script_action import ScriptAction + from .hd_insight_on_demand_linked_service import HDInsightOnDemandLinkedService + from .salesforce_marketing_cloud_linked_service import SalesforceMarketingCloudLinkedService + from .netezza_linked_service import NetezzaLinkedService + from .vertica_linked_service import VerticaLinkedService + from .zoho_linked_service import ZohoLinkedService + from .xero_linked_service import XeroLinkedService + from .square_linked_service import SquareLinkedService + from .spark_linked_service import SparkLinkedService + from .shopify_linked_service import ShopifyLinkedService + from .service_now_linked_service import ServiceNowLinkedService + from .quick_books_linked_service import QuickBooksLinkedService + from .presto_linked_service import PrestoLinkedService + from .phoenix_linked_service import PhoenixLinkedService + from .paypal_linked_service import PaypalLinkedService + from .marketo_linked_service import MarketoLinkedService + from .azure_maria_db_linked_service import AzureMariaDBLinkedService + from .maria_db_linked_service import MariaDBLinkedService + from .magento_linked_service import MagentoLinkedService + from .jira_linked_service import JiraLinkedService + from .impala_linked_service import ImpalaLinkedService + from .hubspot_linked_service import HubspotLinkedService + from .hive_linked_service import HiveLinkedService + from .hbase_linked_service import HBaseLinkedService + from .greenplum_linked_service import GreenplumLinkedService + from .google_big_query_linked_service import GoogleBigQueryLinkedService + from .eloqua_linked_service import EloquaLinkedService + from .drill_linked_service import DrillLinkedService + from .couchbase_linked_service import CouchbaseLinkedService + from .concur_linked_service import ConcurLinkedService + from .azure_postgre_sql_linked_service import AzurePostgreSqlLinkedService + from 
.amazon_mws_linked_service import AmazonMWSLinkedService + from .sap_hana_linked_service import SapHanaLinkedService + from .sap_bw_linked_service import SapBWLinkedService + from .sftp_server_linked_service import SftpServerLinkedService + from .ftp_server_linked_service import FtpServerLinkedService + from .http_linked_service import HttpLinkedService + from .azure_search_linked_service import AzureSearchLinkedService + from .custom_data_source_linked_service import CustomDataSourceLinkedService + from .amazon_redshift_linked_service import AmazonRedshiftLinkedService + from .amazon_s3_linked_service import AmazonS3LinkedService + from .rest_service_linked_service import RestServiceLinkedService + from .sap_open_hub_linked_service import SapOpenHubLinkedService + from .sap_ecc_linked_service import SapEccLinkedService + from .sap_cloud_for_customer_linked_service import SapCloudForCustomerLinkedService + from .salesforce_service_cloud_linked_service import SalesforceServiceCloudLinkedService + from .salesforce_linked_service import SalesforceLinkedService + from .office365_linked_service import Office365LinkedService + from .azure_blob_fs_linked_service import AzureBlobFSLinkedService + from .azure_data_lake_store_linked_service import AzureDataLakeStoreLinkedService + from .cosmos_db_mongo_db_api_linked_service import CosmosDbMongoDbApiLinkedService + from .mongo_db_v2_linked_service import MongoDbV2LinkedService + from .mongo_db_linked_service import MongoDbLinkedService + from .cassandra_linked_service import CassandraLinkedService + from .web_client_certificate_authentication import WebClientCertificateAuthentication + from .web_basic_authentication import WebBasicAuthentication + from .web_anonymous_authentication import WebAnonymousAuthentication + from .web_linked_service_type_properties import WebLinkedServiceTypeProperties + from .web_linked_service import WebLinkedService + from .odata_linked_service import ODataLinkedService + from .hdfs_linked_service import HdfsLinkedService + from .microsoft_access_linked_service import MicrosoftAccessLinkedService + from .informix_linked_service import InformixLinkedService + from .odbc_linked_service import OdbcLinkedService + from .azure_ml_linked_service import AzureMLLinkedService + from .teradata_linked_service import TeradataLinkedService + from .db2_linked_service import Db2LinkedService + from .sybase_linked_service import SybaseLinkedService + from .postgre_sql_linked_service import PostgreSqlLinkedService + from .my_sql_linked_service import MySqlLinkedService + from .azure_my_sql_linked_service import AzureMySqlLinkedService + from .oracle_linked_service import OracleLinkedService + from .file_server_linked_service import FileServerLinkedService + from .hd_insight_linked_service import HDInsightLinkedService + from .common_data_service_for_apps_linked_service import CommonDataServiceForAppsLinkedService + from .dynamics_crm_linked_service import DynamicsCrmLinkedService + from .dynamics_linked_service import DynamicsLinkedService + from .cosmos_db_linked_service import CosmosDbLinkedService + from .azure_key_vault_linked_service import AzureKeyVaultLinkedService + from .azure_batch_linked_service import AzureBatchLinkedService + from .azure_sql_mi_linked_service import AzureSqlMILinkedService + from .azure_sql_database_linked_service import AzureSqlDatabaseLinkedService + from .sql_server_linked_service import SqlServerLinkedService + from .azure_sql_dw_linked_service import AzureSqlDWLinkedService + from 
.azure_table_storage_linked_service import AzureTableStorageLinkedService + from .azure_blob_storage_linked_service import AzureBlobStorageLinkedService + from .azure_storage_linked_service import AzureStorageLinkedService + from .google_ad_words_object_dataset import GoogleAdWordsObjectDataset + from .azure_data_explorer_table_dataset import AzureDataExplorerTableDataset + from .oracle_service_cloud_object_dataset import OracleServiceCloudObjectDataset + from .dynamics_ax_resource_dataset import DynamicsAXResourceDataset + from .responsys_object_dataset import ResponsysObjectDataset + from .salesforce_marketing_cloud_object_dataset import SalesforceMarketingCloudObjectDataset + from .vertica_table_dataset import VerticaTableDataset + from .netezza_table_dataset import NetezzaTableDataset + from .zoho_object_dataset import ZohoObjectDataset + from .xero_object_dataset import XeroObjectDataset + from .square_object_dataset import SquareObjectDataset + from .spark_object_dataset import SparkObjectDataset + from .shopify_object_dataset import ShopifyObjectDataset + from .service_now_object_dataset import ServiceNowObjectDataset + from .quick_books_object_dataset import QuickBooksObjectDataset + from .presto_object_dataset import PrestoObjectDataset + from .phoenix_object_dataset import PhoenixObjectDataset + from .paypal_object_dataset import PaypalObjectDataset + from .marketo_object_dataset import MarketoObjectDataset + from .azure_maria_db_table_dataset import AzureMariaDBTableDataset + from .maria_db_table_dataset import MariaDBTableDataset + from .magento_object_dataset import MagentoObjectDataset + from .jira_object_dataset import JiraObjectDataset + from .impala_object_dataset import ImpalaObjectDataset + from .hubspot_object_dataset import HubspotObjectDataset + from .hive_object_dataset import HiveObjectDataset + from .hbase_object_dataset import HBaseObjectDataset + from .greenplum_table_dataset import GreenplumTableDataset + from .google_big_query_object_dataset import GoogleBigQueryObjectDataset + from .eloqua_object_dataset import EloquaObjectDataset + from .drill_table_dataset import DrillTableDataset + from .couchbase_table_dataset import CouchbaseTableDataset + from .concur_object_dataset import ConcurObjectDataset + from .azure_postgre_sql_table_dataset import AzurePostgreSqlTableDataset + from .amazon_mws_object_dataset import AmazonMWSObjectDataset + from .dataset_zip_deflate_compression import DatasetZipDeflateCompression + from .dataset_deflate_compression import DatasetDeflateCompression + from .dataset_gzip_compression import DatasetGZipCompression + from .dataset_bzip2_compression import DatasetBZip2Compression + from .dataset_compression import DatasetCompression + from .parquet_format import ParquetFormat + from .orc_format import OrcFormat + from .avro_format import AvroFormat + from .json_format import JsonFormat + from .text_format import TextFormat + from .dataset_storage_format import DatasetStorageFormat + from .http_dataset import HttpDataset + from .azure_search_index_dataset import AzureSearchIndexDataset + from .web_table_dataset import WebTableDataset + from .sap_table_resource_dataset import SapTableResourceDataset + from .rest_resource_dataset import RestResourceDataset + from .sql_server_table_dataset import SqlServerTableDataset + from .sap_open_hub_table_dataset import SapOpenHubTableDataset + from .sap_hana_table_dataset import SapHanaTableDataset + from .sap_ecc_resource_dataset import SapEccResourceDataset + from 
.sap_cloud_for_customer_resource_dataset import SapCloudForCustomerResourceDataset + from .sap_bw_cube_dataset import SapBwCubeDataset + from .sybase_table_dataset import SybaseTableDataset + from .salesforce_service_cloud_object_dataset import SalesforceServiceCloudObjectDataset + from .salesforce_object_dataset import SalesforceObjectDataset + from .microsoft_access_table_dataset import MicrosoftAccessTableDataset + from .postgre_sql_table_dataset import PostgreSqlTableDataset + from .my_sql_table_dataset import MySqlTableDataset + from .odbc_table_dataset import OdbcTableDataset + from .informix_table_dataset import InformixTableDataset + from .relational_table_dataset import RelationalTableDataset + from .db2_table_dataset import Db2TableDataset + from .amazon_redshift_table_dataset import AmazonRedshiftTableDataset + from .azure_my_sql_table_dataset import AzureMySqlTableDataset + from .teradata_table_dataset import TeradataTableDataset + from .oracle_table_dataset import OracleTableDataset + from .odata_resource_dataset import ODataResourceDataset + from .cosmos_db_mongo_db_api_collection_dataset import CosmosDbMongoDbApiCollectionDataset + from .mongo_db_v2_collection_dataset import MongoDbV2CollectionDataset + from .mongo_db_collection_dataset import MongoDbCollectionDataset + from .file_share_dataset import FileShareDataset + from .office365_dataset import Office365Dataset + from .azure_blob_fs_dataset import AzureBlobFSDataset + from .azure_data_lake_store_dataset import AzureDataLakeStoreDataset + from .common_data_service_for_apps_entity_dataset import CommonDataServiceForAppsEntityDataset + from .dynamics_crm_entity_dataset import DynamicsCrmEntityDataset + from .dynamics_entity_dataset import DynamicsEntityDataset + from .document_db_collection_dataset import DocumentDbCollectionDataset + from .custom_dataset import CustomDataset + from .cassandra_table_dataset import CassandraTableDataset + from .azure_sql_dw_table_dataset import AzureSqlDWTableDataset + from .azure_sql_mi_table_dataset import AzureSqlMITableDataset + from .azure_sql_table_dataset import AzureSqlTableDataset + from .azure_table_dataset import AzureTableDataset + from .azure_blob_dataset import AzureBlobDataset + from .hdfs_location import HdfsLocation + from .http_server_location import HttpServerLocation + from .sftp_location import SftpLocation + from .ftp_server_location import FtpServerLocation + from .file_server_location import FileServerLocation + from .amazon_s3_location import AmazonS3Location + from .azure_data_lake_store_location import AzureDataLakeStoreLocation + from .azure_blob_fs_location import AzureBlobFSLocation + from .azure_blob_storage_location import AzureBlobStorageLocation + from .dataset_location import DatasetLocation + from .binary_dataset import BinaryDataset + from .json_dataset import JsonDataset + from .delimited_text_dataset import DelimitedTextDataset + from .parquet_dataset import ParquetDataset + from .avro_dataset import AvroDataset + from .amazon_s3_dataset import AmazonS3Dataset + from .activity_policy import ActivityPolicy + from .azure_function_activity import AzureFunctionActivity + from .databricks_spark_python_activity import DatabricksSparkPythonActivity + from .databricks_spark_jar_activity import DatabricksSparkJarActivity + from .databricks_notebook_activity import DatabricksNotebookActivity + from .data_lake_analytics_usql_activity import DataLakeAnalyticsUSQLActivity + from .azure_ml_update_resource_activity import AzureMLUpdateResourceActivity + from 
.azure_ml_web_service_file import AzureMLWebServiceFile + from .azure_ml_batch_execution_activity import AzureMLBatchExecutionActivity + from .get_metadata_activity import GetMetadataActivity + from .web_activity_authentication import WebActivityAuthentication + from .web_activity import WebActivity + from .redshift_unload_settings import RedshiftUnloadSettings + from .amazon_redshift_source import AmazonRedshiftSource + from .google_ad_words_source import GoogleAdWordsSource + from .oracle_service_cloud_source import OracleServiceCloudSource + from .dynamics_ax_source import DynamicsAXSource + from .responsys_source import ResponsysSource + from .salesforce_marketing_cloud_source import SalesforceMarketingCloudSource + from .vertica_source import VerticaSource + from .netezza_partition_settings import NetezzaPartitionSettings + from .netezza_source import NetezzaSource + from .zoho_source import ZohoSource + from .xero_source import XeroSource + from .square_source import SquareSource + from .spark_source import SparkSource + from .shopify_source import ShopifySource + from .service_now_source import ServiceNowSource + from .quick_books_source import QuickBooksSource + from .presto_source import PrestoSource + from .phoenix_source import PhoenixSource + from .paypal_source import PaypalSource + from .marketo_source import MarketoSource + from .azure_maria_db_source import AzureMariaDBSource + from .maria_db_source import MariaDBSource + from .magento_source import MagentoSource + from .jira_source import JiraSource + from .impala_source import ImpalaSource + from .hubspot_source import HubspotSource + from .hive_source import HiveSource + from .hbase_source import HBaseSource + from .greenplum_source import GreenplumSource + from .google_big_query_source import GoogleBigQuerySource + from .eloqua_source import EloquaSource + from .drill_source import DrillSource + from .couchbase_source import CouchbaseSource + from .concur_source import ConcurSource + from .azure_postgre_sql_source import AzurePostgreSqlSource + from .amazon_mws_source import AmazonMWSSource + from .http_source import HttpSource + from .azure_blob_fs_source import AzureBlobFSSource + from .azure_data_lake_store_source import AzureDataLakeStoreSource + from .office365_source import Office365Source + from .mongo_db_cursor_methods_properties import MongoDbCursorMethodsProperties + from .cosmos_db_mongo_db_api_source import CosmosDbMongoDbApiSource + from .mongo_db_v2_source import MongoDbV2Source + from .mongo_db_source import MongoDbSource + from .cassandra_source import CassandraSource + from .web_source import WebSource + from .teradata_partition_settings import TeradataPartitionSettings + from .teradata_source import TeradataSource + from .oracle_partition_settings import OraclePartitionSettings + from .oracle_source import OracleSource + from .azure_data_explorer_source import AzureDataExplorerSource + from .azure_my_sql_source import AzureMySqlSource + from .distcp_settings import DistcpSettings + from .hdfs_source import HdfsSource + from .file_system_source import FileSystemSource + from .sql_dw_source import SqlDWSource + from .stored_procedure_parameter import StoredProcedureParameter + from .sql_mi_source import SqlMISource + from .azure_sql_source import AzureSqlSource + from .sql_server_source import SqlServerSource + from .sql_source import SqlSource + from .rest_source import RestSource + from .sap_table_partition_settings import SapTablePartitionSettings + from .sap_table_source import SapTableSource + from 
.sap_open_hub_source import SapOpenHubSource + from .sap_hana_source import SapHanaSource + from .sap_ecc_source import SapEccSource + from .sap_cloud_for_customer_source import SapCloudForCustomerSource + from .salesforce_service_cloud_source import SalesforceServiceCloudSource + from .salesforce_source import SalesforceSource + from .odata_source import ODataSource + from .sap_bw_source import SapBwSource + from .sybase_source import SybaseSource + from .postgre_sql_source import PostgreSqlSource + from .my_sql_source import MySqlSource + from .odbc_source import OdbcSource + from .db2_source import Db2Source + from .microsoft_access_source import MicrosoftAccessSource + from .informix_source import InformixSource + from .relational_source import RelationalSource + from .common_data_service_for_apps_source import CommonDataServiceForAppsSource + from .dynamics_crm_source import DynamicsCrmSource + from .dynamics_source import DynamicsSource + from .document_db_collection_source import DocumentDbCollectionSource + from .blob_source import BlobSource + from .azure_table_source import AzureTableSource + from .hdfs_read_settings import HdfsReadSettings + from .http_read_settings import HttpReadSettings + from .sftp_read_settings import SftpReadSettings + from .ftp_read_settings import FtpReadSettings + from .file_server_read_settings import FileServerReadSettings + from .amazon_s3_read_settings import AmazonS3ReadSettings + from .azure_data_lake_store_read_settings import AzureDataLakeStoreReadSettings + from .azure_blob_fs_read_settings import AzureBlobFSReadSettings + from .azure_blob_storage_read_settings import AzureBlobStorageReadSettings + from .store_read_settings import StoreReadSettings + from .binary_source import BinarySource + from .json_source import JsonSource + from .format_read_settings import FormatReadSettings + from .delimited_text_read_settings import DelimitedTextReadSettings + from .delimited_text_source import DelimitedTextSource + from .parquet_source import ParquetSource + from .avro_source import AvroSource + from .copy_source import CopySource + from .lookup_activity import LookupActivity + from .azure_data_explorer_command_activity import AzureDataExplorerCommandActivity + from .log_storage_settings import LogStorageSettings + from .delete_activity import DeleteActivity + from .sql_server_stored_procedure_activity import SqlServerStoredProcedureActivity + from .custom_activity_reference_object import CustomActivityReferenceObject + from .custom_activity import CustomActivity + from .ssis_access_credential import SSISAccessCredential + from .ssis_log_location import SSISLogLocation + from .ssis_property_override import SSISPropertyOverride + from .ssis_execution_parameter import SSISExecutionParameter + from .ssis_execution_credential import SSISExecutionCredential + from .ssis_package_location import SSISPackageLocation + from .execute_ssis_package_activity import ExecuteSSISPackageActivity + from .hd_insight_spark_activity import HDInsightSparkActivity + from .hd_insight_streaming_activity import HDInsightStreamingActivity + from .hd_insight_map_reduce_activity import HDInsightMapReduceActivity + from .hd_insight_pig_activity import HDInsightPigActivity + from .hd_insight_hive_activity import HDInsightHiveActivity + from .redirect_incompatible_row_settings import RedirectIncompatibleRowSettings + from .staging_settings import StagingSettings + from .cosmos_db_mongo_db_api_sink import CosmosDbMongoDbApiSink + from .salesforce_service_cloud_sink import 
SalesforceServiceCloudSink + from .salesforce_sink import SalesforceSink + from .azure_data_explorer_sink import AzureDataExplorerSink + from .common_data_service_for_apps_sink import CommonDataServiceForAppsSink + from .dynamics_crm_sink import DynamicsCrmSink + from .dynamics_sink import DynamicsSink + from .microsoft_access_sink import MicrosoftAccessSink + from .informix_sink import InformixSink + from .odbc_sink import OdbcSink + from .azure_search_index_sink import AzureSearchIndexSink + from .azure_blob_fs_sink import AzureBlobFSSink + from .azure_data_lake_store_sink import AzureDataLakeStoreSink + from .oracle_sink import OracleSink + from .polybase_settings import PolybaseSettings + from .sql_dw_sink import SqlDWSink + from .sql_mi_sink import SqlMISink + from .azure_sql_sink import AzureSqlSink + from .sql_server_sink import SqlServerSink + from .sql_sink import SqlSink + from .document_db_collection_sink import DocumentDbCollectionSink + from .file_system_sink import FileSystemSink + from .blob_sink import BlobSink + from .file_server_write_settings import FileServerWriteSettings + from .azure_data_lake_store_write_settings import AzureDataLakeStoreWriteSettings + from .azure_blob_fs_write_settings import AzureBlobFSWriteSettings + from .azure_blob_storage_write_settings import AzureBlobStorageWriteSettings + from .store_write_settings import StoreWriteSettings + from .binary_sink import BinarySink + from .parquet_sink import ParquetSink + from .json_write_settings import JsonWriteSettings + from .delimited_text_write_settings import DelimitedTextWriteSettings + from .format_write_settings import FormatWriteSettings + from .avro_write_settings import AvroWriteSettings + from .avro_sink import AvroSink + from .azure_table_sink import AzureTableSink + from .azure_queue_sink import AzureQueueSink + from .sap_cloud_for_customer_sink import SapCloudForCustomerSink + from .azure_my_sql_sink import AzureMySqlSink + from .azure_postgre_sql_sink import AzurePostgreSqlSink + from .json_sink import JsonSink + from .delimited_text_sink import DelimitedTextSink + from .copy_sink import CopySink + from .copy_activity import CopyActivity + from .execution_activity import ExecutionActivity + from .web_hook_activity import WebHookActivity + from .append_variable_activity import AppendVariableActivity + from .set_variable_activity import SetVariableActivity + from .filter_activity import FilterActivity + from .validation_activity import ValidationActivity + from .until_activity import UntilActivity + from .wait_activity import WaitActivity + from .for_each_activity import ForEachActivity + from .if_condition_activity import IfConditionActivity + from .execute_pipeline_activity import ExecutePipelineActivity + from .control_activity import ControlActivity + from .linked_integration_runtime import LinkedIntegrationRuntime + from .self_hosted_integration_runtime_node import SelfHostedIntegrationRuntimeNode + from .self_hosted_integration_runtime_status import SelfHostedIntegrationRuntimeStatus + from .managed_integration_runtime_operation_result import ManagedIntegrationRuntimeOperationResult + from .managed_integration_runtime_error import ManagedIntegrationRuntimeError + from .managed_integration_runtime_node import ManagedIntegrationRuntimeNode + from .managed_integration_runtime_status import ManagedIntegrationRuntimeStatus + from .linked_integration_runtime_rbac_authorization import LinkedIntegrationRuntimeRbacAuthorization + from .linked_integration_runtime_key_authorization import 
LinkedIntegrationRuntimeKeyAuthorization + from .linked_integration_runtime_type import LinkedIntegrationRuntimeType + from .self_hosted_integration_runtime import SelfHostedIntegrationRuntime + from .entity_reference import EntityReference + from .integration_runtime_data_proxy_properties import IntegrationRuntimeDataProxyProperties + from .integration_runtime_custom_setup_script_properties import IntegrationRuntimeCustomSetupScriptProperties + from .integration_runtime_ssis_catalog_info import IntegrationRuntimeSsisCatalogInfo + from .integration_runtime_ssis_properties import IntegrationRuntimeSsisProperties + from .integration_runtime_vnet_properties import IntegrationRuntimeVNetProperties + from .integration_runtime_compute_properties import IntegrationRuntimeComputeProperties + from .managed_integration_runtime import ManagedIntegrationRuntime + from .integration_runtime_node_ip_address import IntegrationRuntimeNodeIpAddress + from .ssis_variable import SsisVariable + from .ssis_environment import SsisEnvironment + from .ssis_parameter import SsisParameter + from .ssis_package import SsisPackage + from .ssis_environment_reference import SsisEnvironmentReference + from .ssis_project import SsisProject + from .ssis_folder import SsisFolder + from .ssis_object_metadata import SsisObjectMetadata + from .ssis_object_metadata_list_response import SsisObjectMetadataListResponse + from .integration_runtime_node_monitoring_data import IntegrationRuntimeNodeMonitoringData + from .integration_runtime_monitoring_data import IntegrationRuntimeMonitoringData + from .integration_runtime_auth_keys import IntegrationRuntimeAuthKeys + from .integration_runtime_regenerate_key_parameters import IntegrationRuntimeRegenerateKeyParameters + from .integration_runtime_connection_info import IntegrationRuntimeConnectionInfo from .operation_paged import OperationPaged from .factory_paged import FactoryPaged from .integration_runtime_resource_paged import IntegrationRuntimeResourcePaged diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response.py index cfe0a5de2371..033d0fd9591f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response.py @@ -29,8 +29,8 @@ class AccessPolicyResponse(Model): 'data_plane_url': {'key': 'dataPlaneUrl', 'type': 'str'}, } - def __init__(self, policy=None, access_token=None, data_plane_url=None): - super(AccessPolicyResponse, self).__init__() - self.policy = policy - self.access_token = access_token - self.data_plane_url = data_plane_url + def __init__(self, **kwargs): + super(AccessPolicyResponse, self).__init__(**kwargs) + self.policy = kwargs.get('policy', None) + self.access_token = kwargs.get('access_token', None) + self.data_plane_url = kwargs.get('data_plane_url', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response_py3.py new file mode 100644 index 000000000000..2932f547ff26 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response_py3.py @@ -0,0 +1,36 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft 
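The regenerated __init__.py above imports every *_py3 model module first and falls back to the Python 2 variants. The py3 modules use syntax (keyword-only arguments, annotations) that Python 2 cannot even parse, which is why SyntaxError is caught alongside ImportError. A minimal sketch of the same pattern, using hypothetical module names rather than anything from this package:

    # Hypothetical modules "mypkg.model_py3" / "mypkg.model_py2"; the real
    # package applies this pattern to every generated model module.
    try:
        # May fail at *parse* time on Python 2 (e.g. "def f(self, *, x: int)"),
        # so SyntaxError must be caught in addition to ImportError.
        from mypkg.model_py3 import Model
    except (SyntaxError, ImportError):
        from mypkg.model_py2 import Model  # same public name, Python 2 syntax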
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response.py
index cfe0a5de2371..033d0fd9591f 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response.py
@@ -29,8 +29,8 @@ class AccessPolicyResponse(Model):
         'data_plane_url': {'key': 'dataPlaneUrl', 'type': 'str'},
     }
 
-    def __init__(self, policy=None, access_token=None, data_plane_url=None):
-        super(AccessPolicyResponse, self).__init__()
-        self.policy = policy
-        self.access_token = access_token
-        self.data_plane_url = data_plane_url
+    def __init__(self, **kwargs):
+        super(AccessPolicyResponse, self).__init__(**kwargs)
+        self.policy = kwargs.get('policy', None)
+        self.access_token = kwargs.get('access_token', None)
+        self.data_plane_url = kwargs.get('data_plane_url', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response_py3.py
new file mode 100644
index 000000000000..2932f547ff26
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response_py3.py
@@ -0,0 +1,36 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class AccessPolicyResponse(Model):
+    """Get Data Plane read only token response definition.
+
+    :param policy: The user access policy.
+    :type policy: ~azure.mgmt.datafactory.models.UserAccessPolicy
+    :param access_token: Data Plane read only access token.
+    :type access_token: str
+    :param data_plane_url: Data Plane service base URL.
+    :type data_plane_url: str
+    """
+
+    _attribute_map = {
+        'policy': {'key': 'policy', 'type': 'UserAccessPolicy'},
+        'access_token': {'key': 'accessToken', 'type': 'str'},
+        'data_plane_url': {'key': 'dataPlaneUrl', 'type': 'str'},
+    }
+
+    def __init__(self, *, policy=None, access_token: str=None, data_plane_url: str=None, **kwargs) -> None:
+        super(AccessPolicyResponse, self).__init__(**kwargs)
+        self.policy = policy
+        self.access_token = access_token
+        self.data_plane_url = data_plane_url
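Both constructors above now take keyword arguments only: the Python 2 file routes everything through **kwargs, while the _py3 file pins the same names as keyword-only parameters with type annotations. A usage sketch (the token and URL values are invented):

    from azure.mgmt.datafactory.models import AccessPolicyResponse

    # Keyword arguments work against both generated variants.
    resp = AccessPolicyResponse(
        access_token='example-token',                  # invented value
        data_plane_url='https://example.invalid/adf',  # invented value
    )

    # The old positional call style, AccessPolicyResponse(policy, token, url),
    # no longer matches either signature: both forms now reject positional
    # arguments with TypeError, so keywords are the only portable style.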
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity.py
index e6c03fc190a9..72d920f1d04c 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity.py
@@ -18,10 +18,12 @@ class Activity(Model):
     You probably want to use the sub-classes and not this class directly. Known
     sub-classes are: ExecutionActivity, ControlActivity
 
+    All required parameters must be populated in order to send to Azure.
+
     :param additional_properties: Unmatched properties from the message are
      deserialized this collection
     :type additional_properties: dict[str, object]
-    :param name: Activity name.
+    :param name: Required. Activity name.
     :type name: str
     :param description: Activity description.
     :type description: str
@@ -29,7 +31,7 @@ class Activity(Model):
     :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
     :param user_properties: Activity user properties.
     :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
-    :param type: Constant filled by server.
+    :param type: Required. Constant filled by server.
     :type type: str
     """
 
@@ -51,11 +53,11 @@ class Activity(Model):
         'type': {'Execution': 'ExecutionActivity', 'Container': 'ControlActivity'}
     }
 
-    def __init__(self, name, additional_properties=None, description=None, depends_on=None, user_properties=None):
-        super(Activity, self).__init__()
-        self.additional_properties = additional_properties
-        self.name = name
-        self.description = description
-        self.depends_on = depends_on
-        self.user_properties = user_properties
+    def __init__(self, **kwargs):
+        super(Activity, self).__init__(**kwargs)
+        self.additional_properties = kwargs.get('additional_properties', None)
+        self.name = kwargs.get('name', None)
+        self.description = kwargs.get('description', None)
+        self.depends_on = kwargs.get('depends_on', None)
+        self.user_properties = kwargs.get('user_properties', None)
         self.type = None
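The _subtype_map above is what lets msrest pick the concrete Activity subclass from the wire-format 'type' discriminator; __init__ leaves self.type as None so each subclass fills in its own constant. A sketch of the round trip, assuming msrest is installed and using an invented payload (the 'Wait' discriminator maps to WaitActivity elsewhere in this SDK):

    from msrest import Deserializer
    from azure.mgmt.datafactory import models

    # Build the class registry that the generated client normally supplies.
    client_models = {k: v for k, v in vars(models).items() if isinstance(v, type)}
    deserialize = Deserializer(client_models)

    payload = {
        'name': 'wait1',
        'type': 'Wait',  # discriminator value selects the subclass
        'typeProperties': {'waitTimeInSeconds': 30},
    }
    activity = deserialize('Activity', payload)
    print(type(activity).__name__)  # WaitActivity, not the Activity base class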
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency.py
index ab346ecbe635..a15b34acc24f 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency.py
@@ -15,12 +15,15 @@ class ActivityDependency(Model):
     """Activity dependency information.
 
+    All required parameters must be populated in order to send to Azure.
+
     :param additional_properties: Unmatched properties from the message are
      deserialized this collection
     :type additional_properties: dict[str, object]
-    :param activity: Activity name.
+    :param activity: Required. Activity name.
     :type activity: str
-    :param dependency_conditions: Match-Condition for the dependency.
+    :param dependency_conditions: Required. Match-Condition for the
+     dependency.
     :type dependency_conditions: list[str or
      ~azure.mgmt.datafactory.models.DependencyCondition]
     """
 
@@ -36,8 +39,8 @@ class ActivityDependency(Model):
         'dependency_conditions': {'key': 'dependencyConditions', 'type': '[str]'},
     }
 
-    def __init__(self, activity, dependency_conditions, additional_properties=None):
-        super(ActivityDependency, self).__init__()
-        self.additional_properties = additional_properties
-        self.activity = activity
-        self.dependency_conditions = dependency_conditions
+    def __init__(self, **kwargs):
+        super(ActivityDependency, self).__init__(**kwargs)
+        self.additional_properties = kwargs.get('additional_properties', None)
+        self.activity = kwargs.get('activity', None)
+        self.dependency_conditions = kwargs.get('dependency_conditions', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency_py3.py
new file mode 100644
index 000000000000..2883a81a0adc
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency_py3.py
@@ -0,0 +1,46 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class ActivityDependency(Model):
+    """Activity dependency information.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param activity: Required. Activity name.
+    :type activity: str
+    :param dependency_conditions: Required. Match-Condition for the
+     dependency.
+    :type dependency_conditions: list[str or
+     ~azure.mgmt.datafactory.models.DependencyCondition]
+    """
+
+    _validation = {
+        'activity': {'required': True},
+        'dependency_conditions': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'activity': {'key': 'activity', 'type': 'str'},
+        'dependency_conditions': {'key': 'dependencyConditions', 'type': '[str]'},
+    }
+
+    def __init__(self, *, activity: str, dependency_conditions, additional_properties=None, **kwargs) -> None:
+        super(ActivityDependency, self).__init__(**kwargs)
+        self.additional_properties = additional_properties
+        self.activity = activity
+        self.dependency_conditions = dependency_conditions
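With activity and dependency_conditions now flagged as required, ActivityDependency is the piece that chains activities together. A sketch of a two-step wiring, assuming this SDK's WaitActivity model for the activities themselves (values are invented):

    from azure.mgmt.datafactory.models import ActivityDependency, WaitActivity

    stage = WaitActivity(name='stage_data', wait_time_in_seconds=5)
    process = WaitActivity(
        name='process_data',
        wait_time_in_seconds=5,
        depends_on=[ActivityDependency(
            activity='stage_data',                # must match the upstream name
            dependency_conditions=['Succeeded'],  # a DependencyCondition value
        )],
    )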
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy.py
index 9577e1431442..4475cdbd9bea 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy.py
@@ -49,11 +49,11 @@ class ActivityPolicy(Model):
         'secure_output': {'key': 'secureOutput', 'type': 'bool'},
     }
 
-    def __init__(self, additional_properties=None, timeout=None, retry=None, retry_interval_in_seconds=None, secure_input=None, secure_output=None):
-        super(ActivityPolicy, self).__init__()
-        self.additional_properties = additional_properties
-        self.timeout = timeout
-        self.retry = retry
-        self.retry_interval_in_seconds = retry_interval_in_seconds
-        self.secure_input = secure_input
-        self.secure_output = secure_output
+    def __init__(self, **kwargs):
+        super(ActivityPolicy, self).__init__(**kwargs)
+        self.additional_properties = kwargs.get('additional_properties', None)
+        self.timeout = kwargs.get('timeout', None)
+        self.retry = kwargs.get('retry', None)
+        self.retry_interval_in_seconds = kwargs.get('retry_interval_in_seconds', None)
+        self.secure_input = kwargs.get('secure_input', None)
+        self.secure_output = kwargs.get('secure_output', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy_py3.py
new file mode 100644
index 000000000000..52d469679974
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy_py3.py
@@ -0,0 +1,59 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class ActivityPolicy(Model):
+    """Execution policy for an activity.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param timeout: Specifies the timeout for the activity to run. The default
+     timeout is 7 days. Type: string (or Expression with resultType string),
+     pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type timeout: object
+    :param retry: Maximum ordinary retry attempts. Default is 0. Type: integer
+     (or Expression with resultType integer), minimum: 0.
+    :type retry: object
+    :param retry_interval_in_seconds: Interval between each retry attempt (in
+     seconds). The default is 30 sec.
+    :type retry_interval_in_seconds: int
+    :param secure_input: When set to true, Input from activity is considered
+     as secure and will not be logged to monitoring.
+    :type secure_input: bool
+    :param secure_output: When set to true, Output from activity is considered
+     as secure and will not be logged to monitoring.
+    :type secure_output: bool
+    """
+
+    _validation = {
+        'retry_interval_in_seconds': {'maximum': 86400, 'minimum': 30},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'timeout': {'key': 'timeout', 'type': 'object'},
+        'retry': {'key': 'retry', 'type': 'object'},
+        'retry_interval_in_seconds': {'key': 'retryIntervalInSeconds', 'type': 'int'},
+        'secure_input': {'key': 'secureInput', 'type': 'bool'},
+        'secure_output': {'key': 'secureOutput', 'type': 'bool'},
+    }
+
+    def __init__(self, *, additional_properties=None, timeout=None, retry=None, retry_interval_in_seconds: int=None, secure_input: bool=None, secure_output: bool=None, **kwargs) -> None:
+        super(ActivityPolicy, self).__init__(**kwargs)
+        self.additional_properties = additional_properties
+        self.timeout = timeout
+        self.retry = retry
+        self.retry_interval_in_seconds = retry_interval_in_seconds
+        self.secure_input = secure_input
+        self.secure_output = secure_output
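ActivityPolicy carries the retry and timeout knobs that execution activities reference, and the _validation block above enforces the 30-86400 second bounds on retry_interval_in_seconds at serialization time. A sketch with invented values:

    from azure.mgmt.datafactory.models import ActivityPolicy

    policy = ActivityPolicy(
        timeout='0.12:00:00',          # 12 hours, matching the d.hh:mm:ss pattern
        retry=3,                       # up to 3 ordinary retries; default is 0
        retry_interval_in_seconds=60,  # validated against minimum 30 / maximum 86400
        secure_input=True,             # keep activity input out of monitoring logs
    )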
+ :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Execution': 'ExecutionActivity', 'Container': 'ControlActivity'} + } + + def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, **kwargs) -> None: + super(Activity, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.name = name + self.description = description + self.depends_on = depends_on + self.user_properties = user_properties + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run.py index 3492b892ef7f..901ffe23cd4e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run.py @@ -84,9 +84,9 @@ class ActivityRun(Model): 'error': {'key': 'error', 'type': 'object'}, } - def __init__(self, additional_properties=None): - super(ActivityRun, self).__init__() - self.additional_properties = additional_properties + def __init__(self, **kwargs): + super(ActivityRun, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) self.pipeline_name = None self.pipeline_run_id = None self.activity_name = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run_py3.py new file mode 100644 index 000000000000..488e822de957 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run_py3.py @@ -0,0 +1,102 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ActivityRun(Model): + """Information about an activity run in a pipeline. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar pipeline_name: The name of the pipeline. 
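
# A sketch of what the Activity _subtype_map enables, assuming msrest's
# Deserializer accepts a plain dict as response data here: it dispatches on
# the 'type' discriminator, so an 'Execution' payload comes back as an
# ExecutionActivity rather than the abstract Activity. 'MyActivity' is a
# placeholder name.
from msrest import Deserializer
from azure.mgmt.datafactory import models

client_models = {k: v for k, v in models.__dict__.items()
                 if isinstance(v, type)}
deserialize = Deserializer(client_models)

activity = deserialize('Activity', {'name': 'MyActivity',
                                    'type': 'Execution'})
print(type(activity).__name__)  # -> ExecutionActivity
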
+ :vartype pipeline_name: str + :ivar pipeline_run_id: The id of the pipeline run. + :vartype pipeline_run_id: str + :ivar activity_name: The name of the activity. + :vartype activity_name: str + :ivar activity_type: The type of the activity. + :vartype activity_type: str + :ivar activity_run_id: The id of the activity run. + :vartype activity_run_id: str + :ivar linked_service_name: The name of the compute linked service. + :vartype linked_service_name: str + :ivar status: The status of the activity run. + :vartype status: str + :ivar activity_run_start: The start time of the activity run in 'ISO 8601' + format. + :vartype activity_run_start: datetime + :ivar activity_run_end: The end time of the activity run in 'ISO 8601' + format. + :vartype activity_run_end: datetime + :ivar duration_in_ms: The duration of the activity run. + :vartype duration_in_ms: int + :ivar input: The input for the activity. + :vartype input: object + :ivar output: The output for the activity. + :vartype output: object + :ivar error: The error if any from the activity run. + :vartype error: object + """ + + _validation = { + 'pipeline_name': {'readonly': True}, + 'pipeline_run_id': {'readonly': True}, + 'activity_name': {'readonly': True}, + 'activity_type': {'readonly': True}, + 'activity_run_id': {'readonly': True}, + 'linked_service_name': {'readonly': True}, + 'status': {'readonly': True}, + 'activity_run_start': {'readonly': True}, + 'activity_run_end': {'readonly': True}, + 'duration_in_ms': {'readonly': True}, + 'input': {'readonly': True}, + 'output': {'readonly': True}, + 'error': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, + 'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'}, + 'activity_name': {'key': 'activityName', 'type': 'str'}, + 'activity_type': {'key': 'activityType', 'type': 'str'}, + 'activity_run_id': {'key': 'activityRunId', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'activity_run_start': {'key': 'activityRunStart', 'type': 'iso-8601'}, + 'activity_run_end': {'key': 'activityRunEnd', 'type': 'iso-8601'}, + 'duration_in_ms': {'key': 'durationInMs', 'type': 'int'}, + 'input': {'key': 'input', 'type': 'object'}, + 'output': {'key': 'output', 'type': 'object'}, + 'error': {'key': 'error', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(ActivityRun, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.pipeline_name = None + self.pipeline_run_id = None + self.activity_name = None + self.activity_type = None + self.activity_run_id = None + self.linked_service_name = None + self.status = None + self.activity_run_start = None + self.activity_run_end = None + self.duration_in_ms = None + self.input = None + self.output = None + self.error = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response.py index f8cc1a9aa43e..2fcd25a5ced2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response.py @@ -15,7 +15,9 @@ class ActivityRunsQueryResponse(Model): """A list activity runs. 
- :param value: List of activity runs. + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of activity runs. :type value: list[~azure.mgmt.datafactory.models.ActivityRun] :param continuation_token: The continuation token for getting the next page of results, if any remaining results exist, null otherwise. @@ -31,7 +33,7 @@ class ActivityRunsQueryResponse(Model): 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, } - def __init__(self, value, continuation_token=None): - super(ActivityRunsQueryResponse, self).__init__() - self.value = value - self.continuation_token = continuation_token + def __init__(self, **kwargs): + super(ActivityRunsQueryResponse, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.continuation_token = kwargs.get('continuation_token', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response_py3.py new file mode 100644 index 000000000000..ee3eae141635 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response_py3.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ActivityRunsQueryResponse(Model): + """A list activity runs. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of activity runs. + :type value: list[~azure.mgmt.datafactory.models.ActivityRun] + :param continuation_token: The continuation token for getting the next + page of results, if any remaining results exist, null otherwise. + :type continuation_token: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[ActivityRun]'}, + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + } + + def __init__(self, *, value, continuation_token: str=None, **kwargs) -> None: + super(ActivityRunsQueryResponse, self).__init__(**kwargs) + self.value = value + self.continuation_token = continuation_token diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service.py index c3f2f6fa2499..b1e5ed533bba 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service.py @@ -15,6 +15,8 @@ class AmazonMWSLinkedService(LinkedService): """Amazon Marketplace Web Service linked service. + All required parameters must be populated in order to send to Azure. 
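
# A paging sketch built on the continuation_token contract above, assuming an
# already-authenticated DataFactoryManagementClient as `client` and this SDK's
# activity_runs.query_by_pipeline_run operation; resource group, factory name
# and run id are placeholders.
import datetime
from azure.mgmt.datafactory.models import RunFilterParameters

run_id = '<pipeline-run-id>'
filters = RunFilterParameters(
    last_updated_after=datetime.datetime(2019, 6, 1),
    last_updated_before=datetime.datetime(2019, 6, 7))
while True:
    page = client.activity_runs.query_by_pipeline_run(
        'my-rg', 'my-factory', run_id, filters)
    for run in page.value:
        # ActivityRun fields are read-only and populated by the server.
        print(run.activity_name, run.status)
    if not page.continuation_token:
        break
    filters.continuation_token = page.continuation_token
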
+ :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,20 +31,20 @@ class AmazonMWSLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param endpoint: The endpoint of the Amazon MWS server, (i.e. + :param endpoint: Required. The endpoint of the Amazon MWS server, (i.e. mws.amazonservices.com) :type endpoint: object - :param marketplace_id: The Amazon Marketplace ID you want to retrieve data - from. To retrieve data from multiple Marketplace IDs, separate them with a - comma (,). (i.e. A2EUQ1WTGCTBG2) + :param marketplace_id: Required. The Amazon Marketplace ID you want to + retrieve data from. To retrieve data from multiple Marketplace IDs, + separate them with a comma (,). (i.e. A2EUQ1WTGCTBG2) :type marketplace_id: object - :param seller_id: The Amazon seller ID. + :param seller_id: Required. The Amazon seller ID. :type seller_id: object :param mws_auth_token: The Amazon MWS authentication token. :type mws_auth_token: ~azure.mgmt.datafactory.models.SecretBase - :param access_key_id: The access key id used to access data. + :param access_key_id: Required. The access key id used to access data. :type access_key_id: object :param secret_key: The secret key used to access data. :type secret_key: ~azure.mgmt.datafactory.models.SecretBase @@ -89,16 +91,16 @@ class AmazonMWSLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, endpoint, marketplace_id, seller_id, access_key_id, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, mws_auth_token=None, secret_key=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None): - super(AmazonMWSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.endpoint = endpoint - self.marketplace_id = marketplace_id - self.seller_id = seller_id - self.mws_auth_token = mws_auth_token - self.access_key_id = access_key_id - self.secret_key = secret_key - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(AmazonMWSLinkedService, self).__init__(**kwargs) + self.endpoint = kwargs.get('endpoint', None) + self.marketplace_id = kwargs.get('marketplace_id', None) + self.seller_id = kwargs.get('seller_id', None) + self.mws_auth_token = kwargs.get('mws_auth_token', None) + self.access_key_id = kwargs.get('access_key_id', None) + self.secret_key = kwargs.get('secret_key', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'AmazonMWS' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service_py3.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service_py3.py new file mode 100644 index 000000000000..a8db63933154 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service_py3.py @@ -0,0 +1,106 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AmazonMWSLinkedService(LinkedService): + """Amazon Marketplace Web Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of the Amazon MWS server, (i.e. + mws.amazonservices.com) + :type endpoint: object + :param marketplace_id: Required. The Amazon Marketplace ID you want to + retrieve data from. To retrieve data from multiple Marketplace IDs, + separate them with a comma (,). (i.e. A2EUQ1WTGCTBG2) + :type marketplace_id: object + :param seller_id: Required. The Amazon seller ID. + :type seller_id: object + :param mws_auth_token: The Amazon MWS authentication token. + :type mws_auth_token: ~azure.mgmt.datafactory.models.SecretBase + :param access_key_id: Required. The access key id used to access data. + :type access_key_id: object + :param secret_key: The secret key used to access data. + :type secret_key: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'marketplace_id': {'required': True}, + 'seller_id': {'required': True}, + 'access_key_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'marketplace_id': {'key': 'typeProperties.marketplaceID', 'type': 'object'}, + 'seller_id': {'key': 'typeProperties.sellerID', 'type': 'object'}, + 'mws_auth_token': {'key': 'typeProperties.mwsAuthToken', 'type': 'SecretBase'}, + 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, + 'secret_key': {'key': 'typeProperties.secretKey', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, endpoint, marketplace_id, seller_id, access_key_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, mws_auth_token=None, secret_key=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(AmazonMWSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.endpoint = endpoint + self.marketplace_id = marketplace_id + self.seller_id = seller_id + self.mws_auth_token = mws_auth_token + self.access_key_id = access_key_id + self.secret_key = secret_key + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'AmazonMWS' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset.py index 3bb4ec150825..9885f5c77d8c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset.py @@ -15,6 +15,8 @@ class AmazonMWSObjectDataset(Dataset): """Amazon Marketplace Web Service dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class AmazonMWSObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. 
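
# A construction sketch for the linked service defined above; endpoint,
# marketplace, seller and key values are placeholders. SecretBase-typed
# parameters are passed as SecureString instances.
from azure.mgmt.datafactory.models import AmazonMWSLinkedService, SecureString

mws_ls = AmazonMWSLinkedService(
    endpoint='mws.amazonservices.com',
    marketplace_id='A2EUQ1WTGCTBG2',
    seller_id='<seller-id>',
    access_key_id='<access-key-id>',
    mws_auth_token=SecureString(value='<auth-token>'),
    secret_key=SecureString(value='<secret-key>'))
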
:type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class AmazonMWSObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -64,7 +66,7 @@ class AmazonMWSObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): - super(AmazonMWSObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name + def __init__(self, **kwargs): + super(AmazonMWSObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) self.type = 'AmazonMWSObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset_py3.py new file mode 100644 index 000000000000..015ed9401c15 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AmazonMWSObjectDataset(Dataset): + """Amazon Marketplace Web Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. 
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(AmazonMWSObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'AmazonMWSObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source.py index 7a84c35b3418..f9d034e610d4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source.py @@ -15,6 +15,8 @@ class AmazonMWSSource(CopySource): """A copy activity Amazon Marketplace Web Service source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class AmazonMWSSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
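
# A sketch pairing the dataset above with a reference to a previously created
# linked service; 'MyAmazonMWSLinkedService' and the table name are
# placeholders.
from azure.mgmt.datafactory.models import (
    AmazonMWSObjectDataset, LinkedServiceReference)

mws_dataset = AmazonMWSObjectDataset(
    linked_service_name=LinkedServiceReference(
        reference_name='MyAmazonMWSLinkedService'),
    table_name='Orders')
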
@@ -49,7 +51,7 @@ class AmazonMWSSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(AmazonMWSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(AmazonMWSSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'AmazonMWSSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source_py3.py new file mode 100644 index 000000000000..9ef7f5b30244 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AmazonMWSSource(CopySource): + """A copy activity Amazon Marketplace Web Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(AmazonMWSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'AmazonMWSSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service.py index 117840257edf..4272b28c13f5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service.py @@ -15,6 +15,8 @@ class AmazonRedshiftLinkedService(LinkedService): """Linked service for Amazon Redshift. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,18 +31,18 @@ class AmazonRedshiftLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param server: The name of the Amazon Redshift server. Type: string (or - Expression with resultType string). + :param server: Required. The name of the Amazon Redshift server. Type: + string (or Expression with resultType string). :type server: object :param username: The username of the Amazon Redshift source. Type: string (or Expression with resultType string). :type username: object :param password: The password of the Amazon Redshift source. :type password: ~azure.mgmt.datafactory.models.SecretBase - :param database: The database name of the Amazon Redshift source. Type: - string (or Expression with resultType string). + :param database: Required. The database name of the Amazon Redshift + source. Type: string (or Expression with resultType string). :type database: object :param port: The TCP port number that the Amazon Redshift server uses to listen for client connections. The default value is 5439. 
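
# A sketch of the source above wired into a copy activity, assuming BlobSink,
# CopyActivity and DatasetReference from the same models package; all names
# and the query text are placeholders.
from azure.mgmt.datafactory.models import (
    AmazonMWSSource, BlobSink, CopyActivity, DatasetReference)

copy = CopyActivity(
    name='CopyMWSOrders',
    source=AmazonMWSSource(query='SELECT * FROM Orders'),
    sink=BlobSink(),
    inputs=[DatasetReference(reference_name='MWSOrdersDataset')],
    outputs=[DatasetReference(reference_name='BlobOutputDataset')])
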
Type: integer @@ -73,12 +75,12 @@ class AmazonRedshiftLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, server, database, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, username=None, password=None, port=None, encrypted_credential=None): - super(AmazonRedshiftLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.server = server - self.username = username - self.password = password - self.database = database - self.port = port - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(AmazonRedshiftLinkedService, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.database = kwargs.get('database', None) + self.port = kwargs.get('port', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'AmazonRedshift' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service_py3.py new file mode 100644 index 000000000000..3b84583c6c86 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AmazonRedshiftLinkedService(LinkedService): + """Linked service for Amazon Redshift. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. The name of the Amazon Redshift server. Type: + string (or Expression with resultType string). + :type server: object + :param username: The username of the Amazon Redshift source. Type: string + (or Expression with resultType string). + :type username: object + :param password: The password of the Amazon Redshift source. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param database: Required. The database name of the Amazon Redshift + source. Type: string (or Expression with resultType string). 
+ :type database: object + :param port: The TCP port number that the Amazon Redshift server uses to + listen for client connections. The default value is 5439. Type: integer + (or Expression with resultType integer). + :type port: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, server, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, username=None, password=None, port=None, encrypted_credential=None, **kwargs) -> None: + super(AmazonRedshiftLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.server = server + self.username = username + self.password = password + self.database = database + self.port = port + self.encrypted_credential = encrypted_credential + self.type = 'AmazonRedshift' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source.py index 61f7ae3c5cd4..d4fdfa4aa2ba 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source.py @@ -15,6 +15,8 @@ class AmazonRedshiftSource(CopySource): """A copy activity source for Amazon Redshift Source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class AmazonRedshiftSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType string). 
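
# A construction sketch for the Redshift linked service above; server,
# database and credentials are placeholders. Port is omitted, so the service
# falls back to the documented default of 5439.
from azure.mgmt.datafactory.models import (
    AmazonRedshiftLinkedService, SecureString)

redshift_ls = AmazonRedshiftLinkedService(
    server='redshift-cluster.example.com',
    database='dev',
    username='admin',
    password=SecureString(value='<password>'))
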
@@ -56,8 +58,8 @@ class AmazonRedshiftSource(CopySource): 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, redshift_unload_settings=None): - super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query - self.redshift_unload_settings = redshift_unload_settings + def __init__(self, **kwargs): + super(AmazonRedshiftSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.redshift_unload_settings = kwargs.get('redshift_unload_settings', None) self.type = 'AmazonRedshiftSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source_py3.py new file mode 100644 index 000000000000..9b34b2ef5b97 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source_py3.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AmazonRedshiftSource(CopySource): + """A copy activity source for Amazon Redshift Source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + :param redshift_unload_settings: The Amazon S3 settings needed for the + interim Amazon S3 when copying from Amazon Redshift with unload. With + this, data from Amazon Redshift source will be unloaded into S3 first and + then copied into the targeted sink from the interim S3. 
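
# A sketch of the UNLOAD path described above, assuming RedshiftUnloadSettings
# takes an S3 linked service reference plus an interim bucket name (both
# placeholders here, as is the query).
from azure.mgmt.datafactory.models import (
    AmazonRedshiftSource, LinkedServiceReference, RedshiftUnloadSettings)

redshift_source = AmazonRedshiftSource(
    query='SELECT * FROM sales',
    redshift_unload_settings=RedshiftUnloadSettings(
        s3_linked_service_name=LinkedServiceReference(
            reference_name='MyS3LinkedService'),
        bucket_name='interim-unload-bucket'))
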
+ :type redshift_unload_settings: + ~azure.mgmt.datafactory.models.RedshiftUnloadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, redshift_unload_settings=None, **kwargs) -> None: + super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.redshift_unload_settings = redshift_unload_settings + self.type = 'AmazonRedshiftSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_table_dataset.py index a7f86a62e4d3..987151367421 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_table_dataset.py @@ -15,6 +15,8 @@ class AmazonRedshiftTableDataset(Dataset): """The Amazon Redshift table dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class AmazonRedshiftTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class AmazonRedshiftTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. 
@@ -72,9 +74,9 @@ class AmazonRedshiftTableDataset(Dataset): 'amazon_redshift_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, amazon_redshift_table_dataset_schema=None): - super(AmazonRedshiftTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name - self.table = table - self.amazon_redshift_table_dataset_schema = amazon_redshift_table_dataset_schema + def __init__(self, **kwargs): + super(AmazonRedshiftTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.amazon_redshift_table_dataset_schema = kwargs.get('amazon_redshift_table_dataset_schema', None) self.type = 'AmazonRedshiftTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_table_dataset_py3.py new file mode 100644 index 000000000000..ceceaaba43e4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_table_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AmazonRedshiftTableDataset(Dataset): + """The Amazon Redshift table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. 
Please consider using + schema + table properties instead. + :type table_name: object + :param table: The Amazon Redshift table name. Type: string (or Expression + with resultType string). + :type table: object + :param amazon_redshift_table_dataset_schema: The Amazon Redshift schema + name. Type: string (or Expression with resultType string). + :type amazon_redshift_table_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'amazon_redshift_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, amazon_redshift_table_dataset_schema=None, **kwargs) -> None: + super(AmazonRedshiftTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.table = table + self.amazon_redshift_table_dataset_schema = amazon_redshift_table_dataset_schema + self.type = 'AmazonRedshiftTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset.py index 534b7de2049d..e91a5ba26131 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset.py @@ -15,6 +15,8 @@ class AmazonS3Dataset(Dataset): """A single Amazon Simple Storage Service (S3) object or a set of S3 objects. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class AmazonS3Dataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,10 +41,10 @@ class AmazonS3Dataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. 
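
# A sketch showing why the schema property above carries the long name:
# Dataset already uses `schema` for column metadata, so the Redshift schema
# name travels as amazon_redshift_table_dataset_schema in Python while still
# serializing to typeProperties.schema. Names below are placeholders.
from azure.mgmt.datafactory.models import (
    AmazonRedshiftTableDataset, LinkedServiceReference)

redshift_table = AmazonRedshiftTableDataset(
    linked_service_name=LinkedServiceReference(
        reference_name='MyRedshiftLinkedService'),
    table='sales',
    amazon_redshift_table_dataset_schema='public')
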
:type type: str - :param bucket_name: The name of the Amazon S3 bucket. Type: string (or - Expression with resultType string). + :param bucket_name: Required. The name of the Amazon S3 bucket. Type: + string (or Expression with resultType string). :type bucket_name: object :param key: The key of the Amazon S3 object. Type: string (or Expression with resultType string). @@ -92,14 +94,14 @@ class AmazonS3Dataset(Dataset): 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } - def __init__(self, linked_service_name, bucket_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, key=None, prefix=None, version=None, modified_datetime_start=None, modified_datetime_end=None, format=None, compression=None): - super(AmazonS3Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.bucket_name = bucket_name - self.key = key - self.prefix = prefix - self.version = version - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end - self.format = format - self.compression = compression + def __init__(self, **kwargs): + super(AmazonS3Dataset, self).__init__(**kwargs) + self.bucket_name = kwargs.get('bucket_name', None) + self.key = kwargs.get('key', None) + self.prefix = kwargs.get('prefix', None) + self.version = kwargs.get('version', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + self.format = kwargs.get('format', None) + self.compression = kwargs.get('compression', None) self.type = 'AmazonS3Object' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset_py3.py new file mode 100644 index 000000000000..d84ae48b2a46 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset_py3.py @@ -0,0 +1,107 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AmazonS3Dataset(Dataset): + """A single Amazon Simple Storage Service (S3) object or a set of S3 objects. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. 
+ :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param bucket_name: Required. The name of the Amazon S3 bucket. Type: + string (or Expression with resultType string). + :type bucket_name: object + :param key: The key of the Amazon S3 object. Type: string (or Expression + with resultType string). + :type key: object + :param prefix: The prefix filter for the S3 object name. Type: string (or + Expression with resultType string). + :type prefix: object + :param version: The version for the S3 object. Type: string (or Expression + with resultType string). + :type version: object + :param modified_datetime_start: The start of S3 object's modified + datetime. Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of S3 object's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_end: object + :param format: The format of files. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param compression: The data compression method used for the Amazon S3 + object. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'bucket_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'bucket_name': {'key': 'typeProperties.bucketName', 'type': 'object'}, + 'key': {'key': 'typeProperties.key', 'type': 'object'}, + 'prefix': {'key': 'typeProperties.prefix', 'type': 'object'}, + 'version': {'key': 'typeProperties.version', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, *, linked_service_name, bucket_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, key=None, prefix=None, version=None, modified_datetime_start=None, modified_datetime_end=None, format=None, compression=None, **kwargs) -> None: + super(AmazonS3Dataset, self).__init__(additional_properties=additional_properties, 
description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.bucket_name = bucket_name + self.key = key + self.prefix = prefix + self.version = version + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + self.format = format + self.compression = compression + self.type = 'AmazonS3Object' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service.py index aecfbcacb30c..250518c1a7ec 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service.py @@ -15,6 +15,8 @@ class AmazonS3LinkedService(LinkedService): """Linked service for Amazon S3. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class AmazonS3LinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param access_key_id: The access key identifier of the Amazon S3 Identity and Access Management (IAM) user. Type: string (or Expression with @@ -66,10 +68,10 @@ class AmazonS3LinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, access_key_id=None, secret_access_key=None, service_url=None, encrypted_credential=None): - super(AmazonS3LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.access_key_id = access_key_id - self.secret_access_key = secret_access_key - self.service_url = service_url - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(AmazonS3LinkedService, self).__init__(**kwargs) + self.access_key_id = kwargs.get('access_key_id', None) + self.secret_access_key = kwargs.get('secret_access_key', None) + self.service_url = kwargs.get('service_url', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'AmazonS3' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service_py3.py new file mode 100644 index 000000000000..8d136bb71fc0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AmazonS3LinkedService(LinkedService): + """Linked service for Amazon S3. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param access_key_id: The access key identifier of the Amazon S3 Identity + and Access Management (IAM) user. Type: string (or Expression with + resultType string). + :type access_key_id: object + :param secret_access_key: The secret access key of the Amazon S3 Identity + and Access Management (IAM) user. + :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase + :param service_url: This value specifies the endpoint to access with the + S3 Connector. This is an optional property; change it only if you want to + try a different service endpoint or want to switch between https and http. + Type: string (or Expression with resultType string). + :type service_url: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, + 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, + 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_key_id=None, secret_access_key=None, service_url=None, encrypted_credential=None, **kwargs) -> None: + super(AmazonS3LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.access_key_id = access_key_id + self.secret_access_key = secret_access_key + self.service_url = service_url + self.encrypted_credential = encrypted_credential + self.type = 'AmazonS3' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location.py index 555e57d8adac..74c77a16f0f2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location.py @@ -15,10 +15,12 @@ class AmazonS3Location(DatasetLocation): """The location of amazon S3 dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Type of dataset storage location. + :param type: Required. Type of dataset storage location. :type type: str :param folder_path: Specify the folder path of dataset. 
Type: string (or Expression with resultType string) @@ -47,7 +49,7 @@ class AmazonS3Location(DatasetLocation): 'version': {'key': 'version', 'type': 'object'}, } - def __init__(self, type, additional_properties=None, folder_path=None, file_name=None, bucket_name=None, version=None): - super(AmazonS3Location, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name) - self.bucket_name = bucket_name - self.version = version + def __init__(self, **kwargs): + super(AmazonS3Location, self).__init__(**kwargs) + self.bucket_name = kwargs.get('bucket_name', None) + self.version = kwargs.get('version', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location_py3.py new file mode 100644 index 000000000000..36afce341ada --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location_py3.py @@ -0,0 +1,55 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class AmazonS3Location(DatasetLocation): + """The location of amazon S3 dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param bucket_name: Specify the bucketName of amazon S3. Type: string (or + Expression with resultType string) + :type bucket_name: object + :param version: Specify the version of amazon S3. Type: string (or + Expression with resultType string). 
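For readers skimming this generated model code, a minimal usage sketch may help; it wires the two S3 models above together. All identifiers below ('S3LinkedService', 'examplebucket', the key path, and the secret placeholder) are illustrative, not values from this change, and SecureString is an existing SecretBase implementation in the same models package rather than part of this hunk. Keyword arguments work with both the kwargs-based constructors and the keyword-only *_py3 constructors shown in this patch.

# Hedged sketch: construct an S3 linked service and a dataset that uses it.
from azure.mgmt.datafactory.models import (
    AmazonS3Dataset, AmazonS3LinkedService, LinkedServiceReference, SecureString)

s3_linked_service = AmazonS3LinkedService(
    access_key_id='AKIAEXAMPLE',                       # IAM access key id (placeholder)
    secret_access_key=SecureString(value='<secret>'),  # any SecretBase works here
)
s3_dataset = AmazonS3Dataset(
    linked_service_name=LinkedServiceReference(reference_name='S3LinkedService'),
    bucket_name='examplebucket',   # required per the _validation map above
    key='raw/2019/06/events.csv',  # optional object key
)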
+ :type version: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + 'version': {'key': 'version', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, bucket_name=None, version=None, **kwargs) -> None: + super(AmazonS3Location, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) + self.bucket_name = bucket_name + self.version = version diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings.py index 72af6571d726..e83910136070 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings.py @@ -15,10 +15,12 @@ class AmazonS3ReadSettings(StoreReadSettings): """Azure data lake store read settings. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: The read setting type. + :param type: Required. The read setting type. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType @@ -65,12 +67,12 @@ class AmazonS3ReadSettings(StoreReadSettings): 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, type, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, enable_partition_discovery=None, modified_datetime_start=None, modified_datetime_end=None): - super(AmazonS3ReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections) - self.recursive = recursive - self.wildcard_folder_path = wildcard_folder_path - self.wildcard_file_name = wildcard_file_name - self.prefix = prefix - self.enable_partition_discovery = enable_partition_discovery - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end + def __init__(self, **kwargs): + super(AmazonS3ReadSettings, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.prefix = kwargs.get('prefix', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings_py3.py new file mode 100644 index 000000000000..79645a869ac8 --- /dev/null +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_settings_py3.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings_py3 import StoreReadSettings + + +class AmazonS3ReadSettings(StoreReadSettings): + """Azure data lake store read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param prefix: The prefix filter for the S3 object name. Type: string (or + Expression with resultType string). + :type prefix: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). 
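One detail the sketch below makes concrete: in this version of the models, DatasetLocation subclasses such as AmazonS3Location take the discriminator as a required type argument instead of filling it in as a server constant, so callers pass it explicitly. The bucket and paths are placeholders.

# Hedged sketch: an S3 location pointing at a single object.
from azure.mgmt.datafactory.models import AmazonS3Location

s3_location = AmazonS3Location(
    type='AmazonS3Location',      # required; not auto-filled by __init__ here
    bucket_name='examplebucket',  # placeholder bucket
    folder_path='raw/2019',
    file_name='events.avro',
)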
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'prefix': {'key': 'prefix', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(AmazonS3ReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.prefix = prefix + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity.py index a32f100fbde5..36a25e959061 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity.py @@ -15,10 +15,12 @@ class AppendVariableActivity(ControlActivity): """Append value for a Variable of type Array. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Activity name. + :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str @@ -26,7 +28,7 @@ class AppendVariableActivity(ControlActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param variable_name: Name of the variable whose value needs to be appended to. 
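The read-settings model above follows the same required-type pattern; a sketch with placeholder wildcards:

# Hedged sketch: S3 read settings with wildcard filtering.
from azure.mgmt.datafactory.models import AmazonS3ReadSettings

read_settings = AmazonS3ReadSettings(
    type='AmazonS3ReadSettings',        # required read-setting discriminator
    recursive=True,                     # walk sub-folders under the path
    wildcard_folder_path='raw/2019/*',
    wildcard_file_name='*.csv',
    enable_partition_discovery=False,
)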
@@ -51,8 +53,8 @@ class AppendVariableActivity(ControlActivity): 'value': {'key': 'typeProperties.value', 'type': 'object'}, } - def __init__(self, name, additional_properties=None, description=None, depends_on=None, user_properties=None, variable_name=None, value=None): - super(AppendVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties) - self.variable_name = variable_name - self.value = value + def __init__(self, **kwargs): + super(AppendVariableActivity, self).__init__(**kwargs) + self.variable_name = kwargs.get('variable_name', None) + self.value = kwargs.get('value', None) self.type = 'AppendVariable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity_py3.py new file mode 100644 index 000000000000..4526a6e4a45e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity_py3.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity_py3 import ControlActivity + + +class AppendVariableActivity(ControlActivity): + """Append value for a Variable of type Array. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param variable_name: Name of the variable whose value needs to be + appended to. + :type variable_name: str + :param value: Value to be appended. 
Could be a static value or Expression + :type value: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, + 'value': {'key': 'typeProperties.value', 'type': 'object'}, + } + + def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, variable_name: str=None, value=None, **kwargs) -> None: + super(AppendVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.variable_name = variable_name + self.value = value + self.type = 'AppendVariable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset.py index febe10329e14..d206ac99ab85 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset.py @@ -15,6 +15,8 @@ class AvroDataset(Dataset): """Avro dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class AvroDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,9 +41,9 @@ class AvroDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param location: The location of the avro storage. + :param location: Required. The location of the avro storage. 
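A sketch of the activity above; the variable name and expression are placeholders, and as the docstring notes, value may be a static object or an ADF expression string:

# Hedged sketch: append the current run id to an Array pipeline variable.
from azure.mgmt.datafactory.models import AppendVariableActivity

append_run_id = AppendVariableActivity(
    name='AppendRunId',         # required activity name
    variable_name='runIds',     # assumes an Array variable on the pipeline
    value='@pipeline().RunId',  # expression evaluated at run time
)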
:type location: ~azure.mgmt.datafactory.models.DatasetLocation :param avro_compression_codec: Possible values include: 'none', 'deflate', 'snappy', 'xz', 'bzip2' @@ -73,9 +75,9 @@ class AvroDataset(Dataset): 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, } - def __init__(self, linked_service_name, location, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, avro_compression_codec=None, avro_compression_level=None): - super(AvroDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.location = location - self.avro_compression_codec = avro_compression_codec - self.avro_compression_level = avro_compression_level + def __init__(self, **kwargs): + super(AvroDataset, self).__init__(**kwargs) + self.location = kwargs.get('location', None) + self.avro_compression_codec = kwargs.get('avro_compression_codec', None) + self.avro_compression_level = kwargs.get('avro_compression_level', None) self.type = 'Avro' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset_py3.py new file mode 100644 index 000000000000..f0f44dbbd786 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_dataset_py3.py @@ -0,0 +1,83 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AvroDataset(Dataset): + """Avro dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the avro storage. 
+ :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param avro_compression_codec: Possible values include: 'none', 'deflate', + 'snappy', 'xz', 'bzip2' + :type avro_compression_codec: str or + ~azure.mgmt.datafactory.models.AvroCompressionCodec + :param avro_compression_level: + :type avro_compression_level: int + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + 'avro_compression_level': {'maximum': 9, 'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'}, + 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, + } + + def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, avro_compression_codec=None, avro_compression_level: int=None, **kwargs) -> None: + super(AvroDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.location = location + self.avro_compression_codec = avro_compression_codec + self.avro_compression_level = avro_compression_level + self.type = 'Avro' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format.py index 0a015516867e..f0346a76080c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format.py @@ -15,6 +15,8 @@ class AvroFormat(DatasetStorageFormat): """The data stored in Avro format. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -24,7 +26,7 @@ class AvroFormat(DatasetStorageFormat): :param deserializer: Deserializer. Type: string (or Expression with resultType string). :type deserializer: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. 
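Since AvroDataset requires a DatasetLocation, it composes naturally with the AmazonS3Location added earlier in this patch; a sketch with placeholder names:

# Hedged sketch: an Avro dataset stored in S3 with snappy compression.
from azure.mgmt.datafactory.models import (
    AmazonS3Location, AvroDataset, LinkedServiceReference)

avro_dataset = AvroDataset(
    linked_service_name=LinkedServiceReference(reference_name='S3LinkedService'),
    location=AmazonS3Location(
        type='AmazonS3Location',
        bucket_name='examplebucket',
        file_name='part-0001.avro',
    ),
    avro_compression_codec='snappy',  # one of the documented enum values
)

avro_compression_level (valid range 1 to 9 per the _validation map) is omitted here since snappy takes no level.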
:type type: str """ @@ -32,6 +34,13 @@ class AvroFormat(DatasetStorageFormat): 'type': {'required': True}, } - def __init__(self, additional_properties=None, serializer=None, deserializer=None): - super(AvroFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer) + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AvroFormat, self).__init__(**kwargs) self.type = 'AvroFormat' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format_py3.py new file mode 100644 index 000000000000..35d459c4b2a6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_storage_format_py3 import DatasetStorageFormat + + +class AvroFormat(DatasetStorageFormat): + """The data stored in Avro format. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param serializer: Serializer. Type: string (or Expression with resultType + string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with + resultType string). + :type deserializer: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, **kwargs) -> None: + super(AvroFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) + self.type = 'AvroFormat' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink.py index 2e3c5b260426..34d4ceb1e0f6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink.py @@ -15,6 +15,8 @@ class AvroSink(CopySink): """A copy activity Avro sink. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -36,7 +38,7 @@ class AvroSink(CopySink): for the sink data store. 
Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param store_settings: Avro store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings @@ -60,8 +62,8 @@ class AvroSink(CopySink): 'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'}, } - def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None): - super(AvroSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.store_settings = store_settings - self.format_settings = format_settings + def __init__(self, **kwargs): + super(AvroSink, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) self.type = 'AvroSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink_py3.py new file mode 100644 index 000000000000..16363092dff2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_sink_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AvroSink(CopySink): + """A copy activity Avro sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Avro store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :param format_settings: Avro format settings. 
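The avro_format.py hunk above also gives that class its own explicit _attribute_map. The format model carries nothing beyond the serializer/deserializer pair, so attaching it to a dataset is a one-liner; names below are placeholders:

# Hedged sketch: mark an S3 dataset's files as Avro-formatted.
from azure.mgmt.datafactory.models import (
    AmazonS3Dataset, AvroFormat, LinkedServiceReference)

s3_avro = AmazonS3Dataset(
    linked_service_name=LinkedServiceReference(reference_name='S3LinkedService'),
    bucket_name='examplebucket',
    key='raw/part-0001.avro',
    format=AvroFormat(),  # type constant 'AvroFormat' is set by __init__
)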
+ :type format_settings: ~azure.mgmt.datafactory.models.AvroWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: + super(AvroSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.format_settings = format_settings + self.type = 'AvroSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source.py index 6203410a93a8..3ea2e7a2a76f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source.py @@ -15,6 +15,8 @@ class AvroSource(CopySource): """A copy activity Avro source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class AvroSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param store_settings: Avro store settings. 
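A sketch of the sink above; AvroWriteSettings is the format-settings model added further down in this patch, and the record name and timeout are placeholders:

# Hedged sketch: an Avro sink with explicit write settings.
from azure.mgmt.datafactory.models import AvroSink, AvroWriteSettings

avro_sink = AvroSink(
    write_batch_timeout='00:05:00',  # matches the documented timespan pattern
    format_settings=AvroWriteSettings(
        type='AvroWriteSettings',    # required write-setting discriminator
        record_name='Root',          # top-level record name required by Avro
    ),
)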
:type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings @@ -48,7 +50,7 @@ class AvroSource(CopySource): 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None): - super(AvroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.store_settings = store_settings + def __init__(self, **kwargs): + super(AvroSource, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) self.type = 'AvroSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source_py3.py new file mode 100644 index 000000000000..74b5e6db0fe2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_source_py3.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AvroSource(CopySource): + """A copy activity Avro source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Avro store settings. 
+ :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + super(AvroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.type = 'AvroSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings.py index 6d739312c2a5..ec068ee29885 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings.py @@ -15,10 +15,12 @@ class AvroWriteSettings(FormatWriteSettings): """Avro write settings. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: The write setting type. + :param type: Required. The write setting type. :type type: str :param record_name: Top level record name in write result, which is required in AVRO spec. @@ -38,7 +40,7 @@ class AvroWriteSettings(FormatWriteSettings): 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, } - def __init__(self, type, additional_properties=None, record_name=None, record_namespace=None): - super(AvroWriteSettings, self).__init__(additional_properties=additional_properties, type=type) - self.record_name = record_name - self.record_namespace = record_namespace + def __init__(self, **kwargs): + super(AvroWriteSettings, self).__init__(**kwargs) + self.record_name = kwargs.get('record_name', None) + self.record_namespace = kwargs.get('record_namespace', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings_py3.py new file mode 100644 index 000000000000..d14ebc4d1d29 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_write_settings_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .format_write_settings_py3 import FormatWriteSettings + + +class AvroWriteSettings(FormatWriteSettings): + """Avro write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param record_name: Top level record name in write result, which is + required in AVRO spec. + :type record_name: str + :param record_namespace: Record namespace in the write result. + :type record_namespace: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'record_name': {'key': 'recordName', 'type': 'str'}, + 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, + } + + def __init__(self, *, type: str, additional_properties=None, record_name: str=None, record_namespace: str=None, **kwargs) -> None: + super(AvroWriteSettings, self).__init__(additional_properties=additional_properties, type=type, **kwargs) + self.record_name = record_name + self.record_namespace = record_namespace diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service.py index 32d07953ab5c..986023308e23 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service.py @@ -15,6 +15,8 @@ class AzureBatchLinkedService(LinkedService): """Azure Batch linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,20 +31,21 @@ class AzureBatchLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param account_name: The Azure Batch account name. Type: string (or - Expression with resultType string). + :param account_name: Required. The Azure Batch account name. Type: string + (or Expression with resultType string). :type account_name: object :param access_key: The Azure Batch account access key. :type access_key: ~azure.mgmt.datafactory.models.SecretBase - :param batch_uri: The Azure Batch URI. Type: string (or Expression with - resultType string). + :param batch_uri: Required. The Azure Batch URI. Type: string (or + Expression with resultType string). :type batch_uri: object - :param pool_name: The Azure Batch pool name. Type: string (or Expression - with resultType string). + :param pool_name: Required. The Azure Batch pool name. Type: string (or + Expression with resultType string). :type pool_name: object - :param linked_service_name: The Azure Storage linked service reference. + :param linked_service_name: Required. The Azure Storage linked service + reference. 
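Tying the preceding Avro models together, here is a sketch of a copy activity that moves Avro data between two stores. CopyActivity and DatasetReference are existing models in this package, not part of this hunk, and the dataset reference names are placeholders:

# Hedged sketch: wire an Avro source to an Avro sink in a copy activity.
from azure.mgmt.datafactory.models import (
    AvroSink, AvroSource, CopyActivity, DatasetReference)

copy_avro = CopyActivity(
    name='CopyAvroBetweenStores',
    inputs=[DatasetReference(reference_name='AvroInputDs')],
    outputs=[DatasetReference(reference_name='AvroOutputDs')],
    source=AvroSource(),  # store_settings left unset in this sketch
    sink=AvroSink(),
)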
:type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param encrypted_credential: The encrypted credential used for @@ -74,12 +77,12 @@ class AzureBatchLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, account_name, batch_uri, pool_name, linked_service_name, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, access_key=None, encrypted_credential=None): - super(AzureBatchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.account_name = account_name - self.access_key = access_key - self.batch_uri = batch_uri - self.pool_name = pool_name - self.linked_service_name = linked_service_name - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(AzureBatchLinkedService, self).__init__(**kwargs) + self.account_name = kwargs.get('account_name', None) + self.access_key = kwargs.get('access_key', None) + self.batch_uri = kwargs.get('batch_uri', None) + self.pool_name = kwargs.get('pool_name', None) + self.linked_service_name = kwargs.get('linked_service_name', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'AzureBatch' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service_py3.py new file mode 100644 index 000000000000..e7d33dfb342a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service_py3.py @@ -0,0 +1,88 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureBatchLinkedService(LinkedService): + """Azure Batch linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param account_name: Required. The Azure Batch account name. Type: string + (or Expression with resultType string). + :type account_name: object + :param access_key: The Azure Batch account access key. + :type access_key: ~azure.mgmt.datafactory.models.SecretBase + :param batch_uri: Required. The Azure Batch URI. 
Type: string (or + Expression with resultType string). + :type batch_uri: object + :param pool_name: Required. The Azure Batch pool name. Type: string (or + Expression with resultType string). + :type pool_name: object + :param linked_service_name: Required. The Azure Storage linked service + reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'account_name': {'required': True}, + 'batch_uri': {'required': True}, + 'pool_name': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, + 'access_key': {'key': 'typeProperties.accessKey', 'type': 'SecretBase'}, + 'batch_uri': {'key': 'typeProperties.batchUri', 'type': 'object'}, + 'pool_name': {'key': 'typeProperties.poolName', 'type': 'object'}, + 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, account_name, batch_uri, pool_name, linked_service_name, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_key=None, encrypted_credential=None, **kwargs) -> None: + super(AzureBatchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.account_name = account_name + self.access_key = access_key + self.batch_uri = batch_uri + self.pool_name = pool_name + self.linked_service_name = linked_service_name + self.encrypted_credential = encrypted_credential + self.type = 'AzureBatch' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset.py index c02349fc7fe2..01814cf8f9a9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset.py @@ -15,6 +15,8 @@ class AzureBlobDataset(Dataset): """The Azure Blob storage. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class AzureBlobDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. 
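All four required properties of the Batch linked service above are visible in its _validation map; a sketch with placeholder account values, where SecureString again stands in for any SecretBase:

# Hedged sketch: an Azure Batch linked service with its required fields.
from azure.mgmt.datafactory.models import (
    AzureBatchLinkedService, LinkedServiceReference, SecureString)

batch_ls = AzureBatchLinkedService(
    account_name='examplebatch',
    batch_uri='https://examplebatch.westus2.batch.azure.com',
    pool_name='adf-pool',
    linked_service_name=LinkedServiceReference(reference_name='StorageLinkedService'),
    access_key=SecureString(value='<account-key>'),  # optional secret
)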
:type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class AzureBlobDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param folder_path: The path of the Azure Blob storage. Type: string (or Expression with resultType string). @@ -86,13 +88,13 @@ class AzureBlobDataset(Dataset): 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, table_root_location=None, file_name=None, modified_datetime_start=None, modified_datetime_end=None, format=None, compression=None): - super(AzureBlobDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.folder_path = folder_path - self.table_root_location = table_root_location - self.file_name = file_name - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end - self.format = format - self.compression = compression + def __init__(self, **kwargs): + super(AzureBlobDataset, self).__init__(**kwargs) + self.folder_path = kwargs.get('folder_path', None) + self.table_root_location = kwargs.get('table_root_location', None) + self.file_name = kwargs.get('file_name', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + self.format = kwargs.get('format', None) + self.compression = kwargs.get('compression', None) self.type = 'AzureBlob' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset_py3.py new file mode 100644 index 000000000000..706c39deb289 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset_py3.py @@ -0,0 +1,100 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureBlobDataset(Dataset): + """The Azure Blob storage. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. 
+ :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param folder_path: The path of the Azure Blob storage. Type: string (or + Expression with resultType string). + :type folder_path: object + :param table_root_location: The root of blob path. Type: string (or + Expression with resultType string). + :type table_root_location: object + :param file_name: The name of the Azure Blob. Type: string (or Expression + with resultType string). + :type file_name: object + :param modified_datetime_start: The start of Azure Blob's modified + datetime. Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of Azure Blob's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_end: object + :param format: The format of the Azure Blob storage. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param compression: The data compression method used for the blob storage. 
+ :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'table_root_location': {'key': 'typeProperties.tableRootLocation', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, table_root_location=None, file_name=None, modified_datetime_start=None, modified_datetime_end=None, format=None, compression=None, **kwargs) -> None: + super(AzureBlobDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.folder_path = folder_path + self.table_root_location = table_root_location + self.file_name = file_name + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + self.format = format + self.compression = compression + self.type = 'AzureBlob' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset.py index db988ec7988d..0ef62ff7122f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset.py @@ -15,6 +15,8 @@ class AzureBlobFSDataset(Dataset): """The Azure Data Lake Storage Gen2 storage. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class AzureBlobFSDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class AzureBlobFSDataset(Dataset): :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param folder_path: The path of the Azure Data Lake Storage Gen2 storage. Type: string (or Expression with resultType string). @@ -74,10 +76,10 @@ class AzureBlobFSDataset(Dataset): 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, format=None, compression=None): - super(AzureBlobFSDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.folder_path = folder_path - self.file_name = file_name - self.format = format - self.compression = compression + def __init__(self, **kwargs): + super(AzureBlobFSDataset, self).__init__(**kwargs) + self.folder_path = kwargs.get('folder_path', None) + self.file_name = kwargs.get('file_name', None) + self.format = kwargs.get('format', None) + self.compression = kwargs.get('compression', None) self.type = 'AzureBlobFSFile' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset_py3.py new file mode 100644 index 000000000000..82136a683fd3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset_py3.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureBlobFSDataset(Dataset): + """The Azure Data Lake Storage Gen2 storage. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. 
If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param folder_path: The path of the Azure Data Lake Storage Gen2 storage. + Type: string (or Expression with resultType string). + :type folder_path: object + :param file_name: The name of the Azure Data Lake Storage Gen2. Type: + string (or Expression with resultType string). + :type file_name: object + :param format: The format of the Azure Data Lake Storage Gen2 storage. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param compression: The data compression method used for the blob storage. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, format=None, compression=None, **kwargs) -> None: + super(AzureBlobFSDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.folder_path = folder_path + self.file_name = file_name + self.format = format + self.compression = compression + self.type = 'AzureBlobFSFile' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service.py index 5b8d624d719f..262ce976227b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service.py @@ -15,6 +15,8 @@ class AzureBlobFSLinkedService(LinkedService): """Azure Data Lake Storage Gen2 linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,10 +31,10 @@ class AzureBlobFSLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. 
:type type: str - :param url: Endpoint for the Azure Data Lake Storage Gen2 service. Type: - string (or Expression with resultType string). + :param url: Required. Endpoint for the Azure Data Lake Storage Gen2 + service. Type: string (or Expression with resultType string). :type url: object :param account_key: Account key for the Azure Data Lake Storage Gen2 service. Type: string (or Expression with resultType string). @@ -73,12 +75,12 @@ class AzureBlobFSLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, url, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, account_key=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None): - super(AzureBlobFSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.url = url - self.account_key = account_key - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(AzureBlobFSLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.account_key = kwargs.get('account_key', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'AzureBlobFS' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service_py3.py new file mode 100644 index 000000000000..f0d555078bf7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureBlobFSLinkedService(LinkedService): + """Azure Data Lake Storage Gen2 linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. 
+ :type type: str + :param url: Required. Endpoint for the Azure Data Lake Storage Gen2 + service. Type: string (or Expression with resultType string). + :type url: object + :param account_key: Account key for the Azure Data Lake Storage Gen2 + service. Type: string (or Expression with resultType string). + :type account_key: object + :param service_principal_id: The ID of the application used to + authenticate against the Azure Data Lake Storage Gen2 account. Type: + string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The Key of the application used to + authenticate against the Azure Data Lake Storage Gen2 account. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, account_key=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: + super(AzureBlobFSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.account_key = account_key + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential + self.type = 'AzureBlobFS' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location.py index 775127e81ac3..c21525bbac4c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location.py @@ -15,10 +15,12 @@ class AzureBlobFSLocation(DatasetLocation): """The location of azure blobFS dataset. + All required parameters must be populated in order to send to Azure. 
+ :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Type of dataset storage location. + :param type: Required. Type of dataset storage location. :type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string) @@ -43,6 +45,6 @@ class AzureBlobFSLocation(DatasetLocation): 'file_system': {'key': 'fileSystem', 'type': 'object'}, } - def __init__(self, type, additional_properties=None, folder_path=None, file_name=None, file_system=None): - super(AzureBlobFSLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name) - self.file_system = file_system + def __init__(self, **kwargs): + super(AzureBlobFSLocation, self).__init__(**kwargs) + self.file_system = kwargs.get('file_system', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location_py3.py new file mode 100644 index 000000000000..afbae52fdeb0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location_py3.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class AzureBlobFSLocation(DatasetLocation): + """The location of azure blobFS dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param file_system: Specify the fileSystem of azure blobFS. Type: string + (or Expression with resultType string). 
+ :type file_system: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'file_system': {'key': 'fileSystem', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, file_system=None, **kwargs) -> None: + super(AzureBlobFSLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) + self.file_system = file_system diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings.py index 03e35d1b4791..6d80ce72ea57 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings.py @@ -15,10 +15,12 @@ class AzureBlobFSReadSettings(StoreReadSettings): """Azure blobFS read settings. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: The read setting type. + :param type: Required. The read setting type. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType @@ -61,11 +63,11 @@ class AzureBlobFSReadSettings(StoreReadSettings): 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, type, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery=None, modified_datetime_start=None, modified_datetime_end=None): - super(AzureBlobFSReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections) - self.recursive = recursive - self.wildcard_folder_path = wildcard_folder_path - self.wildcard_file_name = wildcard_file_name - self.enable_partition_discovery = enable_partition_discovery - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end + def __init__(self, **kwargs): + super(AzureBlobFSReadSettings, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings_py3.py new file mode 100644 index 000000000000..af4746e84f8e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_settings_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings_py3 import StoreReadSettings + + +class AzureBlobFSReadSettings(StoreReadSettings): + """Azure blobFS read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Azure blobFS wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). 
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(AzureBlobFSReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py index 586e9fe08c30..a47b173c6581 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py @@ -15,6 +15,8 @@ class AzureBlobFSSink(CopySink): """A copy activity Azure Data Lake Storage Gen2 sink. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -36,7 +38,7 @@ class AzureBlobFSSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param copy_behavior: The type of copy behavior for copy sink. 
:type copy_behavior: object @@ -57,7 +59,7 @@ class AzureBlobFSSink(CopySink): 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } - def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None): - super(AzureBlobFSSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.copy_behavior = copy_behavior + def __init__(self, **kwargs): + super(AzureBlobFSSink, self).__init__(**kwargs) + self.copy_behavior = kwargs.get('copy_behavior', None) self.type = 'AzureBlobFSSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py new file mode 100644 index 000000000000..e2b28bf30a8c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzureBlobFSSink(CopySink): + """A copy activity Azure Data Lake Storage Gen2 sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param copy_behavior: The type of copy behavior for copy sink. 
+ :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(AzureBlobFSSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.copy_behavior = copy_behavior + self.type = 'AzureBlobFSSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source.py index cb74db742e84..0252ffd5ba8f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source.py @@ -15,6 +15,8 @@ class AzureBlobFSSource(CopySource): """A copy activity Azure BlobFS source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class AzureBlobFSSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType boolean). 
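As a usage sketch for the sink and source models added above (not part of the generated diff; it assumes only the azure-mgmt-datafactory package as patched here): the *_py3.py constructors take keyword-only arguments, while their py2 counterparts read the same field names from **kwargs, so callers drive both shapes identically.

    from azure.mgmt.datafactory.models import AzureBlobFSSink, AzureBlobFSSource

    # The discriminator is pinned by __init__ in both models; callers only
    # supply the optional copy fields. 'PreserveHierarchy' is an assumed
    # copy-behavior value -- the field itself is an untyped 'object'.
    sink = AzureBlobFSSink(copy_behavior='PreserveHierarchy')
    source = AzureBlobFSSource(recursive=True, skip_header_line_count=1)

    assert sink.type == 'AzureBlobFSSink'
    assert source.type == 'AzureBlobFSSource'
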
@@ -58,9 +60,9 @@ class AzureBlobFSSource(CopySource): 'recursive': {'key': 'recursive', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, treat_empty_as_null=None, skip_header_line_count=None, recursive=None): - super(AzureBlobFSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.treat_empty_as_null = treat_empty_as_null - self.skip_header_line_count = skip_header_line_count - self.recursive = recursive + def __init__(self, **kwargs): + super(AzureBlobFSSource, self).__init__(**kwargs) + self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None) + self.skip_header_line_count = kwargs.get('skip_header_line_count', None) + self.recursive = kwargs.get('recursive', None) self.type = 'AzureBlobFSSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source_py3.py new file mode 100644 index 000000000000..5b512c1f334f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source_py3.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AzureBlobFSSource(CopySource): + """A copy activity Azure BlobFS source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param treat_empty_as_null: Treat empty as null. Type: boolean (or + Expression with resultType boolean). + :type treat_empty_as_null: object + :param skip_header_line_count: Number of header lines to skip from each + blob. Type: integer (or Expression with resultType integer). + :type skip_header_line_count: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). 
+ :type recursive: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, + 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, treat_empty_as_null=None, skip_header_line_count=None, recursive=None, **kwargs) -> None: + super(AzureBlobFSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.treat_empty_as_null = treat_empty_as_null + self.skip_header_line_count = skip_header_line_count + self.recursive = recursive + self.type = 'AzureBlobFSSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings.py index af32061a91a9..f91971b829f7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings.py @@ -15,6 +15,8 @@ class AzureBlobFSWriteSettings(StoreWriteSettings): """Azure blobFS write settings. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -24,7 +26,7 @@ class AzureBlobFSWriteSettings(StoreWriteSettings): :type max_concurrent_connections: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). 
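Note the asymmetry between the settings models above: AzureBlobFSReadSettings leaves the required `type` to the caller (its __init__ never assigns it), while AzureBlobFSWriteSettings fixes its own discriminator. A brief sketch, with the discriminator string and wildcard value as assumptions rather than values taken from this diff:

    from azure.mgmt.datafactory.models import (
        AzureBlobFSReadSettings,
        AzureBlobFSWriteSettings,
    )

    # Read settings: 'type' is required and caller-supplied.
    read_settings = AzureBlobFSReadSettings(
        type='AzureBlobFSReadSettings',  # assumed discriminator value
        recursive=True,
        wildcard_file_name='*.parquet',  # hypothetical filter; raw 'object' field
    )

    # Write settings: __init__ assigns the discriminator itself.
    write_settings = AzureBlobFSWriteSettings(block_size_in_mb=8)
    assert write_settings.type == 'AzureBlobFSWriteSettings'
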
@@ -43,7 +45,7 @@ class AzureBlobFSWriteSettings(StoreWriteSettings): 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, } - def __init__(self, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, block_size_in_mb=None): - super(AzureBlobFSWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior) - self.block_size_in_mb = block_size_in_mb + def __init__(self, **kwargs): + super(AzureBlobFSWriteSettings, self).__init__(**kwargs) + self.block_size_in_mb = kwargs.get('block_size_in_mb', None) self.type = 'AzureBlobFSWriteSettings' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings_py3.py new file mode 100644 index 000000000000..351eae467183 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_settings_py3.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_write_settings_py3 import StoreWriteSettings + + +class AzureBlobFSWriteSettings(StoreWriteSettings): + """Azure blobFS write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param type: Required. Constant filled by server. + :type type: str + :param block_size_in_mb: Indicates the block size(MB) when writing data to + blob. Type: integer (or Expression with resultType integer). 
+ :type block_size_in_mb: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, block_size_in_mb=None, **kwargs) -> None: + super(AzureBlobFSWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + self.block_size_in_mb = block_size_in_mb + self.type = 'AzureBlobFSWriteSettings' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service.py index 43a62d85b5b6..5246e02ab9b4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service.py @@ -15,6 +15,8 @@ class AzureBlobStorageLinkedService(LinkedService): """The azure blob storage linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class AzureBlobStorageLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param connection_string: The connection string. It is mutually exclusive with sasUri, serviceEndpoint property. 
Type: string, SecureString or @@ -88,15 +90,15 @@ class AzureBlobStorageLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, } - def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, connection_string=None, account_key=None, sas_uri=None, sas_token=None, service_endpoint=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None): - super(AzureBlobStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.connection_string = connection_string - self.account_key = account_key - self.sas_uri = sas_uri - self.sas_token = sas_token - self.service_endpoint = service_endpoint - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(AzureBlobStorageLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.account_key = kwargs.get('account_key', None) + self.sas_uri = kwargs.get('sas_uri', None) + self.sas_token = kwargs.get('sas_token', None) + self.service_endpoint = kwargs.get('service_endpoint', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'AzureBlobStorage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service_py3.py new file mode 100644 index 000000000000..ba0a511532b4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service_py3.py @@ -0,0 +1,104 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureBlobStorageLinkedService(LinkedService): + """The azure blob storage linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. 
+ :type type: str + :param connection_string: The connection string. It is mutually exclusive + with sasUri, serviceEndpoint property. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param account_key: The Azure key vault secret reference of accountKey in + connection string. + :type account_key: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param sas_uri: SAS URI of the Azure Blob Storage resource. It is mutually + exclusive with connectionString, serviceEndpoint property. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type sas_uri: object + :param sas_token: The Azure key vault secret reference of sasToken in sas + uri. + :type sas_token: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param service_endpoint: Blob service endpoint of the Azure Blob Storage + resource. It is mutually exclusive with connectionString, sasUri property. + :type service_endpoint: str + :param service_principal_id: The ID of the service principal used to + authenticate against Azure SQL Data Warehouse. Type: string (or Expression + with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to + authenticate against Azure SQL Data Warehouse. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, + 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, + 'service_endpoint': {'key': 'typeProperties.serviceEndpoint', 'type': 'str'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, account_key=None, sas_uri=None, sas_token=None, service_endpoint: str=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential: str=None, **kwargs) -> None: + super(AzureBlobStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, 
annotations=annotations, **kwargs) + self.connection_string = connection_string + self.account_key = account_key + self.sas_uri = sas_uri + self.sas_token = sas_token + self.service_endpoint = service_endpoint + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential + self.type = 'AzureBlobStorage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location.py index 0dfe27f82df9..1efbbeaec352 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location.py @@ -15,10 +15,12 @@ class AzureBlobStorageLocation(DatasetLocation): """The location of azure blob dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Type of dataset storage location. + :param type: Required. Type of dataset storage location. :type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string) @@ -43,6 +45,6 @@ class AzureBlobStorageLocation(DatasetLocation): 'container': {'key': 'container', 'type': 'object'}, } - def __init__(self, type, additional_properties=None, folder_path=None, file_name=None, container=None): - super(AzureBlobStorageLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name) - self.container = container + def __init__(self, **kwargs): + super(AzureBlobStorageLocation, self).__init__(**kwargs) + self.container = kwargs.get('container', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location_py3.py new file mode 100644 index 000000000000..63b122573039 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location_py3.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class AzureBlobStorageLocation(DatasetLocation): + """The location of azure blob dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. 
Type: string (or + Expression with resultType string). + :type file_name: object + :param container: Specify the container of azure blob. Type: string (or + Expression with resultType string). + :type container: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'container': {'key': 'container', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, container=None, **kwargs) -> None: + super(AzureBlobStorageLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) + self.container = container diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings.py index b9ae45f52d42..42b11cc6de16 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings.py @@ -15,10 +15,12 @@ class AzureBlobStorageReadSettings(StoreReadSettings): """Azure blob read settings. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: The read setting type. + :param type: Required. The read setting type. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. 
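As a usage sketch of the two new py3 models above (a minimal, hypothetical example; the storage account, key placeholder, container, and paths are illustrative only, and connectionString is supplied alone because it is mutually exclusive with sasUri and serviceEndpoint):

from azure.mgmt.datafactory.models import (
    AzureBlobStorageLinkedService, AzureBlobStorageLocation)

# Hypothetical connection string; only one of connectionString, sasUri and
# serviceEndpoint may be set on this linked service.
blob_ls = AzureBlobStorageLinkedService(
    connection_string='DefaultEndpointsProtocol=https;AccountName=mystorage;AccountKey=<key>')

# In this version DatasetLocation subclasses take an explicit type
# discriminator instead of filling it in automatically.
blob_location = AzureBlobStorageLocation(
    type='AzureBlobStorageLocation',
    container='mycontainer',
    folder_path='input/2019/06',
    file_name='events.csv')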
Type: integer (or Expression with resultType @@ -61,11 +63,11 @@ class AzureBlobStorageReadSettings(StoreReadSettings): 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, type, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery=None, modified_datetime_start=None, modified_datetime_end=None): - super(AzureBlobStorageReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections) - self.recursive = recursive - self.wildcard_folder_path = wildcard_folder_path - self.wildcard_file_name = wildcard_file_name - self.enable_partition_discovery = enable_partition_discovery - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end + def __init__(self, **kwargs): + super(AzureBlobStorageReadSettings, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings_py3.py new file mode 100644 index 000000000000..495ea16afd98 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_settings_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings_py3 import StoreReadSettings + + +class AzureBlobStorageReadSettings(StoreReadSettings): + """Azure blob read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Azure blob wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blob wildcardFileName. Type: string (or + Expression with resultType string). 
+ :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(AzureBlobStorageReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings.py index b0be564d2fc4..c2834839f28a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings.py @@ -15,6 +15,8 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings): """Azure blob write settings. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -24,7 +26,7 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings): :type max_concurrent_connections: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). 
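A minimal sketch of the read settings introduced above (all values hypothetical; the companion write settings class follows in the next hunks):

from azure.mgmt.datafactory.models import AzureBlobStorageReadSettings

# type is a plain required string on StoreReadSettings in this version,
# so the discriminator is passed explicitly.
read_settings = AzureBlobStorageReadSettings(
    type='AzureBlobStorageReadSettings',
    recursive=True,
    wildcard_folder_path='input/2019/*',
    wildcard_file_name='*.csv',
    modified_datetime_start='2019-01-01T00:00:00Z',
    modified_datetime_end='2019-06-01T00:00:00Z')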
@@ -43,7 +45,7 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings): 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, } - def __init__(self, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, block_size_in_mb=None): - super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior) - self.block_size_in_mb = block_size_in_mb + def __init__(self, **kwargs): + super(AzureBlobStorageWriteSettings, self).__init__(**kwargs) + self.block_size_in_mb = kwargs.get('block_size_in_mb', None) self.type = 'AzureBlobStorageWriteSettings' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings_py3.py new file mode 100644 index 000000000000..a37c83039a8c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_settings_py3.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_write_settings_py3 import StoreWriteSettings + + +class AzureBlobStorageWriteSettings(StoreWriteSettings): + """Azure blob write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param type: Required. Constant filled by server. + :type type: str + :param block_size_in_mb: Indicates the block size(MB) when writing data to + blob. Type: integer (or Expression with resultType integer). 
+ :type block_size_in_mb: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, block_size_in_mb=None, **kwargs) -> None: + super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + self.block_size_in_mb = block_size_in_mb + self.type = 'AzureBlobStorageWriteSettings' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity.py index 8d44febfbdb1..308d445d1726 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity.py @@ -15,10 +15,12 @@ class AzureDataExplorerCommandActivity(ExecutionActivity): """Azure Data Explorer command activity. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Activity name. + :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str @@ -26,15 +28,16 @@ class AzureDataExplorerCommandActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param command: A control command, according to the Azure Data Explorer - command syntax. Type: string (or Expression with resultType string). + :param command: Required. A control command, according to the Azure Data + Explorer command syntax. Type: string (or Expression with resultType + string). :type command: object :param command_timeout: Control command timeout. 
Type: string (or Expression with resultType string), pattern: @@ -61,8 +64,8 @@ class AzureDataExplorerCommandActivity(ExecutionActivity): 'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'}, } - def __init__(self, name, command, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, command_timeout=None): - super(AzureDataExplorerCommandActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) - self.command = command - self.command_timeout = command_timeout + def __init__(self, **kwargs): + super(AzureDataExplorerCommandActivity, self).__init__(**kwargs) + self.command = kwargs.get('command', None) + self.command_timeout = kwargs.get('command_timeout', None) self.type = 'AzureDataExplorerCommand' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity_py3.py new file mode 100644 index 000000000000..2f04dfddf08f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_command_activity_py3.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class AzureDataExplorerCommandActivity(ExecutionActivity): + """Azure Data Explorer command activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param command: Required. A control command, according to the Azure Data + Explorer command syntax. Type: string (or Expression with resultType + string). + :type command: object + :param command_timeout: Control command timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type command_timeout: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'command': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'command': {'key': 'typeProperties.command', 'type': 'object'}, + 'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'}, + } + + def __init__(self, *, name: str, command, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, command_timeout=None, **kwargs) -> None: + super(AzureDataExplorerCommandActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.command = command + self.command_timeout = command_timeout + self.type = 'AzureDataExplorerCommand' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service.py index f14cc8b88946..5e5a9f7560c6 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service.py @@ -15,6 +15,8 @@ class AzureDataExplorerLinkedService(LinkedService): """Azure Data Explorer (Kusto) linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,25 +31,25 @@ class AzureDataExplorerLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param endpoint: The endpoint of Azure Data Explorer (the engine's - endpoint). URL will be in the format + :param endpoint: Required. The endpoint of Azure Data Explorer (the + engine's endpoint). URL will be in the format https://<clusterName>.<regionName>.kusto.windows.net. Type: string (or Expression with resultType string) :type endpoint: object - :param service_principal_id: The ID of the service principal used to - authenticate against Azure Data Explorer. Type: string (or Expression with - resultType string). + :param service_principal_id: Required. The ID of the service principal + used to authenticate against Azure Data Explorer. Type: string (or + Expression with resultType string). :type service_principal_id: object - :param service_principal_key: The key of the service principal used to - authenticate against Kusto. + :param service_principal_key: Required. The key of the service principal + used to authenticate against Kusto.
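For reference, a sketch of constructing the new command activity (activity name, linked service reference name, and command are hypothetical):

from azure.mgmt.datafactory.models import (
    AzureDataExplorerCommandActivity, LinkedServiceReference)

# command is required; command_timeout must match the timespan pattern
# documented above, e.g. '00:20:00'.
command_activity = AzureDataExplorerCommandActivity(
    name='ShowTables',
    command='.show tables',
    command_timeout='00:20:00',
    linked_service_name=LinkedServiceReference(reference_name='AzureDataExplorer1'))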
:type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param database: Database name for connection. Type: string (or Expression - with resultType string). + :param database: Required. Database name for connection. Type: string (or + Expression with resultType string). :type database: object - :param tenant: The name or ID of the tenant to which the service principal - belongs. Type: string (or Expression with resultType string). + :param tenant: Required. The name or ID of the tenant to which the service + principal belongs. Type: string (or Expression with resultType string). :type tenant: object """ @@ -74,11 +76,11 @@ class AzureDataExplorerLinkedService(LinkedService): 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, } - def __init__(self, endpoint, service_principal_id, service_principal_key, database, tenant, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None): - super(AzureDataExplorerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.endpoint = endpoint - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.database = database - self.tenant = tenant + def __init__(self, **kwargs): + super(AzureDataExplorerLinkedService, self).__init__(**kwargs) + self.endpoint = kwargs.get('endpoint', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.database = kwargs.get('database', None) + self.tenant = kwargs.get('tenant', None) self.type = 'AzureDataExplorer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service_py3.py new file mode 100644 index 000000000000..3cd8ab9c3c19 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureDataExplorerLinkedService(LinkedService): + """Azure Data Explorer (Kusto) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. 
+ :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of Azure Data Explorer (the + engine's endpoint). URL will be in the format + https://<clusterName>.<regionName>.kusto.windows.net. Type: string (or + Expression with resultType string) + :type endpoint: object + :param service_principal_id: Required. The ID of the service principal + used to authenticate against Azure Data Explorer. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. The key of the service principal + used to authenticate against Kusto. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param database: Required. Database name for connection. Type: string (or + Expression with resultType string). + :type database: object + :param tenant: Required. The name or ID of the tenant to which the service + principal belongs. Type: string (or Expression with resultType string). + :type tenant: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + 'database': {'required': True}, + 'tenant': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + } + + def __init__(self, *, endpoint, service_principal_id, service_principal_key, database, tenant, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: + super(AzureDataExplorerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.endpoint = endpoint + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.database = database + self.tenant = tenant + self.type = 'AzureDataExplorer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink.py index 4fdec7ed5ea9..5c204ab769e4 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink.py @@ -15,6 +15,8 @@ class AzureDataExplorerSink(CopySink): """A copy activity Azure Data Explorer sink. + All required parameters must be populated in order to send to Azure.
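A minimal sketch of the linked service defined above, with placeholder cluster, tenant, and service principal values:

from azure.mgmt.datafactory.models import (
    AzureDataExplorerLinkedService, SecureString)

# endpoint, database, tenant and both service principal fields are all
# required by the _validation map above.
kusto_ls = AzureDataExplorerLinkedService(
    endpoint='https://mycluster.westus2.kusto.windows.net',
    database='mydatabase',
    tenant='<tenant-id>',
    service_principal_id='<application-id>',
    service_principal_key=SecureString(value='<application-key>'))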
+ :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -36,7 +38,7 @@ class AzureDataExplorerSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param ingestion_mapping_name: A name of a pre-created csv mapping that was defined on the target Kusto table. Type: string. @@ -66,9 +68,9 @@ class AzureDataExplorerSink(CopySink): 'flush_immediately': {'key': 'flushImmediately', 'type': 'object'}, } - def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ingestion_mapping_name=None, ingestion_mapping_as_json=None, flush_immediately=None): - super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.ingestion_mapping_name = ingestion_mapping_name - self.ingestion_mapping_as_json = ingestion_mapping_as_json - self.flush_immediately = flush_immediately + def __init__(self, **kwargs): + super(AzureDataExplorerSink, self).__init__(**kwargs) + self.ingestion_mapping_name = kwargs.get('ingestion_mapping_name', None) + self.ingestion_mapping_as_json = kwargs.get('ingestion_mapping_as_json', None) + self.flush_immediately = kwargs.get('flush_immediately', None) self.type = 'AzureDataExplorerSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink_py3.py new file mode 100644 index 000000000000..e5cb67bc79b8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink_py3.py @@ -0,0 +1,76 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzureDataExplorerSink(CopySink): + """A copy activity Azure Data Explorer sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. 
Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param ingestion_mapping_name: A name of a pre-created csv mapping that + was defined on the target Kusto table. Type: string. + :type ingestion_mapping_name: object + :param ingestion_mapping_as_json: An explicit column mapping description + provided in a json format. Type: string. + :type ingestion_mapping_as_json: object + :param flush_immediately: If set to true, any aggregation will be skipped. + Default is false. Type: boolean. + :type flush_immediately: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'}, + 'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'}, + 'flush_immediately': {'key': 'flushImmediately', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ingestion_mapping_name=None, ingestion_mapping_as_json=None, flush_immediately=None, **kwargs) -> None: + super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.ingestion_mapping_name = ingestion_mapping_name + self.ingestion_mapping_as_json = ingestion_mapping_as_json + self.flush_immediately = flush_immediately + self.type = 'AzureDataExplorerSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source.py index 18c6017f6ad8..2caaa517efd5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source.py @@ -15,6 +15,8 @@ class AzureDataExplorerSource(CopySource): """A copy activity Azure Data Explorer (Kusto) source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,10 +31,10 @@ class AzureDataExplorerSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. 
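Sketch of the sink with a pre-created ingestion mapping (the mapping name is hypothetical):

from azure.mgmt.datafactory.models import AzureDataExplorerSink

# Typically only one of ingestion_mapping_name / ingestion_mapping_as_json
# is supplied; flush_immediately skips aggregation at the cost of latency.
adx_sink = AzureDataExplorerSink(
    ingestion_mapping_name='events_csv_mapping',
    flush_immediately=True)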
:type type: str - :param query: Database query. Should be a Kusto Query Language (KQL) - query. Type: string (or Expression with resultType string). + :param query: Required. Database query. Should be a Kusto Query Language + (KQL) query. Type: string (or Expression with resultType string). :type query: object :param no_truncation: The name of the Boolean option that controls whether truncation is applied to result-sets that go beyond a certain row-count @@ -60,9 +62,9 @@ class AzureDataExplorerSource(CopySource): 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, } - def __init__(self, query, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, no_truncation=None, query_timeout=None): - super(AzureDataExplorerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query - self.no_truncation = no_truncation - self.query_timeout = query_timeout + def __init__(self, **kwargs): + super(AzureDataExplorerSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.no_truncation = kwargs.get('no_truncation', None) + self.query_timeout = kwargs.get('query_timeout', None) self.type = 'AzureDataExplorerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source_py3.py new file mode 100644 index 000000000000..55a6bc78ee04 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AzureDataExplorerSource(CopySource): + """A copy activity Azure Data Explorer (Kusto) source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Required. Database query. Should be a Kusto Query Language + (KQL) query. Type: string (or Expression with resultType string). 
+ :type query: object + :param no_truncation: The name of the Boolean option that controls whether + truncation is applied to result-sets that go beyond a certain row-count + limit. + :type no_truncation: object + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + """ + + _validation = { + 'type': {'required': True}, + 'query': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'no_truncation': {'key': 'noTruncation', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + } + + def __init__(self, *, query, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, no_truncation=None, query_timeout=None, **kwargs) -> None: + super(AzureDataExplorerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.no_truncation = no_truncation + self.query_timeout = query_timeout + self.type = 'AzureDataExplorerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset.py index ed3ecf7687d4..594d22171f48 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset.py @@ -15,6 +15,8 @@ class AzureDataExplorerTableDataset(Dataset): """The Azure Data Explorer (Kusto) dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table: The table name of the Azure Data Explorer database. Type: string (or Expression with resultType string).
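And the matching source, with a hypothetical KQL query; query is the only required type property, per the _validation map above:

from azure.mgmt.datafactory.models import AzureDataExplorerSource

adx_source = AzureDataExplorerSource(
    query='Events | where ingestion_time() > ago(1d) | take 1000',
    query_timeout='00:10:00',
    no_truncation=True)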
@@ -64,7 +66,7 @@ class AzureDataExplorerTableDataset(Dataset): 'table': {'key': 'typeProperties.table', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table=None): - super(AzureDataExplorerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table = table + def __init__(self, **kwargs): + super(AzureDataExplorerTableDataset, self).__init__(**kwargs) + self.table = kwargs.get('table', None) self.type = 'AzureDataExplorerTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset_py3.py new file mode 100644 index 000000000000..d36b0f39c2fe --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureDataExplorerTableDataset(Dataset): + """The Azure Data Explorer (Kusto) dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table: The table name of the Azure Data Explorer database. Type: + string (or Expression with resultType string). 
+ :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table=None, **kwargs) -> None: + super(AzureDataExplorerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table = table + self.type = 'AzureDataExplorerTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service.py index bed62664372c..0381e1b1de65 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service.py @@ -15,6 +15,8 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService): """Azure Data Lake Analytics linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,10 +31,10 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param account_name: The Azure Data Lake Analytics account name. Type: - string (or Expression with resultType string). + :param account_name: Required. The Azure Data Lake Analytics account name. + Type: string (or Expression with resultType string). :type account_name: object :param service_principal_id: The ID of the application used to authenticate against the Azure Data Lake Analytics account. Type: string @@ -41,8 +43,8 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService): :param service_principal_key: The Key of the application used to authenticate against the Azure Data Lake Analytics account. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal - belongs. Type: string (or Expression with resultType string). + :param tenant: Required. The name or ID of the tenant to which the service + principal belongs. Type: string (or Expression with resultType string). 
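The table dataset pairs with the Azure Data Explorer linked service shown earlier; a minimal sketch with hypothetical names:

from azure.mgmt.datafactory.models import (
    AzureDataExplorerTableDataset, LinkedServiceReference)

adx_dataset = AzureDataExplorerTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='AzureDataExplorer1'),
    table='Events')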
:type tenant: object :param subscription_id: Data Lake Analytics account subscription ID (if different from Data Factory account). Type: string (or Expression with @@ -84,14 +86,14 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, account_name, tenant, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, subscription_id=None, resource_group_name=None, data_lake_analytics_uri=None, encrypted_credential=None): - super(AzureDataLakeAnalyticsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.account_name = account_name - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.subscription_id = subscription_id - self.resource_group_name = resource_group_name - self.data_lake_analytics_uri = data_lake_analytics_uri - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(AzureDataLakeAnalyticsLinkedService, self).__init__(**kwargs) + self.account_name = kwargs.get('account_name', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.subscription_id = kwargs.get('subscription_id', None) + self.resource_group_name = kwargs.get('resource_group_name', None) + self.data_lake_analytics_uri = kwargs.get('data_lake_analytics_uri', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'AzureDataLakeAnalytics' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service_py3.py new file mode 100644 index 000000000000..93250e2cef76 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service_py3.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureDataLakeAnalyticsLinkedService(LinkedService): + """Azure Data Lake Analytics linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param account_name: Required. The Azure Data Lake Analytics account name. + Type: string (or Expression with resultType string). + :type account_name: object + :param service_principal_id: The ID of the application used to + authenticate against the Azure Data Lake Analytics account. Type: string + (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The Key of the application used to + authenticate against the Azure Data Lake Analytics account. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: Required. The name or ID of the tenant to which the service + principal belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param subscription_id: Data Lake Analytics account subscription ID (if + different from Data Factory account). Type: string (or Expression with + resultType string). + :type subscription_id: object + :param resource_group_name: Data Lake Analytics account resource group + name (if different from Data Factory account). Type: string (or Expression + with resultType string). + :type resource_group_name: object + :param data_lake_analytics_uri: Azure Data Lake Analytics URI Type: string + (or Expression with resultType string). + :type data_lake_analytics_uri: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'account_name': {'required': True}, + 'tenant': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, + 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, + 'data_lake_analytics_uri': {'key': 'typeProperties.dataLakeAnalyticsUri', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, account_name, tenant, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, subscription_id=None, resource_group_name=None, data_lake_analytics_uri=None, encrypted_credential=None, **kwargs) -> None: + super(AzureDataLakeAnalyticsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.account_name = account_name + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.subscription_id = subscription_id + self.resource_group_name = resource_group_name + self.data_lake_analytics_uri = data_lake_analytics_uri + self.encrypted_credential = encrypted_credential + self.type = 'AzureDataLakeAnalytics' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset.py index 5f8b1fa7e1ea..de15057f78ed 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset.py @@ -15,6 +15,8 @@ class AzureDataLakeStoreDataset(Dataset): """Azure Data Lake Store dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class AzureDataLakeStoreDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class AzureDataLakeStoreDataset(Dataset): :param folder: The folder that this Dataset is in. 
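Usage sketch for the Data Lake Analytics linked service above (account, tenant, and service principal values are hypothetical):

from azure.mgmt.datafactory.models import (
    AzureDataLakeAnalyticsLinkedService, SecureString)

# account_name and tenant are required; the service principal fields are
# optional but needed for most non-interactive scenarios.
adla_ls = AzureDataLakeAnalyticsLinkedService(
    account_name='myadlaaccount',
    tenant='<tenant-id>',
    service_principal_id='<application-id>',
    service_principal_key=SecureString(value='<application-key>'))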
If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param folder_path: Path to the folder in the Azure Data Lake Store. Type: string (or Expression with resultType string). @@ -75,10 +77,10 @@ class AzureDataLakeStoreDataset(Dataset): 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, format=None, compression=None): - super(AzureDataLakeStoreDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.folder_path = folder_path - self.file_name = file_name - self.format = format - self.compression = compression + def __init__(self, **kwargs): + super(AzureDataLakeStoreDataset, self).__init__(**kwargs) + self.folder_path = kwargs.get('folder_path', None) + self.file_name = kwargs.get('file_name', None) + self.format = kwargs.get('format', None) + self.compression = kwargs.get('compression', None) self.type = 'AzureDataLakeStoreFile' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset_py3.py new file mode 100644 index 000000000000..d2df0ffebe7e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureDataLakeStoreDataset(Dataset): + """Azure Data Lake Store dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. 
+ :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param folder_path: Path to the folder in the Azure Data Lake Store. Type: + string (or Expression with resultType string). + :type folder_path: object + :param file_name: The name of the file in the Azure Data Lake Store. Type: + string (or Expression with resultType string). + :type file_name: object + :param format: The format of the Data Lake Store. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param compression: The data compression method used for the item(s) in + the Azure Data Lake Store. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, format=None, compression=None, **kwargs) -> None: + super(AzureDataLakeStoreDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.folder_path = folder_path + self.file_name = file_name + self.format = format + self.compression = compression + self.type = 'AzureDataLakeStoreFile' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service.py index 7ee881709aaf..f08e086cb500 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service.py @@ -15,6 +15,8 @@ class AzureDataLakeStoreLinkedService(LinkedService): """Azure Data Lake Store linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,10 +31,10 @@ class AzureDataLakeStoreLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. 
:type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param data_lake_store_uri: Data Lake Store service URI. Type: string (or - Expression with resultType string). + :param data_lake_store_uri: Required. Data Lake Store service URI. Type: + string (or Expression with resultType string). :type data_lake_store_uri: object :param service_principal_id: The ID of the application used to authenticate against the Azure Data Lake Store account. Type: string (or @@ -83,14 +85,14 @@ class AzureDataLakeStoreLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, data_lake_store_uri, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, tenant=None, account_name=None, subscription_id=None, resource_group_name=None, encrypted_credential=None): - super(AzureDataLakeStoreLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.data_lake_store_uri = data_lake_store_uri - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.account_name = account_name - self.subscription_id = subscription_id - self.resource_group_name = resource_group_name - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(AzureDataLakeStoreLinkedService, self).__init__(**kwargs) + self.data_lake_store_uri = kwargs.get('data_lake_store_uri', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.account_name = kwargs.get('account_name', None) + self.subscription_id = kwargs.get('subscription_id', None) + self.resource_group_name = kwargs.get('resource_group_name', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'AzureDataLakeStore' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service_py3.py new file mode 100644 index 000000000000..7b8ab293c0cf --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service_py3.py @@ -0,0 +1,98 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureDataLakeStoreLinkedService(LinkedService): + """Azure Data Lake Store linked service. + + All required parameters must be populated in order to send to Azure. 
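In practice, "all required parameters" for this linked service means data_lake_store_uri (type is pinned by the constructor). A minimal sketch with placeholder values, using service principal authentication:

    from azure.mgmt.datafactory.models import (
        AzureDataLakeStoreLinkedService,
        SecureString,
    )

    # Omitting data_lake_store_uri would trip the _validation map when the
    # model is serialized for a create_or_update call.
    adls_linked_service = AzureDataLakeStoreLinkedService(
        data_lake_store_uri='adl://myadlsaccount.azuredatalakestore.net/',
        service_principal_id='00000000-0000-0000-0000-000000000000',  # placeholder
        service_principal_key=SecureString(value='<app-key>'),
        tenant='mytenant.onmicrosoft.com',  # placeholder
    )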
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param data_lake_store_uri: Required. Data Lake Store service URI. Type: + string (or Expression with resultType string). + :type data_lake_store_uri: object + :param service_principal_id: The ID of the application used to + authenticate against the Azure Data Lake Store account. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The Key of the application used to + authenticate against the Azure Data Lake Store account. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param account_name: Data Lake Store account name. Type: string (or + Expression with resultType string). + :type account_name: object + :param subscription_id: Data Lake Store account subscription ID (if + different from Data Factory account). Type: string (or Expression with + resultType string). + :type subscription_id: object + :param resource_group_name: Data Lake Store account resource group name + (if different from Data Factory account). Type: string (or Expression with + resultType string). + :type resource_group_name: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'data_lake_store_uri': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'data_lake_store_uri': {'key': 'typeProperties.dataLakeStoreUri', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, + 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, + 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, data_lake_store_uri, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, tenant=None, account_name=None, subscription_id=None, resource_group_name=None, encrypted_credential=None, **kwargs) -> None: + super(AzureDataLakeStoreLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.data_lake_store_uri = data_lake_store_uri + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.account_name = account_name + self.subscription_id = subscription_id + self.resource_group_name = resource_group_name + self.encrypted_credential = encrypted_credential + self.type = 'AzureDataLakeStore' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location.py index 370f0ca78637..a4bf521a2005 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location.py @@ -15,10 +15,12 @@ class AzureDataLakeStoreLocation(DatasetLocation): """The location of azure data lake store dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Type of dataset storage location. + :param type: Required. Type of dataset storage location. :type type: str :param folder_path: Specify the folder path of dataset. 
Type: string (or Expression with resultType string) @@ -32,5 +34,12 @@ class AzureDataLakeStoreLocation(DatasetLocation): 'type': {'required': True}, } - def __init__(self, type, additional_properties=None, folder_path=None, file_name=None): - super(AzureDataLakeStoreLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name) + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataLakeStoreLocation, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location_py3.py new file mode 100644 index 000000000000..e7955731fc31 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class AzureDataLakeStoreLocation(DatasetLocation): + """The location of azure data lake store dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). 
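Unlike the sink and source models, DatasetLocation subclasses take type as a caller-supplied required argument. A sketch with placeholder paths; the exact type string below is an assumption based on the class name:

    from azure.mgmt.datafactory.models import AzureDataLakeStoreLocation

    location = AzureDataLakeStoreLocation(
        type='AzureDataLakeStoreLocation',  # assumed discriminator value
        folder_path='raw/events/2019/06',   # placeholder; may be an ADF expression
        file_name='part-000.csv',           # placeholder
    )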
+ :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(AzureDataLakeStoreLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings.py index fd9d1e2a18ac..213d69966baf 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings.py @@ -15,10 +15,12 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): """Azure data lake store read settings. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: The read setting type. + :param type: Required. The read setting type. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType @@ -61,11 +63,11 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, type, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery=None, modified_datetime_start=None, modified_datetime_end=None): - super(AzureDataLakeStoreReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections) - self.recursive = recursive - self.wildcard_folder_path = wildcard_folder_path - self.wildcard_file_name = wildcard_file_name - self.enable_partition_discovery = enable_partition_discovery - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end + def __init__(self, **kwargs): + super(AzureDataLakeStoreReadSettings, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings_py3.py new file mode 100644 index 000000000000..b4bccc5e78a3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_settings_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings_py3 import StoreReadSettings + + +class AzureDataLakeStoreReadSettings(StoreReadSettings): + """Azure data lake store read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: ADLS wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: ADLS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). 
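A sketch of how these read settings combine; the paths and dates are placeholders. Wildcards select the files and the modified-datetime pair bounds them to a window:

    from azure.mgmt.datafactory.models import AzureDataLakeStoreReadSettings

    read_settings = AzureDataLakeStoreReadSettings(
        type='AzureDataLakeStoreReadSettings',  # required, caller-supplied here
        recursive=True,
        wildcard_folder_path='raw/events/*',    # placeholder
        wildcard_file_name='*.csv',             # placeholder
        modified_datetime_start='2019-06-01T00:00:00Z',
        modified_datetime_end='2019-06-07T00:00:00Z',
    )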
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(AzureDataLakeStoreReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py index 89d27bc70416..e882698c2ca6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py @@ -15,6 +15,8 @@ class AzureDataLakeStoreSink(CopySink): """A copy activity Azure Data Lake Store sink. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -36,7 +38,7 @@ class AzureDataLakeStoreSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param copy_behavior: The type of copy behavior for copy sink. 
:type copy_behavior: object @@ -60,8 +62,8 @@ class AzureDataLakeStoreSink(CopySink): 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, } - def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, enable_adls_single_file_parallel=None): - super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.copy_behavior = copy_behavior - self.enable_adls_single_file_parallel = enable_adls_single_file_parallel + def __init__(self, **kwargs): + super(AzureDataLakeStoreSink, self).__init__(**kwargs) + self.copy_behavior = kwargs.get('copy_behavior', None) + self.enable_adls_single_file_parallel = kwargs.get('enable_adls_single_file_parallel', None) self.type = 'AzureDataLakeStoreSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py new file mode 100644 index 000000000000..0f96cea725e2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzureDataLakeStoreSink(CopySink): + """A copy activity Azure Data Lake Store sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param enable_adls_single_file_parallel: Single File Parallel. 
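A construction sketch for this sink, matching the py3 signature below; type is pinned by the constructor. The 'PreserveHierarchy' value is an assumption from the copy activity's documented copy behaviors, not something this patch defines:

    from azure.mgmt.datafactory.models import AzureDataLakeStoreSink

    sink = AzureDataLakeStoreSink(
        copy_behavior='PreserveHierarchy',  # assumed service-side value
        enable_adls_single_file_parallel=False,
        max_concurrent_connections=4,
        sink_retry_count=3,
        sink_retry_wait='00:00:30',  # matches the documented timespan pattern
    )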
+ :type enable_adls_single_file_parallel: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, enable_adls_single_file_parallel=None, **kwargs) -> None: + super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.copy_behavior = copy_behavior + self.enable_adls_single_file_parallel = enable_adls_single_file_parallel + self.type = 'AzureDataLakeStoreSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source.py index e2c97c39e570..9d2046049a30 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source.py @@ -15,6 +15,8 @@ class AzureDataLakeStoreSource(CopySource): """A copy activity Azure Data Lake source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class AzureDataLakeStoreSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param recursive: If true, files under the folder path will be read recursively. Default is true. 
Type: boolean (or Expression with resultType @@ -50,7 +52,7 @@ class AzureDataLakeStoreSource(CopySource): 'recursive': {'key': 'recursive', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None): - super(AzureDataLakeStoreSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.recursive = recursive + def __init__(self, **kwargs): + super(AzureDataLakeStoreSource, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) self.type = 'AzureDataLakeStoreSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source_py3.py new file mode 100644 index 000000000000..e1d883972220 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source_py3.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AzureDataLakeStoreSource(CopySource): + """A copy activity Azure Data Lake source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). 
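And the matching source; recursive defaults to true service-side, so the explicit value below (like the retry placeholders) is only illustrative:

    from azure.mgmt.datafactory.models import AzureDataLakeStoreSource

    source = AzureDataLakeStoreSource(
        recursive=True,
        source_retry_count=3,
        source_retry_wait='00:00:30',
        max_concurrent_connections=8,
    )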
+ :type recursive: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, **kwargs) -> None: + super(AzureDataLakeStoreSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.type = 'AzureDataLakeStoreSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings.py index 56dab7c2ddb2..6cf8deeacb07 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings.py @@ -15,6 +15,8 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): """Azure data lake store write settings. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -24,7 +26,7 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): :type max_concurrent_connections: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str """ @@ -32,6 +34,13 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): 'type': {'required': True}, } - def __init__(self, additional_properties=None, max_concurrent_connections=None, copy_behavior=None): - super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior) + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AzureDataLakeStoreWriteSettings, self).__init__(**kwargs) self.type = 'AzureDataLakeStoreWriteSettings' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings_py3.py new file mode 100644 index 000000000000..0b9a0e38e41c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_settings_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .store_write_settings_py3 import StoreWriteSettings
+
+
+class AzureDataLakeStoreWriteSettings(StoreWriteSettings):
+ """Azure data lake store write settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param copy_behavior: The type of copy behavior for copy sink.
+ :type copy_behavior: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ }
+
+ def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None:
+ super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs)
+ self.type = 'AzureDataLakeStoreWriteSettings'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py
index 15d44e98a22b..6cc4c12674cb 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py
@@ -15,6 +15,8 @@ class AzureDatabricksLinkedService(LinkedService):
 """Azure Databricks linked service.
+ All required parameters must be populated in order to send to Azure.
+
 :param additional_properties: Unmatched properties from the message are
 deserialized this collection
 :type additional_properties: dict[str, object]
@@ -29,14 +31,14 @@ class AzureDatabricksLinkedService(LinkedService):
 :param annotations: List of tags that can be used for describing the
 linked service.
 :type annotations: list[object]
- :param type: Constant filled by server.
+ :param type: Required. Constant filled by server.
 :type type: str
- :param domain: <REGION>.azuredatabricks.net, domain name of your
+ :param domain: Required. <REGION>.azuredatabricks.net, domain name of your
 Databricks deployment. Type: string (or Expression with resultType
 string).
 :type domain: object
- :param access_token: Access token for databricks REST API. Refer to
- https://docs.azuredatabricks.net/api/latest/authentication.html. Type:
+ :param access_token: Required. Access token for databricks REST API. Refer
+ to https://docs.azuredatabricks.net/api/latest/authentication.html. Type:
 string (or Expression with resultType string).
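A sketch for the write settings class completed above, matching its py3 constructor; the copy behavior value is the same assumption as for the sink:

    from azure.mgmt.datafactory.models import AzureDataLakeStoreWriteSettings

    write_settings = AzureDataLakeStoreWriteSettings(
        max_concurrent_connections=4,
        copy_behavior='PreserveHierarchy',  # assumed service-side value
    )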
:type access_token: ~azure.mgmt.datafactory.models.SecretBase :param existing_cluster_id: The id of an existing cluster that will be @@ -106,19 +108,19 @@ class AzureDatabricksLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, domain, access_token, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, existing_cluster_id=None, new_cluster_version=None, new_cluster_num_of_worker=None, new_cluster_node_type=None, new_cluster_spark_conf=None, new_cluster_spark_env_vars=None, new_cluster_custom_tags=None, new_cluster_driver_node_type=None, new_cluster_init_scripts=None, new_cluster_enable_elastic_disk=None, encrypted_credential=None): - super(AzureDatabricksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.domain = domain - self.access_token = access_token - self.existing_cluster_id = existing_cluster_id - self.new_cluster_version = new_cluster_version - self.new_cluster_num_of_worker = new_cluster_num_of_worker - self.new_cluster_node_type = new_cluster_node_type - self.new_cluster_spark_conf = new_cluster_spark_conf - self.new_cluster_spark_env_vars = new_cluster_spark_env_vars - self.new_cluster_custom_tags = new_cluster_custom_tags - self.new_cluster_driver_node_type = new_cluster_driver_node_type - self.new_cluster_init_scripts = new_cluster_init_scripts - self.new_cluster_enable_elastic_disk = new_cluster_enable_elastic_disk - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(AzureDatabricksLinkedService, self).__init__(**kwargs) + self.domain = kwargs.get('domain', None) + self.access_token = kwargs.get('access_token', None) + self.existing_cluster_id = kwargs.get('existing_cluster_id', None) + self.new_cluster_version = kwargs.get('new_cluster_version', None) + self.new_cluster_num_of_worker = kwargs.get('new_cluster_num_of_worker', None) + self.new_cluster_node_type = kwargs.get('new_cluster_node_type', None) + self.new_cluster_spark_conf = kwargs.get('new_cluster_spark_conf', None) + self.new_cluster_spark_env_vars = kwargs.get('new_cluster_spark_env_vars', None) + self.new_cluster_custom_tags = kwargs.get('new_cluster_custom_tags', None) + self.new_cluster_driver_node_type = kwargs.get('new_cluster_driver_node_type', None) + self.new_cluster_init_scripts = kwargs.get('new_cluster_init_scripts', None) + self.new_cluster_enable_elastic_disk = kwargs.get('new_cluster_enable_elastic_disk', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'AzureDatabricks' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service_py3.py new file mode 100644 index 000000000000..6299dac1e3f2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service_py3.py @@ -0,0 +1,126 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class AzureDatabricksLinkedService(LinkedService):
+ """Azure Databricks linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param domain: Required. <REGION>.azuredatabricks.net, domain name of your
+ Databricks deployment. Type: string (or Expression with resultType
+ string).
+ :type domain: object
+ :param access_token: Required. Access token for databricks REST API. Refer
+ to https://docs.azuredatabricks.net/api/latest/authentication.html. Type:
+ string (or Expression with resultType string).
+ :type access_token: ~azure.mgmt.datafactory.models.SecretBase
+ :param existing_cluster_id: The id of an existing cluster that will be
+ used for all runs of this job. Type: string (or Expression with resultType
+ string).
+ :type existing_cluster_id: object
+ :param new_cluster_version: The Spark version of new cluster. Type: string
+ (or Expression with resultType string).
+ :type new_cluster_version: object
+ :param new_cluster_num_of_worker: Number of worker nodes that new cluster
+ should have. A string formatted Int32, like '1' means numOfWorker is 1 or
+ '1:10' means auto-scale from 1 as min and 10 as max. Type: string (or
+ Expression with resultType string).
+ :type new_cluster_num_of_worker: object
+ :param new_cluster_node_type: The node types of new cluster. Type: string
+ (or Expression with resultType string).
+ :type new_cluster_node_type: object
+ :param new_cluster_spark_conf: A set of optional, user-specified Spark
+ configuration key-value pairs.
+ :type new_cluster_spark_conf: dict[str, object]
+ :param new_cluster_spark_env_vars: A set of optional, user-specified Spark
+ environment variables key-value pairs.
+ :type new_cluster_spark_env_vars: dict[str, object]
+ :param new_cluster_custom_tags: Additional tags for cluster resources.
+ :type new_cluster_custom_tags: dict[str, object]
+ :param new_cluster_driver_node_type: The driver node type for the new
+ cluster. Type: string (or Expression with resultType string).
+ :type new_cluster_driver_node_type: object
+ :param new_cluster_init_scripts: User-defined initialization scripts for
+ the new cluster. Type: array of strings (or Expression with resultType
+ array of strings).
+ :type new_cluster_init_scripts: object
+ :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new
+ cluster. Type: boolean (or Expression with resultType boolean).
+ :type new_cluster_enable_elastic_disk: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager.
Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'domain': {'required': True}, + 'access_token': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, + 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, + 'new_cluster_num_of_worker': {'key': 'typeProperties.newClusterNumOfWorker', 'type': 'object'}, + 'new_cluster_node_type': {'key': 'typeProperties.newClusterNodeType', 'type': 'object'}, + 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, + 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, + 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, + 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, + 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, + 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, domain, access_token, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, existing_cluster_id=None, new_cluster_version=None, new_cluster_num_of_worker=None, new_cluster_node_type=None, new_cluster_spark_conf=None, new_cluster_spark_env_vars=None, new_cluster_custom_tags=None, new_cluster_driver_node_type=None, new_cluster_init_scripts=None, new_cluster_enable_elastic_disk=None, encrypted_credential=None, **kwargs) -> None: + super(AzureDatabricksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.domain = domain + self.access_token = access_token + self.existing_cluster_id = existing_cluster_id + self.new_cluster_version = new_cluster_version + self.new_cluster_num_of_worker = new_cluster_num_of_worker + self.new_cluster_node_type = new_cluster_node_type + self.new_cluster_spark_conf = new_cluster_spark_conf + self.new_cluster_spark_env_vars = new_cluster_spark_env_vars + self.new_cluster_custom_tags = new_cluster_custom_tags + self.new_cluster_driver_node_type = new_cluster_driver_node_type + self.new_cluster_init_scripts = new_cluster_init_scripts + self.new_cluster_enable_elastic_disk = new_cluster_enable_elastic_disk + self.encrypted_credential = encrypted_credential + self.type = 'AzureDatabricks' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity.py index 292beae2bf2e..68b02e5f771f 100644 --- 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity.py @@ -15,10 +15,12 @@ class AzureFunctionActivity(ExecutionActivity): """Azure Function activity. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Activity name. + :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str @@ -26,19 +28,20 @@ class AzureFunctionActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param method: Rest API method for target endpoint. Possible values - include: 'GET', 'POST', 'PUT', 'DELETE', 'OPTIONS', 'HEAD', 'TRACE' + :param method: Required. Rest API method for target endpoint. Possible + values include: 'GET', 'POST', 'PUT', 'DELETE', 'OPTIONS', 'HEAD', 'TRACE' :type method: str or ~azure.mgmt.datafactory.models.AzureFunctionActivityMethod - :param function_name: Name of the Function that the Azure Function - Activity will call. Type: string (or Expression with resultType string) + :param function_name: Required. Name of the Function that the Azure + Function Activity will call. Type: string (or Expression with resultType + string) :type function_name: object :param headers: Represents the headers that will be sent to the request. 
For example, to set the language and type on a request: "headers" : { @@ -73,10 +76,10 @@ class AzureFunctionActivity(ExecutionActivity): 'body': {'key': 'typeProperties.body', 'type': 'object'}, } - def __init__(self, name, method, function_name, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, headers=None, body=None): - super(AzureFunctionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) - self.method = method - self.function_name = function_name - self.headers = headers - self.body = body + def __init__(self, **kwargs): + super(AzureFunctionActivity, self).__init__(**kwargs) + self.method = kwargs.get('method', None) + self.function_name = kwargs.get('function_name', None) + self.headers = kwargs.get('headers', None) + self.body = kwargs.get('body', None) self.type = 'AzureFunctionActivity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity_py3.py new file mode 100644 index 000000000000..95bb1ca260e7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity_py3.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class AzureFunctionActivity(ExecutionActivity): + """Azure Function activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param method: Required. Rest API method for target endpoint. Possible + values include: 'GET', 'POST', 'PUT', 'DELETE', 'OPTIONS', 'HEAD', 'TRACE' + :type method: str or + ~azure.mgmt.datafactory.models.AzureFunctionActivityMethod + :param function_name: Required. Name of the Function that the Azure + Function Activity will call. Type: string (or Expression with resultType + string) + :type function_name: object + :param headers: Represents the headers that will be sent to the request. 
+ For example, to set the language and type on a request: "headers" : { + "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: + string (or Expression with resultType string). + :type headers: object + :param body: Represents the payload that will be sent to the endpoint. + Required for POST/PUT method, not allowed for GET method Type: string (or + Expression with resultType string). + :type body: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'method': {'required': True}, + 'function_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'method': {'key': 'typeProperties.method', 'type': 'str'}, + 'function_name': {'key': 'typeProperties.functionName', 'type': 'object'}, + 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, + 'body': {'key': 'typeProperties.body', 'type': 'object'}, + } + + def __init__(self, *, name: str, method, function_name, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, headers=None, body=None, **kwargs) -> None: + super(AzureFunctionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.method = method + self.function_name = function_name + self.headers = headers + self.body = body + self.type = 'AzureFunctionActivity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service.py index 581c4bd11b65..2ed5b870a778 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service.py @@ -15,6 +15,8 @@ class AzureFunctionLinkedService(LinkedService): """Azure Function linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,10 +31,10 @@ class AzureFunctionLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param function_app_url: The endpoint of the Azure Function App. URL will - be in the format https://.azurewebsites.net. + :param function_app_url: Required. The endpoint of the Azure Function App. + URL will be in the format https://.azurewebsites.net. :type function_app_url: object :param function_key: Function or Host key for Azure Function App. 
:type function_key: ~azure.mgmt.datafactory.models.SecretBase @@ -59,9 +61,9 @@ class AzureFunctionLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, function_app_url, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, function_key=None, encrypted_credential=None): - super(AzureFunctionLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.function_app_url = function_app_url - self.function_key = function_key - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(AzureFunctionLinkedService, self).__init__(**kwargs) + self.function_app_url = kwargs.get('function_app_url', None) + self.function_key = kwargs.get('function_key', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'AzureFunction' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service_py3.py new file mode 100644 index 000000000000..a1bfdbe8b6c1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureFunctionLinkedService(LinkedService): + """Azure Function linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param function_app_url: Required. The endpoint of the Azure Function App. + URL will be in the format https://.azurewebsites.net. + :type function_app_url: object + :param function_key: Function or Host key for Azure Function App. + :type function_key: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
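
For orientation, a minimal usage sketch of the AzureFunctionActivity / AzureFunctionLinkedService pair added above. It assumes the models are imported from azure.mgmt.datafactory.models as elsewhere in this package; the app URL, key, and reference names are placeholders, not values taken from this patch:

    from azure.mgmt.datafactory.models import (
        AzureFunctionActivity, AzureFunctionLinkedService,
        LinkedServiceReference, SecureString)

    # Linked service pointing at a (hypothetical) Function App.
    # function_app_url is the only required type property.
    func_ls = AzureFunctionLinkedService(
        function_app_url='https://myfuncapp.azurewebsites.net',
        function_key=SecureString(value='<host-key>'))

    # Activity that POSTs a payload to one function in that app.
    run_func = AzureFunctionActivity(
        name='CallMyFunction',            # required
        method='POST',                    # required; an AzureFunctionActivityMethod value
        function_name='HttpTriggeredFn',  # required
        linked_service_name=LinkedServiceReference(reference_name='AzureFunctionLS'),
        headers={'Content-Type': 'application/json'},
        body={'hello': 'world'})          # required for POST/PUT, not allowed for GET
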
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'function_app_url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'function_app_url': {'key': 'typeProperties.functionAppUrl', 'type': 'object'}, + 'function_key': {'key': 'typeProperties.functionKey', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, function_app_url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, function_key=None, encrypted_credential=None, **kwargs) -> None: + super(AzureFunctionLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.function_app_url = function_app_url + self.function_key = function_key + self.encrypted_credential = encrypted_credential + self.type = 'AzureFunction' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service.py index 97daf26a30b8..768f0d83ae93 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service.py @@ -15,6 +15,8 @@ class AzureKeyVaultLinkedService(LinkedService): """Azure Key Vault linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,9 +31,9 @@ class AzureKeyVaultLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param base_url: The base URL of the Azure Key Vault. e.g. + :param base_url: Required. The base URL of the Azure Key Vault. e.g. https://myakv.vault.azure.net Type: string (or Expression with resultType string). 
:type base_url: object @@ -52,7 +54,7 @@ class AzureKeyVaultLinkedService(LinkedService): 'base_url': {'key': 'typeProperties.baseUrl', 'type': 'object'}, } - def __init__(self, base_url, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None): - super(AzureKeyVaultLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.base_url = base_url + def __init__(self, **kwargs): + super(AzureKeyVaultLinkedService, self).__init__(**kwargs) + self.base_url = kwargs.get('base_url', None) self.type = 'AzureKeyVault' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service_py3.py new file mode 100644 index 000000000000..50f4a58a5a1b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service_py3.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureKeyVaultLinkedService(LinkedService): + """Azure Key Vault linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param base_url: Required. The base URL of the Azure Key Vault. e.g. + https://myakv.vault.azure.net Type: string (or Expression with resultType + string). 
+ :type base_url: object + """ + + _validation = { + 'type': {'required': True}, + 'base_url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'base_url': {'key': 'typeProperties.baseUrl', 'type': 'object'}, + } + + def __init__(self, *, base_url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: + super(AzureKeyVaultLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.base_url = base_url + self.type = 'AzureKeyVault' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference.py index 9e5e976fa083..28d3e7d31cee 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference.py @@ -15,12 +15,14 @@ class AzureKeyVaultSecretReference(SecretBase): """Azure Key Vault secret reference. - :param type: Constant filled by server. + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. :type type: str - :param store: The Azure Key Vault linked service reference. + :param store: Required. The Azure Key Vault linked service reference. :type store: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param secret_name: The name of the secret in Azure Key Vault. Type: - string (or Expression with resultType string). + :param secret_name: Required. The name of the secret in Azure Key Vault. + Type: string (or Expression with resultType string). :type secret_name: object :param secret_version: The version of the secret in Azure Key Vault. The default value is the latest version of the secret. 
Type: string (or @@ -41,9 +43,9 @@ class AzureKeyVaultSecretReference(SecretBase): 'secret_version': {'key': 'secretVersion', 'type': 'object'}, } - def __init__(self, store, secret_name, secret_version=None): - super(AzureKeyVaultSecretReference, self).__init__() - self.store = store - self.secret_name = secret_name - self.secret_version = secret_version + def __init__(self, **kwargs): + super(AzureKeyVaultSecretReference, self).__init__(**kwargs) + self.store = kwargs.get('store', None) + self.secret_name = kwargs.get('secret_name', None) + self.secret_version = kwargs.get('secret_version', None) self.type = 'AzureKeyVaultSecret' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference_py3.py new file mode 100644 index 000000000000..c5fe4c7afbd4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference_py3.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .secret_base_py3 import SecretBase + + +class AzureKeyVaultSecretReference(SecretBase): + """Azure Key Vault secret reference. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param store: Required. The Azure Key Vault linked service reference. + :type store: ~azure.mgmt.datafactory.models.LinkedServiceReference + :param secret_name: Required. The name of the secret in Azure Key Vault. + Type: string (or Expression with resultType string). + :type secret_name: object + :param secret_version: The version of the secret in Azure Key Vault. The + default value is the latest version of the secret. Type: string (or + Expression with resultType string). 
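
A corresponding sketch for the Key Vault models: the linked service describes the vault itself, and the secret reference points back at that linked service by name. The reference names here are hypothetical:

    from azure.mgmt.datafactory.models import (
        AzureKeyVaultLinkedService, AzureKeyVaultSecretReference,
        LinkedServiceReference)

    # Linked service for the vault; base_url is required.
    akv_ls = AzureKeyVaultLinkedService(
        base_url='https://myakv.vault.azure.net')

    # Reference to one secret in that vault. 'store' and 'secret_name' are
    # required; secret_version defaults to the latest version when omitted.
    password_ref = AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='AzureKeyVaultLS'),
        secret_name='db-password')
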
+ :type secret_version: object + """ + + _validation = { + 'type': {'required': True}, + 'store': {'required': True}, + 'secret_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'store': {'key': 'store', 'type': 'LinkedServiceReference'}, + 'secret_name': {'key': 'secretName', 'type': 'object'}, + 'secret_version': {'key': 'secretVersion', 'type': 'object'}, + } + + def __init__(self, *, store, secret_name, secret_version=None, **kwargs) -> None: + super(AzureKeyVaultSecretReference, self).__init__(**kwargs) + self.store = store + self.secret_name = secret_name + self.secret_version = secret_version + self.type = 'AzureKeyVaultSecret' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service.py index 4f0f8b9dcb9f..d2dc7db88851 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service.py @@ -15,6 +15,8 @@ class AzureMariaDBLinkedService(LinkedService): """Azure Database for MariaDB linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class AzureMariaDBLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. @@ -59,9 +61,9 @@ class AzureMariaDBLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None): - super(AzureMariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.connection_string = connection_string - self.pwd = pwd - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(AzureMariaDBLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.pwd = kwargs.get('pwd', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'AzureMariaDB' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service_py3.py new file mode 100644 index 000000000000..c80015ed6b45 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_linked_service_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureMariaDBLinkedService(LinkedService): + """Azure Database for MariaDB linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: + super(AzureMariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.pwd = pwd + self.encrypted_credential = encrypted_credential + self.type = 'AzureMariaDB' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source.py index b87be8d78c18..229e6f4311e3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source.py @@ -15,6 +15,8 @@ class AzureMariaDBSource(CopySource): """A copy activity Azure MariaDB source. 
+ All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class AzureMariaDBSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). @@ -49,7 +51,7 @@ class AzureMariaDBSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(AzureMariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(AzureMariaDBSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'AzureMariaDBSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source_py3.py new file mode 100644 index 000000000000..11358f899e51 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AzureMariaDBSource(CopySource): + """A copy activity Azure MariaDB source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(AzureMariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'AzureMariaDBSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset.py index 7076e0e57d91..a06c722279f2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset.py @@ -15,6 +15,8 @@ class AzureMariaDBTableDataset(Dataset): """Azure Database for MariaDB dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class AzureMariaDBTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class AzureMariaDBTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). 
@@ -64,7 +66,7 @@ class AzureMariaDBTableDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): - super(AzureMariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name + def __init__(self, **kwargs): + super(AzureMariaDBTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) self.type = 'AzureMariaDBTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset_py3.py new file mode 100644 index 000000000000..9c6fd648af20 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_maria_db_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureMariaDBTableDataset(Dataset): + """Azure Database for MariaDB dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
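
The three Azure MariaDB models above compose in the usual linked-service / dataset / copy-source pattern. A hedged sketch, with placeholder server, table, and reference names:

    from azure.mgmt.datafactory.models import (
        AzureMariaDBLinkedService, AzureMariaDBTableDataset,
        AzureMariaDBSource, LinkedServiceReference)

    # Only 'type' is validated as required on the linked service; the
    # connection string can carry everything, or the password can come
    # from Key Vault via the 'pwd' secret reference instead.
    maria_ls = AzureMariaDBLinkedService(
        connection_string='Server=myserver.mariadb.database.azure.com;...')

    # Dataset over one table; linked_service_name is required.
    maria_ds = AzureMariaDBTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='AzureMariaDBLS'),
        table_name='sales')

    # Copy-activity source with an optional query.
    maria_src = AzureMariaDBSource(query='SELECT * FROM sales')
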
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(AzureMariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'AzureMariaDBTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity.py index 8e50d2477375..f6c7c75a1299 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity.py @@ -15,10 +15,12 @@ class AzureMLBatchExecutionActivity(ExecutionActivity): """Azure ML Batch Execution activity. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Activity name. + :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str @@ -26,7 +28,7 @@ class AzureMLBatchExecutionActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param linked_service_name: Linked service reference. 
:type linked_service_name: @@ -72,9 +74,9 @@ class AzureMLBatchExecutionActivity(ExecutionActivity): 'web_service_inputs': {'key': 'typeProperties.webServiceInputs', 'type': '{AzureMLWebServiceFile}'}, } - def __init__(self, name, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, global_parameters=None, web_service_outputs=None, web_service_inputs=None): - super(AzureMLBatchExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) - self.global_parameters = global_parameters - self.web_service_outputs = web_service_outputs - self.web_service_inputs = web_service_inputs + def __init__(self, **kwargs): + super(AzureMLBatchExecutionActivity, self).__init__(**kwargs) + self.global_parameters = kwargs.get('global_parameters', None) + self.web_service_outputs = kwargs.get('web_service_outputs', None) + self.web_service_inputs = kwargs.get('web_service_inputs', None) self.type = 'AzureMLBatchExecution' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity_py3.py new file mode 100644 index 000000000000..e273c0b38128 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class AzureMLBatchExecutionActivity(ExecutionActivity): + """Azure ML Batch Execution activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param global_parameters: Key,Value pairs to be passed to the Azure ML + Batch Execution Service endpoint. Keys must match the names of web service + parameters defined in the published Azure ML web service. Values will be + passed in the GlobalParameters property of the Azure ML batch execution + request. 
+ :type global_parameters: dict[str, object] + :param web_service_outputs: Key,Value pairs, mapping the names of Azure ML + endpoint's Web Service Outputs to AzureMLWebServiceFile objects specifying + the output Blob locations. This information will be passed in the + WebServiceOutputs property of the Azure ML batch execution request. + :type web_service_outputs: dict[str, + ~azure.mgmt.datafactory.models.AzureMLWebServiceFile] + :param web_service_inputs: Key,Value pairs, mapping the names of Azure ML + endpoint's Web Service Inputs to AzureMLWebServiceFile objects specifying + the input Blob locations.. This information will be passed in the + WebServiceInputs property of the Azure ML batch execution request. + :type web_service_inputs: dict[str, + ~azure.mgmt.datafactory.models.AzureMLWebServiceFile] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'global_parameters': {'key': 'typeProperties.globalParameters', 'type': '{object}'}, + 'web_service_outputs': {'key': 'typeProperties.webServiceOutputs', 'type': '{AzureMLWebServiceFile}'}, + 'web_service_inputs': {'key': 'typeProperties.webServiceInputs', 'type': '{AzureMLWebServiceFile}'}, + } + + def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, global_parameters=None, web_service_outputs=None, web_service_inputs=None, **kwargs) -> None: + super(AzureMLBatchExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.global_parameters = global_parameters + self.web_service_outputs = web_service_outputs + self.web_service_inputs = web_service_inputs + self.type = 'AzureMLBatchExecution' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service.py index 54ece64b3755..08dfec98a6bf 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service.py @@ -15,6 +15,8 @@ class AzureMLLinkedService(LinkedService): """Azure ML Web Service linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,12 +31,13 @@ class AzureMLLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. 
:type type: str - :param ml_endpoint: The Batch Execution REST URL for an Azure ML Web - Service endpoint. Type: string (or Expression with resultType string). + :param ml_endpoint: Required. The Batch Execution REST URL for an Azure ML + Web Service endpoint. Type: string (or Expression with resultType string). :type ml_endpoint: object - :param api_key: The API key for accessing the Azure ML model endpoint. + :param api_key: Required. The API key for accessing the Azure ML model + endpoint. :type api_key: ~azure.mgmt.datafactory.models.SecretBase :param update_resource_endpoint: The Update Resource REST URL for an Azure ML Web Service endpoint. Type: string (or Expression with resultType @@ -79,13 +82,13 @@ class AzureMLLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, ml_endpoint, api_key, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, update_resource_endpoint=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None): - super(AzureMLLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.ml_endpoint = ml_endpoint - self.api_key = api_key - self.update_resource_endpoint = update_resource_endpoint - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(AzureMLLinkedService, self).__init__(**kwargs) + self.ml_endpoint = kwargs.get('ml_endpoint', None) + self.api_key = kwargs.get('api_key', None) + self.update_resource_endpoint = kwargs.get('update_resource_endpoint', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'AzureML' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service_py3.py new file mode 100644 index 000000000000..c77a692adc03 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service_py3.py @@ -0,0 +1,94 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureMLLinkedService(LinkedService): + """Azure ML Web Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param ml_endpoint: Required. The Batch Execution REST URL for an Azure ML + Web Service endpoint. Type: string (or Expression with resultType string). + :type ml_endpoint: object + :param api_key: Required. The API key for accessing the Azure ML model + endpoint. + :type api_key: ~azure.mgmt.datafactory.models.SecretBase + :param update_resource_endpoint: The Update Resource REST URL for an Azure + ML Web Service endpoint. Type: string (or Expression with resultType + string). + :type update_resource_endpoint: object + :param service_principal_id: The ID of the service principal used to + authenticate against the ARM-based updateResourceEndpoint of an Azure ML + web service. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to + authenticate against the ARM-based updateResourceEndpoint of an Azure ML + web service. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
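
To show how the Azure ML pieces fit together, here is an illustrative batch-scoring construction. The endpoint URL, key, and input names are hypothetical; AzureMLWebServiceFile maps an endpoint's named input or output to a blob location reached through a storage linked service:

    from azure.mgmt.datafactory.models import (
        AzureMLLinkedService, AzureMLBatchExecutionActivity,
        AzureMLWebServiceFile, LinkedServiceReference, SecureString)

    # ml_endpoint and api_key are the two required type properties.
    aml_ls = AzureMLLinkedService(
        ml_endpoint='https://services.azureml.net/workspaces/<ws>/jobs',
        api_key=SecureString(value='<api-key>'))

    # Batch execution run. Keys in global_parameters must match the web
    # service's published parameter names.
    score = AzureMLBatchExecutionActivity(
        name='ScoreBatch',
        linked_service_name=LinkedServiceReference(reference_name='AzureMLLS'),
        web_service_inputs={
            'input1': AzureMLWebServiceFile(
                file_path='mycontainer/input.csv',
                linked_service_name=LinkedServiceReference(reference_name='AzureBlobLS'))},
        global_parameters={'threshold': 0.5})
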
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'ml_endpoint': {'required': True}, + 'api_key': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'ml_endpoint': {'key': 'typeProperties.mlEndpoint', 'type': 'object'}, + 'api_key': {'key': 'typeProperties.apiKey', 'type': 'SecretBase'}, + 'update_resource_endpoint': {'key': 'typeProperties.updateResourceEndpoint', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, ml_endpoint, api_key, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, update_resource_endpoint=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: + super(AzureMLLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.ml_endpoint = ml_endpoint + self.api_key = api_key + self.update_resource_endpoint = update_resource_endpoint + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential + self.type = 'AzureML' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity.py index ef1ef1c49aca..c47a2d81648e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity.py @@ -15,10 +15,12 @@ class AzureMLUpdateResourceActivity(ExecutionActivity): """Azure ML Update Resource management activity. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Activity name. + :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str @@ -26,23 +28,23 @@ class AzureMLUpdateResourceActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. 
:type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param trained_model_name: Name of the Trained Model module in the Web - Service experiment to be updated. Type: string (or Expression with + :param trained_model_name: Required. Name of the Trained Model module in + the Web Service experiment to be updated. Type: string (or Expression with resultType string). :type trained_model_name: object - :param trained_model_linked_service_name: Name of Azure Storage linked - service holding the .ilearner file that will be uploaded by the update - operation. + :param trained_model_linked_service_name: Required. Name of Azure Storage + linked service holding the .ilearner file that will be uploaded by the + update operation. :type trained_model_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param trained_model_file_path: The relative file path in + :param trained_model_file_path: Required. The relative file path in trainedModelLinkedService to represent the .ilearner file that will be uploaded by the update operation. Type: string (or Expression with resultType string). @@ -71,9 +73,9 @@ class AzureMLUpdateResourceActivity(ExecutionActivity): 'trained_model_file_path': {'key': 'typeProperties.trainedModelFilePath', 'type': 'object'}, } - def __init__(self, name, trained_model_name, trained_model_linked_service_name, trained_model_file_path, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None): - super(AzureMLUpdateResourceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) - self.trained_model_name = trained_model_name - self.trained_model_linked_service_name = trained_model_linked_service_name - self.trained_model_file_path = trained_model_file_path + def __init__(self, **kwargs): + super(AzureMLUpdateResourceActivity, self).__init__(**kwargs) + self.trained_model_name = kwargs.get('trained_model_name', None) + self.trained_model_linked_service_name = kwargs.get('trained_model_linked_service_name', None) + self.trained_model_file_path = kwargs.get('trained_model_file_path', None) self.type = 'AzureMLUpdateResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity_py3.py new file mode 100644 index 000000000000..50a5932f0bf0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity_py3.py @@ -0,0 +1,81 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class AzureMLUpdateResourceActivity(ExecutionActivity): + """Azure ML Update Resource management activity. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param trained_model_name: Required. Name of the Trained Model module in + the Web Service experiment to be updated. Type: string (or Expression with + resultType string). + :type trained_model_name: object + :param trained_model_linked_service_name: Required. Name of Azure Storage + linked service holding the .ilearner file that will be uploaded by the + update operation. + :type trained_model_linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param trained_model_file_path: Required. The relative file path in + trainedModelLinkedService to represent the .ilearner file that will be + uploaded by the update operation. Type: string (or Expression with + resultType string). + :type trained_model_file_path: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'trained_model_name': {'required': True}, + 'trained_model_linked_service_name': {'required': True}, + 'trained_model_file_path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'trained_model_name': {'key': 'typeProperties.trainedModelName', 'type': 'object'}, + 'trained_model_linked_service_name': {'key': 'typeProperties.trainedModelLinkedServiceName', 'type': 'LinkedServiceReference'}, + 'trained_model_file_path': {'key': 'typeProperties.trainedModelFilePath', 'type': 'object'}, + } + + def __init__(self, *, name: str, trained_model_name, trained_model_linked_service_name, trained_model_file_path, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, **kwargs) -> None: + super(AzureMLUpdateResourceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.trained_model_name = trained_model_name + self.trained_model_linked_service_name = trained_model_linked_service_name + self.trained_model_file_path = trained_model_file_path + self.type = 'AzureMLUpdateResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file.py index 381eef2be708..682b24fed830 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file.py @@ -15,12 +15,14 @@ class AzureMLWebServiceFile(Model): """Azure ML WebService Input/Output file. - :param file_path: The relative file path, including container name, in the - Azure Blob Storage specified by the LinkedService. Type: string (or - Expression with resultType string). + All required parameters must be populated in order to send to Azure. + + :param file_path: Required. The relative file path, including container + name, in the Azure Blob Storage specified by the LinkedService. Type: + string (or Expression with resultType string). :type file_path: object - :param linked_service_name: Reference to an Azure Storage LinkedService, - where Azure ML WebService Input/Output file located. + :param linked_service_name: Required. Reference to an Azure Storage + LinkedService, where Azure ML WebService Input/Output file located. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference """ @@ -35,7 +37,7 @@ class AzureMLWebServiceFile(Model): 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, } - def __init__(self, file_path, linked_service_name): - super(AzureMLWebServiceFile, self).__init__() - self.file_path = file_path - self.linked_service_name = linked_service_name + def __init__(self, **kwargs): + super(AzureMLWebServiceFile, self).__init__(**kwargs) + self.file_path = kwargs.get('file_path', None) + self.linked_service_name = kwargs.get('linked_service_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file_py3.py new file mode 100644 index 000000000000..abe75d9d9bf2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file_py3.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class AzureMLWebServiceFile(Model): + """Azure ML WebService Input/Output file. + + All required parameters must be populated in order to send to Azure. + + :param file_path: Required. The relative file path, including container + name, in the Azure Blob Storage specified by the LinkedService. Type: + string (or Expression with resultType string). + :type file_path: object + :param linked_service_name: Required. Reference to an Azure Storage + LinkedService, where Azure ML WebService Input/Output file located. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + """ + + _validation = { + 'file_path': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'file_path': {'key': 'filePath', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + } + + def __init__(self, *, file_path, linked_service_name, **kwargs) -> None: + super(AzureMLWebServiceFile, self).__init__(**kwargs) + self.file_path = file_path + self.linked_service_name = linked_service_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service.py index fe14ad2966e6..aedbdbb73eb5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service.py @@ -15,6 +15,8 @@ class AzureMySqlLinkedService(LinkedService): """Azure MySQL database linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,9 +31,9 @@ class AzureMySqlLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param connection_string: The connection string. Type: string, + :param connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param password: The Azure key vault secret reference of password in @@ -61,9 +63,9 @@ class AzureMySqlLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, connection_string, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, password=None, encrypted_credential=None): - super(AzureMySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.connection_string = connection_string - self.password = password - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(AzureMySqlLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'AzureMySql' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service_py3.py new file mode 100644 index 000000000000..57692275f564 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service_py3.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureMySqlLinkedService(LinkedService): + """Azure MySQL database linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(AzureMySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'AzureMySql' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_sink.py index 255a4c4fa89b..b3ee0bbc8645 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_sink.py @@ 
-15,6 +15,8 @@ class AzureMySqlSink(CopySink): """A copy activity Azure MySql sink. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -36,7 +38,7 @@ class AzureMySqlSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). @@ -58,7 +60,7 @@ class AzureMySqlSink(CopySink): 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } - def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None): - super(AzureMySqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.pre_copy_script = pre_copy_script + def __init__(self, **kwargs): + super(AzureMySqlSink, self).__init__(**kwargs) + self.pre_copy_script = kwargs.get('pre_copy_script', None) self.type = 'AzureMySqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_sink_py3.py new file mode 100644 index 000000000000..340c10f5988b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_sink_py3.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzureMySqlSink(CopySink): + """A copy activity Azure MySql sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. 
Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). + :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(AzureMySqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.pre_copy_script = pre_copy_script + self.type = 'AzureMySqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source.py index e76460749826..823336432567 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source.py @@ -15,6 +15,8 @@ class AzureMySqlSource(CopySource): """A copy activity Azure MySQL source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class AzureMySqlSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType string). 
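With the positional signatures removed, both module variants are now populated purely through keyword arguments. A short sketch for the AzureMySqlSink defined above, where the pre-copy statement is an illustrative placeholder:

    from azure.mgmt.datafactory.models import AzureMySqlSink

    # Clear a hypothetical staging table before the copy runs; any
    # expression object is also accepted, since preCopyScript is typed
    # as object.
    sink = AzureMySqlSink(pre_copy_script='TRUNCATE TABLE staging_orders')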
@@ -49,7 +51,7 @@ class AzureMySqlSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(AzureMySqlSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'AzureMySqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source_py3.py new file mode 100644 index 000000000000..7030738d2615 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AzureMySqlSource(CopySource): + """A copy activity Azure MySQL source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'AzureMySqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset.py index b8b80bf78b6f..8f5d43478089 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset.py @@ -15,6 +15,8 @@ class AzureMySqlTableDataset(Dataset): """The Azure MySQL database dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class AzureMySqlTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class AzureMySqlTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: The Azure MySQL database table name. Type: string (or Expression with resultType string). 
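A matching sketch for the source variant just defined; the query text is made up:

    from azure.mgmt.datafactory.models import AzureMySqlSource

    # query is optional (only type is enforced in _validation), so a
    # source with no query would also construct successfully.
    source = AzureMySqlSource(query='SELECT id, total FROM orders')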
@@ -64,7 +66,7 @@ class AzureMySqlTableDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): - super(AzureMySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name + def __init__(self, **kwargs): + super(AzureMySqlTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) self.type = 'AzureMySqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset_py3.py new file mode 100644 index 000000000000..7bd7eb6f17f8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureMySqlTableDataset(Dataset): + """The Azure MySQL database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Azure MySQL database table name. Type: string (or + Expression with resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(AzureMySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'AzureMySqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service.py index 99709300a746..92359d6d6a10 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service.py @@ -15,6 +15,8 @@ class AzurePostgreSqlLinkedService(LinkedService): """Azure PostgreSQL linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class AzurePostgreSqlLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. 
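Because linked_service_name is marked required in _validation, the dataset sketch below must pass a LinkedServiceReference; the reference name and table are hypothetical:

    from azure.mgmt.datafactory.models import (
        AzureMySqlTableDataset,
        LinkedServiceReference,
    )

    dataset = AzureMySqlTableDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='AzureMySqlLinkedService'),
        table_name='orders',
    )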
@@ -60,9 +62,9 @@ class AzurePostgreSqlLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, connection_string=None, password=None, encrypted_credential=None): - super(AzurePostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.connection_string = connection_string - self.password = password - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(AzurePostgreSqlLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'AzurePostgreSql' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service_py3.py new file mode 100644 index 000000000000..47f8f17980f8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzurePostgreSqlLinkedService(LinkedService): + """Azure PostgreSQL linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(AzurePostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'AzurePostgreSql' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink.py index 43c35505301d..6214e1ba1f22 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink.py @@ -15,6 +15,8 @@ class AzurePostgreSqlSink(CopySink): """A copy activity Azure PostgreSQL sink. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -36,7 +38,7 @@ class AzurePostgreSqlSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). 
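Note that, unlike AzureMySqlLinkedService earlier in this patch, connection_string is absent from _validation here, so only type is enforced. A sketch with a placeholder ODBC-style string:

    from azure.mgmt.datafactory.models import AzurePostgreSqlLinkedService

    # Placeholder connection string; in practice the password is often
    # supplied separately as an AzureKeyVaultSecretReference.
    pg_linked_service = AzurePostgreSqlLinkedService(
        connection_string='host=myserver;port=5432;database=mydb;uid=myuser',
    )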
@@ -58,7 +60,7 @@ class AzurePostgreSqlSink(CopySink): 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } - def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None): - super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.pre_copy_script = pre_copy_script + def __init__(self, **kwargs): + super(AzurePostgreSqlSink, self).__init__(**kwargs) + self.pre_copy_script = kwargs.get('pre_copy_script', None) self.type = 'AzurePostgreSqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink_py3.py new file mode 100644 index 000000000000..b7cd0ec51a29 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_sink_py3.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzurePostgreSqlSink(CopySink): + """A copy activity Azure PostgreSQL sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). 
+ :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.pre_copy_script = pre_copy_script + self.type = 'AzurePostgreSqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source.py index 137b34f516ea..e0cd62fd8028 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source.py @@ -15,6 +15,8 @@ class AzurePostgreSqlSource(CopySource): """A copy activity Azure PostgreSQL source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class AzurePostgreSqlSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
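And a sink sketch mirroring the MySQL one above, again with invented values:

    from azure.mgmt.datafactory.models import AzurePostgreSqlSink

    # write_batch_size is typed as object, so a plain int or an
    # expression object are both accepted.
    pg_sink = AzurePostgreSqlSink(
        pre_copy_script='TRUNCATE TABLE staging_events',
        write_batch_size=1000,
    )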
@@ -49,7 +51,7 @@ class AzurePostgreSqlSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(AzurePostgreSqlSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'AzurePostgreSqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source_py3.py new file mode 100644 index 000000000000..0362b0dca390 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AzurePostgreSqlSource(CopySource): + """A copy activity Azure PostgreSQL source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'AzurePostgreSqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset.py index 012f7a3f2036..933264b57a9b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset.py @@ -15,6 +15,8 @@ class AzurePostgreSqlTableDataset(Dataset): """Azure PostgreSQL dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class AzurePostgreSqlTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class AzurePostgreSqlTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: The table name of the Azure PostgreSQL database which includes both schema and table. 
Type: string (or Expression with @@ -74,9 +76,9 @@ class AzurePostgreSqlTableDataset(Dataset): 'azure_postgre_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, azure_postgre_sql_table_dataset_schema=None): - super(AzurePostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name - self.table = table - self.azure_postgre_sql_table_dataset_schema = azure_postgre_sql_table_dataset_schema + def __init__(self, **kwargs): + super(AzurePostgreSqlTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.azure_postgre_sql_table_dataset_schema = kwargs.get('azure_postgre_sql_table_dataset_schema', None) self.type = 'AzurePostgreSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset_py3.py new file mode 100644 index 000000000000..485dc3efb102 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset_py3.py @@ -0,0 +1,84 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzurePostgreSqlTableDataset(Dataset): + """Azure PostgreSQL dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. 
+ :type type: str + :param table_name: The table name of the Azure PostgreSQL database which + includes both schema and table. Type: string (or Expression with + resultType string). + :type table_name: object + :param table: The table name of the Azure PostgreSQL database. Type: + string (or Expression with resultType string). + :type table: object + :param azure_postgre_sql_table_dataset_schema: The schema name of the + Azure PostgreSQL database. Type: string (or Expression with resultType + string). + :type azure_postgre_sql_table_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'azure_postgre_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, azure_postgre_sql_table_dataset_schema=None, **kwargs) -> None: + super(AzurePostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.table = table + self.azure_postgre_sql_table_dataset_schema = azure_postgre_sql_table_dataset_schema + self.type = 'AzurePostgreSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink.py index c7ab9fdd8579..9f3a63db4978 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink.py @@ -15,6 +15,8 @@ class AzureQueueSink(CopySink): """A copy activity Azure Queue sink. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -36,7 +38,7 @@ class AzureQueueSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. 
:type type: str """ @@ -44,6 +46,16 @@ class AzureQueueSink(CopySink): 'type': {'required': True}, } - def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None): - super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AzureQueueSink, self).__init__(**kwargs) self.type = 'AzureQueueSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink_py3.py new file mode 100644 index 000000000000..db2fb60ddb1e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink_py3.py @@ -0,0 +1,61 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzureQueueSink(CopySink): + """A copy activity Azure Queue sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: + super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureQueueSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset.py index ea61b0305b24..1239bbad78fc 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset.py @@ -15,6 +15,8 @@ class AzureSearchIndexDataset(Dataset): """The Azure Search Index. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class AzureSearchIndexDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,10 +41,10 @@ class AzureSearchIndexDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param index_name: The name of the Azure Search Index. Type: string (or - Expression with resultType string). + :param index_name: Required. The name of the Azure Search Index. Type: + string (or Expression with resultType string). 
:type index_name: object """ @@ -65,7 +67,7 @@ class AzureSearchIndexDataset(Dataset): 'index_name': {'key': 'typeProperties.indexName', 'type': 'object'}, } - def __init__(self, linked_service_name, index_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None): - super(AzureSearchIndexDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.index_name = index_name + def __init__(self, **kwargs): + super(AzureSearchIndexDataset, self).__init__(**kwargs) + self.index_name = kwargs.get('index_name', None) self.type = 'AzureSearchIndex' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset_py3.py new file mode 100644 index 000000000000..da5e92dd2edd --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureSearchIndexDataset(Dataset): + """The Azure Search Index. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param index_name: Required. The name of the Azure Search Index. Type: + string (or Expression with resultType string). 
+ :type index_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'index_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'index_name': {'key': 'typeProperties.indexName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, index_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(AzureSearchIndexDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.index_name = index_name + self.type = 'AzureSearchIndex' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py index a024632fccf0..9aae64af8da0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py @@ -15,6 +15,8 @@ class AzureSearchIndexSink(CopySink): """A copy activity Azure Search Index sink. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -36,7 +38,7 @@ class AzureSearchIndexSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param write_behavior: Specify the write behavior when upserting documents into Azure Search Index. 
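# --- A minimal sketch constructing the AzureSearchIndexDataset shown above.
# linked_service_name and index_name are the required parameters; the
# reference name 'MyAzureSearchService' and index 'products' are hypothetical.
from azure.mgmt.datafactory.models import (
    AzureSearchIndexDataset, LinkedServiceReference)

search_dataset = AzureSearchIndexDataset(
    linked_service_name=LinkedServiceReference(
        reference_name='MyAzureSearchService'),
    index_name='products',  # Type: string (or Expression with resultType string)
)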
Possible values include: 'Merge', 'Upload' @@ -59,7 +61,7 @@ class AzureSearchIndexSink(CopySink): 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } - def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None): - super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.write_behavior = write_behavior + def __init__(self, **kwargs): + super(AzureSearchIndexSink, self).__init__(**kwargs) + self.write_behavior = kwargs.get('write_behavior', None) self.type = 'AzureSearchIndexSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py new file mode 100644 index 000000000000..3cd887a2512c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzureSearchIndexSink(CopySink): + """A copy activity Azure Search Index sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param write_behavior: Specify the write behavior when upserting documents + into Azure Search Index. 
Possible values include: 'Merge', 'Upload' + :type write_behavior: str or + ~azure.mgmt.datafactory.models.AzureSearchIndexWriteBehaviorType + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: + super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.write_behavior = write_behavior + self.type = 'AzureSearchIndexSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service.py index 6433182e9e36..782799cd5b28 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service.py @@ -15,6 +15,8 @@ class AzureSearchLinkedService(LinkedService): """Linked service for Windows Azure Search Service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,10 +31,10 @@ class AzureSearchLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param url: URL for Azure Search service. Type: string (or Expression with - resultType string). + :param url: Required. URL for Azure Search service. Type: string (or + Expression with resultType string). 
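# --- Sketch for the AzureSearchIndexSink above: beyond the base CopySink
# properties it only adds write_behavior, which accepts 'Merge' or 'Upload'.
# Values are illustrative.
from azure.mgmt.datafactory.models import AzureSearchIndexSink

search_sink = AzureSearchIndexSink(
    write_behavior='Merge',  # upsert into existing documents; 'Upload' replaces them
    write_batch_size=1000,
)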
:type url: object :param key: Admin Key for Azure Search service :type key: ~azure.mgmt.datafactory.models.SecretBase @@ -59,9 +61,9 @@ class AzureSearchLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, url, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, key=None, encrypted_credential=None): - super(AzureSearchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.url = url - self.key = key - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(AzureSearchLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.key = kwargs.get('key', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'AzureSearch' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service_py3.py new file mode 100644 index 000000000000..8589c3aead91 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureSearchLinkedService(LinkedService): + """Linked service for Windows Azure Search Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. URL for Azure Search service. Type: string (or + Expression with resultType string). + :type url: object + :param key: Admin Key for Azure Search service + :type key: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'key': {'key': 'typeProperties.key', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, key=None, encrypted_credential=None, **kwargs) -> None: + super(AzureSearchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.key = key + self.encrypted_credential = encrypted_credential + self.type = 'AzureSearch' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service.py index effb7eb59abc..0da66637a04f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service.py @@ -15,6 +15,8 @@ class AzureSqlDatabaseLinkedService(LinkedService): """Microsoft Azure SQL Database linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,9 +31,9 @@ class AzureSqlDatabaseLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param connection_string: The connection string. Type: string, + :param connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. 
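# --- Sketch for AzureSearchLinkedService: url is required, and the admin key
# is supplied as a SecretBase. SecureString is assumed from this same models
# package; the endpoint and key are placeholders.
from azure.mgmt.datafactory.models import (
    AzureSearchLinkedService, SecureString)

search_ls = AzureSearchLinkedService(
    url='https://contoso-search.search.windows.net',
    key=SecureString(value='<admin-key>'),
)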
:type connection_string: object :param password: The Azure key vault secret reference of password in @@ -74,12 +76,12 @@ class AzureSqlDatabaseLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, connection_string, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None): - super(AzureSqlDatabaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.connection_string = connection_string - self.password = password - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(AzureSqlDatabaseLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'AzureSqlDatabase' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service_py3.py new file mode 100644 index 000000000000..dbcf6c88b134 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service_py3.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureSqlDatabaseLinkedService(LinkedService): + """Microsoft Azure SQL Database linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. 
+ :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param service_principal_id: The ID of the service principal used to + authenticate against Azure SQL Database. Type: string (or Expression with + resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to + authenticate against Azure SQL Database. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: + super(AzureSqlDatabaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.password = password + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential + self.type = 'AzureSqlDatabase' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service.py index 848003cb5076..cc7c9d58d19f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service.py @@ -15,6 +15,8 @@ class AzureSqlDWLinkedService(LinkedService): """Azure SQL Data Warehouse linked service. + All required parameters must be populated in order to send to Azure. 
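# --- Sketch for the AzureSqlDatabaseLinkedService completed above. Only
# connection_string is required; per its docstring it may be a plain string,
# a SecureString, or an AzureKeyVaultSecretReference. Server and database
# names are hypothetical.
from azure.mgmt.datafactory.models import (
    AzureSqlDatabaseLinkedService, SecureString)

sql_db_ls = AzureSqlDatabaseLinkedService(
    connection_string=SecureString(
        value='Server=tcp:myserver.database.windows.net;Database=mydb;'),
)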
+ :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,9 +31,9 @@ class AzureSqlDWLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param connection_string: The connection string. Type: string, + :param connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object @@ -75,12 +77,12 @@ class AzureSqlDWLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, connection_string, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None): - super(AzureSqlDWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.connection_string = connection_string - self.password = password - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(AzureSqlDWLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'AzureSqlDW' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service_py3.py new file mode 100644 index 000000000000..5c75f3904b37 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service_py3.py @@ -0,0 +1,88 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureSqlDWLinkedService(LinkedService): + """Azure SQL Data Warehouse linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. 
+ :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. Type: string, SecureString + or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param service_principal_id: The ID of the service principal used to + authenticate against Azure SQL Data Warehouse. Type: string (or Expression + with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to + authenticate against Azure SQL Data Warehouse. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: + super(AzureSqlDWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.password = password + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential + self.type = 'AzureSqlDW' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py index d9f0afadea50..ed9fe8904d73 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py @@ -15,6 +15,8 @@ class AzureSqlDWTableDataset(Dataset): """The Azure SQL Data Warehouse dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class AzureSqlDWTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class AzureSqlDWTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. @@ -72,9 +74,9 @@ class AzureSqlDWTableDataset(Dataset): 'table': {'key': 'typeProperties.table', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, azure_sql_dw_table_dataset_schema=None, table=None): - super(AzureSqlDWTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name - self.azure_sql_dw_table_dataset_schema = azure_sql_dw_table_dataset_schema - self.table = table + def __init__(self, **kwargs): + super(AzureSqlDWTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.azure_sql_dw_table_dataset_schema = kwargs.get('azure_sql_dw_table_dataset_schema', None) + self.table = kwargs.get('table', None) self.type = 'AzureSqlDWTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py new file mode 100644 index 000000000000..a38e4ab479c9 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
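# --- Sketch for the AzureSqlDWLinkedService defined above, using the
# service-principal properties it documents (service_principal_id,
# service_principal_key, tenant). All identifiers are placeholders.
from azure.mgmt.datafactory.models import (
    AzureSqlDWLinkedService, SecureString)

sql_dw_ls = AzureSqlDWLinkedService(
    connection_string='Server=tcp:mydw.database.windows.net;Database=mydw;',
    service_principal_id='<app-id>',
    service_principal_key=SecureString(value='<app-secret>'),
    tenant='<tenant-id>',
)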
+# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureSqlDWTableDataset(Dataset): + """The Azure SQL Data Warehouse dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param azure_sql_dw_table_dataset_schema: The schema name of the Azure SQL + Data Warehouse. Type: string (or Expression with resultType string). + :type azure_sql_dw_table_dataset_schema: object + :param table: The table name of the Azure SQL Data Warehouse. Type: string + (or Expression with resultType string). 
+ :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'azure_sql_dw_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, azure_sql_dw_table_dataset_schema=None, table=None, **kwargs) -> None: + super(AzureSqlDWTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.azure_sql_dw_table_dataset_schema = azure_sql_dw_table_dataset_schema + self.table = table + self.type = 'AzureSqlDWTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service.py index 61fd2b4a086c..2aab3a145ff2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service.py @@ -15,6 +15,8 @@ class AzureSqlMILinkedService(LinkedService): """Azure SQL Managed Instance linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,9 +31,9 @@ class AzureSqlMILinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param connection_string: The connection string. Type: string, + :param connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. 
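# --- Sketch for the AzureSqlDWTableDataset above. Since table_name is being
# retired, the split schema + table properties are used instead; the linked
# service reference and object names are hypothetical.
from azure.mgmt.datafactory.models import (
    AzureSqlDWTableDataset, LinkedServiceReference)

dw_table = AzureSqlDWTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='MySqlDW'),
    azure_sql_dw_table_dataset_schema='dbo',
    table='FactSales',
)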
:type connection_string: object :param password: The Azure key vault secret reference of password in @@ -74,12 +76,12 @@ class AzureSqlMILinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, connection_string, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None): - super(AzureSqlMILinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.connection_string = connection_string - self.password = password - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(AzureSqlMILinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'AzureSqlMI' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service_py3.py new file mode 100644 index 000000000000..ec1a2e5e8549 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service_py3.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureSqlMILinkedService(LinkedService): + """Azure SQL Managed Instance linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. 
+ :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param service_principal_id: The ID of the service principal used to + authenticate against Azure SQL Managed Instance. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to + authenticate against Azure SQL Managed Instance. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: + super(AzureSqlMILinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.password = password + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential + self.type = 'AzureSqlMI' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset.py index ea3b9da76b59..1128a9e8cb06 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset.py @@ -15,6 +15,8 @@ class AzureSqlMITableDataset(Dataset): """The Azure SQL Managed Instance dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class AzureSqlMITableDataset(Dataset): dataset. 
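# --- Sketch for the AzureSqlMILinkedService completed above, keeping the
# password in Azure Key Vault via AzureKeyVaultSecretReference as its
# docstring allows. The Key Vault linked service and secret name are
# hypothetical.
from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference, AzureSqlMILinkedService,
    LinkedServiceReference)

sql_mi_ls = AzureSqlMILinkedService(
    connection_string='Server=mymi.public.dns;Database=mydb;User ID=etl;',
    password=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='MyKeyVaultLinkedService'),
        secret_name='sql-mi-password'),
)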
Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class AzureSqlMITableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. @@ -72,9 +74,9 @@ class AzureSqlMITableDataset(Dataset): 'table': {'key': 'typeProperties.table', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, azure_sql_mi_table_dataset_schema=None, table=None): - super(AzureSqlMITableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name - self.azure_sql_mi_table_dataset_schema = azure_sql_mi_table_dataset_schema - self.table = table + def __init__(self, **kwargs): + super(AzureSqlMITableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.azure_sql_mi_table_dataset_schema = kwargs.get('azure_sql_mi_table_dataset_schema', None) + self.table = kwargs.get('table', None) self.type = 'AzureSqlMITable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset_py3.py new file mode 100644 index 000000000000..ac72614e3ed4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureSqlMITableDataset(Dataset): + """The Azure SQL Managed Instance dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. 
+ :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param azure_sql_mi_table_dataset_schema: The schema name of the Azure SQL + Managed Instance. Type: string (or Expression with resultType string). + :type azure_sql_mi_table_dataset_schema: object + :param table: The table name of the Azure SQL Managed Instance dataset. + Type: string (or Expression with resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'azure_sql_mi_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, azure_sql_mi_table_dataset_schema=None, table=None, **kwargs) -> None: + super(AzureSqlMITableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.azure_sql_mi_table_dataset_schema = azure_sql_mi_table_dataset_schema + self.table = table + self.type = 'AzureSqlMITable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink.py index 2658443950a3..5d93df3d790a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink.py @@ -15,6 +15,8 @@ class AzureSqlSink(CopySink): """A copy activity Azure SQL sink. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -36,7 +38,7 @@ class AzureSqlSink(CopySink): for the sink data store. 
Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). @@ -80,12 +82,12 @@ class AzureSqlSink(CopySink): 'table_option': {'key': 'tableOption', 'type': 'object'}, } - def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None): - super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name - self.sql_writer_table_type = sql_writer_table_type - self.pre_copy_script = pre_copy_script - self.stored_procedure_parameters = stored_procedure_parameters - self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name - self.table_option = table_option + def __init__(self, **kwargs): + super(AzureSqlSink, self).__init__(**kwargs) + self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) + self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.table_option = kwargs.get('table_option', None) self.type = 'AzureSqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink_py3.py new file mode 100644 index 000000000000..e4d5e66e18c5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink_py3.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzureSqlSink(CopySink): + """A copy activity Azure SQL sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. 
Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). 
+ :type table_option: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None, **kwargs) -> None: + super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name + self.sql_writer_table_type = sql_writer_table_type + self.pre_copy_script = pre_copy_script + self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.table_option = table_option + self.type = 'AzureSqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source.py index 6595bded5f78..b6c62f9a3164 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source.py @@ -15,6 +15,8 @@ class AzureSqlSource(CopySource): """A copy activity Azure SQL source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class AzureSqlSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). 
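A minimal sketch of the AzureSqlSink constructor above, using only parameters shown in its signature; the pre-copy script is hypothetical, and per the docstring only 'autoCreate' is currently supported for table_option.

    from azure.mgmt.datafactory.models import AzureSqlSink

    sink = AzureSqlSink(
        pre_copy_script='TRUNCATE TABLE dbo.Staging',  # hypothetical script
        table_option='autoCreate',
        write_batch_size=10000,  # expression-style object; a plain int works
    )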
@@ -62,10 +64,10 @@ class AzureSqlSource(CopySource): 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None): - super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.sql_reader_query = sql_reader_query - self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name - self.stored_procedure_parameters = stored_procedure_parameters - self.produce_additional_types = produce_additional_types + def __init__(self, **kwargs): + super(AzureSqlSource, self).__init__(**kwargs) + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.produce_additional_types = kwargs.get('produce_additional_types', None) self.type = 'AzureSqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source_py3.py new file mode 100644 index 000000000000..cb5c33d28bb2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AzureSqlSource(CopySource): + """A copy activity Azure SQL source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Database source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). 
+ :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. + :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: + super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.produce_additional_types = produce_additional_types + self.type = 'AzureSqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py index 29e98f4e0525..ce8b08944f3a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py @@ -15,6 +15,8 @@ class AzureSqlTableDataset(Dataset): """The Azure SQL Server database dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class AzureSqlTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class AzureSqlTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: This property will be retired. 
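A sketch of the AzureSqlSource above. Per the docstring, sql_reader_query and sql_reader_stored_procedure_name cannot be used together; both values below are hypothetical, and the parameter dict mirrors the docstring's "{Parameter1: {value: "1", type: "int"}}" example.

    from azure.mgmt.datafactory.models import (
        AzureSqlSource,
        StoredProcedureParameter,
    )

    # Query-based source:
    query_source = AzureSqlSource(sql_reader_query='SELECT * FROM dbo.Orders')

    # Stored-procedure-based source (mutually exclusive with the query form):
    proc_source = AzureSqlSource(
        sql_reader_stored_procedure_name='spGetOrders',
        stored_procedure_parameters={
            'Parameter1': StoredProcedureParameter(value='1', type='Int'),
        },
    )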
Please consider using schema + table properties instead. @@ -72,9 +74,9 @@ class AzureSqlTableDataset(Dataset): 'table': {'key': 'typeProperties.table', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, azure_sql_table_dataset_schema=None, table=None): - super(AzureSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name - self.azure_sql_table_dataset_schema = azure_sql_table_dataset_schema - self.table = table + def __init__(self, **kwargs): + super(AzureSqlTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.azure_sql_table_dataset_schema = kwargs.get('azure_sql_table_dataset_schema', None) + self.table = kwargs.get('table', None) self.type = 'AzureSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py new file mode 100644 index 000000000000..3ed19ee47e7e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureSqlTableDataset(Dataset): + """The Azure SQL Server database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. 
+ :type table_name: object + :param azure_sql_table_dataset_schema: The schema name of the Azure SQL + database. Type: string (or Expression with resultType string). + :type azure_sql_table_dataset_schema: object + :param table: The table name of the Azure SQL database. Type: string (or + Expression with resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'azure_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, azure_sql_table_dataset_schema=None, table=None, **kwargs) -> None: + super(AzureSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.azure_sql_table_dataset_schema = azure_sql_table_dataset_schema + self.table = table + self.type = 'AzureSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service.py index b5da68a504f2..202dd7229b90 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service.py @@ -15,6 +15,8 @@ class AzureStorageLinkedService(LinkedService): """The storage account linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class AzureStorageLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param connection_string: The connection string. It is mutually exclusive with sasUri property. 
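A usage sketch for the AzureSqlTableDataset above, again showing the schema/table pair that supersedes table_name; all names are hypothetical.

    from azure.mgmt.datafactory.models import (
        AzureSqlTableDataset,
        LinkedServiceReference,
    )

    dataset = AzureSqlTableDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='AzureSqlDbLinkedService'  # hypothetical
        ),
        azure_sql_table_dataset_schema='dbo',  # typeProperties.schema
        table='Customers',                     # typeProperties.table
    )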
Type: string, SecureString or @@ -71,11 +73,11 @@ class AzureStorageLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, } - def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, connection_string=None, account_key=None, sas_uri=None, sas_token=None, encrypted_credential=None): - super(AzureStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.connection_string = connection_string - self.account_key = account_key - self.sas_uri = sas_uri - self.sas_token = sas_token - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(AzureStorageLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.account_key = kwargs.get('account_key', None) + self.sas_uri = kwargs.get('sas_uri', None) + self.sas_token = kwargs.get('sas_token', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'AzureStorage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service_py3.py new file mode 100644 index 000000000000..4fac19b70849 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service_py3.py @@ -0,0 +1,83 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureStorageLinkedService(LinkedService): + """The storage account linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: The connection string. It is mutually exclusive + with sasUri property. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param account_key: The Azure key vault secret reference of accountKey in + connection string. + :type account_key: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param sas_uri: SAS URI of the Azure Storage resource. It is mutually + exclusive with connectionString property. 
Type: string, SecureString or + AzureKeyVaultSecretReference. + :type sas_uri: object + :param sas_token: The Azure key vault secret reference of sasToken in sas + uri. + :type sas_token: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, + 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, account_key=None, sas_uri=None, sas_token=None, encrypted_credential: str=None, **kwargs) -> None: + super(AzureStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.account_key = account_key + self.sas_uri = sas_uri + self.sas_token = sas_token + self.encrypted_credential = encrypted_credential + self.type = 'AzureStorage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset.py index d9ebbf0858b7..eb8dacbfbb98 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset.py @@ -15,6 +15,8 @@ class AzureTableDataset(Dataset): """The Azure Table storage dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class AzureTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,10 +41,10 @@ class AzureTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. 
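A sketch of the AzureStorageLinkedService above. connection_string and sas_uri are mutually exclusive per the docstring; the SecureString value below is a placeholder, not a real credential.

    from azure.mgmt.datafactory.models import (
        AzureStorageLinkedService,
        SecureString,
    )

    storage_ls = AzureStorageLinkedService(
        connection_string=SecureString(
            value='DefaultEndpointsProtocol=https;AccountName=<account>;AccountKey=<key>'
        ),
    )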
:type type: str - :param table_name: The table name of the Azure Table storage. Type: string - (or Expression with resultType string). + :param table_name: Required. The table name of the Azure Table storage. + Type: string (or Expression with resultType string). :type table_name: object """ @@ -65,7 +67,7 @@ class AzureTableDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, linked_service_name, table_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None): - super(AzureTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name + def __init__(self, **kwargs): + super(AzureTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) self.type = 'AzureTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset_py3.py new file mode 100644 index 000000000000..d70a15fdd6f1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureTableDataset(Dataset): + """The Azure Table storage dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: Required. The table name of the Azure Table storage. + Type: string (or Expression with resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'table_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(AzureTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'AzureTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink.py index 0a171bc176d3..3459c9ad3ba1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink.py @@ -15,6 +15,8 @@ class AzureTableSink(CopySink): """A copy activity Azure Table sink. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -36,7 +38,7 @@ class AzureTableSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param azure_table_default_partition_key_value: Azure Table default partition key value. Type: string (or Expression with resultType string). 
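A sketch for the AzureTableDataset above. Unlike the SQL datasets, table_name is required here (note the extra 'table_name': {'required': True} validation entry); names are hypothetical.

    from azure.mgmt.datafactory.models import (
        AzureTableDataset,
        LinkedServiceReference,
    )

    table_dataset = AzureTableDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='AzureTableStorageLinkedService'  # hypothetical
        ),
        table_name='MyEntities',  # required; serialized as typeProperties.tableName
    )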
@@ -70,10 +72,10 @@ class AzureTableSink(CopySink): 'azure_table_insert_type': {'key': 'azureTableInsertType', 'type': 'object'}, } - def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, azure_table_default_partition_key_value=None, azure_table_partition_key_name=None, azure_table_row_key_name=None, azure_table_insert_type=None): - super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.azure_table_default_partition_key_value = azure_table_default_partition_key_value - self.azure_table_partition_key_name = azure_table_partition_key_name - self.azure_table_row_key_name = azure_table_row_key_name - self.azure_table_insert_type = azure_table_insert_type + def __init__(self, **kwargs): + super(AzureTableSink, self).__init__(**kwargs) + self.azure_table_default_partition_key_value = kwargs.get('azure_table_default_partition_key_value', None) + self.azure_table_partition_key_name = kwargs.get('azure_table_partition_key_name', None) + self.azure_table_row_key_name = kwargs.get('azure_table_row_key_name', None) + self.azure_table_insert_type = kwargs.get('azure_table_insert_type', None) self.type = 'AzureTableSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink_py3.py new file mode 100644 index 000000000000..a15247544879 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink_py3.py @@ -0,0 +1,81 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzureTableSink(CopySink): + """A copy activity Azure Table sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). 
+ :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param azure_table_default_partition_key_value: Azure Table default + partition key value. Type: string (or Expression with resultType string). + :type azure_table_default_partition_key_value: object + :param azure_table_partition_key_name: Azure Table partition key name. + Type: string (or Expression with resultType string). + :type azure_table_partition_key_name: object + :param azure_table_row_key_name: Azure Table row key name. Type: string + (or Expression with resultType string). + :type azure_table_row_key_name: object + :param azure_table_insert_type: Azure Table insert type. Type: string (or + Expression with resultType string). + :type azure_table_insert_type: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'}, + 'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'}, + 'azure_table_row_key_name': {'key': 'azureTableRowKeyName', 'type': 'object'}, + 'azure_table_insert_type': {'key': 'azureTableInsertType', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, azure_table_default_partition_key_value=None, azure_table_partition_key_name=None, azure_table_row_key_name=None, azure_table_insert_type=None, **kwargs) -> None: + super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.azure_table_default_partition_key_value = azure_table_default_partition_key_value + self.azure_table_partition_key_name = azure_table_partition_key_name + self.azure_table_row_key_name = azure_table_row_key_name + self.azure_table_insert_type = azure_table_insert_type + self.type = 'AzureTableSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source.py index 74cd88d06fe5..fa7ead73eaa9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source.py @@ -15,6 +15,8 @@ class AzureTableSource(CopySource): """A copy activity Azure Table source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class AzureTableSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). 
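A minimal sketch of the AzureTableSink above; all values are hypothetical expression-style strings for the parameters shown in its signature.

    from azure.mgmt.datafactory.models import AzureTableSink

    table_sink = AzureTableSink(
        azure_table_partition_key_name='PartitionKey',
        azure_table_row_key_name='RowKey',
        azure_table_insert_type='merge',  # assumed insert-mode value
    )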
:type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param azure_table_source_query: Azure Table source query. Type: string (or Expression with resultType string). @@ -54,8 +56,8 @@ class AzureTableSource(CopySource): 'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, azure_table_source_query=None, azure_table_source_ignore_table_not_found=None): - super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.azure_table_source_query = azure_table_source_query - self.azure_table_source_ignore_table_not_found = azure_table_source_ignore_table_not_found + def __init__(self, **kwargs): + super(AzureTableSource, self).__init__(**kwargs) + self.azure_table_source_query = kwargs.get('azure_table_source_query', None) + self.azure_table_source_ignore_table_not_found = kwargs.get('azure_table_source_ignore_table_not_found', None) self.type = 'AzureTableSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source_py3.py new file mode 100644 index 000000000000..efbac5613219 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source_py3.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AzureTableSource(CopySource): + """A copy activity Azure Table source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param azure_table_source_query: Azure Table source query. Type: string + (or Expression with resultType string). + :type azure_table_source_query: object + :param azure_table_source_ignore_table_not_found: Azure Table source + ignore table not found. Type: boolean (or Expression with resultType + boolean). 
+ :type azure_table_source_ignore_table_not_found: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'}, + 'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, azure_table_source_query=None, azure_table_source_ignore_table_not_found=None, **kwargs) -> None: + super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.azure_table_source_query = azure_table_source_query + self.azure_table_source_ignore_table_not_found = azure_table_source_ignore_table_not_found + self.type = 'AzureTableSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service.py index b120959ddc85..c2a8c2498ea6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service.py @@ -15,6 +15,8 @@ class AzureTableStorageLinkedService(LinkedService): """The azure table storage linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class AzureTableStorageLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param connection_string: The connection string. It is mutually exclusive with sasUri property. 
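A sketch of the AzureTableSource above; the query string is a hypothetical Azure Table filter expression.

    from azure.mgmt.datafactory.models import AzureTableSource

    table_source = AzureTableSource(
        azure_table_source_query="PartitionKey eq 'region-01'",
        azure_table_source_ignore_table_not_found=True,
    )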
Type: string, SecureString or @@ -71,11 +73,11 @@ class AzureTableStorageLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, } - def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, connection_string=None, account_key=None, sas_uri=None, sas_token=None, encrypted_credential=None): - super(AzureTableStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.connection_string = connection_string - self.account_key = account_key - self.sas_uri = sas_uri - self.sas_token = sas_token - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(AzureTableStorageLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.account_key = kwargs.get('account_key', None) + self.sas_uri = kwargs.get('sas_uri', None) + self.sas_token = kwargs.get('sas_token', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'AzureTableStorage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service_py3.py new file mode 100644 index 000000000000..8d4e62c4f3e6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service_py3.py @@ -0,0 +1,83 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureTableStorageLinkedService(LinkedService): + """The azure table storage linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: The connection string. It is mutually exclusive + with sasUri property. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param account_key: The Azure key vault secret reference of accountKey in + connection string. + :type account_key: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param sas_uri: SAS URI of the Azure Storage resource. It is mutually + exclusive with connectionString property. 
Type: string, SecureString or + AzureKeyVaultSecretReference. + :type sas_uri: object + :param sas_token: The Azure key vault secret reference of sasToken in sas + uri. + :type sas_token: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, + 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, account_key=None, sas_uri=None, sas_token=None, encrypted_credential: str=None, **kwargs) -> None: + super(AzureTableStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.account_key = account_key + self.sas_uri = sas_uri + self.sas_token = sas_token + self.encrypted_credential = encrypted_credential + self.type = 'AzureTableStorage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset.py index 8a6b1456862e..5f0f8ef96696 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset.py @@ -15,6 +15,8 @@ class BinaryDataset(Dataset): """Binary dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class BinaryDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,9 +41,9 @@ class BinaryDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param location: The location of the Binary storage. 
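A sketch of the sas_uri variant of the AzureTableStorageLinkedService above (mutually exclusive with connection_string per the docstring); the URI is a placeholder.

    from azure.mgmt.datafactory.models import (
        AzureTableStorageLinkedService,
        SecureString,
    )

    table_ls = AzureTableStorageLinkedService(
        sas_uri=SecureString(
            value='https://<account>.table.core.windows.net/?sv=<sas-token>'
        ),
    )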
+ :param location: Required. The location of the Binary storage. :type location: ~azure.mgmt.datafactory.models.DatasetLocation :param compression: The data compression method used for the binary dataset. @@ -68,8 +70,8 @@ class BinaryDataset(Dataset): 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } - def __init__(self, linked_service_name, location, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, compression=None): - super(BinaryDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.location = location - self.compression = compression + def __init__(self, **kwargs): + super(BinaryDataset, self).__init__(**kwargs) + self.location = kwargs.get('location', None) + self.compression = kwargs.get('compression', None) self.type = 'Binary' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset_py3.py new file mode 100644 index 000000000000..7d26b216fd7a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_dataset_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class BinaryDataset(Dataset): + """Binary dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the Binary storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param compression: The data compression method used for the binary + dataset. 
+ :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, compression=None, **kwargs) -> None: + super(BinaryDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.location = location + self.compression = compression + self.type = 'Binary' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink.py index 4c8c886fdded..b991bfee53c7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink.py @@ -15,6 +15,8 @@ class BinarySink(CopySink): """A copy activity Binary sink. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -36,7 +38,7 @@ class BinarySink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param store_settings: Binary store settings. 
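A sketch of the BinaryDataset above, where both linked_service_name and location are required. The DatasetLocation type string and fields below are assumptions about the location model of this SDK vintage, and all names are placeholders.

    from azure.mgmt.datafactory.models import (
        BinaryDataset,
        DatasetLocation,
        LinkedServiceReference,
    )

    binary_dataset = BinaryDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='MyBlobStorageLinkedService'  # hypothetical
        ),
        location=DatasetLocation(
            type='AzureBlobStorageLocation',  # assumed location kind
            folder_path='container/inbound',  # assumed field
            file_name='payload.bin',          # assumed field
        ),
    )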
:type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings @@ -57,7 +59,7 @@ class BinarySink(CopySink): 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, } - def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None): - super(BinarySink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.store_settings = store_settings + def __init__(self, **kwargs): + super(BinarySink, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) self.type = 'BinarySink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink_py3.py new file mode 100644 index 000000000000..80421d161aed --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_sink_py3.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class BinarySink(CopySink): + """A copy activity Binary sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Binary store settings. 
+ :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + super(BinarySink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.type = 'BinarySink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source.py index de4d6d964e12..48e78e7d24bf 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source.py @@ -15,6 +15,8 @@ class BinarySource(CopySource): """A copy activity Binary source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class BinarySource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param store_settings: Binary store settings. 
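(Illustrative aside, not part of this patch: a sketch of wiring the new Binary source and sink into a copy activity; CopyActivity and DatasetReference come from the same models package, and the activity and dataset names are placeholders.)

    from azure.mgmt.datafactory.models import (
        BinarySink, BinarySource, CopyActivity, DatasetReference)

    copy = CopyActivity(
        name='CopyBinaryFiles',
        inputs=[DatasetReference(reference_name='SourceBinaryDataset')],
        outputs=[DatasetReference(reference_name='SinkBinaryDataset')],
        source=BinarySource(max_concurrent_connections=4),
        # sink_retry_wait must match the documented timespan pattern, e.g. '00:00:30'.
        sink=BinarySink(sink_retry_count=2, sink_retry_wait='00:00:30'))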
:type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings @@ -48,7 +50,7 @@ class BinarySource(CopySource): 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None): - super(BinarySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.store_settings = store_settings + def __init__(self, **kwargs): + super(BinarySource, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) self.type = 'BinarySource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source_py3.py new file mode 100644 index 000000000000..aa9a9f1412ab --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/binary_source_py3.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class BinarySource(CopySource): + """A copy activity Binary source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Binary store settings. 
+ :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + super(BinarySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.type = 'BinarySource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger.py index 9caa7f29f6d2..673d34167fed 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger.py @@ -18,6 +18,8 @@ class BlobEventsTrigger(MultiplePipelineTrigger): Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,7 +33,7 @@ class BlobEventsTrigger(MultiplePipelineTrigger): :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param pipelines: Pipelines that need to be started. :type pipelines: @@ -47,9 +49,10 @@ class BlobEventsTrigger(MultiplePipelineTrigger): fire the trigger for blobs named boxes in a december folder. At least one of these must be provided: blobPathBeginsWith, blobPathEndsWith. :type blob_path_ends_with: str - :param events: The type of events that cause this trigger to fire. + :param events: Required. The type of events that cause this trigger to + fire. :type events: list[str or ~azure.mgmt.datafactory.models.BlobEventTypes] - :param scope: The ARM resource ID of the Storage Account. + :param scope: Required. The ARM resource ID of the Storage Account. 
:type scope: str """ @@ -73,10 +76,10 @@ class BlobEventsTrigger(MultiplePipelineTrigger): 'scope': {'key': 'typeProperties.scope', 'type': 'str'}, } - def __init__(self, events, scope, additional_properties=None, description=None, annotations=None, pipelines=None, blob_path_begins_with=None, blob_path_ends_with=None): - super(BlobEventsTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines) - self.blob_path_begins_with = blob_path_begins_with - self.blob_path_ends_with = blob_path_ends_with - self.events = events - self.scope = scope + def __init__(self, **kwargs): + super(BlobEventsTrigger, self).__init__(**kwargs) + self.blob_path_begins_with = kwargs.get('blob_path_begins_with', None) + self.blob_path_ends_with = kwargs.get('blob_path_ends_with', None) + self.events = kwargs.get('events', None) + self.scope = kwargs.get('scope', None) self.type = 'BlobEventsTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger_py3.py new file mode 100644 index 000000000000..fb65a420a2cd --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger_py3.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .multiple_pipeline_trigger_py3 import MultiplePipelineTrigger + + +class BlobEventsTrigger(MultiplePipelineTrigger): + """Trigger that runs every time a Blob event occurs. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param pipelines: Pipelines that need to be started. + :type pipelines: + list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :param blob_path_begins_with: The blob path must begin with the pattern + provided for trigger to fire. For example, '/records/blobs/december/' will + only fire the trigger for blobs in the december folder under the records + container. At least one of these must be provided: blobPathBeginsWith, + blobPathEndsWith. + :type blob_path_begins_with: str + :param blob_path_ends_with: The blob path must end with the pattern + provided for trigger to fire. 
For example, 'december/boxes.csv' will only + fire the trigger for blobs named boxes in a december folder. At least one + of these must be provided: blobPathBeginsWith, blobPathEndsWith. + :type blob_path_ends_with: str + :param events: Required. The type of events that cause this trigger to + fire. + :type events: list[str or ~azure.mgmt.datafactory.models.BlobEventTypes] + :param scope: Required. The ARM resource ID of the Storage Account. + :type scope: str + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'events': {'required': True}, + 'scope': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'blob_path_begins_with': {'key': 'typeProperties.blobPathBeginsWith', 'type': 'str'}, + 'blob_path_ends_with': {'key': 'typeProperties.blobPathEndsWith', 'type': 'str'}, + 'events': {'key': 'typeProperties.events', 'type': '[str]'}, + 'scope': {'key': 'typeProperties.scope', 'type': 'str'}, + } + + def __init__(self, *, events, scope: str, additional_properties=None, description: str=None, annotations=None, pipelines=None, blob_path_begins_with: str=None, blob_path_ends_with: str=None, **kwargs) -> None: + super(BlobEventsTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) + self.blob_path_begins_with = blob_path_begins_with + self.blob_path_ends_with = blob_path_ends_with + self.events = events + self.scope = scope + self.type = 'BlobEventsTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py index df4ac9146607..284e0fcecde5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py @@ -15,6 +15,8 @@ class BlobSink(CopySink): """A copy activity Azure Blob sink. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -36,7 +38,7 @@ class BlobSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param blob_writer_overwrite_files: Blob writer overwrite files. Type: boolean (or Expression with resultType boolean). 
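(Illustrative aside, not part of this patch: constructing the BlobEventsTrigger defined above with its newly required events and scope. The scope is a placeholder ARM resource ID, and the event string is the serialized form of a BlobEventTypes enum value.)

    from azure.mgmt.datafactory.models import (
        BlobEventsTrigger, PipelineReference, TriggerPipelineReference)

    trigger = BlobEventsTrigger(
        events=['Microsoft.Storage.BlobCreated'],
        scope='/subscriptions/<sub-id>/resourceGroups/<rg>/providers/'
              'Microsoft.Storage/storageAccounts/<account>',
        blob_path_begins_with='/records/blobs/december/',
        pipelines=[TriggerPipelineReference(
            pipeline_reference=PipelineReference(reference_name='ProcessNewBlob'))])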
@@ -69,10 +71,10 @@ class BlobSink(CopySink): 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } - def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, blob_writer_overwrite_files=None, blob_writer_date_time_format=None, blob_writer_add_header=None, copy_behavior=None): - super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.blob_writer_overwrite_files = blob_writer_overwrite_files - self.blob_writer_date_time_format = blob_writer_date_time_format - self.blob_writer_add_header = blob_writer_add_header - self.copy_behavior = copy_behavior + def __init__(self, **kwargs): + super(BlobSink, self).__init__(**kwargs) + self.blob_writer_overwrite_files = kwargs.get('blob_writer_overwrite_files', None) + self.blob_writer_date_time_format = kwargs.get('blob_writer_date_time_format', None) + self.blob_writer_add_header = kwargs.get('blob_writer_add_header', None) + self.copy_behavior = kwargs.get('copy_behavior', None) self.type = 'BlobSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py new file mode 100644 index 000000000000..370acc72e017 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py @@ -0,0 +1,80 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class BlobSink(CopySink): + """A copy activity Azure Blob sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param blob_writer_overwrite_files: Blob writer overwrite files. 
Type: + boolean (or Expression with resultType boolean). + :type blob_writer_overwrite_files: object + :param blob_writer_date_time_format: Blob writer date time format. Type: + string (or Expression with resultType string). + :type blob_writer_date_time_format: object + :param blob_writer_add_header: Blob writer add header. Type: boolean (or + Expression with resultType boolean). + :type blob_writer_add_header: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, + 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, + 'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, blob_writer_overwrite_files=None, blob_writer_date_time_format=None, blob_writer_add_header=None, copy_behavior=None, **kwargs) -> None: + super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.blob_writer_overwrite_files = blob_writer_overwrite_files + self.blob_writer_date_time_format = blob_writer_date_time_format + self.blob_writer_add_header = blob_writer_add_header + self.copy_behavior = copy_behavior + self.type = 'BlobSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source.py index 0e2ef494a7b3..ab4313a890cb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source.py @@ -15,6 +15,8 @@ class BlobSource(CopySource): """A copy activity Azure Blob source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class BlobSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType boolean). 
@@ -58,9 +60,9 @@ class BlobSource(CopySource): 'recursive': {'key': 'recursive', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, treat_empty_as_null=None, skip_header_line_count=None, recursive=None): - super(BlobSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.treat_empty_as_null = treat_empty_as_null - self.skip_header_line_count = skip_header_line_count - self.recursive = recursive + def __init__(self, **kwargs): + super(BlobSource, self).__init__(**kwargs) + self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None) + self.skip_header_line_count = kwargs.get('skip_header_line_count', None) + self.recursive = kwargs.get('recursive', None) self.type = 'BlobSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source_py3.py new file mode 100644 index 000000000000..78d90cc61e13 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source_py3.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class BlobSource(CopySource): + """A copy activity Azure Blob source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param treat_empty_as_null: Treat empty as null. Type: boolean (or + Expression with resultType boolean). + :type treat_empty_as_null: object + :param skip_header_line_count: Number of header lines to skip from each + blob. Type: integer (or Expression with resultType integer). + :type skip_header_line_count: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). 
+ :type recursive: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, + 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, treat_empty_as_null=None, skip_header_line_count=None, recursive=None, **kwargs) -> None: + super(BlobSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.treat_empty_as_null = treat_empty_as_null + self.skip_header_line_count = skip_header_line_count + self.recursive = recursive + self.type = 'BlobSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger.py index 981fc4aa3b66..4fd5b5c17100 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger.py @@ -18,6 +18,8 @@ class BlobTrigger(MultiplePipelineTrigger): Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,18 +33,19 @@ class BlobTrigger(MultiplePipelineTrigger): :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param pipelines: Pipelines that need to be started. :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - :param folder_path: The path of the container/folder that will trigger the - pipeline. + :param folder_path: Required. The path of the container/folder that will + trigger the pipeline. :type folder_path: str - :param max_concurrency: The max number of parallel files to handle when it - is triggered. + :param max_concurrency: Required. The max number of parallel files to + handle when it is triggered. :type max_concurrency: int - :param linked_service: The Azure Storage linked service reference. + :param linked_service: Required. The Azure Storage linked service + reference. 
:type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference """ @@ -67,9 +70,9 @@ class BlobTrigger(MultiplePipelineTrigger): 'linked_service': {'key': 'typeProperties.linkedService', 'type': 'LinkedServiceReference'}, } - def __init__(self, folder_path, max_concurrency, linked_service, additional_properties=None, description=None, annotations=None, pipelines=None): - super(BlobTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines) - self.folder_path = folder_path - self.max_concurrency = max_concurrency - self.linked_service = linked_service + def __init__(self, **kwargs): + super(BlobTrigger, self).__init__(**kwargs) + self.folder_path = kwargs.get('folder_path', None) + self.max_concurrency = kwargs.get('max_concurrency', None) + self.linked_service = kwargs.get('linked_service', None) self.type = 'BlobTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger_py3.py new file mode 100644 index 000000000000..cccffd881bfb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger_py3.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .multiple_pipeline_trigger_py3 import MultiplePipelineTrigger + + +class BlobTrigger(MultiplePipelineTrigger): + """Trigger that runs every time the selected Blob container changes. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param pipelines: Pipelines that need to be started. + :type pipelines: + list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :param folder_path: Required. The path of the container/folder that will + trigger the pipeline. + :type folder_path: str + :param max_concurrency: Required. The max number of parallel files to + handle when it is triggered. + :type max_concurrency: int + :param linked_service: Required. The Azure Storage linked service + reference. 
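(Illustrative aside, not part of this patch: the three newly required BlobTrigger parameters in one construction sketch; the folder path and linked-service name are placeholders.)

    from azure.mgmt.datafactory.models import BlobTrigger, LinkedServiceReference

    trigger = BlobTrigger(
        folder_path='records/blobs/december',
        max_concurrency=10,
        linked_service=LinkedServiceReference(reference_name='AzureStorageLS'))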
+ :type linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'folder_path': {'required': True}, + 'max_concurrency': {'required': True}, + 'linked_service': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'str'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + 'linked_service': {'key': 'typeProperties.linkedService', 'type': 'LinkedServiceReference'}, + } + + def __init__(self, *, folder_path: str, max_concurrency: int, linked_service, additional_properties=None, description: str=None, annotations=None, pipelines=None, **kwargs) -> None: + super(BlobTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) + self.folder_path = folder_path + self.max_concurrency = max_concurrency + self.linked_service = linked_service + self.type = 'BlobTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service.py index ea4b378af8fc..ebba2be42028 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service.py @@ -15,6 +15,8 @@ class CassandraLinkedService(LinkedService): """Linked service for Cassandra data source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,10 +31,10 @@ class CassandraLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param host: Host name for connection. Type: string (or Expression with - resultType string). + :param host: Required. Host name for connection. Type: string (or + Expression with resultType string). :type host: object :param authentication_type: AuthenticationType to be used for connection. Type: string (or Expression with resultType string). 
@@ -71,12 +73,12 @@ class CassandraLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, host, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, authentication_type=None, port=None, username=None, password=None, encrypted_credential=None): - super(CassandraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.host = host - self.authentication_type = authentication_type - self.port = port - self.username = username - self.password = password - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(CassandraLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.port = kwargs.get('port', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'Cassandra' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service_py3.py new file mode 100644 index 000000000000..f22f303cc61d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service_py3.py @@ -0,0 +1,84 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class CassandraLinkedService(LinkedService): + """Linked service for Cassandra data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. Host name for connection. Type: string (or + Expression with resultType string). + :type host: object + :param authentication_type: AuthenticationType to be used for connection. + Type: string (or Expression with resultType string). + :type authentication_type: object + :param port: The port for the connection. Type: integer (or Expression + with resultType integer). + :type port: object + :param username: Username for authentication. Type: string (or Expression + with resultType string). 
+ :type username: object + :param password: Password for authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, port=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(CassandraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.authentication_type = authentication_type + self.port = port + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'Cassandra' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py index 28cccb9c05fe..e7ba96c18682 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py @@ -15,6 +15,8 @@ class CassandraSource(CopySource): """A copy activity source for a Cassandra database. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class CassandraSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: Database query. Should be a SQL-92 query expression or Cassandra Query Language (CQL) command. 
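(Illustrative aside, not part of this patch: constructing the Cassandra linked service with its newly required host. SecureString is the package's inline secret type; the host, credentials, and 'Basic' authentication value are placeholders.)

    from azure.mgmt.datafactory.models import CassandraLinkedService, SecureString

    linked_service = CassandraLinkedService(
        host='cassandra.contoso.internal',
        port=9042,
        authentication_type='Basic',
        username='loader',
        password=SecureString(value='<password>'))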
Type: string (or Expression with @@ -61,8 +63,8 @@ class CassandraSource(CopySource): 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, consistency_level=None): - super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query - self.consistency_level = consistency_level + def __init__(self, **kwargs): + super(CassandraSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.consistency_level = kwargs.get('consistency_level', None) self.type = 'CassandraSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py new file mode 100644 index 000000000000..bd95d158b868 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class CassandraSource(CopySource): + """A copy activity source for a Cassandra database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Should be a SQL-92 query expression or + Cassandra Query Language (CQL) command. Type: string (or Expression with + resultType string). + :type query: object + :param consistency_level: The consistency level specifies how many + Cassandra servers must respond to a read request before returning data to + the client application. Cassandra checks the specified number of Cassandra + servers for data to satisfy the read request. Must be one of + cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is + case-insensitive. 
Possible values include: 'ALL', 'EACH_QUORUM', 'QUORUM', + 'LOCAL_QUORUM', 'ONE', 'TWO', 'THREE', 'LOCAL_ONE', 'SERIAL', + 'LOCAL_SERIAL' + :type consistency_level: str or + ~azure.mgmt.datafactory.models.CassandraSourceReadConsistencyLevels + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, consistency_level=None, **kwargs) -> None: + super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.consistency_level = consistency_level + self.type = 'CassandraSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset.py index b1f9bb81e662..b89c324fd4d4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset.py @@ -15,6 +15,8 @@ class CassandraTableDataset(Dataset): """The Cassandra database dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class CassandraTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class CassandraTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: The table name of the Cassandra database. Type: string (or Expression with resultType string). 
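(Illustrative aside, not part of this patch: a CassandraSource sketch; consistency_level accepts any CassandraSourceReadConsistencyLevels value and defaults to 'ONE' as documented above. The CQL query is a placeholder.)

    from azure.mgmt.datafactory.models import CassandraSource

    source = CassandraSource(
        query='SELECT sensor_id, reading FROM telemetry.readings',
        consistency_level='LOCAL_QUORUM')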
@@ -68,8 +70,8 @@ class CassandraTableDataset(Dataset): 'keyspace': {'key': 'typeProperties.keyspace', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, keyspace=None): - super(CassandraTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name - self.keyspace = keyspace + def __init__(self, **kwargs): + super(CassandraTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.keyspace = kwargs.get('keyspace', None) self.type = 'CassandraTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset_py3.py new file mode 100644 index 000000000000..256358ce50cb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class CassandraTableDataset(Dataset): + """The Cassandra database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name of the Cassandra database. Type: string + (or Expression with resultType string). + :type table_name: object + :param keyspace: The keyspace of the Cassandra database. Type: string (or + Expression with resultType string). 
+ :type keyspace: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'keyspace': {'key': 'typeProperties.keyspace', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, keyspace=None, **kwargs) -> None: + super(CassandraTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.keyspace = keyspace + self.type = 'CassandraTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset.py index eb89a6b5b83f..c7cd4c315852 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset.py @@ -15,6 +15,8 @@ class CommonDataServiceForAppsEntityDataset(Dataset): """The Common Data Service for Apps entity dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class CommonDataServiceForAppsEntityDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class CommonDataServiceForAppsEntityDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param entity_name: The logical name of the entity. Type: string (or Expression with resultType string). 
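(Illustrative aside, not part of this patch: a CassandraTableDataset sketch with its newly required linked service reference; the keyspace, table, and linked-service names are placeholders.)

    from azure.mgmt.datafactory.models import (
        CassandraTableDataset, LinkedServiceReference)

    dataset = CassandraTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='CassandraLS'),
        keyspace='telemetry',
        table_name='readings')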
@@ -64,7 +66,7 @@ class CommonDataServiceForAppsEntityDataset(Dataset): 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, entity_name=None): - super(CommonDataServiceForAppsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.entity_name = entity_name + def __init__(self, **kwargs): + super(CommonDataServiceForAppsEntityDataset, self).__init__(**kwargs) + self.entity_name = kwargs.get('entity_name', None) self.type = 'CommonDataServiceForAppsEntity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset_py3.py new file mode 100644 index 000000000000..050bdb836963 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class CommonDataServiceForAppsEntityDataset(Dataset): + """The Common Data Service for Apps entity dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param entity_name: The logical name of the entity. Type: string (or + Expression with resultType string). 
+ :type entity_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, entity_name=None, **kwargs) -> None: + super(CommonDataServiceForAppsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.entity_name = entity_name + self.type = 'CommonDataServiceForAppsEntity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service.py index 5629ce8fb018..bbc8b7a0de65 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service.py @@ -15,6 +15,8 @@ class CommonDataServiceForAppsLinkedService(LinkedService): """Common Data Service for Apps linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,13 +31,13 @@ class CommonDataServiceForAppsLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param deployment_type: The deployment type of the Common Data Service for - Apps instance. 'Online' for Common Data Service for Apps Online and - 'OnPremisesWithIfd' for Common Data Service for Apps on-premises with Ifd. - Type: string (or Expression with resultType string). Possible values - include: 'Online', 'OnPremisesWithIfd' + :param deployment_type: Required. The deployment type of the Common Data + Service for Apps instance. 'Online' for Common Data Service for Apps + Online and 'OnPremisesWithIfd' for Common Data Service for Apps + on-premises with Ifd. Type: string (or Expression with resultType string). + Possible values include: 'Online', 'OnPremisesWithIfd' :type deployment_type: str or ~azure.mgmt.datafactory.models.DynamicsDeploymentType :param host_name: The host name of the on-premises Common Data Service for @@ -56,14 +58,14 @@ class CommonDataServiceForAppsLinkedService(LinkedService): associated with the user. Type: string (or Expression with resultType string). 
:type organization_name: object - :param authentication_type: The authentication type to connect to Common - Data Service for Apps server. 'Office365' for online scenario, 'Ifd' for - on-premises with Ifd scenario. Type: string (or Expression with resultType - string). Possible values include: 'Office365', 'Ifd' + :param authentication_type: Required. The authentication type to connect + to Common Data Service for Apps server. 'Office365' for online scenario, + 'Ifd' for on-premises with Ifd scenario. Type: string (or Expression with + resultType string). Possible values include: 'Office365', 'Ifd' :type authentication_type: str or ~azure.mgmt.datafactory.models.DynamicsAuthenticationType - :param username: User name to access the Common Data Service for Apps - instance. Type: string (or Expression with resultType string). + :param username: Required. User name to access the Common Data Service for + Apps instance. Type: string (or Expression with resultType string). :type username: object :param password: Password to access the Common Data Service for Apps instance. @@ -99,15 +101,15 @@ class CommonDataServiceForAppsLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, deployment_type, authentication_type, username, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, password=None, encrypted_credential=None): - super(CommonDataServiceForAppsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.deployment_type = deployment_type - self.host_name = host_name - self.port = port - self.service_uri = service_uri - self.organization_name = organization_name - self.authentication_type = authentication_type - self.username = username - self.password = password - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(CommonDataServiceForAppsLinkedService, self).__init__(**kwargs) + self.deployment_type = kwargs.get('deployment_type', None) + self.host_name = kwargs.get('host_name', None) + self.port = kwargs.get('port', None) + self.service_uri = kwargs.get('service_uri', None) + self.organization_name = kwargs.get('organization_name', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'CommonDataServiceForApps' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service_py3.py new file mode 100644 index 000000000000..1c4897c09868 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service_py3.py @@ -0,0 +1,115 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class CommonDataServiceForAppsLinkedService(LinkedService): + """Common Data Service for Apps linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param deployment_type: Required. The deployment type of the Common Data + Service for Apps instance. 'Online' for Common Data Service for Apps + Online and 'OnPremisesWithIfd' for Common Data Service for Apps + on-premises with Ifd. Type: string (or Expression with resultType string). + Possible values include: 'Online', 'OnPremisesWithIfd' + :type deployment_type: str or + ~azure.mgmt.datafactory.models.DynamicsDeploymentType + :param host_name: The host name of the on-premises Common Data Service for + Apps server. The property is required for on-prem and not allowed for + online. Type: string (or Expression with resultType string). + :type host_name: object + :param port: The port of on-premises Common Data Service for Apps server. + The property is required for on-prem and not allowed for online. Default + is 443. Type: integer (or Expression with resultType integer), minimum: 0. + :type port: object + :param service_uri: The URL to the Microsoft Common Data Service for Apps + server. The property is required for on-line and not allowed for on-prem. + Type: string (or Expression with resultType string). + :type service_uri: object + :param organization_name: The organization name of the Common Data Service + for Apps instance. The property is required for on-prem and required for + online when there are more than one Common Data Service for Apps instances + associated with the user. Type: string (or Expression with resultType + string). + :type organization_name: object + :param authentication_type: Required. The authentication type to connect + to Common Data Service for Apps server. 'Office365' for online scenario, + 'Ifd' for on-premises with Ifd scenario. Type: string (or Expression with + resultType string). Possible values include: 'Office365', 'Ifd' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.DynamicsAuthenticationType + :param username: Required. User name to access the Common Data Service for + Apps instance. Type: string (or Expression with resultType string). + :type username: object + :param password: Password to access the Common Data Service for Apps + instance. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, deployment_type, authentication_type, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(CommonDataServiceForAppsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.deployment_type = deployment_type + self.host_name = host_name + self.port = port + self.service_uri = service_uri + self.organization_name = organization_name + self.authentication_type = authentication_type + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'CommonDataServiceForApps' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink.py index 675861c7ce81..0df48841cccc 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink.py @@ -18,6 +18,8 @@ class CommonDataServiceForAppsSink(CopySink): Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -39,10 +41,10 @@ class CommonDataServiceForAppsSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :ivar write_behavior: The write behavior for the operation. Default value: - "Upsert" . + :ivar write_behavior: Required. The write behavior for the operation. + Default value: "Upsert" . 
:vartype write_behavior: str :param ignore_null_values: The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. @@ -69,7 +71,7 @@ class CommonDataServiceForAppsSink(CopySink): write_behavior = "Upsert" - def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None): - super(CommonDataServiceForAppsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.ignore_null_values = ignore_null_values + def __init__(self, **kwargs): + super(CommonDataServiceForAppsSink, self).__init__(**kwargs) + self.ignore_null_values = kwargs.get('ignore_null_values', None) self.type = 'CommonDataServiceForAppsSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink_py3.py new file mode 100644 index 000000000000..80f85e6d5809 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class CommonDataServiceForAppsSink(CopySink): + """A copy activity Common Data Service for Apps sink. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :ivar write_behavior: Required. The write behavior for the operation. + Default value: "Upsert" . 
+ :vartype write_behavior: str + :param ignore_null_values: The flag indicating whether to ignore null + values from input dataset (except key fields) during write operation. + Default is false. Type: boolean (or Expression with resultType boolean). + :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + 'write_behavior': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + write_behavior = "Upsert" + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None: + super(CommonDataServiceForAppsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.ignore_null_values = ignore_null_values + self.type = 'CommonDataServiceForAppsSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source.py index 16ffa2e59da7..13d2a6b921bb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source.py @@ -15,6 +15,8 @@ class CommonDataServiceForAppsSource(CopySource): """A copy activity Common Data Service for Apps source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class CommonDataServiceForAppsSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: FetchXML is a proprietary query language that is used in Microsoft Common Data Service for Apps (online & on-premises). 
Type: @@ -50,7 +52,7 @@ class CommonDataServiceForAppsSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(CommonDataServiceForAppsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(CommonDataServiceForAppsSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'CommonDataServiceForAppsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source_py3.py new file mode 100644 index 000000000000..713db90f9623 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source_py3.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class CommonDataServiceForAppsSource(CopySource): + """A copy activity Common Data Service for Apps source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: FetchXML is a proprietary query language that is used in + Microsoft Common Data Service for Apps (online & on-premises). Type: + string (or Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(CommonDataServiceForAppsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'CommonDataServiceForAppsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service.py index 9acb7cbe90f4..04179d0d1f53 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service.py @@ -15,6 +15,8 @@ class ConcurLinkedService(LinkedService): """Concur Service linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,11 +31,13 @@ class ConcurLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param client_id: Application client_id supplied by Concur App Management. + :param client_id: Required. Application client_id supplied by Concur App + Management. :type client_id: object - :param username: The user name that you use to access Concur Service. + :param username: Required. The user name that you use to access Concur + Service. :type username: object :param password: The password corresponding to the user name that you provided in the username field. 
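A short, hedged sketch of the CommonDataServiceForAppsSource completed above: the optional query property carries a FetchXML document as a plain string, and the retry fields follow the timespan pattern quoted in the docstring.

from azure.mgmt.datafactory.models import CommonDataServiceForAppsSource

fetch_xml = (
    '<fetch top="10">'
    '<entity name="account"><attribute name="name" /></entity>'
    '</fetch>'
)

source = CommonDataServiceForAppsSource(
    query=fetch_xml,               # FetchXML, passed through as an opaque string
    source_retry_count=3,
    source_retry_wait="00:00:30",  # matches the ((\d+)\.)?(\d\d):... pattern
)
assert source.type == 'CommonDataServiceForAppsSource'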
@@ -76,13 +80,13 @@ class ConcurLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, client_id, username, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, password=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None): - super(ConcurLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.client_id = client_id - self.username = username - self.password = password - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(ConcurLinkedService, self).__init__(**kwargs) + self.client_id = kwargs.get('client_id', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'Concur' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service_py3.py new file mode 100644 index 000000000000..4411db6d2856 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service_py3.py @@ -0,0 +1,92 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class ConcurLinkedService(LinkedService): + """Concur Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param client_id: Required. Application client_id supplied by Concur App + Management. + :type client_id: object + :param username: Required. The user name that you use to access Concur + Service. 
+ :type username: object + :param password: The password corresponding to the user name that you + provided in the username field. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_id': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, client_id, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(ConcurLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.client_id = client_id + self.username = username + self.password = password + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Concur' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset.py index 12ae8d1e6b70..e2595f9d8aff 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset.py @@ -15,6 +15,8 @@ class ConcurObjectDataset(Dataset): """Concur Service dataset. + All required parameters must be populated in order to send to Azure. 
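For the ConcurLinkedService py3 variant completed just above, a hedged construction sketch; SecureString is assumed to be the concrete SecretBase type with a value keyword, as elsewhere in these models.

from azure.mgmt.datafactory.models import ConcurLinkedService, SecureString

# client_id and username are required and keyword-only in the *_py3 module;
# positional calls raise TypeError on Python 3.
concur_ls = ConcurLinkedService(
    client_id="my-concur-app-id",
    username="integration-user",
    password=SecureString(value="<placeholder-secret>"),  # assumed signature
    use_encrypted_endpoints=True,
)
assert concur_ls.type == 'Concur'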
+ :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class ConcurObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class ConcurObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -64,7 +66,7 @@ class ConcurObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): - super(ConcurObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name + def __init__(self, **kwargs): + super(ConcurObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) self.type = 'ConcurObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset_py3.py new file mode 100644 index 000000000000..9543a6395a32 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class ConcurObjectDataset(Dataset): + """Concur Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(ConcurObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'ConcurObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source.py index 7537b1f752c7..11ae557c0cda 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source.py @@ -15,6 +15,8 @@ class ConcurSource(CopySource): """A copy activity Concur Service source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class ConcurSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
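The ConcurObjectDataset completed above also illustrates the discriminator round-trip: deserializing through the Dataset base class consults its _subtype_map on the 'type' key and returns the concrete subclass. A sketch, assuming msrest's Model.deserialize classmethod:

from azure.mgmt.datafactory.models import ConcurObjectDataset, Dataset

payload = {
    'type': 'ConcurObject',
    'linkedServiceName': {
        'referenceName': 'ConcurLS',
        'type': 'LinkedServiceReference',
    },
    'typeProperties': {'tableName': 'Trips'},
}

# Model.deserialize resolves 'ConcurObject' via the subtype map and
# un-flattens typeProperties.tableName back into table_name.
ds = Dataset.deserialize(payload)
assert isinstance(ds, ConcurObjectDataset)
assert ds.table_name == 'Trips'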
@@ -49,7 +51,7 @@ class ConcurSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(ConcurSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'ConcurSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source_py3.py new file mode 100644 index 000000000000..ac8ae8fb5a91 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class ConcurSource(CopySource): + """A copy activity Concur Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'ConcurSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity.py index 323767f25a9f..2242bc36beb2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity.py @@ -20,10 +20,12 @@ class ControlActivity(Activity): SetVariableActivity, FilterActivity, ValidationActivity, UntilActivity, WaitActivity, ForEachActivity, IfConditionActivity, ExecutePipelineActivity + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Activity name. + :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str @@ -31,7 +33,7 @@ class ControlActivity(Activity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. 
:type type: str """ @@ -40,10 +42,19 @@ class ControlActivity(Activity): 'type': {'required': True}, } + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + } + _subtype_map = { 'type': {'WebHook': 'WebHookActivity', 'AppendVariable': 'AppendVariableActivity', 'SetVariable': 'SetVariableActivity', 'Filter': 'FilterActivity', 'Validation': 'ValidationActivity', 'Until': 'UntilActivity', 'Wait': 'WaitActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'ExecutePipeline': 'ExecutePipelineActivity'} } - def __init__(self, name, additional_properties=None, description=None, depends_on=None, user_properties=None): - super(ControlActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties) + def __init__(self, **kwargs): + super(ControlActivity, self).__init__(**kwargs) self.type = 'Container' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity_py3.py new file mode 100644 index 000000000000..0aabd99d741f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity_py3.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .activity_py3 import Activity + + +class ControlActivity(Activity): + """Base class for all control activities like IfCondition, ForEach , Until. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: WebHookActivity, AppendVariableActivity, + SetVariableActivity, FilterActivity, ValidationActivity, UntilActivity, + WaitActivity, ForEachActivity, IfConditionActivity, ExecutePipelineActivity + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'WebHook': 'WebHookActivity', 'AppendVariable': 'AppendVariableActivity', 'SetVariable': 'SetVariableActivity', 'Filter': 'FilterActivity', 'Validation': 'ValidationActivity', 'Until': 'UntilActivity', 'Wait': 'WaitActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'ExecutePipeline': 'ExecutePipelineActivity'} + } + + def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, **kwargs) -> None: + super(ControlActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.type = 'Container' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py index 4c2223cf07dc..2e7c00d551ba 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py @@ -15,10 +15,12 @@ class CopyActivity(ExecutionActivity): """Copy activity. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Activity name. + :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str @@ -26,16 +28,16 @@ class CopyActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param source: Copy activity source. + :param source: Required. Copy activity source. :type source: ~azure.mgmt.datafactory.models.CopySource - :param sink: Copy activity sink. + :param sink: Required. Copy activity sink. :type sink: ~azure.mgmt.datafactory.models.CopySink :param translator: Copy activity translator. If not specified, tabular translator is used. 
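The _subtype_map on ControlActivity above is what makes the 'type' constants meaningful in the other direction: each subclass overwrites self.type in __init__, and deserializing through the base class picks the subclass from that key. A hedged sketch using WaitActivity, whose generated shape is assumed to map wait_time_in_seconds to typeProperties.waitTimeInSeconds:

from azure.mgmt.datafactory.models import ControlActivity, WaitActivity

activity = ControlActivity.deserialize({
    'name': 'wait-for-upstream',
    'type': 'Wait',  # routed through ControlActivity._subtype_map
    'typeProperties': {'waitTimeInSeconds': 30},
})
assert isinstance(activity, WaitActivity)
assert activity.wait_time_in_seconds == 30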
@@ -104,19 +106,19 @@ class CopyActivity(ExecutionActivity): 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, } - def __init__(self, name, source, sink, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, translator=None, enable_staging=None, staging_settings=None, parallel_copies=None, data_integration_units=None, enable_skip_incompatible_row=None, redirect_incompatible_row_settings=None, preserve_rules=None, preserve=None, inputs=None, outputs=None): - super(CopyActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) - self.source = source - self.sink = sink - self.translator = translator - self.enable_staging = enable_staging - self.staging_settings = staging_settings - self.parallel_copies = parallel_copies - self.data_integration_units = data_integration_units - self.enable_skip_incompatible_row = enable_skip_incompatible_row - self.redirect_incompatible_row_settings = redirect_incompatible_row_settings - self.preserve_rules = preserve_rules - self.preserve = preserve - self.inputs = inputs - self.outputs = outputs + def __init__(self, **kwargs): + super(CopyActivity, self).__init__(**kwargs) + self.source = kwargs.get('source', None) + self.sink = kwargs.get('sink', None) + self.translator = kwargs.get('translator', None) + self.enable_staging = kwargs.get('enable_staging', None) + self.staging_settings = kwargs.get('staging_settings', None) + self.parallel_copies = kwargs.get('parallel_copies', None) + self.data_integration_units = kwargs.get('data_integration_units', None) + self.enable_skip_incompatible_row = kwargs.get('enable_skip_incompatible_row', None) + self.redirect_incompatible_row_settings = kwargs.get('redirect_incompatible_row_settings', None) + self.preserve_rules = kwargs.get('preserve_rules', None) + self.preserve = kwargs.get('preserve', None) + self.inputs = kwargs.get('inputs', None) + self.outputs = kwargs.get('outputs', None) self.type = 'Copy' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py new file mode 100644 index 000000000000..f8a1fee5625d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py @@ -0,0 +1,124 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class CopyActivity(ExecutionActivity): + """Copy activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. 
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param source: Required. Copy activity source. + :type source: ~azure.mgmt.datafactory.models.CopySource + :param sink: Required. Copy activity sink. + :type sink: ~azure.mgmt.datafactory.models.CopySink + :param translator: Copy activity translator. If not specified, tabular + translator is used. + :type translator: object + :param enable_staging: Specifies whether to copy data via an interim + staging. Default value is false. Type: boolean (or Expression with + resultType boolean). + :type enable_staging: object + :param staging_settings: Specifies interim staging settings when + EnableStaging is true. + :type staging_settings: ~azure.mgmt.datafactory.models.StagingSettings + :param parallel_copies: Maximum number of concurrent sessions opened on + the source or sink to avoid overloading the data store. Type: integer (or + Expression with resultType integer), minimum: 0. + :type parallel_copies: object + :param data_integration_units: Maximum number of data integration units + that can be used to perform this data movement. Type: integer (or + Expression with resultType integer), minimum: 0. + :type data_integration_units: object + :param enable_skip_incompatible_row: Whether to skip incompatible row. + Default value is false. Type: boolean (or Expression with resultType + boolean). + :type enable_skip_incompatible_row: object + :param redirect_incompatible_row_settings: Redirect incompatible row + settings when EnableSkipIncompatibleRow is true. + :type redirect_incompatible_row_settings: + ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings + :param preserve_rules: Preserve Rules. + :type preserve_rules: list[object] + :param preserve: Preserve rules. + :type preserve: list[object] + :param inputs: List of inputs for the activity. + :type inputs: list[~azure.mgmt.datafactory.models.DatasetReference] + :param outputs: List of outputs for the activity. 
+ :type outputs: list[~azure.mgmt.datafactory.models.DatasetReference] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'source': {'required': True}, + 'sink': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, + 'sink': {'key': 'typeProperties.sink', 'type': 'CopySink'}, + 'translator': {'key': 'typeProperties.translator', 'type': 'object'}, + 'enable_staging': {'key': 'typeProperties.enableStaging', 'type': 'object'}, + 'staging_settings': {'key': 'typeProperties.stagingSettings', 'type': 'StagingSettings'}, + 'parallel_copies': {'key': 'typeProperties.parallelCopies', 'type': 'object'}, + 'data_integration_units': {'key': 'typeProperties.dataIntegrationUnits', 'type': 'object'}, + 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, + 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, + 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, + 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, + 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, + 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, + } + + def __init__(self, *, name: str, source, sink, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, translator=None, enable_staging=None, staging_settings=None, parallel_copies=None, data_integration_units=None, enable_skip_incompatible_row=None, redirect_incompatible_row_settings=None, preserve_rules=None, preserve=None, inputs=None, outputs=None, **kwargs) -> None: + super(CopyActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.source = source + self.sink = sink + self.translator = translator + self.enable_staging = enable_staging + self.staging_settings = staging_settings + self.parallel_copies = parallel_copies + self.data_integration_units = data_integration_units + self.enable_skip_incompatible_row = enable_skip_incompatible_row + self.redirect_incompatible_row_settings = redirect_incompatible_row_settings + self.preserve_rules = preserve_rules + self.preserve = preserve + self.inputs = inputs + self.outputs = outputs + self.type = 'Copy' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py index 99d85c435117..cbe8f2ecf8f7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py @@ -26,6 +26,8 @@ class CopySink(Model): SapCloudForCustomerSink, AzureMySqlSink, AzurePostgreSqlSink, JsonSink, 
DelimitedTextSink + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -47,7 +49,7 @@ class CopySink(Model): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str """ @@ -69,12 +71,12 @@ class CopySink(Model): 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} } - def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None): - super(CopySink, self).__init__() - self.additional_properties = additional_properties - self.write_batch_size = write_batch_size - self.write_batch_timeout = write_batch_timeout - self.sink_retry_count = sink_retry_count - self.sink_retry_wait = sink_retry_wait - self.max_concurrent_connections = max_concurrent_connections + def __init__(self, **kwargs): + super(CopySink, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.write_batch_size = kwargs.get('write_batch_size', None) + self.write_batch_timeout = kwargs.get('write_batch_timeout', None) + self.sink_retry_count = kwargs.get('sink_retry_count', None) + self.sink_retry_wait = kwargs.get('sink_retry_wait', None) + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py new file mode 100644 index 000000000000..3720bece5674 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class CopySink(Model): + """A copy activity sink. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: CosmosDbMongoDbApiSink, SalesforceServiceCloudSink, + SalesforceSink, AzureDataExplorerSink, CommonDataServiceForAppsSink, + DynamicsCrmSink, DynamicsSink, MicrosoftAccessSink, InformixSink, OdbcSink, + AzureSearchIndexSink, AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, + SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, + DocumentDbCollectionSink, FileSystemSink, BlobSink, BinarySink, + ParquetSink, AvroSink, AzureTableSink, AzureQueueSink, + SapCloudForCustomerSink, AzureMySqlSink, AzurePostgreSqlSink, JsonSink, + DelimitedTextSink + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: + super(CopySink, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.write_batch_size = write_batch_size + self.write_batch_timeout = write_batch_timeout + self.sink_retry_count = sink_retry_count + self.sink_retry_wait = sink_retry_wait + self.max_concurrent_connections = max_concurrent_connections + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py index 37257f2785fa..b37bca86de5e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py @@ -38,6 +38,8 @@ class CopySource(Model): DocumentDbCollectionSource, BlobSource, AzureTableSource, BinarySource, JsonSource, DelimitedTextSource, ParquetSource, AvroSource + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -52,7 +54,7 @@ class CopySource(Model): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. 
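
Deserialization leans on the _subtype_map above: msrest reads the wire-level type discriminator and instantiates the matching CopySink (or CopySource) subclass. A rough sketch of that mechanism, assuming the package's model index is built the way the generated client builds it:

    from msrest import Deserializer

    from azure.mgmt.datafactory import models

    # Index every model class by name, as the generated client does.
    client_models = {k: v for k, v in models.__dict__.items()
                     if isinstance(v, type)}
    deserialize = Deserializer(client_models)

    # The 'type' value selects the concrete subclass of CopySink.
    sink = deserialize('CopySink', {'type': 'BlobSink',
                                    'copyBehavior': 'FlattenHierarchy'})
    assert type(sink).__name__ == 'BlobSink'
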
:type type: str """ @@ -72,10 +74,10 @@ class CopySource(Model): 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'ODataSource': 'ODataSource', 'SapBwSource': 'SapBwSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'BinarySource': 'BinarySource', 'JsonSource': 'JsonSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'AvroSource': 'AvroSource'} } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None): - super(CopySource, self).__init__() - self.additional_properties = additional_properties - self.source_retry_count = source_retry_count - self.source_retry_wait = source_retry_wait - 
self.max_concurrent_connections = max_concurrent_connections + def __init__(self, **kwargs): + super(CopySource, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.source_retry_count = kwargs.get('source_retry_count', None) + self.source_retry_wait = kwargs.get('source_retry_wait', None) + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py new file mode 100644 index 000000000000..22b8c590498e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py @@ -0,0 +1,83 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class CopySource(Model): + """A copy activity source. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AmazonRedshiftSource, GoogleAdWordsSource, + OracleServiceCloudSource, DynamicsAXSource, ResponsysSource, + SalesforceMarketingCloudSource, VerticaSource, NetezzaSource, ZohoSource, + XeroSource, SquareSource, SparkSource, ShopifySource, ServiceNowSource, + QuickBooksSource, PrestoSource, PhoenixSource, PaypalSource, MarketoSource, + AzureMariaDBSource, MariaDBSource, MagentoSource, JiraSource, ImpalaSource, + HubspotSource, HiveSource, HBaseSource, GreenplumSource, + GoogleBigQuerySource, EloquaSource, DrillSource, CouchbaseSource, + ConcurSource, AzurePostgreSqlSource, AmazonMWSSource, HttpSource, + AzureBlobFSSource, AzureDataLakeStoreSource, Office365Source, + CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, CassandraSource, + WebSource, TeradataSource, OracleSource, AzureDataExplorerSource, + AzureMySqlSource, HdfsSource, FileSystemSource, SqlDWSource, SqlMISource, + AzureSqlSource, SqlServerSource, SqlSource, RestSource, SapTableSource, + SapOpenHubSource, SapHanaSource, SapEccSource, SapCloudForCustomerSource, + SalesforceServiceCloudSource, SalesforceSource, ODataSource, SapBwSource, + SybaseSource, PostgreSqlSource, MySqlSource, OdbcSource, Db2Source, + MicrosoftAccessSource, InformixSource, RelationalSource, + CommonDataServiceForAppsSource, DynamicsCrmSource, DynamicsSource, + DocumentDbCollectionSource, BlobSource, AzureTableSource, BinarySource, + JsonSource, DelimitedTextSource, ParquetSource, AvroSource + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
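
The retry-wait and batch-timeout values quoted in these docstrings must match the d.hh:mm:ss timespan pattern shown above. A quick client-side check of that pattern, as a sketch:

    import re

    # The timespan pattern quoted in the docstrings above.
    TIMESPAN = re.compile(
        r'^((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))$')

    for value in ('00:00:30', '1.00:05:00', '99:99'):
        print(value, bool(TIMESPAN.match(value)))
    # 00:00:30 True / 1.00:05:00 True / 99:99 False
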
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'ODataSource': 'ODataSource', 'SapBwSource': 'SapBwSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 
'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'BinarySource': 'BinarySource', 'JsonSource': 'JsonSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'AvroSource': 'AvroSource'} + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: + super(CopySource, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.source_retry_count = source_retry_count + self.source_retry_wait = source_retry_wait + self.max_concurrent_connections = max_concurrent_connections + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service.py index 7723a1b2c4f9..6a8a462f6f46 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service.py @@ -15,6 +15,8 @@ class CosmosDbLinkedService(LinkedService): """Microsoft Azure Cosmos Database (CosmosDB) linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,9 +31,9 @@ class CosmosDbLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param connection_string: The connection string. Type: string, + :param connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. 
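
Because connection_string is typed object, it can carry a SecureString inline or defer to Key Vault. A sketch of both options, assuming the SecureString, AzureKeyVaultSecretReference, and LinkedServiceReference models from this package (the linked service name 'MyKeyVault', the secret name, and the connection string value are hypothetical):

    from azure.mgmt.datafactory.models import (
        AzureKeyVaultSecretReference, CosmosDbLinkedService,
        LinkedServiceReference, SecureString)

    # Inline secret value.
    inline = CosmosDbLinkedService(
        connection_string=SecureString(
            value='AccountEndpoint=...;AccountKey=...'))

    # Secret resolved from Key Vault at runtime.
    vaulted = CosmosDbLinkedService(
        connection_string=AzureKeyVaultSecretReference(
            store=LinkedServiceReference(reference_name='MyKeyVault'),
            secret_name='cosmos-connection-string'))
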
:type connection_string: object :param account_key: The Azure key vault secret reference of accountKey in @@ -61,9 +63,9 @@ class CosmosDbLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, connection_string, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, account_key=None, encrypted_credential=None): - super(CosmosDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.connection_string = connection_string - self.account_key = account_key - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(CosmosDbLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.account_key = kwargs.get('account_key', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'CosmosDb' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service_py3.py new file mode 100644 index 000000000000..57dab80e06b9 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service_py3.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class CosmosDbLinkedService(LinkedService): + """Microsoft Azure Cosmos Database (CosmosDB) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param account_key: The Azure key vault secret reference of accountKey in + connection string. + :type account_key: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, account_key=None, encrypted_credential=None, **kwargs) -> None: + super(CosmosDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.account_key = account_key + self.encrypted_credential = encrypted_credential + self.type = 'CosmosDb' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset.py index 56acfa06a8b4..d86648eb5eee 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset.py @@ -15,6 +15,8 @@ class CosmosDbMongoDbApiCollectionDataset(Dataset): """The CosmosDB (MongoDB API) database dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class CosmosDbMongoDbApiCollectionDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,10 +41,10 @@ class CosmosDbMongoDbApiCollectionDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param collection: The collection name of the CosmosDB (MongoDB API) - database. Type: string (or Expression with resultType string). + :param collection: Required. The collection name of the CosmosDB (MongoDB + API) database. Type: string (or Expression with resultType string). 
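
Since collection is likewise typed object, it can be a literal name or an Expression resolved per run. A sketch, assuming a dataset parameter collName and a linked service named 'MyCosmosMongo' (both hypothetical):

    from azure.mgmt.datafactory.models import (
        CosmosDbMongoDbApiCollectionDataset, LinkedServiceReference,
        ParameterSpecification)

    ds = CosmosDbMongoDbApiCollectionDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='MyCosmosMongo'),
        parameters={'collName': ParameterSpecification(type='String')},
        # Resolved from the dataset parameter at run time.
        collection={'value': '@dataset().collName', 'type': 'Expression'})
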
:type collection: object """ @@ -65,7 +67,7 @@ class CosmosDbMongoDbApiCollectionDataset(Dataset): 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, } - def __init__(self, linked_service_name, collection, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None): - super(CosmosDbMongoDbApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.collection = collection + def __init__(self, **kwargs): + super(CosmosDbMongoDbApiCollectionDataset, self).__init__(**kwargs) + self.collection = kwargs.get('collection', None) self.type = 'CosmosDbMongoDbApiCollection' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset_py3.py new file mode 100644 index 000000000000..de2650fa14b4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class CosmosDbMongoDbApiCollectionDataset(Dataset): + """The CosmosDB (MongoDB API) database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection: Required. The collection name of the CosmosDB (MongoDB + API) database. Type: string (or Expression with resultType string). 
+ :type collection: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ 'collection': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'collection': {'key': 'typeProperties.collection', 'type': 'object'},
+ }
+
+ def __init__(self, *, linked_service_name, collection, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None:
+ super(CosmosDbMongoDbApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+ self.collection = collection
+ self.type = 'CosmosDbMongoDbApiCollection'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service.py
index 202a1c4427e0..f76e7c5f5a41 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service.py
@@ -15,6 +15,8 @@ class CosmosDbMongoDbApiLinkedService(LinkedService):
 """Linked service for CosmosDB (MongoDB API) data source.
+ All required parameters must be populated in order to send to Azure.
+
 :param additional_properties: Unmatched properties from the message are
 deserialized this collection
 :type additional_properties: dict[str, object]
@@ -29,14 +31,14 @@ class CosmosDbMongoDbApiLinkedService(LinkedService):
 :param annotations: List of tags that can be used for describing the
 linked service.
 :type annotations: list[object]
- :param type: Constant filled by server.
+ :param type: Required. Constant filled by server.
 :type type: str
- :param connection_string: The CosmosDB (MongoDB API) connection string.
- Type: string, SecureString or AzureKeyVaultSecretReference. Type: string,
- SecureString or AzureKeyVaultSecretReference.
+ :param connection_string: Required. The CosmosDB (MongoDB API) connection
+ string. Type: string, SecureString or AzureKeyVaultSecretReference.
 :type connection_string: object
- :param database: The name of the CosmosDB (MongoDB API) database that you
- want to access. Type: string (or Expression with resultType string).
+ :param database: Required. The name of the CosmosDB (MongoDB API) database
+ that you want to access. Type: string (or Expression with resultType
+ string).
:type database: object
 """
@@ -57,8 +60,8 @@ class CosmosDbMongoDbApiLinkedService(LinkedService):
 'database': {'key': 'typeProperties.database', 'type': 'object'},
 }
- def __init__(self, connection_string, database, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None):
- super(CosmosDbMongoDbApiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations)
- self.connection_string = connection_string
- self.database = database
+ def __init__(self, **kwargs):
+ super(CosmosDbMongoDbApiLinkedService, self).__init__(**kwargs)
+ self.connection_string = kwargs.get('connection_string', None)
+ self.database = kwargs.get('database', None)
 self.type = 'CosmosDbMongoDbApi'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service_py3.py
new file mode 100644
index 000000000000..2a72bfce4ff2
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service_py3.py
@@ -0,0 +1,66 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class CosmosDbMongoDbApiLinkedService(LinkedService):
+ """Linked service for CosmosDB (MongoDB API) data source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param connection_string: Required. The CosmosDB (MongoDB API) connection
+ string. Type: string, SecureString or AzureKeyVaultSecretReference.
+ :type connection_string: object
+ :param database: Required. The name of the CosmosDB (MongoDB API) database
+ that you want to access. Type: string (or Expression with resultType
+ string).
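
Here both connection_string and database are required, unlike the plain CosmosDbLinkedService where only the connection string is. A minimal sketch with a hypothetical account name and key placeholder:

    from azure.mgmt.datafactory.models import (
        CosmosDbMongoDbApiLinkedService, SecureString)

    ls = CosmosDbMongoDbApiLinkedService(
        connection_string=SecureString(
            value='mongodb://myaccount:<key>@myaccount.documents.azure.com:10255/?ssl=true'),
        database='orders')
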
+ :type database: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + } + + def __init__(self, *, connection_string, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: + super(CosmosDbMongoDbApiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.database = database + self.type = 'CosmosDbMongoDbApi' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink.py index d78eccb1030a..0d40b52add80 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink.py @@ -15,6 +15,8 @@ class CosmosDbMongoDbApiSink(CopySink): """A copy activity sink for a CosmosDB (MongoDB API) database. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -36,7 +38,7 @@ class CosmosDbMongoDbApiSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param write_behavior: Specifies whether the document with same key to be overwritten (upsert) rather than throw exception (insert). 
The default
@@ -60,7 +62,7 @@
 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
 }
- def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None):
- super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections)
- self.write_behavior = write_behavior
+ def __init__(self, **kwargs):
+ super(CosmosDbMongoDbApiSink, self).__init__(**kwargs)
+ self.write_behavior = kwargs.get('write_behavior', None)
 self.type = 'CosmosDbMongoDbApiSink'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink_py3.py
new file mode 100644
index 000000000000..5db1ee5c9d36
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink_py3.py
@@ -0,0 +1,68 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_sink_py3 import CopySink
+
+
+class CosmosDbMongoDbApiSink(CopySink):
+ """A copy activity sink for a CosmosDB (MongoDB API) database.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param write_batch_size: Write batch size. Type: integer (or Expression
+ with resultType integer), minimum: 0.
+ :type write_batch_size: object
+ :param write_batch_timeout: Write batch timeout. Type: string (or
+ Expression with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type write_batch_timeout: object
+ :param sink_retry_count: Sink retry count. Type: integer (or Expression
+ with resultType integer).
+ :type sink_retry_count: object
+ :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+ resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type sink_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the sink data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param write_behavior: Specifies whether the document with the same key
+ should be overwritten (upsert) rather than raising an exception (insert).
+ The default value is "insert". Type: string (or Expression with
+ resultType string).
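
A sink that upserts rather than inserts simply sets write_behavior; everything else is inherited batching and retry configuration. A minimal sketch:

    from azure.mgmt.datafactory.models import CosmosDbMongoDbApiSink

    sink = CosmosDbMongoDbApiSink(
        write_behavior='upsert',  # the service default is 'insert'
        write_batch_size=1000,
        max_concurrent_connections=4)
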
+ :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: + super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.write_behavior = write_behavior + self.type = 'CosmosDbMongoDbApiSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py index c6b65fef3192..44dc7443427b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py @@ -15,6 +15,8 @@ class CosmosDbMongoDbApiSource(CopySource): """A copy activity source for a CosmosDB (MongoDB API) database. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class CosmosDbMongoDbApiSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param filter: Specifies selection filter using query operators. 
To return all documents in a collection, omit this parameter or pass an empty @@ -61,9 +63,9 @@ class CosmosDbMongoDbApiSource(CopySource): 'batch_size': {'key': 'batchSize', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None): - super(CosmosDbMongoDbApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.filter = filter - self.cursor_methods = cursor_methods - self.batch_size = batch_size + def __init__(self, **kwargs): + super(CosmosDbMongoDbApiSource, self).__init__(**kwargs) + self.filter = kwargs.get('filter', None) + self.cursor_methods = kwargs.get('cursor_methods', None) + self.batch_size = kwargs.get('batch_size', None) self.type = 'CosmosDbMongoDbApiSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source_py3.py new file mode 100644 index 000000000000..7d180f105abf --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source_py3.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class CosmosDbMongoDbApiSource(CopySource): + """A copy activity source for a CosmosDB (MongoDB API) database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param filter: Specifies selection filter using query operators. To return + all documents in a collection, omit this parameter or pass an empty + document ({}). Type: string (or Expression with resultType string). + :type filter: object + :param cursor_methods: Cursor methods for Mongodb query. + :type cursor_methods: + ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties + :param batch_size: Specifies the number of documents to return in each + batch of the response from MongoDB instance. In most cases, modifying the + batch size will not affect the user or the application. 
This property's
+ main purpose is to avoid hitting the limitation of response size. Type:
+ integer (or Expression with resultType integer).
+ :type batch_size: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'filter': {'key': 'filter', 'type': 'object'},
+ 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'},
+ 'batch_size': {'key': 'batchSize', 'type': 'object'},
+ }
+
+ def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None, **kwargs) -> None:
+ super(CosmosDbMongoDbApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.filter = filter
+ self.cursor_methods = cursor_methods
+ self.batch_size = batch_size
+ self.type = 'CosmosDbMongoDbApiSource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service.py
index affcebbee4f0..76e45648f941 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service.py
@@ -15,6 +15,8 @@ class CouchbaseLinkedService(LinkedService):
 """Couchbase server linked service.
+ All required parameters must be populated in order to send to Azure.
+
 :param additional_properties: Unmatched properties from the message are
 deserialized this collection
 :type additional_properties: dict[str, object]
@@ -29,7 +31,7 @@ class CouchbaseLinkedService(LinkedService):
 :param annotations: List of tags that can be used for describing the
 linked service.
 :type annotations: list[object]
- :param type: Constant filled by server.
+ :param type: Required. Constant filled by server.
 :type type: str
 :param connection_string: An ODBC connection string. Type: string,
 SecureString or AzureKeyVaultSecretReference.
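
For the CosmosDbMongoDbApiSource defined above, filter, cursor_methods, and batch_size together shape the server-side query. A sketch, assuming the MongoDbCursorMethodsProperties model from this package:

    import json

    from azure.mgmt.datafactory.models import (
        CosmosDbMongoDbApiSource, MongoDbCursorMethodsProperties)

    source = CosmosDbMongoDbApiSource(
        # Query-operator filter, serialized as a JSON document.
        filter=json.dumps({'status': {'$eq': 'active'}}),
        cursor_methods=MongoDbCursorMethodsProperties(
            project='{"_id": 0, "name": 1, "status": 1}',
            sort='{"name": 1}',
            limit=500),
        batch_size=100)
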
@@ -60,9 +62,9 @@ class CouchbaseLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, connection_string=None, cred_string=None, encrypted_credential=None): - super(CouchbaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.connection_string = connection_string - self.cred_string = cred_string - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(CouchbaseLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.cred_string = kwargs.get('cred_string', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'Couchbase' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service_py3.py new file mode 100644 index 000000000000..afe336f666de --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class CouchbaseLinkedService(LinkedService): + """Couchbase server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param cred_string: The Azure key vault secret reference of credString in + connection string. + :type cred_string: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'cred_string': {'key': 'typeProperties.credString', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, cred_string=None, encrypted_credential=None, **kwargs) -> None: + super(CouchbaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.cred_string = cred_string + self.encrypted_credential = encrypted_credential + self.type = 'Couchbase' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source.py index f11448fbaefb..b355605417d1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source.py @@ -15,6 +15,8 @@ class CouchbaseSource(CopySource): """A copy activity Couchbase server source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class CouchbaseSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
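
The query accepted by CouchbaseSource is likewise an object, so a literal statement or an Expression both work. A sketch with a hypothetical N1QL query and a retry wait that matches the timespan pattern above:

    from azure.mgmt.datafactory.models import CouchbaseSource

    source = CouchbaseSource(
        query='SELECT name, city FROM `travel-sample` WHERE type = "hotel"',
        source_retry_count=3,
        source_retry_wait='00:00:30')
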
@@ -49,7 +51,7 @@ class CouchbaseSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(CouchbaseSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'CouchbaseSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source_py3.py new file mode 100644 index 000000000000..1358fc20feba --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class CouchbaseSource(CopySource): + """A copy activity Couchbase server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'CouchbaseSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset.py index 513a707f500b..821274b9aae4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset.py @@ -15,6 +15,8 @@ class CouchbaseTableDataset(Dataset): """Couchbase server dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class CouchbaseTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class CouchbaseTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). 
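
The "Required." markers added in this hunk are enforced two ways: the _validation table drives msrest's required-field check when the model is serialized, and on Python 3 the keyword-only signature of the _py3 variant turns a missing linked_service_name into an immediate TypeError. A short construction sketch with placeholder reference and table names:

from azure.mgmt.datafactory.models import (
    CouchbaseTableDataset,
    LinkedServiceReference,
)

# linked_service_name is required; table_name is optional and, per the
# attribute map, serializes under typeProperties.tableName.
dataset = CouchbaseTableDataset(
    linked_service_name=LinkedServiceReference(
        reference_name='myCouchbaseLinkedService'),
    table_name='travel-sample',  # placeholder table name
)
assert dataset.type == 'CouchbaseTable'
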
@@ -64,7 +66,7 @@ class CouchbaseTableDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): - super(CouchbaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name + def __init__(self, **kwargs): + super(CouchbaseTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) self.type = 'CouchbaseTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset_py3.py new file mode 100644 index 000000000000..cf5299fd55a5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class CouchbaseTableDataset(Dataset): + """Couchbase server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(CouchbaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'CouchbaseTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request.py index fa4d41425516..0e7002dcf68a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request.py @@ -35,9 +35,9 @@ class CreateLinkedIntegrationRuntimeRequest(Model): 'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'}, } - def __init__(self, name=None, subscription_id=None, data_factory_name=None, data_factory_location=None): - super(CreateLinkedIntegrationRuntimeRequest, self).__init__() - self.name = name - self.subscription_id = subscription_id - self.data_factory_name = data_factory_name - self.data_factory_location = data_factory_location + def __init__(self, **kwargs): + super(CreateLinkedIntegrationRuntimeRequest, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.subscription_id = kwargs.get('subscription_id', None) + self.data_factory_name = kwargs.get('data_factory_name', None) + self.data_factory_location = kwargs.get('data_factory_location', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request_py3.py new file mode 100644 index 000000000000..aad7d6fa5ac0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request_py3.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class CreateLinkedIntegrationRuntimeRequest(Model): + """The linked integration runtime information. + + :param name: The name of the linked integration runtime. + :type name: str + :param subscription_id: The ID of the subscription that the linked + integration runtime belongs to. + :type subscription_id: str + :param data_factory_name: The name of the data factory that the linked + integration runtime belongs to. + :type data_factory_name: str + :param data_factory_location: The location of the data factory that the + linked integration runtime belongs to. + :type data_factory_location: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, + 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, + 'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'}, + } + + def __init__(self, *, name: str=None, subscription_id: str=None, data_factory_name: str=None, data_factory_location: str=None, **kwargs) -> None: + super(CreateLinkedIntegrationRuntimeRequest, self).__init__(**kwargs) + self.name = name + self.subscription_id = subscription_id + self.data_factory_name = data_factory_name + self.data_factory_location = data_factory_location diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response.py index 696b3fcea4c4..18ec9f963e65 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response.py @@ -15,7 +15,9 @@ class CreateRunResponse(Model): """Response body with a run identifier. - :param run_id: Identifier of a run. + All required parameters must be populated in order to send to Azure. + + :param run_id: Required. Identifier of a run. :type run_id: str """ @@ -27,6 +29,6 @@ class CreateRunResponse(Model): 'run_id': {'key': 'runId', 'type': 'str'}, } - def __init__(self, run_id): - super(CreateRunResponse, self).__init__() - self.run_id = run_id + def __init__(self, **kwargs): + super(CreateRunResponse, self).__init__(**kwargs) + self.run_id = kwargs.get('run_id', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response_py3.py new file mode 100644 index 000000000000..bb280441ae90 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response_py3.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class CreateRunResponse(Model): + """Response body with a run identifier. + + All required parameters must be populated in order to send to Azure. + + :param run_id: Required. Identifier of a run. 
+ :type run_id: str + """ + + _validation = { + 'run_id': {'required': True}, + } + + _attribute_map = { + 'run_id': {'key': 'runId', 'type': 'str'}, + } + + def __init__(self, *, run_id: str, **kwargs) -> None: + super(CreateRunResponse, self).__init__(**kwargs) + self.run_id = run_id diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity.py index 79304132e099..01cfb7335d37 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity.py @@ -15,10 +15,12 @@ class CustomActivity(ExecutionActivity): """Custom activity type. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Activity name. + :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str @@ -26,15 +28,15 @@ class CustomActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param command: Command for custom activity Type: string (or Expression - with resultType string). + :param command: Required. Command for custom activity Type: string (or + Expression with resultType string). :type command: object :param resource_linked_service: Resource linked service reference. 
:type resource_linked_service: @@ -78,12 +80,12 @@ class CustomActivity(ExecutionActivity): 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'}, } - def __init__(self, name, command, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, resource_linked_service=None, folder_path=None, reference_objects=None, extended_properties=None, retention_time_in_days=None): - super(CustomActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) - self.command = command - self.resource_linked_service = resource_linked_service - self.folder_path = folder_path - self.reference_objects = reference_objects - self.extended_properties = extended_properties - self.retention_time_in_days = retention_time_in_days + def __init__(self, **kwargs): + super(CustomActivity, self).__init__(**kwargs) + self.command = kwargs.get('command', None) + self.resource_linked_service = kwargs.get('resource_linked_service', None) + self.folder_path = kwargs.get('folder_path', None) + self.reference_objects = kwargs.get('reference_objects', None) + self.extended_properties = kwargs.get('extended_properties', None) + self.retention_time_in_days = kwargs.get('retention_time_in_days', None) self.type = 'Custom' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_py3.py new file mode 100644 index 000000000000..bf8326f053f2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_py3.py @@ -0,0 +1,91 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class CustomActivity(ExecutionActivity): + """Custom activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param command: Required. Command for custom activity Type: string (or + Expression with resultType string). 
+ :type command: object + :param resource_linked_service: Resource linked service reference. + :type resource_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param folder_path: Folder path for resource files Type: string (or + Expression with resultType string). + :type folder_path: object + :param reference_objects: Reference objects + :type reference_objects: + ~azure.mgmt.datafactory.models.CustomActivityReferenceObject + :param extended_properties: User defined property bag. There is no + restriction on the keys or values that can be used. The user specified + custom activity has the full responsibility to consume and interpret the + content defined. + :type extended_properties: dict[str, object] + :param retention_time_in_days: The retention time for the files submitted + for custom activity. Type: double (or Expression with resultType double). + :type retention_time_in_days: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'command': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'command': {'key': 'typeProperties.command', 'type': 'object'}, + 'resource_linked_service': {'key': 'typeProperties.resourceLinkedService', 'type': 'LinkedServiceReference'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'}, + 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'}, + 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'}, + } + + def __init__(self, *, name: str, command, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, resource_linked_service=None, folder_path=None, reference_objects=None, extended_properties=None, retention_time_in_days=None, **kwargs) -> None: + super(CustomActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.command = command + self.resource_linked_service = resource_linked_service + self.folder_path = folder_path + self.reference_objects = reference_objects + self.extended_properties = extended_properties + self.retention_time_in_days = retention_time_in_days + self.type = 'Custom' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object.py index bcf61066590b..5f95a54612dd 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object.py @@ -27,7 +27,7 @@ class CustomActivityReferenceObject(Model): 'datasets': {'key': 'datasets', 'type': 
'[DatasetReference]'}, } - def __init__(self, linked_services=None, datasets=None): - super(CustomActivityReferenceObject, self).__init__() - self.linked_services = linked_services - self.datasets = datasets + def __init__(self, **kwargs): + super(CustomActivityReferenceObject, self).__init__(**kwargs) + self.linked_services = kwargs.get('linked_services', None) + self.datasets = kwargs.get('datasets', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object_py3.py new file mode 100644 index 000000000000..f860f0141bd0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object_py3.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class CustomActivityReferenceObject(Model): + """Reference objects for custom activity. + + :param linked_services: Linked service references. + :type linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :param datasets: Dataset references. + :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference] + """ + + _attribute_map = { + 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceReference]'}, + 'datasets': {'key': 'datasets', 'type': '[DatasetReference]'}, + } + + def __init__(self, *, linked_services=None, datasets=None, **kwargs) -> None: + super(CustomActivityReferenceObject, self).__init__(**kwargs) + self.linked_services = linked_services + self.datasets = datasets diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service.py index b290ca34a41c..db14a05e7ad1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service.py @@ -15,6 +15,8 @@ class CustomDataSourceLinkedService(LinkedService): """Custom linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,9 +31,9 @@ class CustomDataSourceLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param type_properties: Custom linked service properties. + :param type_properties: Required. Custom linked service properties. 
:type type_properties: object """ @@ -50,7 +52,7 @@ class CustomDataSourceLinkedService(LinkedService): 'type_properties': {'key': 'typeProperties', 'type': 'object'}, } - def __init__(self, type_properties, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None): - super(CustomDataSourceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.type_properties = type_properties + def __init__(self, **kwargs): + super(CustomDataSourceLinkedService, self).__init__(**kwargs) + self.type_properties = kwargs.get('type_properties', None) self.type = 'CustomDataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service_py3.py new file mode 100644 index 000000000000..f7633ee28cbd --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service_py3.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class CustomDataSourceLinkedService(LinkedService): + """Custom linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param type_properties: Required. Custom linked service properties. 
+ :type type_properties: object + """ + + _validation = { + 'type': {'required': True}, + 'type_properties': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'type_properties': {'key': 'typeProperties', 'type': 'object'}, + } + + def __init__(self, *, type_properties, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: + super(CustomDataSourceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type_properties = type_properties + self.type = 'CustomDataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset.py index 370350c6d6f6..a242309c3fd1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset.py @@ -15,6 +15,8 @@ class CustomDataset(Dataset): """The custom dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class CustomDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class CustomDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param type_properties: Custom dataset properties. 
:type type_properties: object @@ -63,7 +65,7 @@ class CustomDataset(Dataset): 'type_properties': {'key': 'typeProperties', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, type_properties=None): - super(CustomDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.type_properties = type_properties + def __init__(self, **kwargs): + super(CustomDataset, self).__init__(**kwargs) + self.type_properties = kwargs.get('type_properties', None) self.type = 'CustomDataset' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset_py3.py new file mode 100644 index 000000000000..c00dae2b2c56 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset_py3.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class CustomDataset(Dataset): + """The custom dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param type_properties: Custom dataset properties. 
+ :type type_properties: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'type_properties': {'key': 'typeProperties', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, type_properties=None, **kwargs) -> None: + super(CustomDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type_properties = type_properties + self.type = 'CustomDataset' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py index ad33efdcc1d7..8b3e08f32768 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py @@ -12,7 +12,7 @@ from enum import Enum -class IntegrationRuntimeState(Enum): +class IntegrationRuntimeState(str, Enum): initial = "Initial" stopped = "Stopped" @@ -26,13 +26,13 @@ class IntegrationRuntimeState(Enum): access_denied = "AccessDenied" -class IntegrationRuntimeAutoUpdate(Enum): +class IntegrationRuntimeAutoUpdate(str, Enum): on = "On" off = "Off" -class ParameterType(Enum): +class ParameterType(str, Enum): object_enum = "Object" string = "String" @@ -43,7 +43,7 @@ class ParameterType(Enum): secure_string = "SecureString" -class DependencyCondition(Enum): +class DependencyCondition(str, Enum): succeeded = "Succeeded" failed = "Failed" @@ -51,21 +51,21 @@ class DependencyCondition(Enum): completed = "Completed" -class VariableType(Enum): +class VariableType(str, Enum): string = "String" bool_enum = "Bool" array = "Array" -class TriggerRuntimeState(Enum): +class TriggerRuntimeState(str, Enum): started = "Started" stopped = "Stopped" disabled = "Disabled" -class EventSubscriptionStatus(Enum): +class EventSubscriptionStatus(str, Enum): enabled = "Enabled" provisioning = "Provisioning" @@ -74,7 +74,7 @@ class EventSubscriptionStatus(Enum): unknown = "Unknown" -class RunQueryFilterOperand(Enum): +class RunQueryFilterOperand(str, Enum): pipeline_name = "PipelineName" status = "Status" @@ -90,7 +90,7 @@ class RunQueryFilterOperand(Enum): latest_only = "LatestOnly" -class RunQueryFilterOperator(Enum): +class RunQueryFilterOperator(str, Enum): equals = "Equals" not_equals = "NotEquals" @@ -98,7 +98,7 @@ class RunQueryFilterOperator(Enum): not_in = "NotIn" -class RunQueryOrderByField(Enum): +class RunQueryOrderByField(str, Enum): run_start = "RunStart" run_end = "RunEnd" @@ -111,32 +111,32 @@ class 
RunQueryOrderByField(Enum): trigger_run_timestamp = "TriggerRunTimestamp" -class RunQueryOrder(Enum): +class RunQueryOrder(str, Enum): asc = "ASC" desc = "DESC" -class TriggerRunStatus(Enum): +class TriggerRunStatus(str, Enum): succeeded = "Succeeded" failed = "Failed" inprogress = "Inprogress" -class TumblingWindowFrequency(Enum): +class TumblingWindowFrequency(str, Enum): minute = "Minute" hour = "Hour" -class BlobEventTypes(Enum): +class BlobEventTypes(str, Enum): microsoft_storage_blob_created = "Microsoft.Storage.BlobCreated" microsoft_storage_blob_deleted = "Microsoft.Storage.BlobDeleted" -class DayOfWeek(Enum): +class DayOfWeek(str, Enum): sunday = "Sunday" monday = "Monday" @@ -147,7 +147,7 @@ class DayOfWeek(Enum): saturday = "Saturday" -class DaysOfWeek(Enum): +class DaysOfWeek(str, Enum): sunday = "Sunday" monday = "Monday" @@ -158,7 +158,7 @@ class DaysOfWeek(Enum): saturday = "Saturday" -class RecurrenceFrequency(Enum): +class RecurrenceFrequency(str, Enum): not_specified = "NotSpecified" minute = "Minute" @@ -169,27 +169,27 @@ class RecurrenceFrequency(Enum): year = "Year" -class GoogleAdWordsAuthenticationType(Enum): +class GoogleAdWordsAuthenticationType(str, Enum): service_authentication = "ServiceAuthentication" user_authentication = "UserAuthentication" -class SparkServerType(Enum): +class SparkServerType(str, Enum): shark_server = "SharkServer" shark_server2 = "SharkServer2" spark_thrift_server = "SparkThriftServer" -class SparkThriftTransportProtocol(Enum): +class SparkThriftTransportProtocol(str, Enum): binary = "Binary" sasl = "SASL" http = "HTTP " -class SparkAuthenticationType(Enum): +class SparkAuthenticationType(str, Enum): anonymous = "Anonymous" username = "Username" @@ -197,47 +197,47 @@ class SparkAuthenticationType(Enum): windows_azure_hd_insight_service = "WindowsAzureHDInsightService" -class ServiceNowAuthenticationType(Enum): +class ServiceNowAuthenticationType(str, Enum): basic = "Basic" oauth2 = "OAuth2" -class PrestoAuthenticationType(Enum): +class PrestoAuthenticationType(str, Enum): anonymous = "Anonymous" ldap = "LDAP" -class PhoenixAuthenticationType(Enum): +class PhoenixAuthenticationType(str, Enum): anonymous = "Anonymous" username_and_password = "UsernameAndPassword" windows_azure_hd_insight_service = "WindowsAzureHDInsightService" -class ImpalaAuthenticationType(Enum): +class ImpalaAuthenticationType(str, Enum): anonymous = "Anonymous" sasl_username = "SASLUsername" username_and_password = "UsernameAndPassword" -class HiveServerType(Enum): +class HiveServerType(str, Enum): hive_server1 = "HiveServer1" hive_server2 = "HiveServer2" hive_thrift_server = "HiveThriftServer" -class HiveThriftTransportProtocol(Enum): +class HiveThriftTransportProtocol(str, Enum): binary = "Binary" sasl = "SASL" http = "HTTP " -class HiveAuthenticationType(Enum): +class HiveAuthenticationType(str, Enum): anonymous = "Anonymous" username = "Username" @@ -245,37 +245,37 @@ class HiveAuthenticationType(Enum): windows_azure_hd_insight_service = "WindowsAzureHDInsightService" -class HBaseAuthenticationType(Enum): +class HBaseAuthenticationType(str, Enum): anonymous = "Anonymous" basic = "Basic" -class GoogleBigQueryAuthenticationType(Enum): +class GoogleBigQueryAuthenticationType(str, Enum): service_authentication = "ServiceAuthentication" user_authentication = "UserAuthentication" -class SapHanaAuthenticationType(Enum): +class SapHanaAuthenticationType(str, Enum): basic = "Basic" windows = "Windows" -class SftpAuthenticationType(Enum): +class SftpAuthenticationType(str, 
Enum): basic = "Basic" ssh_public_key = "SshPublicKey" -class FtpAuthenticationType(Enum): +class FtpAuthenticationType(str, Enum): basic = "Basic" anonymous = "Anonymous" -class HttpAuthenticationType(Enum): +class HttpAuthenticationType(str, Enum): basic = "Basic" anonymous = "Anonymous" @@ -284,7 +284,7 @@ class HttpAuthenticationType(Enum): client_certificate = "ClientCertificate" -class RestServiceAuthenticationType(Enum): +class RestServiceAuthenticationType(str, Enum): anonymous = "Anonymous" basic = "Basic" @@ -292,13 +292,13 @@ class RestServiceAuthenticationType(Enum): managed_service_identity = "ManagedServiceIdentity" -class MongoDbAuthenticationType(Enum): +class MongoDbAuthenticationType(str, Enum): basic = "Basic" anonymous = "Anonymous" -class ODataAuthenticationType(Enum): +class ODataAuthenticationType(str, Enum): basic = "Basic" anonymous = "Anonymous" @@ -307,42 +307,42 @@ class ODataAuthenticationType(Enum): managed_service_identity = "ManagedServiceIdentity" -class ODataAadServicePrincipalCredentialType(Enum): +class ODataAadServicePrincipalCredentialType(str, Enum): service_principal_key = "ServicePrincipalKey" service_principal_cert = "ServicePrincipalCert" -class TeradataAuthenticationType(Enum): +class TeradataAuthenticationType(str, Enum): basic = "Basic" windows = "Windows" -class Db2AuthenticationType(Enum): +class Db2AuthenticationType(str, Enum): basic = "Basic" -class SybaseAuthenticationType(Enum): +class SybaseAuthenticationType(str, Enum): basic = "Basic" windows = "Windows" -class DynamicsDeploymentType(Enum): +class DynamicsDeploymentType(str, Enum): online = "Online" on_premises_with_ifd = "OnPremisesWithIfd" -class DynamicsAuthenticationType(Enum): +class DynamicsAuthenticationType(str, Enum): office365 = "Office365" ifd = "Ifd" -class AvroCompressionCodec(Enum): +class AvroCompressionCodec(str, Enum): none = "none" deflate = "deflate" @@ -351,7 +351,7 @@ class AvroCompressionCodec(Enum): bzip2 = "bzip2" -class AzureFunctionActivityMethod(Enum): +class AzureFunctionActivityMethod(str, Enum): get = "GET" post = "POST" @@ -362,7 +362,7 @@ class AzureFunctionActivityMethod(Enum): trace = "TRACE" -class WebActivityMethod(Enum): +class WebActivityMethod(str, Enum): get = "GET" post = "POST" @@ -370,14 +370,14 @@ class WebActivityMethod(Enum): delete = "DELETE" -class NetezzaPartitionOption(Enum): +class NetezzaPartitionOption(str, Enum): none = "None" data_slice = "DataSlice" dynamic_range = "DynamicRange" -class CassandraSourceReadConsistencyLevels(Enum): +class CassandraSourceReadConsistencyLevels(str, Enum): all = "ALL" each_quorum = "EACH_QUORUM" @@ -391,21 +391,21 @@ class CassandraSourceReadConsistencyLevels(Enum): local_serial = "LOCAL_SERIAL" -class TeradataPartitionOption(Enum): +class TeradataPartitionOption(str, Enum): none = "None" hash = "Hash" dynamic_range = "DynamicRange" -class OraclePartitionOption(Enum): +class OraclePartitionOption(str, Enum): none = "None" physical_partitions_of_table = "PhysicalPartitionsOfTable" dynamic_range = "DynamicRange" -class StoredProcedureParameterType(Enum): +class StoredProcedureParameterType(str, Enum): string = "String" int_enum = "Int" @@ -416,7 +416,7 @@ class StoredProcedureParameterType(Enum): date_enum = "Date" -class SapTablePartitionOption(Enum): +class SapTablePartitionOption(str, Enum): none = "None" partition_on_int = "PartitionOnInt" @@ -426,67 +426,67 @@ class SapTablePartitionOption(Enum): partition_on_time = "PartitionOnTime" -class SalesforceSourceReadBehavior(Enum): +class 
SalesforceSourceReadBehavior(str, Enum): query = "Query" query_all = "QueryAll" -class SsisPackageLocationType(Enum): +class SsisPackageLocationType(str, Enum): ssisdb = "SSISDB" file = "File" -class HDInsightActivityDebugInfoOption(Enum): +class HDInsightActivityDebugInfoOption(str, Enum): none = "None" always = "Always" failure = "Failure" -class SalesforceSinkWriteBehavior(Enum): +class SalesforceSinkWriteBehavior(str, Enum): insert = "Insert" upsert = "Upsert" -class AzureSearchIndexWriteBehaviorType(Enum): +class AzureSearchIndexWriteBehaviorType(str, Enum): merge = "Merge" upload = "Upload" -class PolybaseSettingsRejectType(Enum): +class PolybaseSettingsRejectType(str, Enum): value = "value" percentage = "percentage" -class JsonWriteFilePattern(Enum): +class JsonWriteFilePattern(str, Enum): set_of_objects = "setOfObjects" array_of_objects = "arrayOfObjects" -class SapCloudForCustomerSinkWriteBehavior(Enum): +class SapCloudForCustomerSinkWriteBehavior(str, Enum): insert = "Insert" update = "Update" -class WebHookActivityMethod(Enum): +class WebHookActivityMethod(str, Enum): post = "POST" -class IntegrationRuntimeType(Enum): +class IntegrationRuntimeType(str, Enum): managed = "Managed" self_hosted = "SelfHosted" -class SelfHostedIntegrationRuntimeNodeStatus(Enum): +class SelfHostedIntegrationRuntimeNodeStatus(str, Enum): need_registration = "NeedRegistration" online = "Online" @@ -497,21 +497,21 @@ class SelfHostedIntegrationRuntimeNodeStatus(Enum): initialize_failed = "InitializeFailed" -class IntegrationRuntimeUpdateResult(Enum): +class IntegrationRuntimeUpdateResult(str, Enum): none = "None" succeed = "Succeed" fail = "Fail" -class IntegrationRuntimeInternalChannelEncryptionMode(Enum): +class IntegrationRuntimeInternalChannelEncryptionMode(str, Enum): not_set = "NotSet" ssl_encrypted = "SslEncrypted" not_encrypted = "NotEncrypted" -class ManagedIntegrationRuntimeNodeStatus(Enum): +class ManagedIntegrationRuntimeNodeStatus(str, Enum): starting = "Starting" available = "Available" @@ -519,13 +519,13 @@ class ManagedIntegrationRuntimeNodeStatus(Enum): unavailable = "Unavailable" -class IntegrationRuntimeEntityReferenceType(Enum): +class IntegrationRuntimeEntityReferenceType(str, Enum): integration_runtime_reference = "IntegrationRuntimeReference" linked_service_reference = "LinkedServiceReference" -class IntegrationRuntimeSsisCatalogPricingTier(Enum): +class IntegrationRuntimeSsisCatalogPricingTier(str, Enum): basic = "Basic" standard = "Standard" @@ -533,19 +533,19 @@ class IntegrationRuntimeSsisCatalogPricingTier(Enum): premium_rs = "PremiumRS" -class IntegrationRuntimeLicenseType(Enum): +class IntegrationRuntimeLicenseType(str, Enum): base_price = "BasePrice" license_included = "LicenseIncluded" -class IntegrationRuntimeEdition(Enum): +class IntegrationRuntimeEdition(str, Enum): standard = "Standard" enterprise = "Enterprise" -class SsisObjectMetadataType(Enum): +class SsisObjectMetadataType(str, Enum): folder = "Folder" project = "Project" @@ -553,7 +553,7 @@ class SsisObjectMetadataType(Enum): environment = "Environment" -class IntegrationRuntimeAuthKeyName(Enum): +class IntegrationRuntimeAuthKeyName(str, Enum): auth_key1 = "authKey1" auth_key2 = "authKey2" diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity.py index 2c31b8fda0ed..364dfd79d71a 100644 --- 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity.py @@ -15,10 +15,12 @@ class DataLakeAnalyticsUSQLActivity(ExecutionActivity): """Data Lake Analytics U-SQL activity. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Activity name. + :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str @@ -26,17 +28,17 @@ class DataLakeAnalyticsUSQLActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param script_path: Case-sensitive path to folder that contains the U-SQL - script. Type: string (or Expression with resultType string). + :param script_path: Required. Case-sensitive path to folder that contains + the U-SQL script. Type: string (or Expression with resultType string). :type script_path: object - :param script_linked_service: Script linked service reference. + :param script_linked_service: Required. Script linked service reference. :type script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param degree_of_parallelism: The maximum number of nodes simultaneously @@ -84,13 +86,13 @@ class DataLakeAnalyticsUSQLActivity(ExecutionActivity): 'compilation_mode': {'key': 'typeProperties.compilationMode', 'type': 'object'}, } - def __init__(self, name, script_path, script_linked_service, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, degree_of_parallelism=None, priority=None, parameters=None, runtime_version=None, compilation_mode=None): - super(DataLakeAnalyticsUSQLActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) - self.script_path = script_path - self.script_linked_service = script_linked_service - self.degree_of_parallelism = degree_of_parallelism - self.priority = priority - self.parameters = parameters - self.runtime_version = runtime_version - self.compilation_mode = compilation_mode + def __init__(self, **kwargs): + super(DataLakeAnalyticsUSQLActivity, self).__init__(**kwargs) + self.script_path = kwargs.get('script_path', None) + self.script_linked_service = kwargs.get('script_linked_service', None) + self.degree_of_parallelism = kwargs.get('degree_of_parallelism', None) + self.priority = kwargs.get('priority', None) + self.parameters = kwargs.get('parameters', None) + self.runtime_version = kwargs.get('runtime_version', None) + self.compilation_mode = kwargs.get('compilation_mode', None) self.type = 'DataLakeAnalyticsU-SQL' diff --git 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity_py3.py new file mode 100644 index 000000000000..22623aa3622c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity_py3.py @@ -0,0 +1,98 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class DataLakeAnalyticsUSQLActivity(ExecutionActivity): + """Data Lake Analytics U-SQL activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param script_path: Required. Case-sensitive path to folder that contains + the U-SQL script. Type: string (or Expression with resultType string). + :type script_path: object + :param script_linked_service: Required. Script linked service reference. + :type script_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param degree_of_parallelism: The maximum number of nodes simultaneously + used to run the job. Default value is 1. Type: integer (or Expression with + resultType integer), minimum: 1. + :type degree_of_parallelism: object + :param priority: Determines which jobs out of all that are queued should + be selected to run first. The lower the number, the higher the priority. + Default value is 1000. Type: integer (or Expression with resultType + integer), minimum: 1. + :type priority: object + :param parameters: Parameters for U-SQL job request. + :type parameters: dict[str, object] + :param runtime_version: Runtime version of the U-SQL engine to use. Type: + string (or Expression with resultType string). + :type runtime_version: object + :param compilation_mode: Compilation mode of U-SQL. Must be one of these + values : Semantic, Full and SingleBox. Type: string (or Expression with + resultType string). 
+ :type compilation_mode: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'script_path': {'required': True}, + 'script_linked_service': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, + 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, + 'degree_of_parallelism': {'key': 'typeProperties.degreeOfParallelism', 'type': 'object'}, + 'priority': {'key': 'typeProperties.priority', 'type': 'object'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, + 'runtime_version': {'key': 'typeProperties.runtimeVersion', 'type': 'object'}, + 'compilation_mode': {'key': 'typeProperties.compilationMode', 'type': 'object'}, + } + + def __init__(self, *, name: str, script_path, script_linked_service, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, degree_of_parallelism=None, priority=None, parameters=None, runtime_version=None, compilation_mode=None, **kwargs) -> None: + super(DataLakeAnalyticsUSQLActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.script_path = script_path + self.script_linked_service = script_linked_service + self.degree_of_parallelism = degree_of_parallelism + self.priority = priority + self.parameters = parameters + self.runtime_version = runtime_version + self.compilation_mode = compilation_mode + self.type = 'DataLakeAnalyticsU-SQL' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity.py index 1c28434b0ee4..a49bd973e2b9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity.py @@ -15,10 +15,12 @@ class DatabricksNotebookActivity(ExecutionActivity): """DatabricksNotebook activity. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Activity name. + :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str @@ -26,16 +28,16 @@ class DatabricksNotebookActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. 
:type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param notebook_path: The absolute path of the notebook to be run in the - Databricks Workspace. This path must begin with a slash. Type: string (or - Expression with resultType string). + :param notebook_path: Required. The absolute path of the notebook to be + run in the Databricks Workspace. This path must begin with a slash. Type: + string (or Expression with resultType string). :type notebook_path: object :param base_parameters: Base parameters to be used for each run of this job.If the notebook takes a parameter that is not specified, the default @@ -66,9 +68,9 @@ class DatabricksNotebookActivity(ExecutionActivity): 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, } - def __init__(self, name, notebook_path, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, base_parameters=None, libraries=None): - super(DatabricksNotebookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) - self.notebook_path = notebook_path - self.base_parameters = base_parameters - self.libraries = libraries + def __init__(self, **kwargs): + super(DatabricksNotebookActivity, self).__init__(**kwargs) + self.notebook_path = kwargs.get('notebook_path', None) + self.base_parameters = kwargs.get('base_parameters', None) + self.libraries = kwargs.get('libraries', None) self.type = 'DatabricksNotebook' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity_py3.py new file mode 100644 index 000000000000..7d2d464b7a1a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity_py3.py @@ -0,0 +1,76 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class DatabricksNotebookActivity(ExecutionActivity): + """DatabricksNotebook activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. 
+ :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param notebook_path: Required. The absolute path of the notebook to be + run in the Databricks Workspace. This path must begin with a slash. Type: + string (or Expression with resultType string). + :type notebook_path: object + :param base_parameters: Base parameters to be used for each run of this + job.If the notebook takes a parameter that is not specified, the default + value from the notebook will be used. + :type base_parameters: dict[str, object] + :param libraries: A list of libraries to be installed on the cluster that + will execute the job. + :type libraries: list[dict[str, object]] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'notebook_path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'notebook_path': {'key': 'typeProperties.notebookPath', 'type': 'object'}, + 'base_parameters': {'key': 'typeProperties.baseParameters', 'type': '{object}'}, + 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + } + + def __init__(self, *, name: str, notebook_path, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, base_parameters=None, libraries=None, **kwargs) -> None: + super(DatabricksNotebookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.notebook_path = notebook_path + self.base_parameters = base_parameters + self.libraries = libraries + self.type = 'DatabricksNotebook' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity.py index 73348cc09b72..51e7245d12fe 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity.py @@ -15,10 +15,12 @@ class DatabricksSparkJarActivity(ExecutionActivity): """DatabricksSparkJar activity. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Activity name. + :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str @@ -26,16 +28,17 @@ class DatabricksSparkJarActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. 
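With `notebook_path` now marked required (enforced through `_validation` rather than the old positional signature), constructing the notebook activity reads as sketched below. The workspace path and parameter names are illustrative, and a `linked_service_name` pointing at a Databricks linked service would normally be supplied as well:

    from azure.mgmt.datafactory.models import DatabricksNotebookActivity

    notebook_activity = DatabricksNotebookActivity(
        name='RunNightlyNotebook',
        notebook_path='/Shared/nightly-etl',         # must begin with a slash
        base_parameters={'run_date': '2019-06-07'},  # parameters not set here fall
                                                     # back to the notebook defaults
    )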
:type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param main_class_name: The full name of the class containing the main - method to be executed. This class must be contained in a JAR provided as a - library. Type: string (or Expression with resultType string). + :param main_class_name: Required. The full name of the class containing + the main method to be executed. This class must be contained in a JAR + provided as a library. Type: string (or Expression with resultType + string). :type main_class_name: object :param parameters: Parameters that will be passed to the main method. :type parameters: list[object] @@ -64,9 +67,9 @@ class DatabricksSparkJarActivity(ExecutionActivity): 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, } - def __init__(self, name, main_class_name, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, parameters=None, libraries=None): - super(DatabricksSparkJarActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) - self.main_class_name = main_class_name - self.parameters = parameters - self.libraries = libraries + def __init__(self, **kwargs): + super(DatabricksSparkJarActivity, self).__init__(**kwargs) + self.main_class_name = kwargs.get('main_class_name', None) + self.parameters = kwargs.get('parameters', None) + self.libraries = kwargs.get('libraries', None) self.type = 'DatabricksSparkJar' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity_py3.py new file mode 100644 index 000000000000..6c33f3b51d1e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity_py3.py @@ -0,0 +1,75 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class DatabricksSparkJarActivity(ExecutionActivity): + """DatabricksSparkJar activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. 
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param main_class_name: Required. The full name of the class containing + the main method to be executed. This class must be contained in a JAR + provided as a library. Type: string (or Expression with resultType + string). + :type main_class_name: object + :param parameters: Parameters that will be passed to the main method. + :type parameters: list[object] + :param libraries: A list of libraries to be installed on the cluster that + will execute the job. + :type libraries: list[dict[str, object]] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'main_class_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'main_class_name': {'key': 'typeProperties.mainClassName', 'type': 'object'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, + 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + } + + def __init__(self, *, name: str, main_class_name, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, parameters=None, libraries=None, **kwargs) -> None: + super(DatabricksSparkJarActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.main_class_name = main_class_name + self.parameters = parameters + self.libraries = libraries + self.type = 'DatabricksSparkJar' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity.py index f476fd1019a9..56178d3882c5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity.py @@ -15,10 +15,12 @@ class DatabricksSparkPythonActivity(ExecutionActivity): """DatabricksSparkPython activity. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Activity name. + :param name: Required. Activity name. :type name: str :param description: Activity description. 
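The Spark JAR activity gets the same keyword-only treatment; `main_class_name` must name a class inside a JAR attached through `libraries`. A sketch with illustrative values, assuming the `{'jar': ...}` library spec shape that this pass-through property forwards to Databricks:

    from azure.mgmt.datafactory.models import DatabricksSparkJarActivity

    jar_activity = DatabricksSparkJarActivity(
        name='RunSparkJob',
        main_class_name='com.example.etl.Main',              # required
        parameters=['--date', '2019-06-07'],                 # passed to main()
        libraries=[{'jar': 'dbfs:/libs/etl-assembly.jar'}],  # JAR providing the class
    )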
:type description: str @@ -26,15 +28,16 @@ class DatabricksSparkPythonActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param python_file: The URI of the Python file to be executed. DBFS paths - are supported. Type: string (or Expression with resultType string). + :param python_file: Required. The URI of the Python file to be executed. + DBFS paths are supported. Type: string (or Expression with resultType + string). :type python_file: object :param parameters: Command line parameters that will be passed to the Python file. @@ -64,9 +67,9 @@ class DatabricksSparkPythonActivity(ExecutionActivity): 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, } - def __init__(self, name, python_file, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, parameters=None, libraries=None): - super(DatabricksSparkPythonActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) - self.python_file = python_file - self.parameters = parameters - self.libraries = libraries + def __init__(self, **kwargs): + super(DatabricksSparkPythonActivity, self).__init__(**kwargs) + self.python_file = kwargs.get('python_file', None) + self.parameters = kwargs.get('parameters', None) + self.libraries = kwargs.get('libraries', None) self.type = 'DatabricksSparkPython' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity_py3.py new file mode 100644 index 000000000000..5b16d0d5e9ef --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity_py3.py @@ -0,0 +1,75 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class DatabricksSparkPythonActivity(ExecutionActivity): + """DatabricksSparkPython activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. 
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param python_file: Required. The URI of the Python file to be executed. + DBFS paths are supported. Type: string (or Expression with resultType + string). + :type python_file: object + :param parameters: Command line parameters that will be passed to the + Python file. + :type parameters: list[object] + :param libraries: A list of libraries to be installed on the cluster that + will execute the job. + :type libraries: list[dict[str, object]] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'python_file': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'python_file': {'key': 'typeProperties.pythonFile', 'type': 'object'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, + 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + } + + def __init__(self, *, name: str, python_file, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, parameters=None, libraries=None, **kwargs) -> None: + super(DatabricksSparkPythonActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.python_file = python_file + self.parameters = parameters + self.libraries = libraries + self.type = 'DatabricksSparkPython' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py index 49eea57e719a..e8e2974b4481 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py @@ -49,6 +49,8 @@ class Dataset(Model): JsonDataset, DelimitedTextDataset, ParquetDataset, AvroDataset, AmazonS3Dataset + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -61,7 +63,7 @@ class Dataset(Model): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. 
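And the Python flavor, where `python_file` is the one required type property and a DBFS URI is the documented form. An illustrative sketch:

    from azure.mgmt.datafactory.models import DatabricksSparkPythonActivity

    python_activity = DatabricksSparkPythonActivity(
        name='RunPySparkScript',
        python_file='dbfs:/scripts/transform.py',  # required, DBFS path
        parameters=['--env', 'prod'],              # forwarded to the script
    )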
:type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -73,7 +75,7 @@ class Dataset(Model): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str """ @@ -98,14 +100,14 @@ class Dataset(Model): 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 
'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'Json': 'JsonDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None): - super(Dataset, self).__init__() - self.additional_properties = additional_properties - self.description = description - self.structure = structure - self.schema = schema - self.linked_service_name = linked_service_name - self.parameters = parameters - self.annotations = annotations - self.folder = folder + def __init__(self, **kwargs): + super(Dataset, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.description = kwargs.get('description', None) + self.structure = kwargs.get('structure', None) + self.schema = kwargs.get('schema', None) + self.linked_service_name = kwargs.get('linked_service_name', None) + self.parameters = kwargs.get('parameters', None) + self.annotations = kwargs.get('annotations', None) + self.folder = kwargs.get('folder', None) self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression.py index d31981df108e..71b041c5eb5b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression.py @@ -15,10 +15,12 @@ class DatasetBZip2Compression(DatasetCompression): """The BZip2 compression method used on a dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. 
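The `_subtype_map` above is what makes the `type: Required. Constant filled by server` docstring accurate: on deserialization msrest reads the wire `type` value and dispatches to the matching subclass, and on construction each subclass pins `self.type` after calling the base `__init__`, so callers never set it themselves. The compression hierarchy that follows uses the identical discriminator pattern; a minimal sketch:

    from azure.mgmt.datafactory.models import DatasetBZip2Compression

    compression = DatasetBZip2Compression()
    # The discriminator is fixed by the subclass, not passed by the caller.
    assert compression.type == 'BZip2'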
:type type: str """ @@ -26,6 +28,11 @@ class DatasetBZip2Compression(DatasetCompression): 'type': {'required': True}, } - def __init__(self, additional_properties=None): - super(DatasetBZip2Compression, self).__init__(additional_properties=additional_properties) + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(DatasetBZip2Compression, self).__init__(**kwargs) self.type = 'BZip2' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression_py3.py new file mode 100644 index 000000000000..f97af4588e0a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression_py3.py @@ -0,0 +1,38 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_compression_py3 import DatasetCompression + + +class DatasetBZip2Compression(DatasetCompression): + """The BZip2 compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(DatasetBZip2Compression, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'BZip2' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression.py index 78066e1cb4d8..c0c4e3d52624 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression.py @@ -19,10 +19,12 @@ class DatasetCompression(Model): sub-classes are: DatasetZipDeflateCompression, DatasetDeflateCompression, DatasetGZipCompression, DatasetBZip2Compression + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. 
:type type: str """ @@ -39,7 +41,7 @@ class DatasetCompression(Model): 'type': {'ZipDeflate': 'DatasetZipDeflateCompression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'BZip2': 'DatasetBZip2Compression'} } - def __init__(self, additional_properties=None): - super(DatasetCompression, self).__init__() - self.additional_properties = additional_properties + def __init__(self, **kwargs): + super(DatasetCompression, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression_py3.py new file mode 100644 index 000000000000..3b10abc69abf --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression_py3.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DatasetCompression(Model): + """The compression method used on a dataset. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: DatasetZipDeflateCompression, DatasetDeflateCompression, + DatasetGZipCompression, DatasetBZip2Compression + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'ZipDeflate': 'DatasetZipDeflateCompression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'BZip2': 'DatasetBZip2Compression'} + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(DatasetCompression, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py index e362d747db73..9c97e2bfa5e3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py @@ -15,10 +15,12 @@ class DatasetDeflateCompression(DatasetCompression): """The Deflate compression method used on a dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Constant filled by server. + :param type: Required. 
Constant filled by server. :type type: str :param level: The Deflate compression level. :type level: object @@ -34,7 +36,7 @@ class DatasetDeflateCompression(DatasetCompression): 'level': {'key': 'level', 'type': 'object'}, } - def __init__(self, additional_properties=None, level=None): - super(DatasetDeflateCompression, self).__init__(additional_properties=additional_properties) - self.level = level + def __init__(self, **kwargs): + super(DatasetDeflateCompression, self).__init__(**kwargs) + self.level = kwargs.get('level', None) self.type = 'Deflate' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py new file mode 100644 index 000000000000..11d00081bc1c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_compression_py3 import DatasetCompression + + +class DatasetDeflateCompression(DatasetCompression): + """The Deflate compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param level: The Deflate compression level. 
+ :type level: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None: + super(DatasetDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs) + self.level = level + self.type = 'Deflate' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder.py index caeb62a3416b..882c84a1e84c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder.py @@ -24,6 +24,6 @@ class DatasetFolder(Model): 'name': {'key': 'name', 'type': 'str'}, } - def __init__(self, name=None): - super(DatasetFolder, self).__init__() - self.name = name + def __init__(self, **kwargs): + super(DatasetFolder, self).__init__(**kwargs) + self.name = kwargs.get('name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder_py3.py new file mode 100644 index 000000000000..ea7fc313f967 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder_py3.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DatasetFolder(Model): + """The folder that this Dataset is in. If not specified, Dataset will appear + at the root level. + + :param name: The name of the folder that this Dataset is in. + :type name: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__(self, *, name: str=None, **kwargs) -> None: + super(DatasetFolder, self).__init__(**kwargs) + self.name = name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py index 99064460bd1f..4925127c7f0f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py @@ -15,10 +15,12 @@ class DatasetGZipCompression(DatasetCompression): """The GZip compression method used on a dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param level: The GZip compression level. 
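For the Deflate and GZip variants, `level` is typed as `object` so it can carry either a literal level or a Data Factory expression. A sketch, assuming the service-side level names ('Optimal', 'Fastest') defined by the DatasetCompressionLevel enum elsewhere in this package:

    from azure.mgmt.datafactory.models import DatasetGZipCompression

    gzip_compression = DatasetGZipCompression(level='Optimal')
    assert gzip_compression.type == 'GZip'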
:type level: object @@ -34,7 +36,7 @@ class DatasetGZipCompression(DatasetCompression): 'level': {'key': 'level', 'type': 'object'}, } - def __init__(self, additional_properties=None, level=None): - super(DatasetGZipCompression, self).__init__(additional_properties=additional_properties) - self.level = level + def __init__(self, **kwargs): + super(DatasetGZipCompression, self).__init__(**kwargs) + self.level = kwargs.get('level', None) self.type = 'GZip' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py new file mode 100644 index 000000000000..97346e06366d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_compression_py3 import DatasetCompression + + +class DatasetGZipCompression(DatasetCompression): + """The GZip compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param level: The GZip compression level. + :type level: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None: + super(DatasetGZipCompression, self).__init__(additional_properties=additional_properties, **kwargs) + self.level = level + self.type = 'GZip' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location.py index 3b14089552f7..2c318a91cccb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location.py @@ -15,10 +15,12 @@ class DatasetLocation(Model): """Dataset location. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Type of dataset storage location. + :param type: Required. Type of dataset storage location. :type type: str :param folder_path: Specify the folder path of dataset. 
Type: string (or Expression with resultType string) @@ -39,9 +41,9 @@ class DatasetLocation(Model): 'file_name': {'key': 'fileName', 'type': 'object'}, } - def __init__(self, type, additional_properties=None, folder_path=None, file_name=None): - super(DatasetLocation, self).__init__() - self.additional_properties = additional_properties - self.type = type - self.folder_path = folder_path - self.file_name = file_name + def __init__(self, **kwargs): + super(DatasetLocation, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = kwargs.get('type', None) + self.folder_path = kwargs.get('folder_path', None) + self.file_name = kwargs.get('file_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location_py3.py new file mode 100644 index 000000000000..d4e32d753197 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location_py3.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DatasetLocation(Model): + """Dataset location. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(DatasetLocation, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type + self.folder_path = folder_path + self.file_name = file_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py new file mode 100644 index 000000000000..82550c2a0df8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py @@ -0,0 +1,113 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class Dataset(Model): + """The Azure Data Factory nested object which identifies data within different + data stores, such as tables, files, folders, and documents. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: GoogleAdWordsObjectDataset, AzureDataExplorerTableDataset, + OracleServiceCloudObjectDataset, DynamicsAXResourceDataset, + ResponsysObjectDataset, SalesforceMarketingCloudObjectDataset, + VerticaTableDataset, NetezzaTableDataset, ZohoObjectDataset, + XeroObjectDataset, SquareObjectDataset, SparkObjectDataset, + ShopifyObjectDataset, ServiceNowObjectDataset, QuickBooksObjectDataset, + PrestoObjectDataset, PhoenixObjectDataset, PaypalObjectDataset, + MarketoObjectDataset, AzureMariaDBTableDataset, MariaDBTableDataset, + MagentoObjectDataset, JiraObjectDataset, ImpalaObjectDataset, + HubspotObjectDataset, HiveObjectDataset, HBaseObjectDataset, + GreenplumTableDataset, GoogleBigQueryObjectDataset, EloquaObjectDataset, + DrillTableDataset, CouchbaseTableDataset, ConcurObjectDataset, + AzurePostgreSqlTableDataset, AmazonMWSObjectDataset, HttpDataset, + AzureSearchIndexDataset, WebTableDataset, SapTableResourceDataset, + RestResourceDataset, SqlServerTableDataset, SapOpenHubTableDataset, + SapHanaTableDataset, SapEccResourceDataset, + SapCloudForCustomerResourceDataset, SapBwCubeDataset, SybaseTableDataset, + SalesforceServiceCloudObjectDataset, SalesforceObjectDataset, + MicrosoftAccessTableDataset, PostgreSqlTableDataset, MySqlTableDataset, + OdbcTableDataset, InformixTableDataset, RelationalTableDataset, + Db2TableDataset, AmazonRedshiftTableDataset, AzureMySqlTableDataset, + TeradataTableDataset, OracleTableDataset, ODataResourceDataset, + CosmosDbMongoDbApiCollectionDataset, MongoDbV2CollectionDataset, + MongoDbCollectionDataset, FileShareDataset, Office365Dataset, + AzureBlobFSDataset, AzureDataLakeStoreDataset, + CommonDataServiceForAppsEntityDataset, DynamicsCrmEntityDataset, + DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, + CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, + AzureSqlTableDataset, AzureTableDataset, AzureBlobDataset, BinaryDataset, + JsonDataset, DelimitedTextDataset, ParquetDataset, AvroDataset, + AmazonS3Dataset + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. 
+ :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 
'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'Json': 'JsonDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(Dataset, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.description = description + self.structure = structure + self.schema = schema + self.linked_service_name = linked_service_name + self.parameters = parameters + self.annotations = annotations + self.folder = folder + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference.py index 006074933fe7..ca3d385f31ce 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference.py @@ -18,9 +18,12 @@ class DatasetReference(Model): Variables are only populated by the server, and will be ignored when sending a request. - :ivar type: Dataset reference type. Default value: "DatasetReference" . + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Dataset reference type. Default value: + "DatasetReference" . :vartype type: str - :param reference_name: Reference dataset name. + :param reference_name: Required. Reference dataset name. :type reference_name: str :param parameters: Arguments for dataset. 
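Since `linked_service_name` is the only required user-supplied field on every Dataset, a concrete dataset now reads like the sketch below. AzureBlobDataset is one of the subclasses registered in the `_subtype_map` above; the reference and folder names are illustrative:

    from azure.mgmt.datafactory.models import (
        AzureBlobDataset,
        LinkedServiceReference,
    )

    blob_dataset = AzureBlobDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='BlobStorageLinkedService'),  # required on all datasets
        folder_path='raw/events',                        # optional subclass property
    )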
:type parameters: dict[str, object] @@ -39,7 +42,7 @@ class DatasetReference(Model): type = "DatasetReference" - def __init__(self, reference_name, parameters=None): - super(DatasetReference, self).__init__() - self.reference_name = reference_name - self.parameters = parameters + def __init__(self, **kwargs): + super(DatasetReference, self).__init__(**kwargs) + self.reference_name = kwargs.get('reference_name', None) + self.parameters = kwargs.get('parameters', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference_py3.py new file mode 100644 index 000000000000..80162fd77da1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference_py3.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DatasetReference(Model): + """Dataset reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Dataset reference type. Default value: + "DatasetReference" . + :vartype type: str + :param reference_name: Required. Reference dataset name. + :type reference_name: str + :param parameters: Arguments for dataset. + :type parameters: dict[str, object] + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + type = "DatasetReference" + + def __init__(self, *, reference_name: str, parameters=None, **kwargs) -> None: + super(DatasetReference, self).__init__(**kwargs) + self.reference_name = reference_name + self.parameters = parameters diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource.py index ec6a78c21f79..a68fb563e425 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource.py @@ -18,6 +18,8 @@ class DatasetResource(SubResource): Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. + :ivar id: The resource identifier. :vartype id: str :ivar name: The resource name. @@ -26,7 +28,7 @@ class DatasetResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param properties: Dataset properties. + :param properties: Required. Dataset properties. 
:type properties: ~azure.mgmt.datafactory.models.Dataset """ @@ -46,6 +48,6 @@ class DatasetResource(SubResource): 'properties': {'key': 'properties', 'type': 'Dataset'}, } - def __init__(self, properties): - super(DatasetResource, self).__init__() - self.properties = properties + def __init__(self, **kwargs): + super(DatasetResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource_py3.py new file mode 100644 index 000000000000..6eb099dcb884 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource_py3.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .sub_resource_py3 import SubResource + + +class DatasetResource(SubResource): + """Dataset resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Dataset properties. + :type properties: ~azure.mgmt.datafactory.models.Dataset + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'Dataset'}, + } + + def __init__(self, *, properties, **kwargs) -> None: + super(DatasetResource, self).__init__(**kwargs) + self.properties = properties diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format.py index 2ae233fac19f..b3160565230d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format.py @@ -19,6 +19,8 @@ class DatasetStorageFormat(Model): sub-classes are: ParquetFormat, OrcFormat, AvroFormat, JsonFormat, TextFormat + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -28,7 +30,7 @@ class DatasetStorageFormat(Model): :param deserializer: Deserializer. Type: string (or Expression with resultType string). :type deserializer: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. 
:type type: str """ @@ -47,9 +49,9 @@ class DatasetStorageFormat(Model): 'type': {'ParquetFormat': 'ParquetFormat', 'OrcFormat': 'OrcFormat', 'AvroFormat': 'AvroFormat', 'JsonFormat': 'JsonFormat', 'TextFormat': 'TextFormat'} } - def __init__(self, additional_properties=None, serializer=None, deserializer=None): - super(DatasetStorageFormat, self).__init__() - self.additional_properties = additional_properties - self.serializer = serializer - self.deserializer = deserializer + def __init__(self, **kwargs): + super(DatasetStorageFormat, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.serializer = kwargs.get('serializer', None) + self.deserializer = kwargs.get('deserializer', None) self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format_py3.py new file mode 100644 index 000000000000..faf746642d9e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DatasetStorageFormat(Model): + """The format definition of a storage. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ParquetFormat, OrcFormat, AvroFormat, JsonFormat, + TextFormat + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param serializer: Serializer. Type: string (or Expression with resultType + string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with + resultType string). + :type deserializer: object + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'ParquetFormat': 'ParquetFormat', 'OrcFormat': 'OrcFormat', 'AvroFormat': 'AvroFormat', 'JsonFormat': 'JsonFormat', 'TextFormat': 'TextFormat'} + } + + def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, **kwargs) -> None: + super(DatasetStorageFormat, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.serializer = serializer + self.deserializer = deserializer + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py index 86f2b69b7234..ed80bf3cbcf2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py @@ -15,10 +15,12 @@ class DatasetZipDeflateCompression(DatasetCompression): """The ZipDeflate compression method used on a dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param level: The ZipDeflate compression level. :type level: object @@ -34,7 +36,7 @@ class DatasetZipDeflateCompression(DatasetCompression): 'level': {'key': 'level', 'type': 'object'}, } - def __init__(self, additional_properties=None, level=None): - super(DatasetZipDeflateCompression, self).__init__(additional_properties=additional_properties) - self.level = level + def __init__(self, **kwargs): + super(DatasetZipDeflateCompression, self).__init__(**kwargs) + self.level = kwargs.get('level', None) self.type = 'ZipDeflate' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py new file mode 100644 index 000000000000..20abd6fe1088 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_compression_py3 import DatasetCompression + + +class DatasetZipDeflateCompression(DatasetCompression): + """The ZipDeflate compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param level: The ZipDeflate compression level. + :type level: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None: + super(DatasetZipDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs) + self.level = level + self.type = 'ZipDeflate' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service.py index c869ed972ad1..d163d2b93c18 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service.py @@ -15,6 +15,8 @@ class Db2LinkedService(LinkedService): """Linked service for DB2 data source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,13 +31,13 @@ class Db2LinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param server: Server name for connection. Type: string (or Expression - with resultType string). + :param server: Required. Server name for connection. Type: string (or + Expression with resultType string). :type server: object - :param database: Database name for connection. Type: string (or Expression - with resultType string). + :param database: Required. Database name for connection. Type: string (or + Expression with resultType string). :type database: object :param authentication_type: AuthenticationType to be used for connection. 
Possible values include: 'Basic' @@ -73,12 +75,12 @@ class Db2LinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, server, database, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, authentication_type=None, username=None, password=None, encrypted_credential=None): - super(Db2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.server = server - self.database = database - self.authentication_type = authentication_type - self.username = username - self.password = password - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(Db2LinkedService, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.database = kwargs.get('database', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'Db2' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service_py3.py new file mode 100644 index 000000000000..44d784fa9bde --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class Db2LinkedService(LinkedService): + """Linked service for DB2 data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. Server name for connection. Type: string (or + Expression with resultType string). + :type server: object + :param database: Required. Database name for connection. Type: string (or + Expression with resultType string). + :type database: object + :param authentication_type: AuthenticationType to be used for connection. + Possible values include: 'Basic' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.Db2AuthenticationType + :param username: Username for authentication. 
Type: string (or Expression + with resultType string). + :type username: object + :param password: Password for authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, server, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(Db2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.server = server + self.database = database + self.authentication_type = authentication_type + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'Db2' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source.py index 469c22c5964a..a6e8c31ffa1f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source.py @@ -15,6 +15,8 @@ class Db2Source(CopySource): """A copy activity source for Db2 databases. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class Db2Source(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType string). 
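# ---------------------------------------------------------------------------
# Editor's note: a minimal, hypothetical usage sketch; not part of the
# generated patch. After this change the Python 2 model (db2_source.py) is
# built from **kwargs, while db2_source_py3.py keeps an explicit
# keyword-only signature; both fill in `type` themselves. The query text
# and retry values below are assumed example values.
from azure.mgmt.datafactory.models import Db2Source

db2_source = Db2Source(
    query="SELECT * FROM SALES.ORDERS",  # Type: string (or Expression with resultType string)
    source_retry_count=2,                # optional; integer or Expression
    max_concurrent_connections=4,        # optional; integer or Expression
)
assert db2_source.type == 'Db2Source'   # constant filled by the constructor, not the caller
# ---------------------------------------------------------------------------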
@@ -49,7 +51,7 @@ class Db2Source(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(Db2Source, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'Db2Source' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source_py3.py new file mode 100644 index 000000000000..20b169699ae0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class Db2Source(CopySource): + """A copy activity source for Db2 databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'Db2Source' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_table_dataset.py index d9ef4fa69d04..7092d5fc6cb3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_table_dataset.py @@ -15,6 +15,8 @@ class Db2TableDataset(Dataset): """The Db2 table dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class Db2TableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class Db2TableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. 
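# ---------------------------------------------------------------------------
# Editor's note: a hypothetical sketch, not part of the generated patch,
# showing the schema/table split this dataset introduces. `table_name` is
# being retired in favour of `db2_table_dataset_schema` and `table`, which
# serialize to typeProperties.schema and typeProperties.table respectively.
# The linked-service reference name and table identifiers below are
# assumptions for illustration.
from azure.mgmt.datafactory.models import Db2TableDataset, LinkedServiceReference

db2_dataset = Db2TableDataset(
    linked_service_name=LinkedServiceReference(reference_name="Db2LinkedService"),
    db2_table_dataset_schema="DB2ADMIN",  # typeProperties.schema
    table="ORDERS",                       # preferred over the retired table_name
)
assert db2_dataset.type == 'Db2Table'    # constant filled by the constructor
# ---------------------------------------------------------------------------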
@@ -72,9 +74,9 @@ class Db2TableDataset(Dataset): 'table': {'key': 'typeProperties.table', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, db2_table_dataset_schema=None, table=None): - super(Db2TableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name - self.db2_table_dataset_schema = db2_table_dataset_schema - self.table = table + def __init__(self, **kwargs): + super(Db2TableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.db2_table_dataset_schema = kwargs.get('db2_table_dataset_schema', None) + self.table = kwargs.get('table', None) self.type = 'Db2Table' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_table_dataset_py3.py new file mode 100644 index 000000000000..3fa296454a69 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_table_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class Db2TableDataset(Dataset): + """The Db2 table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param db2_table_dataset_schema: The Db2 schema name. Type: string (or + Expression with resultType string). 
+ :type db2_table_dataset_schema: object + :param table: The Db2 table name. Type: string (or Expression with + resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'db2_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, db2_table_dataset_schema=None, table=None, **kwargs) -> None: + super(Db2TableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.db2_table_dataset_schema = db2_table_dataset_schema + self.table = table + self.type = 'Db2Table' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity.py index 232858510c29..34ba33a414d5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity.py @@ -15,10 +15,12 @@ class DeleteActivity(ExecutionActivity): """Delete activity. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Activity name. + :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str @@ -26,7 +28,7 @@ class DeleteActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: @@ -48,7 +50,7 @@ class DeleteActivity(ExecutionActivity): when enableLogging is true. :type log_storage_settings: ~azure.mgmt.datafactory.models.LogStorageSettings - :param dataset: Delete activity dataset reference. + :param dataset: Required. Delete activity dataset reference. 
:type dataset: ~azure.mgmt.datafactory.models.DatasetReference """ @@ -75,11 +77,11 @@ class DeleteActivity(ExecutionActivity): 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, } - def __init__(self, name, dataset, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, recursive=None, max_concurrent_connections=None, enable_logging=None, log_storage_settings=None): - super(DeleteActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) - self.recursive = recursive - self.max_concurrent_connections = max_concurrent_connections - self.enable_logging = enable_logging - self.log_storage_settings = log_storage_settings - self.dataset = dataset + def __init__(self, **kwargs): + super(DeleteActivity, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + self.enable_logging = kwargs.get('enable_logging', None) + self.log_storage_settings = kwargs.get('log_storage_settings', None) + self.dataset = kwargs.get('dataset', None) self.type = 'Delete' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity_py3.py new file mode 100644 index 000000000000..5107d9a3381a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity_py3.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class DeleteActivity(ExecutionActivity): + """Delete activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param recursive: If true, files or sub-folders under current folder path + will be deleted recursively. Default is false. Type: boolean (or + Expression with resultType boolean). + :type recursive: object + :param max_concurrent_connections: The max concurrent connections to + connect data source at the same time. 
+ :type max_concurrent_connections: int + :param enable_logging: Whether to record detailed logs of delete-activity + execution. Default value is false. Type: boolean (or Expression with + resultType boolean). + :type enable_logging: object + :param log_storage_settings: Log storage settings customer need to provide + when enableLogging is true. + :type log_storage_settings: + ~azure.mgmt.datafactory.models.LogStorageSettings + :param dataset: Required. Delete activity dataset reference. + :type dataset: ~azure.mgmt.datafactory.models.DatasetReference + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'max_concurrent_connections': {'minimum': 1}, + 'dataset': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'recursive': {'key': 'typeProperties.recursive', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'typeProperties.maxConcurrentConnections', 'type': 'int'}, + 'enable_logging': {'key': 'typeProperties.enableLogging', 'type': 'object'}, + 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + } + + def __init__(self, *, name: str, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, recursive=None, max_concurrent_connections: int=None, enable_logging=None, log_storage_settings=None, **kwargs) -> None: + super(DeleteActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.recursive = recursive + self.max_concurrent_connections = max_concurrent_connections + self.enable_logging = enable_logging + self.log_storage_settings = log_storage_settings + self.dataset = dataset + self.type = 'Delete' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset.py index c2bd4a61cd0a..bfee26fcd12c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset.py @@ -15,6 +15,8 @@ class DelimitedTextDataset(Dataset): """Delimited text dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class DelimitedTextDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. 
:type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,9 +41,9 @@ class DelimitedTextDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param location: The location of the delimited text storage. + :param location: Required. The location of the delimited text storage. :type location: ~azure.mgmt.datafactory.models.DatasetLocation :param column_delimiter: The column delimiter. Type: string (or Expression with resultType string). @@ -105,16 +107,16 @@ class DelimitedTextDataset(Dataset): 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, } - def __init__(self, linked_service_name, location, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, column_delimiter=None, row_delimiter=None, encoding_name=None, compression_codec=None, compression_level=None, quote_char=None, escape_char=None, first_row_as_header=None, null_value=None): - super(DelimitedTextDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.location = location - self.column_delimiter = column_delimiter - self.row_delimiter = row_delimiter - self.encoding_name = encoding_name - self.compression_codec = compression_codec - self.compression_level = compression_level - self.quote_char = quote_char - self.escape_char = escape_char - self.first_row_as_header = first_row_as_header - self.null_value = null_value + def __init__(self, **kwargs): + super(DelimitedTextDataset, self).__init__(**kwargs) + self.location = kwargs.get('location', None) + self.column_delimiter = kwargs.get('column_delimiter', None) + self.row_delimiter = kwargs.get('row_delimiter', None) + self.encoding_name = kwargs.get('encoding_name', None) + self.compression_codec = kwargs.get('compression_codec', None) + self.compression_level = kwargs.get('compression_level', None) + self.quote_char = kwargs.get('quote_char', None) + self.escape_char = kwargs.get('escape_char', None) + self.first_row_as_header = kwargs.get('first_row_as_header', None) + self.null_value = kwargs.get('null_value', None) self.type = 'DelimitedText' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset_py3.py new file mode 100644 index 000000000000..c2597e6a022b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset_py3.py @@ -0,0 +1,122 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class DelimitedTextDataset(Dataset): + """Delimited text dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the delimited text storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param column_delimiter: The column delimiter. Type: string (or Expression + with resultType string). + :type column_delimiter: object + :param row_delimiter: The row delimiter. Type: string (or Expression with + resultType string). + :type row_delimiter: object + :param encoding_name: The code page name of the preferred encoding. If + miss, the default value is UTF-8, unless BOM denotes another Unicode + encoding. Refer to the name column of the table in the following link to + set supported values: + https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string + (or Expression with resultType string). + :type encoding_name: object + :param compression_codec: + :type compression_codec: object + :param compression_level: The data compression method used for + DelimitedText. + :type compression_level: object + :param quote_char: The quote character. Type: string (or Expression with + resultType string). + :type quote_char: object + :param escape_char: The escape character. Type: string (or Expression with + resultType string). + :type escape_char: object + :param first_row_as_header: When used as input, treat the first row of + data as headers. When used as output,write the headers into the output as + the first row of data. The default value is false. Type: boolean (or + Expression with resultType boolean). + :type first_row_as_header: object + :param null_value: The null value string. Type: string (or Expression with + resultType string). 
+ :type null_value: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'}, + 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, + 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, + 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'}, + 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, + 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, + 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, + 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, column_delimiter=None, row_delimiter=None, encoding_name=None, compression_codec=None, compression_level=None, quote_char=None, escape_char=None, first_row_as_header=None, null_value=None, **kwargs) -> None: + super(DelimitedTextDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.location = location + self.column_delimiter = column_delimiter + self.row_delimiter = row_delimiter + self.encoding_name = encoding_name + self.compression_codec = compression_codec + self.compression_level = compression_level + self.quote_char = quote_char + self.escape_char = escape_char + self.first_row_as_header = first_row_as_header + self.null_value = null_value + self.type = 'DelimitedText' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings.py index 54f210b41f31..364b103c426a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings.py @@ -15,10 +15,12 @@ class DelimitedTextReadSettings(FormatReadSettings): """Delimited text read settings. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: The read setting type. + :param type: Required. The read setting type. 
:type type: str :param skip_line_count: Indicates the number of non-empty rows to skip when reading data from input files. Type: integer (or Expression with @@ -36,6 +38,6 @@ class DelimitedTextReadSettings(FormatReadSettings): 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, } - def __init__(self, type, additional_properties=None, skip_line_count=None): - super(DelimitedTextReadSettings, self).__init__(additional_properties=additional_properties, type=type) - self.skip_line_count = skip_line_count + def __init__(self, **kwargs): + super(DelimitedTextReadSettings, self).__init__(**kwargs) + self.skip_line_count = kwargs.get('skip_line_count', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings_py3.py new file mode 100644 index 000000000000..62aa0327cfb9 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_settings_py3.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .format_read_settings_py3 import FormatReadSettings + + +class DelimitedTextReadSettings(FormatReadSettings): + """Delimited text read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param skip_line_count: Indicates the number of non-empty rows to skip + when reading data from input files. Type: integer (or Expression with + resultType integer). + :type skip_line_count: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, skip_line_count=None, **kwargs) -> None: + super(DelimitedTextReadSettings, self).__init__(additional_properties=additional_properties, type=type, **kwargs) + self.skip_line_count = skip_line_count diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py index f6f64392a9a0..15e0e590b4ee 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py @@ -15,6 +15,8 @@ class DelimitedTextSink(CopySink): """A copy activity DelimitedText sink. + All required parameters must be populated in order to send to Azure. 
+ :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -36,7 +38,7 @@ class DelimitedTextSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param store_settings: DelimitedText store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings @@ -61,8 +63,8 @@ class DelimitedTextSink(CopySink): 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'}, } - def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None): - super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.store_settings = store_settings - self.format_settings = format_settings + def __init__(self, **kwargs): + super(DelimitedTextSink, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) self.type = 'DelimitedTextSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py new file mode 100644 index 000000000000..6481f8021527 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class DelimitedTextSink(CopySink): + """A copy activity DelimitedText sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. 
Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :param format_settings: DelimitedText format settings. + :type format_settings: + ~azure.mgmt.datafactory.models.DelimitedTextWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: + super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.format_settings = format_settings + self.type = 'DelimitedTextSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py index 7b8462198800..10a842ca374a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py @@ -15,6 +15,8 @@ class DelimitedTextSource(CopySource): """A copy activity DelimitedText source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class DelimitedTextSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param store_settings: DelimitedText store settings. 
:type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings @@ -52,8 +54,8 @@ class DelimitedTextSource(CopySource): 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None): - super(DelimitedTextSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.store_settings = store_settings - self.format_settings = format_settings + def __init__(self, **kwargs): + super(DelimitedTextSource, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) self.type = 'DelimitedTextSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py new file mode 100644 index 000000000000..e551e32c847e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py @@ -0,0 +1,61 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class DelimitedTextSource(CopySource): + """A copy activity DelimitedText source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :param format_settings: DelimitedText format settings. 
+ :type format_settings: + ~azure.mgmt.datafactory.models.DelimitedTextReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: + super(DelimitedTextSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.format_settings = format_settings + self.type = 'DelimitedTextSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings.py index db355b5a2dff..5e0d8db319e5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings.py @@ -15,17 +15,19 @@ class DelimitedTextWriteSettings(FormatWriteSettings): """Delimited text write settings. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: The write setting type. + :param type: Required. The write setting type. :type type: str :param quote_all_text: Indicates whether string values should always be enclosed with quotes. Type: boolean (or Expression with resultType boolean). :type quote_all_text: object - :param file_extension: The file extension used to create the files. Type: - string (or Expression with resultType string). + :param file_extension: Required. The file extension used to create the + files. Type: string (or Expression with resultType string). 
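Since file_extension is now marked required here, a quick hedged sketch of how the write settings might be built (the discriminator string and '.txt' are placeholder assumptions):

    from azure.mgmt.datafactory.models import DelimitedTextWriteSettings

    # file_extension is required; quote_all_text is optional.
    write_settings = DelimitedTextWriteSettings(
        type='DelimitedTextWriteSettings',  # assumed discriminator value
        file_extension='.txt',              # placeholder extension
        quote_all_text=True)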
:type file_extension: object """ @@ -41,7 +43,7 @@ class DelimitedTextWriteSettings(FormatWriteSettings): 'file_extension': {'key': 'fileExtension', 'type': 'object'}, } - def __init__(self, type, file_extension, additional_properties=None, quote_all_text=None): - super(DelimitedTextWriteSettings, self).__init__(additional_properties=additional_properties, type=type) - self.quote_all_text = quote_all_text - self.file_extension = file_extension + def __init__(self, **kwargs): + super(DelimitedTextWriteSettings, self).__init__(**kwargs) + self.quote_all_text = kwargs.get('quote_all_text', None) + self.file_extension = kwargs.get('file_extension', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings_py3.py new file mode 100644 index 000000000000..2be019ab1e6a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_settings_py3.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .format_write_settings_py3 import FormatWriteSettings + + +class DelimitedTextWriteSettings(FormatWriteSettings): + """Delimited text write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param quote_all_text: Indicates whether string values should always be + enclosed with quotes. Type: boolean (or Expression with resultType + boolean). + :type quote_all_text: object + :param file_extension: Required. The file extension used to create the + files. Type: string (or Expression with resultType string). 
+ :type file_extension: object + """ + + _validation = { + 'type': {'required': True}, + 'file_extension': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, + 'file_extension': {'key': 'fileExtension', 'type': 'object'}, + } + + def __init__(self, *, type: str, file_extension, additional_properties=None, quote_all_text=None, **kwargs) -> None: + super(DelimitedTextWriteSettings, self).__init__(additional_properties=additional_properties, type=type, **kwargs) + self.quote_all_text = quote_all_text + self.file_extension = file_extension diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference.py index c807dc52afe1..89e750df8f0d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference.py @@ -19,7 +19,9 @@ class DependencyReference(Model): sub-classes are: SelfDependencyTumblingWindowTriggerReference, TriggerDependencyReference - :param type: Constant filled by server. + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. :type type: str """ @@ -35,6 +37,6 @@ class DependencyReference(Model): 'type': {'SelfDependencyTumblingWindowTriggerReference': 'SelfDependencyTumblingWindowTriggerReference', 'TriggerDependencyReference': 'TriggerDependencyReference'} } - def __init__(self): - super(DependencyReference, self).__init__() + def __init__(self, **kwargs): + super(DependencyReference, self).__init__(**kwargs) self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference_py3.py new file mode 100644 index 000000000000..1b0647b74991 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference_py3.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DependencyReference(Model): + """Referenced dependency. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SelfDependencyTumblingWindowTriggerReference, + TriggerDependencyReference + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. 
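Because 'type' here is the polymorphic discriminator, msrest consults _subtype_map to pick the concrete subclass during deserialization. A rough sketch of that mechanism (the payload values are illustrative only):

    from msrest import Deserializer
    from azure.mgmt.datafactory import models

    # Standard generated-client pattern: hand the Deserializer every model class.
    client_models = {k: v for k, v in vars(models).items() if isinstance(v, type)}
    deserialize = Deserializer(client_models)
    dep = deserialize('DependencyReference',
                      {'type': 'SelfDependencyTumblingWindowTriggerReference',
                       'offset': '-02:00:00'})
    print(type(dep).__name__)  # SelfDependencyTumblingWindowTriggerReference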
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SelfDependencyTumblingWindowTriggerReference': 'SelfDependencyTumblingWindowTriggerReference', 'TriggerDependencyReference': 'TriggerDependencyReference'} + } + + def __init__(self, **kwargs) -> None: + super(DependencyReference, self).__init__(**kwargs) + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings.py index fde14d4a8c35..a8065ec3cc06 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings.py @@ -15,13 +15,16 @@ class DistcpSettings(Model): """Distcp settings. - :param resource_manager_endpoint: Specifies the Yarn ResourceManager - endpoint. Type: string (or Expression with resultType string). + All required parameters must be populated in order to send to Azure. + + :param resource_manager_endpoint: Required. Specifies the Yarn + ResourceManager endpoint. Type: string (or Expression with resultType + string). :type resource_manager_endpoint: object - :param temp_script_path: Specifies an existing folder path which will be - used to store temp Distcp command script. The script file is generated by - ADF and will be removed after Copy job finished. Type: string (or - Expression with resultType string). + :param temp_script_path: Required. Specifies an existing folder path which + will be used to store temp Distcp command script. The script file is + generated by ADF and will be removed after Copy job finished. Type: string + (or Expression with resultType string). :type temp_script_path: object :param distcp_options: Specifies the Distcp options. Type: string (or Expression with resultType string). @@ -39,8 +42,8 @@ class DistcpSettings(Model): 'distcp_options': {'key': 'distcpOptions', 'type': 'object'}, } - def __init__(self, resource_manager_endpoint, temp_script_path, distcp_options=None): - super(DistcpSettings, self).__init__() - self.resource_manager_endpoint = resource_manager_endpoint - self.temp_script_path = temp_script_path - self.distcp_options = distcp_options + def __init__(self, **kwargs): + super(DistcpSettings, self).__init__(**kwargs) + self.resource_manager_endpoint = kwargs.get('resource_manager_endpoint', None) + self.temp_script_path = kwargs.get('temp_script_path', None) + self.distcp_options = kwargs.get('distcp_options', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings_py3.py new file mode 100644 index 000000000000..628e2d207f8e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings_py3.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DistcpSettings(Model): + """Distcp settings. + + All required parameters must be populated in order to send to Azure. + + :param resource_manager_endpoint: Required. Specifies the Yarn + ResourceManager endpoint. Type: string (or Expression with resultType + string). + :type resource_manager_endpoint: object + :param temp_script_path: Required. Specifies an existing folder path which + will be used to store temp Distcp command script. The script file is + generated by ADF and will be removed after Copy job finished. Type: string + (or Expression with resultType string). + :type temp_script_path: object + :param distcp_options: Specifies the Distcp options. Type: string (or + Expression with resultType string). + :type distcp_options: object + """ + + _validation = { + 'resource_manager_endpoint': {'required': True}, + 'temp_script_path': {'required': True}, + } + + _attribute_map = { + 'resource_manager_endpoint': {'key': 'resourceManagerEndpoint', 'type': 'object'}, + 'temp_script_path': {'key': 'tempScriptPath', 'type': 'object'}, + 'distcp_options': {'key': 'distcpOptions', 'type': 'object'}, + } + + def __init__(self, *, resource_manager_endpoint, temp_script_path, distcp_options=None, **kwargs) -> None: + super(DistcpSettings, self).__init__(**kwargs) + self.resource_manager_endpoint = resource_manager_endpoint + self.temp_script_path = temp_script_path + self.distcp_options = distcp_options diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset.py index 543f756ce825..fb2b8d46fa9c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset.py @@ -15,6 +15,8 @@ class DocumentDbCollectionDataset(Dataset): """Microsoft Azure Document Database Collection dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class DocumentDbCollectionDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,10 +41,10 @@ class DocumentDbCollectionDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param collection_name: Document Database collection name. Type: string - (or Expression with resultType string). + :param collection_name: Required. Document Database collection name. Type: + string (or Expression with resultType string). 
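For the DistcpSettings model completed just above, a minimal construction sketch (both required properties must be supplied; every value below is a placeholder):

    from azure.mgmt.datafactory.models import DistcpSettings

    distcp = DistcpSettings(
        resource_manager_endpoint='http://headnode.example.com:8088',  # placeholder YARN RM endpoint
        temp_script_path='/tmp/adf-distcp',                            # placeholder existing folder
        distcp_options='-m 10')                                        # optional Distcp flags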
:type collection_name: object """ @@ -65,7 +67,7 @@ class DocumentDbCollectionDataset(Dataset): 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, } - def __init__(self, linked_service_name, collection_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None): - super(DocumentDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.collection_name = collection_name + def __init__(self, **kwargs): + super(DocumentDbCollectionDataset, self).__init__(**kwargs) + self.collection_name = kwargs.get('collection_name', None) self.type = 'DocumentDbCollection' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset_py3.py new file mode 100644 index 000000000000..5eb4dbbf0997 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class DocumentDbCollectionDataset(Dataset): + """Microsoft Azure Document Database Collection dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection_name: Required. Document Database collection name. Type: + string (or Expression with resultType string). 
+ :type collection_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'collection_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, collection_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(DocumentDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.collection_name = collection_name + self.type = 'DocumentDbCollection' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py index 42d1a3408d3d..c2908dc1dd05 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py @@ -15,6 +15,8 @@ class DocumentDbCollectionSink(CopySink): """A copy activity Document Database Collection sink. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -36,7 +38,7 @@ class DocumentDbCollectionSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param nesting_separator: Nested properties separator. Default is . (dot). Type: string (or Expression with resultType string). 
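For reference, constructing the collection dataset defined above (names are placeholders; LinkedServiceReference wraps the linked service name):

    from azure.mgmt.datafactory.models import (
        DocumentDbCollectionDataset, LinkedServiceReference)

    # linked_service_name and collection_name are the two required properties.
    dataset = DocumentDbCollectionDataset(
        linked_service_name=LinkedServiceReference(reference_name='CosmosDbLS'),
        collection_name='mycollection')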
@@ -62,8 +64,8 @@ class DocumentDbCollectionSink(CopySink): 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } - def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, nesting_separator=None, write_behavior=None): - super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.nesting_separator = nesting_separator - self.write_behavior = write_behavior + def __init__(self, **kwargs): + super(DocumentDbCollectionSink, self).__init__(**kwargs) + self.nesting_separator = kwargs.get('nesting_separator', None) + self.write_behavior = kwargs.get('write_behavior', None) self.type = 'DocumentDbCollectionSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py new file mode 100644 index 000000000000..f1410cd211a4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class DocumentDbCollectionSink(CopySink): + """A copy activity Document Database Collection sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param nesting_separator: Nested properties separator. Default is . (dot). + Type: string (or Expression with resultType string). + :type nesting_separator: object + :param write_behavior: Describes how to write data to Azure Cosmos DB. + Allowed values: insert and upsert. 
+ :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, nesting_separator=None, write_behavior=None, **kwargs) -> None: + super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.nesting_separator = nesting_separator + self.write_behavior = write_behavior + self.type = 'DocumentDbCollectionSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source.py index 984a7ca07efc..9fdd23f2795f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source.py @@ -15,6 +15,8 @@ class DocumentDbCollectionSource(CopySource): """A copy activity Document Database Collection source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class DocumentDbCollectionSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: Documents query. Type: string (or Expression with resultType string). 
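And a sketch of the sink model above, using the allowed write_behavior values from its docstring:

    from azure.mgmt.datafactory.models import DocumentDbCollectionSink

    sink = DocumentDbCollectionSink(
        write_behavior='upsert',   # allowed values per the docstring: insert, upsert
        nesting_separator='.',     # the documented default, shown explicitly
        write_batch_size=100)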
@@ -53,8 +55,8 @@ class DocumentDbCollectionSource(CopySource): 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, nesting_separator=None): - super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query - self.nesting_separator = nesting_separator + def __init__(self, **kwargs): + super(DocumentDbCollectionSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.nesting_separator = kwargs.get('nesting_separator', None) self.type = 'DocumentDbCollectionSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source_py3.py new file mode 100644 index 000000000000..9e0bf6382b04 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source_py3.py @@ -0,0 +1,62 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class DocumentDbCollectionSource(CopySource): + """A copy activity Document Database Collection source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Documents query. Type: string (or Expression with resultType + string). + :type query: object + :param nesting_separator: Nested properties separator. Type: string (or + Expression with resultType string). 
+ :type nesting_separator: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, nesting_separator=None, **kwargs) -> None: + super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.nesting_separator = nesting_separator + self.type = 'DocumentDbCollectionSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service.py index 93457bad7144..c5428ace02a2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service.py @@ -15,6 +15,8 @@ class DrillLinkedService(LinkedService): """Drill server linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class DrillLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. 
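Likewise for the Document Database collection source completed above (the query text is illustrative):

    from azure.mgmt.datafactory.models import DocumentDbCollectionSource

    source = DocumentDbCollectionSource(
        query='SELECT * FROM c',   # illustrative documents query
        nesting_separator='.')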
@@ -59,9 +61,9 @@ class DrillLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None): - super(DrillLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.connection_string = connection_string - self.pwd = pwd - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(DrillLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.pwd = kwargs.get('pwd', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'Drill' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service_py3.py new file mode 100644 index 000000000000..5fb0cb25ecdb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class DrillLinkedService(LinkedService): + """Drill server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: + super(DrillLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.pwd = pwd + self.encrypted_credential = encrypted_credential + self.type = 'Drill' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source.py index df41ba1e85ed..9a3391f27786 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source.py @@ -15,6 +15,8 @@ class DrillSource(CopySource): """A copy activity Drill server source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class DrillSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). @@ -49,7 +51,7 @@ class DrillSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(DrillSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'DrillSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source_py3.py new file mode 100644 index 000000000000..313183abab83 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class DrillSource(CopySource): + """A copy activity Drill server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'DrillSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset.py index d277b8da3473..3dfd5715deb9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset.py @@ -15,6 +15,8 @@ class DrillTableDataset(Dataset): """Drill server dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class DrillTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. 
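A combined sketch for the Drill models above (the ODBC connection string is a placeholder; in practice the password would be referenced from Key Vault via the pwd property):

    from azure.mgmt.datafactory.models import DrillLinkedService, DrillSource

    drill_ls = DrillLinkedService(
        connection_string='ConnectionType=Direct;Host=drill.example.com;Port=31010')
    drill_src = DrillSource(query='SELECT * FROM dfs.tmp.`sample` LIMIT 10')  # illustrative query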
@@ -39,7 +41,7 @@ class DrillTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. @@ -72,9 +74,9 @@ class DrillTableDataset(Dataset): 'drill_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, drill_table_dataset_schema=None): - super(DrillTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name - self.table = table - self.drill_table_dataset_schema = drill_table_dataset_schema + def __init__(self, **kwargs): + super(DrillTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.drill_table_dataset_schema = kwargs.get('drill_table_dataset_schema', None) self.type = 'DrillTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset_py3.py new file mode 100644 index 000000000000..db46bdc4e0bd --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class DrillTableDataset(Dataset): + """Drill server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. 
If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param table_name: This property will be retired. Please consider using
+     schema + table properties instead.
+    :type table_name: object
+    :param table: The table name of the Drill. Type: string (or Expression
+     with resultType string).
+    :type table: object
+    :param drill_table_dataset_schema: The schema name of the Drill. Type:
+     string (or Expression with resultType string).
+    :type drill_table_dataset_schema: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+        'table': {'key': 'typeProperties.table', 'type': 'object'},
+        'drill_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
+    }
+
+    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, drill_table_dataset_schema=None, **kwargs) -> None:
+        super(DrillTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+        self.table_name = table_name
+        self.table = table
+        self.drill_table_dataset_schema = drill_table_dataset_schema
+        self.type = 'DrillTable'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service.py
index bea4e0a82b36..5ff0b150718b 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service.py
@@ -15,6 +15,8 @@
 class DynamicsAXLinkedService(LinkedService):
     """Dynamics AX linked service.
 
+    All required parameters must be populated in order to send to Azure.
+
     :param additional_properties: Unmatched properties from the message are
      deserialized this collection
     :type additional_properties: dict[str, object]
@@ -29,25 +31,25 @@ class DynamicsAXLinkedService(LinkedService):
     :param annotations: List of tags that can be used for describing the
      linked service.
     :type annotations: list[object]
-    :param type: Constant filled by server.
+    :param type: Required. Constant filled by server.
     :type type: str
-    :param url: The Dynamics AX (or Dynamics 365 Finance and Operations)
-     instance OData endpoint.
+    :param url: Required. The Dynamics AX (or Dynamics 365 Finance and
+     Operations) instance OData endpoint.
     :type url: object
-    :param service_principal_id: Specify the application's client ID. Type:
-     string (or Expression with resultType string).
+    :param service_principal_id: Required. Specify the application's client
+     ID. Type: string (or Expression with resultType string).
     :type service_principal_id: object
-    :param service_principal_key: Specify the application's key. Mark this
-     field as a SecureString to store it securely in Data Factory, or reference
-     a secret stored in Azure Key Vault. Type: string (or Expression with
-     resultType string).
+    :param service_principal_key: Required. Specify the application's key.
+     Mark this field as a SecureString to store it securely in Data Factory, or
+     reference a secret stored in Azure Key Vault. Type: string (or Expression
+     with resultType string).
     :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
-    :param tenant: Specify the tenant information (domain name or tenant ID)
-     under which your application resides. Retrieve it by hovering the mouse in
-     the top-right corner of the Azure portal. Type: string (or Expression with
-     resultType string).
+    :param tenant: Required. Specify the tenant information (domain name or
+     tenant ID) under which your application resides. Retrieve it by hovering
+     the mouse in the top-right corner of the Azure portal. Type: string (or
+     Expression with resultType string).
     :type tenant: object
-    :param aad_resource_id: Specify the resource you are requesting
+    :param aad_resource_id: Required. Specify the resource you are requesting
     authorization. Type: string (or Expression with resultType string).
     :type aad_resource_id: object
     :param encrypted_credential: The encrypted credential used for
@@ -80,12 +82,12 @@ class DynamicsAXLinkedService(LinkedService):
         'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
     }
 
-    def __init__(self, url, service_principal_id, service_principal_key, tenant, aad_resource_id, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, encrypted_credential=None):
-        super(DynamicsAXLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations)
-        self.url = url
-        self.service_principal_id = service_principal_id
-        self.service_principal_key = service_principal_key
-        self.tenant = tenant
-        self.aad_resource_id = aad_resource_id
-        self.encrypted_credential = encrypted_credential
+    def __init__(self, **kwargs):
+        super(DynamicsAXLinkedService, self).__init__(**kwargs)
+        self.url = kwargs.get('url', None)
+        self.service_principal_id = kwargs.get('service_principal_id', None)
+        self.service_principal_key = kwargs.get('service_principal_key', None)
+        self.tenant = kwargs.get('tenant', None)
+        self.aad_resource_id = kwargs.get('aad_resource_id', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
         self.type = 'DynamicsAX'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service_py3.py
new file mode 100644
index 000000000000..79d3a34ba313
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service_py3.py
@@ -0,0 +1,93 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class DynamicsAXLinkedService(LinkedService):
+    """Dynamics AX linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param url: Required. The Dynamics AX (or Dynamics 365 Finance and
+     Operations) instance OData endpoint.
+    :type url: object
+    :param service_principal_id: Required. Specify the application's client
+     ID. Type: string (or Expression with resultType string).
+    :type service_principal_id: object
+    :param service_principal_key: Required. Specify the application's key.
+     Mark this field as a SecureString to store it securely in Data Factory, or
+     reference a secret stored in Azure Key Vault. Type: string (or Expression
+     with resultType string).
+    :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
+    :param tenant: Required. Specify the tenant information (domain name or
+     tenant ID) under which your application resides. Retrieve it by hovering
+     the mouse in the top-right corner of the Azure portal. Type: string (or
+     Expression with resultType string).
+    :type tenant: object
+    :param aad_resource_id: Required. Specify the resource you are requesting
+     authorization. Type: string (or Expression with resultType string).
+    :type aad_resource_id: object
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'url': {'required': True},
+        'service_principal_id': {'required': True},
+        'service_principal_key': {'required': True},
+        'tenant': {'required': True},
+        'aad_resource_id': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'url': {'key': 'typeProperties.url', 'type': 'object'},
+        'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
+        'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
+        'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
+        'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, *, url, service_principal_id, service_principal_key, tenant, aad_resource_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, encrypted_credential=None, **kwargs) -> None:
+        super(DynamicsAXLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+        self.url = url
+        self.service_principal_id = service_principal_id
+        self.service_principal_key = service_principal_key
+        self.tenant = tenant
+        self.aad_resource_id = aad_resource_id
+        self.encrypted_credential = encrypted_credential
+        self.type = 'DynamicsAX'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py
index 6fe4046a328e..392b8ac7b971 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py
@@ -15,6 +15,8 @@
 class DynamicsAXResourceDataset(Dataset):
     """The path of the Dynamics AX OData entity.
 
+    All required parameters must be populated in order to send to Azure.
+
     :param additional_properties: Unmatched properties from the message are
      deserialized this collection
    :type additional_properties: dict[str, object]
@@ -27,7 +29,7 @@ class DynamicsAXResourceDataset(Dataset):
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
     :type schema: object
-    :param linked_service_name: Linked service reference.
+    :param linked_service_name: Required. Linked service reference.
     :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
     :param parameters: Parameters for dataset.
@@ -39,10 +41,10 @@
     :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
     :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
-    :param type: Constant filled by server.
+    :param type: Required. Constant filled by server.
     :type type: str
-    :param path: The path of the Dynamics AX OData entity. Type: string (or
-     Expression with resultType string).
+    :param path: Required. The path of the Dynamics AX OData entity. Type:
+     string (or Expression with resultType string).
     :type path: object
@@ -65,7 +67,7 @@ class DynamicsAXResourceDataset(Dataset):
         'path': {'key': 'typeProperties.path', 'type': 'object'},
     }
 
-    def __init__(self, linked_service_name, path, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None):
-        super(DynamicsAXResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder)
-        self.path = path
+    def __init__(self, **kwargs):
+        super(DynamicsAXResourceDataset, self).__init__(**kwargs)
+        self.path = kwargs.get('path', None)
         self.type = 'DynamicsAXResource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py
new file mode 100644
index 000000000000..6cade3e4aa59
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py
@@ -0,0 +1,73 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_py3 import Dataset
+
+
+class DynamicsAXResourceDataset(Dataset):
+    """The path of the Dynamics AX OData entity.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param path: Required. The path of the Dynamics AX OData entity. Type:
+     string (or Expression with resultType string).
+    :type path: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+        'path': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'path': {'key': 'typeProperties.path', 'type': 'object'},
+    }
+
+    def __init__(self, *, linked_service_name, path, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None:
+        super(DynamicsAXResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+        self.path = path
+        self.type = 'DynamicsAXResource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source.py
index ce549554360a..619bad0f75c9 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source.py
@@ -15,6 +15,8 @@
 class DynamicsAXSource(CopySource):
     """A copy activity Dynamics AX source.
 
+    All required parameters must be populated in order to send to Azure.
+
     :param additional_properties: Unmatched properties from the message are
      deserialized this collection
     :type additional_properties: dict[str, object]
@@ -29,7 +31,7 @@ class DynamicsAXSource(CopySource):
     for the source data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param type: Constant filled by server.
+    :param type: Required. Constant filled by server.
     :type type: str
     :param query: A query to retrieve data from source. Type: string (or
     Expression with resultType string).
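As a quick orientation, a minimal sketch of constructing this source model once the change lands; the query value here is a hypothetical OData expression, not something taken from the patch:

    from azure.mgmt.datafactory.models import DynamicsAXSource

    # Hypothetical OData query string; Data Factory also accepts an
    # Expression object for this property.
    source = DynamicsAXSource(query="$top=100")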
@@ -49,7 +51,7 @@ class DynamicsAXSource(CopySource):
         'query': {'key': 'query', 'type': 'object'},
     }
 
-    def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None):
-        super(DynamicsAXSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections)
-        self.query = query
+    def __init__(self, **kwargs):
+        super(DynamicsAXSource, self).__init__(**kwargs)
+        self.query = kwargs.get('query', None)
         self.type = 'DynamicsAXSource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source_py3.py
new file mode 100644
index 000000000000..7679e68bae7b
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source_py3.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source_py3 import CopySource
+
+
+class DynamicsAXSource(CopySource):
+    """A copy activity Dynamics AX source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param query: A query to retrieve data from source. Type: string (or
+     Expression with resultType string).
+    :type query: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
+        super(DynamicsAXSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.query = query
+        self.type = 'DynamicsAXSource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset.py
index e167a1eccc27..ff4079761cf0 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset.py
@@ -15,6 +15,8 @@
 class DynamicsCrmEntityDataset(Dataset):
     """The Dynamics CRM entity dataset.
 
+    All required parameters must be populated in order to send to Azure.
+
     :param additional_properties: Unmatched properties from the message are
      deserialized this collection
     :type additional_properties: dict[str, object]
@@ -27,7 +29,7 @@ class DynamicsCrmEntityDataset(Dataset):
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
     :type schema: object
-    :param linked_service_name: Linked service reference.
+    :param linked_service_name: Required. Linked service reference.
     :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
     :param parameters: Parameters for dataset.
@@ -39,7 +41,7 @@
     :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
     :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
-    :param type: Constant filled by server.
+    :param type: Required. Constant filled by server.
     :type type: str
     :param entity_name: The logical name of the entity. Type: string (or
     Expression with resultType string).
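For orientation, a hedged sketch of creating this dataset against the keyword-only Python 3 constructor introduced by this change; the linked service name and entity are illustrative placeholders, not names from the patch:

    from azure.mgmt.datafactory.models import (
        DynamicsCrmEntityDataset, LinkedServiceReference)

    # Placeholder linked service reference; entity_name takes the
    # CRM entity's logical name.
    dataset = DynamicsCrmEntityDataset(
        linked_service_name=LinkedServiceReference(
            reference_name="MyDynamicsCrmLinkedService"),
        entity_name="account")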
@@ -64,7 +66,7 @@
         'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'},
     }
 
-    def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, entity_name=None):
-        super(DynamicsCrmEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder)
-        self.entity_name = entity_name
+    def __init__(self, **kwargs):
+        super(DynamicsCrmEntityDataset, self).__init__(**kwargs)
+        self.entity_name = kwargs.get('entity_name', None)
         self.type = 'DynamicsCrmEntity'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset_py3.py
new file mode 100644
index 000000000000..4a1ef86b2dc6
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset_py3.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_py3 import Dataset
+
+
+class DynamicsCrmEntityDataset(Dataset):
+    """The Dynamics CRM entity dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param entity_name: The logical name of the entity. Type: string (or
+     Expression with resultType string).
+    :type entity_name: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'},
+    }
+
+    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, entity_name=None, **kwargs) -> None:
+        super(DynamicsCrmEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+        self.entity_name = entity_name
+        self.type = 'DynamicsCrmEntity'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service.py
index 8e81da0ae9ef..aad71042bb04 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service.py
@@ -15,6 +15,8 @@
 class DynamicsCrmLinkedService(LinkedService):
     """Dynamics CRM linked service.
 
+    All required parameters must be populated in order to send to Azure.
+
     :param additional_properties: Unmatched properties from the message are
      deserialized this collection
     :type additional_properties: dict[str, object]
@@ -29,12 +31,12 @@ class DynamicsCrmLinkedService(LinkedService):
     :param annotations: List of tags that can be used for describing the
     linked service.
     :type annotations: list[object]
-    :param type: Constant filled by server.
+    :param type: Required. Constant filled by server.
     :type type: str
-    :param deployment_type: The deployment type of the Dynamics CRM instance.
-     'Online' for Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM
-     on-premises with Ifd. Type: string (or Expression with resultType string).
-     Possible values include: 'Online', 'OnPremisesWithIfd'
+    :param deployment_type: Required. The deployment type of the Dynamics CRM
+     instance. 'Online' for Dynamics CRM Online and 'OnPremisesWithIfd' for
+     Dynamics CRM on-premises with Ifd. Type: string (or Expression with
+     resultType string). Possible values include: 'Online', 'OnPremisesWithIfd'
     :type deployment_type: str or
     ~azure.mgmt.datafactory.models.DynamicsDeploymentType
     :param host_name: The host name of the on-premises Dynamics CRM server.
@@ -54,14 +56,14 @@
     when there are more than one Dynamics CRM instances associated with the
     user. Type: string (or Expression with resultType string).
     :type organization_name: object
-    :param authentication_type: The authentication type to connect to Dynamics
-     CRM server. 'Office365' for online scenario, 'Ifd' for on-premises with
-     Ifd scenario. Type: string (or Expression with resultType string).
-     Possible values include: 'Office365', 'Ifd'
+    :param authentication_type: Required. The authentication type to connect
+     to Dynamics CRM server. 'Office365' for online scenario, 'Ifd' for
+     on-premises with Ifd scenario. Type: string (or Expression with resultType
+     string). Possible values include: 'Office365', 'Ifd'
     :type authentication_type: str or
     ~azure.mgmt.datafactory.models.DynamicsAuthenticationType
-    :param username: User name to access the Dynamics CRM instance. Type:
-     string (or Expression with resultType string).
+    :param username: Required. User name to access the Dynamics CRM instance.
+     Type: string (or Expression with resultType string).
     :type username: object
     :param password: Password to access the Dynamics CRM instance.
     :type password: ~azure.mgmt.datafactory.models.SecretBase
@@ -96,15 +98,15 @@ class DynamicsCrmLinkedService(LinkedService):
         'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
     }
 
-    def __init__(self, deployment_type, authentication_type, username, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, password=None, encrypted_credential=None):
-        super(DynamicsCrmLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations)
-        self.deployment_type = deployment_type
-        self.host_name = host_name
-        self.port = port
-        self.service_uri = service_uri
-        self.organization_name = organization_name
-        self.authentication_type = authentication_type
-        self.username = username
-        self.password = password
-        self.encrypted_credential = encrypted_credential
+    def __init__(self, **kwargs):
+        super(DynamicsCrmLinkedService, self).__init__(**kwargs)
+        self.deployment_type = kwargs.get('deployment_type', None)
+        self.host_name = kwargs.get('host_name', None)
+        self.port = kwargs.get('port', None)
+        self.service_uri = kwargs.get('service_uri', None)
+        self.organization_name = kwargs.get('organization_name', None)
+        self.authentication_type = kwargs.get('authentication_type', None)
+        self.username = kwargs.get('username', None)
+        self.password = kwargs.get('password', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
         self.type = 'DynamicsCrm'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service_py3.py
new file mode 100644
index 000000000000..2286301fabef
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service_py3.py
@@ -0,0 +1,112 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class DynamicsCrmLinkedService(LinkedService):
+    """Dynamics CRM linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param deployment_type: Required. The deployment type of the Dynamics CRM
+     instance. 'Online' for Dynamics CRM Online and 'OnPremisesWithIfd' for
+     Dynamics CRM on-premises with Ifd. Type: string (or Expression with
+     resultType string). Possible values include: 'Online', 'OnPremisesWithIfd'
+    :type deployment_type: str or
+     ~azure.mgmt.datafactory.models.DynamicsDeploymentType
+    :param host_name: The host name of the on-premises Dynamics CRM server.
+     The property is required for on-prem and not allowed for online. Type:
+     string (or Expression with resultType string).
+    :type host_name: object
+    :param port: The port of on-premises Dynamics CRM server. The property is
+     required for on-prem and not allowed for online. Default is 443. Type:
+     integer (or Expression with resultType integer), minimum: 0.
+    :type port: object
+    :param service_uri: The URL to the Microsoft Dynamics CRM server. The
+     property is required for on-line and not allowed for on-prem. Type: string
+     (or Expression with resultType string).
+    :type service_uri: object
+    :param organization_name: The organization name of the Dynamics CRM
+     instance. The property is required for on-prem and required for online
+     when there are more than one Dynamics CRM instances associated with the
+     user. Type: string (or Expression with resultType string).
+    :type organization_name: object
+    :param authentication_type: Required. The authentication type to connect
+     to Dynamics CRM server. 'Office365' for online scenario, 'Ifd' for
+     on-premises with Ifd scenario. Type: string (or Expression with resultType
+     string). Possible values include: 'Office365', 'Ifd'
+    :type authentication_type: str or
+     ~azure.mgmt.datafactory.models.DynamicsAuthenticationType
+    :param username: Required. User name to access the Dynamics CRM instance.
+     Type: string (or Expression with resultType string).
+    :type username: object
+    :param password: Password to access the Dynamics CRM instance.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'deployment_type': {'required': True},
+        'authentication_type': {'required': True},
+        'username': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'},
+        'host_name': {'key': 'typeProperties.hostName', 'type': 'object'},
+        'port': {'key': 'typeProperties.port', 'type': 'object'},
+        'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'},
+        'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'},
+        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+        'username': {'key': 'typeProperties.username', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, *, deployment_type, authentication_type, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, password=None, encrypted_credential=None, **kwargs) -> None:
+        super(DynamicsCrmLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+        self.deployment_type = deployment_type
+        self.host_name = host_name
+        self.port = port
+        self.service_uri = service_uri
+        self.organization_name = organization_name
+        self.authentication_type = authentication_type
+        self.username = username
+        self.password = password
+        self.encrypted_credential = encrypted_credential
+        self.type = 'DynamicsCrm'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink.py
index 2e633abbffa2..2d0f462e0f59 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink.py
@@ -18,6 +18,8 @@ class DynamicsCrmSink(CopySink):
     Variables are only populated by the server, and will be ignored when
     sending a request.
 
+    All required parameters must be populated in order to send to Azure.
+
     :param additional_properties: Unmatched properties from the message are
      deserialized this collection
     :type additional_properties: dict[str, object]
@@ -39,10 +41,10 @@
     for the sink data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param type: Constant filled by server.
+    :param type: Required. Constant filled by server.
     :type type: str
-    :ivar write_behavior: The write behavior for the operation. Default value:
-     "Upsert" .
+    :ivar write_behavior: Required. The write behavior for the operation.
+     Default value: "Upsert" .
     :vartype write_behavior: str
     :param ignore_null_values: The flag indicating whether to ignore null
     values from input dataset (except key fields) during write operation.
@@ -69,7 +71,7 @@
     write_behavior = "Upsert"
 
-    def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None):
-        super(DynamicsCrmSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections)
-        self.ignore_null_values = ignore_null_values
+    def __init__(self, **kwargs):
+        super(DynamicsCrmSink, self).__init__(**kwargs)
+        self.ignore_null_values = kwargs.get('ignore_null_values', None)
         self.type = 'DynamicsCrmSink'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink_py3.py
new file mode 100644
index 000000000000..d9f4fcf092c8
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink_py3.py
@@ -0,0 +1,77 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_sink_py3 import CopySink
+
+
+class DynamicsCrmSink(CopySink):
+    """A copy activity Dynamics CRM sink.
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param write_batch_size: Write batch size. Type: integer (or Expression
+     with resultType integer), minimum: 0.
+    :type write_batch_size: object
+    :param write_batch_timeout: Write batch timeout. Type: string (or
+     Expression with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type write_batch_timeout: object
+    :param sink_retry_count: Sink retry count. Type: integer (or Expression
+     with resultType integer).
+    :type sink_retry_count: object
+    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+     resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type sink_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the sink data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :ivar write_behavior: Required. The write behavior for the operation.
+     Default value: "Upsert" .
+    :vartype write_behavior: str
+    :param ignore_null_values: The flag indicating whether to ignore null
+     values from input dataset (except key fields) during write operation.
+     Default is false. Type: boolean (or Expression with resultType boolean).
+    :type ignore_null_values: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'write_behavior': {'required': True, 'constant': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
+        'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
+    }
+
+    write_behavior = "Upsert"
+
+    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None:
+        super(DynamicsCrmSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.ignore_null_values = ignore_null_values
+        self.type = 'DynamicsCrmSink'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source.py
index 6c22c8c5dd07..641fad43f437 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source.py
@@ -15,6 +15,8 @@
 class DynamicsCrmSource(CopySource):
     """A copy activity Dynamics CRM source.
 
+    All required parameters must be populated in order to send to Azure.
+
     :param additional_properties: Unmatched properties from the message are
      deserialized this collection
     :type additional_properties: dict[str, object]
@@ -29,7 +31,7 @@
     for the source data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param type: Constant filled by server.
+    :param type: Required. Constant filled by server.
     :type type: str
     :param query: FetchXML is a proprietary query language that is used in
     Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression
@@ -50,7 +52,7 @@ class DynamicsCrmSource(CopySource):
         'query': {'key': 'query', 'type': 'object'},
     }
 
-    def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None):
-        super(DynamicsCrmSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections)
-        self.query = query
+    def __init__(self, **kwargs):
+        super(DynamicsCrmSource, self).__init__(**kwargs)
+        self.query = kwargs.get('query', None)
         self.type = 'DynamicsCrmSource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source_py3.py
new file mode 100644
index 000000000000..29c3e78609a5
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source_py3.py
@@ -0,0 +1,58 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source_py3 import CopySource
+
+
+class DynamicsCrmSource(CopySource):
+    """A copy activity Dynamics CRM source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param query: FetchXML is a proprietary query language that is used in
+     Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression
+     with resultType string).
+    :type query: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
+        super(DynamicsCrmSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.query = query
+        self.type = 'DynamicsCrmSource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset.py
index 0ef30c87b5b1..435c6d153066 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset.py
@@ -15,6 +15,8 @@
 class DynamicsEntityDataset(Dataset):
     """The Dynamics entity dataset.
 
+    All required parameters must be populated in order to send to Azure.
+
     :param additional_properties: Unmatched properties from the message are
      deserialized this collection
     :type additional_properties: dict[str, object]
@@ -27,7 +29,7 @@
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
     :type schema: object
-    :param linked_service_name: Linked service reference.
+    :param linked_service_name: Required. Linked service reference.
     :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
     :param parameters: Parameters for dataset.
@@ -39,7 +41,7 @@
     :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
     :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
-    :param type: Constant filled by server.
+    :param type: Required. Constant filled by server.
     :type type: str
     :param entity_name: The logical name of the entity. Type: string (or
     Expression with resultType string).
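For the Dynamics CRM source finished just above, a minimal sketch of passing a FetchXML query through the new keyword-only constructor; the XML itself is a made-up example, not taken from the patch:

    from azure.mgmt.datafactory.models import DynamicsCrmSource

    # Illustrative FetchXML; Data Factory forwards the string to
    # Dynamics CRM as-is.
    fetch_xml = '<fetch top="10"><entity name="account" /></fetch>'
    source = DynamicsCrmSource(query=fetch_xml)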
@@ -64,7 +66,7 @@ class DynamicsEntityDataset(Dataset):
         'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'},
     }
 
-    def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, entity_name=None):
-        super(DynamicsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder)
-        self.entity_name = entity_name
+    def __init__(self, **kwargs):
+        super(DynamicsEntityDataset, self).__init__(**kwargs)
+        self.entity_name = kwargs.get('entity_name', None)
         self.type = 'DynamicsEntity'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset_py3.py
new file mode 100644
index 000000000000..7ee671890354
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset_py3.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_py3 import Dataset
+
+
+class DynamicsEntityDataset(Dataset):
+    """The Dynamics entity dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param entity_name: The logical name of the entity. Type: string (or
+     Expression with resultType string).
+    :type entity_name: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'},
+    }
+
+    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, entity_name=None, **kwargs) -> None:
+        super(DynamicsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+        self.entity_name = entity_name
+        self.type = 'DynamicsEntity'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service.py
index 7d0111ef5fbd..c925033d1240 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service.py
@@ -15,6 +15,8 @@
 class DynamicsLinkedService(LinkedService):
     """Dynamics linked service.
 
+    All required parameters must be populated in order to send to Azure.
+
     :param additional_properties: Unmatched properties from the message are
      deserialized this collection
     :type additional_properties: dict[str, object]
@@ -29,11 +31,12 @@
     :param annotations: List of tags that can be used for describing the
     linked service.
     :type annotations: list[object]
-    :param type: Constant filled by server.
+    :param type: Required. Constant filled by server.
     :type type: str
-    :param deployment_type: The deployment type of the Dynamics instance.
-     'Online' for Dynamics Online and 'OnPremisesWithIfd' for Dynamics
-     on-premises with Ifd. Type: string (or Expression with resultType string).
+    :param deployment_type: Required. The deployment type of the Dynamics
+     instance. 'Online' for Dynamics Online and 'OnPremisesWithIfd' for
+     Dynamics on-premises with Ifd. Type: string (or Expression with resultType
+     string).
     :type deployment_type: object
     :param host_name: The host name of the on-premises Dynamics server. The
     property is required for on-prem and not allowed for online. Type: string
@@ -52,12 +55,12 @@
     are more than one Dynamics instances associated with the user. Type:
     string (or Expression with resultType string).
     :type organization_name: object
-    :param authentication_type: The authentication type to connect to Dynamics
-     server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd
-     scenario. Type: string (or Expression with resultType string).
+    :param authentication_type: Required. The authentication type to connect
+     to Dynamics server. 'Office365' for online scenario, 'Ifd' for on-premises
+     with Ifd scenario. Type: string (or Expression with resultType string).
     :type authentication_type: object
-    :param username: User name to access the Dynamics instance. Type: string
-     (or Expression with resultType string).
+    :param username: Required. User name to access the Dynamics instance.
+     Type: string (or Expression with resultType string).
     :type username: object
     :param password: Password to access the Dynamics instance.
     :type password: ~azure.mgmt.datafactory.models.SecretBase
@@ -92,15 +95,15 @@
         'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
     }
 
-    def __init__(self, deployment_type, authentication_type, username, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, password=None, encrypted_credential=None):
-        super(DynamicsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations)
-        self.deployment_type = deployment_type
-        self.host_name = host_name
-        self.port = port
-        self.service_uri = service_uri
-        self.organization_name = organization_name
-        self.authentication_type = authentication_type
-        self.username = username
-        self.password = password
-        self.encrypted_credential = encrypted_credential
+    def __init__(self, **kwargs):
+        super(DynamicsLinkedService, self).__init__(**kwargs)
+        self.deployment_type = kwargs.get('deployment_type', None)
+        self.host_name = kwargs.get('host_name', None)
+        self.port = kwargs.get('port', None)
+        self.service_uri = kwargs.get('service_uri', None)
+        self.organization_name = kwargs.get('organization_name', None)
+        self.authentication_type = kwargs.get('authentication_type', None)
+        self.username = kwargs.get('username', None)
+        self.password = kwargs.get('password', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
         self.type = 'Dynamics'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service_py3.py
new file mode 100644
index 000000000000..07c028ff2477
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service_py3.py
@@ -0,0 +1,109 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class DynamicsLinkedService(LinkedService):
+    """Dynamics linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param deployment_type: Required. The deployment type of the Dynamics
+     instance. 'Online' for Dynamics Online and 'OnPremisesWithIfd' for
+     Dynamics on-premises with Ifd. Type: string (or Expression with resultType
+     string).
+    :type deployment_type: object
+    :param host_name: The host name of the on-premises Dynamics server. The
+     property is required for on-prem and not allowed for online. Type: string
+     (or Expression with resultType string).
+    :type host_name: object
+    :param port: The port of on-premises Dynamics server. The property is
+     required for on-prem and not allowed for online. Default is 443. Type:
+     integer (or Expression with resultType integer), minimum: 0.
+    :type port: object
+    :param service_uri: The URL to the Microsoft Dynamics server. The property
+     is required for on-line and not allowed for on-prem. Type: string (or
+     Expression with resultType string).
+    :type service_uri: object
+    :param organization_name: The organization name of the Dynamics instance.
+     The property is required for on-prem and required for online when there
+     are more than one Dynamics instances associated with the user. Type:
+     string (or Expression with resultType string).
+    :type organization_name: object
+    :param authentication_type: Required. The authentication type to connect
+     to Dynamics server. 'Office365' for online scenario, 'Ifd' for on-premises
+     with Ifd scenario. Type: string (or Expression with resultType string).
+    :type authentication_type: object
+    :param username: Required. User name to access the Dynamics instance.
+     Type: string (or Expression with resultType string).
+    :type username: object
+    :param password: Password to access the Dynamics instance.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, deployment_type, authentication_type, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(DynamicsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.deployment_type = deployment_type + self.host_name = host_name + self.port = port + self.service_uri = service_uri + self.organization_name = organization_name + self.authentication_type = authentication_type + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'Dynamics' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py index 47015ec8fdad..45bac7b52064 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py @@ -18,6 +18,8 @@ class DynamicsSink(CopySink): Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -39,10 +41,10 @@ class DynamicsSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :ivar write_behavior: The write behavior for the operation. Default value: - "Upsert" . + :ivar write_behavior: Required. The write behavior for the operation. + Default value: "Upsert" . 
:vartype write_behavior: str
 :param ignore_null_values: The flag indicating whether ignore null values
 from input dataset (except key fields) during write operation. Default is
@@ -69,7 +71,7 @@ class DynamicsSink(CopySink):
 write_behavior = "Upsert"
- def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None):
- super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections)
- self.ignore_null_values = ignore_null_values
+ def __init__(self, **kwargs):
+ super(DynamicsSink, self).__init__(**kwargs)
+ self.ignore_null_values = kwargs.get('ignore_null_values', None)
 self.type = 'DynamicsSink'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py
new file mode 100644
index 000000000000..5f736f9cf658
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py
@@ -0,0 +1,77 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_sink_py3 import CopySink
+
+
+class DynamicsSink(CopySink):
+ """A copy activity Dynamics sink.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param write_batch_size: Write batch size. Type: integer (or Expression
+ with resultType integer), minimum: 0.
+ :type write_batch_size: object
+ :param write_batch_timeout: Write batch timeout. Type: string (or
+ Expression with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type write_batch_timeout: object
+ :param sink_retry_count: Sink retry count. Type: integer (or Expression
+ with resultType integer).
+ :type sink_retry_count: object
+ :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+ resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type sink_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the sink data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :ivar write_behavior: Required. The write behavior for the operation.
+ Default value: "Upsert".
+ :vartype write_behavior: str
+ :param ignore_null_values: The flag indicating whether to ignore null values
+ from input dataset (except key fields) during write operation. Default is
+ false.
Type: boolean (or Expression with resultType boolean). + :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + 'write_behavior': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + write_behavior = "Upsert" + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None: + super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.ignore_null_values = ignore_null_values + self.type = 'DynamicsSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source.py index 5f6c0472bedc..d38f96fee911 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source.py @@ -15,6 +15,8 @@ class DynamicsSource(CopySource): """A copy activity Dynamics source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class DynamicsSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics (online & on-premises). 
Type: string (or Expression @@ -50,7 +52,7 @@ class DynamicsSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(DynamicsSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'DynamicsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source_py3.py new file mode 100644 index 000000000000..12d83625bc6a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source_py3.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class DynamicsSource(CopySource): + """A copy activity Dynamics source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: FetchXML is a proprietary query language that is used in + Microsoft Dynamics (online & on-premises). Type: string (or Expression + with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'DynamicsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service.py index f9527b58a4f0..6249c2e2334b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service.py @@ -15,6 +15,8 @@ class EloquaLinkedService(LinkedService): """Eloqua server linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,13 +31,13 @@ class EloquaLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param endpoint: The endpoint of the Eloqua server. (i.e. + :param endpoint: Required. The endpoint of the Eloqua server. (i.e. eloqua.example.com) :type endpoint: object - :param username: The site name and user name of your Eloqua account in the - form: sitename/username. (i.e. Eloqua/Alice) + :param username: Required. The site name and user name of your Eloqua + account in the form: sitename/username. (i.e. Eloqua/Alice) :type username: object :param password: The password corresponding to the user name. 
:type password: ~azure.mgmt.datafactory.models.SecretBase @@ -77,13 +79,13 @@ class EloquaLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, endpoint, username, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, password=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None): - super(EloquaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.endpoint = endpoint - self.username = username - self.password = password - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(EloquaLinkedService, self).__init__(**kwargs) + self.endpoint = kwargs.get('endpoint', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'Eloqua' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service_py3.py new file mode 100644 index 000000000000..623d798036a3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service_py3.py @@ -0,0 +1,91 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class EloquaLinkedService(LinkedService): + """Eloqua server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of the Eloqua server. (i.e. + eloqua.example.com) + :type endpoint: object + :param username: Required. The site name and user name of your Eloqua + account in the form: sitename/username. (i.e. 
Eloqua/Alice) + :type username: object + :param password: The password corresponding to the user name. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, endpoint, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(EloquaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.endpoint = endpoint + self.username = username + self.password = password + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Eloqua' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset.py index ca33bd388e2a..56adc0ce47c4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset.py @@ -15,6 +15,8 @@ class EloquaObjectDataset(Dataset): """Eloqua server dataset. + All required parameters must be populated in order to send to Azure. 
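[Editor's note: a minimal usage sketch, not part of the generated patch. It
shows how the keyword-only constructor introduced in
eloqua_linked_service_py3.py above is typically called. The import path
follows the docstrings; SecureString is assumed to be the SecretBase
implementation shipped in the same models package, and all values are
placeholders.]

    from azure.mgmt.datafactory.models import EloquaLinkedService, SecureString

    # endpoint and username are the two required parameters per the
    # _validation map above; password is an optional SecretBase.
    eloqua_ls = EloquaLinkedService(
        endpoint="eloqua.example.com",
        username="Eloqua/Alice",  # sitename/username form
        password=SecureString(value="<password>"))

[End editor's note.]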
+ :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class EloquaObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class EloquaObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -64,7 +66,7 @@ class EloquaObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): - super(EloquaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name + def __init__(self, **kwargs): + super(EloquaObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) self.type = 'EloquaObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset_py3.py new file mode 100644 index 000000000000..705f43cd225c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class EloquaObjectDataset(Dataset): + """Eloqua server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(EloquaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'EloquaObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source.py index 94d399ea1a32..f016140189f1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source.py @@ -15,6 +15,8 @@ class EloquaSource(CopySource): """A copy activity Eloqua server source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class EloquaSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
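[Editor's note: a hedged sketch, not part of the patch, showing how the
Eloqua dataset and source defined above fit together. LinkedServiceReference
comes from the same models package; the reference and table names are
placeholders.]

    from azure.mgmt.datafactory.models import (
        EloquaObjectDataset, EloquaSource, LinkedServiceReference)

    # Dataset: linked_service_name is the only required parameter.
    dataset = EloquaObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name="EloquaLS"),
        table_name="Accounts")

    # Source: every parameter is optional; query narrows what gets copied.
    source = EloquaSource(query="SELECT * FROM Accounts")

[End editor's note.]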
@@ -49,7 +51,7 @@ class EloquaSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(EloquaSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'EloquaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source_py3.py new file mode 100644 index 000000000000..d200ff32fd9d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class EloquaSource(CopySource): + """A copy activity Eloqua server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'EloquaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference.py index ee7eeed24a3d..5db1448a5a55 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference.py @@ -28,7 +28,7 @@ class EntityReference(Model): 'reference_name': {'key': 'referenceName', 'type': 'str'}, } - def __init__(self, type=None, reference_name=None): - super(EntityReference, self).__init__() - self.type = type - self.reference_name = reference_name + def __init__(self, **kwargs): + super(EntityReference, self).__init__(**kwargs) + self.type = kwargs.get('type', None) + self.reference_name = kwargs.get('reference_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference_py3.py new file mode 100644 index 000000000000..f87698b67a64 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference_py3.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class EntityReference(Model): + """The entity reference. + + :param type: The type of this referenced entity. Possible values include: + 'IntegrationRuntimeReference', 'LinkedServiceReference' + :type type: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType + :param reference_name: The name of this referenced entity. 
+ :type reference_name: str + """ + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + def __init__(self, *, type=None, reference_name: str=None, **kwargs) -> None: + super(EntityReference, self).__init__(**kwargs) + self.type = type + self.reference_name = reference_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity.py index a28ea529c95b..0008b5eee153 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity.py @@ -15,10 +15,12 @@ class ExecutePipelineActivity(ControlActivity): """Execute pipeline activity. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Activity name. + :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str @@ -26,9 +28,9 @@ class ExecutePipelineActivity(ControlActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param pipeline: Pipeline reference. + :param pipeline: Required. Pipeline reference. :type pipeline: ~azure.mgmt.datafactory.models.PipelineReference :param parameters: Pipeline parameters. :type parameters: dict[str, object] @@ -55,9 +57,9 @@ class ExecutePipelineActivity(ControlActivity): 'wait_on_completion': {'key': 'typeProperties.waitOnCompletion', 'type': 'bool'}, } - def __init__(self, name, pipeline, additional_properties=None, description=None, depends_on=None, user_properties=None, parameters=None, wait_on_completion=None): - super(ExecutePipelineActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties) - self.pipeline = pipeline - self.parameters = parameters - self.wait_on_completion = wait_on_completion + def __init__(self, **kwargs): + super(ExecutePipelineActivity, self).__init__(**kwargs) + self.pipeline = kwargs.get('pipeline', None) + self.parameters = kwargs.get('parameters', None) + self.wait_on_completion = kwargs.get('wait_on_completion', None) self.type = 'ExecutePipeline' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity_py3.py new file mode 100644 index 000000000000..addaafabe7b0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity_py3.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity_py3 import ControlActivity + + +class ExecutePipelineActivity(ControlActivity): + """Execute pipeline activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param pipeline: Required. Pipeline reference. + :type pipeline: ~azure.mgmt.datafactory.models.PipelineReference + :param parameters: Pipeline parameters. + :type parameters: dict[str, object] + :param wait_on_completion: Defines whether activity execution will wait + for the dependent pipeline execution to finish. Default is false. + :type wait_on_completion: bool + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'pipeline': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipeline': {'key': 'typeProperties.pipeline', 'type': 'PipelineReference'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, + 'wait_on_completion': {'key': 'typeProperties.waitOnCompletion', 'type': 'bool'}, + } + + def __init__(self, *, name: str, pipeline, additional_properties=None, description: str=None, depends_on=None, user_properties=None, parameters=None, wait_on_completion: bool=None, **kwargs) -> None: + super(ExecutePipelineActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.pipeline = pipeline + self.parameters = parameters + self.wait_on_completion = wait_on_completion + self.type = 'ExecutePipeline' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity.py index b9003be9c119..9efa853dac86 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity.py @@ -15,10 +15,12 @@ class ExecuteSSISPackageActivity(ExecutionActivity): """Execute SSIS package activity. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Activity name. + :param name: Required. Activity name. :type name: str :param description: Activity description. 
:type description: str @@ -26,14 +28,14 @@ class ExecuteSSISPackageActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param package_location: SSIS package location. + :param package_location: Required. SSIS package location. :type package_location: ~azure.mgmt.datafactory.models.SSISPackageLocation :param runtime: Specifies the runtime to execute SSIS package. The value should be "x86" or "x64". Type: string (or Expression with resultType @@ -48,7 +50,7 @@ class ExecuteSSISPackageActivity(ExecutionActivity): :param execution_credential: The package execution credential. :type execution_credential: ~azure.mgmt.datafactory.models.SSISExecutionCredential - :param connect_via: The integration runtime reference. + :param connect_via: Required. The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param project_parameters: The project level parameters to execute the @@ -105,18 +107,18 @@ class ExecuteSSISPackageActivity(ExecutionActivity): 'log_location': {'key': 'typeProperties.logLocation', 'type': 'SSISLogLocation'}, } - def __init__(self, name, package_location, connect_via, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, runtime=None, logging_level=None, environment_path=None, execution_credential=None, project_parameters=None, package_parameters=None, project_connection_managers=None, package_connection_managers=None, property_overrides=None, log_location=None): - super(ExecuteSSISPackageActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) - self.package_location = package_location - self.runtime = runtime - self.logging_level = logging_level - self.environment_path = environment_path - self.execution_credential = execution_credential - self.connect_via = connect_via - self.project_parameters = project_parameters - self.package_parameters = package_parameters - self.project_connection_managers = project_connection_managers - self.package_connection_managers = package_connection_managers - self.property_overrides = property_overrides - self.log_location = log_location + def __init__(self, **kwargs): + super(ExecuteSSISPackageActivity, self).__init__(**kwargs) + self.package_location = kwargs.get('package_location', None) + self.runtime = kwargs.get('runtime', None) + self.logging_level = kwargs.get('logging_level', None) + self.environment_path = kwargs.get('environment_path', None) + self.execution_credential = kwargs.get('execution_credential', None) + self.connect_via = kwargs.get('connect_via', None) + self.project_parameters = kwargs.get('project_parameters', None) + self.package_parameters = kwargs.get('package_parameters', None) + self.project_connection_managers = kwargs.get('project_connection_managers', None) + self.package_connection_managers = 
kwargs.get('package_connection_managers', None) + self.property_overrides = kwargs.get('property_overrides', None) + self.log_location = kwargs.get('log_location', None) self.type = 'ExecuteSSISPackage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity_py3.py new file mode 100644 index 000000000000..64efa9cd63ac --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity_py3.py @@ -0,0 +1,124 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class ExecuteSSISPackageActivity(ExecutionActivity): + """Execute SSIS package activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param package_location: Required. SSIS package location. + :type package_location: ~azure.mgmt.datafactory.models.SSISPackageLocation + :param runtime: Specifies the runtime to execute SSIS package. The value + should be "x86" or "x64". Type: string (or Expression with resultType + string). + :type runtime: object + :param logging_level: The logging level of SSIS package execution. Type: + string (or Expression with resultType string). + :type logging_level: object + :param environment_path: The environment path to execute the SSIS package. + Type: string (or Expression with resultType string). + :type environment_path: object + :param execution_credential: The package execution credential. + :type execution_credential: + ~azure.mgmt.datafactory.models.SSISExecutionCredential + :param connect_via: Required. The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param project_parameters: The project level parameters to execute the + SSIS package. + :type project_parameters: dict[str, + ~azure.mgmt.datafactory.models.SSISExecutionParameter] + :param package_parameters: The package level parameters to execute the + SSIS package. 
+ :type package_parameters: dict[str, + ~azure.mgmt.datafactory.models.SSISExecutionParameter] + :param project_connection_managers: The project level connection managers + to execute the SSIS package. + :type project_connection_managers: dict[str, dict[str, + ~azure.mgmt.datafactory.models.SSISExecutionParameter]] + :param package_connection_managers: The package level connection managers + to execute the SSIS package. + :type package_connection_managers: dict[str, dict[str, + ~azure.mgmt.datafactory.models.SSISExecutionParameter]] + :param property_overrides: The property overrides to execute the SSIS + package. + :type property_overrides: dict[str, + ~azure.mgmt.datafactory.models.SSISPropertyOverride] + :param log_location: SSIS package execution log location. + :type log_location: ~azure.mgmt.datafactory.models.SSISLogLocation + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'package_location': {'required': True}, + 'connect_via': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'package_location': {'key': 'typeProperties.packageLocation', 'type': 'SSISPackageLocation'}, + 'runtime': {'key': 'typeProperties.runtime', 'type': 'object'}, + 'logging_level': {'key': 'typeProperties.loggingLevel', 'type': 'object'}, + 'environment_path': {'key': 'typeProperties.environmentPath', 'type': 'object'}, + 'execution_credential': {'key': 'typeProperties.executionCredential', 'type': 'SSISExecutionCredential'}, + 'connect_via': {'key': 'typeProperties.connectVia', 'type': 'IntegrationRuntimeReference'}, + 'project_parameters': {'key': 'typeProperties.projectParameters', 'type': '{SSISExecutionParameter}'}, + 'package_parameters': {'key': 'typeProperties.packageParameters', 'type': '{SSISExecutionParameter}'}, + 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, + 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, + 'property_overrides': {'key': 'typeProperties.propertyOverrides', 'type': '{SSISPropertyOverride}'}, + 'log_location': {'key': 'typeProperties.logLocation', 'type': 'SSISLogLocation'}, + } + + def __init__(self, *, name: str, package_location, connect_via, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, runtime=None, logging_level=None, environment_path=None, execution_credential=None, project_parameters=None, package_parameters=None, project_connection_managers=None, package_connection_managers=None, property_overrides=None, log_location=None, **kwargs) -> None: + super(ExecuteSSISPackageActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.package_location = package_location + self.runtime = runtime + self.logging_level = logging_level + self.environment_path = 
environment_path + self.execution_credential = execution_credential + self.connect_via = connect_via + self.project_parameters = project_parameters + self.package_parameters = package_parameters + self.project_connection_managers = project_connection_managers + self.package_connection_managers = package_connection_managers + self.property_overrides = property_overrides + self.log_location = log_location + self.type = 'ExecuteSSISPackage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py index b2b0bbcf9714..8c16eff2c753 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py @@ -26,10 +26,12 @@ class ExecutionActivity(Activity): HDInsightStreamingActivity, HDInsightMapReduceActivity, HDInsightPigActivity, HDInsightHiveActivity, CopyActivity + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Activity name. + :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str @@ -37,7 +39,7 @@ class ExecutionActivity(Activity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param linked_service_name: Linked service reference. 
:type linked_service_name: @@ -66,8 +68,8 @@ class ExecutionActivity(Activity): 'type': {'AzureFunctionActivity': 'AzureFunctionActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'GetMetadata': 'GetMetadataActivity', 'WebActivity': 'WebActivity', 'Lookup': 'LookupActivity', 'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'Delete': 'DeleteActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'Custom': 'CustomActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'Copy': 'CopyActivity'} } - def __init__(self, name, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None): - super(ExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties) - self.linked_service_name = linked_service_name - self.policy = policy + def __init__(self, **kwargs): + super(ExecutionActivity, self).__init__(**kwargs) + self.linked_service_name = kwargs.get('linked_service_name', None) + self.policy = kwargs.get('policy', None) self.type = 'Execution' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity_py3.py new file mode 100644 index 000000000000..5deb58db81a7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity_py3.py @@ -0,0 +1,75 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .activity_py3 import Activity + + +class ExecutionActivity(Activity): + """Base class for all execution activities. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureFunctionActivity, DatabricksSparkPythonActivity, + DatabricksSparkJarActivity, DatabricksNotebookActivity, + DataLakeAnalyticsUSQLActivity, AzureMLUpdateResourceActivity, + AzureMLBatchExecutionActivity, GetMetadataActivity, WebActivity, + LookupActivity, AzureDataExplorerCommandActivity, DeleteActivity, + SqlServerStoredProcedureActivity, CustomActivity, + ExecuteSSISPackageActivity, HDInsightSparkActivity, + HDInsightStreamingActivity, HDInsightMapReduceActivity, + HDInsightPigActivity, HDInsightHiveActivity, CopyActivity + + All required parameters must be populated in order to send to Azure. 
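[Editor's note: ExecutionActivity is an abstract base that callers do not
instantiate directly; the _subtype_map below routes deserialization to a
concrete subclass. A minimal sketch using ExecutePipelineActivity, which this
patch defines earlier; the activity and pipeline names are placeholders.]

    from azure.mgmt.datafactory.models import (
        ExecutePipelineActivity, PipelineReference)

    # name and pipeline are required per the _validation map; the rest
    # of the parameters are optional.
    activity = ExecutePipelineActivity(
        name="RunChildPipeline",
        pipeline=PipelineReference(reference_name="ChildPipeline"),
        wait_on_completion=True)

[End editor's note.]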
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + } + + _subtype_map = { + 'type': {'AzureFunctionActivity': 'AzureFunctionActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'GetMetadata': 'GetMetadataActivity', 'WebActivity': 'WebActivity', 'Lookup': 'LookupActivity', 'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'Delete': 'DeleteActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'Custom': 'CustomActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'Copy': 'CopyActivity'} + } + + def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, **kwargs) -> None: + super(ExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.linked_service_name = linked_service_name + self.policy = policy + self.type = 'Execution' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request.py index c10882c1f471..a6a2cc280b4d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request.py @@ -26,7 +26,7 @@ class ExposureControlRequest(Model): 'feature_type': {'key': 'featureType', 'type': 'str'}, } - def __init__(self, feature_name=None, 
feature_type=None): - super(ExposureControlRequest, self).__init__() - self.feature_name = feature_name - self.feature_type = feature_type + def __init__(self, **kwargs): + super(ExposureControlRequest, self).__init__(**kwargs) + self.feature_name = kwargs.get('feature_name', None) + self.feature_type = kwargs.get('feature_type', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request_py3.py new file mode 100644 index 000000000000..b3f4099fb972 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request_py3.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ExposureControlRequest(Model): + """The exposure control request. + + :param feature_name: The feature name. + :type feature_name: str + :param feature_type: The feature type. + :type feature_type: str + """ + + _attribute_map = { + 'feature_name': {'key': 'featureName', 'type': 'str'}, + 'feature_type': {'key': 'featureType', 'type': 'str'}, + } + + def __init__(self, *, feature_name: str=None, feature_type: str=None, **kwargs) -> None: + super(ExposureControlRequest, self).__init__(**kwargs) + self.feature_name = feature_name + self.feature_type = feature_type diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response.py index 51237ef9536a..868647e3c5b3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response.py @@ -34,7 +34,7 @@ class ExposureControlResponse(Model): 'value': {'key': 'value', 'type': 'str'}, } - def __init__(self): - super(ExposureControlResponse, self).__init__() + def __init__(self, **kwargs): + super(ExposureControlResponse, self).__init__(**kwargs) self.feature_name = None self.value = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response_py3.py new file mode 100644 index 000000000000..1ac7138e7984 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response_py3.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
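Review note: ExposureControlRequest and the ExposureControlResponse below form a request/response pair. The request exposes writable feature_name and feature_type, while both response attributes carry 'readonly' validation and are only filled in when msrest deserializes the service reply. A short sketch (the feature name and type values are hypothetical, purely illustrative):

from azure.mgmt.datafactory.models import (
    ExposureControlRequest,
    ExposureControlResponse,
)

request = ExposureControlRequest(
    feature_name='ADFSampleFeature',  # hypothetical feature name
    feature_type='Feature',           # hypothetical feature type
)

response = ExposureControlResponse()
# Readonly attributes stay None on locally built instances; they are
# populated only from a deserialized service response.
assert response.feature_name is None and response.value is None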
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ExposureControlResponse(Model): + """The exposure control response. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar feature_name: The feature name. + :vartype feature_name: str + :ivar value: The feature value. + :vartype value: str + """ + + _validation = { + 'feature_name': {'readonly': True}, + 'value': {'readonly': True}, + } + + _attribute_map = { + 'feature_name': {'key': 'featureName', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(ExposureControlResponse, self).__init__(**kwargs) + self.feature_name = None + self.value = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression.py index 1dcebd0c48de..4b16ceca2794 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression.py @@ -18,9 +18,11 @@ class Expression(Model): Variables are only populated by the server, and will be ignored when sending a request. - :ivar type: Expression type. Default value: "Expression" . + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Expression type. Default value: "Expression" . :vartype type: str - :param value: Expression value. + :param value: Required. Expression value. :type value: str """ @@ -36,6 +38,6 @@ class Expression(Model): type = "Expression" - def __init__(self, value): - super(Expression, self).__init__() - self.value = value + def __init__(self, **kwargs): + super(Expression, self).__init__(**kwargs) + self.value = kwargs.get('value', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression_py3.py new file mode 100644 index 000000000000..c6ad023a57ed --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression_py3.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class Expression(Model): + """Azure Data Factory expression definition. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Expression type. Default value: "Expression" . + :vartype type: str + :param value: Required. Expression value. 
+ :type value: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + type = "Expression" + + def __init__(self, *, value: str, **kwargs) -> None: + super(Expression, self).__init__(**kwargs) + self.value = value diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory.py index c425c329d199..614b3d7fc97a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory.py @@ -71,11 +71,11 @@ class Factory(Resource): 'repo_configuration': {'key': 'properties.repoConfiguration', 'type': 'FactoryRepoConfiguration'}, } - def __init__(self, location=None, tags=None, additional_properties=None, identity=None, repo_configuration=None): - super(Factory, self).__init__(location=location, tags=tags) - self.additional_properties = additional_properties - self.identity = identity + def __init__(self, **kwargs): + super(Factory, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.identity = kwargs.get('identity', None) self.provisioning_state = None self.create_time = None self.version = None - self.repo_configuration = repo_configuration + self.repo_configuration = kwargs.get('repo_configuration', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration.py index 30ab5463f015..02cec39d8313 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration.py @@ -15,17 +15,19 @@ class FactoryGitHubConfiguration(FactoryRepoConfiguration): """Factory's GitHub repo information. - :param account_name: Account name. + All required parameters must be populated in order to send to Azure. + + :param account_name: Required. Account name. :type account_name: str - :param repository_name: Repository name. + :param repository_name: Required. Repository name. :type repository_name: str - :param collaboration_branch: Collaboration branch. + :param collaboration_branch: Required. Collaboration branch. :type collaboration_branch: str - :param root_folder: Root folder. + :param root_folder: Required. Root folder. :type root_folder: str :param last_commit_id: Last commit id. :type last_commit_id: str - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param host_name: GitHub Enterprise host name. 
For example: https://github.mydomain.com @@ -50,7 +52,7 @@ class FactoryGitHubConfiguration(FactoryRepoConfiguration): 'host_name': {'key': 'hostName', 'type': 'str'}, } - def __init__(self, account_name, repository_name, collaboration_branch, root_folder, last_commit_id=None, host_name=None): - super(FactoryGitHubConfiguration, self).__init__(account_name=account_name, repository_name=repository_name, collaboration_branch=collaboration_branch, root_folder=root_folder, last_commit_id=last_commit_id) - self.host_name = host_name + def __init__(self, **kwargs): + super(FactoryGitHubConfiguration, self).__init__(**kwargs) + self.host_name = kwargs.get('host_name', None) self.type = 'FactoryGitHubConfiguration' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration_py3.py new file mode 100644 index 000000000000..23c5dbf21f0c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration_py3.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .factory_repo_configuration_py3 import FactoryRepoConfiguration + + +class FactoryGitHubConfiguration(FactoryRepoConfiguration): + """Factory's GitHub repo information. + + All required parameters must be populated in order to send to Azure. + + :param account_name: Required. Account name. + :type account_name: str + :param repository_name: Required. Repository name. + :type repository_name: str + :param collaboration_branch: Required. Collaboration branch. + :type collaboration_branch: str + :param root_folder: Required. Root folder. + :type root_folder: str + :param last_commit_id: Last commit id. + :type last_commit_id: str + :param type: Required. Constant filled by server. + :type type: str + :param host_name: GitHub Enterprise host name. 
For example: + https://github.mydomain.com + :type host_name: str + """ + + _validation = { + 'account_name': {'required': True}, + 'repository_name': {'required': True}, + 'collaboration_branch': {'required': True}, + 'root_folder': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'repository_name': {'key': 'repositoryName', 'type': 'str'}, + 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, + 'root_folder': {'key': 'rootFolder', 'type': 'str'}, + 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host_name': {'key': 'hostName', 'type': 'str'}, + } + + def __init__(self, *, account_name: str, repository_name: str, collaboration_branch: str, root_folder: str, last_commit_id: str=None, host_name: str=None, **kwargs) -> None: + super(FactoryGitHubConfiguration, self).__init__(account_name=account_name, repository_name=repository_name, collaboration_branch=collaboration_branch, root_folder=root_folder, last_commit_id=last_commit_id, **kwargs) + self.host_name = host_name + self.type = 'FactoryGitHubConfiguration' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity.py index e1c7644fee88..dad745424af3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity.py @@ -18,8 +18,10 @@ class FactoryIdentity(Model): Variables are only populated by the server, and will be ignored when sending a request. - :ivar type: The identity type. Currently the only supported type is - 'SystemAssigned'. Default value: "SystemAssigned" . + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. The identity type. Currently the only supported type + is 'SystemAssigned'. Default value: "SystemAssigned" . :vartype type: str :ivar principal_id: The principal id of the identity. :vartype principal_id: str @@ -41,7 +43,7 @@ class FactoryIdentity(Model): type = "SystemAssigned" - def __init__(self): - super(FactoryIdentity, self).__init__() + def __init__(self, **kwargs): + super(FactoryIdentity, self).__init__(**kwargs) self.principal_id = None self.tenant_id = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity_py3.py new file mode 100644 index 000000000000..567100d8c19e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity_py3.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FactoryIdentity(Model): + """Identity properties of the factory resource. + + Variables are only populated by the server, and will be ignored when + sending a request. 
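Review note: FactoryIdentity combines the two server-side patterns used throughout this patch: a constant (type is pinned to "SystemAssigned" as a class attribute) and readonly fields (principal_id, tenant_id). A sketch of how it plugs into the Factory model that follows in this patch (the location value is illustrative):

from azure.mgmt.datafactory.models import Factory, FactoryIdentity

factory = Factory(location='eastus', identity=FactoryIdentity())
assert factory.identity.type == 'SystemAssigned'  # class-level constant
# principal_id and tenant_id remain None until the service assigns the
# identity and the response is deserialized back into the model.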
+ + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. The identity type. Currently the only supported type + is 'SystemAssigned'. Default value: "SystemAssigned" . + :vartype type: str + :ivar principal_id: The principal id of the identity. + :vartype principal_id: str + :ivar tenant_id: The client tenant id of the identity. + :vartype tenant_id: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + } + + type = "SystemAssigned" + + def __init__(self, **kwargs) -> None: + super(FactoryIdentity, self).__init__(**kwargs) + self.principal_id = None + self.tenant_id = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_py3.py new file mode 100644 index 000000000000..0682aa5f8852 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_py3.py @@ -0,0 +1,81 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .resource_py3 import Resource + + +class Factory(Resource): + """Factory resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :param location: The resource location. + :type location: str + :param tags: The resource tags. + :type tags: dict[str, str] + :ivar e_tag: Etag identifies change in the resource. + :vartype e_tag: str + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param identity: Managed service identity of the factory. + :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity + :ivar provisioning_state: Factory provisioning state, example Succeeded. + :vartype provisioning_state: str + :ivar create_time: Time the factory was created in ISO8601 format. + :vartype create_time: datetime + :ivar version: Version of the factory. + :vartype version: str + :param repo_configuration: Git repo information of the factory. 
+ :type repo_configuration: + ~azure.mgmt.datafactory.models.FactoryRepoConfiguration + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'e_tag': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'create_time': {'readonly': True}, + 'version': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'e_tag': {'key': 'eTag', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'create_time': {'key': 'properties.createTime', 'type': 'iso-8601'}, + 'version': {'key': 'properties.version', 'type': 'str'}, + 'repo_configuration': {'key': 'properties.repoConfiguration', 'type': 'FactoryRepoConfiguration'}, + } + + def __init__(self, *, location: str=None, tags=None, additional_properties=None, identity=None, repo_configuration=None, **kwargs) -> None: + super(Factory, self).__init__(location=location, tags=tags, **kwargs) + self.additional_properties = additional_properties + self.identity = identity + self.provisioning_state = None + self.create_time = None + self.version = None + self.repo_configuration = repo_configuration diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration.py index d470fece8248..7c20db016c71 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration.py @@ -18,17 +18,19 @@ class FactoryRepoConfiguration(Model): You probably want to use the sub-classes and not this class directly. Known sub-classes are: FactoryVSTSConfiguration, FactoryGitHubConfiguration - :param account_name: Account name. + All required parameters must be populated in order to send to Azure. + + :param account_name: Required. Account name. :type account_name: str - :param repository_name: Repository name. + :param repository_name: Required. Repository name. :type repository_name: str - :param collaboration_branch: Collaboration branch. + :param collaboration_branch: Required. Collaboration branch. :type collaboration_branch: str - :param root_folder: Root folder. + :param root_folder: Required. Root folder. :type root_folder: str :param last_commit_id: Last commit id. :type last_commit_id: str - :param type: Constant filled by server. + :param type: Required. Constant filled by server. 
:type type: str """ @@ -53,11 +55,11 @@ class FactoryRepoConfiguration(Model): 'type': {'FactoryVSTSConfiguration': 'FactoryVSTSConfiguration', 'FactoryGitHubConfiguration': 'FactoryGitHubConfiguration'} } - def __init__(self, account_name, repository_name, collaboration_branch, root_folder, last_commit_id=None): - super(FactoryRepoConfiguration, self).__init__() - self.account_name = account_name - self.repository_name = repository_name - self.collaboration_branch = collaboration_branch - self.root_folder = root_folder - self.last_commit_id = last_commit_id + def __init__(self, **kwargs): + super(FactoryRepoConfiguration, self).__init__(**kwargs) + self.account_name = kwargs.get('account_name', None) + self.repository_name = kwargs.get('repository_name', None) + self.collaboration_branch = kwargs.get('collaboration_branch', None) + self.root_folder = kwargs.get('root_folder', None) + self.last_commit_id = kwargs.get('last_commit_id', None) self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration_py3.py new file mode 100644 index 000000000000..eefed7978850 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration_py3.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FactoryRepoConfiguration(Model): + """Factory's git repo information. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: FactoryVSTSConfiguration, FactoryGitHubConfiguration + + All required parameters must be populated in order to send to Azure. + + :param account_name: Required. Account name. + :type account_name: str + :param repository_name: Required. Repository name. + :type repository_name: str + :param collaboration_branch: Required. Collaboration branch. + :type collaboration_branch: str + :param root_folder: Required. Root folder. + :type root_folder: str + :param last_commit_id: Last commit id. + :type last_commit_id: str + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'account_name': {'required': True}, + 'repository_name': {'required': True}, + 'collaboration_branch': {'required': True}, + 'root_folder': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'repository_name': {'key': 'repositoryName', 'type': 'str'}, + 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, + 'root_folder': {'key': 'rootFolder', 'type': 'str'}, + 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'FactoryVSTSConfiguration': 'FactoryVSTSConfiguration', 'FactoryGitHubConfiguration': 'FactoryGitHubConfiguration'} + } + + def __init__(self, *, account_name: str, repository_name: str, collaboration_branch: str, root_folder: str, last_commit_id: str=None, **kwargs) -> None: + super(FactoryRepoConfiguration, self).__init__(**kwargs) + self.account_name = account_name + self.repository_name = repository_name + self.collaboration_branch = collaboration_branch + self.root_folder = root_folder + self.last_commit_id = last_commit_id + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update.py index 0a29c7c8a9bf..44eac9d287ce 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update.py @@ -27,7 +27,7 @@ class FactoryRepoUpdate(Model): 'repo_configuration': {'key': 'repoConfiguration', 'type': 'FactoryRepoConfiguration'}, } - def __init__(self, factory_resource_id=None, repo_configuration=None): - super(FactoryRepoUpdate, self).__init__() - self.factory_resource_id = factory_resource_id - self.repo_configuration = repo_configuration + def __init__(self, **kwargs): + super(FactoryRepoUpdate, self).__init__(**kwargs) + self.factory_resource_id = kwargs.get('factory_resource_id', None) + self.repo_configuration = kwargs.get('repo_configuration', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update_py3.py new file mode 100644 index 000000000000..68aca7a48db8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update_py3.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FactoryRepoUpdate(Model): + """Factory's git repo information. + + :param factory_resource_id: The factory resource id. + :type factory_resource_id: str + :param repo_configuration: Git repo information of the factory. 
+ :type repo_configuration: + ~azure.mgmt.datafactory.models.FactoryRepoConfiguration + """ + + _attribute_map = { + 'factory_resource_id': {'key': 'factoryResourceId', 'type': 'str'}, + 'repo_configuration': {'key': 'repoConfiguration', 'type': 'FactoryRepoConfiguration'}, + } + + def __init__(self, *, factory_resource_id: str=None, repo_configuration=None, **kwargs) -> None: + super(FactoryRepoUpdate, self).__init__(**kwargs) + self.factory_resource_id = factory_resource_id + self.repo_configuration = repo_configuration diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters.py index 0524027900dd..e9977fceff86 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters.py @@ -26,7 +26,7 @@ class FactoryUpdateParameters(Model): 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, } - def __init__(self, tags=None, identity=None): - super(FactoryUpdateParameters, self).__init__() - self.tags = tags - self.identity = identity + def __init__(self, **kwargs): + super(FactoryUpdateParameters, self).__init__(**kwargs) + self.tags = kwargs.get('tags', None) + self.identity = kwargs.get('identity', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters_py3.py new file mode 100644 index 000000000000..5bd523fedf3d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters_py3.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FactoryUpdateParameters(Model): + """Parameters for updating a factory resource. + + :param tags: The resource tags. + :type tags: dict[str, str] + :param identity: Managed service identity of the factory. + :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity + """ + + _attribute_map = { + 'tags': {'key': 'tags', 'type': '{str}'}, + 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, + } + + def __init__(self, *, tags=None, identity=None, **kwargs) -> None: + super(FactoryUpdateParameters, self).__init__(**kwargs) + self.tags = tags + self.identity = identity diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration.py index 3ee1c33ba563..6d07c68d23e3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration.py @@ -15,19 +15,21 @@ class FactoryVSTSConfiguration(FactoryRepoConfiguration): """Factory's VSTS repo information. 
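Review note: FactoryRepoConfiguration is the polymorphic base here; its _subtype_map keys the 'type' discriminator to FactoryVSTSConfiguration and FactoryGitHubConfiguration, which is how a deserialized payload lands on the right subclass. A sketch of that dispatch with msrest used directly, assuming Deserializer behaves as it does inside the generated operations classes (payload keys follow the _attribute_map above; all values are illustrative):

from msrest import Deserializer

from azure.mgmt.datafactory import models

client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
deserialize = Deserializer(client_models)

repo = deserialize('FactoryRepoConfiguration', {
    'type': 'FactoryVSTSConfiguration',  # discriminator drives dispatch
    'accountName': 'contoso',
    'repositoryName': 'adf-pipelines',
    'collaborationBranch': 'master',
    'rootFolder': '/',
    'projectName': 'DataPlatform',
})
assert isinstance(repo, models.FactoryVSTSConfiguration)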
- :param account_name: Account name. + All required parameters must be populated in order to send to Azure. + + :param account_name: Required. Account name. :type account_name: str - :param repository_name: Repository name. + :param repository_name: Required. Repository name. :type repository_name: str - :param collaboration_branch: Collaboration branch. + :param collaboration_branch: Required. Collaboration branch. :type collaboration_branch: str - :param root_folder: Root folder. + :param root_folder: Required. Root folder. :type root_folder: str :param last_commit_id: Last commit id. :type last_commit_id: str - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param project_name: VSTS project name. + :param project_name: Required. VSTS project name. :type project_name: str :param tenant_id: VSTS tenant id. :type tenant_id: str @@ -53,8 +55,8 @@ class FactoryVSTSConfiguration(FactoryRepoConfiguration): 'tenant_id': {'key': 'tenantId', 'type': 'str'}, } - def __init__(self, account_name, repository_name, collaboration_branch, root_folder, project_name, last_commit_id=None, tenant_id=None): - super(FactoryVSTSConfiguration, self).__init__(account_name=account_name, repository_name=repository_name, collaboration_branch=collaboration_branch, root_folder=root_folder, last_commit_id=last_commit_id) - self.project_name = project_name - self.tenant_id = tenant_id + def __init__(self, **kwargs): + super(FactoryVSTSConfiguration, self).__init__(**kwargs) + self.project_name = kwargs.get('project_name', None) + self.tenant_id = kwargs.get('tenant_id', None) self.type = 'FactoryVSTSConfiguration' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration_py3.py new file mode 100644 index 000000000000..4f13c0959d63 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration_py3.py @@ -0,0 +1,62 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .factory_repo_configuration_py3 import FactoryRepoConfiguration + + +class FactoryVSTSConfiguration(FactoryRepoConfiguration): + """Factory's VSTS repo information. + + All required parameters must be populated in order to send to Azure. + + :param account_name: Required. Account name. + :type account_name: str + :param repository_name: Required. Repository name. + :type repository_name: str + :param collaboration_branch: Required. Collaboration branch. + :type collaboration_branch: str + :param root_folder: Required. Root folder. + :type root_folder: str + :param last_commit_id: Last commit id. + :type last_commit_id: str + :param type: Required. Constant filled by server. + :type type: str + :param project_name: Required. VSTS project name. + :type project_name: str + :param tenant_id: VSTS tenant id. 
+ :type tenant_id: str + """ + + _validation = { + 'account_name': {'required': True}, + 'repository_name': {'required': True}, + 'collaboration_branch': {'required': True}, + 'root_folder': {'required': True}, + 'type': {'required': True}, + 'project_name': {'required': True}, + } + + _attribute_map = { + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'repository_name': {'key': 'repositoryName', 'type': 'str'}, + 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, + 'root_folder': {'key': 'rootFolder', 'type': 'str'}, + 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'project_name': {'key': 'projectName', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + } + + def __init__(self, *, account_name: str, repository_name: str, collaboration_branch: str, root_folder: str, project_name: str, last_commit_id: str=None, tenant_id: str=None, **kwargs) -> None: + super(FactoryVSTSConfiguration, self).__init__(account_name=account_name, repository_name=repository_name, collaboration_branch=collaboration_branch, root_folder=root_folder, last_commit_id=last_commit_id, **kwargs) + self.project_name = project_name + self.tenant_id = tenant_id + self.type = 'FactoryVSTSConfiguration' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service.py index e47347966c69..ffced5c2e689 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service.py @@ -15,6 +15,8 @@ class FileServerLinkedService(LinkedService): """File system linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,10 +31,10 @@ class FileServerLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param host: Host name of the server. Type: string (or Expression with - resultType string). + :param host: Required. Host name of the server. Type: string (or + Expression with resultType string). :type host: object :param user_id: User ID to logon the server. Type: string (or Expression with resultType string). 
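Review note: in FileServerLinkedService only host joins type in the _validation table, so the py3 constructor below makes it the lone required keyword. A construction sketch (server, user, and secret values are illustrative; SecureString is the SecretBase subclass this SDK ships for inline secrets):

from azure.mgmt.datafactory.models import FileServerLinkedService, SecureString

linked_service = FileServerLinkedService(
    host='\\\\fileserver01\\share',           # required; literal UNC path
    user_id='CONTOSO\\svc-adf',
    password=SecureString(value='<secret>'),  # :type password: SecretBase
)

Because host is typed object, an ADF expression dict could be passed in place of the literal string.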
@@ -63,10 +65,10 @@ class FileServerLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, host, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, user_id=None, password=None, encrypted_credential=None): - super(FileServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.host = host - self.user_id = user_id - self.password = password - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(FileServerLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.user_id = kwargs.get('user_id', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'FileServer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service_py3.py new file mode 100644 index 000000000000..ec6fe58bb3a3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service_py3.py @@ -0,0 +1,74 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class FileServerLinkedService(LinkedService): + """File system linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. Host name of the server. Type: string (or + Expression with resultType string). + :type host: object + :param user_id: User ID to logon the server. Type: string (or Expression + with resultType string). + :type user_id: object + :param password: Password to logon the server. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'user_id': {'key': 'typeProperties.userId', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_id=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(FileServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.user_id = user_id + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'FileServer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location.py index 247ff02a87a8..edce5fe68a65 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location.py @@ -15,10 +15,12 @@ class FileServerLocation(DatasetLocation): """The location of file server dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Type of dataset storage location. + :param type: Required. Type of dataset storage location. :type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string) @@ -32,5 +34,12 @@ class FileServerLocation(DatasetLocation): 'type': {'required': True}, } - def __init__(self, type, additional_properties=None, folder_path=None, file_name=None): - super(FileServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name) + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(FileServerLocation, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location_py3.py new file mode 100644 index 000000000000..f7fb8354bcbc --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class FileServerLocation(DatasetLocation): + """The location of file server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(FileServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings.py index 00b1ea4539cc..da9d0809e03a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings.py @@ -15,10 +15,12 @@ class FileServerReadSettings(StoreReadSettings): """File server read settings. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: The read setting type. + :param type: Required. The read setting type. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. 
Type: integer (or Expression with resultType @@ -61,11 +63,11 @@ class FileServerReadSettings(StoreReadSettings): 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, type, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery=None, modified_datetime_start=None, modified_datetime_end=None): - super(FileServerReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections) - self.recursive = recursive - self.wildcard_folder_path = wildcard_folder_path - self.wildcard_file_name = wildcard_file_name - self.enable_partition_discovery = enable_partition_discovery - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end + def __init__(self, **kwargs): + super(FileServerReadSettings, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings_py3.py new file mode 100644 index 000000000000..1fadb49b1795 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_settings_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings_py3 import StoreReadSettings + + +class FileServerReadSettings(StoreReadSettings): + """File server read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: FileServer wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: FileServer wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. 
+ :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(FileServerReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings.py index 39a21b076162..e3bc7946d1ac 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings.py @@ -15,6 +15,8 @@ class FileServerWriteSettings(StoreWriteSettings): """File server write settings. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -24,7 +26,7 @@ class FileServerWriteSettings(StoreWriteSettings): :type max_concurrent_connections: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. 
:type type: str """ @@ -32,6 +34,13 @@ class FileServerWriteSettings(StoreWriteSettings): 'type': {'required': True}, } - def __init__(self, additional_properties=None, max_concurrent_connections=None, copy_behavior=None): - super(FileServerWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior) + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(FileServerWriteSettings, self).__init__(**kwargs) self.type = 'FileServerWriteSettings' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings_py3.py new file mode 100644 index 000000000000..b174cf537577 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_settings_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_write_settings_py3 import StoreWriteSettings + + +class FileServerWriteSettings(StoreWriteSettings): + """File server write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(FileServerWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + self.type = 'FileServerWriteSettings' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset.py index adafa94f6030..6874f4c08929 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset.py @@ -15,6 +15,8 @@ class FileShareDataset(Dataset): """An on-premises file system dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class FileShareDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class FileShareDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param folder_path: The path of the on-premises file system. Type: string (or Expression with resultType string). 
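Review note: most type properties on FileShareDataset are declared as object so a caller can pass either a literal value or an ADF expression; that is what the recurring "Type: string (or Expression with resultType string)" docstring wording means. A sketch showing both forms side by side (reference and folder names are illustrative):

from azure.mgmt.datafactory.models import FileShareDataset, LinkedServiceReference

dataset = FileShareDataset(
    linked_service_name=LinkedServiceReference(reference_name='OnPremFileServer'),
    folder_path='landing/incoming',                  # plain literal string
    file_filter={'value': "@dataset().filePattern",  # resolved at run time
                 'type': 'Expression'},
)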
@@ -87,13 +89,13 @@ class FileShareDataset(Dataset): 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, modified_datetime_start=None, modified_datetime_end=None, format=None, file_filter=None, compression=None): - super(FileShareDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.folder_path = folder_path - self.file_name = file_name - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end - self.format = format - self.file_filter = file_filter - self.compression = compression + def __init__(self, **kwargs): + super(FileShareDataset, self).__init__(**kwargs) + self.folder_path = kwargs.get('folder_path', None) + self.file_name = kwargs.get('file_name', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + self.format = kwargs.get('format', None) + self.file_filter = kwargs.get('file_filter', None) + self.compression = kwargs.get('compression', None) self.type = 'FileShare' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset_py3.py new file mode 100644 index 000000000000..19e88a264e12 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset_py3.py @@ -0,0 +1,101 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class FileShareDataset(Dataset): + """An on-premises file system dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. 
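+
+    Example (illustrative sketch only; the reference name and paths are
+    hypothetical, and ``LinkedServiceReference`` is assumed to take a
+    ``reference_name`` keyword as elsewhere in this package)::
+
+        from azure.mgmt.datafactory.models import (
+            FileShareDataset, LinkedServiceReference)
+
+        dataset = FileShareDataset(
+            linked_service_name=LinkedServiceReference(
+                reference_name='OnPremFileServer'),
+            folder_path='share/incoming',
+            file_filter='*.csv')
+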
+ :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param folder_path: The path of the on-premises file system. Type: string + (or Expression with resultType string). + :type folder_path: object + :param file_name: The name of the on-premises file system. Type: string + (or Expression with resultType string). + :type file_name: object + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + :param format: The format of the files. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param file_filter: Specify a filter to be used to select a subset of + files in the folderPath rather than all files. Type: string (or Expression + with resultType string). + :type file_filter: object + :param compression: The data compression method used for the file system. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'file_filter': {'key': 'typeProperties.fileFilter', 'type': 'object'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, modified_datetime_start=None, modified_datetime_end=None, format=None, file_filter=None, compression=None, **kwargs) -> None: + super(FileShareDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.folder_path = folder_path + self.file_name = file_name + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + self.format = format + self.file_filter = file_filter + self.compression = compression + self.type = 'FileShare' diff --git 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py index 11baf9c0567e..8b8f238c9534 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py @@ -15,6 +15,8 @@ class FileSystemSink(CopySink): """A copy activity file system sink. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -36,7 +38,7 @@ class FileSystemSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object @@ -57,7 +59,7 @@ class FileSystemSink(CopySink): 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } - def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None): - super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.copy_behavior = copy_behavior + def __init__(self, **kwargs): + super(FileSystemSink, self).__init__(**kwargs) + self.copy_behavior = kwargs.get('copy_behavior', None) self.type = 'FileSystemSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py new file mode 100644 index 000000000000..24f8623cbb02 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class FileSystemSink(CopySink): + """A copy activity file system sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. 
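+
+    Example (illustrative sketch only; the copy behavior value is
+    hypothetical)::
+
+        from azure.mgmt.datafactory.models import FileSystemSink
+
+        sink = FileSystemSink(copy_behavior='FlattenHierarchy')
+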
Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.copy_behavior = copy_behavior + self.type = 'FileSystemSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source.py index 751ae6c44f67..2986b1848153 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source.py @@ -15,6 +15,8 @@ class FileSystemSource(CopySource): """A copy activity file system source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class FileSystemSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param recursive: If true, files under the folder path will be read recursively. Default is true. 
Type: boolean (or Expression with resultType @@ -50,7 +52,7 @@ class FileSystemSource(CopySource): 'recursive': {'key': 'recursive', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None): - super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.recursive = recursive + def __init__(self, **kwargs): + super(FileSystemSource, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) self.type = 'FileSystemSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source_py3.py new file mode 100644 index 000000000000..0598490ca51c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source_py3.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class FileSystemSource(CopySource): + """A copy activity file system source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). 
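+
+    Example (illustrative sketch only; such a source is typically paired
+    with a copy sink like the FileSystemSink above)::
+
+        from azure.mgmt.datafactory.models import FileSystemSource
+
+        source = FileSystemSource(recursive=True)
+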
+ :type recursive: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, **kwargs) -> None: + super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.type = 'FileSystemSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity.py index ed9e2ca9c8fc..1346bb234695 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity.py @@ -15,10 +15,12 @@ class FilterActivity(ControlActivity): """Filter and return results from input array based on the conditions. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Activity name. + :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str @@ -26,11 +28,11 @@ class FilterActivity(ControlActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param items: Input array on which filter should be applied. + :param items: Required. Input array on which filter should be applied. :type items: ~azure.mgmt.datafactory.models.Expression - :param condition: Condition to be used for filtering the input. + :param condition: Required. Condition to be used for filtering the input. 
:type condition: ~azure.mgmt.datafactory.models.Expression """ @@ -52,8 +54,8 @@ class FilterActivity(ControlActivity): 'condition': {'key': 'typeProperties.condition', 'type': 'Expression'}, } - def __init__(self, name, items, condition, additional_properties=None, description=None, depends_on=None, user_properties=None): - super(FilterActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties) - self.items = items - self.condition = condition + def __init__(self, **kwargs): + super(FilterActivity, self).__init__(**kwargs) + self.items = kwargs.get('items', None) + self.condition = kwargs.get('condition', None) self.type = 'Filter' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity_py3.py new file mode 100644 index 000000000000..a07cf01d1dd5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity_py3.py @@ -0,0 +1,61 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity_py3 import ControlActivity + + +class FilterActivity(ControlActivity): + """Filter and return results from input array based on the conditions. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param items: Required. Input array on which filter should be applied. + :type items: ~azure.mgmt.datafactory.models.Expression + :param condition: Required. Condition to be used for filtering the input. 
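+
+    Example (illustrative sketch only; the name and expressions are
+    hypothetical, and ``Expression`` is assumed to take a ``value`` keyword
+    as elsewhere in this package)::
+
+        from azure.mgmt.datafactory.models import Expression, FilterActivity
+
+        activity = FilterActivity(
+            name='FilterNonEmptyFiles',
+            items=Expression(value="@activity('GetFiles').output.childItems"),
+            condition=Expression(value="@greater(item().size, 0)"))
+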
+    :type condition: ~azure.mgmt.datafactory.models.Expression
+    """
+
+    _validation = {
+        'name': {'required': True},
+        'type': {'required': True},
+        'items': {'required': True},
+        'condition': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'items': {'key': 'typeProperties.items', 'type': 'Expression'},
+        'condition': {'key': 'typeProperties.condition', 'type': 'Expression'},
+    }
+
+    def __init__(self, *, name: str, items, condition, additional_properties=None, description: str=None, depends_on=None, user_properties=None, **kwargs) -> None:
+        super(FilterActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs)
+        self.items = items
+        self.condition = condition
+        self.type = 'Filter'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity.py
index 26266fd752bc..5edfa2a8140e 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity.py
@@ -16,10 +16,12 @@ class ForEachActivity(ControlActivity):
     """This activity is used for iterating over a collection and execute given
     activities.
 
+    All required parameters must be populated in order to send to Azure.
+
     :param additional_properties: Unmatched properties from the message are
      deserialized this collection
     :type additional_properties: dict[str, object]
-    :param name: Activity name.
+    :param name: Required. Activity name.
     :type name: str
     :param description: Activity description.
     :type description: str
@@ -27,7 +29,7 @@
     :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
     :param user_properties: Activity user properties.
     :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
-    :param type: Constant filled by server.
+    :param type: Required. Constant filled by server.
     :type type: str
     :param is_sequential: Should the loop be executed in sequence or in
     parallel (max 50)
     :type is_sequential: bool
     :param batch_count: Batch count to be used for controlling the number of
     parallel execution (when isSequential is set to false).
     :type batch_count: int
-    :param items: Collection to iterate.
+    :param items: Required. Collection to iterate.
     :type items: ~azure.mgmt.datafactory.models.Expression
-    :param activities: List of activities to execute .
+    :param activities: Required. List of activities to execute.
     :type activities: list[~azure.mgmt.datafactory.models.Activity]
     """
@@ -62,10 +64,10 @@
         'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'},
     }
 
-    def __init__(self, name, items, activities, additional_properties=None, description=None, depends_on=None, user_properties=None, is_sequential=None, batch_count=None):
-        super(ForEachActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties)
-        self.is_sequential = is_sequential
-        self.batch_count = batch_count
-        self.items = items
-        self.activities = activities
+    def __init__(self, **kwargs):
+        super(ForEachActivity, self).__init__(**kwargs)
+        self.is_sequential = kwargs.get('is_sequential', None)
+        self.batch_count = kwargs.get('batch_count', None)
+        self.items = kwargs.get('items', None)
+        self.activities = kwargs.get('activities', None)
         self.type = 'ForEach'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity_py3.py
new file mode 100644
index 000000000000..7c5c887bb1d9
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity_py3.py
@@ -0,0 +1,73 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .control_activity_py3 import ControlActivity
+
+
+class ForEachActivity(ControlActivity):
+    """This activity is used for iterating over a collection and executing
+    given activities.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param name: Required. Activity name.
+    :type name: str
+    :param description: Activity description.
+    :type description: str
+    :param depends_on: Activity depends on condition.
+    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+    :param user_properties: Activity user properties.
+    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param is_sequential: Should the loop be executed in sequence or in
+     parallel (max 50)
+    :type is_sequential: bool
+    :param batch_count: Batch count to be used for controlling the number of
+     parallel execution (when isSequential is set to false).
+    :type batch_count: int
+    :param items: Required. Collection to iterate.
+    :type items: ~azure.mgmt.datafactory.models.Expression
+    :param activities: Required. List of activities to execute.
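+
+    Example (illustrative sketch only; ``Expression`` and ``WaitActivity``
+    are assumed from this package, and the names and values are
+    hypothetical)::
+
+        from azure.mgmt.datafactory.models import (
+            Expression, ForEachActivity, WaitActivity)
+
+        loop = ForEachActivity(
+            name='ForEachFile',
+            items=Expression(value="@activity('GetFiles').output.childItems"),
+            activities=[WaitActivity(name='Pause', wait_time_in_seconds=1)],
+            is_sequential=False,
+            batch_count=10)  # validated maximum is 50
+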
+ :type activities: list[~azure.mgmt.datafactory.models.Activity] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'batch_count': {'maximum': 50}, + 'items': {'required': True}, + 'activities': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'is_sequential': {'key': 'typeProperties.isSequential', 'type': 'bool'}, + 'batch_count': {'key': 'typeProperties.batchCount', 'type': 'int'}, + 'items': {'key': 'typeProperties.items', 'type': 'Expression'}, + 'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'}, + } + + def __init__(self, *, name: str, items, activities, additional_properties=None, description: str=None, depends_on=None, user_properties=None, is_sequential: bool=None, batch_count: int=None, **kwargs) -> None: + super(ForEachActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.is_sequential = is_sequential + self.batch_count = batch_count + self.items = items + self.activities = activities + self.type = 'ForEach' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings.py index 0d6da105b65b..d5213138b96a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings.py @@ -15,10 +15,12 @@ class FormatReadSettings(Model): """Format read settings. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: The read setting type. + :param type: Required. The read setting type. :type type: str """ @@ -31,7 +33,7 @@ class FormatReadSettings(Model): 'type': {'key': 'type', 'type': 'str'}, } - def __init__(self, type, additional_properties=None): - super(FormatReadSettings, self).__init__() - self.additional_properties = additional_properties - self.type = type + def __init__(self, **kwargs): + super(FormatReadSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = kwargs.get('type', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings_py3.py new file mode 100644 index 000000000000..326da0277b89 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_settings_py3.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FormatReadSettings(Model): + """Format read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, type: str, additional_properties=None, **kwargs) -> None: + super(FormatReadSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings.py index 41b154dae4de..2100c6055d0d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings.py @@ -15,10 +15,12 @@ class FormatWriteSettings(Model): """Format write settings. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: The write setting type. + :param type: Required. The write setting type. :type type: str """ @@ -31,7 +33,7 @@ class FormatWriteSettings(Model): 'type': {'key': 'type', 'type': 'str'}, } - def __init__(self, type, additional_properties=None): - super(FormatWriteSettings, self).__init__() - self.additional_properties = additional_properties - self.type = type + def __init__(self, **kwargs): + super(FormatWriteSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = kwargs.get('type', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings_py3.py new file mode 100644 index 000000000000..4150eceffc1c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_settings_py3.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FormatWriteSettings(Model): + """Format write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. 
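+
+    Example (illustrative sketch only; concrete write settings pass their
+    own type name as the discriminator)::
+
+        from azure.mgmt.datafactory.models import FormatWriteSettings
+
+        settings = FormatWriteSettings(type='FormatWriteSettings')
+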
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, type: str, additional_properties=None, **kwargs) -> None: + super(FormatWriteSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings.py index 47b77a237ea2..e023f9ae91f7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings.py @@ -15,10 +15,12 @@ class FtpReadSettings(StoreReadSettings): """Ftp read settings. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: The read setting type. + :param type: Required. The read setting type. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType @@ -53,9 +55,9 @@ class FtpReadSettings(StoreReadSettings): 'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'}, } - def __init__(self, type, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, use_binary_transfer=None): - super(FtpReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections) - self.recursive = recursive - self.wildcard_folder_path = wildcard_folder_path - self.wildcard_file_name = wildcard_file_name - self.use_binary_transfer = use_binary_transfer + def __init__(self, **kwargs): + super(FtpReadSettings, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.use_binary_transfer = kwargs.get('use_binary_transfer', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings_py3.py new file mode 100644 index 000000000000..748d306307ac --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_settings_py3.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings_py3 import StoreReadSettings + + +class FtpReadSettings(StoreReadSettings): + """Ftp read settings. + + All required parameters must be populated in order to send to Azure. 
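+
+    Example (illustrative sketch only; the wildcard pattern is
+    hypothetical)::
+
+        from azure.mgmt.datafactory.models import FtpReadSettings
+
+        read_settings = FtpReadSettings(
+            type='FtpReadSettings',
+            recursive=True,
+            wildcard_file_name='*.csv',
+            use_binary_transfer=True)
+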
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Ftp wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Ftp wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param use_binary_transfer: Specify whether to use binary transfer mode + for FTP stores. + :type use_binary_transfer: bool + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, use_binary_transfer: bool=None, **kwargs) -> None: + super(FtpReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.use_binary_transfer = use_binary_transfer diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service.py index 51bf170dd8c2..e649ca56e37c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service.py @@ -15,6 +15,8 @@ class FtpServerLinkedService(LinkedService): """A FTP server Linked Service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,10 +31,10 @@ class FtpServerLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param host: Host name of the FTP server. Type: string (or Expression with - resultType string). + :param host: Required. Host name of the FTP server. Type: string (or + Expression with resultType string). :type host: object :param port: The TCP port number that the FTP server uses to listen for client connections. Default value is 21. 
Type: integer (or Expression with @@ -83,14 +85,14 @@ class FtpServerLinkedService(LinkedService): 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, } - def __init__(self, host, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, port=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, enable_ssl=None, enable_server_certificate_validation=None): - super(FtpServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.host = host - self.port = port - self.authentication_type = authentication_type - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential - self.enable_ssl = enable_ssl - self.enable_server_certificate_validation = enable_server_certificate_validation + def __init__(self, **kwargs): + super(FtpServerLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.port = kwargs.get('port', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None) self.type = 'FtpServer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service_py3.py new file mode 100644 index 000000000000..b38ad1c03f46 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service_py3.py @@ -0,0 +1,98 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class FtpServerLinkedService(LinkedService): + """A FTP server Linked Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. Host name of the FTP server. Type: string (or + Expression with resultType string). 
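+
+    Example (illustrative sketch only; the host and credentials are
+    hypothetical placeholders, and ``SecureString`` is assumed from this
+    package as a ``SecretBase`` implementation)::
+
+        from azure.mgmt.datafactory.models import (
+            FtpServerLinkedService, SecureString)
+
+        linked_service = FtpServerLinkedService(
+            host='ftp.example.com',
+            port=21,
+            authentication_type='Basic',
+            user_name='ftpuser',
+            password=SecureString(value='<placeholder>'),
+            enable_ssl=True)
+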
+    :type host: object
+    :param port: The TCP port number that the FTP server uses to listen for
+     client connections. Default value is 21. Type: integer (or Expression
+     with resultType integer), minimum: 0.
+    :type port: object
+    :param authentication_type: The authentication type to be used to connect
+     to the FTP server. Possible values include: 'Basic', 'Anonymous'
+    :type authentication_type: str or
+     ~azure.mgmt.datafactory.models.FtpAuthenticationType
+    :param user_name: Username to log on to the FTP server. Type: string (or
+     Expression with resultType string).
+    :type user_name: object
+    :param password: Password to log on to the FTP server.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    :param enable_ssl: If true, connect to the FTP server over an SSL/TLS
+     channel. Default value is true. Type: boolean (or Expression with
+     resultType boolean).
+    :type enable_ssl: object
+    :param enable_server_certificate_validation: If true, validate the FTP
+     server SSL certificate when connecting over an SSL/TLS channel. Default
+     value is true. Type: boolean (or Expression with resultType boolean).
+    :type enable_server_certificate_validation: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'host': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'host': {'key': 'typeProperties.host', 'type': 'object'},
+        'port': {'key': 'typeProperties.port', 'type': 'object'},
+        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+        'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+        'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'},
+        'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'},
+    }
+
+    def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, enable_ssl=None, enable_server_certificate_validation=None, **kwargs) -> None:
+        super(FtpServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+        self.host = host
+        self.port = port
+        self.authentication_type = authentication_type
+        self.user_name = user_name
+        self.password = password
+        self.encrypted_credential = encrypted_credential
+        self.enable_ssl = enable_ssl
+        self.enable_server_certificate_validation = enable_server_certificate_validation
+        self.type = 'FtpServer'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location.py
index 61ef5bef42cc..5d5e933036df 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location.py
@@ -15,10 +15,12 @@ class FtpServerLocation(DatasetLocation):
     """The location of ftp server dataset.
 
+    All required parameters must be populated in order to send to Azure.
+
     :param additional_properties: Unmatched properties from the message are
      deserialized this collection
     :type additional_properties: dict[str, object]
-    :param type: Type of dataset storage location.
+    :param type: Required. Type of dataset storage location.
     :type type: str
     :param folder_path: Specify the folder path of dataset. Type: string (or
     Expression with resultType string)
@@ -32,5 +34,12 @@
     'type': {'required': True},
     }
 
-    def __init__(self, type, additional_properties=None, folder_path=None, file_name=None):
-        super(FtpServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name)
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'folder_path': {'key': 'folderPath', 'type': 'object'},
+        'file_name': {'key': 'fileName', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(FtpServerLocation, self).__init__(**kwargs)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location_py3.py
new file mode 100644
index 000000000000..ac296bcfca31
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location_py3.py
@@ -0,0 +1,45 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_location_py3 import DatasetLocation
+
+
+class FtpServerLocation(DatasetLocation):
+    """The location of ftp server dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of dataset storage location.
+    :type type: str
+    :param folder_path: Specify the folder path of dataset. Type: string (or
+     Expression with resultType string)
+    :type folder_path: object
+    :param file_name: Specify the file name of dataset. Type: string (or
+     Expression with resultType string).
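+
+    Example (illustrative sketch only; the folder and file names are
+    hypothetical)::
+
+        from azure.mgmt.datafactory.models import FtpServerLocation
+
+        location = FtpServerLocation(
+            type='FtpServerLocation',
+            folder_path='incoming',
+            file_name='orders.csv')
+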
+ :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(FtpServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity.py index 7f08bf98769d..7941189f2dcd 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity.py @@ -15,10 +15,12 @@ class GetMetadataActivity(ExecutionActivity): """Activity to get metadata of dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Activity name. + :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str @@ -26,14 +28,14 @@ class GetMetadataActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param dataset: GetMetadata activity dataset reference. + :param dataset: Required. GetMetadata activity dataset reference. :type dataset: ~azure.mgmt.datafactory.models.DatasetReference :param field_list: Fields of metadata to get from dataset. 
:type field_list: list[object] @@ -58,8 +60,8 @@ class GetMetadataActivity(ExecutionActivity): 'field_list': {'key': 'typeProperties.fieldList', 'type': '[object]'}, } - def __init__(self, name, dataset, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, field_list=None): - super(GetMetadataActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) - self.dataset = dataset - self.field_list = field_list + def __init__(self, **kwargs): + super(GetMetadataActivity, self).__init__(**kwargs) + self.dataset = kwargs.get('dataset', None) + self.field_list = kwargs.get('field_list', None) self.type = 'GetMetadata' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity_py3.py new file mode 100644 index 000000000000..b4d8eb17cab1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity_py3.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class GetMetadataActivity(ExecutionActivity): + """Activity to get metadata of dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param dataset: Required. GetMetadata activity dataset reference. + :type dataset: ~azure.mgmt.datafactory.models.DatasetReference + :param field_list: Fields of metadata to get from dataset. 
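+
+    Example (illustrative sketch only; the dataset reference and field names
+    are hypothetical, and ``DatasetReference`` is assumed to take a
+    ``reference_name`` keyword as elsewhere in this package)::
+
+        from azure.mgmt.datafactory.models import (
+            DatasetReference, GetMetadataActivity)
+
+        activity = GetMetadataActivity(
+            name='GetFolderMetadata',
+            dataset=DatasetReference(reference_name='SourceFolder'),
+            field_list=['childItems', 'lastModified'])
+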
+ :type field_list: list[object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'dataset': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + 'field_list': {'key': 'typeProperties.fieldList', 'type': '[object]'}, + } + + def __init__(self, *, name: str, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, field_list=None, **kwargs) -> None: + super(GetMetadataActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.dataset = dataset + self.field_list = field_list + self.type = 'GetMetadata' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request.py index 5e37054dfd10..1be4a2afece0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request.py @@ -23,6 +23,6 @@ class GetSsisObjectMetadataRequest(Model): 'metadata_path': {'key': 'metadataPath', 'type': 'str'}, } - def __init__(self, metadata_path=None): - super(GetSsisObjectMetadataRequest, self).__init__() - self.metadata_path = metadata_path + def __init__(self, **kwargs): + super(GetSsisObjectMetadataRequest, self).__init__(**kwargs) + self.metadata_path = kwargs.get('metadata_path', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request_py3.py new file mode 100644 index 000000000000..310cd9783d81 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request_py3.py @@ -0,0 +1,28 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class GetSsisObjectMetadataRequest(Model): + """The request payload of get SSIS object metadata. + + :param metadata_path: Metadata path. 
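For orientation, a minimal sketch of constructing the Python 3 model generated above. The activity and dataset names are illustrative; DatasetReference is the companion model named in the docstring:

from azure.mgmt.datafactory.models import DatasetReference, GetMetadataActivity

# 'name' and 'dataset' are the two required constructor arguments per
# _validation above; field_list selects which metadata fields to return.
get_metadata = GetMetadataActivity(
    name='GetBlobMetadata',
    dataset=DatasetReference(reference_name='MyInputDataset'),
    field_list=['itemName', 'size', 'lastModified'],
)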
+ :type metadata_path: str + """ + + _attribute_map = { + 'metadata_path': {'key': 'metadataPath', 'type': 'str'}, + } + + def __init__(self, *, metadata_path: str=None, **kwargs) -> None: + super(GetSsisObjectMetadataRequest, self).__init__(**kwargs) + self.metadata_path = metadata_path diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request.py index 7470e856228d..cadecdf70f44 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request.py @@ -15,11 +15,14 @@ class GitHubAccessTokenRequest(Model): """Get GitHub access token request definition. - :param git_hub_access_code: GitHub access code. + All required parameters must be populated in order to send to Azure. + + :param git_hub_access_code: Required. GitHub access code. :type git_hub_access_code: str :param git_hub_client_id: GitHub application client ID. :type git_hub_client_id: str - :param git_hub_access_token_base_url: GitHub access token base URL. + :param git_hub_access_token_base_url: Required. GitHub access token base + URL. :type git_hub_access_token_base_url: str """ @@ -34,8 +37,8 @@ class GitHubAccessTokenRequest(Model): 'git_hub_access_token_base_url': {'key': 'gitHubAccessTokenBaseUrl', 'type': 'str'}, } - def __init__(self, git_hub_access_code, git_hub_access_token_base_url, git_hub_client_id=None): - super(GitHubAccessTokenRequest, self).__init__() - self.git_hub_access_code = git_hub_access_code - self.git_hub_client_id = git_hub_client_id - self.git_hub_access_token_base_url = git_hub_access_token_base_url + def __init__(self, **kwargs): + super(GitHubAccessTokenRequest, self).__init__(**kwargs) + self.git_hub_access_code = kwargs.get('git_hub_access_code', None) + self.git_hub_client_id = kwargs.get('git_hub_client_id', None) + self.git_hub_access_token_base_url = kwargs.get('git_hub_access_token_base_url', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request_py3.py new file mode 100644 index 000000000000..7961e1bc33ed --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request_py3.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class GitHubAccessTokenRequest(Model): + """Get GitHub access token request definition. + + All required parameters must be populated in order to send to Azure. + + :param git_hub_access_code: Required. GitHub access code. + :type git_hub_access_code: str + :param git_hub_client_id: GitHub application client ID. + :type git_hub_client_id: str + :param git_hub_access_token_base_url: Required. GitHub access token base + URL. 
+ :type git_hub_access_token_base_url: str + """ + + _validation = { + 'git_hub_access_code': {'required': True}, + 'git_hub_access_token_base_url': {'required': True}, + } + + _attribute_map = { + 'git_hub_access_code': {'key': 'gitHubAccessCode', 'type': 'str'}, + 'git_hub_client_id': {'key': 'gitHubClientId', 'type': 'str'}, + 'git_hub_access_token_base_url': {'key': 'gitHubAccessTokenBaseUrl', 'type': 'str'}, + } + + def __init__(self, *, git_hub_access_code: str, git_hub_access_token_base_url: str, git_hub_client_id: str=None, **kwargs) -> None: + super(GitHubAccessTokenRequest, self).__init__(**kwargs) + self.git_hub_access_code = git_hub_access_code + self.git_hub_client_id = git_hub_client_id + self.git_hub_access_token_base_url = git_hub_access_token_base_url diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response.py index e1636ec59bb0..4a4afce8f0f0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response.py @@ -23,6 +23,6 @@ class GitHubAccessTokenResponse(Model): 'git_hub_access_token': {'key': 'gitHubAccessToken', 'type': 'str'}, } - def __init__(self, git_hub_access_token=None): - super(GitHubAccessTokenResponse, self).__init__() - self.git_hub_access_token = git_hub_access_token + def __init__(self, **kwargs): + super(GitHubAccessTokenResponse, self).__init__(**kwargs) + self.git_hub_access_token = kwargs.get('git_hub_access_token', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response_py3.py new file mode 100644 index 000000000000..4f28ade6e914 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response_py3.py @@ -0,0 +1,28 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class GitHubAccessTokenResponse(Model): + """Get GitHub access token response definition. + + :param git_hub_access_token: GitHub access token. 
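As a quick sketch of the request model being regenerated here (the access code, base URL, and client id are placeholders, not values from this patch):

from azure.mgmt.datafactory.models import GitHubAccessTokenRequest

# git_hub_access_code and git_hub_access_token_base_url are the two
# required fields enforced by _validation above.
token_request = GitHubAccessTokenRequest(
    git_hub_access_code='<code returned by the GitHub OAuth redirect>',
    git_hub_access_token_base_url='<GitHub access token base URL>',
    git_hub_client_id='<application client id>',
)

The populated request is then exchanged through the management client (the factories operation group in this package) for the GitHubAccessTokenResponse model that follows.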
+ :type git_hub_access_token: str + """ + + _attribute_map = { + 'git_hub_access_token': {'key': 'gitHubAccessToken', 'type': 'str'}, + } + + def __init__(self, *, git_hub_access_token: str=None, **kwargs) -> None: + super(GitHubAccessTokenResponse, self).__init__(**kwargs) + self.git_hub_access_token = git_hub_access_token diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service.py index 4b3e55b5c3b2..c460dd95c380 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service.py @@ -15,6 +15,8 @@ class GoogleAdWordsLinkedService(LinkedService): """Google AdWords service linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,17 +31,18 @@ class GoogleAdWordsLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param client_customer_id: The Client customer ID of the AdWords account - that you want to fetch report data for. + :param client_customer_id: Required. The Client customer ID of the AdWords + account that you want to fetch report data for. :type client_customer_id: object - :param developer_token: The developer token associated with the manager - account that you use to grant access to the AdWords API. + :param developer_token: Required. The developer token associated with the + manager account that you use to grant access to the AdWords API. :type developer_token: ~azure.mgmt.datafactory.models.SecretBase - :param authentication_type: The OAuth 2.0 authentication mechanism used - for authentication. ServiceAuthentication can only be used on self-hosted - IR. Possible values include: 'ServiceAuthentication', 'UserAuthentication' + :param authentication_type: Required. The OAuth 2.0 authentication + mechanism used for authentication. ServiceAuthentication can only be used + on self-hosted IR. 
Possible values include: 'ServiceAuthentication', + 'UserAuthentication' :type authentication_type: str or ~azure.mgmt.datafactory.models.GoogleAdWordsAuthenticationType :param refresh_token: The refresh token obtained from Google for @@ -100,17 +103,17 @@ class GoogleAdWordsLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, client_customer_id, developer_token, authentication_type, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, refresh_token=None, client_id=None, client_secret=None, email=None, key_file_path=None, trusted_cert_path=None, use_system_trust_store=None, encrypted_credential=None): - super(GoogleAdWordsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.client_customer_id = client_customer_id - self.developer_token = developer_token - self.authentication_type = authentication_type - self.refresh_token = refresh_token - self.client_id = client_id - self.client_secret = client_secret - self.email = email - self.key_file_path = key_file_path - self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(GoogleAdWordsLinkedService, self).__init__(**kwargs) + self.client_customer_id = kwargs.get('client_customer_id', None) + self.developer_token = kwargs.get('developer_token', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.refresh_token = kwargs.get('refresh_token', None) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.email = kwargs.get('email', None) + self.key_file_path = kwargs.get('key_file_path', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'GoogleAdWords' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service_py3.py new file mode 100644 index 000000000000..dfb3bc07e69f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service_py3.py @@ -0,0 +1,119 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class GoogleAdWordsLinkedService(LinkedService): + """Google AdWords service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param client_customer_id: Required. The Client customer ID of the AdWords + account that you want to fetch report data for. + :type client_customer_id: object + :param developer_token: Required. The developer token associated with the + manager account that you use to grant access to the AdWords API. + :type developer_token: ~azure.mgmt.datafactory.models.SecretBase + :param authentication_type: Required. The OAuth 2.0 authentication + mechanism used for authentication. ServiceAuthentication can only be used + on self-hosted IR. Possible values include: 'ServiceAuthentication', + 'UserAuthentication' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.GoogleAdWordsAuthenticationType + :param refresh_token: The refresh token obtained from Google for + authorizing access to AdWords for UserAuthentication. + :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase + :param client_id: The client id of the google application used to acquire + the refresh token. + :type client_id: ~azure.mgmt.datafactory.models.SecretBase + :param client_secret: The client secret of the google application used to + acquire the refresh token. + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param email: The service account email ID that is used for + ServiceAuthentication and can only be used on self-hosted IR. + :type email: object + :param key_file_path: The full path to the .p12 key file that is used to + authenticate the service account email address and can only be used on + self-hosted IR. + :type key_file_path: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_customer_id': {'required': True}, + 'developer_token': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'client_customer_id': {'key': 'typeProperties.clientCustomerID', 'type': 'object'}, + 'developer_token': {'key': 'typeProperties.developerToken', 'type': 'SecretBase'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'SecretBase'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'email': {'key': 'typeProperties.email', 'type': 'object'}, + 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, client_customer_id, developer_token, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, refresh_token=None, client_id=None, client_secret=None, email=None, key_file_path=None, trusted_cert_path=None, use_system_trust_store=None, encrypted_credential=None, **kwargs) -> None: + super(GoogleAdWordsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.client_customer_id = client_customer_id + self.developer_token = developer_token + self.authentication_type = authentication_type + self.refresh_token = refresh_token + self.client_id = client_id + self.client_secret = client_secret + self.email = email + self.key_file_path = key_file_path + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.encrypted_credential = encrypted_credential + self.type = 'GoogleAdWords' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset.py index 74ef1a12fa04..92b901b774ed 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset.py @@ -15,6 +15,8 @@ class GoogleAdWordsObjectDataset(Dataset): """Google AdWords service dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class GoogleAdWordsObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. 
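A hedged sketch of instantiating the linked service defined above with UserAuthentication. All ids and secrets are placeholders; SecureString is the package's inline SecretBase implementation:

from azure.mgmt.datafactory.models import GoogleAdWordsLinkedService, SecureString

# client_customer_id, developer_token and authentication_type are the
# three required arguments per _validation above.
adwords_ls = GoogleAdWordsLinkedService(
    client_customer_id='123-456-7890',
    developer_token=SecureString(value='<developer token>'),
    authentication_type='UserAuthentication',
    refresh_token=SecureString(value='<refresh token>'),
    client_id=SecureString(value='<OAuth client id>'),
    client_secret=SecureString(value='<OAuth client secret>'),
)

For 'ServiceAuthentication' (self-hosted IR only, per the docstring) one would instead supply email and key_file_path, plus a connect_via integration runtime reference.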
:type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class GoogleAdWordsObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -64,7 +66,7 @@ class GoogleAdWordsObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): - super(GoogleAdWordsObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name + def __init__(self, **kwargs): + super(GoogleAdWordsObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) self.type = 'GoogleAdWordsObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset_py3.py new file mode 100644 index 000000000000..e1272f978b8e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class GoogleAdWordsObjectDataset(Dataset): + """Google AdWords service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. 
+ :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(GoogleAdWordsObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'GoogleAdWordsObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source.py index e9b8fbf213ed..8699057abe09 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source.py @@ -15,6 +15,8 @@ class GoogleAdWordsSource(CopySource): """A copy activity Google AdWords service source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class GoogleAdWordsSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
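Continuing the sketch, the dataset model above pairs a linked service reference with an object name (both names illustrative):

from azure.mgmt.datafactory.models import (
    GoogleAdWordsObjectDataset,
    LinkedServiceReference,
)

# linked_service_name is the only required constructor argument besides
# the server-filled type constant.
adwords_ds = GoogleAdWordsObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='AdWordsLS'),
    table_name='CAMPAIGN_PERFORMANCE_REPORT',
)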
@@ -49,7 +51,7 @@ class GoogleAdWordsSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(GoogleAdWordsSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'GoogleAdWordsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source_py3.py new file mode 100644 index 000000000000..995d5324670b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class GoogleAdWordsSource(CopySource): + """A copy activity Google AdWords service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'GoogleAdWordsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py index c1cbbc8f7c9e..45a535b95d43 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py @@ -15,6 +15,8 @@ class GoogleBigQueryLinkedService(LinkedService): """Google BigQuery service linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,9 +31,9 @@ class GoogleBigQueryLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param project: The default BigQuery project to query against. + :param project: Required. The default BigQuery project to query against. :type project: object :param additional_projects: A comma-separated list of public BigQuery projects to access. @@ -41,9 +43,10 @@ class GoogleBigQueryLinkedService(LinkedService): that combine BigQuery data with data from Google Drive. The default value is false. :type request_google_drive_scope: object - :param authentication_type: The OAuth 2.0 authentication mechanism used - for authentication. ServiceAuthentication can only be used on self-hosted - IR. Possible values include: 'ServiceAuthentication', 'UserAuthentication' + :param authentication_type: Required. The OAuth 2.0 authentication + mechanism used for authentication. ServiceAuthentication can only be used + on self-hosted IR. 
Possible values include: 'ServiceAuthentication', + 'UserAuthentication' :type authentication_type: str or ~azure.mgmt.datafactory.models.GoogleBigQueryAuthenticationType :param refresh_token: The refresh token obtained from Google for @@ -104,18 +107,18 @@ class GoogleBigQueryLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, project, authentication_type, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, additional_projects=None, request_google_drive_scope=None, refresh_token=None, client_id=None, client_secret=None, email=None, key_file_path=None, trusted_cert_path=None, use_system_trust_store=None, encrypted_credential=None): - super(GoogleBigQueryLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.project = project - self.additional_projects = additional_projects - self.request_google_drive_scope = request_google_drive_scope - self.authentication_type = authentication_type - self.refresh_token = refresh_token - self.client_id = client_id - self.client_secret = client_secret - self.email = email - self.key_file_path = key_file_path - self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(GoogleBigQueryLinkedService, self).__init__(**kwargs) + self.project = kwargs.get('project', None) + self.additional_projects = kwargs.get('additional_projects', None) + self.request_google_drive_scope = kwargs.get('request_google_drive_scope', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.refresh_token = kwargs.get('refresh_token', None) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.email = kwargs.get('email', None) + self.key_file_path = kwargs.get('key_file_path', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'GoogleBigQuery' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service_py3.py new file mode 100644 index 000000000000..146674a85531 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service_py3.py @@ -0,0 +1,124 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class GoogleBigQueryLinkedService(LinkedService): + """Google BigQuery service linked service. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param project: Required. The default BigQuery project to query against. + :type project: object + :param additional_projects: A comma-separated list of public BigQuery + projects to access. + :type additional_projects: object + :param request_google_drive_scope: Whether to request access to Google + Drive. Allowing Google Drive access enables support for federated tables + that combine BigQuery data with data from Google Drive. The default value + is false. + :type request_google_drive_scope: object + :param authentication_type: Required. The OAuth 2.0 authentication + mechanism used for authentication. ServiceAuthentication can only be used + on self-hosted IR. Possible values include: 'ServiceAuthentication', + 'UserAuthentication' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.GoogleBigQueryAuthenticationType + :param refresh_token: The refresh token obtained from Google for + authorizing access to BigQuery for UserAuthentication. + :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase + :param client_id: The client id of the google application used to acquire + the refresh token. + :type client_id: ~azure.mgmt.datafactory.models.SecretBase + :param client_secret: The client secret of the google application used to + acquire the refresh token. + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param email: The service account email ID that is used for + ServiceAuthentication and can only be used on self-hosted IR. + :type email: object + :param key_file_path: The full path to the .p12 key file that is used to + authenticate the service account email address and can only be used on + self-hosted IR. + :type key_file_path: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'project': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'project': {'key': 'typeProperties.project', 'type': 'object'}, + 'additional_projects': {'key': 'typeProperties.additionalProjects', 'type': 'object'}, + 'request_google_drive_scope': {'key': 'typeProperties.requestGoogleDriveScope', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'SecretBase'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'email': {'key': 'typeProperties.email', 'type': 'object'}, + 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, project, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, additional_projects=None, request_google_drive_scope=None, refresh_token=None, client_id=None, client_secret=None, email=None, key_file_path=None, trusted_cert_path=None, use_system_trust_store=None, encrypted_credential=None, **kwargs) -> None: + super(GoogleBigQueryLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.project = project + self.additional_projects = additional_projects + self.request_google_drive_scope = request_google_drive_scope + self.authentication_type = authentication_type + self.refresh_token = refresh_token + self.client_id = client_id + self.client_secret = client_secret + self.email = email + self.key_file_path = key_file_path + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.encrypted_credential = encrypted_credential + self.type = 'GoogleBigQuery' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset.py index d891958c67ab..920489742bbf 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset.py @@ -15,6 +15,8 @@ class GoogleBigQueryObjectDataset(Dataset): """Google BigQuery service dataset. + All required parameters must be populated in order to send to Azure. 
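A minimal sketch for the BigQuery linked service above, again with placeholder credentials; project and authentication_type are the required fields per _validation:

from azure.mgmt.datafactory.models import GoogleBigQueryLinkedService, SecureString

bigquery_ls = GoogleBigQueryLinkedService(
    project='my-gcp-project',
    authentication_type='UserAuthentication',
    refresh_token=SecureString(value='<refresh token>'),
    client_id=SecureString(value='<OAuth client id>'),
    client_secret=SecureString(value='<OAuth client secret>'),
)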
+ :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class GoogleBigQueryObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class GoogleBigQueryObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using database + table properties instead. @@ -72,9 +74,9 @@ class GoogleBigQueryObjectDataset(Dataset): 'dataset': {'key': 'typeProperties.dataset', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, dataset=None): - super(GoogleBigQueryObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name - self.table = table - self.dataset = dataset + def __init__(self, **kwargs): + super(GoogleBigQueryObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.dataset = kwargs.get('dataset', None) self.type = 'GoogleBigQueryObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset_py3.py new file mode 100644 index 000000000000..205819f8eeef --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class GoogleBigQueryObjectDataset(Dataset): + """Google BigQuery service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. 
+ :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + database + table properties instead. + :type table_name: object + :param table: The table name of the Google BigQuery. Type: string (or + Expression with resultType string). + :type table: object + :param dataset: The database name of the Google BigQuery. Type: string (or + Expression with resultType string). + :type dataset: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, dataset=None, **kwargs) -> None: + super(GoogleBigQueryObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.table = table + self.dataset = dataset + self.type = 'GoogleBigQueryObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source.py index 0322e3b64bea..3a28d2563a8b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source.py @@ -15,6 +15,8 @@ class GoogleBigQuerySource(CopySource): """A copy activity Google BigQuery service source. + All required parameters must be populated in order to send to Azure. 
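Per the docstring above, table_name is being retired in favor of the split dataset + table properties, which the sketch below uses (names illustrative):

from azure.mgmt.datafactory.models import (
    GoogleBigQueryObjectDataset,
    LinkedServiceReference,
)

bigquery_ds = GoogleBigQueryObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='BigQueryLS'),
    dataset='analytics',   # BigQuery dataset (database) name
    table='page_views',    # table within that dataset
)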
+ :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class GoogleBigQuerySource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). @@ -49,7 +51,7 @@ class GoogleBigQuerySource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(GoogleBigQuerySource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'GoogleBigQuerySource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source_py3.py new file mode 100644 index 000000000000..49364b4d0e3f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class GoogleBigQuerySource(CopySource): + """A copy activity Google BigQuery service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'GoogleBigQuerySource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service.py index 24b39e36cc26..57913f779ca1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service.py @@ -15,6 +15,8 @@ class GreenplumLinkedService(LinkedService): """Greenplum Database linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class GreenplumLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. 
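The copy source regenerated above follows the common CopySource shape. A sketch with an illustrative query, showing the retry knobs inherited from the base class (source_retry_wait must match the timespan pattern given in the docstring):

from azure.mgmt.datafactory.models import GoogleBigQuerySource

bigquery_source = GoogleBigQuerySource(
    query='SELECT * FROM analytics.page_views',
    source_retry_count=3,
    source_retry_wait='00:00:30',   # hh:mm:ss, per the documented pattern
    max_concurrent_connections=4,
)

In a pipeline this object would be assigned to a CopyActivity's source property alongside a matching sink.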
@@ -59,9 +61,9 @@ class GreenplumLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None): - super(GreenplumLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.connection_string = connection_string - self.pwd = pwd - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(GreenplumLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.pwd = kwargs.get('pwd', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'Greenplum' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service_py3.py new file mode 100644 index 000000000000..bd707a5e85c9 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class GreenplumLinkedService(LinkedService): + """Greenplum Database linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: + super(GreenplumLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.pwd = pwd + self.encrypted_credential = encrypted_credential + self.type = 'Greenplum' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source.py index 06ad92aab216..086f12419f4a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source.py @@ -15,6 +15,8 @@ class GreenplumSource(CopySource): """A copy activity Greenplum Database source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class GreenplumSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
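The constructor rewrite in the hunk above (positional parameters replaced by **kwargs) means Python 2 call sites must now pass keyword arguments, matching the keyword-only Python 3 variant; a short sketch of the resulting calling convention, with a placeholder query:

from azure.mgmt.datafactory.models import GreenplumSource

# Keyword construction works for both generated variants; anything not
# passed falls back to kwargs.get(..., None) in the Python 2 model.
source = GreenplumSource(query='SELECT * FROM public.orders')

assert source.type == 'GreenplumSource'   # constant filled in by __init__
assert source.source_retry_count is None  # unset optional property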
@@ -49,7 +51,7 @@ class GreenplumSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(GreenplumSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'GreenplumSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source_py3.py new file mode 100644 index 000000000000..8b789deb43da --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class GreenplumSource(CopySource): + """A copy activity Greenplum Database source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'GreenplumSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset.py index 500671398856..eb0ea08ee544 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset.py @@ -15,6 +15,8 @@ class GreenplumTableDataset(Dataset): """Greenplum Database dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class GreenplumTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class GreenplumTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. 
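The dataset hunk above retires tableName in favor of separate schema and table properties; a sketch of the preferred construction, assuming a pre-existing Greenplum linked service named 'GreenplumLS' (illustrative only):

from azure.mgmt.datafactory.models import (
    GreenplumTableDataset,
    LinkedServiceReference,
)

dataset = GreenplumTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='GreenplumLS'),
    table='orders',                           # typeProperties.table
    greenplum_table_dataset_schema='public',  # typeProperties.schema
)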
@@ -72,9 +74,9 @@ class GreenplumTableDataset(Dataset): 'greenplum_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, greenplum_table_dataset_schema=None): - super(GreenplumTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name - self.table = table - self.greenplum_table_dataset_schema = greenplum_table_dataset_schema + def __init__(self, **kwargs): + super(GreenplumTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.greenplum_table_dataset_schema = kwargs.get('greenplum_table_dataset_schema', None) self.type = 'GreenplumTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset_py3.py new file mode 100644 index 000000000000..7f37fff9108d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class GreenplumTableDataset(Dataset): + """Greenplum Database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The table name of Greenplum. 
Type: string (or Expression + with resultType string). + :type table: object + :param greenplum_table_dataset_schema: The schema name of Greenplum. Type: + string (or Expression with resultType string). + :type greenplum_table_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'greenplum_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, greenplum_table_dataset_schema=None, **kwargs) -> None: + super(GreenplumTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.table = table + self.greenplum_table_dataset_schema = greenplum_table_dataset_schema + self.type = 'GreenplumTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service.py index acf0d8cbd35d..b6affd5caa0d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service.py @@ -15,6 +15,8 @@ class HBaseLinkedService(LinkedService): """HBase server linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,10 +31,10 @@ class HBaseLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param host: The IP address or host name of the HBase server. (i.e. - 192.168.222.160) + :param host: Required. The IP address or host name of the HBase server. + (i.e. 192.168.222.160) :type host: object :param port: The TCP port that the HBase instance uses to listen for client connections. The default value is 9090. @@ -40,8 +42,9 @@ class HBaseLinkedService(LinkedService): :param http_path: The partial URL corresponding to the HBase server. (i.e. /gateway/sandbox/hbase/version) :type http_path: object - :param authentication_type: The authentication mechanism to use to connect - to the HBase server. 
Possible values include: 'Anonymous', 'Basic' + :param authentication_type: Required. The authentication mechanism to use + to connect to the HBase server. Possible values include: 'Anonymous', + 'Basic' :type authentication_type: str or ~azure.mgmt.datafactory.models.HBaseAuthenticationType :param username: The user name used to connect to the HBase instance. @@ -95,17 +98,17 @@ class HBaseLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, host, authentication_type, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, port=None, http_path=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None): - super(HBaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.host = host - self.port = port - self.http_path = http_path - self.authentication_type = authentication_type - self.username = username - self.password = password - self.enable_ssl = enable_ssl - self.trusted_cert_path = trusted_cert_path - self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch - self.allow_self_signed_server_cert = allow_self_signed_server_cert - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(HBaseLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.port = kwargs.get('port', None) + self.http_path = kwargs.get('http_path', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) + self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'HBase' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service_py3.py new file mode 100644 index 000000000000..a8823e2e8937 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service_py3.py @@ -0,0 +1,114 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class HBaseLinkedService(LinkedService): + """HBase server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The IP address or host name of the HBase server. + (i.e. 192.168.222.160) + :type host: object + :param port: The TCP port that the HBase instance uses to listen for + client connections. The default value is 9090. + :type port: object + :param http_path: The partial URL corresponding to the HBase server. (i.e. + /gateway/sandbox/hbase/version) + :type http_path: object + :param authentication_type: Required. The authentication mechanism to use + to connect to the HBase server. Possible values include: 'Anonymous', + 'Basic' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.HBaseAuthenticationType + :param username: The user name used to connect to the HBase instance. + :type username: object + :param password: The password corresponding to the user name. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param allow_host_name_cn_mismatch: Specifies whether to require a + CA-issued SSL certificate name to match the host name of the server when + connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, http_path=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: + super(HBaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.port = port + self.http_path = http_path + self.authentication_type = authentication_type + self.username = username + self.password = password + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.encrypted_credential = encrypted_credential + self.type = 'HBase' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset.py index acc6ca54817c..5de32bcb6871 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset.py @@ -15,6 +15,8 @@ class HBaseObjectDataset(Dataset): """HBase server dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class HBaseObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. 
:type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class HBaseObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -64,7 +66,7 @@ class HBaseObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): - super(HBaseObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name + def __init__(self, **kwargs): + super(HBaseObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) self.type = 'HBaseObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset_py3.py new file mode 100644 index 000000000000..27fc0d1514ea --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class HBaseObjectDataset(Dataset): + """HBase server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. 
Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(HBaseObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'HBaseObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source.py index e1fcefaac0a7..eb6e3f1789bb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source.py @@ -15,6 +15,8 @@ class HBaseSource(CopySource): """A copy activity HBase server source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class HBaseSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
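As throughout this patch, dotted keys in _attribute_map (e.g. 'typeProperties.tableName') instruct the msrest serializer to nest those values under typeProperties in the request payload; a sketch of that round trip, assuming the msrest dependency this package already declares (model and reference names are illustrative):

from msrest import Serializer

from azure.mgmt.datafactory.models import (
    HBaseObjectDataset,
    LinkedServiceReference,
)

dataset = HBaseObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='HBaseLS'),
    table_name='events',
)

serializer = Serializer({
    'HBaseObjectDataset': HBaseObjectDataset,
    'LinkedServiceReference': LinkedServiceReference,
})
body = serializer.body(dataset, 'HBaseObjectDataset')
# Expected shape, roughly:
# {'type': 'HBaseObject',
#  'linkedServiceName': {'type': 'LinkedServiceReference',
#                        'referenceName': 'HBaseLS'},
#  'typeProperties': {'tableName': 'events'}}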
@@ -49,7 +51,7 @@ class HBaseSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(HBaseSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'HBaseSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source_py3.py new file mode 100644 index 000000000000..b2680e95c212 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class HBaseSource(CopySource): + """A copy activity HBase server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'HBaseSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity.py index 43053ab05290..4110b0f8b7de 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity.py @@ -15,10 +15,12 @@ class HDInsightHiveActivity(ExecutionActivity): """HDInsight Hive activity type. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Activity name. + :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str @@ -26,7 +28,7 @@ class HDInsightHiveActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param linked_service_name: Linked service reference. 
:type linked_service_name: @@ -81,14 +83,14 @@ class HDInsightHiveActivity(ExecutionActivity): 'query_timeout': {'key': 'typeProperties.queryTimeout', 'type': 'int'}, } - def __init__(self, name, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, script_path=None, script_linked_service=None, defines=None, variables=None, query_timeout=None): - super(HDInsightHiveActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) - self.storage_linked_services = storage_linked_services - self.arguments = arguments - self.get_debug_info = get_debug_info - self.script_path = script_path - self.script_linked_service = script_linked_service - self.defines = defines - self.variables = variables - self.query_timeout = query_timeout + def __init__(self, **kwargs): + super(HDInsightHiveActivity, self).__init__(**kwargs) + self.storage_linked_services = kwargs.get('storage_linked_services', None) + self.arguments = kwargs.get('arguments', None) + self.get_debug_info = kwargs.get('get_debug_info', None) + self.script_path = kwargs.get('script_path', None) + self.script_linked_service = kwargs.get('script_linked_service', None) + self.defines = kwargs.get('defines', None) + self.variables = kwargs.get('variables', None) + self.query_timeout = kwargs.get('query_timeout', None) self.type = 'HDInsightHive' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity_py3.py new file mode 100644 index 000000000000..f8a5441fe767 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity_py3.py @@ -0,0 +1,96 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class HDInsightHiveActivity(ExecutionActivity): + """HDInsight Hive activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. 
+ :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. + :type storage_linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: 'None', + 'Always', 'Failure' + :type get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :param script_path: Script path. Type: string (or Expression with + resultType string). + :type script_path: object + :param script_linked_service: Script linked service reference. + :type script_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param defines: Allows user to specify defines for Hive job request. + :type defines: dict[str, object] + :param variables: User specified arguments under hivevar namespace. + :type variables: list[object] + :param query_timeout: Query timeout value (in minutes). Effective when + the HDInsight cluster is with ESP (Enterprise Security Package) + :type query_timeout: int + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, + 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + 'variables': {'key': 'typeProperties.variables', 'type': '[object]'}, + 'query_timeout': {'key': 'typeProperties.queryTimeout', 'type': 'int'}, + } + + def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, script_path=None, script_linked_service=None, defines=None, variables=None, query_timeout: int=None, **kwargs) -> None: + super(HDInsightHiveActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.storage_linked_services = storage_linked_services + self.arguments = arguments + self.get_debug_info = get_debug_info + self.script_path = script_path + self.script_linked_service = script_linked_service + self.defines = defines + self.variables = variables + self.query_timeout = query_timeout + self.type = 'HDInsightHive' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py index 673eee3ff5ea..810525342d82 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py @@ -15,6 +15,8 @@ class HDInsightLinkedService(LinkedService): """HDInsight linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,10 +31,10 @@ class HDInsightLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param cluster_uri: HDInsight cluster URI. Type: string (or Expression - with resultType string). + :param cluster_uri: Required. HDInsight cluster URI. Type: string (or + Expression with resultType string). :type cluster_uri: object :param user_name: HDInsight cluster user name. Type: string (or Expression with resultType string). @@ -81,14 +83,14 @@ class HDInsightLinkedService(LinkedService): 'file_system': {'key': 'typeProperties.fileSystem', 'type': 'object'}, } - def __init__(self, cluster_uri, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, user_name=None, password=None, linked_service_name=None, hcatalog_linked_service_name=None, encrypted_credential=None, is_esp_enabled=None, file_system=None): - super(HDInsightLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.cluster_uri = cluster_uri - self.user_name = user_name - self.password = password - self.linked_service_name = linked_service_name - self.hcatalog_linked_service_name = hcatalog_linked_service_name - self.encrypted_credential = encrypted_credential - self.is_esp_enabled = is_esp_enabled - self.file_system = file_system + def __init__(self, **kwargs): + super(HDInsightLinkedService, self).__init__(**kwargs) + self.cluster_uri = kwargs.get('cluster_uri', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.linked_service_name = kwargs.get('linked_service_name', None) + self.hcatalog_linked_service_name = kwargs.get('hcatalog_linked_service_name', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.is_esp_enabled = kwargs.get('is_esp_enabled', None) + self.file_system = kwargs.get('file_system', None) self.type = 'HDInsight' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py new file mode 100644 index 000000000000..5c384f7d6288 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py @@ -0,0 +1,96 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class HDInsightLinkedService(LinkedService): + """HDInsight linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param cluster_uri: Required. HDInsight cluster URI. Type: string (or + Expression with resultType string). + :type cluster_uri: object + :param user_name: HDInsight cluster user name. Type: string (or Expression + with resultType string). + :type user_name: object + :param password: HDInsight cluster password. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param linked_service_name: The Azure Storage linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param hcatalog_linked_service_name: A reference to the Azure SQL linked + service that points to the HCatalog database. + :type hcatalog_linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + :param is_esp_enabled: Specify if the HDInsight is created with ESP + (Enterprise Security Package). Type: Boolean. + :type is_esp_enabled: object + :param file_system: Specify the FileSystem if the main storage for the + HDInsight is ADLS Gen2. Type: string (or Expression with resultType + string). 
+ :type file_system: object + """ + + _validation = { + 'type': {'required': True}, + 'cluster_uri': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'cluster_uri': {'key': 'typeProperties.clusterUri', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, + 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'is_esp_enabled': {'key': 'typeProperties.isEspEnabled', 'type': 'object'}, + 'file_system': {'key': 'typeProperties.fileSystem', 'type': 'object'}, + } + + def __init__(self, *, cluster_uri, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_name=None, password=None, linked_service_name=None, hcatalog_linked_service_name=None, encrypted_credential=None, is_esp_enabled=None, file_system=None, **kwargs) -> None: + super(HDInsightLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.cluster_uri = cluster_uri + self.user_name = user_name + self.password = password + self.linked_service_name = linked_service_name + self.hcatalog_linked_service_name = hcatalog_linked_service_name + self.encrypted_credential = encrypted_credential + self.is_esp_enabled = is_esp_enabled + self.file_system = file_system + self.type = 'HDInsight' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity.py index 7b604ae76697..20655843e1db 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity.py @@ -15,10 +15,12 @@ class HDInsightMapReduceActivity(ExecutionActivity): """HDInsight MapReduce activity type. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Activity name. + :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str @@ -26,7 +28,7 @@ class HDInsightMapReduceActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param linked_service_name: Linked service reference. 
:type linked_service_name: @@ -42,10 +44,10 @@ class HDInsightMapReduceActivity(ExecutionActivity): 'Always', 'Failure' :type get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param class_name: Class name. Type: string (or Expression with resultType - string). + :param class_name: Required. Class name. Type: string (or Expression with + resultType string). :type class_name: object - :param jar_file_path: Jar path. Type: string (or Expression with + :param jar_file_path: Required. Jar path. Type: string (or Expression with resultType string). :type jar_file_path: object :param jar_linked_service: Jar linked service reference. @@ -84,14 +86,14 @@ class HDInsightMapReduceActivity(ExecutionActivity): 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, } - def __init__(self, name, class_name, jar_file_path, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, jar_linked_service=None, jar_libs=None, defines=None): - super(HDInsightMapReduceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) - self.storage_linked_services = storage_linked_services - self.arguments = arguments - self.get_debug_info = get_debug_info - self.class_name = class_name - self.jar_file_path = jar_file_path - self.jar_linked_service = jar_linked_service - self.jar_libs = jar_libs - self.defines = defines + def __init__(self, **kwargs): + super(HDInsightMapReduceActivity, self).__init__(**kwargs) + self.storage_linked_services = kwargs.get('storage_linked_services', None) + self.arguments = kwargs.get('arguments', None) + self.get_debug_info = kwargs.get('get_debug_info', None) + self.class_name = kwargs.get('class_name', None) + self.jar_file_path = kwargs.get('jar_file_path', None) + self.jar_linked_service = kwargs.get('jar_linked_service', None) + self.jar_libs = kwargs.get('jar_libs', None) + self.defines = kwargs.get('defines', None) self.type = 'HDInsightMapReduce' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity_py3.py new file mode 100644 index 000000000000..dffa9f119069 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity_py3.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class HDInsightMapReduceActivity(ExecutionActivity): + """HDInsight MapReduce activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. 
Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. + :type storage_linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: 'None', + 'Always', 'Failure' + :type get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :param class_name: Required. Class name. Type: string (or Expression with + resultType string). + :type class_name: object + :param jar_file_path: Required. Jar path. Type: string (or Expression with + resultType string). + :type jar_file_path: object + :param jar_linked_service: Jar linked service reference. + :type jar_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param jar_libs: Jar libs. + :type jar_libs: list[object] + :param defines: Allows user to specify defines for the MapReduce job + request. + :type defines: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'class_name': {'required': True}, + 'jar_file_path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'class_name': {'key': 'typeProperties.className', 'type': 'object'}, + 'jar_file_path': {'key': 'typeProperties.jarFilePath', 'type': 'object'}, + 'jar_linked_service': {'key': 'typeProperties.jarLinkedService', 'type': 'LinkedServiceReference'}, + 'jar_libs': {'key': 'typeProperties.jarLibs', 'type': '[object]'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + } + + def __init__(self, *, name: str, class_name, jar_file_path, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, jar_linked_service=None, jar_libs=None, defines=None, **kwargs) -> None: + super(HDInsightMapReduceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, 
linked_service_name=linked_service_name, policy=policy, **kwargs) + self.storage_linked_services = storage_linked_services + self.arguments = arguments + self.get_debug_info = get_debug_info + self.class_name = class_name + self.jar_file_path = jar_file_path + self.jar_linked_service = jar_linked_service + self.jar_libs = jar_libs + self.defines = defines + self.type = 'HDInsightMapReduce' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py index c9154860ec49..d386aac9d9aa 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py @@ -15,6 +15,8 @@ class HDInsightOnDemandLinkedService(LinkedService): """HDInsight ondemand linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,37 +31,37 @@ class HDInsightOnDemandLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param cluster_size: Number of worker/data nodes in the cluster. + :param cluster_size: Required. Number of worker/data nodes in the cluster. Suggestion value: 4. Type: string (or Expression with resultType string). :type cluster_size: object - :param time_to_live: The allowed idle time for the on-demand HDInsight - cluster. Specifies how long the on-demand HDInsight cluster stays alive - after completion of an activity run if there are no other active jobs in - the cluster. The minimum value is 5 mins. Type: string (or Expression with - resultType string). - :type time_to_live: object - :param version: Version of the HDInsight cluster.  Type: string (or + :param time_to_live: Required. The allowed idle time for the on-demand + HDInsight cluster. Specifies how long the on-demand HDInsight cluster + stays alive after completion of an activity run if there are no other + active jobs in the cluster. The minimum value is 5 mins. Type: string (or Expression with resultType string). + :type time_to_live: object + :param version: Required. Version of the HDInsight cluster.  Type: string + (or Expression with resultType string). :type version: object - :param linked_service_name: Azure Storage linked service to be used by the - on-demand cluster for storing and processing data. + :param linked_service_name: Required. Azure Storage linked service to be + used by the on-demand cluster for storing and processing data. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param host_subscription_id: The customer’s subscription to host the - cluster. Type: string (or Expression with resultType string). + :param host_subscription_id: Required. The customer’s subscription to host + the cluster. Type: string (or Expression with resultType string). :type host_subscription_id: object :param service_principal_id: The service principal id for the hostSubscriptionId. Type: string (or Expression with resultType string). 
:type service_principal_id: object :param service_principal_key: The key for the service principal id. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase - :param tenant: The Tenant id/name to which the service principal belongs. - Type: string (or Expression with resultType string). - :type tenant: object - :param cluster_resource_group: The resource group where the cluster + :param tenant: Required. The Tenant id/name to which the service principal belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param cluster_resource_group: Required. The resource group where the + cluster belongs. Type: string (or Expression with resultType string). :type cluster_resource_group: object :param cluster_name_prefix: The prefix of cluster name, postfix will be distinct with timestamp. Type: string (or Expression with resultType @@ -197,39 +199,39 @@ class HDInsightOnDemandLinkedService(LinkedService): 'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'}, } - def __init__(self, cluster_size, time_to_live, version, linked_service_name, host_subscription_id, tenant, cluster_resource_group, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, cluster_name_prefix=None, cluster_user_name=None, cluster_password=None, cluster_ssh_user_name=None, cluster_ssh_password=None, additional_linked_service_names=None, hcatalog_linked_service_name=None, cluster_type=None, spark_version=None, core_configuration=None, h_base_configuration=None, hdfs_configuration=None, hive_configuration=None, map_reduce_configuration=None, oozie_configuration=None, storm_configuration=None, yarn_configuration=None, encrypted_credential=None, head_node_size=None, data_node_size=None, zookeeper_node_size=None, script_actions=None, virtual_network_id=None, subnet_name=None): - super(HDInsightOnDemandLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.cluster_size = cluster_size - self.time_to_live = time_to_live - self.version = version - self.linked_service_name = linked_service_name - self.host_subscription_id = host_subscription_id - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.cluster_resource_group = cluster_resource_group - self.cluster_name_prefix = cluster_name_prefix - self.cluster_user_name = cluster_user_name - self.cluster_password = cluster_password - self.cluster_ssh_user_name = cluster_ssh_user_name - self.cluster_ssh_password = cluster_ssh_password - self.additional_linked_service_names = additional_linked_service_names - self.hcatalog_linked_service_name = hcatalog_linked_service_name - self.cluster_type = cluster_type - self.spark_version = spark_version - self.core_configuration = core_configuration - self.h_base_configuration = h_base_configuration - self.hdfs_configuration = hdfs_configuration - self.hive_configuration = hive_configuration - self.map_reduce_configuration = map_reduce_configuration - self.oozie_configuration = oozie_configuration - self.storm_configuration = storm_configuration - self.yarn_configuration = yarn_configuration - self.encrypted_credential = encrypted_credential - self.head_node_size = head_node_size - self.data_node_size = data_node_size - self.zookeeper_node_size = zookeeper_node_size - self.script_actions = 
script_actions - self.virtual_network_id = virtual_network_id - self.subnet_name = subnet_name + def __init__(self, **kwargs): + super(HDInsightOnDemandLinkedService, self).__init__(**kwargs) + self.cluster_size = kwargs.get('cluster_size', None) + self.time_to_live = kwargs.get('time_to_live', None) + self.version = kwargs.get('version', None) + self.linked_service_name = kwargs.get('linked_service_name', None) + self.host_subscription_id = kwargs.get('host_subscription_id', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.cluster_resource_group = kwargs.get('cluster_resource_group', None) + self.cluster_name_prefix = kwargs.get('cluster_name_prefix', None) + self.cluster_user_name = kwargs.get('cluster_user_name', None) + self.cluster_password = kwargs.get('cluster_password', None) + self.cluster_ssh_user_name = kwargs.get('cluster_ssh_user_name', None) + self.cluster_ssh_password = kwargs.get('cluster_ssh_password', None) + self.additional_linked_service_names = kwargs.get('additional_linked_service_names', None) + self.hcatalog_linked_service_name = kwargs.get('hcatalog_linked_service_name', None) + self.cluster_type = kwargs.get('cluster_type', None) + self.spark_version = kwargs.get('spark_version', None) + self.core_configuration = kwargs.get('core_configuration', None) + self.h_base_configuration = kwargs.get('h_base_configuration', None) + self.hdfs_configuration = kwargs.get('hdfs_configuration', None) + self.hive_configuration = kwargs.get('hive_configuration', None) + self.map_reduce_configuration = kwargs.get('map_reduce_configuration', None) + self.oozie_configuration = kwargs.get('oozie_configuration', None) + self.storm_configuration = kwargs.get('storm_configuration', None) + self.yarn_configuration = kwargs.get('yarn_configuration', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.head_node_size = kwargs.get('head_node_size', None) + self.data_node_size = kwargs.get('data_node_size', None) + self.zookeeper_node_size = kwargs.get('zookeeper_node_size', None) + self.script_actions = kwargs.get('script_actions', None) + self.virtual_network_id = kwargs.get('virtual_network_id', None) + self.subnet_name = kwargs.get('subnet_name', None) self.type = 'HDInsightOnDemand' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service_py3.py new file mode 100644 index 000000000000..178585c9b51d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service_py3.py @@ -0,0 +1,237 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class HDInsightOnDemandLinkedService(LinkedService): + """HDInsight ondemand linked service. + + All required parameters must be populated in order to send to Azure. 
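The parameter list that follows spells out which of these properties are now mandatory. As a quick orientation, a minimal keyword-only construction might look like the sketch below; every concrete value is a hypothetical placeholder, and the '00:05:00' time-to-live format is an assumption rather than something stated in this patch.

    from azure.mgmt.datafactory.models import (
        HDInsightOnDemandLinkedService, LinkedServiceReference)

    # Hypothetical Azure Storage linked service already defined in the factory.
    storage = LinkedServiceReference(reference_name='AzureStorageLS')

    on_demand = HDInsightOnDemandLinkedService(
        cluster_size=4,                         # docstring suggests 4 worker/data nodes
        time_to_live='00:05:00',                # 5 minutes is the documented minimum
        version='3.6',                          # placeholder HDInsight version
        linked_service_name=storage,
        host_subscription_id='<subscription-id>',
        tenant='<tenant-id>',
        cluster_resource_group='<resource-group>',
        virtual_network_id='<vnet-arm-resource-id>',  # vNet support added by this PR
        subnet_name='<subnet-arm-resource-id>')       # required once a vNet is set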
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param cluster_size: Required. Number of worker/data nodes in the cluster. + Suggestion value: 4. Type: string (or Expression with resultType string). + :type cluster_size: object + :param time_to_live: Required. The allowed idle time for the on-demand + HDInsight cluster. Specifies how long the on-demand HDInsight cluster + stays alive after completion of an activity run if there are no other + active jobs in the cluster. The minimum value is 5 mins. Type: string (or + Expression with resultType string). + :type time_to_live: object + :param version: Required. Version of the HDInsight cluster.  Type: string + (or Expression with resultType string). + :type version: object + :param linked_service_name: Required. Azure Storage linked service to be + used by the on-demand cluster for storing and processing data. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param host_subscription_id: Required. The customer’s subscription to host + the cluster. Type: string (or Expression with resultType string). + :type host_subscription_id: object + :param service_principal_id: The service principal id for the + hostSubscriptionId. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key for the service principal id. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: Required. The Tenant id/name to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param cluster_resource_group: Required. The resource group where the + cluster belongs. Type: string (or Expression with resultType string). + :type cluster_resource_group: object + :param cluster_name_prefix: The prefix of cluster name, postfix will be + distinct with timestamp. Type: string (or Expression with resultType + string). + :type cluster_name_prefix: object + :param cluster_user_name: The username to access the cluster. Type: string + (or Expression with resultType string). + :type cluster_user_name: object + :param cluster_password: The password to access the cluster. + :type cluster_password: ~azure.mgmt.datafactory.models.SecretBase + :param cluster_ssh_user_name: The username to SSH remotely connect to + cluster’s node (for Linux). Type: string (or Expression with resultType + string). + :type cluster_ssh_user_name: object + :param cluster_ssh_password: The password to SSH remotely connect + cluster’s node (for Linux). + :type cluster_ssh_password: ~azure.mgmt.datafactory.models.SecretBase + :param additional_linked_service_names: Specifies additional storage + accounts for the HDInsight linked service so that the Data Factory service + can register them on your behalf. 
+ :type additional_linked_service_names: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :param hcatalog_linked_service_name: The name of Azure SQL linked service + that point to the HCatalog database. The on-demand HDInsight cluster is + created by using the Azure SQL database as the metastore. + :type hcatalog_linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param cluster_type: The cluster type. Type: string (or Expression with + resultType string). + :type cluster_type: object + :param spark_version: The version of spark if the cluster type is 'spark'. + Type: string (or Expression with resultType string). + :type spark_version: object + :param core_configuration: Specifies the core configuration parameters (as + in core-site.xml) for the HDInsight cluster to be created. + :type core_configuration: object + :param h_base_configuration: Specifies the HBase configuration parameters + (hbase-site.xml) for the HDInsight cluster. + :type h_base_configuration: object + :param hdfs_configuration: Specifies the HDFS configuration parameters + (hdfs-site.xml) for the HDInsight cluster. + :type hdfs_configuration: object + :param hive_configuration: Specifies the hive configuration parameters + (hive-site.xml) for the HDInsight cluster. + :type hive_configuration: object + :param map_reduce_configuration: Specifies the MapReduce configuration + parameters (mapred-site.xml) for the HDInsight cluster. + :type map_reduce_configuration: object + :param oozie_configuration: Specifies the Oozie configuration parameters + (oozie-site.xml) for the HDInsight cluster. + :type oozie_configuration: object + :param storm_configuration: Specifies the Storm configuration parameters + (storm-site.xml) for the HDInsight cluster. + :type storm_configuration: object + :param yarn_configuration: Specifies the Yarn configuration parameters + (yarn-site.xml) for the HDInsight cluster. + :type yarn_configuration: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + :param head_node_size: Specifies the size of the head node for the + HDInsight cluster. + :type head_node_size: object + :param data_node_size: Specifies the size of the data node for the + HDInsight cluster. + :type data_node_size: object + :param zookeeper_node_size: Specifies the size of the Zoo Keeper node for + the HDInsight cluster. + :type zookeeper_node_size: object + :param script_actions: Custom script actions to run on HDI ondemand + cluster once it's up. Please refer to + https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. + :type script_actions: list[~azure.mgmt.datafactory.models.ScriptAction] + :param virtual_network_id: The ARM resource ID for the vNet to which the + cluster should be joined after creation. Type: string (or Expression with + resultType string). + :type virtual_network_id: object + :param subnet_name: The ARM resource ID for the subnet in the vNet. If + virtualNetworkId was specified, then this property is required. Type: + string (or Expression with resultType string). 
+ :type subnet_name: object + """ + + _validation = { + 'type': {'required': True}, + 'cluster_size': {'required': True}, + 'time_to_live': {'required': True}, + 'version': {'required': True}, + 'linked_service_name': {'required': True}, + 'host_subscription_id': {'required': True}, + 'tenant': {'required': True}, + 'cluster_resource_group': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'cluster_size': {'key': 'typeProperties.clusterSize', 'type': 'object'}, + 'time_to_live': {'key': 'typeProperties.timeToLive', 'type': 'object'}, + 'version': {'key': 'typeProperties.version', 'type': 'object'}, + 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, + 'host_subscription_id': {'key': 'typeProperties.hostSubscriptionId', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'cluster_resource_group': {'key': 'typeProperties.clusterResourceGroup', 'type': 'object'}, + 'cluster_name_prefix': {'key': 'typeProperties.clusterNamePrefix', 'type': 'object'}, + 'cluster_user_name': {'key': 'typeProperties.clusterUserName', 'type': 'object'}, + 'cluster_password': {'key': 'typeProperties.clusterPassword', 'type': 'SecretBase'}, + 'cluster_ssh_user_name': {'key': 'typeProperties.clusterSshUserName', 'type': 'object'}, + 'cluster_ssh_password': {'key': 'typeProperties.clusterSshPassword', 'type': 'SecretBase'}, + 'additional_linked_service_names': {'key': 'typeProperties.additionalLinkedServiceNames', 'type': '[LinkedServiceReference]'}, + 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, + 'cluster_type': {'key': 'typeProperties.clusterType', 'type': 'object'}, + 'spark_version': {'key': 'typeProperties.sparkVersion', 'type': 'object'}, + 'core_configuration': {'key': 'typeProperties.coreConfiguration', 'type': 'object'}, + 'h_base_configuration': {'key': 'typeProperties.hBaseConfiguration', 'type': 'object'}, + 'hdfs_configuration': {'key': 'typeProperties.hdfsConfiguration', 'type': 'object'}, + 'hive_configuration': {'key': 'typeProperties.hiveConfiguration', 'type': 'object'}, + 'map_reduce_configuration': {'key': 'typeProperties.mapReduceConfiguration', 'type': 'object'}, + 'oozie_configuration': {'key': 'typeProperties.oozieConfiguration', 'type': 'object'}, + 'storm_configuration': {'key': 'typeProperties.stormConfiguration', 'type': 'object'}, + 'yarn_configuration': {'key': 'typeProperties.yarnConfiguration', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'head_node_size': {'key': 'typeProperties.headNodeSize', 'type': 'object'}, + 'data_node_size': {'key': 'typeProperties.dataNodeSize', 'type': 'object'}, + 'zookeeper_node_size': {'key': 'typeProperties.zookeeperNodeSize', 'type': 'object'}, + 'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'}, + 'virtual_network_id': {'key': 
'typeProperties.virtualNetworkId', 'type': 'object'}, + 'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'}, + } + + def __init__(self, *, cluster_size, time_to_live, version, linked_service_name, host_subscription_id, tenant, cluster_resource_group, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, cluster_name_prefix=None, cluster_user_name=None, cluster_password=None, cluster_ssh_user_name=None, cluster_ssh_password=None, additional_linked_service_names=None, hcatalog_linked_service_name=None, cluster_type=None, spark_version=None, core_configuration=None, h_base_configuration=None, hdfs_configuration=None, hive_configuration=None, map_reduce_configuration=None, oozie_configuration=None, storm_configuration=None, yarn_configuration=None, encrypted_credential=None, head_node_size=None, data_node_size=None, zookeeper_node_size=None, script_actions=None, virtual_network_id=None, subnet_name=None, **kwargs) -> None: + super(HDInsightOnDemandLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.cluster_size = cluster_size + self.time_to_live = time_to_live + self.version = version + self.linked_service_name = linked_service_name + self.host_subscription_id = host_subscription_id + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.cluster_resource_group = cluster_resource_group + self.cluster_name_prefix = cluster_name_prefix + self.cluster_user_name = cluster_user_name + self.cluster_password = cluster_password + self.cluster_ssh_user_name = cluster_ssh_user_name + self.cluster_ssh_password = cluster_ssh_password + self.additional_linked_service_names = additional_linked_service_names + self.hcatalog_linked_service_name = hcatalog_linked_service_name + self.cluster_type = cluster_type + self.spark_version = spark_version + self.core_configuration = core_configuration + self.h_base_configuration = h_base_configuration + self.hdfs_configuration = hdfs_configuration + self.hive_configuration = hive_configuration + self.map_reduce_configuration = map_reduce_configuration + self.oozie_configuration = oozie_configuration + self.storm_configuration = storm_configuration + self.yarn_configuration = yarn_configuration + self.encrypted_credential = encrypted_credential + self.head_node_size = head_node_size + self.data_node_size = data_node_size + self.zookeeper_node_size = zookeeper_node_size + self.script_actions = script_actions + self.virtual_network_id = virtual_network_id + self.subnet_name = subnet_name + self.type = 'HDInsightOnDemand' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity.py index 50fec511ad70..61b939076db6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity.py @@ -15,10 +15,12 @@ class HDInsightPigActivity(ExecutionActivity): """HDInsight Pig activity type. + All required parameters must be populated in order to send to Azure. 
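Only the activity name is user-required here; the type discriminator is filled by the server. A hypothetical sketch of the keyword-only form this patch introduces, with placeholder reference names:

    from azure.mgmt.datafactory.models import (
        HDInsightPigActivity, LinkedServiceReference)

    pig = HDInsightPigActivity(
        name='TransformWithPig',
        linked_service_name=LinkedServiceReference(
            reference_name='MyHDInsightCluster'),   # hypothetical cluster reference
        script_path='scripts/transform.pig',        # string or Expression
        script_linked_service=LinkedServiceReference(
            reference_name='ScriptStorage'),        # hypothetical script store
        get_debug_info='Failure')                   # 'None', 'Always' or 'Failure'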
+ :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Activity name. + :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str @@ -26,7 +28,7 @@ class HDInsightPigActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: @@ -74,12 +76,12 @@ class HDInsightPigActivity(ExecutionActivity): 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, } - def __init__(self, name, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, script_path=None, script_linked_service=None, defines=None): - super(HDInsightPigActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) - self.storage_linked_services = storage_linked_services - self.arguments = arguments - self.get_debug_info = get_debug_info - self.script_path = script_path - self.script_linked_service = script_linked_service - self.defines = defines + def __init__(self, **kwargs): + super(HDInsightPigActivity, self).__init__(**kwargs) + self.storage_linked_services = kwargs.get('storage_linked_services', None) + self.arguments = kwargs.get('arguments', None) + self.get_debug_info = kwargs.get('get_debug_info', None) + self.script_path = kwargs.get('script_path', None) + self.script_linked_service = kwargs.get('script_linked_service', None) + self.defines = kwargs.get('defines', None) self.type = 'HDInsightPig' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity_py3.py new file mode 100644 index 000000000000..fb149df91f39 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity_py3.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class HDInsightPigActivity(ExecutionActivity): + """HDInsight Pig activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. 
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. + :type storage_linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: 'None', + 'Always', 'Failure' + :type get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :param script_path: Script path. Type: string (or Expression with + resultType string). + :type script_path: object + :param script_linked_service: Script linked service reference. + :type script_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param defines: Allows user to specify defines for Pig job request. + :type defines: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, + 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + } + + def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, script_path=None, script_linked_service=None, defines=None, **kwargs) -> None: + super(HDInsightPigActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.storage_linked_services = storage_linked_services + self.arguments = arguments + self.get_debug_info = get_debug_info + self.script_path = script_path + self.script_linked_service = script_linked_service + self.defines = defines + self.type = 'HDInsightPig' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity.py index e4c28dfb2ee9..7822344f012f 100644 --- 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity.py @@ -15,10 +15,12 @@ class HDInsightSparkActivity(ExecutionActivity): """HDInsight Spark activity. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Activity name. + :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str @@ -26,19 +28,19 @@ class HDInsightSparkActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param root_path: The root path in 'sparkJobLinkedService' for all the - job’s files. Type: string (or Expression with resultType string). + :param root_path: Required. The root path in 'sparkJobLinkedService' for + all the job’s files. Type: string (or Expression with resultType string). :type root_path: object - :param entry_file_path: The relative path to the root folder of the - code/package to be executed. Type: string (or Expression with resultType - string). + :param entry_file_path: Required. The relative path to the root folder of + the code/package to be executed. Type: string (or Expression with + resultType string). :type entry_file_path: object :param arguments: The user-specified arguments to HDInsightSparkActivity. 
:type arguments: list[object] @@ -85,14 +87,14 @@ class HDInsightSparkActivity(ExecutionActivity): 'spark_config': {'key': 'typeProperties.sparkConfig', 'type': '{object}'}, } - def __init__(self, name, root_path, entry_file_path, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, arguments=None, get_debug_info=None, spark_job_linked_service=None, class_name=None, proxy_user=None, spark_config=None): - super(HDInsightSparkActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) - self.root_path = root_path - self.entry_file_path = entry_file_path - self.arguments = arguments - self.get_debug_info = get_debug_info - self.spark_job_linked_service = spark_job_linked_service - self.class_name = class_name - self.proxy_user = proxy_user - self.spark_config = spark_config + def __init__(self, **kwargs): + super(HDInsightSparkActivity, self).__init__(**kwargs) + self.root_path = kwargs.get('root_path', None) + self.entry_file_path = kwargs.get('entry_file_path', None) + self.arguments = kwargs.get('arguments', None) + self.get_debug_info = kwargs.get('get_debug_info', None) + self.spark_job_linked_service = kwargs.get('spark_job_linked_service', None) + self.class_name = kwargs.get('class_name', None) + self.proxy_user = kwargs.get('proxy_user', None) + self.spark_config = kwargs.get('spark_config', None) self.type = 'HDInsightSpark' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity_py3.py new file mode 100644 index 000000000000..3f305901abb7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity_py3.py @@ -0,0 +1,100 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class HDInsightSparkActivity(ExecutionActivity): + """HDInsight Spark activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. 
+ :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param root_path: Required. The root path in 'sparkJobLinkedService' for + all the job’s files. Type: string (or Expression with resultType string). + :type root_path: object + :param entry_file_path: Required. The relative path to the root folder of + the code/package to be executed. Type: string (or Expression with + resultType string). + :type entry_file_path: object + :param arguments: The user-specified arguments to HDInsightSparkActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: 'None', + 'Always', 'Failure' + :type get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :param spark_job_linked_service: The storage linked service for uploading + the entry file and dependencies, and for receiving logs. + :type spark_job_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param class_name: The application's Java/Spark main class. + :type class_name: str + :param proxy_user: The user to impersonate that will execute the job. + Type: string (or Expression with resultType string). + :type proxy_user: object + :param spark_config: Spark configuration property. + :type spark_config: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'root_path': {'required': True}, + 'entry_file_path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'root_path': {'key': 'typeProperties.rootPath', 'type': 'object'}, + 'entry_file_path': {'key': 'typeProperties.entryFilePath', 'type': 'object'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'spark_job_linked_service': {'key': 'typeProperties.sparkJobLinkedService', 'type': 'LinkedServiceReference'}, + 'class_name': {'key': 'typeProperties.className', 'type': 'str'}, + 'proxy_user': {'key': 'typeProperties.proxyUser', 'type': 'object'}, + 'spark_config': {'key': 'typeProperties.sparkConfig', 'type': '{object}'}, + } + + def __init__(self, *, name: str, root_path, entry_file_path, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, arguments=None, get_debug_info=None, spark_job_linked_service=None, class_name: str=None, proxy_user=None, spark_config=None, **kwargs) -> None: + super(HDInsightSparkActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.root_path = root_path + self.entry_file_path = entry_file_path + self.arguments = arguments + self.get_debug_info = get_debug_info + self.spark_job_linked_service = spark_job_linked_service + self.class_name = class_name + self.proxy_user = proxy_user + self.spark_config = spark_config + self.type = 'HDInsightSpark' diff --git 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity.py index cd4bca1c142a..42146a5d6cc6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity.py @@ -15,10 +15,12 @@ class HDInsightStreamingActivity(ExecutionActivity): """HDInsight streaming activity type. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Activity name. + :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str @@ -26,7 +28,7 @@ class HDInsightStreamingActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: @@ -42,19 +44,20 @@ class HDInsightStreamingActivity(ExecutionActivity): 'Always', 'Failure' :type get_debug_info: str or ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption - :param mapper: Mapper executable name. Type: string (or Expression with - resultType string). + :param mapper: Required. Mapper executable name. Type: string (or + Expression with resultType string). :type mapper: object - :param reducer: Reducer executable name. Type: string (or Expression with - resultType string). + :param reducer: Required. Reducer executable name. Type: string (or + Expression with resultType string). :type reducer: object - :param input: Input blob path. Type: string (or Expression with resultType - string). - :type input: object - :param output: Output blob path. Type: string (or Expression with + :param input: Required. Input blob path. Type: string (or Expression with resultType string). + :type input: object + :param output: Required. Output blob path. Type: string (or Expression + with resultType string). :type output: object - :param file_paths: Paths to streaming job files. Can be directories. + :param file_paths: Required. Paths to streaming job files. Can be + directories. :type file_paths: list[object] :param file_linked_service: Linked service reference where the files are located. 
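With mapper, reducer, input, output and file_paths all promoted to required, the smallest valid construction now carries six arguments. A hypothetical sketch of the keyword-only form; the executables and blob paths are placeholders, not values from this patch:

    from azure.mgmt.datafactory.models import HDInsightStreamingActivity

    streaming = HDInsightStreamingActivity(
        name='StreamingWordCount',
        mapper='cat.exe',                            # mapper executable name
        reducer='wc.exe',                            # reducer executable name
        input='wasb://input@example.blob.core.windows.net/data/',
        output='wasb://output@example.blob.core.windows.net/results/',
        file_paths=['binaries/cat.exe', 'binaries/wc.exe'],
        defines={'inputFormat': 'text'})             # optional job defines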
@@ -102,18 +105,18 @@ class HDInsightStreamingActivity(ExecutionActivity): 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, } - def __init__(self, name, mapper, reducer, input, output, file_paths, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, file_linked_service=None, combiner=None, command_environment=None, defines=None): - super(HDInsightStreamingActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) - self.storage_linked_services = storage_linked_services - self.arguments = arguments - self.get_debug_info = get_debug_info - self.mapper = mapper - self.reducer = reducer - self.input = input - self.output = output - self.file_paths = file_paths - self.file_linked_service = file_linked_service - self.combiner = combiner - self.command_environment = command_environment - self.defines = defines + def __init__(self, **kwargs): + super(HDInsightStreamingActivity, self).__init__(**kwargs) + self.storage_linked_services = kwargs.get('storage_linked_services', None) + self.arguments = kwargs.get('arguments', None) + self.get_debug_info = kwargs.get('get_debug_info', None) + self.mapper = kwargs.get('mapper', None) + self.reducer = kwargs.get('reducer', None) + self.input = kwargs.get('input', None) + self.output = kwargs.get('output', None) + self.file_paths = kwargs.get('file_paths', None) + self.file_linked_service = kwargs.get('file_linked_service', None) + self.combiner = kwargs.get('combiner', None) + self.command_environment = kwargs.get('command_environment', None) + self.defines = kwargs.get('defines', None) self.type = 'HDInsightStreaming' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity_py3.py new file mode 100644 index 000000000000..2f5a301ff880 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity_py3.py @@ -0,0 +1,122 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class HDInsightStreamingActivity(ExecutionActivity): + """HDInsight streaming activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. 
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. + :type storage_linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: 'None', + 'Always', 'Failure' + :type get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :param mapper: Required. Mapper executable name. Type: string (or + Expression with resultType string). + :type mapper: object + :param reducer: Required. Reducer executable name. Type: string (or + Expression with resultType string). + :type reducer: object + :param input: Required. Input blob path. Type: string (or Expression with + resultType string). + :type input: object + :param output: Required. Output blob path. Type: string (or Expression + with resultType string). + :type output: object + :param file_paths: Required. Paths to streaming job files. Can be + directories. + :type file_paths: list[object] + :param file_linked_service: Linked service reference where the files are + located. + :type file_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param combiner: Combiner executable name. Type: string (or Expression + with resultType string). + :type combiner: object + :param command_environment: Command line environment values. + :type command_environment: list[object] + :param defines: Allows user to specify defines for streaming job request. 
+ :type defines: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'mapper': {'required': True}, + 'reducer': {'required': True}, + 'input': {'required': True}, + 'output': {'required': True}, + 'file_paths': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'mapper': {'key': 'typeProperties.mapper', 'type': 'object'}, + 'reducer': {'key': 'typeProperties.reducer', 'type': 'object'}, + 'input': {'key': 'typeProperties.input', 'type': 'object'}, + 'output': {'key': 'typeProperties.output', 'type': 'object'}, + 'file_paths': {'key': 'typeProperties.filePaths', 'type': '[object]'}, + 'file_linked_service': {'key': 'typeProperties.fileLinkedService', 'type': 'LinkedServiceReference'}, + 'combiner': {'key': 'typeProperties.combiner', 'type': 'object'}, + 'command_environment': {'key': 'typeProperties.commandEnvironment', 'type': '[object]'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + } + + def __init__(self, *, name: str, mapper, reducer, input, output, file_paths, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, file_linked_service=None, combiner=None, command_environment=None, defines=None, **kwargs) -> None: + super(HDInsightStreamingActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.storage_linked_services = storage_linked_services + self.arguments = arguments + self.get_debug_info = get_debug_info + self.mapper = mapper + self.reducer = reducer + self.input = input + self.output = output + self.file_paths = file_paths + self.file_linked_service = file_linked_service + self.combiner = combiner + self.command_environment = command_environment + self.defines = defines + self.type = 'HDInsightStreaming' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service.py index b403e8108e6f..b527f05a7e2f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service.py @@ -15,6 +15,8 @@ class HdfsLinkedService(LinkedService): """Hadoop Distributed File System (HDFS) linked service. + All required parameters must be populated in order to send to Azure. 
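For this linked service only url is mandatory. The endpoint shape below is taken from the docstring itself; choosing Anonymous authentication is an illustrative assumption (Windows authentication would also need user_name and a SecretBase password):

    from azure.mgmt.datafactory.models import HdfsLinkedService

    hdfs = HdfsLinkedService(
        url='http://myhostname:50070/webhdfs/v1',    # WebHDFS endpoint format from the docstring
        authentication_type='Anonymous')             # 'Anonymous' or 'Windows'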
+ :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,9 +31,9 @@ class HdfsLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param url: The URL of the HDFS service endpoint, e.g. + :param url: Required. The URL of the HDFS service endpoint, e.g. http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with resultType string). :type url: object @@ -69,11 +71,11 @@ class HdfsLinkedService(LinkedService): 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, } - def __init__(self, url, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, authentication_type=None, encrypted_credential=None, user_name=None, password=None): - super(HdfsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.url = url - self.authentication_type = authentication_type - self.encrypted_credential = encrypted_credential - self.user_name = user_name - self.password = password + def __init__(self, **kwargs): + super(HdfsLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) self.type = 'Hdfs' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service_py3.py new file mode 100644 index 000000000000..e004701e1da0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service_py3.py @@ -0,0 +1,81 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class HdfsLinkedService(LinkedService): + """Hadoop Distributed File System (HDFS) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. 
+ :type type: str + :param url: Required. The URL of the HDFS service endpoint, e.g. + http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with + resultType string). + :type url: object + :param authentication_type: Type of authentication used to connect to the + HDFS. Possible values are: Anonymous and Windows. Type: string (or + Expression with resultType string). + :type authentication_type: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + :param user_name: User name for Windows authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Windows authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + } + + def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, encrypted_credential=None, user_name=None, password=None, **kwargs) -> None: + super(HdfsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.authentication_type = authentication_type + self.encrypted_credential = encrypted_credential + self.user_name = user_name + self.password = password + self.type = 'Hdfs' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location.py index 3d48a66895f9..a8f5d1ba332c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location.py @@ -15,10 +15,12 @@ class HdfsLocation(DatasetLocation): """The location of HDFS. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Type of dataset storage location. + :param type: Required. Type of dataset storage location. :type type: str :param folder_path: Specify the folder path of dataset. 
Type: string (or Expression with resultType string) @@ -32,5 +34,12 @@ class HdfsLocation(DatasetLocation): 'type': {'required': True}, } - def __init__(self, type, additional_properties=None, folder_path=None, file_name=None): - super(HdfsLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name) + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HdfsLocation, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location_py3.py new file mode 100644 index 000000000000..2e07575bef0f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class HdfsLocation(DatasetLocation): + """The location of HDFS. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(HdfsLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings.py index ece39f76e7c7..ec4b98c50385 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings.py @@ -15,10 +15,12 @@ class HdfsReadSettings(StoreReadSettings): """HDFS read settings. + All required parameters must be populated in order to send to Azure. 
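A quick sketch of the new HdfsLocation model above. Note that in this version `type` is a plain required constructor argument rather than a server-filled constant; 'HdfsLocation' as its value is an assumption, and the paths are hypothetical:

    from azure.mgmt.datafactory.models import HdfsLocation

    hdfs_loc = HdfsLocation(
        type='HdfsLocation',             # required; discriminator value assumed
        folder_path='incoming/2019/06',  # Expression-capable, hence typed object
        file_name='events.csv',
    )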
+ :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: The read setting type. + :param type: Required. The read setting type. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType @@ -64,12 +66,12 @@ class HdfsReadSettings(StoreReadSettings): 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, } - def __init__(self, type, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery=None, modified_datetime_start=None, modified_datetime_end=None, distcp_settings=None): - super(HdfsReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections) - self.recursive = recursive - self.wildcard_folder_path = wildcard_folder_path - self.wildcard_file_name = wildcard_file_name - self.enable_partition_discovery = enable_partition_discovery - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end - self.distcp_settings = distcp_settings + def __init__(self, **kwargs): + super(HdfsReadSettings, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + self.distcp_settings = kwargs.get('distcp_settings', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings_py3.py new file mode 100644 index 000000000000..c37a045ec93c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_settings_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings_py3 import StoreReadSettings + + +class HdfsReadSettings(StoreReadSettings): + """HDFS read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). 
+ :type recursive: object + :param wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: HDFS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + :param distcp_settings: Specifies Distcp-related settings. + :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, distcp_settings=None, **kwargs) -> None: + super(HdfsReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + self.distcp_settings = distcp_settings diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source.py index 1a859d544ef2..be50590f6c32 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source.py @@ -15,6 +15,8 @@ class HdfsSource(CopySource): """A copy activity HDFS source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class HdfsSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. 
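A hedged sketch of the HdfsReadSettings model above, including Distcp copy settings. The DistcpSettings keyword names are assumptions inferred from the surrounding models, and the endpoints are hypothetical:

    from azure.mgmt.datafactory.models import DistcpSettings, HdfsReadSettings

    read_settings = HdfsReadSettings(
        type='HdfsReadSettings',                   # required plain string here
        recursive=True,
        wildcard_file_name='*.csv',
        modified_datetime_start='2019-06-01T00:00:00Z',
        distcp_settings=DistcpSettings(            # signature assumed
            resource_manager_endpoint='http://myrm:8088',
            temp_script_path='/tmp/distcp',
        ),
    )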
:type type: str :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType @@ -53,8 +55,8 @@ class HdfsSource(CopySource): 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, distcp_settings=None): - super(HdfsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.recursive = recursive - self.distcp_settings = distcp_settings + def __init__(self, **kwargs): + super(HdfsSource, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.distcp_settings = kwargs.get('distcp_settings', None) self.type = 'HdfsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source_py3.py new file mode 100644 index 000000000000..3c60cab46289 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source_py3.py @@ -0,0 +1,62 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class HdfsSource(CopySource): + """A copy activity HDFS source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param distcp_settings: Specifies Distcp-related settings. 
+ :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, distcp_settings=None, **kwargs) -> None: + super(HdfsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.distcp_settings = distcp_settings + self.type = 'HdfsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service.py index f7d8ff85b98f..c54c1393d56e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service.py @@ -15,6 +15,8 @@ class HiveLinkedService(LinkedService): """Hive Server linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,10 +31,11 @@ class HiveLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param host: IP address or host name of the Hive server, separated by ';' - for multiple hosts (only when serviceDiscoveryMode is enable). + :param host: Required. IP address or host name of the Hive server, + separated by ';' for multiple hosts (only when serviceDiscoveryMode is + enable). :type host: object :param port: The TCP port that the Hive server uses to listen for client connections. @@ -44,8 +47,8 @@ class HiveLinkedService(LinkedService): Thrift layer. Possible values include: 'Binary', 'SASL', 'HTTP ' :type thrift_transport_protocol: str or ~azure.mgmt.datafactory.models.HiveThriftTransportProtocol - :param authentication_type: The authentication method used to access the - Hive server. Possible values include: 'Anonymous', 'Username', + :param authentication_type: Required. The authentication method used to + access the Hive server. 
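Unlike the location and read-settings models, the source takes no `type` argument; the constructor pins the discriminator itself, as this sketch shows (values hypothetical):

    from azure.mgmt.datafactory.models import HdfsSource

    hdfs_source = HdfsSource(
        recursive=True,                 # also the service-side default
        source_retry_count=3,
        max_concurrent_connections=4,
    )
    assert hdfs_source.type == 'HdfsSource'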
Possible values include: 'Anonymous', 'Username', 'UsernameAndPassword', 'WindowsAzureHDInsightService' :type authentication_type: str or ~azure.mgmt.datafactory.models.HiveAuthenticationType @@ -122,23 +125,23 @@ class HiveLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, host, authentication_type, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, port=None, server_type=None, thrift_transport_protocol=None, service_discovery_mode=None, zoo_keeper_name_space=None, use_native_query=None, username=None, password=None, http_path=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None): - super(HiveLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.host = host - self.port = port - self.server_type = server_type - self.thrift_transport_protocol = thrift_transport_protocol - self.authentication_type = authentication_type - self.service_discovery_mode = service_discovery_mode - self.zoo_keeper_name_space = zoo_keeper_name_space - self.use_native_query = use_native_query - self.username = username - self.password = password - self.http_path = http_path - self.enable_ssl = enable_ssl - self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch - self.allow_self_signed_server_cert = allow_self_signed_server_cert - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(HiveLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.port = kwargs.get('port', None) + self.server_type = kwargs.get('server_type', None) + self.thrift_transport_protocol = kwargs.get('thrift_transport_protocol', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.service_discovery_mode = kwargs.get('service_discovery_mode', None) + self.zoo_keeper_name_space = kwargs.get('zoo_keeper_name_space', None) + self.use_native_query = kwargs.get('use_native_query', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.http_path = kwargs.get('http_path', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) + self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'Hive' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service_py3.py new file mode 100644 index 000000000000..611d30ecb781 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service_py3.py @@ -0,0 +1,147 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class HiveLinkedService(LinkedService): + """Hive Server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. IP address or host name of the Hive server, + separated by ';' for multiple hosts (only when serviceDiscoveryMode is + enable). + :type host: object + :param port: The TCP port that the Hive server uses to listen for client + connections. + :type port: object + :param server_type: The type of Hive server. Possible values include: + 'HiveServer1', 'HiveServer2', 'HiveThriftServer' + :type server_type: str or ~azure.mgmt.datafactory.models.HiveServerType + :param thrift_transport_protocol: The transport protocol to use in the + Thrift layer. Possible values include: 'Binary', 'SASL', 'HTTP ' + :type thrift_transport_protocol: str or + ~azure.mgmt.datafactory.models.HiveThriftTransportProtocol + :param authentication_type: Required. The authentication method used to + access the Hive server. Possible values include: 'Anonymous', 'Username', + 'UsernameAndPassword', 'WindowsAzureHDInsightService' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.HiveAuthenticationType + :param service_discovery_mode: true to indicate using the ZooKeeper + service, false not. + :type service_discovery_mode: object + :param zoo_keeper_name_space: The namespace on ZooKeeper under which Hive + Server 2 nodes are added. + :type zoo_keeper_name_space: object + :param use_native_query: Specifies whether the driver uses native HiveQL + queries,or converts them into an equivalent form in HiveQL. + :type use_native_query: object + :param username: The user name that you use to access Hive Server. + :type username: object + :param password: The password corresponding to the user name that you + provided in the Username field + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param http_path: The partial URL corresponding to the Hive server. + :type http_path: object + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. 
+ :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a + CA-issued SSL certificate name to match the host name of the server when + connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, + 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'service_discovery_mode': {'key': 'typeProperties.serviceDiscoveryMode', 'type': 'object'}, + 'zoo_keeper_name_space': {'key': 'typeProperties.zooKeeperNameSpace', 'type': 'object'}, + 'use_native_query': {'key': 'typeProperties.useNativeQuery', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, server_type=None, thrift_transport_protocol=None, service_discovery_mode=None, zoo_keeper_name_space=None, use_native_query=None, username=None, password=None, http_path=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: + super(HiveLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, 
annotations=annotations, **kwargs) + self.host = host + self.port = port + self.server_type = server_type + self.thrift_transport_protocol = thrift_transport_protocol + self.authentication_type = authentication_type + self.service_discovery_mode = service_discovery_mode + self.zoo_keeper_name_space = zoo_keeper_name_space + self.use_native_query = use_native_query + self.username = username + self.password = password + self.http_path = http_path + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.encrypted_credential = encrypted_credential + self.type = 'Hive' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset.py index 2521f219743a..07b6f2b54901 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset.py @@ -15,6 +15,8 @@ class HiveObjectDataset(Dataset): """Hive Server dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class HiveObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class HiveObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. 
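A minimal sketch of the required host/authentication_type pairing on the HiveLinkedService constructor above (host names and credentials hypothetical):

    from azure.mgmt.datafactory.models import HiveLinkedService, SecureString

    hive_ls = HiveLinkedService(
        host='hive-head-1;hive-head-2',             # ';'-separated multiple hosts
        authentication_type='UsernameAndPassword',  # required enum value
        port=10000,
        username='admin',
        password=SecureString(value='<placeholder>'),
        enable_ssl=True,
    )
    assert hive_ls.type == 'Hive'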
@@ -72,9 +74,9 @@ class HiveObjectDataset(Dataset): 'hive_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, hive_object_dataset_schema=None): - super(HiveObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name - self.table = table - self.hive_object_dataset_schema = hive_object_dataset_schema + def __init__(self, **kwargs): + super(HiveObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.hive_object_dataset_schema = kwargs.get('hive_object_dataset_schema', None) self.type = 'HiveObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset_py3.py new file mode 100644 index 000000000000..69384bdfa99a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class HiveObjectDataset(Dataset): + """Hive Server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The table name of the Hive. Type: string (or Expression with + resultType string). 
+ :type table: object + :param hive_object_dataset_schema: The schema name of the Hive. Type: + string (or Expression with resultType string). + :type hive_object_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'hive_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, hive_object_dataset_schema=None, **kwargs) -> None: + super(HiveObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.table = table + self.hive_object_dataset_schema = hive_object_dataset_schema + self.type = 'HiveObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source.py index 0dc4aa8e5aed..3af88c3280e3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source.py @@ -15,6 +15,8 @@ class HiveSource(CopySource): """A copy activity Hive Server source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class HiveSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
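A sketch of the dataset above, using the newer schema/table split instead of the retiring table_name property (the linked-service name is hypothetical):

    from azure.mgmt.datafactory.models import (
        HiveObjectDataset,
        LinkedServiceReference,
    )

    hive_ds = HiveObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name='MyHiveLS'),
        hive_object_dataset_schema='default',  # maps to typeProperties.schema
        table='trips',
    )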
@@ -49,7 +51,7 @@ class HiveSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(HiveSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'HiveSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source_py3.py new file mode 100644 index 000000000000..6c09191b8c1b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class HiveSource(CopySource): + """A copy activity Hive Server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'HiveSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset.py index 9df5a098e0c3..f2184dea151f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset.py @@ -15,6 +15,8 @@ class HttpDataset(Dataset): """A file in an HTTP web server. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class HttpDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class HttpDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. 
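And the matching copy-activity source, effectively a one-liner (query text hypothetical):

    from azure.mgmt.datafactory.models import HiveSource

    hive_source = HiveSource(
        query='SELECT vendor_id, COUNT(*) FROM trips GROUP BY vendor_id',
    )
    assert hive_source.type == 'HiveSource'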
:type type: str :param relative_url: The relative URL based on the URL in the HttpLinkedService refers to an HTTP file Type: string (or Expression with @@ -86,12 +88,12 @@ class HttpDataset(Dataset): 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, relative_url=None, request_method=None, request_body=None, additional_headers=None, format=None, compression=None): - super(HttpDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.relative_url = relative_url - self.request_method = request_method - self.request_body = request_body - self.additional_headers = additional_headers - self.format = format - self.compression = compression + def __init__(self, **kwargs): + super(HttpDataset, self).__init__(**kwargs) + self.relative_url = kwargs.get('relative_url', None) + self.request_method = kwargs.get('request_method', None) + self.request_body = kwargs.get('request_body', None) + self.additional_headers = kwargs.get('additional_headers', None) + self.format = kwargs.get('format', None) + self.compression = kwargs.get('compression', None) self.type = 'HttpFile' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset_py3.py new file mode 100644 index 000000000000..09f97a03a95d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset_py3.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class HttpDataset(Dataset): + """A file in an HTTP web server. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. 
If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param relative_url: The relative URL based on the URL in the + HttpLinkedService refers to an HTTP file Type: string (or Expression with + resultType string). + :type relative_url: object + :param request_method: The HTTP method for the HTTP request. Type: string + (or Expression with resultType string). + :type request_method: object + :param request_body: The body for the HTTP request. Type: string (or + Expression with resultType string). + :type request_body: object + :param additional_headers: The headers for the HTTP Request. e.g. + request-header-name-1:request-header-value-1 + ... + request-header-name-n:request-header-value-n Type: string (or Expression + with resultType string). + :type additional_headers: object + :param format: The format of files. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param compression: The data compression method used on files. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, + 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, + 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, relative_url=None, request_method=None, request_body=None, additional_headers=None, format=None, compression=None, **kwargs) -> None: + super(HttpDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.relative_url = relative_url + self.request_method = request_method + self.request_body = request_body + self.additional_headers = additional_headers + self.format = format + self.compression = compression + self.type = 'HttpFile' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service.py index 0459cf8ea792..6232bc45fee4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service.py +++ 
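A sketch of the HttpDataset model above, pointing at a file relative to the linked service's base URL (names and header values hypothetical):

    from azure.mgmt.datafactory.models import HttpDataset, LinkedServiceReference

    http_ds = HttpDataset(
        linked_service_name=LinkedServiceReference(reference_name='MyHttpLS'),
        relative_url='data/2019/06/report.csv',
        request_method='GET',
        additional_headers='x-api-key:<placeholder>',
    )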
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service.py @@ -15,6 +15,8 @@ class HttpLinkedService(LinkedService): """Linked service for an HTTP source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,9 +31,9 @@ class HttpLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param url: The base URL of the HTTP endpoint, e.g. + :param url: Required. The base URL of the HTTP endpoint, e.g. http://www.microsoft.com. Type: string (or Expression with resultType string). :type url: object @@ -90,14 +92,14 @@ class HttpLinkedService(LinkedService): 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, } - def __init__(self, url, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, authentication_type=None, user_name=None, password=None, embedded_cert_data=None, cert_thumbprint=None, encrypted_credential=None, enable_server_certificate_validation=None): - super(HttpLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.url = url - self.authentication_type = authentication_type - self.user_name = user_name - self.password = password - self.embedded_cert_data = embedded_cert_data - self.cert_thumbprint = cert_thumbprint - self.encrypted_credential = encrypted_credential - self.enable_server_certificate_validation = enable_server_certificate_validation + def __init__(self, **kwargs): + super(HttpLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.embedded_cert_data = kwargs.get('embedded_cert_data', None) + self.cert_thumbprint = kwargs.get('cert_thumbprint', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None) self.type = 'HttpServer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service_py3.py new file mode 100644 index 000000000000..7f70adb08425 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service_py3.py @@ -0,0 +1,105 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class HttpLinkedService(LinkedService): + """Linked service for an HTTP source. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The base URL of the HTTP endpoint, e.g. + http://www.microsoft.com. Type: string (or Expression with resultType + string). + :type url: object + :param authentication_type: The authentication type to be used to connect + to the HTTP server. Possible values include: 'Basic', 'Anonymous', + 'Digest', 'Windows', 'ClientCertificate' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.HttpAuthenticationType + :param user_name: User name for Basic, Digest, or Windows authentication. + Type: string (or Expression with resultType string). + :type user_name: object + :param password: Password for Basic, Digest, Windows, or ClientCertificate + with EmbeddedCertData authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param embedded_cert_data: Base64 encoded certificate data for + ClientCertificate authentication. For on-premises copy with + ClientCertificate authentication, either CertThumbprint or + EmbeddedCertData/Password should be specified. Type: string (or Expression + with resultType string). + :type embedded_cert_data: object + :param cert_thumbprint: Thumbprint of certificate for ClientCertificate + authentication. Only valid for on-premises copy. For on-premises copy with + ClientCertificate authentication, either CertThumbprint or + EmbeddedCertData/Password should be specified. Type: string (or Expression + with resultType string). + :type cert_thumbprint: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + :param enable_server_certificate_validation: If true, validate the HTTPS + server SSL certificate. Default value is true. Type: boolean (or + Expression with resultType boolean). 
+ :type enable_server_certificate_validation: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'embedded_cert_data': {'key': 'typeProperties.embeddedCertData', 'type': 'object'}, + 'cert_thumbprint': {'key': 'typeProperties.certThumbprint', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, + } + + def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, user_name=None, password=None, embedded_cert_data=None, cert_thumbprint=None, encrypted_credential=None, enable_server_certificate_validation=None, **kwargs) -> None: + super(HttpLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.authentication_type = authentication_type + self.user_name = user_name + self.password = password + self.embedded_cert_data = embedded_cert_data + self.cert_thumbprint = cert_thumbprint + self.encrypted_credential = encrypted_credential + self.enable_server_certificate_validation = enable_server_certificate_validation + self.type = 'HttpServer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings.py index d12ae2718ba1..a7c175da3489 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings.py @@ -15,10 +15,12 @@ class HttpReadSettings(StoreReadSettings): """Sftp read settings. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: The read setting type. + :param type: Required. The read setting type. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. 
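A minimal sketch of the linked service above with Basic authentication (URL and credentials hypothetical):

    from azure.mgmt.datafactory.models import HttpLinkedService, SecureString

    http_ls = HttpLinkedService(
        url='https://www.contoso.com/feeds',        # required base URL
        authentication_type='Basic',
        user_name='reader',
        password=SecureString(value='<placeholder>'),
        enable_server_certificate_validation=True,
    )
    assert http_ls.type == 'HttpServer'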
Type: integer (or Expression with resultType
@@ -53,9 +55,9 @@ class HttpReadSettings(StoreReadSettings):
         'request_timeout': {'key': 'requestTimeout', 'type': 'object'},
     }
 
-    def __init__(self, type, additional_properties=None, max_concurrent_connections=None, request_method=None, request_body=None, additional_headers=None, request_timeout=None):
-        super(HttpReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections)
-        self.request_method = request_method
-        self.request_body = request_body
-        self.additional_headers = additional_headers
-        self.request_timeout = request_timeout
+    def __init__(self, **kwargs):
+        super(HttpReadSettings, self).__init__(**kwargs)
+        self.request_method = kwargs.get('request_method', None)
+        self.request_body = kwargs.get('request_body', None)
+        self.additional_headers = kwargs.get('additional_headers', None)
+        self.request_timeout = kwargs.get('request_timeout', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings_py3.py
new file mode 100644
index 000000000000..7cea9207c996
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_settings_py3.py
@@ -0,0 +1,63 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .store_read_settings_py3 import StoreReadSettings
+
+
+class HttpReadSettings(StoreReadSettings):
+    """Http read settings.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param type: Required. The read setting type.
+    :type type: str
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param request_method: The HTTP method used to call the RESTful API. The
+     default is GET. Type: string (or Expression with resultType string).
+    :type request_method: object
+    :param request_body: The HTTP request body to the RESTful API if
+     requestMethod is POST. Type: string (or Expression with resultType
+     string).
+    :type request_body: object
+    :param additional_headers: The additional HTTP headers in the request to
+     the RESTful API. Type: string (or Expression with resultType string).
+    :type additional_headers: object
+    :param request_timeout: Specifies the timeout for a HTTP client to get
+     HTTP response from HTTP server.
+ :type request_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'request_method': {'key': 'requestMethod', 'type': 'object'}, + 'request_body': {'key': 'requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, + 'request_timeout': {'key': 'requestTimeout', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, request_method=None, request_body=None, additional_headers=None, request_timeout=None, **kwargs) -> None: + super(HttpReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.request_method = request_method + self.request_body = request_body + self.additional_headers = additional_headers + self.request_timeout = request_timeout diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location.py index ece12af73d55..94106fae9d15 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location.py @@ -15,10 +15,12 @@ class HttpServerLocation(DatasetLocation): """The location of http server. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Type of dataset storage location. + :param type: Required. Type of dataset storage location. :type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string) @@ -43,6 +45,6 @@ class HttpServerLocation(DatasetLocation): 'relative_url': {'key': 'relativeUrl', 'type': 'object'}, } - def __init__(self, type, additional_properties=None, folder_path=None, file_name=None, relative_url=None): - super(HttpServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name) - self.relative_url = relative_url + def __init__(self, **kwargs): + super(HttpServerLocation, self).__init__(**kwargs) + self.relative_url = kwargs.get('relative_url', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location_py3.py new file mode 100644 index 000000000000..c52c53dcf357 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location_py3.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class HttpServerLocation(DatasetLocation): + """The location of http server. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param relative_url: Specify the relativeUrl of http server. Type: string + (or Expression with resultType string) + :type relative_url: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'relative_url': {'key': 'relativeUrl', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, relative_url=None, **kwargs) -> None: + super(HttpServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) + self.relative_url = relative_url diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source.py index d27f1f9f7008..ae131aa16c8c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source.py @@ -15,6 +15,8 @@ class HttpSource(CopySource): """A copy activity source for an HTTP file. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class HttpSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param http_request_timeout: Specifies the timeout for a HTTP client to get HTTP response from HTTP server. 
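The two read-side models added above compose when a dataset points at an HTTP file; a brief sketch with hypothetical paths and timeout, passing the type discriminators explicitly because these versions declare type as a required parameter rather than a server-filled constant:

    from azure.mgmt.datafactory.models import HttpReadSettings, HttpServerLocation

    location = HttpServerLocation(
        type='HttpServerLocation',          # required in this version
        relative_url='reports/latest.csv',  # resolved against the linked service url
    )
    settings = HttpReadSettings(
        type='HttpReadSettings',            # required in this version
        request_method='GET',
        request_timeout='00:01:40',         # Timespan-style string
    )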
The default value is equivalent to @@ -52,7 +54,7 @@ class HttpSource(CopySource): 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, http_request_timeout=None): - super(HttpSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.http_request_timeout = http_request_timeout + def __init__(self, **kwargs): + super(HttpSource, self).__init__(**kwargs) + self.http_request_timeout = kwargs.get('http_request_timeout', None) self.type = 'HttpSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source_py3.py new file mode 100644 index 000000000000..df339fc3aef7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source_py3.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class HttpSource(CopySource): + """A copy activity source for an HTTP file. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param http_request_timeout: Specifies the timeout for a HTTP client to + get HTTP response from HTTP server. The default value is equivalent to + System.Net.HttpWebRequest.Timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type http_request_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, http_request_timeout=None, **kwargs) -> None: + super(HttpSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.http_request_timeout = http_request_timeout + self.type = 'HttpSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service.py index be34343b5040..3d0d6cb3a6f4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service.py @@ -15,6 +15,8 @@ class HubspotLinkedService(LinkedService): """Hubspot Service linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,9 +31,10 @@ class HubspotLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param client_id: The client ID associated with your Hubspot application. + :param client_id: Required. The client ID associated with your Hubspot + application. :type client_id: object :param client_secret: The client secret associated with your Hubspot application. 
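A short sketch of the HttpSource model completed above; the timeout is a hypothetical Timespan-format string matching the documented pattern:

    from azure.mgmt.datafactory.models import HttpSource

    source = HttpSource(
        http_request_timeout='00:05:00',
        max_concurrent_connections=2,
    )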
@@ -80,14 +83,14 @@ class HubspotLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, client_id, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, client_secret=None, access_token=None, refresh_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None): - super(HubspotLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.client_id = client_id - self.client_secret = client_secret - self.access_token = access_token - self.refresh_token = refresh_token - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(HubspotLinkedService, self).__init__(**kwargs) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.access_token = kwargs.get('access_token', None) + self.refresh_token = kwargs.get('refresh_token', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'Hubspot' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service_py3.py new file mode 100644 index 000000000000..272d613e9cd1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service_py3.py @@ -0,0 +1,96 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class HubspotLinkedService(LinkedService): + """Hubspot Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param client_id: Required. The client ID associated with your Hubspot + application. 
+ :type client_id: object + :param client_secret: The client secret associated with your Hubspot + application. + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param access_token: The access token obtained when initially + authenticating your OAuth integration. + :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param refresh_token: The refresh token obtained when initially + authenticating your OAuth integration. + :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, access_token=None, refresh_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(HubspotLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.client_id = client_id + self.client_secret = client_secret + self.access_token = access_token + self.refresh_token = refresh_token + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Hubspot' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset.py index 4cd69a32be47..ce8994b4db4a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset.py @@ -15,6 +15,8 @@ class HubspotObjectDataset(Dataset): """Hubspot Service dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class HubspotObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class HubspotObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -64,7 +66,7 @@ class HubspotObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): - super(HubspotObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name + def __init__(self, **kwargs): + super(HubspotObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) self.type = 'HubspotObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset_py3.py new file mode 100644 index 000000000000..bd2309101f72 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class HubspotObjectDataset(Dataset): + """Hubspot Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. 
+ :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(HubspotObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'HubspotObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source.py index e72777465725..b4b4c618c33e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source.py @@ -15,6 +15,8 @@ class HubspotSource(CopySource): """A copy activity Hubspot Service source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class HubspotSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. 
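A combined sketch of the HubspotLinkedService and HubspotObjectDataset models added above; the client ID, token, reference name, and table name are all hypothetical:

    from azure.mgmt.datafactory.models import (
        HubspotLinkedService, HubspotObjectDataset,
        LinkedServiceReference, SecureString,
    )

    hubspot_ls = HubspotLinkedService(
        client_id='<app-client-id>',                       # required
        access_token=SecureString(value='<oauth-token>'),
    )
    companies = HubspotObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name='HubspotLS'),
        table_name='Companies',
    )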
Type: string (or Expression with resultType string). @@ -49,7 +51,7 @@ class HubspotSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(HubspotSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'HubspotSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source_py3.py new file mode 100644 index 000000000000..a29811342ce0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class HubspotSource(CopySource): + """A copy activity Hubspot Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'HubspotSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity.py index 9983ad2d4ce6..a8cb1da690e1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity.py @@ -17,10 +17,12 @@ class IfConditionActivity(ControlActivity): activities under the ifTrueActivities property or the ifFalseActivities property depending on the result of the expression. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Activity name. + :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str @@ -28,10 +30,10 @@ class IfConditionActivity(ControlActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param expression: An expression that would evaluate to Boolean. This is - used to determine the block of activities (ifTrueActivities or + :param expression: Required. An expression that would evaluate to Boolean. + This is used to determine the block of activities (ifTrueActivities or ifFalseActivities) that will be executed. 
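To show where the HubspotSource just added sits in practice, a hedged sketch wiring it into a CopyActivity with a blob sink; the dataset reference names and the query are hypothetical:

    from azure.mgmt.datafactory.models import (
        BlobSink, CopyActivity, DatasetReference, HubspotSource,
    )

    copy = CopyActivity(
        name='CopyHubspotCompanies',
        source=HubspotSource(query='SELECT * FROM Companies'),
        sink=BlobSink(),
        inputs=[DatasetReference(reference_name='HubspotCompanies')],
        outputs=[DatasetReference(reference_name='BlobLanding')],
    )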
:type expression: ~azure.mgmt.datafactory.models.Expression :param if_true_activities: List of activities to execute if expression is @@ -62,9 +64,9 @@ class IfConditionActivity(ControlActivity): 'if_false_activities': {'key': 'typeProperties.ifFalseActivities', 'type': '[Activity]'}, } - def __init__(self, name, expression, additional_properties=None, description=None, depends_on=None, user_properties=None, if_true_activities=None, if_false_activities=None): - super(IfConditionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties) - self.expression = expression - self.if_true_activities = if_true_activities - self.if_false_activities = if_false_activities + def __init__(self, **kwargs): + super(IfConditionActivity, self).__init__(**kwargs) + self.expression = kwargs.get('expression', None) + self.if_true_activities = kwargs.get('if_true_activities', None) + self.if_false_activities = kwargs.get('if_false_activities', None) self.type = 'IfCondition' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity_py3.py new file mode 100644 index 000000000000..7921a2602807 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity_py3 import ControlActivity + + +class IfConditionActivity(ControlActivity): + """This activity evaluates a boolean expression and executes either the + activities under the ifTrueActivities property or the ifFalseActivities + property depending on the result of the expression. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param expression: Required. An expression that would evaluate to Boolean. + This is used to determine the block of activities (ifTrueActivities or + ifFalseActivities) that will be executed. + :type expression: ~azure.mgmt.datafactory.models.Expression + :param if_true_activities: List of activities to execute if expression is + evaluated to true. This is an optional property and if not provided, the + activity will exit without any action. 
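A minimal sketch of the IfConditionActivity being defined here; the pipeline parameter and the WaitActivity placeholder branch are hypothetical:

    from azure.mgmt.datafactory.models import (
        Expression, IfConditionActivity, WaitActivity,
    )

    branch = IfConditionActivity(
        name='BranchOnFlag',
        expression=Expression(value='@bool(pipeline().parameters.doCopy)'),
        if_true_activities=[WaitActivity(name='Proceed', wait_time_in_seconds=1)],
        # if_false_activities omitted: the activity exits without action.
    )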
+ :type if_true_activities: list[~azure.mgmt.datafactory.models.Activity] + :param if_false_activities: List of activities to execute if expression is + evaluated to false. This is an optional property and if not provided, the + activity will exit without any action. + :type if_false_activities: list[~azure.mgmt.datafactory.models.Activity] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'expression': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'expression': {'key': 'typeProperties.expression', 'type': 'Expression'}, + 'if_true_activities': {'key': 'typeProperties.ifTrueActivities', 'type': '[Activity]'}, + 'if_false_activities': {'key': 'typeProperties.ifFalseActivities', 'type': '[Activity]'}, + } + + def __init__(self, *, name: str, expression, additional_properties=None, description: str=None, depends_on=None, user_properties=None, if_true_activities=None, if_false_activities=None, **kwargs) -> None: + super(IfConditionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.expression = expression + self.if_true_activities = if_true_activities + self.if_false_activities = if_false_activities + self.type = 'IfCondition' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service.py index 14a6a12e1615..a704852652db 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service.py @@ -15,6 +15,8 @@ class ImpalaLinkedService(LinkedService): """Impala server linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,16 +31,17 @@ class ImpalaLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param host: The IP address or host name of the Impala server. (i.e. - 192.168.222.160) + :param host: Required. The IP address or host name of the Impala server. + (i.e. 192.168.222.160) :type host: object :param port: The TCP port that the Impala server uses to listen for client connections. The default value is 21050. :type port: object - :param authentication_type: The authentication type to use. Possible - values include: 'Anonymous', 'SASLUsername', 'UsernameAndPassword' + :param authentication_type: Required. The authentication type to use. + Possible values include: 'Anonymous', 'SASLUsername', + 'UsernameAndPassword' :type authentication_type: str or ~azure.mgmt.datafactory.models.ImpalaAuthenticationType :param username: The user name used to access the Impala server. 
The @@ -98,17 +101,17 @@ class ImpalaLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, host, authentication_type, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, port=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None): - super(ImpalaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.host = host - self.port = port - self.authentication_type = authentication_type - self.username = username - self.password = password - self.enable_ssl = enable_ssl - self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch - self.allow_self_signed_server_cert = allow_self_signed_server_cert - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(ImpalaLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.port = kwargs.get('port', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) + self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'Impala' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service_py3.py new file mode 100644 index 000000000000..55b2e0c861d7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service_py3.py @@ -0,0 +1,117 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class ImpalaLinkedService(LinkedService): + """Impala server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The IP address or host name of the Impala server. + (i.e. 192.168.222.160) + :type host: object + :param port: The TCP port that the Impala server uses to listen for client + connections. The default value is 21050. + :type port: object + :param authentication_type: Required. The authentication type to use. + Possible values include: 'Anonymous', 'SASLUsername', + 'UsernameAndPassword' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.ImpalaAuthenticationType + :param username: The user name used to access the Impala server. The + default value is anonymous when using SASLUsername. + :type username: object + :param password: The password corresponding to the user name when using + UsernameAndPassword. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a + CA-issued SSL certificate name to match the host name of the server when + connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: + super(ImpalaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.port = port + self.authentication_type = authentication_type + self.username = username + self.password = password + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.encrypted_credential = encrypted_credential + self.type = 'Impala' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset.py index f3033b55b65e..8faee4f09240 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset.py @@ -15,6 +15,8 @@ class ImpalaObjectDataset(Dataset): """Impala server dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class ImpalaObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. 
Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class ImpalaObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. @@ -72,9 +74,9 @@ class ImpalaObjectDataset(Dataset): 'impala_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, impala_object_dataset_schema=None): - super(ImpalaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name - self.table = table - self.impala_object_dataset_schema = impala_object_dataset_schema + def __init__(self, **kwargs): + super(ImpalaObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.impala_object_dataset_schema = kwargs.get('impala_object_dataset_schema', None) self.type = 'ImpalaObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset_py3.py new file mode 100644 index 000000000000..5652b5c9e4b0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class ImpalaObjectDataset(Dataset): + """Impala server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The table name of the Impala. Type: string (or Expression + with resultType string). + :type table: object + :param impala_object_dataset_schema: The schema name of the Impala. Type: + string (or Expression with resultType string). + :type impala_object_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'impala_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, impala_object_dataset_schema=None, **kwargs) -> None: + super(ImpalaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.table = table + self.impala_object_dataset_schema = impala_object_dataset_schema + self.type = 'ImpalaObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source.py index 903649612c18..9e27dbdb6266 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source.py @@ -15,6 +15,8 @@ class ImpalaSource(CopySource): """A copy activity Impala server source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class ImpalaSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
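A combined sketch of the ImpalaLinkedService and ImpalaObjectDataset models added above; the port and the schema/table names are hypothetical, and the host mirrors the docstring's example address:

    from azure.mgmt.datafactory.models import (
        ImpalaLinkedService, ImpalaObjectDataset, LinkedServiceReference,
    )

    impala_ls = ImpalaLinkedService(
        host='192.168.222.160',           # required
        authentication_type='Anonymous',  # required; an ImpalaAuthenticationType value
        port=21050,
    )
    sales = ImpalaObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name='ImpalaLS'),
        impala_object_dataset_schema='analytics',  # serialized as typeProperties.schema
        table='sales',
    )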
@@ -49,7 +51,7 @@ class ImpalaSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(ImpalaSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'ImpalaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source_py3.py new file mode 100644 index 000000000000..f7dc4016d020 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class ImpalaSource(CopySource): + """A copy activity Impala server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'ImpalaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service.py index 7127ff0774e1..2a58e7a0f7d3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service.py @@ -15,6 +15,8 @@ class InformixLinkedService(LinkedService): """Informix linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,10 +31,10 @@ class InformixLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param connection_string: The non-access credential portion of the - connection string as well as an optional encrypted credential. Type: + :param connection_string: Required. The non-access credential portion of + the connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. 
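A short sketch of the ImpalaSource completed above, including the retry fields inherited from CopySource; the query and retry values are hypothetical:

    from azure.mgmt.datafactory.models import ImpalaSource

    source = ImpalaSource(
        query='SELECT * FROM analytics.sales',
        source_retry_count=3,
        source_retry_wait='00:00:30',  # Timespan-style string
    )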
:type connection_string: object :param authentication_type: Type of authentication used to connect to the @@ -73,12 +75,12 @@ class InformixLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, connection_string, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, authentication_type=None, credential=None, user_name=None, password=None, encrypted_credential=None): - super(InformixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.connection_string = connection_string - self.authentication_type = authentication_type - self.credential = credential - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(InformixLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.credential = kwargs.get('credential', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'Informix' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service_py3.py new file mode 100644 index 000000000000..03aadada664d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class InformixLinkedService(LinkedService): + """Informix linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The non-access credential portion of + the connection string as well as an optional encrypted credential. Type: + string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param authentication_type: Type of authentication used to connect to the + Informix as ODBC data store. 
Possible values are: Anonymous and Basic. + Type: string (or Expression with resultType string). + :type authentication_type: object + :param credential: The access credential portion of the connection string + specified in driver-specific property-value format. + :type credential: ~azure.mgmt.datafactory.models.SecretBase + :param user_name: User name for Basic authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, credential=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(InformixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.authentication_type = authentication_type + self.credential = credential + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'Informix' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink.py index a5a6c03d13d5..c511f4ecc174 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink.py @@ -15,6 +15,8 @@ class InformixSink(CopySink): """A copy activity Informix sink. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -36,7 +38,7 @@ class InformixSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. 
:type type: str :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). @@ -58,7 +60,7 @@ class InformixSink(CopySink): 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } - def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None): - super(InformixSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.pre_copy_script = pre_copy_script + def __init__(self, **kwargs): + super(InformixSink, self).__init__(**kwargs) + self.pre_copy_script = kwargs.get('pre_copy_script', None) self.type = 'InformixSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink_py3.py new file mode 100644 index 000000000000..b0681ec0d423 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink_py3.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class InformixSink(CopySink): + """A copy activity Informix sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). 
+ :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(InformixSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.pre_copy_script = pre_copy_script + self.type = 'InformixSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source.py index 6dfce8fef7b4..6cab908c7014 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source.py @@ -15,6 +15,8 @@ class InformixSource(CopySource): """A copy activity source for Informix. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class InformixSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType string). @@ -49,7 +51,7 @@ class InformixSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(InformixSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'InformixSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source_py3.py new file mode 100644 index 000000000000..ed8fb0221239 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class InformixSource(CopySource): + """A copy activity source for Informix. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'InformixSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset.py index 7ce066b16b4d..8b7364bff652 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset.py @@ -15,6 +15,8 @@ class InformixTableDataset(Dataset): """The Informix table dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class InformixTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. 
@@ -39,7 +41,7 @@ class InformixTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: The Informix table name. Type: string (or Expression with resultType string). @@ -64,7 +66,7 @@ class InformixTableDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): - super(InformixTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name + def __init__(self, **kwargs): + super(InformixTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) self.type = 'InformixTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset_py3.py new file mode 100644 index 000000000000..05c458e797b1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class InformixTableDataset(Dataset): + """The Informix table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Informix table name. 
Type: string (or Expression + with resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(InformixTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'InformixTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime.py index 69e2792fda46..5dd45d16f76e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime.py @@ -19,12 +19,14 @@ class IntegrationRuntime(Model): You probably want to use the sub-classes and not this class directly. Known sub-classes are: SelfHostedIntegrationRuntime, ManagedIntegrationRuntime + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Integration runtime description. :type description: str - :param type: Constant filled by server. + :param type: Required. Constant filled by server. 
:type type: str """ @@ -42,8 +44,8 @@ class IntegrationRuntime(Model): 'type': {'SelfHosted': 'SelfHostedIntegrationRuntime', 'Managed': 'ManagedIntegrationRuntime'} } - def __init__(self, additional_properties=None, description=None): - super(IntegrationRuntime, self).__init__() - self.additional_properties = additional_properties - self.description = description + def __init__(self, **kwargs): + super(IntegrationRuntime, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.description = kwargs.get('description', None) self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys.py index e0582ea5cdf7..12ed6925585e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys.py @@ -26,7 +26,7 @@ class IntegrationRuntimeAuthKeys(Model): 'auth_key2': {'key': 'authKey2', 'type': 'str'}, } - def __init__(self, auth_key1=None, auth_key2=None): - super(IntegrationRuntimeAuthKeys, self).__init__() - self.auth_key1 = auth_key1 - self.auth_key2 = auth_key2 + def __init__(self, **kwargs): + super(IntegrationRuntimeAuthKeys, self).__init__(**kwargs) + self.auth_key1 = kwargs.get('auth_key1', None) + self.auth_key2 = kwargs.get('auth_key2', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys_py3.py new file mode 100644 index 000000000000..b807d4cd5b55 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys_py3.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeAuthKeys(Model): + """The integration runtime authentication keys. + + :param auth_key1: The primary integration runtime authentication key. + :type auth_key1: str + :param auth_key2: The secondary integration runtime authentication key. 
+ :type auth_key2: str + """ + + _attribute_map = { + 'auth_key1': {'key': 'authKey1', 'type': 'str'}, + 'auth_key2': {'key': 'authKey2', 'type': 'str'}, + } + + def __init__(self, *, auth_key1: str=None, auth_key2: str=None, **kwargs) -> None: + super(IntegrationRuntimeAuthKeys, self).__init__(**kwargs) + self.auth_key1 = auth_key1 + self.auth_key2 = auth_key2 diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties.py index a88e698ca0ea..e387ef4077f2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties.py @@ -50,11 +50,11 @@ class IntegrationRuntimeComputeProperties(Model): 'v_net_properties': {'key': 'vNetProperties', 'type': 'IntegrationRuntimeVNetProperties'}, } - def __init__(self, additional_properties=None, location=None, node_size=None, number_of_nodes=None, max_parallel_executions_per_node=None, v_net_properties=None): - super(IntegrationRuntimeComputeProperties, self).__init__() - self.additional_properties = additional_properties - self.location = location - self.node_size = node_size - self.number_of_nodes = number_of_nodes - self.max_parallel_executions_per_node = max_parallel_executions_per_node - self.v_net_properties = v_net_properties + def __init__(self, **kwargs): + super(IntegrationRuntimeComputeProperties, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.location = kwargs.get('location', None) + self.node_size = kwargs.get('node_size', None) + self.number_of_nodes = kwargs.get('number_of_nodes', None) + self.max_parallel_executions_per_node = kwargs.get('max_parallel_executions_per_node', None) + self.v_net_properties = kwargs.get('v_net_properties', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties_py3.py new file mode 100644 index 000000000000..f47f339dd067 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties_py3.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeComputeProperties(Model): + """The compute resource properties for managed integration runtime. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param location: The location for managed integration runtime. 
The + supported regions could be found on + https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement-activities + :type location: str + :param node_size: The node size requirement to managed integration + runtime. + :type node_size: str + :param number_of_nodes: The required number of nodes for managed + integration runtime. + :type number_of_nodes: int + :param max_parallel_executions_per_node: Maximum parallel executions count + per node for managed integration runtime. + :type max_parallel_executions_per_node: int + :param v_net_properties: VNet properties for managed integration runtime. + :type v_net_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeVNetProperties + """ + + _validation = { + 'number_of_nodes': {'minimum': 1}, + 'max_parallel_executions_per_node': {'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'node_size': {'key': 'nodeSize', 'type': 'str'}, + 'number_of_nodes': {'key': 'numberOfNodes', 'type': 'int'}, + 'max_parallel_executions_per_node': {'key': 'maxParallelExecutionsPerNode', 'type': 'int'}, + 'v_net_properties': {'key': 'vNetProperties', 'type': 'IntegrationRuntimeVNetProperties'}, + } + + def __init__(self, *, additional_properties=None, location: str=None, node_size: str=None, number_of_nodes: int=None, max_parallel_executions_per_node: int=None, v_net_properties=None, **kwargs) -> None: + super(IntegrationRuntimeComputeProperties, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.location = location + self.node_size = node_size + self.number_of_nodes = number_of_nodes + self.max_parallel_executions_per_node = max_parallel_executions_per_node + self.v_net_properties = v_net_properties diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info.py index 3bdb02304d52..c185f916e8e5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info.py @@ -59,9 +59,9 @@ class IntegrationRuntimeConnectionInfo(Model): 'is_identity_cert_exprired': {'key': 'isIdentityCertExprired', 'type': 'bool'}, } - def __init__(self, additional_properties=None): - super(IntegrationRuntimeConnectionInfo, self).__init__() - self.additional_properties = additional_properties + def __init__(self, **kwargs): + super(IntegrationRuntimeConnectionInfo, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) self.service_token = None self.identity_cert_thumbprint = None self.host_service_uri = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info_py3.py new file mode 100644 index 000000000000..8cc5aceb16d7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeConnectionInfo(Model): + """Connection information for encrypting the on-premises data source + credentials. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar service_token: The token generated in service. Callers use this + token to authenticate to integration runtime. + :vartype service_token: str + :ivar identity_cert_thumbprint: The integration runtime SSL certificate + thumbprint. Click-Once application uses it to do server validation. + :vartype identity_cert_thumbprint: str + :ivar host_service_uri: The on-premises integration runtime host URL. + :vartype host_service_uri: str + :ivar version: The integration runtime version. + :vartype version: str + :ivar public_key: The public key for encrypting a credential when + transferring the credential to the integration runtime. + :vartype public_key: str + :ivar is_identity_cert_exprired: Whether the identity certificate is + expired. + :vartype is_identity_cert_exprired: bool + """ + + _validation = { + 'service_token': {'readonly': True}, + 'identity_cert_thumbprint': {'readonly': True}, + 'host_service_uri': {'readonly': True}, + 'version': {'readonly': True}, + 'public_key': {'readonly': True}, + 'is_identity_cert_exprired': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'service_token': {'key': 'serviceToken', 'type': 'str'}, + 'identity_cert_thumbprint': {'key': 'identityCertThumbprint', 'type': 'str'}, + 'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + 'public_key': {'key': 'publicKey', 'type': 'str'}, + 'is_identity_cert_exprired': {'key': 'isIdentityCertExprired', 'type': 'bool'}, + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(IntegrationRuntimeConnectionInfo, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.service_token = None + self.identity_cert_thumbprint = None + self.host_service_uri = None + self.version = None + self.public_key = None + self.is_identity_cert_exprired = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties.py index b76cc5e39078..44cd5fe5979b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties.py @@ -27,7 +27,7 @@ class IntegrationRuntimeCustomSetupScriptProperties(Model): 'sas_token': {'key': 'sasToken', 'type': 'SecureString'}, } - def __init__(self, blob_container_uri=None, sas_token=None): - super(IntegrationRuntimeCustomSetupScriptProperties, self).__init__() - self.blob_container_uri = blob_container_uri - self.sas_token = sas_token 
+ def __init__(self, **kwargs): + super(IntegrationRuntimeCustomSetupScriptProperties, self).__init__(**kwargs) + self.blob_container_uri = kwargs.get('blob_container_uri', None) + self.sas_token = kwargs.get('sas_token', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties_py3.py new file mode 100644 index 000000000000..7f3c08c0b339 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties_py3.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeCustomSetupScriptProperties(Model): + """Custom setup script properties for a managed dedicated integration runtime. + + :param blob_container_uri: The URI of the Azure blob container that + contains the custom setup script. + :type blob_container_uri: str + :param sas_token: The SAS token of the Azure blob container. + :type sas_token: ~azure.mgmt.datafactory.models.SecureString + """ + + _attribute_map = { + 'blob_container_uri': {'key': 'blobContainerUri', 'type': 'str'}, + 'sas_token': {'key': 'sasToken', 'type': 'SecureString'}, + } + + def __init__(self, *, blob_container_uri: str=None, sas_token=None, **kwargs) -> None: + super(IntegrationRuntimeCustomSetupScriptProperties, self).__init__(**kwargs) + self.blob_container_uri = blob_container_uri + self.sas_token = sas_token diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties.py index cf6f5ee8fb6e..ebc0e9b38d6f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties.py @@ -30,8 +30,8 @@ class IntegrationRuntimeDataProxyProperties(Model): 'path': {'key': 'path', 'type': 'str'}, } - def __init__(self, connect_via=None, staging_linked_service=None, path=None): - super(IntegrationRuntimeDataProxyProperties, self).__init__() - self.connect_via = connect_via - self.staging_linked_service = staging_linked_service - self.path = path + def __init__(self, **kwargs): + super(IntegrationRuntimeDataProxyProperties, self).__init__(**kwargs) + self.connect_via = kwargs.get('connect_via', None) + self.staging_linked_service = kwargs.get('staging_linked_service', None) + self.path = kwargs.get('path', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties_py3.py new file mode 100644 index 000000000000..532b774cad3d --- /dev/null +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties_py3.py @@ -0,0 +1,37 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeDataProxyProperties(Model): + """Data proxy properties for a managed dedicated integration runtime. + + :param connect_via: The self-hosted integration runtime reference. + :type connect_via: ~azure.mgmt.datafactory.models.EntityReference + :param staging_linked_service: The staging linked service reference. + :type staging_linked_service: + ~azure.mgmt.datafactory.models.EntityReference + :param path: The path to contain the staged data in the Blob storage. + :type path: str + """ + + _attribute_map = { + 'connect_via': {'key': 'connectVia', 'type': 'EntityReference'}, + 'staging_linked_service': {'key': 'stagingLinkedService', 'type': 'EntityReference'}, + 'path': {'key': 'path', 'type': 'str'}, + } + + def __init__(self, *, connect_via=None, staging_linked_service=None, path: str=None, **kwargs) -> None: + super(IntegrationRuntimeDataProxyProperties, self).__init__(**kwargs) + self.connect_via = connect_via + self.staging_linked_service = staging_linked_service + self.path = path diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data.py index aa1feac333d5..f7b695729403 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data.py @@ -27,7 +27,7 @@ class IntegrationRuntimeMonitoringData(Model): 'nodes': {'key': 'nodes', 'type': '[IntegrationRuntimeNodeMonitoringData]'}, } - def __init__(self, name=None, nodes=None): - super(IntegrationRuntimeMonitoringData, self).__init__() - self.name = name - self.nodes = nodes + def __init__(self, **kwargs): + super(IntegrationRuntimeMonitoringData, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.nodes = kwargs.get('nodes', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data_py3.py new file mode 100644 index 000000000000..16f3b656c9cc --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data_py3.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeMonitoringData(Model): + """Get monitoring data response. + + :param name: Integration runtime name. + :type name: str + :param nodes: Integration runtime node monitoring data. + :type nodes: + list[~azure.mgmt.datafactory.models.IntegrationRuntimeNodeMonitoringData] + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'nodes': {'key': 'nodes', 'type': '[IntegrationRuntimeNodeMonitoringData]'}, + } + + def __init__(self, *, name: str=None, nodes=None, **kwargs) -> None: + super(IntegrationRuntimeMonitoringData, self).__init__(**kwargs) + self.name = name + self.nodes = nodes diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address.py index a260924f1f16..2edabd3e2472 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address.py @@ -30,6 +30,6 @@ class IntegrationRuntimeNodeIpAddress(Model): 'ip_address': {'key': 'ipAddress', 'type': 'str'}, } - def __init__(self): - super(IntegrationRuntimeNodeIpAddress, self).__init__() + def __init__(self, **kwargs): + super(IntegrationRuntimeNodeIpAddress, self).__init__(**kwargs) self.ip_address = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address_py3.py new file mode 100644 index 000000000000..476be9815984 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address_py3.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeNodeIpAddress(Model): + """The IP address of self-hosted integration runtime node. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar ip_address: The IP address of self-hosted integration runtime node. 
+ :vartype ip_address: str + """ + + _validation = { + 'ip_address': {'readonly': True}, + } + + _attribute_map = { + 'ip_address': {'key': 'ipAddress', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(IntegrationRuntimeNodeIpAddress, self).__init__(**kwargs) + self.ip_address = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data.py index 9f35cc040884..9d27bedf70aa 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data.py @@ -66,9 +66,9 @@ class IntegrationRuntimeNodeMonitoringData(Model): 'received_bytes': {'key': 'receivedBytes', 'type': 'float'}, } - def __init__(self, additional_properties=None): - super(IntegrationRuntimeNodeMonitoringData, self).__init__() - self.additional_properties = additional_properties + def __init__(self, **kwargs): + super(IntegrationRuntimeNodeMonitoringData, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) self.node_name = None self.available_memory_in_mb = None self.cpu_utilization = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data_py3.py new file mode 100644 index 000000000000..35c7e664b2ff --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data_py3.py @@ -0,0 +1,79 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeNodeMonitoringData(Model): + """Monitoring data for integration runtime node. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar node_name: Name of the integration runtime node. + :vartype node_name: str + :ivar available_memory_in_mb: Available memory (MB) on the integration + runtime node. + :vartype available_memory_in_mb: int + :ivar cpu_utilization: CPU percentage on the integration runtime node. + :vartype cpu_utilization: int + :ivar concurrent_jobs_limit: Maximum concurrent jobs on the integration + runtime node. + :vartype concurrent_jobs_limit: int + :ivar concurrent_jobs_running: The number of jobs currently running on the + integration runtime node. + :vartype concurrent_jobs_running: int + :ivar max_concurrent_jobs: The maximum concurrent jobs in this integration + runtime. + :vartype max_concurrent_jobs: int + :ivar sent_bytes: Sent bytes on the integration runtime node. 
+ :vartype sent_bytes: float + :ivar received_bytes: Received bytes on the integration runtime node. + :vartype received_bytes: float + """ + + _validation = { + 'node_name': {'readonly': True}, + 'available_memory_in_mb': {'readonly': True}, + 'cpu_utilization': {'readonly': True}, + 'concurrent_jobs_limit': {'readonly': True}, + 'concurrent_jobs_running': {'readonly': True}, + 'max_concurrent_jobs': {'readonly': True}, + 'sent_bytes': {'readonly': True}, + 'received_bytes': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'node_name': {'key': 'nodeName', 'type': 'str'}, + 'available_memory_in_mb': {'key': 'availableMemoryInMB', 'type': 'int'}, + 'cpu_utilization': {'key': 'cpuUtilization', 'type': 'int'}, + 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, + 'concurrent_jobs_running': {'key': 'concurrentJobsRunning', 'type': 'int'}, + 'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'}, + 'sent_bytes': {'key': 'sentBytes', 'type': 'float'}, + 'received_bytes': {'key': 'receivedBytes', 'type': 'float'}, + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(IntegrationRuntimeNodeMonitoringData, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.node_name = None + self.available_memory_in_mb = None + self.cpu_utilization = None + self.concurrent_jobs_limit = None + self.concurrent_jobs_running = None + self.max_concurrent_jobs = None + self.sent_bytes = None + self.received_bytes = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_py3.py new file mode 100644 index 000000000000..b4056a07591b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_py3.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntime(Model): + """Azure Data Factory nested object which serves as a compute resource for + activities. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SelfHostedIntegrationRuntime, ManagedIntegrationRuntime + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Integration runtime description. + :type description: str + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SelfHosted': 'SelfHostedIntegrationRuntime', 'Managed': 'ManagedIntegrationRuntime'} + } + + def __init__(self, *, additional_properties=None, description: str=None, **kwargs) -> None: + super(IntegrationRuntime, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.description = description + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference.py index 507b578a2cd8..7461d29de284 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference.py @@ -18,10 +18,12 @@ class IntegrationRuntimeReference(Model): Variables are only populated by the server, and will be ignored when sending a request. - :ivar type: Type of integration runtime. Default value: + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Type of integration runtime. Default value: "IntegrationRuntimeReference" . :vartype type: str - :param reference_name: Reference integration runtime name. + :param reference_name: Required. Reference integration runtime name. :type reference_name: str :param parameters: Arguments for integration runtime. :type parameters: dict[str, object] @@ -40,7 +42,7 @@ class IntegrationRuntimeReference(Model): type = "IntegrationRuntimeReference" - def __init__(self, reference_name, parameters=None): - super(IntegrationRuntimeReference, self).__init__() - self.reference_name = reference_name - self.parameters = parameters + def __init__(self, **kwargs): + super(IntegrationRuntimeReference, self).__init__(**kwargs) + self.reference_name = kwargs.get('reference_name', None) + self.parameters = kwargs.get('parameters', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference_py3.py new file mode 100644 index 000000000000..56fd3608ba61 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference_py3.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeReference(Model): + """Integration runtime reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Type of integration runtime. Default value: + "IntegrationRuntimeReference" . 
+ :vartype type: str + :param reference_name: Required. Reference integration runtime name. + :type reference_name: str + :param parameters: Arguments for integration runtime. + :type parameters: dict[str, object] + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + type = "IntegrationRuntimeReference" + + def __init__(self, *, reference_name: str, parameters=None, **kwargs) -> None: + super(IntegrationRuntimeReference, self).__init__(**kwargs) + self.reference_name = reference_name + self.parameters = parameters diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters.py index f8b4a57d8ff0..3cd91195af1b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters.py @@ -25,6 +25,6 @@ class IntegrationRuntimeRegenerateKeyParameters(Model): 'key_name': {'key': 'keyName', 'type': 'str'}, } - def __init__(self, key_name=None): - super(IntegrationRuntimeRegenerateKeyParameters, self).__init__() - self.key_name = key_name + def __init__(self, **kwargs): + super(IntegrationRuntimeRegenerateKeyParameters, self).__init__(**kwargs) + self.key_name = kwargs.get('key_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters_py3.py new file mode 100644 index 000000000000..f3846cf8ec55 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters_py3.py @@ -0,0 +1,30 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeRegenerateKeyParameters(Model): + """Parameters to regenerate the authentication key. + + :param key_name: The name of the authentication key to regenerate. 
+ Possible values include: 'authKey1', 'authKey2' + :type key_name: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeyName + """ + + _attribute_map = { + 'key_name': {'key': 'keyName', 'type': 'str'}, + } + + def __init__(self, *, key_name=None, **kwargs) -> None: + super(IntegrationRuntimeRegenerateKeyParameters, self).__init__(**kwargs) + self.key_name = key_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource.py index 8568ed26cb1e..b18f376d3698 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource.py @@ -18,6 +18,8 @@ class IntegrationRuntimeResource(SubResource): Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. + :ivar id: The resource identifier. :vartype id: str :ivar name: The resource name. @@ -26,7 +28,7 @@ class IntegrationRuntimeResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param properties: Integration runtime properties. + :param properties: Required. Integration runtime properties. :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntime """ @@ -46,6 +48,6 @@ class IntegrationRuntimeResource(SubResource): 'properties': {'key': 'properties', 'type': 'IntegrationRuntime'}, } - def __init__(self, properties): - super(IntegrationRuntimeResource, self).__init__() - self.properties = properties + def __init__(self, **kwargs): + super(IntegrationRuntimeResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource_py3.py new file mode 100644 index 000000000000..9239f54166f9 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource_py3.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .sub_resource_py3 import SubResource + + +class IntegrationRuntimeResource(SubResource): + """Integration runtime resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Integration runtime properties. 
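A sketch of the regenerate-key payload above; key_name takes one of the IntegrationRuntimeAuthKeyName values ('authKey1' or 'authKey2'). The operation call in the comment is from memory and the resource names are placeholders; depending on the generator settings the operation may flatten this model into a bare key_name argument.

from azure.mgmt.datafactory.models import IntegrationRuntimeRegenerateKeyParameters

params = IntegrationRuntimeRegenerateKeyParameters(key_name='authKey2')
# Typically handed to the integration runtimes operations group, e.g.:
# client.integration_runtimes.regenerate_auth_key(
#     'myResourceGroup', 'myFactory', 'mySelfHostedIR', params)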
+ :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntime + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'IntegrationRuntime'}, + } + + def __init__(self, *, properties, **kwargs) -> None: + super(IntegrationRuntimeResource, self).__init__(**kwargs) + self.properties = properties diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info.py index 0c7e9dc74878..3399f8f38300 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info.py @@ -46,10 +46,10 @@ class IntegrationRuntimeSsisCatalogInfo(Model): 'catalog_pricing_tier': {'key': 'catalogPricingTier', 'type': 'str'}, } - def __init__(self, additional_properties=None, catalog_server_endpoint=None, catalog_admin_user_name=None, catalog_admin_password=None, catalog_pricing_tier=None): - super(IntegrationRuntimeSsisCatalogInfo, self).__init__() - self.additional_properties = additional_properties - self.catalog_server_endpoint = catalog_server_endpoint - self.catalog_admin_user_name = catalog_admin_user_name - self.catalog_admin_password = catalog_admin_password - self.catalog_pricing_tier = catalog_pricing_tier + def __init__(self, **kwargs): + super(IntegrationRuntimeSsisCatalogInfo, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.catalog_server_endpoint = kwargs.get('catalog_server_endpoint', None) + self.catalog_admin_user_name = kwargs.get('catalog_admin_user_name', None) + self.catalog_admin_password = kwargs.get('catalog_admin_password', None) + self.catalog_pricing_tier = kwargs.get('catalog_pricing_tier', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info_py3.py new file mode 100644 index 000000000000..27996bb4aeb5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info_py3.py @@ -0,0 +1,55 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeSsisCatalogInfo(Model): + """Catalog information for managed dedicated integration runtime. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param catalog_server_endpoint: The catalog database server URL. + :type catalog_server_endpoint: str + :param catalog_admin_user_name: The administrator user name of catalog + database. + :type catalog_admin_user_name: str + :param catalog_admin_password: The password of the administrator user + account of the catalog database. + :type catalog_admin_password: ~azure.mgmt.datafactory.models.SecureString + :param catalog_pricing_tier: The pricing tier for the catalog database. + The valid values could be found in + https://azure.microsoft.com/en-us/pricing/details/sql-database/. Possible + values include: 'Basic', 'Standard', 'Premium', 'PremiumRS' + :type catalog_pricing_tier: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogPricingTier + """ + + _validation = { + 'catalog_admin_user_name': {'max_length': 128, 'min_length': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'catalog_server_endpoint': {'key': 'catalogServerEndpoint', 'type': 'str'}, + 'catalog_admin_user_name': {'key': 'catalogAdminUserName', 'type': 'str'}, + 'catalog_admin_password': {'key': 'catalogAdminPassword', 'type': 'SecureString'}, + 'catalog_pricing_tier': {'key': 'catalogPricingTier', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, catalog_server_endpoint: str=None, catalog_admin_user_name: str=None, catalog_admin_password=None, catalog_pricing_tier=None, **kwargs) -> None: + super(IntegrationRuntimeSsisCatalogInfo, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.catalog_server_endpoint = catalog_server_endpoint + self.catalog_admin_user_name = catalog_admin_user_name + self.catalog_admin_password = catalog_admin_password + self.catalog_pricing_tier = catalog_pricing_tier diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py index 3de1e2b2e9a3..293f071aa0b3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py @@ -49,11 +49,11 @@ class IntegrationRuntimeSsisProperties(Model): 'edition': {'key': 'edition', 'type': 'str'}, } - def __init__(self, additional_properties=None, catalog_info=None, license_type=None, custom_setup_script_properties=None, data_proxy_properties=None, edition=None): - super(IntegrationRuntimeSsisProperties, self).__init__() - self.additional_properties = additional_properties - self.catalog_info = catalog_info - self.license_type = license_type - self.custom_setup_script_properties = custom_setup_script_properties - self.data_proxy_properties = data_proxy_properties - self.edition = edition + def __init__(self, **kwargs): + super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.catalog_info = kwargs.get('catalog_info', None) + self.license_type = kwargs.get('license_type', None) + self.custom_setup_script_properties = kwargs.get('custom_setup_script_properties', None) + self.data_proxy_properties = kwargs.get('data_proxy_properties', None) + self.edition = 
kwargs.get('edition', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py new file mode 100644 index 000000000000..f75775e29a7f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py @@ -0,0 +1,59 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeSsisProperties(Model): + """SSIS properties for managed integration runtime. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param catalog_info: Catalog information for managed dedicated integration + runtime. + :type catalog_info: + ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogInfo + :param license_type: License type for bringing your own license scenario. + Possible values include: 'BasePrice', 'LicenseIncluded' + :type license_type: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeLicenseType + :param custom_setup_script_properties: Custom setup script properties for + a managed dedicated integration runtime. + :type custom_setup_script_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeCustomSetupScriptProperties + :param data_proxy_properties: Data proxy properties for a managed + dedicated integration runtime. + :type data_proxy_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeDataProxyProperties + :param edition: The edition for the SSIS Integration Runtime. 
Possible + values include: 'Standard', 'Enterprise' + :type edition: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeEdition + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'catalog_info': {'key': 'catalogInfo', 'type': 'IntegrationRuntimeSsisCatalogInfo'}, + 'license_type': {'key': 'licenseType', 'type': 'str'}, + 'custom_setup_script_properties': {'key': 'customSetupScriptProperties', 'type': 'IntegrationRuntimeCustomSetupScriptProperties'}, + 'data_proxy_properties': {'key': 'dataProxyProperties', 'type': 'IntegrationRuntimeDataProxyProperties'}, + 'edition': {'key': 'edition', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, catalog_info=None, license_type=None, custom_setup_script_properties=None, data_proxy_properties=None, edition=None, **kwargs) -> None: + super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.catalog_info = catalog_info + self.license_type = license_type + self.custom_setup_script_properties = custom_setup_script_properties + self.data_proxy_properties = data_proxy_properties + self.edition = edition diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status.py index b2645851fb8f..64da6347f9ed 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status.py @@ -22,6 +22,8 @@ class IntegrationRuntimeStatus(Model): Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -33,7 +35,7 @@ class IntegrationRuntimeStatus(Model): 'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied' :vartype state: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeState - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str """ @@ -54,9 +56,9 @@ class IntegrationRuntimeStatus(Model): 'type': {'SelfHosted': 'SelfHostedIntegrationRuntimeStatus', 'Managed': 'ManagedIntegrationRuntimeStatus'} } - def __init__(self, additional_properties=None): - super(IntegrationRuntimeStatus, self).__init__() - self.additional_properties = additional_properties + def __init__(self, **kwargs): + super(IntegrationRuntimeStatus, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) self.data_factory_name = None self.state = None self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response.py index 23d5c95fcd28..9382b4b08fde 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response.py @@ -15,7 +15,9 @@ class IntegrationRuntimeStatusListResponse(Model): """A list of integration runtime status. 
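Putting the two SSIS models above together: a minimal sketch of catalog-backed SSIS properties for a managed integration runtime. The server endpoint and credentials are placeholders; SecureString wraps the catalog admin password as the docstring requires.

from azure.mgmt.datafactory.models import (
    IntegrationRuntimeSsisCatalogInfo, IntegrationRuntimeSsisProperties, SecureString)

ssis = IntegrationRuntimeSsisProperties(
    catalog_info=IntegrationRuntimeSsisCatalogInfo(
        catalog_server_endpoint='myserver.database.windows.net',
        catalog_admin_user_name='sqladmin',           # 1-128 chars per _validation
        catalog_admin_password=SecureString(value='<password>'),
        catalog_pricing_tier='Standard'),
    license_type='LicenseIncluded',
    edition='Standard')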
- :param value: List of integration runtime status. + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of integration runtime status. :type value: list[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse] :param next_link: The link to the next page of results, if any remaining @@ -32,7 +34,7 @@ class IntegrationRuntimeStatusListResponse(Model): 'next_link': {'key': 'nextLink', 'type': 'str'}, } - def __init__(self, value, next_link=None): - super(IntegrationRuntimeStatusListResponse, self).__init__() - self.value = value - self.next_link = next_link + def __init__(self, **kwargs): + super(IntegrationRuntimeStatusListResponse, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.next_link = kwargs.get('next_link', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response_py3.py new file mode 100644 index 000000000000..bed71f74ffc6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response_py3.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeStatusListResponse(Model): + """A list of integration runtime status. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of integration runtime status. + :type value: + list[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse] + :param next_link: The link to the next page of results, if any remaining + results exist. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[IntegrationRuntimeStatusResponse]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__(self, *, value, next_link: str=None, **kwargs) -> None: + super(IntegrationRuntimeStatusListResponse, self).__init__(**kwargs) + self.value = value + self.next_link = next_link diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_py3.py new file mode 100644 index 000000000000..8541e04dc679 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_py3.py @@ -0,0 +1,64 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeStatus(Model): + """Integration runtime status. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SelfHostedIntegrationRuntimeStatus, + ManagedIntegrationRuntimeStatus + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar data_factory_name: The data factory name which the integration + runtime belong to. + :vartype data_factory_name: str + :ivar state: The state of integration runtime. Possible values include: + 'Initial', 'Stopped', 'Started', 'Starting', 'Stopping', + 'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied' + :vartype state: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeState + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'data_factory_name': {'readonly': True}, + 'state': {'readonly': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, + 'state': {'key': 'state', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SelfHosted': 'SelfHostedIntegrationRuntimeStatus', 'Managed': 'ManagedIntegrationRuntimeStatus'} + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(IntegrationRuntimeStatus, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.data_factory_name = None + self.state = None + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response.py index 89b045642459..901b4d8b7442 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response.py @@ -18,9 +18,11 @@ class IntegrationRuntimeStatusResponse(Model): Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. + :ivar name: The integration runtime name. :vartype name: str - :param properties: Integration runtime properties. + :param properties: Required. Integration runtime properties. 
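Because IntegrationRuntimeStatus above carries a _subtype_map, msrest deserializes the server payload into SelfHostedIntegrationRuntimeStatus or ManagedIntegrationRuntimeStatus based on the `type` discriminator, and the interesting fields are read-only. A sketch, assuming an authenticated DataFactoryManagementClient; the operation name is from memory and the resource names are placeholders.

status_response = client.integration_runtimes.get_status(
    'myResourceGroup', 'myFactory', 'mySelfHostedIR')
status = status_response.properties    # an IntegrationRuntimeStatus subclass
print(type(status).__name__)           # e.g. SelfHostedIntegrationRuntimeStatus
print(status.state)                    # e.g. 'Online' (populated by the server)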
:type properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatus """ @@ -34,7 +36,7 @@ class IntegrationRuntimeStatusResponse(Model): 'properties': {'key': 'properties', 'type': 'IntegrationRuntimeStatus'}, } - def __init__(self, properties): - super(IntegrationRuntimeStatusResponse, self).__init__() + def __init__(self, **kwargs): + super(IntegrationRuntimeStatusResponse, self).__init__(**kwargs) self.name = None - self.properties = properties + self.properties = kwargs.get('properties', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response_py3.py new file mode 100644 index 000000000000..64d84a1e4f19 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response_py3.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeStatusResponse(Model): + """Integration runtime status response. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar name: The integration runtime name. + :vartype name: str + :param properties: Required. Integration runtime properties. 
+ :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatus + """ + + _validation = { + 'name': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'IntegrationRuntimeStatus'}, + } + + def __init__(self, *, properties, **kwargs) -> None: + super(IntegrationRuntimeStatusResponse, self).__init__(**kwargs) + self.name = None + self.properties = properties diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties.py index 702723a2f067..752b5b99eb60 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties.py @@ -31,8 +31,8 @@ class IntegrationRuntimeVNetProperties(Model): 'subnet': {'key': 'subnet', 'type': 'str'}, } - def __init__(self, additional_properties=None, v_net_id=None, subnet=None): - super(IntegrationRuntimeVNetProperties, self).__init__() - self.additional_properties = additional_properties - self.v_net_id = v_net_id - self.subnet = subnet + def __init__(self, **kwargs): + super(IntegrationRuntimeVNetProperties, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.v_net_id = kwargs.get('v_net_id', None) + self.subnet = kwargs.get('subnet', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties_py3.py new file mode 100644 index 000000000000..32e8beb31ea1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties_py3.py @@ -0,0 +1,38 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeVNetProperties(Model): + """VNet properties for managed integration runtime. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param v_net_id: The ID of the VNet that this integration runtime will + join. + :type v_net_id: str + :param subnet: The name of the subnet this integration runtime will join. 
+ :type subnet: str + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'v_net_id': {'key': 'vNetId', 'type': 'str'}, + 'subnet': {'key': 'subnet', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, v_net_id: str=None, subnet: str=None, **kwargs) -> None: + super(IntegrationRuntimeVNetProperties, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.v_net_id = v_net_id + self.subnet = subnet diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service.py index 4f6d09869599..517cdd63caa5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service.py @@ -15,6 +15,8 @@ class JiraLinkedService(LinkedService): """Jira Service linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,16 +31,17 @@ class JiraLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param host: The IP address or host name of the Jira service. (e.g. - jira.example.com) + :param host: Required. The IP address or host name of the Jira service. + (e.g. jira.example.com) :type host: object :param port: The TCP port that the Jira server uses to listen for client connections. The default value is 443 if connecting through HTTPS, or 8080 if connecting through HTTP. :type port: object - :param username: The user name that you use to access Jira Service. + :param username: Required. The user name that you use to access Jira + Service. :type username: object :param password: The password corresponding to the user name that you provided in the username field. 
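A sketch of the VNet properties model above, which lets a managed integration runtime join a subnet (the IDs are placeholders).

from azure.mgmt.datafactory.models import IntegrationRuntimeVNetProperties

vnet = IntegrationRuntimeVNetProperties(
    v_net_id='00000000-0000-0000-0000-000000000000',  # ID of the VNet to join
    subnet='my-subnet')                               # subnet name within that VNet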
@@ -82,14 +85,14 @@ class JiraLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, host, username, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, port=None, password=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None): - super(JiraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.host = host - self.port = port - self.username = username - self.password = password - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(JiraLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.port = kwargs.get('port', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'Jira' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service_py3.py new file mode 100644 index 000000000000..82dc8d578da3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service_py3.py @@ -0,0 +1,98 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class JiraLinkedService(LinkedService): + """Jira Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The IP address or host name of the Jira service. + (e.g. jira.example.com) + :type host: object + :param port: The TCP port that the Jira server uses to listen for client + connections. 
The default value is 443 if connecting through HTTPS, or 8080 + if connecting through HTTP. + :type port: object + :param username: Required. The user name that you use to access Jira + Service. + :type username: object + :param password: The password corresponding to the user name that you + provided in the username field. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, password=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(JiraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.port = port + self.username = username + self.password = password + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Jira' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset.py index d8533fbefd38..1c2b12c18e15 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset.py +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset.py @@ -15,6 +15,8 @@ class JiraObjectDataset(Dataset): """Jira Service dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class JiraObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class JiraObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -64,7 +66,7 @@ class JiraObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): - super(JiraObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name + def __init__(self, **kwargs): + super(JiraObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) self.type = 'JiraObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset_py3.py new file mode 100644 index 000000000000..3c061b238cde --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class JiraObjectDataset(Dataset): + """Jira Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. 
Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(JiraObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'JiraObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source.py index 86a0f16222da..709da0ce1205 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source.py @@ -15,6 +15,8 @@ class JiraSource(CopySource): """A copy activity Jira Service source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class JiraSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
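With host and username now marked Required on the Jira linked service, the three Jira models in this patch compose as below; a minimal sketch (endpoint, credentials, and reference names are placeholders).

from azure.mgmt.datafactory.models import (
    JiraLinkedService, JiraObjectDataset, JiraSource,
    LinkedServiceReference, SecureString)

jira_ls = JiraLinkedService(
    host='jira.example.com',                  # required
    username='build-bot',                     # required
    password=SecureString(value='<secret>'))

jira_ds = JiraObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='JiraLinkedService'),
    table_name='Issues')

jira_src = JiraSource(
    query='SELECT * FROM Issues',   # pushed to the connector as-is
    source_retry_count=3,
    source_retry_wait='00:00:30')   # matches the documented hh:mm:ss pattern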
@@ -49,7 +51,7 @@ class JiraSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(JiraSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'JiraSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source_py3.py new file mode 100644 index 000000000000..c958c8351bb3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class JiraSource(CopySource): + """A copy activity Jira Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'JiraSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_dataset.py index 8626e76359c9..c1cee8f00b8d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_dataset.py @@ -15,6 +15,8 @@ class JsonDataset(Dataset): """Json dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class JsonDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,9 +41,9 @@ class JsonDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param location: The location of the json data storage. + :param location: Required. The location of the json data storage. :type location: ~azure.mgmt.datafactory.models.DatasetLocation :param encoding_name: The code page name of the preferred encoding. 
If not specified, the default value is UTF-8, unless BOM denotes another Unicode @@ -75,9 +77,9 @@ class JsonDataset(Dataset): 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } - def __init__(self, linked_service_name, location, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, encoding_name=None, compression=None): - super(JsonDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.location = location - self.encoding_name = encoding_name - self.compression = compression + def __init__(self, **kwargs): + super(JsonDataset, self).__init__(**kwargs) + self.location = kwargs.get('location', None) + self.encoding_name = kwargs.get('encoding_name', None) + self.compression = kwargs.get('compression', None) self.type = 'Json' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_dataset_py3.py new file mode 100644 index 000000000000..564fe3bebf6b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_dataset_py3.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class JsonDataset(Dataset): + """Json dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the json data storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param encoding_name: The code page name of the preferred encoding. 
If not + specified, the default value is UTF-8, unless BOM denotes another Unicode + encoding. Refer to the name column of the table in the following link to + set supported values: + https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string + (or Expression with resultType string). + :type encoding_name: object + :param compression: The data compression method used for the json dataset. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, encoding_name=None, compression=None, **kwargs) -> None: + super(JsonDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.location = location + self.encoding_name = encoding_name + self.compression = compression + self.type = 'Json' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py index 8a50bef90c81..80f4ff0aaf8b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py @@ -15,6 +15,8 @@ class JsonFormat(DatasetStorageFormat): """The data stored in JSON format. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -24,7 +26,7 @@ class JsonFormat(DatasetStorageFormat): :param deserializer: Deserializer. Type: string (or Expression with resultType string). :type deserializer: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param file_pattern: File pattern of JSON. To be more specific, the way of separating a collection of JSON objects. 
The default value is @@ -70,11 +72,11 @@ class JsonFormat(DatasetStorageFormat): 'json_path_definition': {'key': 'jsonPathDefinition', 'type': 'object'}, } - def __init__(self, additional_properties=None, serializer=None, deserializer=None, file_pattern=None, nesting_separator=None, encoding_name=None, json_node_reference=None, json_path_definition=None): - super(JsonFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer) - self.file_pattern = file_pattern - self.nesting_separator = nesting_separator - self.encoding_name = encoding_name - self.json_node_reference = json_node_reference - self.json_path_definition = json_path_definition + def __init__(self, **kwargs): + super(JsonFormat, self).__init__(**kwargs) + self.file_pattern = kwargs.get('file_pattern', None) + self.nesting_separator = kwargs.get('nesting_separator', None) + self.encoding_name = kwargs.get('encoding_name', None) + self.json_node_reference = kwargs.get('json_node_reference', None) + self.json_path_definition = kwargs.get('json_path_definition', None) self.type = 'JsonFormat' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py new file mode 100644 index 000000000000..2fdb44cc3b7f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_storage_format_py3 import DatasetStorageFormat + + +class JsonFormat(DatasetStorageFormat): + """The data stored in JSON format. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param serializer: Serializer. Type: string (or Expression with resultType + string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with + resultType string). + :type deserializer: object + :param type: Required. Constant filled by server. + :type type: str + :param file_pattern: File pattern of JSON. To be more specific, the way of + separating a collection of JSON objects. The default value is + 'setOfObjects'. It is case-sensitive. + :type file_pattern: object + :param nesting_separator: The character used to separate nesting levels. + Default value is '.' (dot). Type: string (or Expression with resultType + string). + :type nesting_separator: object + :param encoding_name: The code page name of the preferred encoding. If not + provided, the default value is 'utf-8', unless the byte order mark (BOM) + denotes another Unicode encoding. The full list of supported values can be + found in the 'Name' column of the table of encodings in the following + reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string + (or Expression with resultType string). 
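JsonDataset above now requires a location. A sketch under the assumption that, in this version of the models, DatasetLocation takes a plain `type` string plus folder/file path fields; the concrete type string, paths, and reference name below are illustrative assumptions.

from azure.mgmt.datafactory.models import (
    DatasetLocation, JsonDataset, LinkedServiceReference)

location = DatasetLocation(            # assumed shape for this SDK version
    type='AzureBlobStorageLocation',   # assumed storage location type string
    folder_path='container/raw',
    file_name='events.json')

json_ds = JsonDataset(
    linked_service_name=LinkedServiceReference(reference_name='BlobStorageLS'),
    location=location,
    encoding_name='UTF-8')             # optional; UTF-8 is already the default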
+ :type encoding_name: object + :param json_node_reference: The JSONPath of the JSON array element to be + flattened. Example: "$.ArrayPath". Type: string (or Expression with + resultType string). + :type json_node_reference: object + :param json_path_definition: The JSONPath definition for each column + mapping with a customized column name to extract data from JSON file. For + fields under root object, start with "$"; for fields inside the array + chosen by jsonNodeReference property, start from the array element. + Example: {"Column1": "$.Column1Path", "Column2": "Column2PathInArray"}. + Type: object (or Expression with resultType object). + :type json_path_definition: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'file_pattern': {'key': 'filePattern', 'type': 'object'}, + 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + 'encoding_name': {'key': 'encodingName', 'type': 'object'}, + 'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'}, + 'json_path_definition': {'key': 'jsonPathDefinition', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, file_pattern=None, nesting_separator=None, encoding_name=None, json_node_reference=None, json_path_definition=None, **kwargs) -> None: + super(JsonFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) + self.file_pattern = file_pattern + self.nesting_separator = nesting_separator + self.encoding_name = encoding_name + self.json_node_reference = json_node_reference + self.json_path_definition = json_path_definition + self.type = 'JsonFormat' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_sink.py index 93c9675fce43..829344338672 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_sink.py @@ -15,6 +15,8 @@ class JsonSink(CopySink): """A copy activity Json sink. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -36,7 +38,7 @@ class JsonSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param store_settings: Json store settings. 
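Taken together, jsonNodeReference and jsonPathDefinition let one nested JSON document fan out into tabular columns. A minimal sketch using the conventions the docstring describes (the field names are illustrative):

    from azure.mgmt.datafactory.models import JsonFormat

    fmt = JsonFormat(
        file_pattern='setOfObjects',       # the case-sensitive default
        nesting_separator='.',
        json_node_reference='$.orders',    # the array element to flatten
        json_path_definition={
            'CustomerName': '$.customer.name',  # rooted at the document ($)
            'OrderId': 'id',                    # relative to each $.orders item
        },
    )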
:type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings @@ -60,8 +62,8 @@ class JsonSink(CopySink): 'format_settings': {'key': 'formatSettings', 'type': 'JsonWriteSettings'}, } - def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None): - super(JsonSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.store_settings = store_settings - self.format_settings = format_settings + def __init__(self, **kwargs): + super(JsonSink, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) self.type = 'JsonSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_sink_py3.py new file mode 100644 index 000000000000..3212bb4784d8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_sink_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class JsonSink(CopySink): + """A copy activity Json sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Json store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :param format_settings: Json format settings. 
+ :type format_settings: ~azure.mgmt.datafactory.models.JsonWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'JsonWriteSettings'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: + super(JsonSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.format_settings = format_settings + self.type = 'JsonSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_source.py index 88babd043c9d..a3349ee1d39e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_source.py @@ -15,6 +15,8 @@ class JsonSource(CopySource): """A copy activity Json source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class JsonSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param store_settings: Json store settings. 
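JsonSink layers two settings objects on top of the generic CopySink knobs. A small sketch (store settings omitted; the timespan string follows the documented pattern, and the type value passed to JsonWriteSettings is an assumption since the write setting type string is not spelled out here):

    from azure.mgmt.datafactory.models import JsonSink, JsonWriteSettings

    sink = JsonSink(
        write_batch_size=500,
        write_batch_timeout='00:30:00',   # matches ((\d+)\.)?(\d\d):MM:SS
        format_settings=JsonWriteSettings(
            type='JsonWriteSettings',     # required field; value assumed
            file_pattern='arrayOfObjects',
        ),
    )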
:type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings @@ -48,7 +50,7 @@ class JsonSource(CopySource): 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None): - super(JsonSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.store_settings = store_settings + def __init__(self, **kwargs): + super(JsonSource, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) self.type = 'JsonSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_source_py3.py new file mode 100644 index 000000000000..7e5b73662801 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_source_py3.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class JsonSource(CopySource): + """A copy activity Json source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Json store settings. 
+ :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + super(JsonSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.type = 'JsonSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_write_settings.py index 726307c33016..287da9805170 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_write_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_write_settings.py @@ -15,10 +15,12 @@ class JsonWriteSettings(FormatWriteSettings): """Json write settings. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: The write setting type. + :param type: Required. The write setting type. :type type: str :param file_pattern: File pattern of JSON. This setting controls the way a collection of JSON objects will be treated. The default value is @@ -38,6 +40,6 @@ class JsonWriteSettings(FormatWriteSettings): 'file_pattern': {'key': 'filePattern', 'type': 'str'}, } - def __init__(self, type, additional_properties=None, file_pattern=None): - super(JsonWriteSettings, self).__init__(additional_properties=additional_properties, type=type) - self.file_pattern = file_pattern + def __init__(self, **kwargs): + super(JsonWriteSettings, self).__init__(**kwargs) + self.file_pattern = kwargs.get('file_pattern', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_write_settings_py3.py new file mode 100644 index 000000000000..f78f57eb1187 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_write_settings_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .format_write_settings_py3 import FormatWriteSettings + + +class JsonWriteSettings(FormatWriteSettings): + """Json write settings. 
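By contrast, JsonSource adds only store_settings on top of the base CopySource retry knobs, so a minimal instance needs nothing at all. A short sketch:

    from azure.mgmt.datafactory.models import JsonSource

    source = JsonSource(
        source_retry_count=3,
        source_retry_wait='00:00:30',  # timespan pattern from the docstring
    )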
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param file_pattern: File pattern of JSON. This setting controls the way a + collection of JSON objects will be treated. The default value is + 'setOfObjects'. It is case-sensitive. Possible values include: + 'setOfObjects', 'arrayOfObjects' + :type file_pattern: str or + ~azure.mgmt.datafactory.models.JsonWriteFilePattern + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'file_pattern': {'key': 'filePattern', 'type': 'str'}, + } + + def __init__(self, *, type: str, additional_properties=None, file_pattern=None, **kwargs) -> None: + super(JsonWriteSettings, self).__init__(additional_properties=additional_properties, type=type, **kwargs) + self.file_pattern = file_pattern diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime.py index 1a733032b07e..f4a4e7eb8bf0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime.py @@ -49,8 +49,8 @@ class LinkedIntegrationRuntime(Model): 'create_time': {'key': 'createTime', 'type': 'iso-8601'}, } - def __init__(self): - super(LinkedIntegrationRuntime, self).__init__() + def __init__(self, **kwargs): + super(LinkedIntegrationRuntime, self).__init__(**kwargs) self.name = None self.subscription_id = None self.data_factory_name = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization.py index 586f4bd348f0..b7be47e8f096 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization.py @@ -15,9 +15,11 @@ class LinkedIntegrationRuntimeKeyAuthorization(LinkedIntegrationRuntimeType): """The key authorization type integration runtime. - :param authorization_type: Constant filled by server. + All required parameters must be populated in order to send to Azure. + + :param authorization_type: Required. Constant filled by server. :type authorization_type: str - :param key: The key used for authorization. + :param key: Required. The key used for authorization. 
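Unlike the other JSON models, file_pattern on the write settings is typed against the new JsonWriteFilePattern enum rather than a bare object; plain strings also pass through. The enum member names below follow the usual AutoRest snake_case convention and are an assumption:

    from azure.mgmt.datafactory.models import (
        JsonWriteFilePattern, JsonWriteSettings)

    settings = JsonWriteSettings(
        type='JsonWriteSettings',  # required; value assumed as before
        file_pattern=JsonWriteFilePattern.array_of_objects,  # or 'arrayOfObjects'
    )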
:type key: ~azure.mgmt.datafactory.models.SecureString """ @@ -31,7 +33,7 @@ class LinkedIntegrationRuntimeKeyAuthorization(LinkedIntegrationRuntimeType): 'key': {'key': 'key', 'type': 'SecureString'}, } - def __init__(self, key): - super(LinkedIntegrationRuntimeKeyAuthorization, self).__init__() - self.key = key + def __init__(self, **kwargs): + super(LinkedIntegrationRuntimeKeyAuthorization, self).__init__(**kwargs) + self.key = kwargs.get('key', None) self.authorization_type = 'Key' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization_py3.py new file mode 100644 index 000000000000..4a2ebd8d1003 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization_py3.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_integration_runtime_type_py3 import LinkedIntegrationRuntimeType + + +class LinkedIntegrationRuntimeKeyAuthorization(LinkedIntegrationRuntimeType): + """The key authorization type integration runtime. + + All required parameters must be populated in order to send to Azure. + + :param authorization_type: Required. Constant filled by server. + :type authorization_type: str + :param key: Required. The key used for authorization. + :type key: ~azure.mgmt.datafactory.models.SecureString + """ + + _validation = { + 'authorization_type': {'required': True}, + 'key': {'required': True}, + } + + _attribute_map = { + 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, + 'key': {'key': 'key', 'type': 'SecureString'}, + } + + def __init__(self, *, key, **kwargs) -> None: + super(LinkedIntegrationRuntimeKeyAuthorization, self).__init__(**kwargs) + self.key = key + self.authorization_type = 'Key' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_py3.py new file mode 100644 index 000000000000..6c831ab5f511 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_py3.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class LinkedIntegrationRuntime(Model): + """The linked integration runtime information. + + Variables are only populated by the server, and will be ignored when + sending a request. 
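The key flavour carries the shared integration runtime's authorization key as a SecureString; authorization_type is pinned by the subclass rather than passed by the caller. A minimal sketch:

    from azure.mgmt.datafactory.models import (
        LinkedIntegrationRuntimeKeyAuthorization, SecureString)

    auth = LinkedIntegrationRuntimeKeyAuthorization(
        key=SecureString(value='<integration-runtime-auth-key>'),
    )
    assert auth.authorization_type == 'Key'  # set in __init__, constant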
+ + :ivar name: The name of the linked integration runtime. + :vartype name: str + :ivar subscription_id: The subscription ID for which the linked + integration runtime belong to. + :vartype subscription_id: str + :ivar data_factory_name: The name of the data factory for which the linked + integration runtime belong to. + :vartype data_factory_name: str + :ivar data_factory_location: The location of the data factory for which + the linked integration runtime belong to. + :vartype data_factory_location: str + :ivar create_time: The creating time of the linked integration runtime. + :vartype create_time: datetime + """ + + _validation = { + 'name': {'readonly': True}, + 'subscription_id': {'readonly': True}, + 'data_factory_name': {'readonly': True}, + 'data_factory_location': {'readonly': True}, + 'create_time': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, + 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, + 'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'}, + 'create_time': {'key': 'createTime', 'type': 'iso-8601'}, + } + + def __init__(self, **kwargs) -> None: + super(LinkedIntegrationRuntime, self).__init__(**kwargs) + self.name = None + self.subscription_id = None + self.data_factory_name = None + self.data_factory_location = None + self.create_time = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization.py index e5b1d30fe428..3fbc8dd9cac2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization.py @@ -16,10 +16,12 @@ class LinkedIntegrationRuntimeRbacAuthorization(LinkedIntegrationRuntimeType): """The role based access control (RBAC) authorization type integration runtime. - :param authorization_type: Constant filled by server. + All required parameters must be populated in order to send to Azure. + + :param authorization_type: Required. Constant filled by server. :type authorization_type: str - :param resource_id: The resource identifier of the integration runtime to - be shared. + :param resource_id: Required. The resource identifier of the integration + runtime to be shared. 
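Every attribute on LinkedIntegrationRuntime is readonly, so its constructor takes no data arguments; instances only carry values when they come back from the service:

    from azure.mgmt.datafactory.models import LinkedIntegrationRuntime

    runtime = LinkedIntegrationRuntime()
    assert runtime.name is None  # populated by the server on responses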
:type resource_id: str """ @@ -33,7 +35,7 @@ class LinkedIntegrationRuntimeRbacAuthorization(LinkedIntegrationRuntimeType): 'resource_id': {'key': 'resourceId', 'type': 'str'}, } - def __init__(self, resource_id): - super(LinkedIntegrationRuntimeRbacAuthorization, self).__init__() - self.resource_id = resource_id + def __init__(self, **kwargs): + super(LinkedIntegrationRuntimeRbacAuthorization, self).__init__(**kwargs) + self.resource_id = kwargs.get('resource_id', None) self.authorization_type = 'RBAC' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization_py3.py new file mode 100644 index 000000000000..055b64809e18 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization_py3.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_integration_runtime_type_py3 import LinkedIntegrationRuntimeType + + +class LinkedIntegrationRuntimeRbacAuthorization(LinkedIntegrationRuntimeType): + """The role based access control (RBAC) authorization type integration + runtime. + + All required parameters must be populated in order to send to Azure. + + :param authorization_type: Required. Constant filled by server. + :type authorization_type: str + :param resource_id: Required. The resource identifier of the integration + runtime to be shared. + :type resource_id: str + """ + + _validation = { + 'authorization_type': {'required': True}, + 'resource_id': {'required': True}, + } + + _attribute_map = { + 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + } + + def __init__(self, *, resource_id: str, **kwargs) -> None: + super(LinkedIntegrationRuntimeRbacAuthorization, self).__init__(**kwargs) + self.resource_id = resource_id + self.authorization_type = 'RBAC' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request.py index 983352e73d39..807757332b3e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request.py @@ -15,8 +15,10 @@ class LinkedIntegrationRuntimeRequest(Model): """Data factory name for linked integration runtime request. - :param linked_factory_name: The data factory name for linked integration - runtime. + All required parameters must be populated in order to send to Azure. + + :param linked_factory_name: Required. The data factory name for linked + integration runtime. 
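The RBAC flavour identifies the shared integration runtime by its ARM resource ID instead of a key. The ID shape below is the standard factory-scoped path and is illustrative, not taken from this patch:

    from azure.mgmt.datafactory.models import (
        LinkedIntegrationRuntimeRbacAuthorization)

    auth = LinkedIntegrationRuntimeRbacAuthorization(
        resource_id='/subscriptions/<sub>/resourceGroups/<rg>'
                    '/providers/Microsoft.DataFactory/factories/<factory>'
                    '/integrationruntimes/<shared-ir>',   # illustrative shape
    )
    assert auth.authorization_type == 'RBAC'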
:type linked_factory_name: str """ @@ -28,6 +30,6 @@ class LinkedIntegrationRuntimeRequest(Model): 'linked_factory_name': {'key': 'factoryName', 'type': 'str'}, } - def __init__(self, linked_factory_name): - super(LinkedIntegrationRuntimeRequest, self).__init__() - self.linked_factory_name = linked_factory_name + def __init__(self, **kwargs): + super(LinkedIntegrationRuntimeRequest, self).__init__(**kwargs) + self.linked_factory_name = kwargs.get('linked_factory_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request_py3.py new file mode 100644 index 000000000000..45362ab63ba3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request_py3.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class LinkedIntegrationRuntimeRequest(Model): + """Data factory name for linked integration runtime request. + + All required parameters must be populated in order to send to Azure. + + :param linked_factory_name: Required. The data factory name for linked + integration runtime. + :type linked_factory_name: str + """ + + _validation = { + 'linked_factory_name': {'required': True}, + } + + _attribute_map = { + 'linked_factory_name': {'key': 'factoryName', 'type': 'str'}, + } + + def __init__(self, *, linked_factory_name: str, **kwargs) -> None: + super(LinkedIntegrationRuntimeRequest, self).__init__(**kwargs) + self.linked_factory_name = linked_factory_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type.py index a8127e3bbfd3..446395bb9cbf 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type.py @@ -19,7 +19,9 @@ class LinkedIntegrationRuntimeType(Model): sub-classes are: LinkedIntegrationRuntimeRbacAuthorization, LinkedIntegrationRuntimeKeyAuthorization - :param authorization_type: Constant filled by server. + All required parameters must be populated in order to send to Azure. + + :param authorization_type: Required. Constant filled by server. 
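Note the attribute-to-wire rename in the request model: linked_factory_name serializes under the key factoryName. A one-line sketch:

    from azure.mgmt.datafactory.models import LinkedIntegrationRuntimeRequest

    req = LinkedIntegrationRuntimeRequest(linked_factory_name='my-factory')
    # per _attribute_map this goes out as {"factoryName": "my-factory"}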
:type authorization_type: str """ @@ -35,6 +37,6 @@ class LinkedIntegrationRuntimeType(Model): 'authorization_type': {'RBAC': 'LinkedIntegrationRuntimeRbacAuthorization', 'Key': 'LinkedIntegrationRuntimeKeyAuthorization'} } - def __init__(self): - super(LinkedIntegrationRuntimeType, self).__init__() + def __init__(self, **kwargs): + super(LinkedIntegrationRuntimeType, self).__init__(**kwargs) self.authorization_type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type_py3.py new file mode 100644 index 000000000000..79468dc450d2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type_py3.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class LinkedIntegrationRuntimeType(Model): + """The base definition of a linked integration runtime. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: LinkedIntegrationRuntimeRbacAuthorization, + LinkedIntegrationRuntimeKeyAuthorization + + All required parameters must be populated in order to send to Azure. + + :param authorization_type: Required. Constant filled by server. + :type authorization_type: str + """ + + _validation = { + 'authorization_type': {'required': True}, + } + + _attribute_map = { + 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, + } + + _subtype_map = { + 'authorization_type': {'RBAC': 'LinkedIntegrationRuntimeRbacAuthorization', 'Key': 'LinkedIntegrationRuntimeKeyAuthorization'} + } + + def __init__(self, **kwargs) -> None: + super(LinkedIntegrationRuntimeType, self).__init__(**kwargs) + self.authorization_type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py index 55eb7c94bbea..2778a33fbb5a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py @@ -55,6 +55,8 @@ class LinkedService(Model): AzureTableStorageLinkedService, AzureBlobStorageLinkedService, AzureStorageLinkedService + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -69,7 +71,7 @@ class LinkedService(Model): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. 
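The _subtype_map on the base class above is what lets msrest pick the concrete authorization class from the wire discriminator. A sketch of that round trip; the payload shape is an assumption consistent with the attribute maps shown in this patch:

    from msrest import Deserializer

    from azure.mgmt.datafactory import models

    payload = {
        'authorizationType': 'Key',
        'key': {'type': 'SecureString', 'value': '<redacted>'},
    }
    # Hand msrest every model class so it can resolve nested types too.
    deserialize = Deserializer({
        name: cls for name, cls in vars(models).items() if isinstance(cls, type)
    })
    auth = deserialize('LinkedIntegrationRuntimeType', payload)
    assert isinstance(auth, models.LinkedIntegrationRuntimeKeyAuthorization)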
:type type: str """ @@ -90,11 +92,11 @@ class LinkedService(Model): 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 
'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} } - def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None): - super(LinkedService, self).__init__() - self.additional_properties = additional_properties - self.connect_via = connect_via - self.description = description - self.parameters = parameters - self.annotations = annotations + def __init__(self, **kwargs): + super(LinkedService, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.connect_via = kwargs.get('connect_via', None) + self.description = kwargs.get('description', None) + self.parameters = kwargs.get('parameters', None) + self.annotations = kwargs.get('annotations', None) self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py new file mode 100644 index 000000000000..2b3e475c3075 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py @@ -0,0 +1,102 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class LinkedService(Model): + """The Azure Data Factory nested object which contains the information and + credential which can be used to connect with related store or compute + resource. + + You probably want to use the sub-classes and not this class directly. 
Known + sub-classes are: AzureFunctionLinkedService, + AzureDataExplorerLinkedService, SapTableLinkedService, + GoogleAdWordsLinkedService, OracleServiceCloudLinkedService, + DynamicsAXLinkedService, ResponsysLinkedService, + AzureDatabricksLinkedService, AzureDataLakeAnalyticsLinkedService, + HDInsightOnDemandLinkedService, SalesforceMarketingCloudLinkedService, + NetezzaLinkedService, VerticaLinkedService, ZohoLinkedService, + XeroLinkedService, SquareLinkedService, SparkLinkedService, + ShopifyLinkedService, ServiceNowLinkedService, QuickBooksLinkedService, + PrestoLinkedService, PhoenixLinkedService, PaypalLinkedService, + MarketoLinkedService, AzureMariaDBLinkedService, MariaDBLinkedService, + MagentoLinkedService, JiraLinkedService, ImpalaLinkedService, + HubspotLinkedService, HiveLinkedService, HBaseLinkedService, + GreenplumLinkedService, GoogleBigQueryLinkedService, EloquaLinkedService, + DrillLinkedService, CouchbaseLinkedService, ConcurLinkedService, + AzurePostgreSqlLinkedService, AmazonMWSLinkedService, SapHanaLinkedService, + SapBWLinkedService, SftpServerLinkedService, FtpServerLinkedService, + HttpLinkedService, AzureSearchLinkedService, CustomDataSourceLinkedService, + AmazonRedshiftLinkedService, AmazonS3LinkedService, + RestServiceLinkedService, SapOpenHubLinkedService, SapEccLinkedService, + SapCloudForCustomerLinkedService, SalesforceServiceCloudLinkedService, + SalesforceLinkedService, Office365LinkedService, AzureBlobFSLinkedService, + AzureDataLakeStoreLinkedService, CosmosDbMongoDbApiLinkedService, + MongoDbV2LinkedService, MongoDbLinkedService, CassandraLinkedService, + WebLinkedService, ODataLinkedService, HdfsLinkedService, + MicrosoftAccessLinkedService, InformixLinkedService, OdbcLinkedService, + AzureMLLinkedService, TeradataLinkedService, Db2LinkedService, + SybaseLinkedService, PostgreSqlLinkedService, MySqlLinkedService, + AzureMySqlLinkedService, OracleLinkedService, FileServerLinkedService, + HDInsightLinkedService, CommonDataServiceForAppsLinkedService, + DynamicsCrmLinkedService, DynamicsLinkedService, CosmosDbLinkedService, + AzureKeyVaultLinkedService, AzureBatchLinkedService, + AzureSqlMILinkedService, AzureSqlDatabaseLinkedService, + SqlServerLinkedService, AzureSqlDWLinkedService, + AzureTableStorageLinkedService, AzureBlobStorageLinkedService, + AzureStorageLinkedService + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 
'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: + super(LinkedService, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.connect_via = connect_via + self.description = description + self.parameters = parameters + self.annotations = annotations + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference.py index bedc0f2d8fe1..28ffeda7d01a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference.py @@ -18,10 +18,12 @@ class LinkedServiceReference(Model): Variables are only populated by the server, and will be ignored when sending a request. - :ivar type: Linked service reference type. Default value: + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Linked service reference type. Default value: "LinkedServiceReference" . :vartype type: str - :param reference_name: Reference LinkedService name. + :param reference_name: Required. Reference LinkedService name. :type reference_name: str :param parameters: Arguments for LinkedService. :type parameters: dict[str, object] @@ -40,7 +42,7 @@ class LinkedServiceReference(Model): type = "LinkedServiceReference" - def __init__(self, reference_name, parameters=None): - super(LinkedServiceReference, self).__init__() - self.reference_name = reference_name - self.parameters = parameters + def __init__(self, **kwargs): + super(LinkedServiceReference, self).__init__(**kwargs) + self.reference_name = kwargs.get('reference_name', None) + self.parameters = kwargs.get('parameters', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference_py3.py new file mode 100644 index 000000000000..b6238130bdb6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference_py3.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
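Two things are worth noting in this ninety-odd-entry discriminator table: the wire value does not always echo the class prefix ('Sftp' maps to SftpServerLinkedService, 'HttpServer' to HttpLinkedService), and the base class leaves type as None until a subclass __init__ or the server fills it:

    from azure.mgmt.datafactory.models import LinkedService

    base = LinkedService(description='placeholder')
    assert base.type is None  # each concrete subclass pins its own value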
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class LinkedServiceReference(Model): + """Linked service reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Linked service reference type. Default value: + "LinkedServiceReference" . + :vartype type: str + :param reference_name: Required. Reference LinkedService name. + :type reference_name: str + :param parameters: Arguments for LinkedService. + :type parameters: dict[str, object] + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + type = "LinkedServiceReference" + + def __init__(self, *, reference_name: str, parameters=None, **kwargs) -> None: + super(LinkedServiceReference, self).__init__(**kwargs) + self.reference_name = reference_name + self.parameters = parameters diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource.py index cbbc70cbc1ce..75828718f589 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource.py @@ -18,6 +18,8 @@ class LinkedServiceResource(SubResource): Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. + :ivar id: The resource identifier. :vartype id: str :ivar name: The resource name. @@ -26,7 +28,7 @@ class LinkedServiceResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param properties: Properties of linked service. + :param properties: Required. Properties of linked service. :type properties: ~azure.mgmt.datafactory.models.LinkedService """ @@ -46,6 +48,6 @@ class LinkedServiceResource(SubResource): 'properties': {'key': 'properties', 'type': 'LinkedService'}, } - def __init__(self, properties): - super(LinkedServiceResource, self).__init__() - self.properties = properties + def __init__(self, **kwargs): + super(LinkedServiceResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource_py3.py new file mode 100644 index 000000000000..1fa964b51f57 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource_py3.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
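Unlike the polymorphic models, the reference type's discriminator is a class-level constant, so callers supply only the name (and optional parameters):

    from azure.mgmt.datafactory.models import LinkedServiceReference

    ref = LinkedServiceReference(
        reference_name='MyStorageLinkedService',
        parameters={'container': 'input'},
    )
    assert ref.type == 'LinkedServiceReference'  # constant on the class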
+# -------------------------------------------------------------------------- + +from .sub_resource_py3 import SubResource + + +class LinkedServiceResource(SubResource): + """Linked service resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Properties of linked service. + :type properties: ~azure.mgmt.datafactory.models.LinkedService + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'LinkedService'}, + } + + def __init__(self, *, properties, **kwargs) -> None: + super(LinkedServiceResource, self).__init__(**kwargs) + self.properties = properties diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings.py index b09114e37957..81b4e7ca619e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings.py @@ -15,10 +15,13 @@ class LogStorageSettings(Model): """Log storage settings. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param linked_service_name: Log storage linked service reference. + :param linked_service_name: Required. Log storage linked service + reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param path: The path to storage for storing detailed logs of activity @@ -36,8 +39,8 @@ class LogStorageSettings(Model): 'path': {'key': 'path', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, path=None): - super(LogStorageSettings, self).__init__() - self.additional_properties = additional_properties - self.linked_service_name = linked_service_name - self.path = path + def __init__(self, **kwargs): + super(LogStorageSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.linked_service_name = kwargs.get('linked_service_name', None) + self.path = kwargs.get('path', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings_py3.py new file mode 100644 index 000000000000..4850b7adacdf --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
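The resource wrapper pairs a required LinkedService payload with server-owned envelope fields; only properties is settable. In the sketch below, linked_service stands in for any concrete LinkedService instance:

    from azure.mgmt.datafactory.models import LinkedServiceResource

    resource = LinkedServiceResource(properties=linked_service)
    # id, name, type and etag stay None locally; the service fills them
    # in on create/get responses.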
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class LogStorageSettings(Model): + """Log storage settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param linked_service_name: Required. Log storage linked service + reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param path: The path to storage for storing detailed logs of activity + execution. Type: string (or Expression with resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'path': {'key': 'path', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, path=None, **kwargs) -> None: + super(LogStorageSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.linked_service_name = linked_service_name + self.path = path diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity.py index 282596c4b291..62584b2f704a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity.py @@ -15,10 +15,12 @@ class LookupActivity(ExecutionActivity): """Lookup activity. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Activity name. + :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str @@ -26,17 +28,17 @@ class LookupActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param source: Dataset-specific source properties, same as copy activity - source. + :param source: Required. Dataset-specific source properties, same as copy + activity source. :type source: ~azure.mgmt.datafactory.models.CopySource - :param dataset: Lookup activity dataset reference. + :param dataset: Required. Lookup activity dataset reference. :type dataset: ~azure.mgmt.datafactory.models.DatasetReference :param first_row_only: Whether to return first row or all rows. Default value is true. 
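The empty-string key in _attribute_map is the msrest catch-all: anything placed in additional_properties rides along at the top level of the serialized object. A small sketch:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, LogStorageSettings)

    log_settings = LogStorageSettings(
        linked_service_name=LinkedServiceReference(reference_name='LogStore'),
        path='copy-logs/run-001',
        additional_properties={'customFlag': True},  # serialized inline
    )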
Type: boolean (or Expression with resultType boolean). @@ -64,9 +66,9 @@ class LookupActivity(ExecutionActivity): 'first_row_only': {'key': 'typeProperties.firstRowOnly', 'type': 'object'}, } - def __init__(self, name, source, dataset, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, first_row_only=None): - super(LookupActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) - self.source = source - self.dataset = dataset - self.first_row_only = first_row_only + def __init__(self, **kwargs): + super(LookupActivity, self).__init__(**kwargs) + self.source = kwargs.get('source', None) + self.dataset = kwargs.get('dataset', None) + self.first_row_only = kwargs.get('first_row_only', None) self.type = 'Lookup' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity_py3.py new file mode 100644 index 000000000000..41061675ebbe --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity_py3.py @@ -0,0 +1,74 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class LookupActivity(ExecutionActivity): + """Lookup activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param source: Required. Dataset-specific source properties, same as copy + activity source. + :type source: ~azure.mgmt.datafactory.models.CopySource + :param dataset: Required. Lookup activity dataset reference. + :type dataset: ~azure.mgmt.datafactory.models.DatasetReference + :param first_row_only: Whether to return first row or all rows. Default + value is true. Type: boolean (or Expression with resultType boolean). 
+ :type first_row_only: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'source': {'required': True}, + 'dataset': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + 'first_row_only': {'key': 'typeProperties.firstRowOnly', 'type': 'object'}, + } + + def __init__(self, *, name: str, source, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, first_row_only=None, **kwargs) -> None: + super(LookupActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.source = source + self.dataset = dataset + self.first_row_only = first_row_only + self.type = 'Lookup' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service.py index c6a79ad1cf28..9d65437b5daa 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service.py @@ -15,6 +15,8 @@ class MagentoLinkedService(LinkedService): """Magento server linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,9 +31,9 @@ class MagentoLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param host: The URL of the Magento instance. (i.e. + :param host: Required. The URL of the Magento instance. (i.e. 192.168.222.110/magento3) :type host: object :param access_token: The access token from Magento. 
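A sketch of the keyword-only LookupActivity constructor shown above; SqlSource is used here only as one concrete CopySource, and the dataset name is hypothetical:

from azure.mgmt.datafactory.models import DatasetReference, LookupActivity, SqlSource

lookup = LookupActivity(
    name='LookupWatermark',
    dataset=DatasetReference(reference_name='WatermarkTable'),  # placeholder dataset
    source=SqlSource(sql_reader_query='SELECT MAX(wm) AS wm FROM dbo.watermark'),
    first_row_only=True,  # the service default; return a single row
)

name, source, and dataset are the required parameters; type is set to 'Lookup' by the constructor itself.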
@@ -72,12 +74,12 @@ class MagentoLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, host, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, access_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None): - super(MagentoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.host = host - self.access_token = access_token - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(MagentoLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.access_token = kwargs.get('access_token', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'Magento' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service_py3.py new file mode 100644 index 000000000000..74de1573118b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service_py3.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class MagentoLinkedService(LinkedService): + """Magento server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The URL of the Magento instance. (i.e. + 192.168.222.110/magento3) + :type host: object + :param access_token: The access token from Magento. + :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. 
+ :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(MagentoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.access_token = access_token + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Magento' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset.py index 2d8a6ec71705..ad540093ca55 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset.py @@ -15,6 +15,8 @@ class MagentoObjectDataset(Dataset): """Magento server dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class MagentoObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. 
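A construction sketch for the MagentoLinkedService model completed above (host and token values are placeholders):

from azure.mgmt.datafactory.models import MagentoLinkedService, SecureString

magento_ls = MagentoLinkedService(
    host='192.168.222.110/magento3',  # required; URL of the Magento instance
    access_token=SecureString(value='<access-token>'),  # any SecretBase works here
    use_encrypted_endpoints=True,
)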
:type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class MagentoObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -64,7 +66,7 @@ class MagentoObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): - super(MagentoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name + def __init__(self, **kwargs): + super(MagentoObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) self.type = 'MagentoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset_py3.py new file mode 100644 index 000000000000..481732bb688a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class MagentoObjectDataset(Dataset): + """Magento server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. 
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(MagentoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'MagentoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source.py index f3e5d1c2c385..df49fe63a544 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source.py @@ -15,6 +15,8 @@ class MagentoSource(CopySource): """A copy activity Magento server source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class MagentoSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
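The dataset pairs with its linked service through a LinkedServiceReference; a sketch using hypothetical names:

from azure.mgmt.datafactory.models import LinkedServiceReference, MagentoObjectDataset

magento_ds = MagentoObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='MagentoLS'),  # placeholder
    table_name='sales_order',  # hypothetical Magento entity
)

linked_service_name is the only required argument besides the server-filled type.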
@@ -49,7 +51,7 @@ class MagentoSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(MagentoSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'MagentoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source_py3.py new file mode 100644 index 000000000000..15efcc12a054 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class MagentoSource(CopySource): + """A copy activity Magento server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'MagentoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime.py index 9d750318aacd..9cbc9e94e7c3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime.py @@ -19,12 +19,14 @@ class ManagedIntegrationRuntime(IntegrationRuntime): Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Integration runtime description. :type description: str - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :ivar state: Integration runtime state, only valid for managed dedicated integration runtime. 
Possible values include: 'Initial', 'Stopped', @@ -55,9 +57,9 @@ class ManagedIntegrationRuntime(IntegrationRuntime): 'ssis_properties': {'key': 'typeProperties.ssisProperties', 'type': 'IntegrationRuntimeSsisProperties'}, } - def __init__(self, additional_properties=None, description=None, compute_properties=None, ssis_properties=None): - super(ManagedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description) + def __init__(self, **kwargs): + super(ManagedIntegrationRuntime, self).__init__(**kwargs) self.state = None - self.compute_properties = compute_properties - self.ssis_properties = ssis_properties + self.compute_properties = kwargs.get('compute_properties', None) + self.ssis_properties = kwargs.get('ssis_properties', None) self.type = 'Managed' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error.py index 642975fcf5ef..c70323697fdf 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error.py @@ -46,9 +46,9 @@ class ManagedIntegrationRuntimeError(Model): 'message': {'key': 'message', 'type': 'str'}, } - def __init__(self, additional_properties=None): - super(ManagedIntegrationRuntimeError, self).__init__() - self.additional_properties = additional_properties + def __init__(self, **kwargs): + super(ManagedIntegrationRuntimeError, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) self.time = None self.code = None self.parameters = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error_py3.py new file mode 100644 index 000000000000..1668c5196537 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error_py3.py @@ -0,0 +1,55 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ManagedIntegrationRuntimeError(Model): + """Error definition for managed integration runtime. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar time: The time when the error occurred. + :vartype time: datetime + :ivar code: Error code. + :vartype code: str + :ivar parameters: Managed integration runtime error parameters. + :vartype parameters: list[str] + :ivar message: Error message. 
+ :vartype message: str + """ + + _validation = { + 'time': {'readonly': True}, + 'code': {'readonly': True}, + 'parameters': {'readonly': True}, + 'message': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'time': {'key': 'time', 'type': 'iso-8601'}, + 'code': {'key': 'code', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '[str]'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(ManagedIntegrationRuntimeError, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.time = None + self.code = None + self.parameters = None + self.message = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node.py index 306b51ec9e45..e9c0169cf6c5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node.py @@ -44,9 +44,9 @@ class ManagedIntegrationRuntimeNode(Model): 'errors': {'key': 'errors', 'type': '[ManagedIntegrationRuntimeError]'}, } - def __init__(self, additional_properties=None, errors=None): - super(ManagedIntegrationRuntimeNode, self).__init__() - self.additional_properties = additional_properties + def __init__(self, **kwargs): + super(ManagedIntegrationRuntimeNode, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) self.node_id = None self.status = None - self.errors = errors + self.errors = kwargs.get('errors', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node_py3.py new file mode 100644 index 000000000000..0e8104d0de05 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node_py3.py @@ -0,0 +1,52 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ManagedIntegrationRuntimeNode(Model): + """Properties of integration runtime node. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar node_id: The managed integration runtime node id. + :vartype node_id: str + :ivar status: The managed integration runtime node status. Possible values + include: 'Starting', 'Available', 'Recycling', 'Unavailable' + :vartype status: str or + ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNodeStatus + :param errors: The errors that occurred on this integration runtime node. 
+ :type errors: + list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError] + """ + + _validation = { + 'node_id': {'readonly': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'node_id': {'key': 'nodeId', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'errors': {'key': 'errors', 'type': '[ManagedIntegrationRuntimeError]'}, + } + + def __init__(self, *, additional_properties=None, errors=None, **kwargs) -> None: + super(ManagedIntegrationRuntimeNode, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.node_id = None + self.status = None + self.errors = errors diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result.py index 83dc66fbb496..2329f7a2ba36 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result.py @@ -54,9 +54,9 @@ class ManagedIntegrationRuntimeOperationResult(Model): 'activity_id': {'key': 'activityId', 'type': 'str'}, } - def __init__(self, additional_properties=None): - super(ManagedIntegrationRuntimeOperationResult, self).__init__() - self.additional_properties = additional_properties + def __init__(self, **kwargs): + super(ManagedIntegrationRuntimeOperationResult, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) self.type = None self.start_time = None self.result = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result_py3.py new file mode 100644 index 000000000000..58a80c0e600e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result_py3.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ManagedIntegrationRuntimeOperationResult(Model): + """Properties of managed integration runtime operation result. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar type: The operation type. Could be start or stop. + :vartype type: str + :ivar start_time: The start time of the operation. + :vartype start_time: datetime + :ivar result: The operation result. + :vartype result: str + :ivar error_code: The error code. + :vartype error_code: str + :ivar parameters: Managed integration runtime error parameters. 
+ :vartype parameters: list[str] + :ivar activity_id: The activity id for the operation request. + :vartype activity_id: str + """ + + _validation = { + 'type': {'readonly': True}, + 'start_time': {'readonly': True}, + 'result': {'readonly': True}, + 'error_code': {'readonly': True}, + 'parameters': {'readonly': True}, + 'activity_id': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'result': {'key': 'result', 'type': 'str'}, + 'error_code': {'key': 'errorCode', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '[str]'}, + 'activity_id': {'key': 'activityId', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(ManagedIntegrationRuntimeOperationResult, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = None + self.start_time = None + self.result = None + self.error_code = None + self.parameters = None + self.activity_id = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_py3.py new file mode 100644 index 000000000000..0e71d8b09f4e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_py3.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .integration_runtime_py3 import IntegrationRuntime + + +class ManagedIntegrationRuntime(IntegrationRuntime): + """Managed integration runtime, including managed elastic and managed + dedicated integration runtimes. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Integration runtime description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :ivar state: Integration runtime state, only valid for managed dedicated + integration runtime. Possible values include: 'Initial', 'Stopped', + 'Started', 'Starting', 'Stopping', 'NeedRegistration', 'Online', + 'Limited', 'Offline', 'AccessDenied' + :vartype state: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeState + :param compute_properties: The compute resource for managed integration + runtime. + :type compute_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeComputeProperties + :param ssis_properties: SSIS properties for managed integration runtime. 
+ :type ssis_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisProperties + """ + + _validation = { + 'type': {'required': True}, + 'state': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'state': {'key': 'state', 'type': 'str'}, + 'compute_properties': {'key': 'typeProperties.computeProperties', 'type': 'IntegrationRuntimeComputeProperties'}, + 'ssis_properties': {'key': 'typeProperties.ssisProperties', 'type': 'IntegrationRuntimeSsisProperties'}, + } + + def __init__(self, *, additional_properties=None, description: str=None, compute_properties=None, ssis_properties=None, **kwargs) -> None: + super(ManagedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description, **kwargs) + self.state = None + self.compute_properties = compute_properties + self.ssis_properties = ssis_properties + self.type = 'Managed' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status.py index 347948608e2b..17d21775f09f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status.py @@ -18,6 +18,8 @@ class ManagedIntegrationRuntimeStatus(IntegrationRuntimeStatus): Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class ManagedIntegrationRuntimeStatus(IntegrationRuntimeStatus): 'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied' :vartype state: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeState - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :ivar create_time: The time at which the integration runtime was created, in ISO8601 format. @@ -67,8 +69,8 @@ class ManagedIntegrationRuntimeStatus(IntegrationRuntimeStatus): 'last_operation': {'key': 'typeProperties.lastOperation', 'type': 'ManagedIntegrationRuntimeOperationResult'}, } - def __init__(self, additional_properties=None): - super(ManagedIntegrationRuntimeStatus, self).__init__(additional_properties=additional_properties) + def __init__(self, **kwargs): + super(ManagedIntegrationRuntimeStatus, self).__init__(**kwargs) self.create_time = None self.nodes = None self.other_errors = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status_py3.py new file mode 100644 index 000000000000..03d9451045bd --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status_py3.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
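A sketch of building the managed integration runtime defined above and wrapping it in the resource envelope used by the create_or_update operations; node size and count are illustrative values:

from azure.mgmt.datafactory.models import (
    IntegrationRuntimeComputeProperties,
    IntegrationRuntimeResource,
    ManagedIntegrationRuntime,
)

ir = ManagedIntegrationRuntime(
    description='Azure-hosted integration runtime',
    compute_properties=IntegrationRuntimeComputeProperties(
        location='WestUS',
        node_size='Standard_D2_v3',  # illustrative sizing
        number_of_nodes=1,
    ),
)
# state is read-only and stays None client-side; the service populates it.
ir_resource = IntegrationRuntimeResource(properties=ir)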
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .integration_runtime_status_py3 import IntegrationRuntimeStatus + + +class ManagedIntegrationRuntimeStatus(IntegrationRuntimeStatus): + """Managed integration runtime status. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar data_factory_name: The data factory name which the integration + runtime belong to. + :vartype data_factory_name: str + :ivar state: The state of integration runtime. Possible values include: + 'Initial', 'Stopped', 'Started', 'Starting', 'Stopping', + 'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied' + :vartype state: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeState + :param type: Required. Constant filled by server. + :type type: str + :ivar create_time: The time at which the integration runtime was created, + in ISO8601 format. + :vartype create_time: datetime + :ivar nodes: The list of nodes for managed integration runtime. + :vartype nodes: + list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNode] + :ivar other_errors: The errors that occurred on this integration runtime. + :vartype other_errors: + list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError] + :ivar last_operation: The last operation result that occurred on this + integration runtime. 
+ :vartype last_operation: + ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeOperationResult + """ + + _validation = { + 'data_factory_name': {'readonly': True}, + 'state': {'readonly': True}, + 'type': {'required': True}, + 'create_time': {'readonly': True}, + 'nodes': {'readonly': True}, + 'other_errors': {'readonly': True}, + 'last_operation': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, + 'state': {'key': 'state', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'create_time': {'key': 'typeProperties.createTime', 'type': 'iso-8601'}, + 'nodes': {'key': 'typeProperties.nodes', 'type': '[ManagedIntegrationRuntimeNode]'}, + 'other_errors': {'key': 'typeProperties.otherErrors', 'type': '[ManagedIntegrationRuntimeError]'}, + 'last_operation': {'key': 'typeProperties.lastOperation', 'type': 'ManagedIntegrationRuntimeOperationResult'}, + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(ManagedIntegrationRuntimeStatus, self).__init__(additional_properties=additional_properties, **kwargs) + self.create_time = None + self.nodes = None + self.other_errors = None + self.last_operation = None + self.type = 'Managed' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service.py index 39a73e948d06..3bbe048d4877 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service.py @@ -15,6 +15,8 @@ class MariaDBLinkedService(LinkedService): """MariaDB server linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class MariaDBLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. 
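Since every status field on this model is read-only, instances are only ever obtained from the service. A polling sketch, assuming client is an already-authenticated DataFactoryManagementClient and the resource names are placeholders:

# status.properties is a ManagedIntegrationRuntimeStatus when the runtime type is 'Managed'.
status = client.integration_runtimes.get_status('my-rg', 'my-factory', 'my-managed-ir')
runtime = status.properties
print(runtime.state, runtime.create_time)
for node in runtime.nodes or []:
    print(node.node_id, node.status, [e.message for e in node.errors or []])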
@@ -59,9 +61,9 @@ class MariaDBLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None): - super(MariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.connection_string = connection_string - self.pwd = pwd - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(MariaDBLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.pwd = kwargs.get('pwd', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'MariaDB' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service_py3.py new file mode 100644 index 000000000000..475284d56038 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class MariaDBLinkedService(LinkedService): + """MariaDB server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: + super(MariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.pwd = pwd + self.encrypted_credential = encrypted_credential + self.type = 'MariaDB' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source.py index 279e044aa946..a744c1c5ff8f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source.py @@ -15,6 +15,8 @@ class MariaDBSource(CopySource): """A copy activity MariaDB server source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class MariaDBSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
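A sketch showing the pwd parameter carrying an Azure Key Vault secret reference instead of an inline password (the linked service and secret names are hypothetical):

from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference,
    LinkedServiceReference,
    MariaDBLinkedService,
)

maria_ls = MariaDBLinkedService(
    connection_string='Server=mydb;Port=3306;Database=orders;UID=reader;',
    pwd=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='MyKeyVault'),  # AKV linked service
        secret_name='mariadb-password',
    ),
)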
@@ -49,7 +51,7 @@ class MariaDBSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(MariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(MariaDBSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'MariaDBSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source_py3.py new file mode 100644 index 000000000000..472877b8f0bb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class MariaDBSource(CopySource): + """A copy activity MariaDB server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'MariaDBSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset.py index d20a77832970..66dc9c8ea9b7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset.py @@ -15,6 +15,8 @@ class MariaDBTableDataset(Dataset): """MariaDB server dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class MariaDBTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class MariaDBTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). 
@@ -64,7 +66,7 @@ class MariaDBTableDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): - super(MariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name + def __init__(self, **kwargs): + super(MariaDBTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) self.type = 'MariaDBTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset_py3.py new file mode 100644 index 000000000000..ac3c8cf2ea72 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class MariaDBTableDataset(Dataset): + """MariaDB server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(MariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'MariaDBTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service.py index 0834db5587fd..2a9e76446122 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service.py @@ -15,6 +15,8 @@ class MarketoLinkedService(LinkedService): """Marketo server linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,12 +31,12 @@ class MarketoLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param endpoint: The endpoint of the Marketo server. (i.e. + :param endpoint: Required. The endpoint of the Marketo server. (i.e. 123-ABC-321.mktorest.com) :type endpoint: object - :param client_id: The client Id of your Marketo service. + :param client_id: Required. The client Id of your Marketo service. :type client_id: object :param client_secret: The client secret of your Marketo service. 
:type client_secret: ~azure.mgmt.datafactory.models.SecretBase @@ -76,13 +78,13 @@ class MarketoLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, endpoint, client_id, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None): - super(MarketoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.endpoint = endpoint - self.client_id = client_id - self.client_secret = client_secret - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(MarketoLinkedService, self).__init__(**kwargs) + self.endpoint = kwargs.get('endpoint', None) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'Marketo' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service_py3.py new file mode 100644 index 000000000000..dc326f24acd5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service_py3.py @@ -0,0 +1,90 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class MarketoLinkedService(LinkedService): + """Marketo server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of the Marketo server. (i.e. + 123-ABC-321.mktorest.com) + :type endpoint: object + :param client_id: Required. The client Id of your Marketo service. 
+ :type client_id: object + :param client_secret: The client secret of your Marketo service. + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, endpoint, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(MarketoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.endpoint = endpoint + self.client_id = client_id + self.client_secret = client_secret + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Marketo' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset.py index 281992bf1a05..63daa10047b9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset.py @@ -15,6 +15,8 @@ class MarketoObjectDataset(Dataset): """Marketo server dataset. + All required parameters must be populated in order to send to Azure. 
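As a hedged sketch of the MarketoLinkedService above: endpoint and client_id are the required type properties, and client_secret accepts any SecretBase implementation. SecureString is assumed to be exported from the same models package; all values are placeholders:

    from azure.mgmt.datafactory.models import MarketoLinkedService, SecureString

    marketo_ls = MarketoLinkedService(
        endpoint='123-ABC-321.mktorest.com',           # required
        client_id='my-client-id',                      # required
        client_secret=SecureString(value='<secret>'),  # a SecretBase subtype
    )
    assert marketo_ls.type == 'Marketo'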
+ :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class MarketoObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class MarketoObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -64,7 +66,7 @@ class MarketoObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): - super(MarketoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name + def __init__(self, **kwargs): + super(MarketoObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) self.type = 'MarketoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset_py3.py new file mode 100644 index 000000000000..7179d5af53dd --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class MarketoObjectDataset(Dataset): + """Marketo server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(MarketoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'MarketoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source.py index 907fb4a27a8e..6d2061ef0dee 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source.py @@ -15,6 +15,8 @@ class MarketoSource(CopySource): """A copy activity Marketo server source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class MarketoSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
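The corresponding dataset wires itself to that linked service through a LinkedServiceReference, its only required argument; a sketch with an invented Marketo object name:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, MarketoObjectDataset)

    dataset = MarketoObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name='MarketoLinkedService'),
        table_name='Activity_Types',  # hypothetical object name
    )
    assert dataset.type == 'MarketoObject'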
@@ -49,7 +51,7 @@ class MarketoSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(MarketoSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'MarketoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source_py3.py new file mode 100644 index 000000000000..573dc0439754 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class MarketoSource(CopySource): + """A copy activity Marketo server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'MarketoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service.py index ae9755b667fa..b53164f6266b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service.py @@ -15,6 +15,8 @@ class MicrosoftAccessLinkedService(LinkedService): """Microsoft Access linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,10 +31,10 @@ class MicrosoftAccessLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param connection_string: The non-access credential portion of the - connection string as well as an optional encrypted credential. Type: + :param connection_string: Required. The non-access credential portion of + the connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. 
:type connection_string: object :param authentication_type: Type of authentication used to connect to the @@ -73,12 +75,12 @@ class MicrosoftAccessLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, connection_string, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, authentication_type=None, credential=None, user_name=None, password=None, encrypted_credential=None): - super(MicrosoftAccessLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.connection_string = connection_string - self.authentication_type = authentication_type - self.credential = credential - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(MicrosoftAccessLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.credential = kwargs.get('credential', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'MicrosoftAccess' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service_py3.py new file mode 100644 index 000000000000..c9f79c24adf3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class MicrosoftAccessLinkedService(LinkedService): + """Microsoft Access linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The non-access credential portion of + the connection string as well as an optional encrypted credential. Type: + string, SecureString or AzureKeyVaultSecretReference. 
+ :type connection_string: object + :param authentication_type: Type of authentication used to connect to the + Microsoft Access as ODBC data store. Possible values are: Anonymous and + Basic. Type: string (or Expression with resultType string). + :type authentication_type: object + :param credential: The access credential portion of the connection string + specified in driver-specific property-value format. + :type credential: ~azure.mgmt.datafactory.models.SecretBase + :param user_name: User name for Basic authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, credential=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(MicrosoftAccessLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.authentication_type = authentication_type + self.credential = credential + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'MicrosoftAccess' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink.py index c8b127128c15..53406fa25022 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink.py @@ -15,6 +15,8 @@ class MicrosoftAccessSink(CopySink): """A copy activity Microsoft Access sink. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -36,7 +38,7 @@ class MicrosoftAccessSink(CopySink): for the sink data store. 
Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). @@ -58,7 +60,7 @@ class MicrosoftAccessSink(CopySink): 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } - def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None): - super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.pre_copy_script = pre_copy_script + def __init__(self, **kwargs): + super(MicrosoftAccessSink, self).__init__(**kwargs) + self.pre_copy_script = kwargs.get('pre_copy_script', None) self.type = 'MicrosoftAccessSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink_py3.py new file mode 100644 index 000000000000..700db840c03d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink_py3.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class MicrosoftAccessSink(CopySink): + """A copy activity Microsoft Access sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). 
+ :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.pre_copy_script = pre_copy_script + self.type = 'MicrosoftAccessSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source.py index e15b6a7dbac3..73cd3a64184c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source.py @@ -15,6 +15,8 @@ class MicrosoftAccessSource(CopySource): """A copy activity source for Microsoft Access. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class MicrosoftAccessSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType string). 
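Putting the two Microsoft Access models above together: only connection_string is required on the linked service, and the sink adds an optional pre-copy script on top of the usual CopySink batching knobs. The ODBC connection string below is invented:

    from azure.mgmt.datafactory.models import (
        MicrosoftAccessLinkedService, MicrosoftAccessSink)

    access_ls = MicrosoftAccessLinkedService(
        connection_string='Driver={Microsoft Access Driver (*.mdb, *.accdb)};'
                          'Dbq=C:\\data\\sales.accdb',  # placeholder
    )
    sink = MicrosoftAccessSink(
        pre_copy_script='DELETE FROM staging',  # hypothetical cleanup query
        write_batch_size=1000,
    )
    assert sink.type == 'MicrosoftAccessSink'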
@@ -49,7 +51,7 @@ class MicrosoftAccessSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(MicrosoftAccessSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(MicrosoftAccessSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'MicrosoftAccessSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source_py3.py new file mode 100644 index 000000000000..1cccd82c8b19 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class MicrosoftAccessSource(CopySource): + """A copy activity source for Microsoft Access. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MicrosoftAccessSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'MicrosoftAccessSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset.py index 637534dbd571..f312dae024f5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset.py @@ -15,6 +15,8 @@ class MicrosoftAccessTableDataset(Dataset): """The Microsoft Access table dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class MicrosoftAccessTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class MicrosoftAccessTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: The Microsoft Access table name. Type: string (or Expression with resultType string). 
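The matching copy source for Microsoft Access, sketched with a placeholder query:

    from azure.mgmt.datafactory.models import MicrosoftAccessSource

    source = MicrosoftAccessSource(
        query='SELECT CustomerID, City FROM Customers',  # hypothetical
        max_concurrent_connections=2,
    )
    assert source.type == 'MicrosoftAccessSource'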
@@ -64,7 +66,7 @@ class MicrosoftAccessTableDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): - super(MicrosoftAccessTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name + def __init__(self, **kwargs): + super(MicrosoftAccessTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) self.type = 'MicrosoftAccessTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset_py3.py new file mode 100644 index 000000000000..3fad904ef58b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class MicrosoftAccessTableDataset(Dataset): + """The Microsoft Access table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Microsoft Access table name. Type: string (or + Expression with resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(MicrosoftAccessTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'MicrosoftAccessTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset.py index aeb7ceee6dc5..796c5e14eaca 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset.py @@ -15,6 +15,8 @@ class MongoDbCollectionDataset(Dataset): """The MongoDB database dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class MongoDbCollectionDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,10 +41,10 @@ class MongoDbCollectionDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param collection_name: The table name of the MongoDB database. Type: - string (or Expression with resultType string). + :param collection_name: Required. The table name of the MongoDB database. + Type: string (or Expression with resultType string). 
:type collection_name: object """ @@ -65,7 +67,7 @@ class MongoDbCollectionDataset(Dataset): 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, } - def __init__(self, linked_service_name, collection_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None): - super(MongoDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.collection_name = collection_name + def __init__(self, **kwargs): + super(MongoDbCollectionDataset, self).__init__(**kwargs) + self.collection_name = kwargs.get('collection_name', None) self.type = 'MongoDbCollection' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset_py3.py new file mode 100644 index 000000000000..68fe2affb0e4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class MongoDbCollectionDataset(Dataset): + """The MongoDB database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection_name: Required. The table name of the MongoDB database. + Type: string (or Expression with resultType string). 
+ :type collection_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'collection_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, collection_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(MongoDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.collection_name = collection_name + self.type = 'MongoDbCollection' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties.py index a2c1fd77cc23..a2d2127d1397 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties.py @@ -44,10 +44,10 @@ class MongoDbCursorMethodsProperties(Model): 'limit': {'key': 'limit', 'type': 'object'}, } - def __init__(self, additional_properties=None, project=None, sort=None, skip=None, limit=None): - super(MongoDbCursorMethodsProperties, self).__init__() - self.additional_properties = additional_properties - self.project = project - self.sort = sort - self.skip = skip - self.limit = limit + def __init__(self, **kwargs): + super(MongoDbCursorMethodsProperties, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.project = kwargs.get('project', None) + self.sort = kwargs.get('sort', None) + self.skip = kwargs.get('skip', None) + self.limit = kwargs.get('limit', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties_py3.py new file mode 100644 index 000000000000..e1e3f50d1539 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties_py3.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class MongoDbCursorMethodsProperties(Model):
+    """Cursor methods for MongoDB query.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param project: Specifies the fields to return in the documents that match
+     the query filter. To return all fields in the matching documents, omit
+     this parameter. Type: string (or Expression with resultType string).
+    :type project: object
+    :param sort: Specifies the order in which the query returns
+     matching documents. Type: string (or Expression with resultType
+     string).
+    :type sort: object
+    :param skip: Specifies how many documents are skipped and where MongoDB
+     begins returning results. This approach may be useful in implementing
+     paginated results. Type: integer (or Expression with resultType integer).
+    :type skip: object
+    :param limit: Specifies the maximum number of documents the server
+     returns. limit() is analogous to the LIMIT statement in a SQL database.
+     Type: integer (or Expression with resultType integer).
+    :type limit: object
+    """
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'project': {'key': 'project', 'type': 'object'},
+        'sort': {'key': 'sort', 'type': 'object'},
+        'skip': {'key': 'skip', 'type': 'object'},
+        'limit': {'key': 'limit', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, project=None, sort=None, skip=None, limit=None, **kwargs) -> None:
+        super(MongoDbCursorMethodsProperties, self).__init__(**kwargs)
+        self.additional_properties = additional_properties
+        self.project = project
+        self.sort = sort
+        self.skip = skip
+        self.limit = limit
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service.py
index ce0c34dfdb77..76d162b0ff70 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service.py
@@ -15,6 +15,8 @@
 class MongoDbLinkedService(LinkedService):
     """Linked service for MongoDb data source.
 
+    All required parameters must be populated in order to send to Azure.
+
     :param additional_properties: Unmatched properties from the message are
      deserialized this collection
     :type additional_properties: dict[str, object]
@@ -29,17 +31,17 @@ class MongoDbLinkedService(LinkedService):
     :param annotations: List of tags that can be used for describing the
      linked service.
     :type annotations: list[object]
-    :param type: Constant filled by server.
+    :param type: Required. Constant filled by server.
     :type type: str
-    :param server: The IP address or server name of the MongoDB server. Type:
-     string (or Expression with resultType string).
+    :param server: Required. The IP address or server name of the MongoDB
+     server. Type: string (or Expression with resultType string).
     :type server: object
     :param authentication_type: The authentication type to be used to connect
     to the MongoDB database.
Possible values include: 'Basic', 'Anonymous' :type authentication_type: str or ~azure.mgmt.datafactory.models.MongoDbAuthenticationType - :param database_name: The name of the MongoDB database that you want to - access. Type: string (or Expression with resultType string). + :param database_name: Required. The name of the MongoDB database that you + want to access. Type: string (or Expression with resultType string). :type database_name: object :param username: Username for authentication. Type: string (or Expression with resultType string). @@ -92,16 +94,16 @@ class MongoDbLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, server, database_name, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, authentication_type=None, username=None, password=None, auth_source=None, port=None, enable_ssl=None, allow_self_signed_server_cert=None, encrypted_credential=None): - super(MongoDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.server = server - self.authentication_type = authentication_type - self.database_name = database_name - self.username = username - self.password = password - self.auth_source = auth_source - self.port = port - self.enable_ssl = enable_ssl - self.allow_self_signed_server_cert = allow_self_signed_server_cert - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(MongoDbLinkedService, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.database_name = kwargs.get('database_name', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.auth_source = kwargs.get('auth_source', None) + self.port = kwargs.get('port', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'MongoDb' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service_py3.py new file mode 100644 index 000000000000..95308b6ea8f0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service_py3.py @@ -0,0 +1,109 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class MongoDbLinkedService(LinkedService): + """Linked service for MongoDb data source. + + All required parameters must be populated in order to send to Azure. 
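A brief sketch of the two MongoDB models already complete above: the collection dataset, whose collection_name is required (unlike the optional table_name on most datasets in this patch), and the cursor-methods bag, whose fields all serialize to top-level JSON keys. LinkedServiceReference is assumed to be exported alongside them; values are placeholders:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, MongoDbCollectionDataset,
        MongoDbCursorMethodsProperties)

    collection = MongoDbCollectionDataset(
        linked_service_name=LinkedServiceReference(reference_name='MongoDbLinkedService'),
        collection_name='orders',  # required on this dataset type
    )
    cursor_methods = MongoDbCursorMethodsProperties(
        project='{ _id: 0, name: 1 }',  # return only 'name'
        sort='{ name: 1 }',
        skip=0,
        limit=100,
    )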
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. The IP address or server name of the MongoDB + server. Type: string (or Expression with resultType string). + :type server: object + :param authentication_type: The authentication type to be used to connect + to the MongoDB database. Possible values include: 'Basic', 'Anonymous' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.MongoDbAuthenticationType + :param database_name: Required. The name of the MongoDB database that you + want to access. Type: string (or Expression with resultType string). + :type database_name: object + :param username: Username for authentication. Type: string (or Expression + with resultType string). + :type username: object + :param password: Password for authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param auth_source: Database to verify the username and password. Type: + string (or Expression with resultType string). + :type auth_source: object + :param port: The TCP port number that the MongoDB server uses to listen + for client connections. The default value is 27017. Type: integer (or + Expression with resultType integer), minimum: 0. + :type port: object + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. Type: boolean (or + Expression with resultType boolean). + :type enable_ssl: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + Type: boolean (or Expression with resultType boolean). + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'database_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'database_name': {'key': 'typeProperties.databaseName', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'auth_source': {'key': 'typeProperties.authSource', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, server, database_name, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, username=None, password=None, auth_source=None, port=None, enable_ssl=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: + super(MongoDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.server = server + self.authentication_type = authentication_type + self.database_name = database_name + self.username = username + self.password = password + self.auth_source = auth_source + self.port = port + self.enable_ssl = enable_ssl + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.encrypted_credential = encrypted_credential + self.type = 'MongoDb' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source.py index f044c74e1e06..3da4b931f5e5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source.py @@ -15,6 +15,8 @@ class MongoDbSource(CopySource): """A copy activity source for a MongoDB database. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class MongoDbSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: Database query. Should be a SQL-92 query expression. Type: string (or Expression with resultType string). 
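
For illustration, a minimal sketch of how the keyword-only Python 3 constructor of MongoDbLinkedService above might be called once this patch is applied. The host, database, and credential values are placeholders, and SecureString is assumed to be the SecretBase implementation exposed by this models package:

from azure.mgmt.datafactory.models import MongoDbLinkedService, SecureString

# 'server' and 'database_name' are the two required typeProperties enforced
# by the _validation map; the remaining arguments stay optional.
mongo_ls = MongoDbLinkedService(
    server='mongo.example.com',    # placeholder host
    database_name='mydb',          # placeholder database
    authentication_type='Basic',
    username='reader',
    password=SecureString(value='<placeholder>'),
)
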
@@ -49,7 +51,7 @@ class MongoDbSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(MongoDbSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'MongoDbSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source_py3.py new file mode 100644 index 000000000000..ab3e5b6e0cc9 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class MongoDbSource(CopySource): + """A copy activity source for a MongoDB database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Should be a SQL-92 query expression. Type: + string (or Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'MongoDbSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset.py index b50597dad249..17089373d4c5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset.py @@ -15,6 +15,8 @@ class MongoDbV2CollectionDataset(Dataset): """The MongoDB database dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class MongoDbV2CollectionDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,10 +41,10 @@ class MongoDbV2CollectionDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param collection: The collection name of the MongoDB database. Type: - string (or Expression with resultType string). + :param collection: Required. The collection name of the MongoDB database. + Type: string (or Expression with resultType string). 
:type collection: object """ @@ -65,7 +67,7 @@ class MongoDbV2CollectionDataset(Dataset): 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, } - def __init__(self, linked_service_name, collection, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None): - super(MongoDbV2CollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.collection = collection + def __init__(self, **kwargs): + super(MongoDbV2CollectionDataset, self).__init__(**kwargs) + self.collection = kwargs.get('collection', None) self.type = 'MongoDbV2Collection' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset_py3.py new file mode 100644 index 000000000000..ad1e5c538645 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class MongoDbV2CollectionDataset(Dataset): + """The MongoDB database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection: Required. The collection name of the MongoDB database. + Type: string (or Expression with resultType string). 
+    :type collection: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+        'collection': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'collection': {'key': 'typeProperties.collection', 'type': 'object'},
+    }
+
+    def __init__(self, *, linked_service_name, collection, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None:
+        super(MongoDbV2CollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+        self.collection = collection
+        self.type = 'MongoDbV2Collection'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service.py
index 8fd966440047..bb29fc767420 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service.py
@@ -15,6 +15,8 @@ class MongoDbV2LinkedService(LinkedService):
     """Linked service for MongoDB data source.
 
+    All required parameters must be populated in order to send to Azure.
+
     :param additional_properties: Unmatched properties from the message are
      deserialized this collection
     :type additional_properties: dict[str, object]
@@ -29,14 +31,13 @@
     :param annotations: List of tags that can be used for describing the
      linked service.
     :type annotations: list[object]
-    :param type: Constant filled by server.
+    :param type: Required. Constant filled by server.
     :type type: str
-    :param connection_string: The MongoDB connection string. Type: string,
-     SecureString or AzureKeyVaultSecretReference. Type: string, SecureString
-     or AzureKeyVaultSecretReference.
+    :param connection_string: Required. The MongoDB connection string. Type:
+     string, SecureString or AzureKeyVaultSecretReference.
     :type connection_string: object
-    :param database: The name of the MongoDB database that you want to access.
-     Type: string (or Expression with resultType string).
+    :param database: Required. The name of the MongoDB database that you want
+     to access. Type: string (or Expression with resultType string).
     :type database: object
     """
 
@@ -57,8 +59,8 @@ class MongoDbV2LinkedService(LinkedService):
         'database': {'key': 'typeProperties.database', 'type': 'object'},
     }
 
-    def __init__(self, connection_string, database, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None):
-        super(MongoDbV2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations)
-        self.connection_string = connection_string
-        self.database = database
+    def __init__(self, **kwargs):
+        super(MongoDbV2LinkedService, self).__init__(**kwargs)
+        self.connection_string = kwargs.get('connection_string', None)
+        self.database = kwargs.get('database', None)
         self.type = 'MongoDbV2'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service_py3.py
new file mode 100644
index 000000000000..d1388ce797a5
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service_py3.py
@@ -0,0 +1,65 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class MongoDbV2LinkedService(LinkedService):
+    """Linked service for MongoDB data source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param connection_string: Required. The MongoDB connection string. Type:
+     string, SecureString or AzureKeyVaultSecretReference.
+    :type connection_string: object
+    :param database: Required. The name of the MongoDB database that you want
+     to access. Type: string (or Expression with resultType string).
+ :type database: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + } + + def __init__(self, *, connection_string, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: + super(MongoDbV2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.database = database + self.type = 'MongoDbV2' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source.py index 3606e04e1878..e951674a8e22 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source.py @@ -15,6 +15,8 @@ class MongoDbV2Source(CopySource): """A copy activity source for a MongoDB database. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class MongoDbV2Source(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param filter: Specifies selection filter using query operators. 
     To return all documents in a collection, omit this parameter or pass an empty
@@ -61,9 +63,9 @@
         'batch_size': {'key': 'batchSize', 'type': 'object'},
     }
 
-    def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None):
-        super(MongoDbV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections)
-        self.filter = filter
-        self.cursor_methods = cursor_methods
-        self.batch_size = batch_size
+    def __init__(self, **kwargs):
+        super(MongoDbV2Source, self).__init__(**kwargs)
+        self.filter = kwargs.get('filter', None)
+        self.cursor_methods = kwargs.get('cursor_methods', None)
+        self.batch_size = kwargs.get('batch_size', None)
         self.type = 'MongoDbV2Source'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py
new file mode 100644
index 000000000000..9b8eec114a06
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py
@@ -0,0 +1,71 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source_py3 import CopySource
+
+
+class MongoDbV2Source(CopySource):
+    """A copy activity source for a MongoDB database.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param filter: Specifies selection filter using query operators. To return
+     all documents in a collection, omit this parameter or pass an empty
+     document ({}). Type: string (or Expression with resultType string).
+    :type filter: object
+    :param cursor_methods: Cursor methods for MongoDB query.
+    :type cursor_methods:
+     ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties
+    :param batch_size: Specifies the number of documents to return in each
+     batch of the response from the MongoDB instance. In most cases, modifying
+     the batch size will not affect the user or the application. This
+     property's main purpose is to avoid hitting the limitation of response
+     size. Type: integer (or Expression with resultType integer).
+ :type batch_size: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'filter': {'key': 'filter', 'type': 'object'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None, **kwargs) -> None: + super(MongoDbV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.filter = filter + self.cursor_methods = cursor_methods + self.batch_size = batch_size + self.type = 'MongoDbV2Source' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger.py index c4b0edd8c166..1be28aa1b6ab 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger.py @@ -22,6 +22,8 @@ class MultiplePipelineTrigger(Trigger): Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -35,7 +37,7 @@ class MultiplePipelineTrigger(Trigger): :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param pipelines: Pipelines that need to be started. :type pipelines: @@ -60,7 +62,7 @@ class MultiplePipelineTrigger(Trigger): 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} } - def __init__(self, additional_properties=None, description=None, annotations=None, pipelines=None): - super(MultiplePipelineTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations) - self.pipelines = pipelines + def __init__(self, **kwargs): + super(MultiplePipelineTrigger, self).__init__(**kwargs) + self.pipelines = kwargs.get('pipelines', None) self.type = 'MultiplePipelineTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger_py3.py new file mode 100644 index 000000000000..206ab74ef419 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger_py3.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .trigger_py3 import Trigger
+
+
+class MultiplePipelineTrigger(Trigger):
+    """Base class for all triggers that support a one-to-many model from
+    trigger to pipeline.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: BlobEventsTrigger, BlobTrigger, ScheduleTrigger
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param description: Trigger description.
+    :type description: str
+    :ivar runtime_state: Indicates if trigger is running or not. Updated when
+     Start/Stop APIs are called on the Trigger. Possible values include:
+     'Started', 'Stopped', 'Disabled'
+    :vartype runtime_state: str or
+     ~azure.mgmt.datafactory.models.TriggerRuntimeState
+    :param annotations: List of tags that can be used for describing the
+     trigger.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param pipelines: Pipelines that need to be started.
+    :type pipelines:
+     list[~azure.mgmt.datafactory.models.TriggerPipelineReference]
+    """
+
+    _validation = {
+        'runtime_state': {'readonly': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'runtime_state': {'key': 'runtimeState', 'type': 'str'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'},
+    }
+
+    _subtype_map = {
+        'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'ScheduleTrigger': 'ScheduleTrigger'}
+    }
+
+    def __init__(self, *, additional_properties=None, description: str=None, annotations=None, pipelines=None, **kwargs) -> None:
+        super(MultiplePipelineTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs)
+        self.pipelines = pipelines
+        self.type = 'MultiplePipelineTrigger'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service.py
index cbc8a7dd5768..ec85b0136714 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service.py
@@ -15,6 +15,8 @@ class MySqlLinkedService(LinkedService):
     """Linked service for MySQL data source.
 
+    All required parameters must be populated in order to send to Azure.
+
     :param additional_properties: Unmatched properties from the message are
      deserialized this collection
     :type additional_properties: dict[str, object]
     :param connect_via: The integration runtime reference.
     :type connect_via:
      ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
     :param description: Linked service description.
     :type description: str
     :param parameters: Parameters for linked service.
     :type parameters: dict[str,
      ~azure.mgmt.datafactory.models.ParameterSpecification]
     :param annotations: List of tags that can be used for describing the
      linked service.
:type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param connection_string: The connection string. + :param connection_string: Required. The connection string. :type connection_string: ~azure.mgmt.datafactory.models.SecretBase :param password: The Azure key vault secret reference of password in connection string. @@ -60,9 +62,9 @@ class MySqlLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, connection_string, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, password=None, encrypted_credential=None): - super(MySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.connection_string = connection_string - self.password = password - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(MySqlLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'MySql' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service_py3.py new file mode 100644 index 000000000000..b8038df22fd6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class MySqlLinkedService(LinkedService): + """Linked service for MySQL data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. + :type connection_string: ~azure.mgmt.datafactory.models.SecretBase + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. 
Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'SecretBase'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(MySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'MySql' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source.py index 5c7fe726220f..c2b0b66eabb1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source.py @@ -15,6 +15,8 @@ class MySqlSource(CopySource): """A copy activity source for MySQL databases. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class MySqlSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType string). 
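
A hedged sketch of the MySQL models above: note that, unlike most connectors in this package, MySqlLinkedService types its required connection_string as a SecretBase, so it is wrapped in a SecureString here. All connection values are placeholders:

from azure.mgmt.datafactory.models import (
    MySqlLinkedService,
    MySqlSource,
    SecureString,
)

# The connection string is the only required typeProperty, and the attribute
# map above declares it as 'SecretBase' rather than a plain object.
mysql_ls = MySqlLinkedService(
    connection_string=SecureString(
        value='Server=mysql.example.com;Database=mydb;Uid=reader;Pwd=<placeholder>;'),
)

# A copy activity source that pushes a query down to MySQL; 'query' is optional.
mysql_source = MySqlSource(query='SELECT id, name FROM customers')
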
@@ -49,7 +51,7 @@ class MySqlSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(MySqlSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'MySqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source_py3.py new file mode 100644 index 000000000000..3a0315d83979 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class MySqlSource(CopySource): + """A copy activity source for MySQL databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'MySqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset.py index 52917a98abde..3bb1584975d5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset.py @@ -15,6 +15,8 @@ class MySqlTableDataset(Dataset): """The MySQL table dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class MySqlTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class MySqlTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: The MySQL table name. Type: string (or Expression with resultType string). 
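
Under the same caveats, a brief sketch of wiring the MySqlTableDataset above to a linked service by reference name; the reference name and table are placeholders:

from azure.mgmt.datafactory.models import LinkedServiceReference, MySqlTableDataset

# 'linked_service_name' is the only required constructor argument; the 'type'
# discriminator is filled in by the model itself.
mysql_ds = MySqlTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='MySqlLinkedService1'),
    table_name='orders',
)
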
@@ -64,7 +66,7 @@ class MySqlTableDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): - super(MySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name + def __init__(self, **kwargs): + super(MySqlTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) self.type = 'MySqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset_py3.py new file mode 100644 index 000000000000..33263561dfde --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class MySqlTableDataset(Dataset): + """The MySQL table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The MySQL table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(MySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'MySqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service.py index 9b82dfd4114e..5d94bdecaf62 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service.py @@ -15,6 +15,8 @@ class NetezzaLinkedService(LinkedService): """Netezza linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class NetezzaLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. 
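
As a sketch of the pattern above, a NetezzaLinkedService whose password is resolved through an AzureKeyVaultSecretReference rather than an inline secret; the connection string, vault reference, and secret name are placeholders:

from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference,
    LinkedServiceReference,
    NetezzaLinkedService,
)

# None of the Netezza typeProperties are required by the _validation map;
# 'pwd' may point at a Key Vault secret instead of embedding a password.
netezza_ls = NetezzaLinkedService(
    connection_string='Driver=NetezzaSQL;servername=nz.example.com;database=mydb;',
    pwd=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='AzureKeyVault1'),
        secret_name='netezza-password',
    ),
)
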
@@ -59,9 +61,9 @@ class NetezzaLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None): - super(NetezzaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.connection_string = connection_string - self.pwd = pwd - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(NetezzaLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.pwd = kwargs.get('pwd', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'Netezza' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service_py3.py new file mode 100644 index 000000000000..2fcc288fd5b7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class NetezzaLinkedService(LinkedService): + """Netezza linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: + super(NetezzaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.pwd = pwd + self.encrypted_credential = encrypted_credential + self.type = 'Netezza' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings.py index b1845b17e610..b6c1ca9ba5da 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings.py @@ -35,8 +35,8 @@ class NetezzaPartitionSettings(Model): 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, } - def __init__(self, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None): - super(NetezzaPartitionSettings, self).__init__() - self.partition_column_name = partition_column_name - self.partition_upper_bound = partition_upper_bound - self.partition_lower_bound = partition_lower_bound + def __init__(self, **kwargs): + super(NetezzaPartitionSettings, self).__init__(**kwargs) + self.partition_column_name = kwargs.get('partition_column_name', None) + self.partition_upper_bound = kwargs.get('partition_upper_bound', None) + self.partition_lower_bound = kwargs.get('partition_lower_bound', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings_py3.py new file mode 100644 index 000000000000..9f071eae60ff --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_partition_settings_py3.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class NetezzaPartitionSettings(Model): + """The settings that will be leveraged for Netezza source partitioning. + + :param partition_column_name: The name of the column in integer type that + will be used for proceeding range partitioning. Type: string (or + Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__(self, *, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None: + super(NetezzaPartitionSettings, self).__init__(**kwargs) + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py index bf0d141b4487..3c66032bf48d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py @@ -15,6 +15,8 @@ class NetezzaSource(CopySource): """A copy activity Netezza source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class NetezzaSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
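A minimal usage sketch of the partitioning surface added above (illustrative only, assuming the py3 keyword-only signatures introduced in this patch): NetezzaPartitionSettings plugs into NetezzaSource via partition_settings, and partition_option selects the mechanism. The column name, bounds, and query below are hypothetical placeholders, not values from this patch.

    from azure.mgmt.datafactory.models import (
        NetezzaPartitionSettings,
        NetezzaSource,
    )

    # Dynamic-range read: the service fans the source query out over
    # parallel partitions of an integer column between the two bounds.
    settings = NetezzaPartitionSettings(
        partition_column_name='ORDER_ID',    # hypothetical integer column
        partition_lower_bound='1',
        partition_upper_bound='1000000',
    )
    source = NetezzaSource(
        query='SELECT * FROM ADMIN.ORDERS',  # hypothetical query
        partition_option='DynamicRange',     # alternatives: 'None', 'DataSlice'
        partition_settings=settings,
    )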
@@ -60,9 +62,9 @@ class NetezzaSource(CopySource): 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, partition_option=None, partition_settings=None): - super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query - self.partition_option = partition_option - self.partition_settings = partition_settings + def __init__(self, **kwargs): + super(NetezzaSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) self.type = 'NetezzaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py new file mode 100644 index 000000000000..f5dcc07e63d8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class NetezzaSource(CopySource): + """A copy activity Netezza source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + :param partition_option: The partition mechanism that will be used for + Netezza read in parallel. Possible values include: 'None', 'DataSlice', + 'DynamicRange' + :type partition_option: str or + ~azure.mgmt.datafactory.models.NetezzaPartitionOption + :param partition_settings: The settings that will be leveraged for Netezza + source partitioning. 
+ :type partition_settings: + ~azure.mgmt.datafactory.models.NetezzaPartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, partition_option=None, partition_settings=None, **kwargs) -> None: + super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.partition_option = partition_option + self.partition_settings = partition_settings + self.type = 'NetezzaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset.py index 65702c04dc05..b7807273262b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset.py @@ -15,6 +15,8 @@ class NetezzaTableDataset(Dataset): """Netezza dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class NetezzaTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class NetezzaTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. 
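A minimal sketch of the new dataset shape (illustrative only; LinkedServiceReference and its reference name are assumptions drawn from the wider models package, not from this hunk). Note that the wire property typeProperties.schema is surfaced as netezza_table_dataset_schema to avoid clashing with the inherited schema parameter, and that table_name is superseded by the schema + table pair:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        NetezzaTableDataset,
    )

    dataset = NetezzaTableDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='MyNetezzaLinkedService'),  # hypothetical name
        table='ORDERS',                      # preferred over retired table_name
        netezza_table_dataset_schema='ADMIN',
    )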
@@ -72,9 +74,9 @@ class NetezzaTableDataset(Dataset): 'netezza_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, netezza_table_dataset_schema=None): - super(NetezzaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name - self.table = table - self.netezza_table_dataset_schema = netezza_table_dataset_schema + def __init__(self, **kwargs): + super(NetezzaTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.netezza_table_dataset_schema = kwargs.get('netezza_table_dataset_schema', None) self.type = 'NetezzaTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset_py3.py new file mode 100644 index 000000000000..29dd448ada75 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class NetezzaTableDataset(Dataset): + """Netezza dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The table name of the Netezza. 
Type: string (or Expression + with resultType string). + :type table: object + :param netezza_table_dataset_schema: The schema name of the Netezza. Type: + string (or Expression with resultType string). + :type netezza_table_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'netezza_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, netezza_table_dataset_schema=None, **kwargs) -> None: + super(NetezzaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.table = table + self.netezza_table_dataset_schema = netezza_table_dataset_schema + self.type = 'NetezzaTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service.py index b20a11ad9530..01db8d71e924 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service.py @@ -15,6 +15,8 @@ class ODataLinkedService(LinkedService): """Open Data Protocol (OData) linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,10 +31,10 @@ class ODataLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param url: The URL of the OData service endpoint. Type: string (or - Expression with resultType string). + :param url: Required. The URL of the OData service endpoint. Type: string + (or Expression with resultType string). :type url: object :param authentication_type: Type of authentication used to connect to the OData service. 
Possible values include: 'Basic', 'Anonymous', 'Windows', @@ -108,18 +110,18 @@ class ODataLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, url, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, authentication_type=None, user_name=None, password=None, tenant=None, service_principal_id=None, aad_resource_id=None, aad_service_principal_credential_type=None, service_principal_key=None, service_principal_embedded_cert=None, service_principal_embedded_cert_password=None, encrypted_credential=None): - super(ODataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.url = url - self.authentication_type = authentication_type - self.user_name = user_name - self.password = password - self.tenant = tenant - self.service_principal_id = service_principal_id - self.aad_resource_id = aad_resource_id - self.aad_service_principal_credential_type = aad_service_principal_credential_type - self.service_principal_key = service_principal_key - self.service_principal_embedded_cert = service_principal_embedded_cert - self.service_principal_embedded_cert_password = service_principal_embedded_cert_password - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(ODataLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.tenant = kwargs.get('tenant', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.aad_resource_id = kwargs.get('aad_resource_id', None) + self.aad_service_principal_credential_type = kwargs.get('aad_service_principal_credential_type', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.service_principal_embedded_cert = kwargs.get('service_principal_embedded_cert', None) + self.service_principal_embedded_cert_password = kwargs.get('service_principal_embedded_cert_password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'OData' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service_py3.py new file mode 100644 index 000000000000..fcf2d8bb9819 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service_py3.py @@ -0,0 +1,127 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class ODataLinkedService(LinkedService): + """Open Data Protocol (OData) linked service. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The URL of the OData service endpoint. Type: string + (or Expression with resultType string). + :type url: object + :param authentication_type: Type of authentication used to connect to the + OData service. Possible values include: 'Basic', 'Anonymous', 'Windows', + 'AadServicePrincipal', 'ManagedServiceIdentity' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.ODataAuthenticationType + :param user_name: User name of the OData service. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password of the OData service. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: Specify the tenant information (domain name or tenant ID) + under which your application resides. Type: string (or Expression with + resultType string). + :type tenant: object + :param service_principal_id: Specify the application id of your + application registered in Azure Active Directory. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param aad_resource_id: Specify the resource you are requesting + authorization to use Directory. Type: string (or Expression with + resultType string). + :type aad_resource_id: object + :param aad_service_principal_credential_type: Specify the credential type + (key or cert) is used for service principal. Possible values include: + 'ServicePrincipalKey', 'ServicePrincipalCert' + :type aad_service_principal_credential_type: str or + ~azure.mgmt.datafactory.models.ODataAadServicePrincipalCredentialType + :param service_principal_key: Specify the secret of your application + registered in Azure Active Directory. Type: string (or Expression with + resultType string). + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param service_principal_embedded_cert: Specify the base64 encoded + certificate of your application registered in Azure Active Directory. + Type: string (or Expression with resultType string). + :type service_principal_embedded_cert: + ~azure.mgmt.datafactory.models.SecretBase + :param service_principal_embedded_cert_password: Specify the password of + your certificate if your certificate has a password and you are using + AadServicePrincipal authentication. Type: string (or Expression with + resultType string). + :type service_principal_embedded_cert_password: + ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'aad_service_principal_credential_type': {'key': 'typeProperties.aadServicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'service_principal_embedded_cert': {'key': 'typeProperties.servicePrincipalEmbeddedCert', 'type': 'SecretBase'}, + 'service_principal_embedded_cert_password': {'key': 'typeProperties.servicePrincipalEmbeddedCertPassword', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, user_name=None, password=None, tenant=None, service_principal_id=None, aad_resource_id=None, aad_service_principal_credential_type=None, service_principal_key=None, service_principal_embedded_cert=None, service_principal_embedded_cert_password=None, encrypted_credential=None, **kwargs) -> None: + super(ODataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.authentication_type = authentication_type + self.user_name = user_name + self.password = password + self.tenant = tenant + self.service_principal_id = service_principal_id + self.aad_resource_id = aad_resource_id + self.aad_service_principal_credential_type = aad_service_principal_credential_type + self.service_principal_key = service_principal_key + self.service_principal_embedded_cert = service_principal_embedded_cert + self.service_principal_embedded_cert_password = service_principal_embedded_cert_password + self.encrypted_credential = encrypted_credential + self.type = 'OData' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset.py index d81ec54fd561..658cf40c8d2b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset.py @@ -15,6 +15,8 @@ class ODataResourceDataset(Dataset): """The Open Data Protocol (OData) resource dataset. + All required parameters must be populated in order to send to Azure. 
+ :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class ODataResourceDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class ODataResourceDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param path: The OData resource path. Type: string (or Expression with resultType string). @@ -64,7 +66,7 @@ class ODataResourceDataset(Dataset): 'path': {'key': 'typeProperties.path', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, path=None): - super(ODataResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.path = path + def __init__(self, **kwargs): + super(ODataResourceDataset, self).__init__(**kwargs) + self.path = kwargs.get('path', None) self.type = 'ODataResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset_py3.py new file mode 100644 index 000000000000..5951a2cf6d80 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class ODataResourceDataset(Dataset): + """The Open Data Protocol (OData) resource dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param path: The OData resource path. Type: string (or Expression with + resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, path=None, **kwargs) -> None: + super(ODataResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.path = path + self.type = 'ODataResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source.py index b9c4e8ea9141..c70f440ff6cb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source.py @@ -15,6 +15,8 @@ class ODataSource(CopySource): """A copy activity source for OData source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class ODataSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: OData query. For example, "$top=1". Type: string (or Expression with resultType string). 
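A minimal end-to-end sketch (illustrative only): the now-required url plus the AAD service-principal options on ODataLinkedService, paired with an ODataSource query. SecureString is assumed from the same models package; the endpoint, application id, secret, and tenant are hypothetical placeholders.

    from azure.mgmt.datafactory.models import (
        ODataLinkedService,
        ODataSource,
        SecureString,
    )

    # Service-principal (key) authentication against an OData endpoint.
    linked_service = ODataLinkedService(
        url='https://contoso.example/odata.svc',  # hypothetical endpoint
        authentication_type='AadServicePrincipal',
        service_principal_id='00000000-0000-0000-0000-000000000000',
        aad_service_principal_credential_type='ServicePrincipalKey',
        service_principal_key=SecureString(value='<app-secret>'),
        tenant='contoso.onmicrosoft.com',
    )
    source = ODataSource(query='$top=10')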
@@ -49,7 +51,7 @@ class ODataSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(ODataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(ODataSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'ODataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source_py3.py new file mode 100644 index 000000000000..83ba9bd7f2af --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class ODataSource(CopySource): + """A copy activity source for OData source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: OData query. For example, "$top=1". Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ODataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'ODataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service.py index 275cb7682dcf..53d21dee2def 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service.py @@ -15,6 +15,8 @@ class OdbcLinkedService(LinkedService): """Open Database Connectivity (ODBC) linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,10 +31,10 @@ class OdbcLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param connection_string: The non-access credential portion of the - connection string as well as an optional encrypted credential. Type: + :param connection_string: Required. The non-access credential portion of + the connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. 
:type connection_string: object :param authentication_type: Type of authentication used to connect to the @@ -73,12 +75,12 @@ class OdbcLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, connection_string, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, authentication_type=None, credential=None, user_name=None, password=None, encrypted_credential=None): - super(OdbcLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.connection_string = connection_string - self.authentication_type = authentication_type - self.credential = credential - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(OdbcLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.credential = kwargs.get('credential', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'Odbc' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service_py3.py new file mode 100644 index 000000000000..2e376d23c67a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class OdbcLinkedService(LinkedService): + """Open Database Connectivity (ODBC) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The non-access credential portion of + the connection string as well as an optional encrypted credential. Type: + string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param authentication_type: Type of authentication used to connect to the + ODBC data store. Possible values are: Anonymous and Basic. 
Type: string + (or Expression with resultType string). + :type authentication_type: object + :param credential: The access credential portion of the connection string + specified in driver-specific property-value format. + :type credential: ~azure.mgmt.datafactory.models.SecretBase + :param user_name: User name for Basic authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, credential=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(OdbcLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.authentication_type = authentication_type + self.credential = credential + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'Odbc' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink.py index 8c3535545ff8..ced7e1dbd9e4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink.py @@ -15,6 +15,8 @@ class OdbcSink(CopySink): """A copy activity ODBC sink. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -36,7 +38,7 @@ class OdbcSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. 
Type: string (or Expression with resultType string). @@ -58,7 +60,7 @@ class OdbcSink(CopySink): 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } - def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None): - super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.pre_copy_script = pre_copy_script + def __init__(self, **kwargs): + super(OdbcSink, self).__init__(**kwargs) + self.pre_copy_script = kwargs.get('pre_copy_script', None) self.type = 'OdbcSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink_py3.py new file mode 100644 index 000000000000..9a181f8df7e9 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink_py3.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class OdbcSink(CopySink): + """A copy activity ODBC sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). 
+ :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.pre_copy_script = pre_copy_script + self.type = 'OdbcSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source.py index c35a8b747ab2..9761d0c0aeb5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source.py @@ -15,6 +15,8 @@ class OdbcSource(CopySource): """A copy activity source for ODBC databases. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class OdbcSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType string). @@ -49,7 +51,7 @@ class OdbcSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(OdbcSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'OdbcSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source_py3.py new file mode 100644 index 000000000000..52b059a8ad91 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class OdbcSource(CopySource): + """A copy activity source for ODBC databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'OdbcSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset.py index fd2652eebc3d..2f4f4261f4fc 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset.py @@ -15,6 +15,8 @@ class OdbcTableDataset(Dataset): """The ODBC table dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class OdbcTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class OdbcTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
:type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: The ODBC table name. Type: string (or Expression with resultType string). @@ -64,7 +66,7 @@ class OdbcTableDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): - super(OdbcTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name + def __init__(self, **kwargs): + super(OdbcTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) self.type = 'OdbcTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset_py3.py new file mode 100644 index 000000000000..070ddccd180d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class OdbcTableDataset(Dataset): + """The ODBC table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The ODBC table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(OdbcTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'OdbcTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset.py index f8ca4d6bf142..baa90666d669 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset.py @@ -15,6 +15,8 @@ class Office365Dataset(Dataset): """The Office365 account. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class Office365Dataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,10 +41,10 @@ class Office365Dataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param table_name: Name of the dataset to extract from Office 365. Type: - string (or Expression with resultType string). + :param table_name: Required. Name of the dataset to extract from Office + 365. Type: string (or Expression with resultType string). :type table_name: object :param predicate: A predicate expression that can be used to filter the specific rows to extract from Office 365. 
Type: string (or Expression with @@ -70,8 +72,8 @@ class Office365Dataset(Dataset): 'predicate': {'key': 'typeProperties.predicate', 'type': 'object'}, } - def __init__(self, linked_service_name, table_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, predicate=None): - super(Office365Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name - self.predicate = predicate + def __init__(self, **kwargs): + super(Office365Dataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.predicate = kwargs.get('predicate', None) self.type = 'Office365Table' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset_py3.py new file mode 100644 index 000000000000..5517f7daf9e3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset_py3.py @@ -0,0 +1,79 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class Office365Dataset(Dataset): + """The Office365 account. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: Required. Name of the dataset to extract from Office + 365. Type: string (or Expression with resultType string). + :type table_name: object + :param predicate: A predicate expression that can be used to filter the + specific rows to extract from Office 365. Type: string (or Expression with + resultType string). 
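A construction sketch for this dataset; the linked service name, dataset name, and predicate below are illustrative placeholders:

    from azure.mgmt.datafactory.models import LinkedServiceReference, Office365Dataset

    office365_dataset = Office365Dataset(
        linked_service_name=LinkedServiceReference(reference_name="Office365LinkedService"),
        table_name="BasicDataSet_v0.Message_v0",  # placeholder Office 365 dataset name
        # Optional row filter, passed through as an expression string (placeholder).
        predicate="CreatedDateTime >= '2019-04-28T16:00:00Z'",
    )
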
+ :type predicate: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'table_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'predicate': {'key': 'typeProperties.predicate', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, predicate=None, **kwargs) -> None: + super(Office365Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.predicate = predicate + self.type = 'Office365Table' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service.py index 417dd66f710c..2dc98897482a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service.py @@ -15,6 +15,8 @@ class Office365LinkedService(LinkedService): """Office365 linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,19 +31,19 @@ class Office365LinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param office365_tenant_id: Azure tenant ID to which the Office 365 - account belongs. Type: string (or Expression with resultType string). + :param office365_tenant_id: Required. Azure tenant ID to which the Office + 365 account belongs. Type: string (or Expression with resultType string). :type office365_tenant_id: object - :param service_principal_tenant_id: Specify the tenant information under - which your Azure AD web application resides. Type: string (or Expression - with resultType string). - :type service_principal_tenant_id: object - :param service_principal_id: Specify the application's client ID. Type: + :param service_principal_tenant_id: Required. Specify the tenant + information under which your Azure AD web application resides. Type: string (or Expression with resultType string). + :type service_principal_tenant_id: object + :param service_principal_id: Required. Specify the application's client + ID. Type: string (or Expression with resultType string). 
:type service_principal_id: object - :param service_principal_key: Specify the application's key. + :param service_principal_key: Required. Specify the application's key. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime @@ -71,11 +73,11 @@ class Office365LinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, office365_tenant_id, service_principal_tenant_id, service_principal_id, service_principal_key, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, encrypted_credential=None): - super(Office365LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.office365_tenant_id = office365_tenant_id - self.service_principal_tenant_id = service_principal_tenant_id - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(Office365LinkedService, self).__init__(**kwargs) + self.office365_tenant_id = kwargs.get('office365_tenant_id', None) + self.service_principal_tenant_id = kwargs.get('service_principal_tenant_id', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'Office365' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service_py3.py new file mode 100644 index 000000000000..5a69c0d895fa --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service_py3.py @@ -0,0 +1,83 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class Office365LinkedService(LinkedService): + """Office365 linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param office365_tenant_id: Required. 
Azure tenant ID to which the Office + 365 account belongs. Type: string (or Expression with resultType string). + :type office365_tenant_id: object + :param service_principal_tenant_id: Required. Specify the tenant + information under which your Azure AD web application resides. Type: + string (or Expression with resultType string). + :type service_principal_tenant_id: object + :param service_principal_id: Required. Specify the application's client + ID. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. Specify the application's key. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'office365_tenant_id': {'required': True}, + 'service_principal_tenant_id': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'office365_tenant_id': {'key': 'typeProperties.office365TenantId', 'type': 'object'}, + 'service_principal_tenant_id': {'key': 'typeProperties.servicePrincipalTenantId', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, office365_tenant_id, service_principal_tenant_id, service_principal_id, service_principal_key, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, encrypted_credential=None, **kwargs) -> None: + super(Office365LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.office365_tenant_id = office365_tenant_id + self.service_principal_tenant_id = service_principal_tenant_id + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.encrypted_credential = encrypted_credential + self.type = 'Office365' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source.py index d180e8e5584d..de19818aaa7f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source.py @@ -15,6 +15,8 @@ class Office365Source(CopySource): """A copy activity source for an Office365 service. + All required parameters must be populated in order to send to Azure. 
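The Office365 linked service above requires all four service-principal properties; a sketch with placeholder identifiers, assuming SecureString for the key:

    from azure.mgmt.datafactory.models import Office365LinkedService, SecureString

    office365_linked_service = Office365LinkedService(
        office365_tenant_id="<office-365-tenant-id>",       # placeholder
        service_principal_tenant_id="<aad-tenant-id>",      # placeholder
        service_principal_id="<application-client-id>",     # placeholder
        service_principal_key=SecureString(value="<application-key>"),  # placeholder secret
    )
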
+ :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class Office365Source(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param allowed_groups: The groups containing all the users. Type: array of strings (or Expression with resultType array of strings). @@ -66,11 +68,11 @@ class Office365Source(CopySource): 'end_time': {'key': 'endTime', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, allowed_groups=None, user_scope_filter_uri=None, date_filter_column=None, start_time=None, end_time=None): - super(Office365Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.allowed_groups = allowed_groups - self.user_scope_filter_uri = user_scope_filter_uri - self.date_filter_column = date_filter_column - self.start_time = start_time - self.end_time = end_time + def __init__(self, **kwargs): + super(Office365Source, self).__init__(**kwargs) + self.allowed_groups = kwargs.get('allowed_groups', None) + self.user_scope_filter_uri = kwargs.get('user_scope_filter_uri', None) + self.date_filter_column = kwargs.get('date_filter_column', None) + self.start_time = kwargs.get('start_time', None) + self.end_time = kwargs.get('end_time', None) self.type = 'Office365Source' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source_py3.py new file mode 100644 index 000000000000..fc2c4b095904 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source_py3.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class Office365Source(CopySource): + """A copy activity source for an Office365 service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. 
Constant filled by server. + :type type: str + :param allowed_groups: The groups containing all the users. Type: array of + strings (or Expression with resultType array of strings). + :type allowed_groups: object + :param user_scope_filter_uri: The user scope URI. Type: string (or + Expression with resultType string). + :type user_scope_filter_uri: object + :param date_filter_column: The column on which to apply the start time and + end time filters. Type: string (or Expression with resultType string). + :type date_filter_column: object + :param start_time: Start time of the requested range for this dataset. + Type: string (or Expression with resultType string). + :type start_time: object + :param end_time: End time of the requested range for this dataset. Type: + string (or Expression with resultType string). + :type end_time: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'allowed_groups': {'key': 'allowedGroups', 'type': 'object'}, + 'user_scope_filter_uri': {'key': 'userScopeFilterUri', 'type': 'object'}, + 'date_filter_column': {'key': 'dateFilterColumn', 'type': 'object'}, + 'start_time': {'key': 'startTime', 'type': 'object'}, + 'end_time': {'key': 'endTime', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, allowed_groups=None, user_scope_filter_uri=None, date_filter_column=None, start_time=None, end_time=None, **kwargs) -> None: + super(Office365Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.allowed_groups = allowed_groups + self.user_scope_filter_uri = user_scope_filter_uri + self.date_filter_column = date_filter_column + self.start_time = start_time + self.end_time = end_time + self.type = 'Office365Source' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation.py index dfaf8d979082..db8cde8db784 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation.py @@ -33,9 +33,9 @@ class Operation(Model): 'service_specification': {'key': 'properties.serviceSpecification', 'type': 'OperationServiceSpecification'}, } - def __init__(self, name=None, origin=None, display=None, service_specification=None): - super(Operation, self).__init__() - self.name = name - self.origin = origin - self.display = display - self.service_specification = service_specification + def __init__(self, **kwargs): + super(Operation, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.origin = kwargs.get('origin', None) + self.display = kwargs.get('display', None) + self.service_specification = kwargs.get('service_specification', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display.py index
44a481206fb6..1d96541c0581 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display.py @@ -33,9 +33,9 @@ class OperationDisplay(Model): 'operation': {'key': 'operation', 'type': 'str'}, } - def __init__(self, description=None, provider=None, resource=None, operation=None): - super(OperationDisplay, self).__init__() - self.description = description - self.provider = provider - self.resource = resource - self.operation = operation + def __init__(self, **kwargs): + super(OperationDisplay, self).__init__(**kwargs) + self.description = kwargs.get('description', None) + self.provider = kwargs.get('provider', None) + self.resource = kwargs.get('resource', None) + self.operation = kwargs.get('operation', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display_py3.py new file mode 100644 index 000000000000..dfbb782627f4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display_py3.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OperationDisplay(Model): + """Metadata associated with the operation. + + :param description: The description of the operation. + :type description: str + :param provider: The name of the provider. + :type provider: str + :param resource: The name of the resource type on which the operation is + performed. + :type resource: str + :param operation: The type of operation: get, read, delete, etc. 
+ :type operation: str + """ + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'provider': {'key': 'provider', 'type': 'str'}, + 'resource': {'key': 'resource', 'type': 'str'}, + 'operation': {'key': 'operation', 'type': 'str'}, + } + + def __init__(self, *, description: str=None, provider: str=None, resource: str=None, operation: str=None, **kwargs) -> None: + super(OperationDisplay, self).__init__(**kwargs) + self.description = description + self.provider = provider + self.resource = resource + self.operation = operation diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification.py index 304707a33606..93bfaf4ed0de 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification.py @@ -30,8 +30,8 @@ class OperationLogSpecification(Model): 'blob_duration': {'key': 'blobDuration', 'type': 'str'}, } - def __init__(self, name=None, display_name=None, blob_duration=None): - super(OperationLogSpecification, self).__init__() - self.name = name - self.display_name = display_name - self.blob_duration = blob_duration + def __init__(self, **kwargs): + super(OperationLogSpecification, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.display_name = kwargs.get('display_name', None) + self.blob_duration = kwargs.get('blob_duration', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification_py3.py new file mode 100644 index 000000000000..2cdd941fab7b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification_py3.py @@ -0,0 +1,37 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OperationLogSpecification(Model): + """Details about an operation related to logs. + + :param name: The name of the log category. + :type name: str + :param display_name: Localized display name. + :type display_name: str + :param blob_duration: Blobs created in the customer storage account, per + hour. 
+ :type blob_duration: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'blob_duration': {'key': 'blobDuration', 'type': 'str'}, + } + + def __init__(self, *, name: str=None, display_name: str=None, blob_duration: str=None, **kwargs) -> None: + super(OperationLogSpecification, self).__init__(**kwargs) + self.name = name + self.display_name = display_name + self.blob_duration = blob_duration diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability.py index 2e2053aedca7..974e0cbf4b0b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability.py @@ -27,7 +27,7 @@ class OperationMetricAvailability(Model): 'blob_duration': {'key': 'blobDuration', 'type': 'str'}, } - def __init__(self, time_grain=None, blob_duration=None): - super(OperationMetricAvailability, self).__init__() - self.time_grain = time_grain - self.blob_duration = blob_duration + def __init__(self, **kwargs): + super(OperationMetricAvailability, self).__init__(**kwargs) + self.time_grain = kwargs.get('time_grain', None) + self.blob_duration = kwargs.get('blob_duration', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability_py3.py new file mode 100644 index 000000000000..312b83a23701 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability_py3.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OperationMetricAvailability(Model): + """Defines how often data for a metric becomes available. + + :param time_grain: The granularity for the metric. + :type time_grain: str + :param blob_duration: Blob created in the customer storage account, per + hour. 
+ :type blob_duration: str + """ + + _attribute_map = { + 'time_grain': {'key': 'timeGrain', 'type': 'str'}, + 'blob_duration': {'key': 'blobDuration', 'type': 'str'}, + } + + def __init__(self, *, time_grain: str=None, blob_duration: str=None, **kwargs) -> None: + super(OperationMetricAvailability, self).__init__(**kwargs) + self.time_grain = time_grain + self.blob_duration = blob_duration diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension.py index c71da5e1ed38..24232e7b5470 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension.py @@ -30,8 +30,8 @@ class OperationMetricDimension(Model): 'to_be_exported_for_shoebox': {'key': 'toBeExportedForShoebox', 'type': 'bool'}, } - def __init__(self, name=None, display_name=None, to_be_exported_for_shoebox=None): - super(OperationMetricDimension, self).__init__() - self.name = name - self.display_name = display_name - self.to_be_exported_for_shoebox = to_be_exported_for_shoebox + def __init__(self, **kwargs): + super(OperationMetricDimension, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.display_name = kwargs.get('display_name', None) + self.to_be_exported_for_shoebox = kwargs.get('to_be_exported_for_shoebox', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension_py3.py new file mode 100644 index 000000000000..1d8610b7fab8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension_py3.py @@ -0,0 +1,37 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OperationMetricDimension(Model): + """Defines the metric dimension. + + :param name: The name of the dimension for the metric. + :type name: str + :param display_name: The display name of the metric dimension. + :type display_name: str + :param to_be_exported_for_shoebox: Whether the dimension should be + exported to Azure Monitor. 
+ :type to_be_exported_for_shoebox: bool + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'to_be_exported_for_shoebox': {'key': 'toBeExportedForShoebox', 'type': 'bool'}, + } + + def __init__(self, *, name: str=None, display_name: str=None, to_be_exported_for_shoebox: bool=None, **kwargs) -> None: + super(OperationMetricDimension, self).__init__(**kwargs) + self.name = name + self.display_name = display_name + self.to_be_exported_for_shoebox = to_be_exported_for_shoebox diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification.py index dd37143a6ce6..77f533fdcebf 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification.py @@ -54,15 +54,15 @@ class OperationMetricSpecification(Model): 'dimensions': {'key': 'dimensions', 'type': '[OperationMetricDimension]'}, } - def __init__(self, name=None, display_name=None, display_description=None, unit=None, aggregation_type=None, enable_regional_mdm_account=None, source_mdm_account=None, source_mdm_namespace=None, availabilities=None, dimensions=None): - super(OperationMetricSpecification, self).__init__() - self.name = name - self.display_name = display_name - self.display_description = display_description - self.unit = unit - self.aggregation_type = aggregation_type - self.enable_regional_mdm_account = enable_regional_mdm_account - self.source_mdm_account = source_mdm_account - self.source_mdm_namespace = source_mdm_namespace - self.availabilities = availabilities - self.dimensions = dimensions + def __init__(self, **kwargs): + super(OperationMetricSpecification, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.display_name = kwargs.get('display_name', None) + self.display_description = kwargs.get('display_description', None) + self.unit = kwargs.get('unit', None) + self.aggregation_type = kwargs.get('aggregation_type', None) + self.enable_regional_mdm_account = kwargs.get('enable_regional_mdm_account', None) + self.source_mdm_account = kwargs.get('source_mdm_account', None) + self.source_mdm_namespace = kwargs.get('source_mdm_namespace', None) + self.availabilities = kwargs.get('availabilities', None) + self.dimensions = kwargs.get('dimensions', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification_py3.py new file mode 100644 index 000000000000..c1cc4ad39e72 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification_py3.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OperationMetricSpecification(Model): + """Details about an operation related to metrics. + + :param name: The name of the metric. + :type name: str + :param display_name: Localized display name of the metric. + :type display_name: str + :param display_description: The description of the metric. + :type display_description: str + :param unit: The unit that the metric is measured in. + :type unit: str + :param aggregation_type: The type of metric aggregation. + :type aggregation_type: str + :param enable_regional_mdm_account: Whether or not the service is using + regional MDM accounts. + :type enable_regional_mdm_account: str + :param source_mdm_account: The name of the MDM account. + :type source_mdm_account: str + :param source_mdm_namespace: The name of the MDM namespace. + :type source_mdm_namespace: str + :param availabilities: Defines how often data for metrics becomes + available. + :type availabilities: + list[~azure.mgmt.datafactory.models.OperationMetricAvailability] + :param dimensions: Defines the metric dimension. + :type dimensions: + list[~azure.mgmt.datafactory.models.OperationMetricDimension] + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'display_description': {'key': 'displayDescription', 'type': 'str'}, + 'unit': {'key': 'unit', 'type': 'str'}, + 'aggregation_type': {'key': 'aggregationType', 'type': 'str'}, + 'enable_regional_mdm_account': {'key': 'enableRegionalMdmAccount', 'type': 'str'}, + 'source_mdm_account': {'key': 'sourceMdmAccount', 'type': 'str'}, + 'source_mdm_namespace': {'key': 'sourceMdmNamespace', 'type': 'str'}, + 'availabilities': {'key': 'availabilities', 'type': '[OperationMetricAvailability]'}, + 'dimensions': {'key': 'dimensions', 'type': '[OperationMetricDimension]'}, + } + + def __init__(self, *, name: str=None, display_name: str=None, display_description: str=None, unit: str=None, aggregation_type: str=None, enable_regional_mdm_account: str=None, source_mdm_account: str=None, source_mdm_namespace: str=None, availabilities=None, dimensions=None, **kwargs) -> None: + super(OperationMetricSpecification, self).__init__(**kwargs) + self.name = name + self.display_name = display_name + self.display_description = display_description + self.unit = unit + self.aggregation_type = aggregation_type + self.enable_regional_mdm_account = enable_regional_mdm_account + self.source_mdm_account = source_mdm_account + self.source_mdm_namespace = source_mdm_namespace + self.availabilities = availabilities + self.dimensions = dimensions diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_py3.py new file mode 100644 index 000000000000..23305038a090 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_py3.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class Operation(Model): + """Azure Data Factory API operation definition. + + :param name: Operation name: {provider}/{resource}/{operation} + :type name: str + :param origin: The intended executor of the operation. + :type origin: str + :param display: Metadata associated with the operation. + :type display: ~azure.mgmt.datafactory.models.OperationDisplay + :param service_specification: Details about a service operation. + :type service_specification: + ~azure.mgmt.datafactory.models.OperationServiceSpecification + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'origin': {'key': 'origin', 'type': 'str'}, + 'display': {'key': 'display', 'type': 'OperationDisplay'}, + 'service_specification': {'key': 'properties.serviceSpecification', 'type': 'OperationServiceSpecification'}, + } + + def __init__(self, *, name: str=None, origin: str=None, display=None, service_specification=None, **kwargs) -> None: + super(Operation, self).__init__(**kwargs) + self.name = name + self.origin = origin + self.display = display + self.service_specification = service_specification diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification.py index 26cac12bec97..82622a44af5a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification.py @@ -28,7 +28,7 @@ class OperationServiceSpecification(Model): 'metric_specifications': {'key': 'metricSpecifications', 'type': '[OperationMetricSpecification]'}, } - def __init__(self, log_specifications=None, metric_specifications=None): - super(OperationServiceSpecification, self).__init__() - self.log_specifications = log_specifications - self.metric_specifications = metric_specifications + def __init__(self, **kwargs): + super(OperationServiceSpecification, self).__init__(**kwargs) + self.log_specifications = kwargs.get('log_specifications', None) + self.metric_specifications = kwargs.get('metric_specifications', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification_py3.py new file mode 100644 index 000000000000..4215dac6eb7f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification_py3.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OperationServiceSpecification(Model): + """Details about a service operation. + + :param log_specifications: Details about operations related to logs. 
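These operation-metadata models are what the management client yields when enumerating the provider's operations; a sketch, assuming an authenticated client already exists:

    from azure.mgmt.datafactory import DataFactoryManagementClient

    # `credentials` and `subscription_id` are assumed to be defined elsewhere.
    client = DataFactoryManagementClient(credentials, subscription_id)
    for operation in client.operations.list():
        display = operation.display
        print(operation.name, "-", display.operation if display else "")
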
+ :type log_specifications: + list[~azure.mgmt.datafactory.models.OperationLogSpecification] + :param metric_specifications: Details about operations related to metrics. + :type metric_specifications: + list[~azure.mgmt.datafactory.models.OperationMetricSpecification] + """ + + _attribute_map = { + 'log_specifications': {'key': 'logSpecifications', 'type': '[OperationLogSpecification]'}, + 'metric_specifications': {'key': 'metricSpecifications', 'type': '[OperationMetricSpecification]'}, + } + + def __init__(self, *, log_specifications=None, metric_specifications=None, **kwargs) -> None: + super(OperationServiceSpecification, self).__init__(**kwargs) + self.log_specifications = log_specifications + self.metric_specifications = metric_specifications diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service.py index 8fd0b5ebf775..19f715dfd9e2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service.py @@ -15,6 +15,8 @@ class OracleLinkedService(LinkedService): """Oracle database. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,9 +31,9 @@ class OracleLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param connection_string: The connection string. Type: string, + :param connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param password: The Azure key vault secret reference of password in @@ -61,9 +63,9 @@ class OracleLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, connection_string, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, password=None, encrypted_credential=None): - super(OracleLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.connection_string = connection_string - self.password = password - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(OracleLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'Oracle' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service_py3.py new file mode 100644 index 000000000000..a46f0463afb5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service_py3.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class OracleLinkedService(LinkedService): + """Oracle database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
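A construction sketch for this linked service; the connection string is a placeholder, and the password is resolved from Key Vault via an assumed AzureKeyVaultSecretReference (names illustrative):

    from azure.mgmt.datafactory.models import (
        AzureKeyVaultSecretReference,
        LinkedServiceReference,
        OracleLinkedService,
    )

    oracle_linked_service = OracleLinkedService(
        # Placeholder connection string; SecureString or a Key Vault reference also work.
        connection_string="host=<server>;port=1521;serviceName=<service>;user id=<user>",
        password=AzureKeyVaultSecretReference(
            store=LinkedServiceReference(reference_name="AzureKeyVaultLinkedService"),  # placeholder
            secret_name="oracle-password",  # placeholder secret name
        ),
    )
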
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(OracleLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'Oracle' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py index d7b70b724d4f..b4e9aa1b92f3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py @@ -38,9 +38,9 @@ class OraclePartitionSettings(Model): 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, } - def __init__(self, partition_names=None, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None): - super(OraclePartitionSettings, self).__init__() - self.partition_names = partition_names - self.partition_column_name = partition_column_name - self.partition_upper_bound = partition_upper_bound - self.partition_lower_bound = partition_lower_bound + def __init__(self, **kwargs): + super(OraclePartitionSettings, self).__init__(**kwargs) + self.partition_names = kwargs.get('partition_names', None) + self.partition_column_name = kwargs.get('partition_column_name', None) + self.partition_upper_bound = kwargs.get('partition_upper_bound', None) + self.partition_lower_bound = kwargs.get('partition_lower_bound', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py new file mode 100644 index 000000000000..10641aab7f9f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OraclePartitionSettings(Model): + """The settings that will be leveraged for Oracle source partitioning. + + :param partition_names: Names of the physical partitions of Oracle table. + :type partition_names: object + :param partition_column_name: The name of the column in integer type that + will be used for proceeding range partitioning. Type: string (or + Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_names': {'key': 'partitionNames', 'type': 'object'}, + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__(self, *, partition_names=None, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None: + super(OraclePartitionSettings, self).__init__(**kwargs) + self.partition_names = partition_names + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service.py index 89921338fc5a..44ce000868b7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service.py @@ -15,6 +15,8 @@ class OracleServiceCloudLinkedService(LinkedService): """Oracle Service Cloud linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,15 +31,15 @@ class OracleServiceCloudLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param host: The URL of the Oracle Service Cloud instance. + :param host: Required. The URL of the Oracle Service Cloud instance. :type host: object - :param username: The user name that you use to access Oracle Service Cloud - server. + :param username: Required. The user name that you use to access Oracle + Service Cloud server. :type username: object - :param password: The password corresponding to the user name that you - provided in the username key. + :param password: Required. The password corresponding to the user name + that you provided in the username key. 
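A construction sketch for this linked service with its three required properties; all values are placeholders, and SecureString is assumed for the password:

    from azure.mgmt.datafactory.models import OracleServiceCloudLinkedService, SecureString

    oracle_service_cloud_ls = OracleServiceCloudLinkedService(
        host="https://<instance>.custhelp.com",  # placeholder instance URL
        username="<api-user>",                   # placeholder user name
        password=SecureString(value="<api-password>"),  # placeholder secret
    )
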
:type password: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: @@ -81,13 +83,13 @@ class OracleServiceCloudLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, host, username, password, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None): - super(OracleServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.host = host - self.username = username - self.password = password - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(OracleServiceCloudLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'OracleServiceCloud' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service_py3.py new file mode 100644 index 000000000000..8732e2e82ca0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service_py3.py @@ -0,0 +1,95 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class OracleServiceCloudLinkedService(LinkedService): + """Oracle Service Cloud linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. 
The URL of the Oracle Service Cloud instance. + :type host: object + :param username: Required. The user name that you use to access Oracle + Service Cloud server. + :type username: object + :param password: Required. The password corresponding to the user name + that you provided in the username key. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. Type: boolean (or + Expression with resultType boolean). + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'username': {'required': True}, + 'password': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, username, password, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(OracleServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.username = username + self.password = password + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'OracleServiceCloud' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset.py 
index 19a2727042e1..35ce3439d8a0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset.py @@ -15,6 +15,8 @@ class OracleServiceCloudObjectDataset(Dataset): """Oracle Service Cloud dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class OracleServiceCloudObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class OracleServiceCloudObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -64,7 +66,7 @@ class OracleServiceCloudObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): - super(OracleServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name + def __init__(self, **kwargs): + super(OracleServiceCloudObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) self.type = 'OracleServiceCloudObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset_py3.py new file mode 100644 index 000000000000..a478e1abc828 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class OracleServiceCloudObjectDataset(Dataset): + """Oracle Service Cloud dataset. + + All required parameters must be populated in order to send to Azure. 
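
A minimal sketch of constructing the OracleServiceCloudLinkedService defined
above; the host and credentials are hypothetical, and password accepts any
SecretBase (a SecureString here, or an AzureKeyVaultSecretReference):

    from azure.mgmt.datafactory.models import (
        OracleServiceCloudLinkedService,
        SecureString,
    )

    osc_ls = OracleServiceCloudLinkedService(
        host='https://mysite.example.com',  # hypothetical instance URL
        username='integration_user',
        password=SecureString(value='<placeholder>'),
    )
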
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(OracleServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'OracleServiceCloudObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source.py index 32097516792e..f42291941393 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source.py @@ -15,6 +15,8 @@ class OracleServiceCloudSource(CopySource): """A copy activity Oracle Service Cloud source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class OracleServiceCloudSource(CopySource): for the source data store. 
Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). @@ -49,7 +51,7 @@ class OracleServiceCloudSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(OracleServiceCloudSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'OracleServiceCloudSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source_py3.py new file mode 100644 index 000000000000..1fa5d6eb3748 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class OracleServiceCloudSource(CopySource): + """A copy activity Oracle Service Cloud source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'OracleServiceCloudSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink.py index 879a6a86e771..1f6c747c49db 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink.py @@ -15,6 +15,8 @@ class OracleSink(CopySink): """A copy activity Oracle sink. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -36,7 +38,7 @@ class OracleSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). @@ -58,7 +60,7 @@ class OracleSink(CopySink): 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } - def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None): - super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.pre_copy_script = pre_copy_script + def __init__(self, **kwargs): + super(OracleSink, self).__init__(**kwargs) + self.pre_copy_script = kwargs.get('pre_copy_script', None) self.type = 'OracleSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink_py3.py new file mode 100644 index 000000000000..3a571c66732a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink_py3.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class OracleSink(CopySink): + """A copy activity Oracle sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.pre_copy_script = pre_copy_script + self.type = 'OracleSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py index 9ba74b694dc4..db436192eca1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py @@ -15,6 +15,8 @@ class OracleSource(CopySource): """A copy activity Oracle source. + All required parameters must be populated in order to send to Azure. 
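
A minimal sketch of the OracleSink completed above; every parameter is
optional, and the pre-copy script shown is hypothetical:

    from azure.mgmt.datafactory.models import OracleSink

    sink = OracleSink(
        write_batch_size=10000,
        pre_copy_script='TRUNCATE TABLE STAGE_ORDERS',  # hypothetical SQL
    )
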
+ :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class OracleSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param oracle_reader_query: Oracle reader query. Type: string (or Expression with resultType string). @@ -65,10 +67,10 @@ class OracleSource(CopySource): 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, oracle_reader_query=None, query_timeout=None, partition_option=None, partition_settings=None): - super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.oracle_reader_query = oracle_reader_query - self.query_timeout = query_timeout - self.partition_option = partition_option - self.partition_settings = partition_settings + def __init__(self, **kwargs): + super(OracleSource, self).__init__(**kwargs) + self.oracle_reader_query = kwargs.get('oracle_reader_query', None) + self.query_timeout = kwargs.get('query_timeout', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) self.type = 'OracleSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py new file mode 100644 index 000000000000..0a871809896e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py @@ -0,0 +1,76 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class OracleSource(CopySource): + """A copy activity Oracle source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param oracle_reader_query: Oracle reader query. 
Type: string (or + Expression with resultType string). + :type oracle_reader_query: object + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param partition_option: The partition mechanism that will be used for + Oracle read in parallel. Possible values include: 'None', + 'PhysicalPartitionsOfTable', 'DynamicRange' + :type partition_option: str or + ~azure.mgmt.datafactory.models.OraclePartitionOption + :param partition_settings: The settings that will be leveraged for Oracle + source partitioning. + :type partition_settings: + ~azure.mgmt.datafactory.models.OraclePartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, oracle_reader_query=None, query_timeout=None, partition_option=None, partition_settings=None, **kwargs) -> None: + super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.oracle_reader_query = oracle_reader_query + self.query_timeout = query_timeout + self.partition_option = partition_option + self.partition_settings = partition_settings + self.type = 'OracleSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py index 867e01cd5174..c76b5ced3e5c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py @@ -15,6 +15,8 @@ class OracleTableDataset(Dataset): """The on-premises Oracle database dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class OracleTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class OracleTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. 
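
A minimal sketch tying the OracleSource completed above to the new
OraclePartitionSettings; the column and bounds are hypothetical, and
partition_option may be passed as an OraclePartitionOption enum value or its
string form:

    from azure.mgmt.datafactory.models import (
        OraclePartitionSettings,
        OracleSource,
    )

    source = OracleSource(
        partition_option='DynamicRange',
        partition_settings=OraclePartitionSettings(
            partition_column_name='ORDER_ID',  # hypothetical integer column
            partition_lower_bound='1',
            partition_upper_bound='5000000',
        ),
    )
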
+ :param type: Required. Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. @@ -72,9 +74,9 @@ class OracleTableDataset(Dataset): 'table': {'key': 'typeProperties.table', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, oracle_table_dataset_schema=None, table=None): - super(OracleTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name - self.oracle_table_dataset_schema = oracle_table_dataset_schema - self.table = table + def __init__(self, **kwargs): + super(OracleTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.oracle_table_dataset_schema = kwargs.get('oracle_table_dataset_schema', None) + self.table = kwargs.get('table', None) self.type = 'OracleTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py new file mode 100644 index 000000000000..b588fbac5244 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class OracleTableDataset(Dataset): + """The on-premises Oracle database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. 
Please consider using + schema + table properties instead. + :type table_name: object + :param oracle_table_dataset_schema: The schema name of the on-premises + Oracle database. Type: string (or Expression with resultType string). + :type oracle_table_dataset_schema: object + :param table: The table name of the on-premises Oracle database. Type: + string (or Expression with resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'oracle_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, oracle_table_dataset_schema=None, table=None, **kwargs) -> None: + super(OracleTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.oracle_table_dataset_schema = oracle_table_dataset_schema + self.table = table + self.type = 'OracleTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format.py index b91f4d5952f7..8f0a0322062c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format.py @@ -15,6 +15,8 @@ class OrcFormat(DatasetStorageFormat): """The data stored in Optimized Row Columnar (ORC) format. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -24,7 +26,7 @@ class OrcFormat(DatasetStorageFormat): :param deserializer: Deserializer. Type: string (or Expression with resultType string). :type deserializer: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. 
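
A minimal sketch of the OracleTableDataset completed above; the linked
service, schema, and table names are hypothetical. Note that the wire key
typeProperties.schema is exposed as oracle_table_dataset_schema (see the
_attribute_map above):

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        OracleTableDataset,
    )

    dataset = OracleTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='OracleLS'),
        oracle_table_dataset_schema='HR',  # hypothetical schema name
        table='EMPLOYEES',                 # hypothetical table name
    )
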
:type type: str """ @@ -32,6 +34,13 @@ class OrcFormat(DatasetStorageFormat): 'type': {'required': True}, } - def __init__(self, additional_properties=None, serializer=None, deserializer=None): - super(OrcFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer) + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(OrcFormat, self).__init__(**kwargs) self.type = 'OrcFormat' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format_py3.py new file mode 100644 index 000000000000..40a0e389ccc3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_storage_format_py3 import DatasetStorageFormat + + +class OrcFormat(DatasetStorageFormat): + """The data stored in Optimized Row Columnar (ORC) format. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param serializer: Serializer. Type: string (or Expression with resultType + string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with + resultType string). + :type deserializer: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, **kwargs) -> None: + super(OrcFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) + self.type = 'OrcFormat' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification.py index 5747dc8efedf..aef855d955f0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification.py @@ -15,8 +15,10 @@ class ParameterSpecification(Model): """Definition of a single parameter for an entity. - :param type: Parameter type. 
Possible values include: 'Object', 'String', - 'Int', 'Float', 'Bool', 'Array', 'SecureString' + All required parameters must be populated in order to send to Azure. + + :param type: Required. Parameter type. Possible values include: 'Object', + 'String', 'Int', 'Float', 'Bool', 'Array', 'SecureString' :type type: str or ~azure.mgmt.datafactory.models.ParameterType :param default_value: Default value of parameter. :type default_value: object @@ -31,7 +33,7 @@ class ParameterSpecification(Model): 'default_value': {'key': 'defaultValue', 'type': 'object'}, } - def __init__(self, type, default_value=None): - super(ParameterSpecification, self).__init__() - self.type = type - self.default_value = default_value + def __init__(self, **kwargs): + super(ParameterSpecification, self).__init__(**kwargs) + self.type = kwargs.get('type', None) + self.default_value = kwargs.get('default_value', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification_py3.py new file mode 100644 index 000000000000..d5b6f981d365 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification_py3.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ParameterSpecification(Model): + """Definition of a single parameter for an entity. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Parameter type. Possible values include: 'Object', + 'String', 'Int', 'Float', 'Bool', 'Array', 'SecureString' + :type type: str or ~azure.mgmt.datafactory.models.ParameterType + :param default_value: Default value of parameter. + :type default_value: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'default_value': {'key': 'defaultValue', 'type': 'object'}, + } + + def __init__(self, *, type, default_value=None, **kwargs) -> None: + super(ParameterSpecification, self).__init__(**kwargs) + self.type = type + self.default_value = default_value diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset.py index 44f0516308be..ffaf8e1f6d93 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset.py @@ -15,6 +15,8 @@ class ParquetDataset(Dataset): """Parquet dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class ParquetDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. 
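
A minimal sketch of the ParameterSpecification completed above, as it might
appear in a dataset's parameters dict; the parameter name and default are
hypothetical:

    from azure.mgmt.datafactory.models import (
        ParameterSpecification,
        ParameterType,
    )

    parameters = {
        'schema_name': ParameterSpecification(
            type=ParameterType.string,
            default_value='dbo',
        ),
    }
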
:type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,9 +41,9 @@ class ParquetDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param location: The location of the parquet storage. + :param location: Required. The location of the parquet storage. :type location: ~azure.mgmt.datafactory.models.DatasetLocation :param compression_codec: :type compression_codec: object @@ -67,8 +69,8 @@ class ParquetDataset(Dataset): 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, } - def __init__(self, linked_service_name, location, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, compression_codec=None): - super(ParquetDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.location = location - self.compression_codec = compression_codec + def __init__(self, **kwargs): + super(ParquetDataset, self).__init__(**kwargs) + self.location = kwargs.get('location', None) + self.compression_codec = kwargs.get('compression_codec', None) self.type = 'Parquet' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset_py3.py new file mode 100644 index 000000000000..4d754450ce15 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset_py3.py @@ -0,0 +1,76 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class ParquetDataset(Dataset): + """Parquet dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the parquet storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param compression_codec: + :type compression_codec: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, compression_codec=None, **kwargs) -> None: + super(ParquetDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.location = location + self.compression_codec = compression_codec + self.type = 'Parquet' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format.py index 4d953b3a4e78..d742ff24b522 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format.py @@ -15,6 +15,8 @@ class ParquetFormat(DatasetStorageFormat): """The data stored in Parquet format. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -24,7 +26,7 @@ class ParquetFormat(DatasetStorageFormat): :param deserializer: Deserializer. Type: string (or Expression with resultType string). :type deserializer: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. 
:type type: str """ @@ -32,6 +34,13 @@ class ParquetFormat(DatasetStorageFormat): 'type': {'required': True}, } - def __init__(self, additional_properties=None, serializer=None, deserializer=None): - super(ParquetFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer) + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ParquetFormat, self).__init__(**kwargs) self.type = 'ParquetFormat' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format_py3.py new file mode 100644 index 000000000000..36a6f5c88c4d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_storage_format_py3 import DatasetStorageFormat + + +class ParquetFormat(DatasetStorageFormat): + """The data stored in Parquet format. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param serializer: Serializer. Type: string (or Expression with resultType + string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with + resultType string). + :type deserializer: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, **kwargs) -> None: + super(ParquetFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) + self.type = 'ParquetFormat' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py index fc6f7a5ac0fb..dea3e0f8fc52 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py @@ -15,6 +15,8 @@ class ParquetSink(CopySink): """A copy activity Parquet sink. + All required parameters must be populated in order to send to Azure. 
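
A minimal sketch of the ParquetDataset completed above; the linked service
name, location type string, paths, and codec are hypothetical, and
DatasetLocation is assumed to take a required type plus optional
folder_path/file_name in this package version:

    from azure.mgmt.datafactory.models import (
        DatasetLocation,
        LinkedServiceReference,
        ParquetDataset,
    )

    parquet_ds = ParquetDataset(
        linked_service_name=LinkedServiceReference(reference_name='MyBlobStorage'),
        location=DatasetLocation(
            type='AzureBlobStorageLocation',  # hypothetical location type
            folder_path='curated/orders',
            file_name='orders.parquet',
        ),
        compression_codec='snappy',  # hypothetical codec value
    )
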
+ :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -36,7 +38,7 @@ class ParquetSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param store_settings: Parquet store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings @@ -57,7 +59,7 @@ class ParquetSink(CopySink): 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, } - def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None): - super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.store_settings = store_settings + def __init__(self, **kwargs): + super(ParquetSink, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) self.type = 'ParquetSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py new file mode 100644 index 000000000000..463044fef83f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class ParquetSink(CopySink): + """A copy activity Parquet sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
+ :type type: str + :param store_settings: Parquet store settings. + :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.type = 'ParquetSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py index 02f7d1dea135..ab888c7361a2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py @@ -15,6 +15,8 @@ class ParquetSource(CopySource): """A copy activity Parquet source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class ParquetSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param store_settings: Parquet store settings. 
:type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings @@ -48,7 +50,7 @@ class ParquetSource(CopySource): 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None): - super(ParquetSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.store_settings = store_settings + def __init__(self, **kwargs): + super(ParquetSource, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) self.type = 'ParquetSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py new file mode 100644 index 000000000000..332a7b9b8c5e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class ParquetSource(CopySource): + """A copy activity Parquet source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Parquet store settings. 
+ :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + super(ParquetSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.type = 'ParquetSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service.py index 1f9ea0741517..d7ae0bc075e7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service.py @@ -15,6 +15,8 @@ class PaypalLinkedService(LinkedService): """Paypal Service linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,11 +31,13 @@ class PaypalLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param host: The URL of the PayPal instance. (i.e. api.sandbox.paypal.com) + :param host: Required. The URL of the PayPal instance. (i.e. + api.sandbox.paypal.com) :type host: object - :param client_id: The client ID associated with your PayPal application. + :param client_id: Required. The client ID associated with your PayPal + application. :type client_id: object :param client_secret: The client secret associated with your PayPal application. 
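Note: every model in this regeneration follows the same constructor split seen in the Parquet hunks above — the Python 2 module takes a bare **kwargs and pulls values out with kwargs.get(...), while the *_py3 twin declares keyword-only parameters and forwards them explicitly. Caller code is identical on either interpreter. A minimal usage sketch, not part of the patch (argument values are placeholders):

from azure.mgmt.datafactory.models import ParquetSink, ParquetSource

# The same keyword surface works under Python 2 (kwargs.get) and
# Python 3 (keyword-only parameters) alike.
source = ParquetSource(source_retry_count=2, max_concurrent_connections=4)
sink = ParquetSink(write_batch_size=10000, sink_retry_wait='00:00:30')

assert source.type == 'ParquetSource'  # discriminator filled in by __init__
assert sink.type == 'ParquetSink'
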
@@ -76,13 +80,13 @@ class PaypalLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, host, client_id, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None): - super(PaypalLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.host = host - self.client_id = client_id - self.client_secret = client_secret - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(PaypalLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'Paypal' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service_py3.py new file mode 100644 index 000000000000..c11cda7a52f3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service_py3.py @@ -0,0 +1,92 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class PaypalLinkedService(LinkedService): + """Paypal Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The URL of the PayPal instance. (i.e. + api.sandbox.paypal.com) + :type host: object + :param client_id: Required. The client ID associated with your PayPal + application. + :type client_id: object + :param client_secret: The client secret associated with your PayPal + application. 
+ :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(PaypalLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.client_id = client_id + self.client_secret = client_secret + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Paypal' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset.py index a7f1ef9964d6..d0fdc678841b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset.py @@ -15,6 +15,8 @@ class PaypalObjectDataset(Dataset): """Paypal Service dataset. + All required parameters must be populated in order to send to Azure. 
+ :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class PaypalObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class PaypalObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -64,7 +66,7 @@ class PaypalObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): - super(PaypalObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name + def __init__(self, **kwargs): + super(PaypalObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) self.type = 'PaypalObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset_py3.py new file mode 100644 index 000000000000..55df7c97166d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class PaypalObjectDataset(Dataset): + """Paypal Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(PaypalObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'PaypalObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source.py index cd1770c81c9e..94cdbccae6ee 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source.py @@ -15,6 +15,8 @@ class PaypalSource(CopySource): """A copy activity Paypal Service source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class PaypalSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
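The linked-service hunks also show how required properties are handled after this change: 'host' and 'client_id' are mandatory keyword-only parameters in the *_py3 signature, while the Python 2 variant relies on the _validation map to reject a missing value at serialization time. A hedged construction sketch, not from the patch (endpoint and credentials are placeholders):

from azure.mgmt.datafactory.models import PaypalLinkedService, SecureString

ls = PaypalLinkedService(
    host='api.sandbox.paypal.com',                  # placeholder endpoint
    client_id='my-client-id',                       # placeholder credential
    client_secret=SecureString(value='my-secret'),  # a SecretBase subtype
    use_encrypted_endpoints=True,
)
assert ls.type == 'Paypal'  # discriminator set by the constructor
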
@@ -49,7 +51,7 @@ class PaypalSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(PaypalSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'PaypalSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source_py3.py new file mode 100644 index 000000000000..05730d0ae067 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class PaypalSource(CopySource): + """A copy activity Paypal Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'PaypalSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service.py index da390e8f0d34..308a8e4cf592 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service.py @@ -15,6 +15,8 @@ class PhoenixLinkedService(LinkedService): """Phoenix server linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,10 +31,10 @@ class PhoenixLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param host: The IP address or host name of the Phoenix server. (i.e. - 192.168.222.160) + :param host: Required. The IP address or host name of the Phoenix server. + (i.e. 192.168.222.160) :type host: object :param port: The TCP port that the Phoenix server uses to listen for client connections. The default value is 8765. @@ -41,8 +43,8 @@ class PhoenixLinkedService(LinkedService): (i.e. /gateway/sandbox/phoenix/version). The default value is hbasephoenix if using WindowsAzureHDInsightService. :type http_path: object - :param authentication_type: The authentication mechanism used to connect - to the Phoenix server. Possible values include: 'Anonymous', + :param authentication_type: Required. The authentication mechanism used to + connect to the Phoenix server. 
Possible values include: 'Anonymous', 'UsernameAndPassword', 'WindowsAzureHDInsightService' :type authentication_type: str or ~azure.mgmt.datafactory.models.PhoenixAuthenticationType @@ -102,18 +104,18 @@ class PhoenixLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, host, authentication_type, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, port=None, http_path=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None): - super(PhoenixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.host = host - self.port = port - self.http_path = http_path - self.authentication_type = authentication_type - self.username = username - self.password = password - self.enable_ssl = enable_ssl - self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch - self.allow_self_signed_server_cert = allow_self_signed_server_cert - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(PhoenixLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.port = kwargs.get('port', None) + self.http_path = kwargs.get('http_path', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) + self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'Phoenix' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service_py3.py new file mode 100644 index 000000000000..de8210c2cc89 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service_py3.py @@ -0,0 +1,121 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class PhoenixLinkedService(LinkedService): + """Phoenix server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The IP address or host name of the Phoenix server. + (i.e. 192.168.222.160) + :type host: object + :param port: The TCP port that the Phoenix server uses to listen for + client connections. The default value is 8765. + :type port: object + :param http_path: The partial URL corresponding to the Phoenix server. + (i.e. /gateway/sandbox/phoenix/version). The default value is hbasephoenix + if using WindowsAzureHDInsightService. + :type http_path: object + :param authentication_type: Required. The authentication mechanism used to + connect to the Phoenix server. Possible values include: 'Anonymous', + 'UsernameAndPassword', 'WindowsAzureHDInsightService' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.PhoenixAuthenticationType + :param username: The user name used to connect to the Phoenix server. + :type username: object + :param password: The password corresponding to the user name. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a + CA-issued SSL certificate name to match the host name of the server when + connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, http_path=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: + super(PhoenixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.port = port + self.http_path = http_path + self.authentication_type = authentication_type + self.username = username + self.password = password + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.encrypted_credential = encrypted_credential + self.type = 'Phoenix' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset.py index 463d37b3c932..ccaa2eb49abd 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset.py @@ -15,6 +15,8 @@ class PhoenixObjectDataset(Dataset): """Phoenix server dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class PhoenixObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. 
:type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class PhoenixObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. @@ -72,9 +74,9 @@ class PhoenixObjectDataset(Dataset): 'phoenix_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, phoenix_object_dataset_schema=None): - super(PhoenixObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name - self.table = table - self.phoenix_object_dataset_schema = phoenix_object_dataset_schema + def __init__(self, **kwargs): + super(PhoenixObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.phoenix_object_dataset_schema = kwargs.get('phoenix_object_dataset_schema', None) self.type = 'PhoenixObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset_py3.py new file mode 100644 index 000000000000..cda4dc41dc22 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class PhoenixObjectDataset(Dataset): + """Phoenix server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The table name of the Phoenix. Type: string (or Expression + with resultType string). + :type table: object + :param phoenix_object_dataset_schema: The schema name of the Phoenix. + Type: string (or Expression with resultType string). + :type phoenix_object_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'phoenix_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, phoenix_object_dataset_schema=None, **kwargs) -> None: + super(PhoenixObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.table = table + self.phoenix_object_dataset_schema = phoenix_object_dataset_schema + self.type = 'PhoenixObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source.py index 5728762329ac..30171c6177ff 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source.py @@ -15,6 +15,8 @@ class PhoenixSource(CopySource): """A copy activity Phoenix server source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class PhoenixSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. 
Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). @@ -49,7 +51,7 @@ class PhoenixSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(PhoenixSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'PhoenixSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source_py3.py new file mode 100644 index 000000000000..1384f59e1aa4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class PhoenixSource(CopySource): + """A copy activity Phoenix server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'PhoenixSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder.py index 3e66bad233e0..bebc05cb1824 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder.py @@ -24,6 +24,6 @@ class PipelineFolder(Model): 'name': {'key': 'name', 'type': 'str'}, } - def __init__(self, name=None): - super(PipelineFolder, self).__init__() - self.name = name + def __init__(self, **kwargs): + super(PipelineFolder, self).__init__(**kwargs) + self.name = kwargs.get('name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder_py3.py new file mode 100644 index 000000000000..02c9b8dbbff1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder_py3.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PipelineFolder(Model): + """The folder that this Pipeline is in. If not specified, Pipeline will appear + at the root level. + + :param name: The name of the folder that this Pipeline is in. + :type name: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__(self, *, name: str=None, **kwargs) -> None: + super(PipelineFolder, self).__init__(**kwargs) + self.name = name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference.py index 1d39beea8145..aa8b23e62932 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference.py @@ -18,9 +18,12 @@ class PipelineReference(Model): Variables are only populated by the server, and will be ignored when sending a request. 
- :ivar type: Pipeline reference type. Default value: "PipelineReference" . + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Pipeline reference type. Default value: + "PipelineReference" . :vartype type: str - :param reference_name: Reference pipeline name. + :param reference_name: Required. Reference pipeline name. :type reference_name: str :param name: Reference name. :type name: str @@ -39,7 +42,7 @@ class PipelineReference(Model): type = "PipelineReference" - def __init__(self, reference_name, name=None): - super(PipelineReference, self).__init__() - self.reference_name = reference_name - self.name = name + def __init__(self, **kwargs): + super(PipelineReference, self).__init__(**kwargs) + self.reference_name = kwargs.get('reference_name', None) + self.name = kwargs.get('name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference_py3.py new file mode 100644 index 000000000000..ce63f06092d1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference_py3.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PipelineReference(Model): + """Pipeline reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Pipeline reference type. Default value: + "PipelineReference" . + :vartype type: str + :param reference_name: Required. Reference pipeline name. + :type reference_name: str + :param name: Reference name. 
+ :type name: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + } + + type = "PipelineReference" + + def __init__(self, *, reference_name: str, name: str=None, **kwargs) -> None: + super(PipelineReference, self).__init__(**kwargs) + self.reference_name = reference_name + self.name = name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource.py index e59567d8037d..a39deaccc87b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource.py @@ -72,13 +72,13 @@ class PipelineResource(SubResource): 'folder': {'key': 'properties.folder', 'type': 'PipelineFolder'}, } - def __init__(self, additional_properties=None, description=None, activities=None, parameters=None, variables=None, concurrency=None, annotations=None, folder=None): - super(PipelineResource, self).__init__() - self.additional_properties = additional_properties - self.description = description - self.activities = activities - self.parameters = parameters - self.variables = variables - self.concurrency = concurrency - self.annotations = annotations - self.folder = folder + def __init__(self, **kwargs): + super(PipelineResource, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.description = kwargs.get('description', None) + self.activities = kwargs.get('activities', None) + self.parameters = kwargs.get('parameters', None) + self.variables = kwargs.get('variables', None) + self.concurrency = kwargs.get('concurrency', None) + self.annotations = kwargs.get('annotations', None) + self.folder = kwargs.get('folder', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource_py3.py new file mode 100644 index 000000000000..8299cdb73887 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource_py3.py @@ -0,0 +1,84 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .sub_resource_py3 import SubResource + + +class PipelineResource(SubResource): + """Pipeline resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. 
+ :vartype etag: str + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: The description of the pipeline. + :type description: str + :param activities: List of activities in pipeline. + :type activities: list[~azure.mgmt.datafactory.models.Activity] + :param parameters: List of parameters for pipeline. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param variables: List of variables for pipeline. + :type variables: dict[str, + ~azure.mgmt.datafactory.models.VariableSpecification] + :param concurrency: The max number of concurrent runs for the pipeline. + :type concurrency: int + :param annotations: List of tags that can be used for describing the + Pipeline. + :type annotations: list[object] + :param folder: The folder that this Pipeline is in. If not specified, + Pipeline will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.PipelineFolder + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'concurrency': {'minimum': 1}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'properties.description', 'type': 'str'}, + 'activities': {'key': 'properties.activities', 'type': '[Activity]'}, + 'parameters': {'key': 'properties.parameters', 'type': '{ParameterSpecification}'}, + 'variables': {'key': 'properties.variables', 'type': '{VariableSpecification}'}, + 'concurrency': {'key': 'properties.concurrency', 'type': 'int'}, + 'annotations': {'key': 'properties.annotations', 'type': '[object]'}, + 'folder': {'key': 'properties.folder', 'type': 'PipelineFolder'}, + } + + def __init__(self, *, additional_properties=None, description: str=None, activities=None, parameters=None, variables=None, concurrency: int=None, annotations=None, folder=None, **kwargs) -> None: + super(PipelineResource, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.description = description + self.activities = activities + self.parameters = parameters + self.variables = variables + self.concurrency = concurrency + self.annotations = annotations + self.folder = folder diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run.py index bb28998cf999..a2407bd9835f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run.py @@ -82,9 +82,9 @@ class PipelineRun(Model): 'message': {'key': 'message', 'type': 'str'}, } - def __init__(self, additional_properties=None): - super(PipelineRun, self).__init__() - self.additional_properties = additional_properties + def __init__(self, **kwargs): + super(PipelineRun, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) self.run_id = None self.run_group_id = None self.is_latest = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by.py index dc8d3ae847c4..acefb80fd078 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by.py @@ -38,8 +38,8 @@ class PipelineRunInvokedBy(Model): 'invoked_by_type': {'key': 'invokedByType', 'type': 'str'}, } - def __init__(self): - super(PipelineRunInvokedBy, self).__init__() + def __init__(self, **kwargs): + super(PipelineRunInvokedBy, self).__init__(**kwargs) self.name = None self.id = None self.invoked_by_type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by_py3.py new file mode 100644 index 000000000000..c954a18b8a67 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PipelineRunInvokedBy(Model): + """Provides entity name and id that started the pipeline run. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar name: Name of the entity that started the pipeline run. + :vartype name: str + :ivar id: The ID of the entity that started the run. + :vartype id: str + :ivar invoked_by_type: The type of the entity that started the run. + :vartype invoked_by_type: str + """ + + _validation = { + 'name': {'readonly': True}, + 'id': {'readonly': True}, + 'invoked_by_type': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'str'}, + 'invoked_by_type': {'key': 'invokedByType', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(PipelineRunInvokedBy, self).__init__(**kwargs) + self.name = None + self.id = None + self.invoked_by_type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_py3.py new file mode 100644 index 000000000000..33e0f23f24ac --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_py3.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PipelineRun(Model): + """Information about a pipeline run. 
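# --- All three PipelineRunInvokedBy fields above are readonly, so callers
# never build this model; it only arrives on responses. A hedged sketch,
# assuming a DataFactoryManagementClient instance and placeholder names:
def describe_invoker(client, resource_group, factory_name, run_id):
    # pipeline_runs.get returns a PipelineRun whose server-populated
    # 'invoked_by' field carries a PipelineRunInvokedBy.
    run = client.pipeline_runs.get(resource_group, factory_name, run_id)
    invoker = run.invoked_by
    if invoker is not None:
        print('run started by %s (%s)' % (invoker.name, invoker.invoked_by_type))
    return run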
+ + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar run_id: Identifier of a run. + :vartype run_id: str + :ivar run_group_id: Identifier that correlates all the recovery runs of a + pipeline run. + :vartype run_group_id: str + :ivar is_latest: Indicates if the recovered pipeline run is the latest in + its group. + :vartype is_latest: bool + :ivar pipeline_name: The pipeline name. + :vartype pipeline_name: str + :ivar parameters: The full or partial list of parameter name, value pair + used in the pipeline run. + :vartype parameters: dict[str, str] + :ivar invoked_by: Entity that started the pipeline run. + :vartype invoked_by: ~azure.mgmt.datafactory.models.PipelineRunInvokedBy + :ivar last_updated: The last updated timestamp for the pipeline run event + in ISO8601 format. + :vartype last_updated: datetime + :ivar run_start: The start time of a pipeline run in ISO8601 format. + :vartype run_start: datetime + :ivar run_end: The end time of a pipeline run in ISO8601 format. + :vartype run_end: datetime + :ivar duration_in_ms: The duration of a pipeline run. + :vartype duration_in_ms: int + :ivar status: The status of a pipeline run. + :vartype status: str + :ivar message: The message from a pipeline run. + :vartype message: str + """ + + _validation = { + 'run_id': {'readonly': True}, + 'run_group_id': {'readonly': True}, + 'is_latest': {'readonly': True}, + 'pipeline_name': {'readonly': True}, + 'parameters': {'readonly': True}, + 'invoked_by': {'readonly': True}, + 'last_updated': {'readonly': True}, + 'run_start': {'readonly': True}, + 'run_end': {'readonly': True}, + 'duration_in_ms': {'readonly': True}, + 'status': {'readonly': True}, + 'message': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'run_id': {'key': 'runId', 'type': 'str'}, + 'run_group_id': {'key': 'runGroupId', 'type': 'str'}, + 'is_latest': {'key': 'isLatest', 'type': 'bool'}, + 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{str}'}, + 'invoked_by': {'key': 'invokedBy', 'type': 'PipelineRunInvokedBy'}, + 'last_updated': {'key': 'lastUpdated', 'type': 'iso-8601'}, + 'run_start': {'key': 'runStart', 'type': 'iso-8601'}, + 'run_end': {'key': 'runEnd', 'type': 'iso-8601'}, + 'duration_in_ms': {'key': 'durationInMs', 'type': 'int'}, + 'status': {'key': 'status', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(PipelineRun, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.run_id = None + self.run_group_id = None + self.is_latest = None + self.pipeline_name = None + self.parameters = None + self.invoked_by = None + self.last_updated = None + self.run_start = None + self.run_end = None + self.duration_in_ms = None + self.status = None + self.message = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response.py index 382ca47e7c41..c4591c5467ba 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response.py +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response.py @@ -15,7 +15,9 @@ class PipelineRunsQueryResponse(Model): """A list pipeline runs. - :param value: List of pipeline runs. + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of pipeline runs. :type value: list[~azure.mgmt.datafactory.models.PipelineRun] :param continuation_token: The continuation token for getting the next page of results, if any remaining results exist, null otherwise. @@ -31,7 +33,7 @@ class PipelineRunsQueryResponse(Model): 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, } - def __init__(self, value, continuation_token=None): - super(PipelineRunsQueryResponse, self).__init__() - self.value = value - self.continuation_token = continuation_token + def __init__(self, **kwargs): + super(PipelineRunsQueryResponse, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.continuation_token = kwargs.get('continuation_token', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response_py3.py new file mode 100644 index 000000000000..fbc689ec1632 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response_py3.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PipelineRunsQueryResponse(Model): + """A list pipeline runs. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of pipeline runs. + :type value: list[~azure.mgmt.datafactory.models.PipelineRun] + :param continuation_token: The continuation token for getting the next + page of results, if any remaining results exist, null otherwise. 
+ :type continuation_token: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[PipelineRun]'}, + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + } + + def __init__(self, *, value, continuation_token: str=None, **kwargs) -> None: + super(PipelineRunsQueryResponse, self).__init__(**kwargs) + self.value = value + self.continuation_token = continuation_token diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings.py index 0055d320b598..5a261d8fea84 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings.py @@ -44,10 +44,10 @@ class PolybaseSettings(Model): 'use_type_default': {'key': 'useTypeDefault', 'type': 'object'}, } - def __init__(self, additional_properties=None, reject_type=None, reject_value=None, reject_sample_value=None, use_type_default=None): - super(PolybaseSettings, self).__init__() - self.additional_properties = additional_properties - self.reject_type = reject_type - self.reject_value = reject_value - self.reject_sample_value = reject_sample_value - self.use_type_default = use_type_default + def __init__(self, **kwargs): + super(PolybaseSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.reject_type = kwargs.get('reject_type', None) + self.reject_value = kwargs.get('reject_value', None) + self.reject_sample_value = kwargs.get('reject_sample_value', None) + self.use_type_default = kwargs.get('use_type_default', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings_py3.py new file mode 100644 index 000000000000..baae78b14c5f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings_py3.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PolybaseSettings(Model): + """PolyBase settings. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param reject_type: Reject type. Possible values include: 'value', + 'percentage' + :type reject_type: str or + ~azure.mgmt.datafactory.models.PolybaseSettingsRejectType + :param reject_value: Specifies the value or the percentage of rows that + can be rejected before the query fails. Type: number (or Expression with + resultType number), minimum: 0. + :type reject_value: object + :param reject_sample_value: Determines the number of rows to attempt to + retrieve before the PolyBase recalculates the percentage of rejected rows. + Type: integer (or Expression with resultType integer), minimum: 0. 
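# --- The PipelineRunsQueryResponse model above chains pages through
# 'continuation_token'. A minimal drain loop, assuming 'fetch_page' is any
# caller-supplied callable (hypothetical here) that takes a token and returns
# a PipelineRunsQueryResponse, e.g. a wrapper over pipeline_runs.query_by_factory:
def all_pipeline_runs(fetch_page):
    runs, token = [], None
    while True:
        page = fetch_page(token)
        runs.extend(page.value)          # 'value' is the required PipelineRun list
        token = page.continuation_token  # null once no results remain
        if not token:
            break
    return runs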
+ :type reject_sample_value: object + :param use_type_default: Specifies how to handle missing values in + delimited text files when PolyBase retrieves data from the text file. + Type: boolean (or Expression with resultType boolean). + :type use_type_default: object + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'reject_type': {'key': 'rejectType', 'type': 'str'}, + 'reject_value': {'key': 'rejectValue', 'type': 'object'}, + 'reject_sample_value': {'key': 'rejectSampleValue', 'type': 'object'}, + 'use_type_default': {'key': 'useTypeDefault', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, reject_type=None, reject_value=None, reject_sample_value=None, use_type_default=None, **kwargs) -> None: + super(PolybaseSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.reject_type = reject_type + self.reject_value = reject_value + self.reject_sample_value = reject_sample_value + self.use_type_default = use_type_default diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service.py index 3203c1f12658..f8ce5bd0803e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service.py @@ -15,6 +15,8 @@ class PostgreSqlLinkedService(LinkedService): """Linked service for PostgreSQL data source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,9 +31,9 @@ class PostgreSqlLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param connection_string: The connection string. + :param connection_string: Required. The connection string. :type connection_string: ~azure.mgmt.datafactory.models.SecretBase :param password: The Azure key vault secret reference of password in connection string. 
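# --- A minimal sketch of the reject thresholds on the PolybaseSettings model
# above; the values are placeholders, and the settings object is normally
# attached to a SQL Data Warehouse copy sink (not shown in this section).
from azure.mgmt.datafactory.models import PolybaseSettings

polybase = PolybaseSettings(
    reject_type='percentage',   # or 'value' (PolybaseSettingsRejectType)
    reject_value=5.0,           # fail once more than 5% of rows are rejected
    reject_sample_value=1000,   # recalculate the percentage every 1000 rows
    use_type_default=True,      # fill missing delimited-text values with type defaults
)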
@@ -60,9 +62,9 @@ class PostgreSqlLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, connection_string, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, password=None, encrypted_credential=None): - super(PostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.connection_string = connection_string - self.password = password - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(PostgreSqlLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'PostgreSql' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service_py3.py new file mode 100644 index 000000000000..0221aa620064 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class PostgreSqlLinkedService(LinkedService): + """Linked service for PostgreSQL data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. + :type connection_string: ~azure.mgmt.datafactory.models.SecretBase + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'SecretBase'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(PostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'PostgreSql' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source.py index d0d7bbf3d398..51dd25b25c60 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source.py @@ -15,6 +15,8 @@ class PostgreSqlSource(CopySource): """A copy activity source for PostgreSQL databases. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class PostgreSqlSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType string). 
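# --- Only 'connection_string' is required on the PostgreSQL linked service
# above, and it is a SecretBase rather than a plain string. A sketch assuming
# the SecureString secret type from the same models package (not shown in
# this section) and a placeholder connection string:
from azure.mgmt.datafactory.models import PostgreSqlLinkedService, SecureString

pg_linked_service = PostgreSqlLinkedService(
    connection_string=SecureString(
        value='host=pg.example.com port=5432 dbname=sales user=etl'),
    description='PostgreSQL reached over the default integration runtime',
)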
@@ -49,7 +51,7 @@ class PostgreSqlSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(PostgreSqlSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'PostgreSqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source_py3.py new file mode 100644 index 000000000000..8aa12e4bdf8d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class PostgreSqlSource(CopySource): + """A copy activity source for PostgreSQL databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'PostgreSqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset.py index 03716ef39975..031a2479815b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset.py @@ -15,6 +15,8 @@ class PostgreSqlTableDataset(Dataset): """The PostgreSQL table dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class PostgreSqlTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class PostgreSqlTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. 
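# --- A minimal PostgreSqlSource sketch; the query and retry values are
# placeholders, and 'source_retry_wait' follows the timespan pattern
# documented above.
from azure.mgmt.datafactory.models import PostgreSqlSource

pg_source = PostgreSqlSource(
    query='SELECT id, amount FROM public.orders',
    source_retry_count=3,
    source_retry_wait='00:00:30',
)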
@@ -72,9 +74,9 @@ class PostgreSqlTableDataset(Dataset): 'postgre_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, postgre_sql_table_dataset_schema=None): - super(PostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name - self.table = table - self.postgre_sql_table_dataset_schema = postgre_sql_table_dataset_schema + def __init__(self, **kwargs): + super(PostgreSqlTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.postgre_sql_table_dataset_schema = kwargs.get('postgre_sql_table_dataset_schema', None) self.type = 'PostgreSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset_py3.py new file mode 100644 index 000000000000..8adb7bd409ea --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_table_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class PostgreSqlTableDataset(Dataset): + """The PostgreSQL table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. 
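# --- Per the retirement note above, new code should prefer the split
# schema/table properties over 'table_name'. A sketch assuming
# LinkedServiceReference from the same models package and a linked service
# named 'PostgreSqlLS' (both placeholders):
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, PostgreSqlTableDataset)

pg_dataset = PostgreSqlTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='PostgreSqlLS'),
    postgre_sql_table_dataset_schema='public',  # serialized to typeProperties.schema
    table='orders',                             # serialized to typeProperties.table
)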
+ :type table_name: object + :param table: The PostgreSQL table name. Type: string (or Expression with + resultType string). + :type table: object + :param postgre_sql_table_dataset_schema: The PostgreSQL schema name. Type: + string (or Expression with resultType string). + :type postgre_sql_table_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'postgre_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, postgre_sql_table_dataset_schema=None, **kwargs) -> None: + super(PostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.table = table + self.postgre_sql_table_dataset_schema = postgre_sql_table_dataset_schema + self.type = 'PostgreSqlTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service.py index b209ac5cc3c1..21f18f07b262 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service.py @@ -15,6 +15,8 @@ class PrestoLinkedService(LinkedService): """Presto server linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,20 +31,22 @@ class PrestoLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param host: The IP address or host name of the Presto server. (i.e. - 192.168.222.160) + :param host: Required. The IP address or host name of the Presto server. + (i.e. 192.168.222.160) :type host: object - :param server_version: The version of the Presto server. (i.e. 0.148-t) + :param server_version: Required. The version of the Presto server. (i.e. + 0.148-t) :type server_version: object - :param catalog: The catalog context for all request against the server. + :param catalog: Required. The catalog context for all request against the + server. 
:type catalog: object :param port: The TCP port that the Presto server uses to listen for client connections. The default value is 8080. :type port: object - :param authentication_type: The authentication mechanism used to connect - to the Presto server. Possible values include: 'Anonymous', 'LDAP' + :param authentication_type: Required. The authentication mechanism used to + connect to the Presto server. Possible values include: 'Anonymous', 'LDAP' :type authentication_type: str or ~azure.mgmt.datafactory.models.PrestoAuthenticationType :param username: The user name used to connect to the Presto server. @@ -109,20 +113,20 @@ class PrestoLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, host, server_version, catalog, authentication_type, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, port=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, time_zone_id=None, encrypted_credential=None): - super(PrestoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.host = host - self.server_version = server_version - self.catalog = catalog - self.port = port - self.authentication_type = authentication_type - self.username = username - self.password = password - self.enable_ssl = enable_ssl - self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch - self.allow_self_signed_server_cert = allow_self_signed_server_cert - self.time_zone_id = time_zone_id - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(PrestoLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.server_version = kwargs.get('server_version', None) + self.catalog = kwargs.get('catalog', None) + self.port = kwargs.get('port', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) + self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + self.time_zone_id = kwargs.get('time_zone_id', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'Presto' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service_py3.py new file mode 100644 index 000000000000..75ab99d5a58f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service_py3.py @@ -0,0 +1,132 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class PrestoLinkedService(LinkedService): + """Presto server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The IP address or host name of the Presto server. + (i.e. 192.168.222.160) + :type host: object + :param server_version: Required. The version of the Presto server. (i.e. + 0.148-t) + :type server_version: object + :param catalog: Required. The catalog context for all request against the + server. + :type catalog: object + :param port: The TCP port that the Presto server uses to listen for client + connections. The default value is 8080. + :type port: object + :param authentication_type: Required. The authentication mechanism used to + connect to the Presto server. Possible values include: 'Anonymous', 'LDAP' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.PrestoAuthenticationType + :param username: The user name used to connect to the Presto server. + :type username: object + :param password: The password corresponding to the user name. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a + CA-issued SSL certificate name to match the host name of the server when + connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + :type allow_self_signed_server_cert: object + :param time_zone_id: The local time zone used by the connection. Valid + values for this option are specified in the IANA Time Zone Database. The + default value is the system time zone. + :type time_zone_id: object + :param encrypted_credential: The encrypted credential used for + authentication. 
Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'server_version': {'required': True}, + 'catalog': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'server_version': {'key': 'typeProperties.serverVersion', 'type': 'object'}, + 'catalog': {'key': 'typeProperties.catalog', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'time_zone_id': {'key': 'typeProperties.timeZoneID', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, server_version, catalog, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, time_zone_id=None, encrypted_credential=None, **kwargs) -> None: + super(PrestoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.server_version = server_version + self.catalog = catalog + self.port = port + self.authentication_type = authentication_type + self.username = username + self.password = password + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.time_zone_id = time_zone_id + self.encrypted_credential = encrypted_credential + self.type = 'Presto' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset.py index ca98e4488fb9..eb80e1a97750 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset.py @@ -15,6 +15,8 @@ 
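# --- A sketch of the Presto linked service defined above, supplying its four
# required properties plus LDAP credentials; SecureString is assumed from the
# same models package, and all values are placeholders.
from azure.mgmt.datafactory.models import PrestoLinkedService, SecureString

presto_linked_service = PrestoLinkedService(
    host='192.168.222.160',
    server_version='0.148-t',
    catalog='hive',
    authentication_type='LDAP',   # or 'Anonymous'
    username='analyst',
    password=SecureString(value='ldap-password-placeholder'),
    enable_ssl=True,
)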
class PrestoObjectDataset(Dataset): """Presto server dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class PrestoObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class PrestoObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. @@ -72,9 +74,9 @@ class PrestoObjectDataset(Dataset): 'presto_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, presto_object_dataset_schema=None): - super(PrestoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name - self.table = table - self.presto_object_dataset_schema = presto_object_dataset_schema + def __init__(self, **kwargs): + super(PrestoObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.presto_object_dataset_schema = kwargs.get('presto_object_dataset_schema', None) self.type = 'PrestoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset_py3.py new file mode 100644 index 000000000000..e3bd2f7e36aa --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class PrestoObjectDataset(Dataset): + """Presto server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. 
Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The table name of the Presto. Type: string (or Expression + with resultType string). + :type table: object + :param presto_object_dataset_schema: The schema name of the Presto. Type: + string (or Expression with resultType string). + :type presto_object_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'presto_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, presto_object_dataset_schema=None, **kwargs) -> None: + super(PrestoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.table = table + self.presto_object_dataset_schema = presto_object_dataset_schema + self.type = 'PrestoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source.py index 0c35009501f0..9b7274011265 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source.py @@ -15,6 +15,8 @@ class PrestoSource(CopySource): """A copy activity Presto server source. + All required parameters must be populated in order to send to Azure. 
+ :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class PrestoSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). @@ -49,7 +51,7 @@ class PrestoSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(PrestoSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'PrestoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source_py3.py new file mode 100644 index 000000000000..47fe3eb5f790 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class PrestoSource(CopySource): + """A copy activity Presto server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
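# --- A Presto dataset/source pair matching the models above; the
# LinkedServiceReference import is assumed from the same models package, and
# the schema, table, and query values are placeholders.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, PrestoObjectDataset, PrestoSource)

presto_dataset = PrestoObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='PrestoLS'),
    presto_object_dataset_schema='default',  # typeProperties.schema
    table='events',                          # typeProperties.table
)
presto_source = PrestoSource(query='SELECT * FROM default.events LIMIT 10')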
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'PrestoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service.py index a5d68a8db088..6353c1cda96a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service.py @@ -15,6 +15,8 @@ class QuickBooksLinkedService(LinkedService): """QuickBooks server linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,21 +31,25 @@ class QuickBooksLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param endpoint: The endpoint of the QuickBooks server. (i.e. + :param endpoint: Required. The endpoint of the QuickBooks server. (i.e. quickbooks.api.intuit.com) :type endpoint: object - :param company_id: The company ID of the QuickBooks company to authorize. + :param company_id: Required. The company ID of the QuickBooks company to + authorize. :type company_id: object - :param consumer_key: The consumer key for OAuth 1.0 authentication. + :param consumer_key: Required. The consumer key for OAuth 1.0 + authentication. :type consumer_key: object - :param consumer_secret: The consumer secret for OAuth 1.0 authentication. + :param consumer_secret: Required. The consumer secret for OAuth 1.0 + authentication. :type consumer_secret: ~azure.mgmt.datafactory.models.SecretBase - :param access_token: The access token for OAuth 1.0 authentication. - :type access_token: ~azure.mgmt.datafactory.models.SecretBase - :param access_token_secret: The access token secret for OAuth 1.0 + :param access_token: Required. The access token for OAuth 1.0 authentication. + :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param access_token_secret: Required. The access token secret for OAuth + 1.0 authentication. :type access_token_secret: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. 
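# --- All six OAuth 1.0 properties marked Required above must be supplied;
# the secret-valued ones take SecretBase instances (SecureString assumed
# here, with placeholder values throughout).
from azure.mgmt.datafactory.models import QuickBooksLinkedService, SecureString

quickbooks_linked_service = QuickBooksLinkedService(
    endpoint='quickbooks.api.intuit.com',
    company_id='1234567890',
    consumer_key='consumer-key-placeholder',
    consumer_secret=SecureString(value='consumer-secret-placeholder'),
    access_token=SecureString(value='access-token-placeholder'),
    access_token_secret=SecureString(value='access-token-secret-placeholder'),
    use_encrypted_endpoints=True,
)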
@@ -81,14 +87,14 @@ class QuickBooksLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, endpoint, company_id, consumer_key, consumer_secret, access_token, access_token_secret, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, use_encrypted_endpoints=None, encrypted_credential=None): - super(QuickBooksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.endpoint = endpoint - self.company_id = company_id - self.consumer_key = consumer_key - self.consumer_secret = consumer_secret - self.access_token = access_token - self.access_token_secret = access_token_secret - self.use_encrypted_endpoints = use_encrypted_endpoints - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(QuickBooksLinkedService, self).__init__(**kwargs) + self.endpoint = kwargs.get('endpoint', None) + self.company_id = kwargs.get('company_id', None) + self.consumer_key = kwargs.get('consumer_key', None) + self.consumer_secret = kwargs.get('consumer_secret', None) + self.access_token = kwargs.get('access_token', None) + self.access_token_secret = kwargs.get('access_token_secret', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'QuickBooks' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service_py3.py new file mode 100644 index 000000000000..be12fc5cfba5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service_py3.py @@ -0,0 +1,100 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class QuickBooksLinkedService(LinkedService): + """QuickBooks server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of the QuickBooks server. (i.e. + quickbooks.api.intuit.com) + :type endpoint: object + :param company_id: Required. 
The company ID of the QuickBooks company to + authorize. + :type company_id: object + :param consumer_key: Required. The consumer key for OAuth 1.0 + authentication. + :type consumer_key: object + :param consumer_secret: Required. The consumer secret for OAuth 1.0 + authentication. + :type consumer_secret: ~azure.mgmt.datafactory.models.SecretBase + :param access_token: Required. The access token for OAuth 1.0 + authentication. + :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param access_token_secret: Required. The access token secret for OAuth + 1.0 authentication. + :type access_token_secret: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'company_id': {'required': True}, + 'consumer_key': {'required': True}, + 'consumer_secret': {'required': True}, + 'access_token': {'required': True}, + 'access_token_secret': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'company_id': {'key': 'typeProperties.companyId', 'type': 'object'}, + 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'object'}, + 'consumer_secret': {'key': 'typeProperties.consumerSecret', 'type': 'SecretBase'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'access_token_secret': {'key': 'typeProperties.accessTokenSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, endpoint, company_id, consumer_key, consumer_secret, access_token, access_token_secret, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, use_encrypted_endpoints=None, encrypted_credential=None, **kwargs) -> None: + super(QuickBooksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.endpoint = endpoint + self.company_id = company_id + self.consumer_key = consumer_key + self.consumer_secret = consumer_secret + self.access_token = access_token + self.access_token_secret = access_token_secret + self.use_encrypted_endpoints = use_encrypted_endpoints + self.encrypted_credential = encrypted_credential + self.type = 'QuickBooks' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset.py index 156255115fe7..73446d0ed938 100644 --- 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset.py @@ -15,6 +15,8 @@ class QuickBooksObjectDataset(Dataset): """QuickBooks server dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class QuickBooksObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class QuickBooksObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -64,7 +66,7 @@ class QuickBooksObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): - super(QuickBooksObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name + def __init__(self, **kwargs): + super(QuickBooksObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) self.type = 'QuickBooksObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset_py3.py new file mode 100644 index 000000000000..65f67d2b20af --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class QuickBooksObjectDataset(Dataset): + """QuickBooks server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. 
Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(QuickBooksObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'QuickBooksObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source.py index cce839ce513f..cce0a026ae5a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source.py @@ -15,6 +15,8 @@ class QuickBooksSource(CopySource): """A copy activity QuickBooks server source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class QuickBooksSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
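For illustration (outside the patch), both the kwargs-based and the py3 keyword-only constructors accept the same keyword form; 'query' stays optional while 'type' is fixed by the class itself. The query text is a hypothetical example:

    from azure.mgmt.datafactory.models import QuickBooksSource

    source = QuickBooksSource(query='SELECT * FROM Bill')  # hypothetical query
    assert source.type == 'QuickBooksSource'               # constant set by __init__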
@@ -49,7 +51,7 @@ class QuickBooksSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(QuickBooksSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'QuickBooksSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source_py3.py new file mode 100644 index 000000000000..a00f35d4e1c1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class QuickBooksSource(CopySource): + """A copy activity QuickBooks server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'QuickBooksSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule.py index a36216b08620..f23d452392b0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule.py @@ -40,11 +40,11 @@ class RecurrenceSchedule(Model): 'monthly_occurrences': {'key': 'monthlyOccurrences', 'type': '[RecurrenceScheduleOccurrence]'}, } - def __init__(self, additional_properties=None, minutes=None, hours=None, week_days=None, month_days=None, monthly_occurrences=None): - super(RecurrenceSchedule, self).__init__() - self.additional_properties = additional_properties - self.minutes = minutes - self.hours = hours - self.week_days = week_days - self.month_days = month_days - self.monthly_occurrences = monthly_occurrences + def __init__(self, **kwargs): + super(RecurrenceSchedule, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.minutes = kwargs.get('minutes', None) + self.hours = kwargs.get('hours', None) + self.week_days = kwargs.get('week_days', None) + self.month_days = kwargs.get('month_days', None) + self.monthly_occurrences = kwargs.get('monthly_occurrences', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence.py index ef87c271fe7a..bbbe1fa28f17 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence.py @@ -31,8 +31,8 @@ class RecurrenceScheduleOccurrence(Model): 'occurrence': {'key': 'occurrence', 'type': 'int'}, } - def __init__(self, additional_properties=None, day=None, occurrence=None): - super(RecurrenceScheduleOccurrence, self).__init__() - self.additional_properties = additional_properties - self.day = day - self.occurrence = occurrence + def __init__(self, **kwargs): + super(RecurrenceScheduleOccurrence, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.day = kwargs.get('day', None) + self.occurrence = kwargs.get('occurrence', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence_py3.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence_py3.py new file mode 100644 index 000000000000..10aea1f00163 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence_py3.py @@ -0,0 +1,38 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RecurrenceScheduleOccurrence(Model): + """The recurrence schedule occurrence. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param day: The day of the week. Possible values include: 'Sunday', + 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday' + :type day: str or ~azure.mgmt.datafactory.models.DayOfWeek + :param occurrence: The occurrence. + :type occurrence: int + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'day': {'key': 'day', 'type': 'DayOfWeek'}, + 'occurrence': {'key': 'occurrence', 'type': 'int'}, + } + + def __init__(self, *, additional_properties=None, day=None, occurrence: int=None, **kwargs) -> None: + super(RecurrenceScheduleOccurrence, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.day = day + self.occurrence = occurrence diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_py3.py new file mode 100644 index 000000000000..fbe44fa3f021 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_py3.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RecurrenceSchedule(Model): + """The recurrence schedule. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param minutes: The minutes. + :type minutes: list[int] + :param hours: The hours. + :type hours: list[int] + :param week_days: The days of the week. + :type week_days: list[str or ~azure.mgmt.datafactory.models.DaysOfWeek] + :param month_days: The month days. + :type month_days: list[int] + :param monthly_occurrences: The monthly occurrences. 
+ :type monthly_occurrences: + list[~azure.mgmt.datafactory.models.RecurrenceScheduleOccurrence] + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'minutes': {'key': 'minutes', 'type': '[int]'}, + 'hours': {'key': 'hours', 'type': '[int]'}, + 'week_days': {'key': 'weekDays', 'type': '[DaysOfWeek]'}, + 'month_days': {'key': 'monthDays', 'type': '[int]'}, + 'monthly_occurrences': {'key': 'monthlyOccurrences', 'type': '[RecurrenceScheduleOccurrence]'}, + } + + def __init__(self, *, additional_properties=None, minutes=None, hours=None, week_days=None, month_days=None, monthly_occurrences=None, **kwargs) -> None: + super(RecurrenceSchedule, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.minutes = minutes + self.hours = hours + self.week_days = week_days + self.month_days = month_days + self.monthly_occurrences = monthly_occurrences diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings.py index 8d5aba7e3ed0..a2e3bddb9425 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings.py @@ -15,13 +15,15 @@ class RedirectIncompatibleRowSettings(Model): """Redirect incompatible row settings. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param linked_service_name: Name of the Azure Storage, Storage SAS, or - Azure Data Lake Store linked service used for redirecting incompatible - row. Must be specified if redirectIncompatibleRowSettings is specified. - Type: string (or Expression with resultType string). + :param linked_service_name: Required. Name of the Azure Storage, Storage + SAS, or Azure Data Lake Store linked service used for redirecting + incompatible row. Must be specified if redirectIncompatibleRowSettings is + specified. Type: string (or Expression with resultType string). :type linked_service_name: object :param path: The path for storing the redirect incompatible row data. Type: string (or Expression with resultType string). 
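A brief sketch of how these settings might be constructed (the names are hypothetical); note that, per the docstring above, linked_service_name here is a plain name of type object, not a LinkedServiceReference:

    from azure.mgmt.datafactory.models import RedirectIncompatibleRowSettings

    redirect_settings = RedirectIncompatibleRowSettings(
        linked_service_name='MyAzureStorageLinkedService',  # hypothetical linked service name
        path='copyactivity/incompatiblerows',               # hypothetical storage path
    )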
@@ -38,8 +40,8 @@ class RedirectIncompatibleRowSettings(Model): 'path': {'key': 'path', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, path=None): - super(RedirectIncompatibleRowSettings, self).__init__() - self.additional_properties = additional_properties - self.linked_service_name = linked_service_name - self.path = path + def __init__(self, **kwargs): + super(RedirectIncompatibleRowSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.linked_service_name = kwargs.get('linked_service_name', None) + self.path = kwargs.get('path', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings_py3.py new file mode 100644 index 000000000000..b47878ef4354 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings_py3.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RedirectIncompatibleRowSettings(Model): + """Redirect incompatible row settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param linked_service_name: Required. Name of the Azure Storage, Storage + SAS, or Azure Data Lake Store linked service used for redirecting + incompatible row. Must be specified if redirectIncompatibleRowSettings is + specified. Type: string (or Expression with resultType string). + :type linked_service_name: object + :param path: The path for storing the redirect incompatible row data. + Type: string (or Expression with resultType string). 
+ :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'object'}, + 'path': {'key': 'path', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, path=None, **kwargs) -> None: + super(RedirectIncompatibleRowSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.linked_service_name = linked_service_name + self.path = path diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings.py index 46552265701d..7114b85e10db 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings.py @@ -18,15 +18,17 @@ class RedshiftUnloadSettings(Model): will be unloaded into S3 first and then copied into the targeted sink from the interim S3. - :param s3_linked_service_name: The name of the Amazon S3 linked service - which will be used for the unload operation when copying from the Amazon - Redshift source. + All required parameters must be populated in order to send to Azure. + + :param s3_linked_service_name: Required. The name of the Amazon S3 linked + service which will be used for the unload operation when copying from the + Amazon Redshift source. :type s3_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference - :param bucket_name: The bucket of the interim Amazon S3 which will be used - to store the unloaded data from Amazon Redshift source. The bucket must be - in the same region as the Amazon Redshift source. Type: string (or - Expression with resultType string). + :param bucket_name: Required. The bucket of the interim Amazon S3 which + will be used to store the unloaded data from Amazon Redshift source. The + bucket must be in the same region as the Amazon Redshift source. Type: + string (or Expression with resultType string). :type bucket_name: object """ @@ -40,7 +42,7 @@ class RedshiftUnloadSettings(Model): 'bucket_name': {'key': 'bucketName', 'type': 'object'}, } - def __init__(self, s3_linked_service_name, bucket_name): - super(RedshiftUnloadSettings, self).__init__() - self.s3_linked_service_name = s3_linked_service_name - self.bucket_name = bucket_name + def __init__(self, **kwargs): + super(RedshiftUnloadSettings, self).__init__(**kwargs) + self.s3_linked_service_name = kwargs.get('s3_linked_service_name', None) + self.bucket_name = kwargs.get('bucket_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings_py3.py new file mode 100644 index 000000000000..a40d014a32f9 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings_py3.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RedshiftUnloadSettings(Model): + """The Amazon S3 settings needed for the interim Amazon S3 when copying from + Amazon Redshift with unload. With this, data from Amazon Redshift source + will be unloaded into S3 first and then copied into the targeted sink from + the interim S3. + + All required parameters must be populated in order to send to Azure. + + :param s3_linked_service_name: Required. The name of the Amazon S3 linked + service which will be used for the unload operation when copying from the + Amazon Redshift source. + :type s3_linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param bucket_name: Required. The bucket of the interim Amazon S3 which + will be used to store the unloaded data from Amazon Redshift source. The + bucket must be in the same region as the Amazon Redshift source. Type: + string (or Expression with resultType string). + :type bucket_name: object + """ + + _validation = { + 's3_linked_service_name': {'required': True}, + 'bucket_name': {'required': True}, + } + + _attribute_map = { + 's3_linked_service_name': {'key': 's3LinkedServiceName', 'type': 'LinkedServiceReference'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + } + + def __init__(self, *, s3_linked_service_name, bucket_name, **kwargs) -> None: + super(RedshiftUnloadSettings, self).__init__(**kwargs) + self.s3_linked_service_name = s3_linked_service_name + self.bucket_name = bucket_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source.py index aa33cb06da39..2450f31222df 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source.py @@ -15,6 +15,8 @@ class RelationalSource(CopySource): """A copy activity source for various relational databases. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class RelationalSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType string). 
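As a hedged sketch (not from the patch): since 'query' is typed object, it can presumably carry either a literal string or an Expression-shaped dict, as the "string (or Expression with resultType string)" docstrings suggest; the table name and expression below are hypothetical:

    from azure.mgmt.datafactory.models import RelationalSource

    literal_source = RelationalSource(query='SELECT * FROM dbo.Orders')  # hypothetical table
    dynamic_source = RelationalSource(query={
        'value': "@concat('SELECT * FROM ', dataset().tableName)",       # hypothetical expression
        'type': 'Expression',
    })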
@@ -49,7 +51,7 @@ class RelationalSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(RelationalSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'RelationalSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source_py3.py new file mode 100644 index 000000000000..f88383cbd729 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class RelationalSource(CopySource): + """A copy activity source for various relational databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'RelationalSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset.py index ea0c2bc9d744..e5dd2e0786c8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset.py @@ -15,6 +15,8 @@ class RelationalTableDataset(Dataset): """The relational table dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class RelationalTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class RelationalTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: The relational table name. Type: string (or Expression with resultType string). 
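A construction sketch under the same assumptions as the earlier examples (the reference and table names are hypothetical); unlike the redirect settings above, the dataset's required linked_service_name is a LinkedServiceReference:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, RelationalTableDataset)

    dataset = RelationalTableDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='MySqlServerLinkedService'),  # hypothetical reference
        table_name='dbo.Orders',                         # hypothetical table
    )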
@@ -64,7 +66,7 @@ class RelationalTableDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): - super(RelationalTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name + def __init__(self, **kwargs): + super(RelationalTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) self.type = 'RelationalTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset_py3.py new file mode 100644 index 000000000000..3c85d95f8033 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class RelationalTableDataset(Dataset): + """The relational table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The relational table name. Type: string (or Expression + with resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(RelationalTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'RelationalTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource.py index 3b5bf3bde215..8de6a70ecc99 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource.py @@ -18,6 +18,8 @@ class RerunTriggerResource(SubResource): Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. + :ivar id: The resource identifier. :vartype id: str :ivar name: The resource name. @@ -26,7 +28,7 @@ class RerunTriggerResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param properties: Properties of the rerun trigger. + :param properties: Required. Properties of the rerun trigger. :type properties: ~azure.mgmt.datafactory.models.RerunTumblingWindowTrigger """ @@ -47,6 +49,6 @@ class RerunTriggerResource(SubResource): 'properties': {'key': 'properties', 'type': 'RerunTumblingWindowTrigger'}, } - def __init__(self, properties): - super(RerunTriggerResource, self).__init__() - self.properties = properties + def __init__(self, **kwargs): + super(RerunTriggerResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_py3.py new file mode 100644 index 000000000000..19814ad0d76f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_py3.py @@ -0,0 +1,54 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .sub_resource_py3 import SubResource + + +class RerunTriggerResource(SubResource): + """RerunTrigger resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Properties of the rerun trigger. + :type properties: + ~azure.mgmt.datafactory.models.RerunTumblingWindowTrigger + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'RerunTumblingWindowTrigger'}, + } + + def __init__(self, *, properties, **kwargs) -> None: + super(RerunTriggerResource, self).__init__(**kwargs) + self.properties = properties diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger.py index 0e62cee1c262..8c5ca2d67f3c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger.py @@ -19,6 +19,8 @@ class RerunTumblingWindowTrigger(Trigger): Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -32,18 +34,18 @@ class RerunTumblingWindowTrigger(Trigger): :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param parent_trigger: The parent trigger reference. :type parent_trigger: object - :param requested_start_time: The start time for the time period for which - restatement is initiated. Only UTC time is currently supported. + :param requested_start_time: Required. The start time for the time period + for which restatement is initiated. Only UTC time is currently supported. :type requested_start_time: datetime - :param requested_end_time: The end time for the time period for which - restatement is initiated. Only UTC time is currently supported. + :param requested_end_time: Required. The end time for the time period for + which restatement is initiated. Only UTC time is currently supported. :type requested_end_time: datetime - :param max_concurrency: The max number of parallel time windows (ready for - execution) for which a rerun is triggered. + :param max_concurrency: Required. The max number of parallel time windows + (ready for execution) for which a rerun is triggered. 
:type max_concurrency: int """ @@ -67,10 +69,10 @@ class RerunTumblingWindowTrigger(Trigger): 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, } - def __init__(self, requested_start_time, requested_end_time, max_concurrency, additional_properties=None, description=None, annotations=None, parent_trigger=None): - super(RerunTumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations) - self.parent_trigger = parent_trigger - self.requested_start_time = requested_start_time - self.requested_end_time = requested_end_time - self.max_concurrency = max_concurrency + def __init__(self, **kwargs): + super(RerunTumblingWindowTrigger, self).__init__(**kwargs) + self.parent_trigger = kwargs.get('parent_trigger', None) + self.requested_start_time = kwargs.get('requested_start_time', None) + self.requested_end_time = kwargs.get('requested_end_time', None) + self.max_concurrency = kwargs.get('max_concurrency', None) self.type = 'RerunTumblingWindowTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters.py index 50d6a0b91966..4b87f070b6be 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters.py @@ -15,14 +15,16 @@ class RerunTumblingWindowTriggerActionParameters(Model): """Rerun tumbling window trigger Parameters. - :param start_time: The start time for the time period for which + All required parameters must be populated in order to send to Azure. + + :param start_time: Required. The start time for the time period for which restatement is initiated. Only UTC time is currently supported. :type start_time: datetime - :param end_time: The end time for the time period for which restatement is - initiated. Only UTC time is currently supported. + :param end_time: Required. The end time for the time period for which + restatement is initiated. Only UTC time is currently supported. :type end_time: datetime - :param max_concurrency: The max number of parallel time windows (ready for - execution) for which a rerun is triggered. + :param max_concurrency: Required. The max number of parallel time windows + (ready for execution) for which a rerun is triggered. 
:type max_concurrency: int """ @@ -38,8 +40,8 @@ class RerunTumblingWindowTriggerActionParameters(Model): 'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'}, } - def __init__(self, start_time, end_time, max_concurrency): - super(RerunTumblingWindowTriggerActionParameters, self).__init__() - self.start_time = start_time - self.end_time = end_time - self.max_concurrency = max_concurrency + def __init__(self, **kwargs): + super(RerunTumblingWindowTriggerActionParameters, self).__init__(**kwargs) + self.start_time = kwargs.get('start_time', None) + self.end_time = kwargs.get('end_time', None) + self.max_concurrency = kwargs.get('max_concurrency', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters_py3.py new file mode 100644 index 000000000000..6fadecca588b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters_py3.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RerunTumblingWindowTriggerActionParameters(Model): + """Rerun tumbling window trigger Parameters. + + All required parameters must be populated in order to send to Azure. + + :param start_time: Required. The start time for the time period for which + restatement is initiated. Only UTC time is currently supported. + :type start_time: datetime + :param end_time: Required. The end time for the time period for which + restatement is initiated. Only UTC time is currently supported. + :type end_time: datetime + :param max_concurrency: Required. The max number of parallel time windows + (ready for execution) for which a rerun is triggered. + :type max_concurrency: int + """ + + _validation = { + 'start_time': {'required': True}, + 'end_time': {'required': True}, + 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + } + + _attribute_map = { + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, + 'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'}, + } + + def __init__(self, *, start_time, end_time, max_concurrency: int, **kwargs) -> None: + super(RerunTumblingWindowTriggerActionParameters, self).__init__(**kwargs) + self.start_time = start_time + self.end_time = end_time + self.max_concurrency = max_concurrency diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_py3.py new file mode 100644 index 000000000000..4a7a20759c1b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_py3.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .trigger_py3 import Trigger + + +class RerunTumblingWindowTrigger(Trigger): + """Trigger that schedules pipeline reruns for all fixed time interval windows + from a requested start time to requested end time. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param parent_trigger: The parent trigger reference. + :type parent_trigger: object + :param requested_start_time: Required. The start time for the time period + for which restatement is initiated. Only UTC time is currently supported. + :type requested_start_time: datetime + :param requested_end_time: Required. The end time for the time period for + which restatement is initiated. Only UTC time is currently supported. + :type requested_end_time: datetime + :param max_concurrency: Required. The max number of parallel time windows + (ready for execution) for which a rerun is triggered. 
+ :type max_concurrency: int + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'requested_start_time': {'required': True}, + 'requested_end_time': {'required': True}, + 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, + 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, + 'requested_end_time': {'key': 'typeProperties.requestedEndTime', 'type': 'iso-8601'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + } + + def __init__(self, *, requested_start_time, requested_end_time, max_concurrency: int, additional_properties=None, description: str=None, annotations=None, parent_trigger=None, **kwargs) -> None: + super(RerunTumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) + self.parent_trigger = parent_trigger + self.requested_start_time = requested_start_time + self.requested_end_time = requested_end_time + self.max_concurrency = max_concurrency + self.type = 'RerunTumblingWindowTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource.py index f68a72080d53..f6b2d7d3b512 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource.py @@ -48,11 +48,11 @@ class Resource(Model): 'e_tag': {'key': 'eTag', 'type': 'str'}, } - def __init__(self, location=None, tags=None): - super(Resource, self).__init__() + def __init__(self, **kwargs): + super(Resource, self).__init__(**kwargs) self.id = None self.name = None self.type = None - self.location = location - self.tags = tags + self.location = kwargs.get('location', None) + self.tags = kwargs.get('tags', None) self.e_tag = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource_py3.py new file mode 100644 index 000000000000..cfc0e4b09aa5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource_py3.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class Resource(Model): + """Azure Data Factory top-level resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. 
+ :vartype type: str + :param location: The resource location. + :type location: str + :param tags: The resource tags. + :type tags: dict[str, str] + :ivar e_tag: Etag identifies change in the resource. + :vartype e_tag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'e_tag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'e_tag': {'key': 'eTag', 'type': 'str'}, + } + + def __init__(self, *, location: str=None, tags=None, **kwargs) -> None: + super(Resource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + self.location = location + self.tags = tags + self.e_tag = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service.py index 658fc1d32f96..16d1af502787 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service.py @@ -15,6 +15,8 @@ class ResponsysLinkedService(LinkedService): """Responsys linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,12 +31,12 @@ class ResponsysLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param endpoint: The endpoint of the Responsys server. + :param endpoint: Required. The endpoint of the Responsys server. :type endpoint: object - :param client_id: The client ID associated with the Responsys application. - Type: string (or Expression with resultType string). + :param client_id: Required. The client ID associated with the Responsys + application. Type: string (or Expression with resultType string). :type client_id: object :param client_secret: The client secret associated with the Responsys application. Type: string (or Expression with resultType string). 
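
The constructor rewrite in the hunk below moves every parameter into **kwargs, so required fields such as endpoint and client_id are enforced by the _validation map at serialization time rather than by the Python signature. A minimal construction sketch, assuming azure-mgmt-datafactory is installed; the endpoint and credential values are placeholders, not values from this patch:

from azure.mgmt.datafactory.models import ResponsysLinkedService, SecureString

# Every field is now a keyword argument; msrest checks the required fields
# ('endpoint', 'client_id') against _validation when the model is serialized,
# not when it is constructed.
responsys = ResponsysLinkedService(
    endpoint='https://api.responsys.example.com',        # placeholder endpoint
    client_id='example-client-id',                       # placeholder client ID
    client_secret=SecureString(value='example-secret'),  # a SecretBase subtype
)
body = responsys.serialize()  # nests typeProperties.* keys per _attribute_map
assert body['type'] == 'Responsys'
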
@@ -80,13 +82,13 @@ class ResponsysLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, endpoint, client_id, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None): - super(ResponsysLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.endpoint = endpoint - self.client_id = client_id - self.client_secret = client_secret - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(ResponsysLinkedService, self).__init__(**kwargs) + self.endpoint = kwargs.get('endpoint', None) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'Responsys' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service_py3.py new file mode 100644 index 000000000000..6d8a74a0a34b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service_py3.py @@ -0,0 +1,94 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class ResponsysLinkedService(LinkedService): + """Responsys linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of the Responsys server. + :type endpoint: object + :param client_id: Required. The client ID associated with the Responsys + application. Type: string (or Expression with resultType string). 
+ :type client_id: object + :param client_secret: The client secret associated with the Responsys + application. Type: string (or Expression with resultType string). + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. Type: boolean (or + Expression with resultType boolean). + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, endpoint, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(ResponsysLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.endpoint = endpoint + self.client_id = client_id + self.client_secret = client_secret + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Responsys' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset.py index d9563d4750cd..f459e69113a1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset.py +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset.py @@ -15,6 +15,8 @@ class ResponsysObjectDataset(Dataset): """Responsys dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class ResponsysObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class ResponsysObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -64,7 +66,7 @@ class ResponsysObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): - super(ResponsysObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name + def __init__(self, **kwargs): + super(ResponsysObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) self.type = 'ResponsysObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset_py3.py new file mode 100644 index 000000000000..c5f375910aaf --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class ResponsysObjectDataset(Dataset): + """Responsys dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. 
+ :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(ResponsysObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'ResponsysObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source.py index 7f641db163f6..fd25b8e71377 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source.py @@ -15,6 +15,8 @@ class ResponsysSource(CopySource): """A copy activity Responsys source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class ResponsysSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
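
The same pattern applies to the copy source below: the Python 2 variant reads everything from **kwargs, the *_py3 variant keeps keyword-only arguments, and both reset the polymorphic discriminator after calling super().__init__. A short sketch with a placeholder query, assuming the package is installed:

from azure.mgmt.datafactory.models import ResponsysSource

source = ResponsysSource(query='SELECT * FROM campaign')  # placeholder query
# The subclass pins the discriminator regardless of what the caller passes in.
assert source.type == 'ResponsysSource'
# None-valued optional fields (retry count, retry wait, ...) are omitted on the wire.
assert source.serialize()['query'] == 'SELECT * FROM campaign'
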
@@ -49,7 +51,7 @@ class ResponsysSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(ResponsysSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'ResponsysSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source_py3.py new file mode 100644 index 000000000000..8d5e4ac091f7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class ResponsysSource(CopySource): + """A copy activity Responsys source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'ResponsysSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset.py index 602e0419b5ab..9a5d41858e54 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset.py @@ -15,6 +15,8 @@ class RestResourceDataset(Dataset): """A Rest service dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class RestResourceDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class RestResourceDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param relative_url: The relative URL to the resource that the RESTful API provides. Type: string (or Expression with resultType string). 
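
For the dataset below, the only required constructor argument besides the server-filled type is linked_service_name; the REST-specific fields all flatten into typeProperties.* on the wire. A construction sketch, assuming the package is installed and using a placeholder linked service name and relative URL:

from azure.mgmt.datafactory.models import LinkedServiceReference, RestResourceDataset

dataset = RestResourceDataset(
    linked_service_name=LinkedServiceReference(reference_name='ExampleRestService'),
    relative_url='customers',      # placeholder; resolved against the service base URL
    request_method='GET',
    additional_headers='x-example-header: value',  # a string, not a dict, per the docstring
)
wire = dataset.serialize()
# relativeUrl and requestMethod land under typeProperties per _attribute_map.
assert wire['typeProperties']['relativeUrl'] == 'customers'
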
@@ -81,11 +83,11 @@ class RestResourceDataset(Dataset): 'pagination_rules': {'key': 'typeProperties.paginationRules', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, relative_url=None, request_method=None, request_body=None, additional_headers=None, pagination_rules=None): - super(RestResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.relative_url = relative_url - self.request_method = request_method - self.request_body = request_body - self.additional_headers = additional_headers - self.pagination_rules = pagination_rules + def __init__(self, **kwargs): + super(RestResourceDataset, self).__init__(**kwargs) + self.relative_url = kwargs.get('relative_url', None) + self.request_method = kwargs.get('request_method', None) + self.request_body = kwargs.get('request_body', None) + self.additional_headers = kwargs.get('additional_headers', None) + self.pagination_rules = kwargs.get('pagination_rules', None) self.type = 'RestResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset_py3.py new file mode 100644 index 000000000000..99f39c97f373 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset_py3.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class RestResourceDataset(Dataset): + """A Rest service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. 
Constant filled by server. + :type type: str + :param relative_url: The relative URL to the resource that the RESTful API + provides. Type: string (or Expression with resultType string). + :type relative_url: object + :param request_method: The HTTP method used to call the RESTful API. The + default is GET. Type: string (or Expression with resultType string). + :type request_method: object + :param request_body: The HTTP request body to the RESTful API if + requestMethod is POST. Type: string (or Expression with resultType + string). + :type request_body: object + :param additional_headers: The additional HTTP headers in the request to + the RESTful API. Type: string (or Expression with resultType string). + :type additional_headers: object + :param pagination_rules: The pagination rules to compose next page + requests. Type: string (or Expression with resultType string). + :type pagination_rules: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, + 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, + 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, + 'pagination_rules': {'key': 'typeProperties.paginationRules', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, relative_url=None, request_method=None, request_body=None, additional_headers=None, pagination_rules=None, **kwargs) -> None: + super(RestResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.relative_url = relative_url + self.request_method = request_method + self.request_body = request_body + self.additional_headers = additional_headers + self.pagination_rules = pagination_rules + self.type = 'RestResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service.py index 7b4c8784a345..0fbb15654438 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service.py @@ -15,6 +15,8 @@ class RestServiceLinkedService(LinkedService): """Rest Service linked service. + All required parameters must be populated in order to send to Azure. 
+ :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,17 +31,17 @@ class RestServiceLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param url: The base URL of the REST service. + :param url: Required. The base URL of the REST service. :type url: object :param enable_server_certificate_validation: Whether to validate server side SSL certificate when connecting to the endpoint.The default value is true. Type: boolean (or Expression with resultType boolean). :type enable_server_certificate_validation: object - :param authentication_type: Type of authentication used to connect to the - REST service. Possible values include: 'Anonymous', 'Basic', - 'AadServicePrincipal', 'ManagedServiceIdentity' + :param authentication_type: Required. Type of authentication used to + connect to the REST service. Possible values include: 'Anonymous', + 'Basic', 'AadServicePrincipal', 'ManagedServiceIdentity' :type authentication_type: str or ~azure.mgmt.datafactory.models.RestServiceAuthenticationType :param user_name: The user name used in Basic authentication type. @@ -90,16 +92,16 @@ class RestServiceLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, url, authentication_type, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, enable_server_certificate_validation=None, user_name=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, aad_resource_id=None, encrypted_credential=None): - super(RestServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.url = url - self.enable_server_certificate_validation = enable_server_certificate_validation - self.authentication_type = authentication_type - self.user_name = user_name - self.password = password - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.aad_resource_id = aad_resource_id - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(RestServiceLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.aad_resource_id = kwargs.get('aad_resource_id', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'RestService' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service_py3.py new file mode 100644 index 000000000000..9af9f609e52b --- /dev/null +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service_py3.py @@ -0,0 +1,107 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class RestServiceLinkedService(LinkedService): + """Rest Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The base URL of the REST service. + :type url: object + :param enable_server_certificate_validation: Whether to validate server + side SSL certificate when connecting to the endpoint.The default value is + true. Type: boolean (or Expression with resultType boolean). + :type enable_server_certificate_validation: object + :param authentication_type: Required. Type of authentication used to + connect to the REST service. Possible values include: 'Anonymous', + 'Basic', 'AadServicePrincipal', 'ManagedServiceIdentity' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.RestServiceAuthenticationType + :param user_name: The user name used in Basic authentication type. + :type user_name: object + :param password: The password used in Basic authentication type. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param service_principal_id: The application's client ID used in + AadServicePrincipal authentication type. + :type service_principal_id: object + :param service_principal_key: The application's key used in + AadServicePrincipal authentication type. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The tenant information (domain name or tenant ID) used in + AadServicePrincipal authentication type under which your application + resides. + :type tenant: object + :param aad_resource_id: The resource you are requesting authorization to + use. + :type aad_resource_id: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, url, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, enable_server_certificate_validation=None, user_name=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, aad_resource_id=None, encrypted_credential=None, **kwargs) -> None: + super(RestServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.enable_server_certificate_validation = enable_server_certificate_validation + self.authentication_type = authentication_type + self.user_name = user_name + self.password = password + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.aad_resource_id = aad_resource_id + self.encrypted_credential = encrypted_credential + self.type = 'RestService' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source.py index dfcaa1b03730..f32d4d67e427 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source.py @@ -15,6 +15,8 @@ class RestSource(CopySource): """A copy activity Rest service source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class RestSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). 
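
The copy source below carries its paging knobs (paginationRules, httpRequestTimeout, requestInterval) as top-level keys rather than under typeProperties. A sketch with placeholder values, assuming the package is installed; the pagination rule and interval shown are illustrative assumptions, not values prescribed by this patch:

from azure.mgmt.datafactory.models import RestSource

rest_source = RestSource(
    request_method='GET',
    http_request_timeout='00:01:40',               # the documented default, TimeSpan pattern
    request_interval=1000,                         # placeholder wait before the next page request
    pagination_rules='AbsoluteUrl:$.paging.next',  # placeholder pagination rule
)
# Unlike the dataset fields, these keys serialize at the top level of the source.
assert rest_source.serialize()['httpRequestTimeout'] == '00:01:40'
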
@@ -73,12 +75,12 @@ class RestSource(CopySource): 'request_interval': {'key': 'requestInterval', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, request_method=None, request_body=None, additional_headers=None, pagination_rules=None, http_request_timeout=None, request_interval=None): - super(RestSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.request_method = request_method - self.request_body = request_body - self.additional_headers = additional_headers - self.pagination_rules = pagination_rules - self.http_request_timeout = http_request_timeout - self.request_interval = request_interval + def __init__(self, **kwargs): + super(RestSource, self).__init__(**kwargs) + self.request_method = kwargs.get('request_method', None) + self.request_body = kwargs.get('request_body', None) + self.additional_headers = kwargs.get('additional_headers', None) + self.pagination_rules = kwargs.get('pagination_rules', None) + self.http_request_timeout = kwargs.get('http_request_timeout', None) + self.request_interval = kwargs.get('request_interval', None) self.type = 'RestSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source_py3.py new file mode 100644 index 000000000000..5fcbb2f7a76d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class RestSource(CopySource): + """A copy activity Rest service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param request_method: The HTTP method used to call the RESTful API. The + default is GET. Type: string (or Expression with resultType string). + :type request_method: object + :param request_body: The HTTP request body to the RESTful API if + requestMethod is POST. Type: string (or Expression with resultType + string). 
+ :type request_body: object + :param additional_headers: The additional HTTP headers in the request to + the RESTful API. Type: string (or Expression with resultType string). + :type additional_headers: object + :param pagination_rules: The pagination rules to compose next page + requests. Type: string (or Expression with resultType string). + :type pagination_rules: object + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP + response. It is the timeout to get a response, not the timeout to read + response data. Default value: 00:01:40. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object + :param request_interval: The time to await before sending next page + request. + :type request_interval: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'request_method': {'key': 'requestMethod', 'type': 'object'}, + 'request_body': {'key': 'requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, + 'pagination_rules': {'key': 'paginationRules', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + 'request_interval': {'key': 'requestInterval', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, request_method=None, request_body=None, additional_headers=None, pagination_rules=None, http_request_timeout=None, request_interval=None, **kwargs) -> None: + super(RestSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.request_method = request_method + self.request_body = request_body + self.additional_headers = additional_headers + self.pagination_rules = pagination_rules + self.http_request_timeout = http_request_timeout + self.request_interval = request_interval + self.type = 'RestSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy.py index dd6aa0d11d76..e6f5b1876259 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy.py @@ -32,7 +32,7 @@ class RetryPolicy(Model): 'interval_in_seconds': {'key': 'intervalInSeconds', 'type': 'int'}, } - def __init__(self, count=None, interval_in_seconds=None): - super(RetryPolicy, self).__init__() - self.count = count - self.interval_in_seconds = interval_in_seconds + def __init__(self, **kwargs): + super(RetryPolicy, self).__init__(**kwargs) + self.count = kwargs.get('count', None) + self.interval_in_seconds = kwargs.get('interval_in_seconds', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy_py3.py new file mode 100644 
index 000000000000..b51b87a49938 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy_py3.py @@ -0,0 +1,38 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RetryPolicy(Model): + """Execution policy for an activity. + + :param count: Maximum ordinary retry attempts. Default is 0. Type: integer + (or Expression with resultType integer), minimum: 0. + :type count: object + :param interval_in_seconds: Interval between retries in seconds. Default + is 30. + :type interval_in_seconds: int + """ + + _validation = { + 'interval_in_seconds': {'maximum': 86400, 'minimum': 30}, + } + + _attribute_map = { + 'count': {'key': 'count', 'type': 'object'}, + 'interval_in_seconds': {'key': 'intervalInSeconds', 'type': 'int'}, + } + + def __init__(self, *, count=None, interval_in_seconds: int=None, **kwargs) -> None: + super(RetryPolicy, self).__init__(**kwargs) + self.count = count + self.interval_in_seconds = interval_in_seconds diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters.py index 150161e78a05..9271f7adf029 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters.py @@ -15,14 +15,16 @@ class RunFilterParameters(Model): """Query parameters for listing runs. + All required parameters must be populated in order to send to Azure. + :param continuation_token: The continuation token for getting the next page of results. Null for first page. :type continuation_token: str - :param last_updated_after: The time at or after which the run event was - updated in 'ISO 8601' format. + :param last_updated_after: Required. The time at or after which the run + event was updated in 'ISO 8601' format. :type last_updated_after: datetime - :param last_updated_before: The time at or before which the run event was - updated in 'ISO 8601' format. + :param last_updated_before: Required. The time at or before which the run + event was updated in 'ISO 8601' format. :type last_updated_before: datetime :param filters: List of filters. 
:type filters: list[~azure.mgmt.datafactory.models.RunQueryFilter] @@ -43,10 +45,10 @@ class RunFilterParameters(Model): 'order_by': {'key': 'orderBy', 'type': '[RunQueryOrderBy]'}, } - def __init__(self, last_updated_after, last_updated_before, continuation_token=None, filters=None, order_by=None): - super(RunFilterParameters, self).__init__() - self.continuation_token = continuation_token - self.last_updated_after = last_updated_after - self.last_updated_before = last_updated_before - self.filters = filters - self.order_by = order_by + def __init__(self, **kwargs): + super(RunFilterParameters, self).__init__(**kwargs) + self.continuation_token = kwargs.get('continuation_token', None) + self.last_updated_after = kwargs.get('last_updated_after', None) + self.last_updated_before = kwargs.get('last_updated_before', None) + self.filters = kwargs.get('filters', None) + self.order_by = kwargs.get('order_by', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters_py3.py new file mode 100644 index 000000000000..c96e64eb63b3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters_py3.py @@ -0,0 +1,54 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RunFilterParameters(Model): + """Query parameters for listing runs. + + All required parameters must be populated in order to send to Azure. + + :param continuation_token: The continuation token for getting the next + page of results. Null for first page. + :type continuation_token: str + :param last_updated_after: Required. The time at or after which the run + event was updated in 'ISO 8601' format. + :type last_updated_after: datetime + :param last_updated_before: Required. The time at or before which the run + event was updated in 'ISO 8601' format. + :type last_updated_before: datetime + :param filters: List of filters. + :type filters: list[~azure.mgmt.datafactory.models.RunQueryFilter] + :param order_by: List of OrderBy option. 
+ :type order_by: list[~azure.mgmt.datafactory.models.RunQueryOrderBy] + """ + + _validation = { + 'last_updated_after': {'required': True}, + 'last_updated_before': {'required': True}, + } + + _attribute_map = { + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + 'last_updated_after': {'key': 'lastUpdatedAfter', 'type': 'iso-8601'}, + 'last_updated_before': {'key': 'lastUpdatedBefore', 'type': 'iso-8601'}, + 'filters': {'key': 'filters', 'type': '[RunQueryFilter]'}, + 'order_by': {'key': 'orderBy', 'type': '[RunQueryOrderBy]'}, + } + + def __init__(self, *, last_updated_after, last_updated_before, continuation_token: str=None, filters=None, order_by=None, **kwargs) -> None: + super(RunFilterParameters, self).__init__(**kwargs) + self.continuation_token = continuation_token + self.last_updated_after = last_updated_after + self.last_updated_before = last_updated_before + self.filters = filters + self.order_by = order_by diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter.py index 063002c27fdd..7d54150a6815 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter.py @@ -15,20 +15,22 @@ class RunQueryFilter(Model): """Query filter option for listing runs. - :param operand: Parameter name to be used for filter. The allowed operands - to query pipeline runs are PipelineName, RunStart, RunEnd and Status; to - query activity runs are ActivityName, ActivityRunStart, ActivityRunEnd, - ActivityType and Status, and to query trigger runs are TriggerName, - TriggerRunTimestamp and Status. Possible values include: 'PipelineName', - 'Status', 'RunStart', 'RunEnd', 'ActivityName', 'ActivityRunStart', - 'ActivityRunEnd', 'ActivityType', 'TriggerName', 'TriggerRunTimestamp', - 'RunGroupId', 'LatestOnly' + All required parameters must be populated in order to send to Azure. + + :param operand: Required. Parameter name to be used for filter. The + allowed operands to query pipeline runs are PipelineName, RunStart, RunEnd + and Status; to query activity runs are ActivityName, ActivityRunStart, + ActivityRunEnd, ActivityType and Status, and to query trigger runs are + TriggerName, TriggerRunTimestamp and Status. Possible values include: + 'PipelineName', 'Status', 'RunStart', 'RunEnd', 'ActivityName', + 'ActivityRunStart', 'ActivityRunEnd', 'ActivityType', 'TriggerName', + 'TriggerRunTimestamp', 'RunGroupId', 'LatestOnly' :type operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand - :param operator: Operator to be used for filter. Possible values include: - 'Equals', 'NotEquals', 'In', 'NotIn' + :param operator: Required. Operator to be used for filter. Possible values + include: 'Equals', 'NotEquals', 'In', 'NotIn' :type operator: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperator - :param values: List of filter values. + :param values: Required. List of filter values. 
:type values: list[str] """ @@ -44,8 +46,8 @@ class RunQueryFilter(Model): 'values': {'key': 'values', 'type': '[str]'}, } - def __init__(self, operand, operator, values): - super(RunQueryFilter, self).__init__() - self.operand = operand - self.operator = operator - self.values = values + def __init__(self, **kwargs): + super(RunQueryFilter, self).__init__(**kwargs) + self.operand = kwargs.get('operand', None) + self.operator = kwargs.get('operator', None) + self.values = kwargs.get('values', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter_py3.py new file mode 100644 index 000000000000..814e7a4b499b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter_py3.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RunQueryFilter(Model): + """Query filter option for listing runs. + + All required parameters must be populated in order to send to Azure. + + :param operand: Required. Parameter name to be used for filter. The + allowed operands to query pipeline runs are PipelineName, RunStart, RunEnd + and Status; to query activity runs are ActivityName, ActivityRunStart, + ActivityRunEnd, ActivityType and Status, and to query trigger runs are + TriggerName, TriggerRunTimestamp and Status. Possible values include: + 'PipelineName', 'Status', 'RunStart', 'RunEnd', 'ActivityName', + 'ActivityRunStart', 'ActivityRunEnd', 'ActivityType', 'TriggerName', + 'TriggerRunTimestamp', 'RunGroupId', 'LatestOnly' + :type operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand + :param operator: Required. Operator to be used for filter. Possible values + include: 'Equals', 'NotEquals', 'In', 'NotIn' + :type operator: str or + ~azure.mgmt.datafactory.models.RunQueryFilterOperator + :param values: Required. List of filter values. + :type values: list[str] + """ + + _validation = { + 'operand': {'required': True}, + 'operator': {'required': True}, + 'values': {'required': True}, + } + + _attribute_map = { + 'operand': {'key': 'operand', 'type': 'str'}, + 'operator': {'key': 'operator', 'type': 'str'}, + 'values': {'key': 'values', 'type': '[str]'}, + } + + def __init__(self, *, operand, operator, values, **kwargs) -> None: + super(RunQueryFilter, self).__init__(**kwargs) + self.operand = operand + self.operator = operator + self.values = values diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by.py index 596b68dfdac4..21afabcf215f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by.py @@ -15,16 +15,18 @@ class RunQueryOrderBy(Model): """An object to provide order by options for listing runs. 
- :param order_by: Parameter name to be used for order by. The allowed - parameters to order by for pipeline runs are PipelineName, RunStart, - RunEnd and Status; for activity runs are ActivityName, ActivityRunStart, - ActivityRunEnd and Status; for trigger runs are TriggerName, - TriggerRunTimestamp and Status. Possible values include: 'RunStart', - 'RunEnd', 'PipelineName', 'Status', 'ActivityName', 'ActivityRunStart', - 'ActivityRunEnd', 'TriggerName', 'TriggerRunTimestamp' + All required parameters must be populated in order to send to Azure. + + :param order_by: Required. Parameter name to be used for order by. The + allowed parameters to order by for pipeline runs are PipelineName, + RunStart, RunEnd and Status; for activity runs are ActivityName, + ActivityRunStart, ActivityRunEnd and Status; for trigger runs are + TriggerName, TriggerRunTimestamp and Status. Possible values include: + 'RunStart', 'RunEnd', 'PipelineName', 'Status', 'ActivityName', + 'ActivityRunStart', 'ActivityRunEnd', 'TriggerName', 'TriggerRunTimestamp' :type order_by: str or ~azure.mgmt.datafactory.models.RunQueryOrderByField - :param order: Sorting order of the parameter. Possible values include: - 'ASC', 'DESC' + :param order: Required. Sorting order of the parameter. Possible values + include: 'ASC', 'DESC' :type order: str or ~azure.mgmt.datafactory.models.RunQueryOrder """ @@ -38,7 +40,7 @@ class RunQueryOrderBy(Model): 'order': {'key': 'order', 'type': 'str'}, } - def __init__(self, order_by, order): - super(RunQueryOrderBy, self).__init__() - self.order_by = order_by - self.order = order + def __init__(self, **kwargs): + super(RunQueryOrderBy, self).__init__(**kwargs) + self.order_by = kwargs.get('order_by', None) + self.order = kwargs.get('order', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by_py3.py new file mode 100644 index 000000000000..a3ddc8854d47 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RunQueryOrderBy(Model): + """An object to provide order by options for listing runs. + + All required parameters must be populated in order to send to Azure. + + :param order_by: Required. Parameter name to be used for order by. The + allowed parameters to order by for pipeline runs are PipelineName, + RunStart, RunEnd and Status; for activity runs are ActivityName, + ActivityRunStart, ActivityRunEnd and Status; for trigger runs are + TriggerName, TriggerRunTimestamp and Status. Possible values include: + 'RunStart', 'RunEnd', 'PipelineName', 'Status', 'ActivityName', + 'ActivityRunStart', 'ActivityRunEnd', 'TriggerName', 'TriggerRunTimestamp' + :type order_by: str or ~azure.mgmt.datafactory.models.RunQueryOrderByField + :param order: Required. Sorting order of the parameter. 
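The three run-query models in this part of the patch are designed to be used together: RunFilterParameters carries the required time window plus the paging token, RunQueryFilter narrows the result set, and RunQueryOrderBy sorts it. A minimal usage sketch (not part of the generated diff) of the new Python 3 keyword-only constructors; the pipeline name is a placeholder, and handing the result to an operation such as pipeline_runs.query_by_factory is an assumption about the client surface rather than something shown here:

    from datetime import datetime, timedelta

    from azure.mgmt.datafactory.models import (
        RunFilterParameters, RunQueryFilter, RunQueryOrderBy)

    params = RunFilterParameters(
        last_updated_after=datetime.utcnow() - timedelta(days=1),  # required
        last_updated_before=datetime.utcnow(),                     # required
        filters=[RunQueryFilter(operand='PipelineName',
                                operator='Equals',
                                values=['myPipeline'])],           # placeholder name
        order_by=[RunQueryOrderBy(order_by='RunStart', order='DESC')])

The plain strings map onto the RunQueryFilterOperand, RunQueryFilterOperator, RunQueryOrderByField and RunQueryOrder enums; msrest accepts either an enum member or its string value. On Python 2 the **kwargs constructors above take the same keyword names.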
Possible values + include: 'ASC', 'DESC' + :type order: str or ~azure.mgmt.datafactory.models.RunQueryOrder + """ + + _validation = { + 'order_by': {'required': True}, + 'order': {'required': True}, + } + + _attribute_map = { + 'order_by': {'key': 'orderBy', 'type': 'str'}, + 'order': {'key': 'order', 'type': 'str'}, + } + + def __init__(self, *, order_by, order, **kwargs) -> None: + super(RunQueryOrderBy, self).__init__(**kwargs) + self.order_by = order_by + self.order = order diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service.py index 00674e19d285..c644ac664831 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service.py @@ -15,6 +15,8 @@ class SalesforceLinkedService(LinkedService): """Linked service for Salesforce. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class SalesforceLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param environment_url: The URL of Salesforce instance. Default is 'https://login.salesforce.com'. To copy data from sandbox, specify @@ -70,11 +72,11 @@ class SalesforceLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, environment_url=None, username=None, password=None, security_token=None, encrypted_credential=None): - super(SalesforceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.environment_url = environment_url - self.username = username - self.password = password - self.security_token = security_token - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(SalesforceLinkedService, self).__init__(**kwargs) + self.environment_url = kwargs.get('environment_url', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.security_token = kwargs.get('security_token', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'Salesforce' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service_py3.py new file mode 100644 index 000000000000..05fcea7a3990 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SalesforceLinkedService(LinkedService): + """Linked service for Salesforce. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param environment_url: The URL of Salesforce instance. Default is + 'https://login.salesforce.com'. To copy data from sandbox, specify + 'https://test.salesforce.com'. To copy data from custom domain, specify, + for example, 'https://[domain].my.salesforce.com'. Type: string (or + Expression with resultType string). + :type environment_url: object + :param username: The username for Basic authentication of the Salesforce + instance. Type: string (or Expression with resultType string). + :type username: object + :param password: The password for Basic authentication of the Salesforce + instance. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param security_token: The security token is required to remotely access + Salesforce instance. + :type security_token: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
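As the docstring notes, password and security_token are SecretBase values rather than plain strings; SecureString is the concrete SecretBase subtype in this models package. A hedged construction sketch (not part of the diff) with placeholder credentials:

    from azure.mgmt.datafactory.models import (
        SalesforceLinkedService, SecureString)

    ls = SalesforceLinkedService(
        environment_url='https://test.salesforce.com',           # sandbox copy
        username='integration.user@example.com',                 # placeholder
        password=SecureString(value='<password>'),               # placeholder
        security_token=SecureString(value='<security-token>'))   # placeholder

Every constructor argument here is optional; the only field marked required in _validation is the server-filled type constant, which the subclass sets to 'Salesforce' itself.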
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, environment_url=None, username=None, password=None, security_token=None, encrypted_credential=None, **kwargs) -> None: + super(SalesforceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.environment_url = environment_url + self.username = username + self.password = password + self.security_token = security_token + self.encrypted_credential = encrypted_credential + self.type = 'Salesforce' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service.py index 88373e4cfef8..93b4fcdb3d1f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service.py @@ -15,6 +15,8 @@ class SalesforceMarketingCloudLinkedService(LinkedService): """Salesforce Marketing Cloud linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,10 +31,11 @@ class SalesforceMarketingCloudLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param client_id: The client ID associated with the Salesforce Marketing - Cloud application. Type: string (or Expression with resultType string). + :param client_id: Required. The client ID associated with the Salesforce + Marketing Cloud application. Type: string (or Expression with resultType + string). :type client_id: object :param client_secret: The client secret associated with the Salesforce Marketing Cloud application. 
Type: string (or Expression with resultType @@ -77,12 +80,12 @@ class SalesforceMarketingCloudLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, client_id, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None): - super(SalesforceMarketingCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.client_id = client_id - self.client_secret = client_secret - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(SalesforceMarketingCloudLinkedService, self).__init__(**kwargs) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'SalesforceMarketingCloud' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service_py3.py new file mode 100644 index 000000000000..d7e09e27a43f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service_py3.py @@ -0,0 +1,91 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SalesforceMarketingCloudLinkedService(LinkedService): + """Salesforce Marketing Cloud linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param client_id: Required. The client ID associated with the Salesforce + Marketing Cloud application. Type: string (or Expression with resultType + string). 
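Unlike the Salesforce linked service, this one validates a caller-supplied field: client_id is the single keyword-only argument without a default in the Python 3 signature. A sketch with placeholder values (not part of the diff):

    from azure.mgmt.datafactory.models import (
        SalesforceMarketingCloudLinkedService, SecureString)

    ls = SalesforceMarketingCloudLinkedService(
        client_id='<app-client-id>',                        # required
        client_secret=SecureString(value='<app-secret>'))   # placeholder

Omitting client_id is a TypeError under the Python 3 constructor; the **kwargs variant instead defers to the _validation map, so msrest rejects the model when it is serialized.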
+ :type client_id: object + :param client_secret: The client secret associated with the Salesforce + Marketing Cloud application. Type: string (or Expression with resultType + string). + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. Type: boolean (or + Expression with resultType boolean). + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(SalesforceMarketingCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.client_id = client_id + self.client_secret = client_secret + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'SalesforceMarketingCloud' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset.py index cf0998c76fcc..20f581ce1c50 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset.py +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset.py @@ -15,6 +15,8 @@ class SalesforceMarketingCloudObjectDataset(Dataset): """Salesforce Marketing Cloud dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class SalesforceMarketingCloudObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class SalesforceMarketingCloudObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -64,7 +66,7 @@ class SalesforceMarketingCloudObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): - super(SalesforceMarketingCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name + def __init__(self, **kwargs): + super(SalesforceMarketingCloudObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) self.type = 'SalesforceMarketingCloudObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset_py3.py new file mode 100644 index 000000000000..526ac806649f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SalesforceMarketingCloudObjectDataset(Dataset): + """Salesforce Marketing Cloud dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. 
+ :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(SalesforceMarketingCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'SalesforceMarketingCloudObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source.py index fd375ee3cb7f..09a0eca1758e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source.py @@ -15,6 +15,8 @@ class SalesforceMarketingCloudSource(CopySource): """A copy activity Salesforce Marketing Cloud source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class SalesforceMarketingCloudSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. 
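The dataset models reference their linked service by name instead of embedding it: linked_service_name is a LinkedServiceReference and is the only user-supplied required field. A sketch with placeholder names (not part of the diff); the Salesforce object datasets later in this patch follow the identical shape, swapping table_name for object_api_name:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, SalesforceMarketingCloudObjectDataset)

    ds = SalesforceMarketingCloudObjectDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='SfmcLinkedService'),   # placeholder reference
        table_name='Campaign')                     # placeholder object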
+ :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). @@ -49,7 +51,7 @@ class SalesforceMarketingCloudSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(SalesforceMarketingCloudSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'SalesforceMarketingCloudSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source_py3.py new file mode 100644 index 000000000000..9b898af0c3a1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SalesforceMarketingCloudSource(CopySource): + """A copy activity Salesforce Marketing Cloud source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'SalesforceMarketingCloudSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset.py index 53ebb3201608..10cfce97fe0f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset.py @@ -15,6 +15,8 @@ class SalesforceObjectDataset(Dataset): """The Salesforce object dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class SalesforceObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class SalesforceObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param object_api_name: The Salesforce object API name. Type: string (or Expression with resultType string). 
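A copy source such as SalesforceMarketingCloudSource is never sent on its own; it is embedded in a copy activity next to a sink. The sketch below (not part of the diff) builds only the source, with an illustrative query and an optional cap on concurrent connections to the store:

    from azure.mgmt.datafactory.models import SalesforceMarketingCloudSource

    source = SalesforceMarketingCloudSource(
        query='SELECT Name FROM Campaign',   # placeholder query
        max_concurrent_connections=4)        # optional throttle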
@@ -64,7 +66,7 @@ class SalesforceObjectDataset(Dataset): 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, object_api_name=None): - super(SalesforceObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.object_api_name = object_api_name + def __init__(self, **kwargs): + super(SalesforceObjectDataset, self).__init__(**kwargs) + self.object_api_name = kwargs.get('object_api_name', None) self.type = 'SalesforceObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset_py3.py new file mode 100644 index 000000000000..3c3f75d6059e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SalesforceObjectDataset(Dataset): + """The Salesforce object dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param object_api_name: The Salesforce object API name. Type: string (or + Expression with resultType string). 
+ :type object_api_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, object_api_name=None, **kwargs) -> None: + super(SalesforceObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.object_api_name = object_api_name + self.type = 'SalesforceObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service.py index e96d6c64c36b..fb6476ac9a30 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service.py @@ -15,6 +15,8 @@ class SalesforceServiceCloudLinkedService(LinkedService): """Linked service for Salesforce Service Cloud. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class SalesforceServiceCloudLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param environment_url: The URL of Salesforce Service Cloud instance. Default is 'https://login.salesforce.com'. 
To copy data from sandbox, @@ -74,12 +76,12 @@ class SalesforceServiceCloudLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, environment_url=None, username=None, password=None, security_token=None, extended_properties=None, encrypted_credential=None): - super(SalesforceServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.environment_url = environment_url - self.username = username - self.password = password - self.security_token = security_token - self.extended_properties = extended_properties - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(SalesforceServiceCloudLinkedService, self).__init__(**kwargs) + self.environment_url = kwargs.get('environment_url', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.security_token = kwargs.get('security_token', None) + self.extended_properties = kwargs.get('extended_properties', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'SalesforceServiceCloud' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service_py3.py new file mode 100644 index 000000000000..3f0b3cc64d91 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service_py3.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SalesforceServiceCloudLinkedService(LinkedService): + """Linked service for Salesforce Service Cloud. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param environment_url: The URL of Salesforce Service Cloud instance. + Default is 'https://login.salesforce.com'. To copy data from sandbox, + specify 'https://test.salesforce.com'. To copy data from custom domain, + specify, for example, 'https://[domain].my.salesforce.com'. 
Type: string + (or Expression with resultType string). + :type environment_url: object + :param username: The username for Basic authentication of the Salesforce + instance. Type: string (or Expression with resultType string). + :type username: object + :param password: The password for Basic authentication of the Salesforce + instance. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param security_token: The security token is required to remotely access + Salesforce instance. + :type security_token: ~azure.mgmt.datafactory.models.SecretBase + :param extended_properties: Extended properties appended to the connection + string. Type: string (or Expression with resultType string). + :type extended_properties: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, + 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, environment_url=None, username=None, password=None, security_token=None, extended_properties=None, encrypted_credential=None, **kwargs) -> None: + super(SalesforceServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.environment_url = environment_url + self.username = username + self.password = password + self.security_token = security_token + self.extended_properties = extended_properties + self.encrypted_credential = encrypted_credential + self.type = 'SalesforceServiceCloud' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset.py index 2bd8b554c0a4..1f5cb3bb5bf1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset.py @@ -15,6 +15,8 @@ class SalesforceServiceCloudObjectDataset(Dataset): """The Salesforce Service Cloud object dataset. + All required parameters must be populated in order to send to Azure. 
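SalesforceServiceCloudLinkedService mirrors the plain Salesforce linked service except for extended_properties, which is appended to the connection string. A short sketch of just that difference, with placeholder values (not part of the diff):

    from azure.mgmt.datafactory.models import (
        SalesforceServiceCloudLinkedService, SecureString)

    ls = SalesforceServiceCloudLinkedService(
        username='integration.user@example.com',    # placeholder
        password=SecureString(value='<password>'),  # placeholder
        extended_properties='<name>=<value>')       # placeholder extras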
+ :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class SalesforceServiceCloudObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class SalesforceServiceCloudObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param object_api_name: The Salesforce Service Cloud object API name. Type: string (or Expression with resultType string). @@ -64,7 +66,7 @@ class SalesforceServiceCloudObjectDataset(Dataset): 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, object_api_name=None): - super(SalesforceServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.object_api_name = object_api_name + def __init__(self, **kwargs): + super(SalesforceServiceCloudObjectDataset, self).__init__(**kwargs) + self.object_api_name = kwargs.get('object_api_name', None) self.type = 'SalesforceServiceCloudObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset_py3.py new file mode 100644 index 000000000000..d215f5f0084d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SalesforceServiceCloudObjectDataset(Dataset): + """The Salesforce Service Cloud object dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. 
Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param object_api_name: The Salesforce Service Cloud object API name. + Type: string (or Expression with resultType string). + :type object_api_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, object_api_name=None, **kwargs) -> None: + super(SalesforceServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.object_api_name = object_api_name + self.type = 'SalesforceServiceCloudObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink.py index f7ea135bf5d0..99e2b1a2c924 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink.py @@ -15,6 +15,8 @@ class SalesforceServiceCloudSink(CopySink): """A copy activity Salesforce Service Cloud sink. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -36,7 +38,7 @@ class SalesforceServiceCloudSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. Default is Insert. 
Possible values include: 'Insert', 'Upsert' @@ -74,9 +76,9 @@ class SalesforceServiceCloudSink(CopySink): 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, } - def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None): - super(SalesforceServiceCloudSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.write_behavior = write_behavior - self.external_id_field_name = external_id_field_name - self.ignore_null_values = ignore_null_values + def __init__(self, **kwargs): + super(SalesforceServiceCloudSink, self).__init__(**kwargs) + self.write_behavior = kwargs.get('write_behavior', None) + self.external_id_field_name = kwargs.get('external_id_field_name', None) + self.ignore_null_values = kwargs.get('ignore_null_values', None) self.type = 'SalesforceServiceCloudSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink_py3.py new file mode 100644 index 000000000000..2abfaa12d0e7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink_py3.py @@ -0,0 +1,84 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class SalesforceServiceCloudSink(CopySink): + """A copy activity Salesforce Service Cloud sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param write_behavior: The write behavior for the operation. Default is + Insert. 
Possible values include: 'Insert', 'Upsert' + :type write_behavior: str or + ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior + :param external_id_field_name: The name of the external ID field for + upsert operation. Default value is 'Id' column. Type: string (or + Expression with resultType string). + :type external_id_field_name: object + :param ignore_null_values: The flag indicating whether or not to ignore + null values from input dataset (except key fields) during write operation. + Default value is false. If set it to true, it means ADF will leave the + data in the destination object unchanged when doing upsert/update + operation and insert defined default value when doing insert operation, + versus ADF will update the data in the destination object to NULL when + doing upsert/update operation and insert NULL value when doing insert + operation. Type: boolean (or Expression with resultType boolean). + :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None: + super(SalesforceServiceCloudSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.write_behavior = write_behavior + self.external_id_field_name = external_id_field_name + self.ignore_null_values = ignore_null_values + self.type = 'SalesforceServiceCloudSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source.py index 378a3d06e58e..255bfab477bc 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source.py @@ -15,6 +15,8 @@ class SalesforceServiceCloudSource(CopySource): """A copy activity Salesforce Service Cloud source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class SalesforceServiceCloudSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. 
+ :param type: Required. Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType string). @@ -54,8 +56,8 @@ class SalesforceServiceCloudSource(CopySource): 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, read_behavior=None): - super(SalesforceServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query - self.read_behavior = read_behavior + def __init__(self, **kwargs): + super(SalesforceServiceCloudSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.read_behavior = kwargs.get('read_behavior', None) self.type = 'SalesforceServiceCloudSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source_py3.py new file mode 100644 index 000000000000..77bb267f5a47 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source_py3.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SalesforceServiceCloudSource(CopySource): + """A copy activity Salesforce Service Cloud source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + :param read_behavior: The read behavior for the operation. Default is + Query. 
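To make the read behavior concrete, a minimal hypothetical sketch of constructing the source above (the SOQL text is illustrative; 'QueryAll' is the Salesforce API variant that also returns soft-deleted records):

    from azure.mgmt.datafactory.models import SalesforceServiceCloudSource

    # read_behavior defaults to 'Query'; 'QueryAll' also surfaces deleted rows.
    source = SalesforceServiceCloudSource(
        query='SELECT Id, Name FROM Case',  # illustrative SOQL
        read_behavior='QueryAll',
    )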
Possible values include: 'Query', 'QueryAll' + :type read_behavior: str or + ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, read_behavior=None, **kwargs) -> None: + super(SalesforceServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.read_behavior = read_behavior + self.type = 'SalesforceServiceCloudSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py index 2ffe781bacad..9a1291bd4bfe 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py @@ -15,6 +15,8 @@ class SalesforceSink(CopySink): """A copy activity Salesforce sink. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -36,7 +38,7 @@ class SalesforceSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. Default is Insert. 
Possible values include: 'Insert', 'Upsert' @@ -74,9 +76,9 @@ class SalesforceSink(CopySink): 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, } - def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None): - super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.write_behavior = write_behavior - self.external_id_field_name = external_id_field_name - self.ignore_null_values = ignore_null_values + def __init__(self, **kwargs): + super(SalesforceSink, self).__init__(**kwargs) + self.write_behavior = kwargs.get('write_behavior', None) + self.external_id_field_name = kwargs.get('external_id_field_name', None) + self.ignore_null_values = kwargs.get('ignore_null_values', None) self.type = 'SalesforceSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py new file mode 100644 index 000000000000..54a56618d01e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py @@ -0,0 +1,84 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class SalesforceSink(CopySink): + """A copy activity Salesforce sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param write_behavior: The write behavior for the operation. Default is + Insert. 
Possible values include: 'Insert', 'Upsert' + :type write_behavior: str or + ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior + :param external_id_field_name: The name of the external ID field for + upsert operation. Default value is 'Id' column. Type: string (or + Expression with resultType string). + :type external_id_field_name: object + :param ignore_null_values: The flag indicating whether or not to ignore + null values from input dataset (except key fields) during write operation. + Default value is false. If set it to true, it means ADF will leave the + data in the destination object unchanged when doing upsert/update + operation and insert defined default value when doing insert operation, + versus ADF will update the data in the destination object to NULL when + doing upsert/update operation and insert NULL value when doing insert + operation. Type: boolean (or Expression with resultType boolean). + :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None: + super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.write_behavior = write_behavior + self.external_id_field_name = external_id_field_name + self.ignore_null_values = ignore_null_values + self.type = 'SalesforceSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py index 3d076f304a4b..4f2590c3ab9d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py @@ -15,6 +15,8 @@ class SalesforceSource(CopySource): """A copy activity Salesforce source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class SalesforceSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: Database query. 
Type: string (or Expression with resultType string). @@ -54,8 +56,8 @@ class SalesforceSource(CopySource): 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, read_behavior=None): - super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query - self.read_behavior = read_behavior + def __init__(self, **kwargs): + super(SalesforceSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.read_behavior = kwargs.get('read_behavior', None) self.type = 'SalesforceSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py new file mode 100644 index 000000000000..4441e92eaff3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SalesforceSource(CopySource): + """A copy activity Salesforce source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + :param read_behavior: The read behavior for the operation. Default is + Query. 
Possible values include: 'Query', 'QueryAll' + :type read_behavior: str or + ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, read_behavior=None, **kwargs) -> None: + super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.read_behavior = read_behavior + self.type = 'SalesforceSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset.py index 42d8ffe89f77..048d26f85696 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset.py @@ -15,6 +15,8 @@ class SapBwCubeDataset(Dataset): """The SAP BW cube dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class SapBwCubeDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class SapBwCubeDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. 
:type type: str """ @@ -48,6 +50,18 @@ class SapBwCubeDataset(Dataset): 'type': {'required': True}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None): - super(SapBwCubeDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SapBwCubeDataset, self).__init__(**kwargs) self.type = 'SapBwCube' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset_py3.py new file mode 100644 index 000000000000..08334a824ba4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_cube_dataset_py3.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SapBwCubeDataset(Dataset): + """The SAP BW cube dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(SapBwCubeDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'SapBwCube' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service.py index cc11ab6d60b9..a57164c7215d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service.py @@ -15,6 +15,8 @@ class SapBWLinkedService(LinkedService): """SAP Business Warehouse Linked Service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,18 +31,18 @@ class SapBWLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param server: Host name of the SAP BW instance. Type: string (or - Expression with resultType string). + :param server: Required. Host name of the SAP BW instance. Type: string + (or Expression with resultType string). :type server: object - :param system_number: System number of the BW system. (Usually a two-digit - decimal number represented as a string.) Type: string (or Expression with - resultType string). - :type system_number: object - :param client_id: Client ID of the client on the BW system. (Usually a - three-digit decimal number represented as a string) Type: string (or + :param system_number: Required. System number of the BW system. (Usually a + two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). + :type system_number: object + :param client_id: Required. Client ID of the client on the BW system. + (Usually a three-digit decimal number represented as a string) Type: + string (or Expression with resultType string). :type client_id: object :param user_name: Username to access the SAP BW server. Type: string (or Expression with resultType string). 
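As a usage sketch for the linked service above (not part of the patch itself): the resource names and credentials below are hypothetical, SecureString is the SDK's plaintext SecretBase implementation, and adf_client is assumed to be an existing DataFactoryManagementClient.

    from azure.mgmt.datafactory.models import SapBWLinkedService, SecureString

    # server, system_number and client_id are all required by _validation.
    sap_bw_ls = SapBWLinkedService(
        server='sapbw.contoso.local',            # hypothetical host name
        system_number='00',                      # two-digit number, as a string
        client_id='100',                         # three-digit number, as a string
        user_name='bw_reader',
        password=SecureString(value='<secret>'),
    )
    adf_client.linked_services.create_or_update(
        'my-resource-group', 'my-factory', 'SapBWLinkedService1', sap_bw_ls)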
@@ -75,12 +77,12 @@ class SapBWLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, server, system_number, client_id, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, user_name=None, password=None, encrypted_credential=None): - super(SapBWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.server = server - self.system_number = system_number - self.client_id = client_id - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(SapBWLinkedService, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.system_number = kwargs.get('system_number', None) + self.client_id = kwargs.get('client_id', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'SapBW' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service_py3.py new file mode 100644 index 000000000000..92aef25dc215 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service_py3.py @@ -0,0 +1,88 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SapBWLinkedService(LinkedService): + """SAP Business Warehouse Linked Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. Host name of the SAP BW instance. Type: string + (or Expression with resultType string). + :type server: object + :param system_number: Required. System number of the BW system. (Usually a + two-digit decimal number represented as a string.) Type: string (or + Expression with resultType string). + :type system_number: object + :param client_id: Required. Client ID of the client on the BW system. + (Usually a three-digit decimal number represented as a string) Type: + string (or Expression with resultType string). 
+ :type client_id: object + :param user_name: Username to access the SAP BW server. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password to access the SAP BW server. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'system_number': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, server, system_number, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(SapBWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.server = server + self.system_number = system_number + self.client_id = client_id + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'SapBW' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source.py index eedd7d2dd4c0..e3762d8e694e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source.py @@ -15,6 +15,8 @@ class SapBwSource(CopySource): """A copy activity source for SapBW server via MDX. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class SapBwSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: MDX query. Type: string (or Expression with resultType string). 
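For orientation, a hedged sketch of wiring this source into a copy activity; the dataset references, the BlobSink, and the MDX text are placeholders rather than anything defined in this patch.

    from azure.mgmt.datafactory.models import (
        BlobSink, CopyActivity, DatasetReference, SapBwSource)

    copy = CopyActivity(
        name='CopyFromSapBw',
        inputs=[DatasetReference(reference_name='SapBwCubeDataset1')],
        outputs=[DatasetReference(reference_name='BlobDataset1')],
        source=SapBwSource(query='SELECT ... FROM [SalesCube]'),  # illustrative MDX
        sink=BlobSink(),
    )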
@@ -49,7 +51,7 @@ class SapBwSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(SapBwSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'SapBwSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source_py3.py new file mode 100644 index 000000000000..ed6ff734742d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SapBwSource(CopySource): + """A copy activity source for SapBW server via MDX. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: MDX query. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'SapBwSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service.py index 92f6867357ee..53d47ab8ae41 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service.py @@ -15,6 +15,8 @@ class SapCloudForCustomerLinkedService(LinkedService): """Linked service for SAP Cloud for Customer. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,11 +31,11 @@ class SapCloudForCustomerLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param url: The URL of SAP Cloud for Customer OData API. For example, - '[https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1]'. Type: string - (or Expression with resultType string). + :param url: Required. The URL of SAP Cloud for Customer OData API. For + example, '[https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1]'. Type: + string (or Expression with resultType string). :type url: object :param username: The username for Basic authentication. Type: string (or Expression with resultType string). 
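A quick constructor sketch for this linked service (the URL and credentials are hypothetical; as the docstring notes, supply either username/password or encryptedCredential):

    from azure.mgmt.datafactory.models import (
        SapCloudForCustomerLinkedService, SecureString)

    c4c_ls = SapCloudForCustomerLinkedService(
        url='https://mytenant.crm.ondemand.com/sap/c4c/odata/v1',  # required
        username='odata_user',
        password=SecureString(value='<secret>'),
    )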
@@ -65,10 +67,10 @@ class SapCloudForCustomerLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, url, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, username=None, password=None, encrypted_credential=None): - super(SapCloudForCustomerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.url = url - self.username = username - self.password = password - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(SapCloudForCustomerLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'SapCloudForCustomer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service_py3.py new file mode 100644 index 000000000000..9e47fd696503 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service_py3.py @@ -0,0 +1,76 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SapCloudForCustomerLinkedService(LinkedService): + """Linked service for SAP Cloud for Customer. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The URL of SAP Cloud for Customer OData API. For + example, '[https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1]'. Type: + string (or Expression with resultType string). + :type url: object + :param username: The username for Basic authentication. Type: string (or + Expression with resultType string). + :type username: object + :param password: The password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. 
Either encryptedCredential or username/password must + be provided. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(SapCloudForCustomerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'SapCloudForCustomer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset.py index 842721ddf1e9..436b251207a4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset.py @@ -15,6 +15,8 @@ class SapCloudForCustomerResourceDataset(Dataset): """The path of the SAP Cloud for Customer OData entity. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class SapCloudForCustomerResourceDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,10 +41,10 @@ class SapCloudForCustomerResourceDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param path: The path of the SAP Cloud for Customer OData entity. Type: - string (or Expression with resultType string). + :param path: Required. The path of the SAP Cloud for Customer OData + entity. Type: string (or Expression with resultType string). 
:type path: object """ @@ -65,7 +67,7 @@ class SapCloudForCustomerResourceDataset(Dataset): 'path': {'key': 'typeProperties.path', 'type': 'object'}, } - def __init__(self, linked_service_name, path, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None): - super(SapCloudForCustomerResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.path = path + def __init__(self, **kwargs): + super(SapCloudForCustomerResourceDataset, self).__init__(**kwargs) + self.path = kwargs.get('path', None) self.type = 'SapCloudForCustomerResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset_py3.py new file mode 100644 index 000000000000..455bad7c9095 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SapCloudForCustomerResourceDataset(Dataset): + """The path of the SAP Cloud for Customer OData entity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param path: Required. The path of the SAP Cloud for Customer OData + entity. Type: string (or Expression with resultType string). 
+ :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, path, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(SapCloudForCustomerResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.path = path + self.type = 'SapCloudForCustomerResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py index f7e8379fabea..e5a37858abb5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py @@ -15,6 +15,8 @@ class SapCloudForCustomerSink(CopySink): """A copy activity SAP Cloud for Customer sink. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -36,7 +38,7 @@ class SapCloudForCustomerSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. Default is 'Insert'. 
Possible values include: 'Insert', 'Update' @@ -59,7 +61,7 @@ class SapCloudForCustomerSink(CopySink): 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } - def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None): - super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.write_behavior = write_behavior + def __init__(self, **kwargs): + super(SapCloudForCustomerSink, self).__init__(**kwargs) + self.write_behavior = kwargs.get('write_behavior', None) self.type = 'SapCloudForCustomerSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py new file mode 100644 index 000000000000..29f01fdd6891 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class SapCloudForCustomerSink(CopySink): + """A copy activity SAP Cloud for Customer sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param write_behavior: The write behavior for the operation. Default is + 'Insert'. 
Possible values include: 'Insert', 'Update' + :type write_behavior: str or + ~azure.mgmt.datafactory.models.SapCloudForCustomerSinkWriteBehavior + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: + super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.write_behavior = write_behavior + self.type = 'SapCloudForCustomerSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source.py index 8a21540f5e4d..561c1b342f93 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source.py @@ -15,6 +15,8 @@ class SapCloudForCustomerSource(CopySource): """A copy activity source for SAP Cloud for Customer source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class SapCloudForCustomerSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or Expression with resultType string). 
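Finally, a hedged end-to-end sketch pairing the Cloud for Customer source and sink inside one copy activity; both dataset names are placeholders.

    from azure.mgmt.datafactory.models import (
        CopyActivity, DatasetReference,
        SapCloudForCustomerSink, SapCloudForCustomerSource)

    # Read a page of entities with an OData query, then write back using
    # 'Update' rather than the default 'Insert' behavior.
    copy = CopyActivity(
        name='RoundTripC4C',
        inputs=[DatasetReference(reference_name='C4CResourceDataset1')],
        outputs=[DatasetReference(reference_name='C4CResourceDataset2')],
        source=SapCloudForCustomerSource(query='$top=10'),
        sink=SapCloudForCustomerSink(write_behavior='Update'),
    )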
@@ -49,7 +51,7 @@ class SapCloudForCustomerSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(SapCloudForCustomerSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'SapCloudForCustomerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source_py3.py new file mode 100644 index 000000000000..e9dab6ad1899 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SapCloudForCustomerSource(CopySource): + """A copy activity source for SAP Cloud for Customer source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: SAP Cloud for Customer OData query. For example, "$top=1". + Type: string (or Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'SapCloudForCustomerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service.py index 7fbe29c25b15..0ca69242055f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service.py @@ -15,6 +15,8 @@ class SapEccLinkedService(LinkedService): """Linked service for SAP ERP Central Component(SAP ECC). + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,9 +31,9 @@ class SapEccLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param url: The URL of SAP ECC OData API. For example, + :param url: Required. The URL of SAP ECC OData API. For example, '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: string (or Expression with resultType string). 
:type url: str @@ -65,10 +67,10 @@ class SapEccLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, } - def __init__(self, url, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, username=None, password=None, encrypted_credential=None): - super(SapEccLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.url = url - self.username = username - self.password = password - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(SapEccLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'SapEcc' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service_py3.py new file mode 100644 index 000000000000..7afd76b8fe09 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service_py3.py @@ -0,0 +1,76 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SapEccLinkedService(LinkedService): + """Linked service for SAP ERP Central Component(SAP ECC). + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The URL of SAP ECC OData API. For example, + '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: string (or + Expression with resultType string). + :type url: str + :param username: The username for Basic authentication. Type: string (or + Expression with resultType string). + :type username: str + :param password: The password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Either encryptedCredential or username/password must + be provided. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: str + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'str'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + } + + def __init__(self, *, url: str, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, username: str=None, password=None, encrypted_credential: str=None, **kwargs) -> None: + super(SapEccLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'SapEcc' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py index d893907b4205..f79367f49b3d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py @@ -15,6 +15,8 @@ class SapEccResourceDataset(Dataset): """The path of the SAP ECC OData entity. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class SapEccResourceDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,10 +41,10 @@ class SapEccResourceDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param path: The path of the SAP ECC OData entity. Type: string (or - Expression with resultType string). + :param path: Required. The path of the SAP ECC OData entity. Type: string + (or Expression with resultType string). 
:type path: object """ @@ -65,7 +67,7 @@ class SapEccResourceDataset(Dataset): 'path': {'key': 'typeProperties.path', 'type': 'object'}, } - def __init__(self, linked_service_name, path, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None): - super(SapEccResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.path = path + def __init__(self, **kwargs): + super(SapEccResourceDataset, self).__init__(**kwargs) + self.path = kwargs.get('path', None) self.type = 'SapEccResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py new file mode 100644 index 000000000000..76aaeb9bb9f2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SapEccResourceDataset(Dataset): + """The path of the SAP ECC OData entity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param path: Required. The path of the SAP ECC OData entity. Type: string + (or Expression with resultType string). 
+ :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, path, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(SapEccResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.path = path + self.type = 'SapEccResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py index 3a609576d56b..6379c33713d4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py @@ -15,6 +15,8 @@ class SapEccSource(CopySource): """A copy activity source for SAP ECC source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class SapEccSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). 
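[Editor's note] With this change, 'url' on the linked service and 'path' on the dataset become required keyword-only arguments. A hedged sketch of the new calling convention; SecureString and LinkedServiceReference are existing models in this package, and all names and values below are placeholders:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        SapEccLinkedService,
        SapEccResourceDataset,
        SecureString,
    )

    # 'url' is required; username/password enable Basic authentication.
    ecc_ls = SapEccLinkedService(
        url='https://hostname:port/sap/opu/odata/sap/servicename/',
        username='odata-user',
        password=SecureString(value='<placeholder>'),  # any SecretBase subtype
    )

    # Both 'linked_service_name' and 'path' are required on the dataset.
    ecc_ds = SapEccResourceDataset(
        linked_service_name=LinkedServiceReference(reference_name='SapEccLinkedService'),
        path='MyEntitySet',  # placeholder OData entity path
    )
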
@@ -49,7 +51,7 @@ class SapEccSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(SapEccSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'SapEccSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py new file mode 100644 index 000000000000..4412cac39960 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SapEccSource(CopySource): + """A copy activity source for SAP ECC source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: SAP ECC OData query. For example, "$top=1". Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'SapEccSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py index ad8c49f859be..14eda87b7be6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py @@ -15,6 +15,8 @@ class SapHanaLinkedService(LinkedService): """SAP HANA Linked Service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,13 +31,13 @@ class SapHanaLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param connection_string: SAP HANA ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object - :param server: Host name of the SAP HANA server. Type: string (or - Expression with resultType string). + :param server: Required. Host name of the SAP HANA server. Type: string + (or Expression with resultType string). :type server: object :param authentication_type: The authentication type to be used to connect to the SAP HANA server. 
Possible values include: 'Basic', 'Windows' @@ -72,12 +74,12 @@ class SapHanaLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, server, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, connection_string=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None): - super(SapHanaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.connection_string = connection_string - self.server = server - self.authentication_type = authentication_type - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(SapHanaLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.server = kwargs.get('server', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'SapHana' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py new file mode 100644 index 000000000000..de378a5b2bf3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SapHanaLinkedService(LinkedService): + """SAP HANA Linked Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: SAP HANA ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param server: Required. Host name of the SAP HANA server. Type: string + (or Expression with resultType string). + :type server: object + :param authentication_type: The authentication type to be used to connect + to the SAP HANA server. 
Possible values include: 'Basic', 'Windows' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.SapHanaAuthenticationType + :param user_name: Username to access the SAP HANA server. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password to access the SAP HANA server. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, server, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(SapHanaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.server = server + self.authentication_type = authentication_type + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'SapHana' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source.py index d4ee824fcd4a..e946dbcd9a50 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source.py @@ -15,6 +15,8 @@ class SapHanaSource(CopySource): """A copy activity source for SAP HANA source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class SapHanaSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: SAP HANA Sql query. Type: string (or Expression with resultType string). 
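[Editor's note] The regenerated SapHanaLinkedService makes 'server' the only required type property. A minimal sketch of the py3 constructor shown above (placeholder values throughout):

    from azure.mgmt.datafactory.models import SapHanaLinkedService, SecureString

    hana_ls = SapHanaLinkedService(
        server='myhana.contoso.com:30015',  # required; placeholder host[:port]
        authentication_type='Basic',        # 'Basic' or 'Windows'
        user_name='hana-user',
        password=SecureString(value='<placeholder>'),
    )
    assert hana_ls.type == 'SapHana'  # discriminator set by the model
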
@@ -53,8 +55,8 @@ class SapHanaSource(CopySource): 'packet_size': {'key': 'packetSize', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, packet_size=None): - super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query - self.packet_size = packet_size + def __init__(self, **kwargs): + super(SapHanaSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.packet_size = kwargs.get('packet_size', None) self.type = 'SapHanaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source_py3.py new file mode 100644 index 000000000000..730326c19183 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_source_py3.py @@ -0,0 +1,62 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SapHanaSource(CopySource): + """A copy activity source for SAP HANA source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: SAP HANA Sql query. Type: string (or Expression with + resultType string). + :type query: object + :param packet_size: The packet size of data read from SAP HANA. Type: + integer(or Expression with resultType integer). 
+ :type packet_size: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'packet_size': {'key': 'packetSize', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, packet_size=None, **kwargs) -> None: + super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.packet_size = packet_size + self.type = 'SapHanaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset.py index 8568b4e98459..6ff1ae31cd22 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset.py @@ -15,6 +15,8 @@ class SapHanaTableDataset(Dataset): """SAP HANA Table properties. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class SapHanaTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class SapHanaTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param sap_hana_table_dataset_schema: The schema name of SAP HANA. Type: string (or Expression with resultType string). 
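[Editor's note] Note the disambiguated dataset property: the HANA schema name is exposed as 'sap_hana_table_dataset_schema' but serializes to the wire key 'typeProperties.schema', since the base Dataset class already owns the plain 'schema' attribute. A sketch with placeholder schema/table names:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        SapHanaSource,
        SapHanaTableDataset,
    )

    # 'SAPHANADB' and 'MARA' are placeholder names for illustration only.
    hana_ds = SapHanaTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='SapHanaLinkedService'),
        sap_hana_table_dataset_schema='SAPHANADB',  # wire key: typeProperties.schema
        table='MARA',
    )

    # The matching copy source takes an optional SQL query and packet size.
    hana_src = SapHanaSource(
        query='SELECT * FROM "SAPHANADB"."MARA"',
        packet_size=2097152,  # placeholder; integer or ADF expression
    )
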
@@ -68,8 +70,8 @@ class SapHanaTableDataset(Dataset): 'table': {'key': 'typeProperties.table', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, sap_hana_table_dataset_schema=None, table=None): - super(SapHanaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.sap_hana_table_dataset_schema = sap_hana_table_dataset_schema - self.table = table + def __init__(self, **kwargs): + super(SapHanaTableDataset, self).__init__(**kwargs) + self.sap_hana_table_dataset_schema = kwargs.get('sap_hana_table_dataset_schema', None) + self.table = kwargs.get('table', None) self.type = 'SapHanaTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset_py3.py new file mode 100644 index 000000000000..6dc5c48ba21d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_table_dataset_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SapHanaTableDataset(Dataset): + """SAP HANA Table properties. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param sap_hana_table_dataset_schema: The schema name of SAP HANA. Type: + string (or Expression with resultType string). + :type sap_hana_table_dataset_schema: object + :param table: The table name of SAP HANA. Type: string (or Expression with + resultType string). 
+ :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sap_hana_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, sap_hana_table_dataset_schema=None, table=None, **kwargs) -> None: + super(SapHanaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.sap_hana_table_dataset_schema = sap_hana_table_dataset_schema + self.table = table + self.type = 'SapHanaTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service.py index c133b84f7118..bfe9c323d302 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service.py @@ -15,6 +15,8 @@ class SapOpenHubLinkedService(LinkedService): """SAP Business Warehouse Open Hub Destination Linked Service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,18 +31,19 @@ class SapOpenHubLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param server: Host name of the SAP BW instance where the open hub - destination is located. Type: string (or Expression with resultType + :param server: Required. Host name of the SAP BW instance where the open + hub destination is located. Type: string (or Expression with resultType string). :type server: object - :param system_number: System number of the BW system where the open hub - destination is located. (Usually a two-digit decimal number represented as - a string.) Type: string (or Expression with resultType string). + :param system_number: Required. System number of the BW system where the + open hub destination is located. (Usually a two-digit decimal number + represented as a string.) Type: string (or Expression with resultType + string). :type system_number: object - :param client_id: Client ID of the client on the BW system where the open - hub destination is located. (Usually a three-digit decimal number + :param client_id: Required. 
Client ID of the client on the BW system where + the open hub destination is located. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). :type client_id: object @@ -84,13 +87,13 @@ class SapOpenHubLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, server, system_number, client_id, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, language=None, user_name=None, password=None, encrypted_credential=None): - super(SapOpenHubLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.server = server - self.system_number = system_number - self.client_id = client_id - self.language = language - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(SapOpenHubLinkedService, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.system_number = kwargs.get('system_number', None) + self.client_id = kwargs.get('client_id', None) + self.language = kwargs.get('language', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'SapOpenHub' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service_py3.py new file mode 100644 index 000000000000..eddc50b0f1c5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service_py3.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SapOpenHubLinkedService(LinkedService): + """SAP Business Warehouse Open Hub Destination Linked Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. Host name of the SAP BW instance where the open + hub destination is located. Type: string (or Expression with resultType + string). 
+ :type server: object + :param system_number: Required. System number of the BW system where the + open hub destination is located. (Usually a two-digit decimal number + represented as a string.) Type: string (or Expression with resultType + string). + :type system_number: object + :param client_id: Required. Client ID of the client on the BW system where + the open hub destination is located. (Usually a three-digit decimal number + represented as a string) Type: string (or Expression with resultType + string). + :type client_id: object + :param language: Language of the BW system where the open hub destination + is located. The default value is EN. Type: string (or Expression with + resultType string). + :type language: object + :param user_name: Username to access the SAP BW server where the open hub + destination is located. Type: string (or Expression with resultType + string). + :type user_name: object + :param password: Password to access the SAP BW server where the open hub + destination is located. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'system_number': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'language': {'key': 'typeProperties.language', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, server, system_number, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, language=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(SapOpenHubLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.server = server + self.system_number = system_number + self.client_id = client_id + self.language = language + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'SapOpenHub' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source.py index 8178a134edae..d6dcbda60e36 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source.py @@ 
-16,6 +16,8 @@ class SapOpenHubSource(CopySource): """A copy activity source for SAP Business Warehouse Open Hub Destination source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -30,7 +32,7 @@ class SapOpenHubSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param exclude_last_request: Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with @@ -57,8 +59,8 @@ class SapOpenHubSource(CopySource): 'base_request_id': {'key': 'baseRequestId', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, exclude_last_request=None, base_request_id=None): - super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.exclude_last_request = exclude_last_request - self.base_request_id = base_request_id + def __init__(self, **kwargs): + super(SapOpenHubSource, self).__init__(**kwargs) + self.exclude_last_request = kwargs.get('exclude_last_request', None) + self.base_request_id = kwargs.get('base_request_id', None) self.type = 'SapOpenHubSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source_py3.py new file mode 100644 index 000000000000..752ffd8554b0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source_py3.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SapOpenHubSource(CopySource): + """A copy activity source for SAP Business Warehouse Open Hub Destination + source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
+ :type type: str + :param exclude_last_request: Whether to exclude the records of the last + request. The default value is true. Type: boolean (or Expression with + resultType boolean). + :type exclude_last_request: object + :param base_request_id: The ID of request for delta loading. Once it is + set, only data with requestId larger than the value of this property will + be retrieved. The default value is 0. Type: integer (or Expression with + resultType integer ). + :type base_request_id: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, + 'base_request_id': {'key': 'baseRequestId', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, exclude_last_request=None, base_request_id=None, **kwargs) -> None: + super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.exclude_last_request = exclude_last_request + self.base_request_id = base_request_id + self.type = 'SapOpenHubSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset.py index 0f5c97b8ec6b..2682969c5016 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset.py @@ -15,6 +15,8 @@ class SapOpenHubTableDataset(Dataset): """Sap Business Warehouse Open Hub Destination Table properties. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class SapOpenHubTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,11 +41,11 @@ class SapOpenHubTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param open_hub_destination_name: The name of the Open Hub Destination - with destination type as Database Table. Type: string (or Expression with - resultType string). + :param open_hub_destination_name: Required. The name of the Open Hub + Destination with destination type as Database Table. Type: string (or + Expression with resultType string). 
:type open_hub_destination_name: object :param exclude_last_request: Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with @@ -77,9 +79,9 @@ class SapOpenHubTableDataset(Dataset): 'base_request_id': {'key': 'typeProperties.baseRequestId', 'type': 'object'}, } - def __init__(self, linked_service_name, open_hub_destination_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, exclude_last_request=None, base_request_id=None): - super(SapOpenHubTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.open_hub_destination_name = open_hub_destination_name - self.exclude_last_request = exclude_last_request - self.base_request_id = base_request_id + def __init__(self, **kwargs): + super(SapOpenHubTableDataset, self).__init__(**kwargs) + self.open_hub_destination_name = kwargs.get('open_hub_destination_name', None) + self.exclude_last_request = kwargs.get('exclude_last_request', None) + self.base_request_id = kwargs.get('base_request_id', None) self.type = 'SapOpenHubTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset_py3.py new file mode 100644 index 000000000000..b06a53c10db3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset_py3.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SapOpenHubTableDataset(Dataset): + """Sap Business Warehouse Open Hub Destination Table properties. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. 
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param open_hub_destination_name: Required. The name of the Open Hub
+ Destination with destination type as Database Table. Type: string (or
+ Expression with resultType string).
+ :type open_hub_destination_name: object
+ :param exclude_last_request: Whether to exclude the records of the last
+ request. The default value is true. Type: boolean (or Expression with
+ resultType boolean).
+ :type exclude_last_request: object
+ :param base_request_id: The ID of the request for delta loading. Once it is
+ set, only data with requestId larger than the value of this property will
+ be retrieved. The default value is 0. Type: integer (or Expression with
+ resultType integer).
+ :type base_request_id: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ 'open_hub_destination_name': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'open_hub_destination_name': {'key': 'typeProperties.openHubDestinationName', 'type': 'object'},
+ 'exclude_last_request': {'key': 'typeProperties.excludeLastRequest', 'type': 'object'},
+ 'base_request_id': {'key': 'typeProperties.baseRequestId', 'type': 'object'},
+ }
+
+ def __init__(self, *, linked_service_name, open_hub_destination_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, exclude_last_request=None, base_request_id=None, **kwargs) -> None:
+ super(SapOpenHubTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+ self.open_hub_destination_name = open_hub_destination_name
+ self.exclude_last_request = exclude_last_request
+ self.base_request_id = base_request_id
+ self.type = 'SapOpenHubTable'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service.py
index 3a33f5c8e614..83b76d0a4fdd 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service.py
@@ -15,6 +15,8 @@ class SapTableLinkedService(LinkedService):
 """SAP Table Linked Service.
 
+ All required parameters must be populated in order to send to Azure.
+
 :param additional_properties: Unmatched properties from the message are
 deserialized this collection
 :type additional_properties: dict[str, object]
 :param connect_via: The integration runtime reference.
 :type connect_via:
 ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
 :param description: Linked service description.
 :type description: str
 :param parameters: Parameters for linked service.
 :type parameters: dict[str,
 ~azure.mgmt.datafactory.models.ParameterSpecification]
 :param annotations: List of tags that can be used for describing the
 linked service.
 :type annotations: list[object]
- :param type: Constant filled by server.
+ :param type: Required. Constant filled by server. :type type: str :param server: Host name of the SAP instance where the table is located. Type: string (or Expression with resultType string). @@ -117,22 +119,22 @@ class SapTableLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, server=None, system_number=None, client_id=None, language=None, system_id=None, user_name=None, password=None, message_server=None, message_server_service=None, snc_mode=None, snc_my_name=None, snc_partner_name=None, snc_library_path=None, snc_qop=None, logon_group=None, encrypted_credential=None): - super(SapTableLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.server = server - self.system_number = system_number - self.client_id = client_id - self.language = language - self.system_id = system_id - self.user_name = user_name - self.password = password - self.message_server = message_server - self.message_server_service = message_server_service - self.snc_mode = snc_mode - self.snc_my_name = snc_my_name - self.snc_partner_name = snc_partner_name - self.snc_library_path = snc_library_path - self.snc_qop = snc_qop - self.logon_group = logon_group - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(SapTableLinkedService, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.system_number = kwargs.get('system_number', None) + self.client_id = kwargs.get('client_id', None) + self.language = kwargs.get('language', None) + self.system_id = kwargs.get('system_id', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.message_server = kwargs.get('message_server', None) + self.message_server_service = kwargs.get('message_server_service', None) + self.snc_mode = kwargs.get('snc_mode', None) + self.snc_my_name = kwargs.get('snc_my_name', None) + self.snc_partner_name = kwargs.get('snc_partner_name', None) + self.snc_library_path = kwargs.get('snc_library_path', None) + self.snc_qop = kwargs.get('snc_qop', None) + self.logon_group = kwargs.get('logon_group', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'SapTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service_py3.py new file mode 100644 index 000000000000..d098acc1bbda --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_linked_service_py3.py @@ -0,0 +1,140 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SapTableLinkedService(LinkedService): + """SAP Table Linked Service. 
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param server: Host name of the SAP instance where the table is located.
+ Type: string (or Expression with resultType string).
+ :type server: object
+ :param system_number: System number of the SAP system where the table is
+ located. (Usually a two-digit decimal number represented as a string.)
+ Type: string (or Expression with resultType string).
+ :type system_number: object
+ :param client_id: Client ID of the client on the SAP system where the
+ table is located. (Usually a three-digit decimal number represented as a
+ string.) Type: string (or Expression with resultType string).
+ :type client_id: object
+ :param language: Language of the SAP system where the table is located.
+ The default value is EN. Type: string (or Expression with resultType
+ string).
+ :type language: object
+ :param system_id: SystemID of the SAP system where the table is located.
+ Type: string (or Expression with resultType string).
+ :type system_id: object
+ :param user_name: Username to access the SAP server where the table is
+ located. Type: string (or Expression with resultType string).
+ :type user_name: object
+ :param password: Password to access the SAP server where the table is
+ located.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param message_server: The hostname of the SAP Message Server. Type:
+ string (or Expression with resultType string).
+ :type message_server: object
+ :param message_server_service: The service name or port number of the
+ Message Server. Type: string (or Expression with resultType string).
+ :type message_server_service: object
+ :param snc_mode: SNC activation indicator to access the SAP server where
+ the table is located. Must be either 0 (off) or 1 (on). Type: string (or
+ Expression with resultType string).
+ :type snc_mode: object
+ :param snc_my_name: Initiator's SNC name to access the SAP server where
+ the table is located. Type: string (or Expression with resultType string).
+ :type snc_my_name: object
+ :param snc_partner_name: Communication partner's SNC name to access the
+ SAP server where the table is located. Type: string (or Expression with
+ resultType string).
+ :type snc_partner_name: object
+ :param snc_library_path: External security product's library to access the
+ SAP server where the table is located. Type: string (or Expression with
+ resultType string).
+ :type snc_library_path: object
+ :param snc_qop: SNC Quality of Protection. Allowed values include: 1, 2, 3,
+ 8, 9. Type: string (or Expression with resultType string).
+ :type snc_qop: object
+ :param logon_group: The Logon Group for the SAP System. Type: string (or
+ Expression with resultType string).
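
As a construction sketch for the linked service above, using the keyword-only py3 signature shown in this file (the host, client and credential values are illustrative placeholders):

    from azure.mgmt.datafactory.models import SapTableLinkedService, SecureString

    linked_service = SapTableLinkedService(
        server='sap-app-01.contoso.net',  # SAP application server host
        system_number='00',               # two-digit number as a string
        client_id='100',                  # three-digit number as a string
        language='EN',
        user_name='sap_reader',
        password=SecureString(value='<placeholder-secret>'),
    )
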
+ :type logon_group: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'language': {'key': 'typeProperties.language', 'type': 'object'}, + 'system_id': {'key': 'typeProperties.systemId', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'message_server': {'key': 'typeProperties.messageServer', 'type': 'object'}, + 'message_server_service': {'key': 'typeProperties.messageServerService', 'type': 'object'}, + 'snc_mode': {'key': 'typeProperties.sncMode', 'type': 'object'}, + 'snc_my_name': {'key': 'typeProperties.sncMyName', 'type': 'object'}, + 'snc_partner_name': {'key': 'typeProperties.sncPartnerName', 'type': 'object'}, + 'snc_library_path': {'key': 'typeProperties.sncLibraryPath', 'type': 'object'}, + 'snc_qop': {'key': 'typeProperties.sncQop', 'type': 'object'}, + 'logon_group': {'key': 'typeProperties.logonGroup', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, server=None, system_number=None, client_id=None, language=None, system_id=None, user_name=None, password=None, message_server=None, message_server_service=None, snc_mode=None, snc_my_name=None, snc_partner_name=None, snc_library_path=None, snc_qop=None, logon_group=None, encrypted_credential=None, **kwargs) -> None: + super(SapTableLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.server = server + self.system_number = system_number + self.client_id = client_id + self.language = language + self.system_id = system_id + self.user_name = user_name + self.password = password + self.message_server = message_server + self.message_server_service = message_server_service + self.snc_mode = snc_mode + self.snc_my_name = snc_my_name + self.snc_partner_name = snc_partner_name + self.snc_library_path = snc_library_path + self.snc_qop = snc_qop + self.logon_group = logon_group + self.encrypted_credential = encrypted_credential + self.type = 'SapTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings.py index febdbc581f07..b688fe16683b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings.py +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings.py
@@ -39,9 +39,9 @@ class SapTablePartitionSettings(Model):
 'max_partitions_number': {'key': 'maxPartitionsNumber', 'type': 'object'},
 }
 
- def __init__(self, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, max_partitions_number=None):
- super(SapTablePartitionSettings, self).__init__()
- self.partition_column_name = partition_column_name
- self.partition_upper_bound = partition_upper_bound
- self.partition_lower_bound = partition_lower_bound
- self.max_partitions_number = max_partitions_number
+ def __init__(self, **kwargs):
+ super(SapTablePartitionSettings, self).__init__(**kwargs)
+ self.partition_column_name = kwargs.get('partition_column_name', None)
+ self.partition_upper_bound = kwargs.get('partition_upper_bound', None)
+ self.partition_lower_bound = kwargs.get('partition_lower_bound', None)
+ self.max_partitions_number = kwargs.get('max_partitions_number', None)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings_py3.py
new file mode 100644
index 000000000000..37bdf610ab35
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_partition_settings_py3.py
@@ -0,0 +1,47 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class SapTablePartitionSettings(Model):
+ """The settings that will be leveraged for SAP table source partitioning.
+
+ :param partition_column_name: The name of the column that will be used for
+ proceeding range partitioning. Type: string (or Expression with resultType
+ string).
+ :type partition_column_name: object
+ :param partition_upper_bound: The maximum value of column specified in
+ partitionColumnName that will be used for proceeding range partitioning.
+ Type: string (or Expression with resultType string).
+ :type partition_upper_bound: object
+ :param partition_lower_bound: The minimum value of column specified in
+ partitionColumnName that will be used for proceeding range partitioning.
+ Type: string (or Expression with resultType string).
+ :type partition_lower_bound: object
+ :param max_partitions_number: The maximum number of partitions the table
+ will be split into. Type: integer (or Expression with resultType integer).
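
For orientation, a sketch of populating these settings for a year-based range partition (the column name and bounds are illustrative; per the docstring, the bounds are passed as strings):

    from azure.mgmt.datafactory.models import SapTablePartitionSettings

    partition_settings = SapTablePartitionSettings(
        partition_column_name='GJAHR',  # illustrative fiscal-year column
        partition_lower_bound='2017',
        partition_upper_bound='2020',
        max_partitions_number=4,
    )
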
+ :type max_partitions_number: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + 'max_partitions_number': {'key': 'maxPartitionsNumber', 'type': 'object'}, + } + + def __init__(self, *, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, max_partitions_number=None, **kwargs) -> None: + super(SapTablePartitionSettings, self).__init__(**kwargs) + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound + self.max_partitions_number = max_partitions_number diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset.py index 1fe2274850ad..24601ba6b793 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset.py @@ -15,6 +15,8 @@ class SapTableResourceDataset(Dataset): """SAP Table Resource properties. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class SapTableResourceDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,10 +41,10 @@ class SapTableResourceDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param table_name: The name of the SAP Table. Type: string (or Expression - with resultType string). + :param table_name: Required. The name of the SAP Table. Type: string (or + Expression with resultType string). 
:type table_name: object """ @@ -65,7 +67,7 @@ class SapTableResourceDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, linked_service_name, table_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None): - super(SapTableResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name + def __init__(self, **kwargs): + super(SapTableResourceDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) self.type = 'SapTableResource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset_py3.py new file mode 100644 index 000000000000..7b034ccd3a91 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_resource_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SapTableResourceDataset(Dataset): + """SAP Table Resource properties. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: Required. The name of the SAP Table. Type: string (or + Expression with resultType string). 
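
A construction sketch under the signature above; the reference name and SAP table are illustrative, and LinkedServiceReference(reference_name=...) is assumed to follow the same generated-model conventions as the classes in this patch:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, SapTableResourceDataset)

    dataset = SapTableResourceDataset(
        linked_service_name=LinkedServiceReference(reference_name='SapTableLS'),
        table_name='MARA',  # illustrative SAP table name
    )
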
:type table_name: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ 'table_name': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+ }
+
+ def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None:
+ super(SapTableResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+ self.table_name = table_name
+ self.type = 'SapTableResource'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source.py
index 79173bd615ae..35799515440e 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source.py
@@ -15,6 +15,8 @@ class SapTableSource(CopySource):
 """A copy activity source for SAP Table source.
 
+ All required parameters must be populated in order to send to Azure.
+
 :param additional_properties: Unmatched properties from the message are
 deserialized this collection
 :type additional_properties: dict[str, object]
 :param source_retry_count: Source retry count. Type: integer (or
 Expression with resultType integer).
 :type source_retry_count: object
 :param source_retry_wait: Source retry wait. Type: string (or Expression
 with resultType string), pattern:
 ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
 :type source_retry_wait: object
 :param max_concurrent_connections: The maximum concurrent connection count
 for the source data store. Type: integer (or Expression with resultType
 integer).
 :type max_concurrent_connections: object
- :param type: Constant filled by server.
+ :param type: Required. Constant filled by server.
 :type type: str
 :param row_count: The number of rows to be retrieved. Type: integer (or
 Expression with resultType integer).
@@ -85,14 +87,14 @@ class SapTableSource(CopySource): 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, row_count=None, row_skips=None, rfc_table_fields=None, rfc_table_options=None, batch_size=None, custom_rfc_read_table_function_module=None, partition_option=None, partition_settings=None): - super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.row_count = row_count - self.row_skips = row_skips - self.rfc_table_fields = rfc_table_fields - self.rfc_table_options = rfc_table_options - self.batch_size = batch_size - self.custom_rfc_read_table_function_module = custom_rfc_read_table_function_module - self.partition_option = partition_option - self.partition_settings = partition_settings + def __init__(self, **kwargs): + super(SapTableSource, self).__init__(**kwargs) + self.row_count = kwargs.get('row_count', None) + self.row_skips = kwargs.get('row_skips', None) + self.rfc_table_fields = kwargs.get('rfc_table_fields', None) + self.rfc_table_options = kwargs.get('rfc_table_options', None) + self.batch_size = kwargs.get('batch_size', None) + self.custom_rfc_read_table_function_module = kwargs.get('custom_rfc_read_table_function_module', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) self.type = 'SapTableSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source_py3.py new file mode 100644 index 000000000000..bed7bbb93932 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_table_source_py3.py @@ -0,0 +1,100 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SapTableSource(CopySource): + """A copy activity source for SAP Table source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
+ :type type: str
+ :param row_count: The number of rows to be retrieved. Type: integer (or
+ Expression with resultType integer).
+ :type row_count: object
+ :param row_skips: The number of rows that will be skipped. Type: integer
+ (or Expression with resultType integer).
+ :type row_skips: object
+ :param rfc_table_fields: The fields of the SAP table that will be
+ retrieved. For example, column0, column1. Type: string (or Expression with
+ resultType string).
+ :type rfc_table_fields: object
+ :param rfc_table_options: The options for the filtering of the SAP Table.
+ For example, COLUMN0 EQ SOME VALUE. Type: string (or Expression with
+ resultType string).
+ :type rfc_table_options: object
+ :param batch_size: Specifies the maximum number of rows that will be
+ retrieved at a time when retrieving data from SAP Table. Type: integer (or
+ Expression with resultType integer).
+ :type batch_size: object
+ :param custom_rfc_read_table_function_module: Specifies the custom RFC
+ function module that will be used to read data from SAP Table. Type:
+ string (or Expression with resultType string).
+ :type custom_rfc_read_table_function_module: object
+ :param partition_option: The partition mechanism that will be used for SAP
+ table read in parallel. Possible values include: 'None', 'PartitionOnInt',
+ 'PartitionOnCalendarYear', 'PartitionOnCalendarMonth',
+ 'PartitionOnCalendarDate', 'PartitionOnTime'
+ :type partition_option: str or
+ ~azure.mgmt.datafactory.models.SapTablePartitionOption
+ :param partition_settings: The settings that will be leveraged for SAP
+ table source partitioning.
+ :type partition_settings:
+ ~azure.mgmt.datafactory.models.SapTablePartitionSettings
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'row_count': {'key': 'rowCount', 'type': 'object'},
+ 'row_skips': {'key': 'rowSkips', 'type': 'object'},
+ 'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'},
+ 'rfc_table_options': {'key': 'rfcTableOptions', 'type': 'object'},
+ 'batch_size': {'key': 'batchSize', 'type': 'object'},
+ 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'},
+ 'partition_option': {'key': 'partitionOption', 'type': 'str'},
+ 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'},
+ }
+
+ def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, row_count=None, row_skips=None, rfc_table_fields=None, rfc_table_options=None, batch_size=None, custom_rfc_read_table_function_module=None, partition_option=None, partition_settings=None, **kwargs) -> None:
+ super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.row_count = row_count
+ self.row_skips = row_skips
+ self.rfc_table_fields = rfc_table_fields
+ self.rfc_table_options = rfc_table_options
+ self.batch_size = batch_size
+ self.custom_rfc_read_table_function_module = custom_rfc_read_table_function_module
+ self.partition_option = partition_option
self.partition_settings = partition_settings + self.type = 'SapTableSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger.py index 2c4ef68ae485..b9ea331b8c6e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger.py @@ -18,6 +18,8 @@ class ScheduleTrigger(MultiplePipelineTrigger): Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -31,12 +33,12 @@ class ScheduleTrigger(MultiplePipelineTrigger): :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param pipelines: Pipelines that need to be started. :type pipelines: list[~azure.mgmt.datafactory.models.TriggerPipelineReference] - :param recurrence: Recurrence schedule configuration. + :param recurrence: Required. Recurrence schedule configuration. :type recurrence: ~azure.mgmt.datafactory.models.ScheduleTriggerRecurrence """ @@ -56,7 +58,7 @@ class ScheduleTrigger(MultiplePipelineTrigger): 'recurrence': {'key': 'typeProperties.recurrence', 'type': 'ScheduleTriggerRecurrence'}, } - def __init__(self, recurrence, additional_properties=None, description=None, annotations=None, pipelines=None): - super(ScheduleTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines) - self.recurrence = recurrence + def __init__(self, **kwargs): + super(ScheduleTrigger, self).__init__(**kwargs) + self.recurrence = kwargs.get('recurrence', None) self.type = 'ScheduleTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_py3.py new file mode 100644 index 000000000000..f13f01c7fa13 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_py3.py @@ -0,0 +1,64 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .multiple_pipeline_trigger_py3 import MultiplePipelineTrigger + + +class ScheduleTrigger(MultiplePipelineTrigger): + """Trigger that creates pipeline runs periodically, on schedule. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. 
+ :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param pipelines: Pipelines that need to be started. + :type pipelines: + list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :param recurrence: Required. Recurrence schedule configuration. + :type recurrence: ~azure.mgmt.datafactory.models.ScheduleTriggerRecurrence + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'recurrence': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'recurrence': {'key': 'typeProperties.recurrence', 'type': 'ScheduleTriggerRecurrence'}, + } + + def __init__(self, *, recurrence, additional_properties=None, description: str=None, annotations=None, pipelines=None, **kwargs) -> None: + super(ScheduleTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) + self.recurrence = recurrence + self.type = 'ScheduleTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence.py index 021ad0afeb80..85408c45547b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence.py @@ -43,12 +43,12 @@ class ScheduleTriggerRecurrence(Model): 'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'}, } - def __init__(self, additional_properties=None, frequency=None, interval=None, start_time=None, end_time=None, time_zone=None, schedule=None): - super(ScheduleTriggerRecurrence, self).__init__() - self.additional_properties = additional_properties - self.frequency = frequency - self.interval = interval - self.start_time = start_time - self.end_time = end_time - self.time_zone = time_zone - self.schedule = schedule + def __init__(self, **kwargs): + super(ScheduleTriggerRecurrence, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.frequency = kwargs.get('frequency', None) + self.interval = kwargs.get('interval', None) + self.start_time = kwargs.get('start_time', None) + self.end_time = kwargs.get('end_time', None) + self.time_zone = kwargs.get('time_zone', None) + self.schedule = kwargs.get('schedule', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence_py3.py new file mode 100644 index 000000000000..a9b6eded7b96 --- /dev/null +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence_py3.py @@ -0,0 +1,54 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ScheduleTriggerRecurrence(Model): + """The workflow trigger recurrence. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param frequency: The frequency. Possible values include: 'NotSpecified', + 'Minute', 'Hour', 'Day', 'Week', 'Month', 'Year' + :type frequency: str or ~azure.mgmt.datafactory.models.RecurrenceFrequency + :param interval: The interval. + :type interval: int + :param start_time: The start time. + :type start_time: datetime + :param end_time: The end time. + :type end_time: datetime + :param time_zone: The time zone. + :type time_zone: str + :param schedule: The recurrence schedule. + :type schedule: ~azure.mgmt.datafactory.models.RecurrenceSchedule + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'frequency': {'key': 'frequency', 'type': 'str'}, + 'interval': {'key': 'interval', 'type': 'int'}, + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, + 'time_zone': {'key': 'timeZone', 'type': 'str'}, + 'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'}, + } + + def __init__(self, *, additional_properties=None, frequency=None, interval: int=None, start_time=None, end_time=None, time_zone: str=None, schedule=None, **kwargs) -> None: + super(ScheduleTriggerRecurrence, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.frequency = frequency + self.interval = interval + self.start_time = start_time + self.end_time = end_time + self.time_zone = time_zone + self.schedule = schedule diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action.py index b4f0a8291f33..50bc0131a5cf 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action.py @@ -15,12 +15,14 @@ class ScriptAction(Model): """Custom script action to run on HDI ondemand cluster once it's up. - :param name: The user provided name of the script action. + All required parameters must be populated in order to send to Azure. + + :param name: Required. The user provided name of the script action. :type name: str - :param uri: The URI for the script action. + :param uri: Required. The URI for the script action. :type uri: str - :param roles: The node types on which the script action should be - executed. + :param roles: Required. The node types on which the script action should + be executed. :type roles: object :param parameters: The parameters for the script action. 
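
The ScheduleTriggerRecurrence model above pairs with ScheduleTrigger; as a sketch, an eight-hourly recurrence might look like this (the start time and time zone are illustrative):

    from datetime import datetime
    from azure.mgmt.datafactory.models import ScheduleTriggerRecurrence

    recurrence = ScheduleTriggerRecurrence(
        frequency='Hour',  # a RecurrenceFrequency value
        interval=8,        # fire every 8 hours
        start_time=datetime(2019, 6, 7, 0, 0, 0),
        time_zone='UTC',
    )
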
:type parameters: str @@ -39,9 +41,9 @@ class ScriptAction(Model): 'parameters': {'key': 'parameters', 'type': 'str'}, } - def __init__(self, name, uri, roles, parameters=None): - super(ScriptAction, self).__init__() - self.name = name - self.uri = uri - self.roles = roles - self.parameters = parameters + def __init__(self, **kwargs): + super(ScriptAction, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.uri = kwargs.get('uri', None) + self.roles = kwargs.get('roles', None) + self.parameters = kwargs.get('parameters', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action_py3.py new file mode 100644 index 000000000000..c0e278073219 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action_py3.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ScriptAction(Model): + """Custom script action to run on HDI ondemand cluster once it's up. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. The user provided name of the script action. + :type name: str + :param uri: Required. The URI for the script action. + :type uri: str + :param roles: Required. The node types on which the script action should + be executed. + :type roles: object + :param parameters: The parameters for the script action. + :type parameters: str + """ + + _validation = { + 'name': {'required': True}, + 'uri': {'required': True}, + 'roles': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'uri': {'key': 'uri', 'type': 'str'}, + 'roles': {'key': 'roles', 'type': 'object'}, + 'parameters': {'key': 'parameters', 'type': 'str'}, + } + + def __init__(self, *, name: str, uri: str, roles, parameters: str=None, **kwargs) -> None: + super(ScriptAction, self).__init__(**kwargs) + self.name = name + self.uri = uri + self.roles = roles + self.parameters = parameters diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base.py index e7875d601f55..3d9475dd4382 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base.py @@ -18,7 +18,9 @@ class SecretBase(Model): You probably want to use the sub-classes and not this class directly. Known sub-classes are: SecureString, AzureKeyVaultSecretReference - :param type: Constant filled by server. + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. 
:type type: str """ @@ -34,6 +36,6 @@ class SecretBase(Model): 'type': {'SecureString': 'SecureString', 'AzureKeyVaultSecret': 'AzureKeyVaultSecretReference'} } - def __init__(self): - super(SecretBase, self).__init__() + def __init__(self, **kwargs): + super(SecretBase, self).__init__(**kwargs) self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base_py3.py new file mode 100644 index 000000000000..29403e61b245 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base_py3.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SecretBase(Model): + """The base definition of a secret type. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SecureString, AzureKeyVaultSecretReference + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SecureString': 'SecureString', 'AzureKeyVaultSecret': 'AzureKeyVaultSecretReference'} + } + + def __init__(self, **kwargs) -> None: + super(SecretBase, self).__init__(**kwargs) + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string.py index 3cc6e7630fca..bec430fdf8a4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string.py @@ -16,9 +16,11 @@ class SecureString(SecretBase): """Azure Data Factory secure string definition. The string value will be masked with asterisks '*' during Get or List API calls. - :param type: Constant filled by server. + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. :type type: str - :param value: Value of secure string. + :param value: Required. Value of secure string. 
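
A one-line sketch of wrapping a secret with the class above (the literal is a placeholder; a real secret would come from configuration or Key Vault rather than source code):

    from azure.mgmt.datafactory.models import SecureString

    password = SecureString(value='<placeholder-secret>')
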
:type value: str """ @@ -32,7 +34,7 @@ class SecureString(SecretBase): 'value': {'key': 'value', 'type': 'str'}, } - def __init__(self, value): - super(SecureString, self).__init__() - self.value = value + def __init__(self, **kwargs): + super(SecureString, self).__init__(**kwargs) + self.value = kwargs.get('value', None) self.type = 'SecureString' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string_py3.py new file mode 100644 index 000000000000..d7ebd5e13e78 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string_py3.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .secret_base_py3 import SecretBase + + +class SecureString(SecretBase): + """Azure Data Factory secure string definition. The string value will be + masked with asterisks '*' during Get or List API calls. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param value: Required. Value of secure string. + :type value: str + """ + + _validation = { + 'type': {'required': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__(self, *, value: str, **kwargs) -> None: + super(SecureString, self).__init__(**kwargs) + self.value = value + self.type = 'SecureString' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference.py index b4126035d4f3..fc56f8e8a799 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference.py @@ -15,10 +15,12 @@ class SelfDependencyTumblingWindowTriggerReference(DependencyReference): """Self referenced tumbling window trigger dependency. - :param type: Constant filled by server. + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. :type type: str - :param offset: Timespan applied to the start time of a tumbling window - when evaluating dependency. + :param offset: Required. Timespan applied to the start time of a tumbling + window when evaluating dependency. :type offset: str :param size: The size of the window when evaluating the dependency. If undefined the frequency of the tumbling window will be used. 
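
A sketch of a self dependency on a one-hour window, using timespan strings that satisfy the pattern enforced by the py3 validation shown below (the values are illustrative):

    from azure.mgmt.datafactory.models import (
        SelfDependencyTumblingWindowTriggerReference)

    dependency = SelfDependencyTumblingWindowTriggerReference(
        offset='01:00:00',  # timespan applied to the window start time
        size='01:00:00',    # size of the window being evaluated
    )
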
@@ -37,8 +39,8 @@ class SelfDependencyTumblingWindowTriggerReference(DependencyReference): 'size': {'key': 'size', 'type': 'str'}, } - def __init__(self, offset, size=None): - super(SelfDependencyTumblingWindowTriggerReference, self).__init__() - self.offset = offset - self.size = size + def __init__(self, **kwargs): + super(SelfDependencyTumblingWindowTriggerReference, self).__init__(**kwargs) + self.offset = kwargs.get('offset', None) + self.size = kwargs.get('size', None) self.type = 'SelfDependencyTumblingWindowTriggerReference' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference_py3.py new file mode 100644 index 000000000000..1dd1e575c2e8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dependency_reference_py3 import DependencyReference + + +class SelfDependencyTumblingWindowTriggerReference(DependencyReference): + """Self referenced tumbling window trigger dependency. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param offset: Required. Timespan applied to the start time of a tumbling + window when evaluating dependency. + :type offset: str + :param size: The size of the window when evaluating the dependency. If + undefined the frequency of the tumbling window will be used. + :type size: str + """ + + _validation = { + 'type': {'required': True}, + 'offset': {'required': True, 'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'offset': {'key': 'offset', 'type': 'str'}, + 'size': {'key': 'size', 'type': 'str'}, + } + + def __init__(self, *, offset: str, size: str=None, **kwargs) -> None: + super(SelfDependencyTumblingWindowTriggerReference, self).__init__(**kwargs) + self.offset = offset + self.size = size + self.type = 'SelfDependencyTumblingWindowTriggerReference' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime.py index 13a81ce89a6f..20744f02306d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime.py @@ -15,12 +15,14 @@ class SelfHostedIntegrationRuntime(IntegrationRuntime): """Self-hosted integration runtime. + All required parameters must be populated in order to send to Azure. 
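
A minimal sketch of declaring a self-hosted integration runtime with the class above (the description is illustrative; linked_info is left unset for a standalone runtime):

    from azure.mgmt.datafactory.models import SelfHostedIntegrationRuntime

    runtime = SelfHostedIntegrationRuntime(
        description='IR hosted on an on-premises VM',
    )
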
+ :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Integration runtime description. :type description: str - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param linked_info: :type linked_info: @@ -38,7 +40,7 @@ class SelfHostedIntegrationRuntime(IntegrationRuntime): 'linked_info': {'key': 'typeProperties.linkedInfo', 'type': 'LinkedIntegrationRuntimeType'}, } - def __init__(self, additional_properties=None, description=None, linked_info=None): - super(SelfHostedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description) - self.linked_info = linked_info + def __init__(self, **kwargs): + super(SelfHostedIntegrationRuntime, self).__init__(**kwargs) + self.linked_info = kwargs.get('linked_info', None) self.type = 'SelfHosted' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node.py index c0f9705bd12a..1491a80dc19a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node.py @@ -116,9 +116,9 @@ class SelfHostedIntegrationRuntimeNode(Model): 'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'}, } - def __init__(self, additional_properties=None): - super(SelfHostedIntegrationRuntimeNode, self).__init__() - self.additional_properties = additional_properties + def __init__(self, **kwargs): + super(SelfHostedIntegrationRuntimeNode, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) self.node_name = None self.machine_name = None self.host_service_uri = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node_py3.py new file mode 100644 index 000000000000..59b703737a5d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node_py3.py @@ -0,0 +1,139 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SelfHostedIntegrationRuntimeNode(Model): + """Properties of Self-hosted integration runtime node. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar node_name: Name of the integration runtime node. + :vartype node_name: str + :ivar machine_name: Machine name of the integration runtime node. 
+ :vartype machine_name: str + :ivar host_service_uri: URI for the host machine of the integration + runtime. + :vartype host_service_uri: str + :ivar status: Status of the integration runtime node. Possible values + include: 'NeedRegistration', 'Online', 'Limited', 'Offline', 'Upgrading', + 'Initializing', 'InitializeFailed' + :vartype status: str or + ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNodeStatus + :ivar capabilities: The integration runtime capabilities dictionary + :vartype capabilities: dict[str, str] + :ivar version_status: Status of the integration runtime node version. + :vartype version_status: str + :ivar version: Version of the integration runtime node. + :vartype version: str + :ivar register_time: The time at which the integration runtime node was + registered in ISO8601 format. + :vartype register_time: datetime + :ivar last_connect_time: The most recent time at which the integration + runtime was connected in ISO8601 format. + :vartype last_connect_time: datetime + :ivar expiry_time: The time at which the integration runtime will expire + in ISO8601 format. + :vartype expiry_time: datetime + :ivar last_start_time: The time the node last started up. + :vartype last_start_time: datetime + :ivar last_stop_time: The integration runtime node last stop time. + :vartype last_stop_time: datetime + :ivar last_update_result: The result of the last integration runtime node + update. Possible values include: 'None', 'Succeed', 'Fail' + :vartype last_update_result: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeUpdateResult + :ivar last_start_update_time: The last time for the integration runtime + node update start. + :vartype last_start_update_time: datetime + :ivar last_end_update_time: The last time for the integration runtime node + update end. + :vartype last_end_update_time: datetime + :ivar is_active_dispatcher: Indicates whether this node is the active + dispatcher for integration runtime requests. + :vartype is_active_dispatcher: bool + :ivar concurrent_jobs_limit: Maximum concurrent jobs on the integration + runtime node. + :vartype concurrent_jobs_limit: int + :ivar max_concurrent_jobs: The maximum concurrent jobs in this integration + runtime. 
+ :vartype max_concurrent_jobs: int + """ + + _validation = { + 'node_name': {'readonly': True}, + 'machine_name': {'readonly': True}, + 'host_service_uri': {'readonly': True}, + 'status': {'readonly': True}, + 'capabilities': {'readonly': True}, + 'version_status': {'readonly': True}, + 'version': {'readonly': True}, + 'register_time': {'readonly': True}, + 'last_connect_time': {'readonly': True}, + 'expiry_time': {'readonly': True}, + 'last_start_time': {'readonly': True}, + 'last_stop_time': {'readonly': True}, + 'last_update_result': {'readonly': True}, + 'last_start_update_time': {'readonly': True}, + 'last_end_update_time': {'readonly': True}, + 'is_active_dispatcher': {'readonly': True}, + 'concurrent_jobs_limit': {'readonly': True}, + 'max_concurrent_jobs': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'node_name': {'key': 'nodeName', 'type': 'str'}, + 'machine_name': {'key': 'machineName', 'type': 'str'}, + 'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'capabilities': {'key': 'capabilities', 'type': '{str}'}, + 'version_status': {'key': 'versionStatus', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + 'register_time': {'key': 'registerTime', 'type': 'iso-8601'}, + 'last_connect_time': {'key': 'lastConnectTime', 'type': 'iso-8601'}, + 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, + 'last_start_time': {'key': 'lastStartTime', 'type': 'iso-8601'}, + 'last_stop_time': {'key': 'lastStopTime', 'type': 'iso-8601'}, + 'last_update_result': {'key': 'lastUpdateResult', 'type': 'str'}, + 'last_start_update_time': {'key': 'lastStartUpdateTime', 'type': 'iso-8601'}, + 'last_end_update_time': {'key': 'lastEndUpdateTime', 'type': 'iso-8601'}, + 'is_active_dispatcher': {'key': 'isActiveDispatcher', 'type': 'bool'}, + 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, + 'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'}, + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(SelfHostedIntegrationRuntimeNode, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.node_name = None + self.machine_name = None + self.host_service_uri = None + self.status = None + self.capabilities = None + self.version_status = None + self.version = None + self.register_time = None + self.last_connect_time = None + self.expiry_time = None + self.last_start_time = None + self.last_stop_time = None + self.last_update_result = None + self.last_start_update_time = None + self.last_end_update_time = None + self.is_active_dispatcher = None + self.concurrent_jobs_limit = None + self.max_concurrent_jobs = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_py3.py new file mode 100644 index 000000000000..a25d04373849 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .integration_runtime_py3 import IntegrationRuntime + + +class SelfHostedIntegrationRuntime(IntegrationRuntime): + """Self-hosted integration runtime. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Integration runtime description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param linked_info: + :type linked_info: + ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeType + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_info': {'key': 'typeProperties.linkedInfo', 'type': 'LinkedIntegrationRuntimeType'}, + } + + def __init__(self, *, additional_properties=None, description: str=None, linked_info=None, **kwargs) -> None: + super(SelfHostedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description, **kwargs) + self.linked_info = linked_info + self.type = 'SelfHosted' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status.py index 25226a256900..5dd9995987d9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status.py @@ -18,6 +18,8 @@ class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): 'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied' :vartype state: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeState - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :ivar create_time: The time at which the integration runtime was created, in ISO8601 format. 
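# A brief sketch of how the keyword-only py3 constructors above are used:
# only the writable :param fields (description and linked_info here;
# additional_properties, nodes, and links on the status model) are
# caller-supplied, while every :ivar field is reset to None in __init__ and
# populated by the service. The description string is a placeholder.
from azure.mgmt.datafactory.models import SelfHostedIntegrationRuntime

runtime = SelfHostedIntegrationRuntime(description='on-premises runtime')
assert runtime.type == 'SelfHosted'  # discriminator is fixed by the model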
@@ -123,13 +125,13 @@ class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): 'auto_update_eta': {'key': 'typeProperties.autoUpdateETA', 'type': 'iso-8601'}, } - def __init__(self, additional_properties=None, nodes=None, links=None): - super(SelfHostedIntegrationRuntimeStatus, self).__init__(additional_properties=additional_properties) + def __init__(self, **kwargs): + super(SelfHostedIntegrationRuntimeStatus, self).__init__(**kwargs) self.create_time = None self.task_queue_id = None self.internal_channel_encryption = None self.version = None - self.nodes = nodes + self.nodes = kwargs.get('nodes', None) self.scheduled_update_date = None self.update_delay_offset = None self.local_time_zone_offset = None @@ -137,7 +139,7 @@ def __init__(self, additional_properties=None, nodes=None, links=None): self.service_urls = None self.auto_update = None self.version_status = None - self.links = links + self.links = kwargs.get('links', None) self.pushed_version = None self.latest_version = None self.auto_update_eta = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status_py3.py new file mode 100644 index 000000000000..acad7661fc15 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status_py3.py @@ -0,0 +1,146 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .integration_runtime_status_py3 import IntegrationRuntimeStatus + + +class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): + """Self-hosted integration runtime status. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar data_factory_name: The data factory name which the integration + runtime belong to. + :vartype data_factory_name: str + :ivar state: The state of integration runtime. Possible values include: + 'Initial', 'Stopped', 'Started', 'Starting', 'Stopping', + 'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied' + :vartype state: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeState + :param type: Required. Constant filled by server. + :type type: str + :ivar create_time: The time at which the integration runtime was created, + in ISO8601 format. + :vartype create_time: datetime + :ivar task_queue_id: The task queue id of the integration runtime. + :vartype task_queue_id: str + :ivar internal_channel_encryption: It is used to set the encryption mode + for node-node communication channel (when more than 2 self-hosted + integration runtime nodes exist). 
Possible values include: 'NotSet', + 'SslEncrypted', 'NotEncrypted' + :vartype internal_channel_encryption: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeInternalChannelEncryptionMode + :ivar version: Version of the integration runtime. + :vartype version: str + :param nodes: The list of nodes for this integration runtime. + :type nodes: + list[~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode] + :ivar scheduled_update_date: The date at which the integration runtime + will be scheduled to update, in ISO8601 format. + :vartype scheduled_update_date: datetime + :ivar update_delay_offset: The time in the date scheduled by service to + update the integration runtime, e.g., PT03H is 3 hours + :vartype update_delay_offset: str + :ivar local_time_zone_offset: The local time zone offset in hours. + :vartype local_time_zone_offset: str + :ivar capabilities: Object with additional information about integration + runtime capabilities. + :vartype capabilities: dict[str, str] + :ivar service_urls: The URLs for the services used in integration runtime + backend service. + :vartype service_urls: list[str] + :ivar auto_update: Whether Self-hosted integration runtime auto update has + been turned on. Possible values include: 'On', 'Off' + :vartype auto_update: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate + :ivar version_status: Status of the integration runtime version. + :vartype version_status: str + :param links: The list of linked integration runtimes that are created to + share with this integration runtime. + :type links: list[~azure.mgmt.datafactory.models.LinkedIntegrationRuntime] + :ivar pushed_version: The version that the integration runtime is going to + update to. + :vartype pushed_version: str + :ivar latest_version: The latest version on download center. + :vartype latest_version: str + :ivar auto_update_eta: The estimated time when the self-hosted integration + runtime will be updated. 
+ :vartype auto_update_eta: datetime + """ + + _validation = { + 'data_factory_name': {'readonly': True}, + 'state': {'readonly': True}, + 'type': {'required': True}, + 'create_time': {'readonly': True}, + 'task_queue_id': {'readonly': True}, + 'internal_channel_encryption': {'readonly': True}, + 'version': {'readonly': True}, + 'scheduled_update_date': {'readonly': True}, + 'update_delay_offset': {'readonly': True}, + 'local_time_zone_offset': {'readonly': True}, + 'capabilities': {'readonly': True}, + 'service_urls': {'readonly': True}, + 'auto_update': {'readonly': True}, + 'version_status': {'readonly': True}, + 'pushed_version': {'readonly': True}, + 'latest_version': {'readonly': True}, + 'auto_update_eta': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, + 'state': {'key': 'state', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'create_time': {'key': 'typeProperties.createTime', 'type': 'iso-8601'}, + 'task_queue_id': {'key': 'typeProperties.taskQueueId', 'type': 'str'}, + 'internal_channel_encryption': {'key': 'typeProperties.internalChannelEncryption', 'type': 'str'}, + 'version': {'key': 'typeProperties.version', 'type': 'str'}, + 'nodes': {'key': 'typeProperties.nodes', 'type': '[SelfHostedIntegrationRuntimeNode]'}, + 'scheduled_update_date': {'key': 'typeProperties.scheduledUpdateDate', 'type': 'iso-8601'}, + 'update_delay_offset': {'key': 'typeProperties.updateDelayOffset', 'type': 'str'}, + 'local_time_zone_offset': {'key': 'typeProperties.localTimeZoneOffset', 'type': 'str'}, + 'capabilities': {'key': 'typeProperties.capabilities', 'type': '{str}'}, + 'service_urls': {'key': 'typeProperties.serviceUrls', 'type': '[str]'}, + 'auto_update': {'key': 'typeProperties.autoUpdate', 'type': 'str'}, + 'version_status': {'key': 'typeProperties.versionStatus', 'type': 'str'}, + 'links': {'key': 'typeProperties.links', 'type': '[LinkedIntegrationRuntime]'}, + 'pushed_version': {'key': 'typeProperties.pushedVersion', 'type': 'str'}, + 'latest_version': {'key': 'typeProperties.latestVersion', 'type': 'str'}, + 'auto_update_eta': {'key': 'typeProperties.autoUpdateETA', 'type': 'iso-8601'}, + } + + def __init__(self, *, additional_properties=None, nodes=None, links=None, **kwargs) -> None: + super(SelfHostedIntegrationRuntimeStatus, self).__init__(additional_properties=additional_properties, **kwargs) + self.create_time = None + self.task_queue_id = None + self.internal_channel_encryption = None + self.version = None + self.nodes = nodes + self.scheduled_update_date = None + self.update_delay_offset = None + self.local_time_zone_offset = None + self.capabilities = None + self.service_urls = None + self.auto_update = None + self.version_status = None + self.links = links + self.pushed_version = None + self.latest_version = None + self.auto_update_eta = None + self.type = 'SelfHosted' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service.py index ed4f6b5f4e1d..4d42f575e769 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service.py @@ -15,6 +15,8 @@ class ServiceNowLinkedService(LinkedService): """ServiceNow server linked service. 
+ All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,13 +31,13 @@ class ServiceNowLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param endpoint: The endpoint of the ServiceNow server. (i.e. + :param endpoint: Required. The endpoint of the ServiceNow server. (i.e. .service-now.com) :type endpoint: object - :param authentication_type: The authentication type to use. Possible - values include: 'Basic', 'OAuth2' + :param authentication_type: Required. The authentication type to use. + Possible values include: 'Basic', 'OAuth2' :type authentication_type: str or ~azure.mgmt.datafactory.models.ServiceNowAuthenticationType :param username: The user name used to connect to the ServiceNow server @@ -89,16 +91,16 @@ class ServiceNowLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, endpoint, authentication_type, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, username=None, password=None, client_id=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None): - super(ServiceNowLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.endpoint = endpoint - self.authentication_type = authentication_type - self.username = username - self.password = password - self.client_id = client_id - self.client_secret = client_secret - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(ServiceNowLinkedService, self).__init__(**kwargs) + self.endpoint = kwargs.get('endpoint', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'ServiceNow' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service_py3.py new file mode 100644 index 000000000000..b9d166f241d6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service_py3.py @@ -0,0 +1,106 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class ServiceNowLinkedService(LinkedService): + """ServiceNow server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of the ServiceNow server. (i.e. + .service-now.com) + :type endpoint: object + :param authentication_type: Required. The authentication type to use. + Possible values include: 'Basic', 'OAuth2' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.ServiceNowAuthenticationType + :param username: The user name used to connect to the ServiceNow server + for Basic and OAuth2 authentication. + :type username: object + :param password: The password corresponding to the user name for Basic and + OAuth2 authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param client_id: The client id for OAuth2 authentication. + :type client_id: object + :param client_secret: The client secret for OAuth2 authentication. + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, endpoint, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, username=None, password=None, client_id=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(ServiceNowLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.endpoint = endpoint + self.authentication_type = authentication_type + self.username = username + self.password = password + self.client_id = client_id + self.client_secret = client_secret + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'ServiceNow' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset.py index eb2785201321..a9821ba0fd10 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset.py @@ -15,6 +15,8 @@ class ServiceNowObjectDataset(Dataset): """ServiceNow server dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class ServiceNowObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. 
:type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class ServiceNowObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -64,7 +66,7 @@ class ServiceNowObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): - super(ServiceNowObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name + def __init__(self, **kwargs): + super(ServiceNowObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) self.type = 'ServiceNowObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset_py3.py new file mode 100644 index 000000000000..fcd2fd537a31 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class ServiceNowObjectDataset(Dataset): + """ServiceNow server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. 
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(ServiceNowObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'ServiceNowObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source.py index d00397e49404..16b10bb8de5e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source.py @@ -15,6 +15,8 @@ class ServiceNowSource(CopySource): """A copy activity ServiceNow server source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class ServiceNowSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
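# Sketch of the ServiceNow models under the new **kwargs pattern: required
# fields such as endpoint and authentication_type are no longer positional,
# and msrest typically enforces the _validation table when the model is
# serialized rather than in __init__. The endpoint and query values below
# are placeholders.
from azure.mgmt.datafactory.models import (
    ServiceNowLinkedService,
    ServiceNowSource,
)

linked_service = ServiceNowLinkedService(
    endpoint='https://instance.service-now.com',  # placeholder instance URL
    authentication_type='Basic',                  # or 'OAuth2'
)
source = ServiceNowSource(query='sysparm_query=active=true')  # placeholder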
@@ -49,7 +51,7 @@ class ServiceNowSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(ServiceNowSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'ServiceNowSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source_py3.py new file mode 100644 index 000000000000..20d1a64d04d3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class ServiceNowSource(CopySource): + """A copy activity ServiceNow server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'ServiceNowSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity.py index 05adf0210720..e8dd1690862d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity.py @@ -15,10 +15,12 @@ class SetVariableActivity(ControlActivity): """Set value for a Variable. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Activity name. + :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str @@ -26,7 +28,7 @@ class SetVariableActivity(ControlActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param variable_name: Name of the variable whose value needs to be set. 
:type variable_name: str @@ -50,8 +52,8 @@ class SetVariableActivity(ControlActivity): 'value': {'key': 'typeProperties.value', 'type': 'object'}, } - def __init__(self, name, additional_properties=None, description=None, depends_on=None, user_properties=None, variable_name=None, value=None): - super(SetVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties) - self.variable_name = variable_name - self.value = value + def __init__(self, **kwargs): + super(SetVariableActivity, self).__init__(**kwargs) + self.variable_name = kwargs.get('variable_name', None) + self.value = kwargs.get('value', None) self.type = 'SetVariable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity_py3.py new file mode 100644 index 000000000000..e045abee3dfb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity_py3.py @@ -0,0 +1,59 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity_py3 import ControlActivity + + +class SetVariableActivity(ControlActivity): + """Set value for a Variable. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param variable_name: Name of the variable whose value needs to be set. + :type variable_name: str + :param value: Value to be set. 
Could be a static value or Expression + :type value: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, + 'value': {'key': 'typeProperties.value', 'type': 'object'}, + } + + def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, variable_name: str=None, value=None, **kwargs) -> None: + super(SetVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.variable_name = variable_name + self.value = value + self.type = 'SetVariable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location.py index 49a19e5398f1..5b8fd4e42ba2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location.py @@ -15,10 +15,12 @@ class SftpLocation(DatasetLocation): """The location of SFTP dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Type of dataset storage location. + :param type: Required. Type of dataset storage location. :type type: str :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string) @@ -32,5 +34,12 @@ class SftpLocation(DatasetLocation): 'type': {'required': True}, } - def __init__(self, type, additional_properties=None, folder_path=None, file_name=None): - super(SftpLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name) + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SftpLocation, self).__init__(**kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location_py3.py new file mode 100644 index 000000000000..c5e2feafa971 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class SftpLocation(DatasetLocation): + """The location of SFTP dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(SftpLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings.py index 8e2df93c4d31..5e7b4faf77ad 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings.py @@ -15,10 +15,12 @@ class SftpReadSettings(StoreReadSettings): """Sftp read settings. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: The read setting type. + :param type: Required. The read setting type. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. 
Type: integer (or Expression with resultType @@ -57,10 +59,10 @@ class SftpReadSettings(StoreReadSettings): 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, type, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, modified_datetime_start=None, modified_datetime_end=None): - super(SftpReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections) - self.recursive = recursive - self.wildcard_folder_path = wildcard_folder_path - self.wildcard_file_name = wildcard_file_name - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end + def __init__(self, **kwargs): + super(SftpReadSettings, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings_py3.py new file mode 100644 index 000000000000..e6c27e3ad08a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_settings_py3.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .store_read_settings_py3 import StoreReadSettings + + +class SftpReadSettings(StoreReadSettings): + """Sftp read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Sftp wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). 
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(SftpReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service.py index cc7839f000a7..aa4c535fc514 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service.py @@ -15,6 +15,8 @@ class SftpServerLinkedService(LinkedService): """A linked service for an SSH File Transfer Protocol (SFTP) server. . + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,10 +31,10 @@ class SftpServerLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param host: The SFTP server host name. Type: string (or Expression with - resultType string). + :param host: Required. The SFTP server host name. Type: string (or + Expression with resultType string). :type host: object :param port: The TCP port number that the SFTP server uses to listen for client connections. Default value is 22. 
Type: integer (or Expression with @@ -101,17 +103,17 @@ class SftpServerLinkedService(LinkedService): 'host_key_fingerprint': {'key': 'typeProperties.hostKeyFingerprint', 'type': 'object'}, } - def __init__(self, host, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, port=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, private_key_path=None, private_key_content=None, pass_phrase=None, skip_host_key_validation=None, host_key_fingerprint=None): - super(SftpServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.host = host - self.port = port - self.authentication_type = authentication_type - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential - self.private_key_path = private_key_path - self.private_key_content = private_key_content - self.pass_phrase = pass_phrase - self.skip_host_key_validation = skip_host_key_validation - self.host_key_fingerprint = host_key_fingerprint + def __init__(self, **kwargs): + super(SftpServerLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.port = kwargs.get('port', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.private_key_path = kwargs.get('private_key_path', None) + self.private_key_content = kwargs.get('private_key_content', None) + self.pass_phrase = kwargs.get('pass_phrase', None) + self.skip_host_key_validation = kwargs.get('skip_host_key_validation', None) + self.host_key_fingerprint = kwargs.get('host_key_fingerprint', None) self.type = 'Sftp' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service_py3.py new file mode 100644 index 000000000000..7decd7781348 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service_py3.py @@ -0,0 +1,119 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SftpServerLinkedService(LinkedService): + """A linked service for an SSH File Transfer Protocol (SFTP) server. . + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The SFTP server host name. Type: string (or + Expression with resultType string). + :type host: object + :param port: The TCP port number that the SFTP server uses to listen for + client connections. Default value is 22. Type: integer (or Expression with + resultType integer), minimum: 0. + :type port: object + :param authentication_type: The authentication type to be used to connect + to the FTP server. Possible values include: 'Basic', 'SshPublicKey' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.SftpAuthenticationType + :param user_name: The username used to log on to the SFTP server. Type: + string (or Expression with resultType string). + :type user_name: object + :param password: Password to logon the SFTP server for Basic + authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + :param private_key_path: The SSH private key file path for SshPublicKey + authentication. Only valid for on-premises copy. For on-premises copy with + SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent + should be specified. SSH private key should be OpenSSH format. Type: + string (or Expression with resultType string). + :type private_key_path: object + :param private_key_content: Base64 encoded SSH private key content for + SshPublicKey authentication. For on-premises copy with SshPublicKey + authentication, either PrivateKeyPath or PrivateKeyContent should be + specified. SSH private key should be OpenSSH format. + :type private_key_content: ~azure.mgmt.datafactory.models.SecretBase + :param pass_phrase: The password to decrypt the SSH private key if the SSH + private key is encrypted. + :type pass_phrase: ~azure.mgmt.datafactory.models.SecretBase + :param skip_host_key_validation: If true, skip the SSH host key + validation. Default value is false. Type: boolean (or Expression with + resultType boolean). + :type skip_host_key_validation: object + :param host_key_fingerprint: The host key finger-print of the SFTP server. + When SkipHostKeyValidation is false, HostKeyFingerprint should be + specified. Type: string (or Expression with resultType string). 
+ :type host_key_fingerprint: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'private_key_path': {'key': 'typeProperties.privateKeyPath', 'type': 'object'}, + 'private_key_content': {'key': 'typeProperties.privateKeyContent', 'type': 'SecretBase'}, + 'pass_phrase': {'key': 'typeProperties.passPhrase', 'type': 'SecretBase'}, + 'skip_host_key_validation': {'key': 'typeProperties.skipHostKeyValidation', 'type': 'object'}, + 'host_key_fingerprint': {'key': 'typeProperties.hostKeyFingerprint', 'type': 'object'}, + } + + def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, private_key_path=None, private_key_content=None, pass_phrase=None, skip_host_key_validation=None, host_key_fingerprint=None, **kwargs) -> None: + super(SftpServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.port = port + self.authentication_type = authentication_type + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.private_key_path = private_key_path + self.private_key_content = private_key_content + self.pass_phrase = pass_phrase + self.skip_host_key_validation = skip_host_key_validation + self.host_key_fingerprint = host_key_fingerprint + self.type = 'Sftp' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service.py index d5f4a5fd2e26..ee5311dceb7a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service.py @@ -15,6 +15,8 @@ class ShopifyLinkedService(LinkedService): """Shopify Service linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,9 +31,9 @@ class ShopifyLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param host: The endpoint of the Shopify server. (i.e. 
+ :param host: Required. The endpoint of the Shopify server. (i.e. mystore.myshopify.com) :type host: object :param access_token: The API access token that can be used to access @@ -73,12 +75,12 @@ class ShopifyLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, host, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, access_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None): - super(ShopifyLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.host = host - self.access_token = access_token - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(ShopifyLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.access_token = kwargs.get('access_token', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'Shopify' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service_py3.py new file mode 100644 index 000000000000..ea6189277552 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class ShopifyLinkedService(LinkedService): + """Shopify Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The endpoint of the Shopify server. (i.e. + mystore.myshopify.com) + :type host: object + :param access_token: The API access token that can be used to access + Shopify’s data. 
The token won't expire if it is in offline mode. + :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(ShopifyLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.access_token = access_token + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Shopify' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset.py index 16b4d8caba37..ab3e475b9c97 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset.py @@ -15,6 +15,8 @@ class ShopifyObjectDataset(Dataset): """Shopify Service dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class ShopifyObjectDataset(Dataset): dataset.
Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class ShopifyObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -64,7 +66,7 @@ class ShopifyObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): - super(ShopifyObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name + def __init__(self, **kwargs): + super(ShopifyObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) self.type = 'ShopifyObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset_py3.py new file mode 100644 index 000000000000..98b9c43c21e8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class ShopifyObjectDataset(Dataset): + """Shopify Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(ShopifyObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'ShopifyObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source.py index c5a05de69491..d4596976d459 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source.py @@ -15,6 +15,8 @@ class ShopifySource(CopySource): """A copy activity Shopify Service source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class ShopifySource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
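For reference, a minimal construction sketch for the regenerated source model. Assumptions: ShopifySource is re-exported from azure.mgmt.datafactory.models as the package does for its other generated classes, and the query value below is a hypothetical example, not one mandated by the service.

    from azure.mgmt.datafactory.models import ShopifySource

    # Both generated variants take keyword arguments only: the Python 2 file
    # routes them through **kwargs, while the _py3 file declares them as
    # keyword-only parameters after the bare '*'.
    source = ShopifySource(
        query="products",              # hypothetical query string
        source_retry_count=3,
        source_retry_wait="00:00:30",  # matches the documented timespan pattern
    )
    assert source.type == 'ShopifySource'  # discriminator set by __init__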
@@ -49,7 +51,7 @@ class ShopifySource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(ShopifySource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'ShopifySource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source_py3.py new file mode 100644 index 000000000000..6b56edd62904 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class ShopifySource(CopySource): + """A copy activity Shopify Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'ShopifySource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service.py index 9eeea5cf6481..4f9ab49a7bba 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service.py @@ -15,6 +15,8 @@ class SparkLinkedService(LinkedService): """Spark Server linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,12 +31,12 @@ class SparkLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param host: IP address or host name of the Spark server + :param host: Required. IP address or host name of the Spark server :type host: object - :param port: The TCP port that the Spark server uses to listen for client - connections. + :param port: Required. The TCP port that the Spark server uses to listen + for client connections. :type port: object :param server_type: The type of Spark server. Possible values include: 'SharkServer', 'SharkServer2', 'SparkThriftServer' @@ -43,8 +45,8 @@ class SparkLinkedService(LinkedService): Thrift layer. Possible values include: 'Binary', 'SASL', 'HTTP ' :type thrift_transport_protocol: str or ~azure.mgmt.datafactory.models.SparkThriftTransportProtocol - :param authentication_type: The authentication method used to access the - Spark server. Possible values include: 'Anonymous', 'Username', + :param authentication_type: Required. The authentication method used to + access the Spark server. 
Possible values include: 'Anonymous', 'Username', 'UsernameAndPassword', 'WindowsAzureHDInsightService' :type authentication_type: str or ~azure.mgmt.datafactory.models.SparkAuthenticationType @@ -110,20 +112,20 @@ class SparkLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, host, port, authentication_type, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, server_type=None, thrift_transport_protocol=None, username=None, password=None, http_path=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None): - super(SparkLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.host = host - self.port = port - self.server_type = server_type - self.thrift_transport_protocol = thrift_transport_protocol - self.authentication_type = authentication_type - self.username = username - self.password = password - self.http_path = http_path - self.enable_ssl = enable_ssl - self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch - self.allow_self_signed_server_cert = allow_self_signed_server_cert - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(SparkLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.port = kwargs.get('port', None) + self.server_type = kwargs.get('server_type', None) + self.thrift_transport_protocol = kwargs.get('thrift_transport_protocol', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.http_path = kwargs.get('http_path', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) + self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'Spark' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service_py3.py new file mode 100644 index 000000000000..f6433b6ab187 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service_py3.py @@ -0,0 +1,131 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SparkLinkedService(LinkedService): + """Spark Server linked service. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. IP address or host name of the Spark server + :type host: object + :param port: Required. The TCP port that the Spark server uses to listen + for client connections. + :type port: object + :param server_type: The type of Spark server. Possible values include: + 'SharkServer', 'SharkServer2', 'SparkThriftServer' + :type server_type: str or ~azure.mgmt.datafactory.models.SparkServerType + :param thrift_transport_protocol: The transport protocol to use in the + Thrift layer. Possible values include: 'Binary', 'SASL', 'HTTP ' + :type thrift_transport_protocol: str or + ~azure.mgmt.datafactory.models.SparkThriftTransportProtocol + :param authentication_type: Required. The authentication method used to + access the Spark server. Possible values include: 'Anonymous', 'Username', + 'UsernameAndPassword', 'WindowsAzureHDInsightService' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.SparkAuthenticationType + :param username: The user name that you use to access Spark Server. + :type username: object + :param password: The password corresponding to the user name that you + provided in the Username field + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param http_path: The partial URL corresponding to the Spark server. + :type http_path: object + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a + CA-issued SSL certificate name to match the host name of the server when + connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'port': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, + 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, port, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, server_type=None, thrift_transport_protocol=None, username=None, password=None, http_path=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: + super(SparkLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.port = port + self.server_type = server_type + self.thrift_transport_protocol = thrift_transport_protocol + self.authentication_type = authentication_type + self.username = username + self.password = password + self.http_path = http_path + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.encrypted_credential = encrypted_credential + self.type = 'Spark' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset.py index d270a75921f3..bdbdf067e1ea 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset.py @@ -15,6 +15,8 @@ class SparkObjectDataset(Dataset): """Spark Server dataset. 
+ All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class SparkObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class SparkObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. @@ -72,9 +74,9 @@ class SparkObjectDataset(Dataset): 'spark_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, spark_object_dataset_schema=None): - super(SparkObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name - self.table = table - self.spark_object_dataset_schema = spark_object_dataset_schema + def __init__(self, **kwargs): + super(SparkObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.spark_object_dataset_schema = kwargs.get('spark_object_dataset_schema', None) self.type = 'SparkObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset_py3.py new file mode 100644 index 000000000000..afe383951f1c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SparkObjectDataset(Dataset): + """Spark Server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. 
+ :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The table name of the Spark server. Type: string (or Expression + with resultType string). + :type table: object + :param spark_object_dataset_schema: The schema name of the Spark server. Type: + string (or Expression with resultType string). + :type spark_object_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'spark_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, spark_object_dataset_schema=None, **kwargs) -> None: + super(SparkObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.table = table + self.spark_object_dataset_schema = spark_object_dataset_schema + self.type = 'SparkObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source.py index ed271ea59c92..6d670c1c6b2a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source.py @@ -15,6 +15,8 @@ class SparkSource(CopySource): """A copy activity Spark Server source. + All required parameters must be populated in order to send to Azure.
+ :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class SparkSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). @@ -49,7 +51,7 @@ class SparkSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(SparkSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'SparkSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source_py3.py new file mode 100644 index 000000000000..8da01b0cd823 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SparkSource(CopySource): + """A copy activity Spark Server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'SparkSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink.py index 3631e11983d7..8fe57eaa3595 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink.py @@ -15,6 +15,8 @@ class SqlDWSink(CopySink): """A copy activity SQL Data Warehouse sink. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -36,7 +38,7 @@ class SqlDWSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). 
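A short sketch of how the PolyBase-related knobs documented above fit together. Assumptions: SqlDWSink is re-exported from azure.mgmt.datafactory.models, and the pre-copy script and table name are hypothetical.

    from azure.mgmt.datafactory.models import SqlDWSink

    # allow_poly_base and table_option accept expression-or-literal values,
    # so plain Python literals are fine here.
    sink = SqlDWSink(
        pre_copy_script='TRUNCATE TABLE stg.Sales',  # hypothetical staging table
        allow_poly_base=True,        # use PolyBase for the load when applicable
        table_option='autoCreate',   # per the docstring, the only supported value
    )
    assert sink.type == 'SqlDWSink'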
@@ -72,10 +74,10 @@ class SqlDWSink(CopySink): 'table_option': {'key': 'tableOption', 'type': 'object'}, } - def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, allow_poly_base=None, poly_base_settings=None, table_option=None): - super(SqlDWSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.pre_copy_script = pre_copy_script - self.allow_poly_base = allow_poly_base - self.poly_base_settings = poly_base_settings - self.table_option = table_option + def __init__(self, **kwargs): + super(SqlDWSink, self).__init__(**kwargs) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.allow_poly_base = kwargs.get('allow_poly_base', None) + self.poly_base_settings = kwargs.get('poly_base_settings', None) + self.table_option = kwargs.get('table_option', None) self.type = 'SqlDWSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink_py3.py new file mode 100644 index 000000000000..6f9241560e59 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink_py3.py @@ -0,0 +1,83 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class SqlDWSink(CopySink): + """A copy activity SQL Data Warehouse sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). 
+ :type pre_copy_script: object + :param allow_poly_base: Indicates to use PolyBase to copy data into SQL + Data Warehouse when applicable. Type: boolean (or Expression with + resultType boolean). + :type allow_poly_base: object + :param poly_base_settings: Specifies PolyBase-related settings when + allowPolyBase is true. + :type poly_base_settings: ~azure.mgmt.datafactory.models.PolybaseSettings + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, + 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, allow_poly_base=None, poly_base_settings=None, table_option=None, **kwargs) -> None: + super(SqlDWSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.pre_copy_script = pre_copy_script + self.allow_poly_base = allow_poly_base + self.poly_base_settings = poly_base_settings + self.table_option = table_option + self.type = 'SqlDWSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source.py index d3e7690a5264..1a020672f7c2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source.py @@ -15,6 +15,8 @@ class SqlDWSource(CopySource): """A copy activity SQL Data Warehouse source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class SqlDWSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or Expression with resultType string). 
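The reader query and the stored procedure are alternative ways to drive this source; below is a hedged sketch of the stored-procedure form, using the parameter shape given in the docstring (the procedure name is hypothetical, and the re-export from azure.mgmt.datafactory.models is assumed).

    from azure.mgmt.datafactory.models import SqlDWSource

    # sql_reader_query and sql_reader_stored_procedure_name cannot be used at
    # the same time, so only the stored-procedure fields are set here.
    source = SqlDWSource(
        sql_reader_stored_procedure_name='usp_GetChangedRows',  # hypothetical
        stored_procedure_parameters={'Parameter1': {'value': '1', 'type': 'int'}},
    )
    assert source.type == 'SqlDWSource'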
@@ -60,9 +62,9 @@ class SqlDWSource(CopySource): 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None): - super(SqlDWSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.sql_reader_query = sql_reader_query - self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name - self.stored_procedure_parameters = stored_procedure_parameters + def __init__(self, **kwargs): + super(SqlDWSource, self).__init__(**kwargs) + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) self.type = 'SqlDWSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source_py3.py new file mode 100644 index 000000000000..ae8fe605024f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SqlDWSource(CopySource): + """A copy activity SQL Data Warehouse source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or + Expression with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Data Warehouse source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". 
+ Type: object (or Expression with resultType object), itemType: + StoredProcedureParameter. + :type stored_procedure_parameters: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: + super(SqlDWSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.type = 'SqlDWSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink.py index cfed228321d6..6a11990fc720 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink.py @@ -15,6 +15,8 @@ class SqlMISink(CopySink): """A copy activity Azure SQL Managed Instance sink. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -36,7 +38,7 @@ class SqlMISink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). 
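Note that, unlike SqlDWSource above, stored_procedure_parameters on this sink is typed as a dict of StoredProcedureParameter models rather than a bare object. A sketch under the assumption that StoredProcedureParameter exposes value/type keyword arguments as elsewhere in these generated models; all names below are hypothetical.

    from azure.mgmt.datafactory.models import (SqlMISink,
                                               StoredProcedureParameter)

    sink = SqlMISink(
        sql_writer_stored_procedure_name='usp_UpsertOrders',   # hypothetical
        sql_writer_table_type='OrdersTableType',               # hypothetical TVP type
        stored_procedure_table_type_parameter_name='Orders',
        stored_procedure_parameters={
            'BatchId': StoredProcedureParameter(value='42', type='Int64'),
        },
    )
    assert sink.type == 'SqlMISink'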
@@ -80,12 +82,12 @@ class SqlMISink(CopySink): 'table_option': {'key': 'tableOption', 'type': 'object'}, } - def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None): - super(SqlMISink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name - self.sql_writer_table_type = sql_writer_table_type - self.pre_copy_script = pre_copy_script - self.stored_procedure_parameters = stored_procedure_parameters - self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name - self.table_option = table_option + def __init__(self, **kwargs): + super(SqlMISink, self).__init__(**kwargs) + self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) + self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.table_option = kwargs.get('table_option', None) self.type = 'SqlMISink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink_py3.py new file mode 100644 index 000000000000..16fe41cf47f7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink_py3.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class SqlMISink(CopySink): + """A copy activity Azure SQL Managed Instance sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. 
Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None, **kwargs) -> None: + super(SqlMISink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name + self.sql_writer_table_type = sql_writer_table_type + self.pre_copy_script = pre_copy_script + self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = 
stored_procedure_table_type_parameter_name + self.table_option = table_option + self.type = 'SqlMISink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source.py index 2a1c93867a30..4d4db9b09281 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source.py @@ -15,6 +15,8 @@ class SqlMISource(CopySource): """A copy activity Azure SQL Managed Instance source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class SqlMISource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). @@ -62,10 +64,10 @@ class SqlMISource(CopySource): 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None): - super(SqlMISource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.sql_reader_query = sql_reader_query - self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name - self.stored_procedure_parameters = stored_procedure_parameters - self.produce_additional_types = produce_additional_types + def __init__(self, **kwargs): + super(SqlMISource, self).__init__(**kwargs) + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.produce_additional_types = kwargs.get('produce_additional_types', None) self.type = 'SqlMISource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source_py3.py new file mode 100644 index 000000000000..952bc7b4da4f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SqlMISource(CopySource): + """A copy activity Azure SQL Managed Instance source. 
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param sql_reader_query: SQL reader query. Type: string (or Expression
+ with resultType string).
+ :type sql_reader_query: object
+ :param sql_reader_stored_procedure_name: Name of the stored procedure for
+ an Azure SQL Managed Instance source. This cannot be used at the same time
+ as SqlReaderQuery. Type: string (or Expression with resultType string).
+ :type sql_reader_stored_procedure_name: object
+ :param stored_procedure_parameters: Value and type setting for stored
+ procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}".
+ :type stored_procedure_parameters: dict[str,
+ ~azure.mgmt.datafactory.models.StoredProcedureParameter]
+ :param produce_additional_types: Which additional types to produce.
+ :type produce_additional_types: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
+ 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'},
+ 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
+ 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'},
+ }
+
+ def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None:
+ super(SqlMISource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.sql_reader_query = sql_reader_query
+ self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name
+ self.stored_procedure_parameters = stored_procedure_parameters
+ self.produce_additional_types = produce_additional_types
+ self.type = 'SqlMISource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service.py index f894634427fe..45d342212ea4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service.py +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service.py @@ -15,6 +15,8 @@ class SqlServerLinkedService(LinkedService): """SQL Server linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,9 +31,9 @@ class SqlServerLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param connection_string: The connection string. Type: string, + :param connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param user_name: The on-premises Windows authentication user name. Type: @@ -63,10 +65,10 @@ class SqlServerLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, connection_string, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, user_name=None, password=None, encrypted_credential=None): - super(SqlServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.connection_string = connection_string - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(SqlServerLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'SqlServer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service_py3.py new file mode 100644 index 000000000000..3eb8c5063dc1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service_py3.py @@ -0,0 +1,74 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SqlServerLinkedService(LinkedService): + """SQL Server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. 
+ :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param user_name: The on-premises Windows authentication user name. Type: + string (or Expression with resultType string). + :type user_name: object + :param password: The on-premises Windows authentication password. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(SqlServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'SqlServer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink.py index 0cdda8b343a3..b3cbe492bbf2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink.py @@ -15,6 +15,8 @@ class SqlServerSink(CopySink): """A copy activity SQL server sink. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -36,7 +38,7 @@ class SqlServerSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param sql_writer_stored_procedure_name: SQL writer stored procedure name. 
Type: string (or Expression with resultType string). @@ -80,12 +82,12 @@ class SqlServerSink(CopySink): 'table_option': {'key': 'tableOption', 'type': 'object'}, } - def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None): - super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name - self.sql_writer_table_type = sql_writer_table_type - self.pre_copy_script = pre_copy_script - self.stored_procedure_parameters = stored_procedure_parameters - self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name - self.table_option = table_option + def __init__(self, **kwargs): + super(SqlServerSink, self).__init__(**kwargs) + self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) + self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.table_option = kwargs.get('table_option', None) self.type = 'SqlServerSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink_py3.py new file mode 100644 index 000000000000..dd5daf2c5660 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink_py3.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class SqlServerSink(CopySink): + """A copy activity SQL server sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. 
Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None, **kwargs) -> None: + super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name + self.sql_writer_table_type = sql_writer_table_type + self.pre_copy_script = pre_copy_script + self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = 
stored_procedure_table_type_parameter_name + self.table_option = table_option + self.type = 'SqlServerSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source.py index a8f6984d95a5..f9aa011047ea 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source.py @@ -15,6 +15,8 @@ class SqlServerSource(CopySource): """A copy activity SQL server source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class SqlServerSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). @@ -62,10 +64,10 @@ class SqlServerSource(CopySource): 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None): - super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.sql_reader_query = sql_reader_query - self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name - self.stored_procedure_parameters = stored_procedure_parameters - self.produce_additional_types = produce_additional_types + def __init__(self, **kwargs): + super(SqlServerSource, self).__init__(**kwargs) + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.produce_additional_types = kwargs.get('produce_additional_types', None) self.type = 'SqlServerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source_py3.py new file mode 100644 index 000000000000..27d12985e595 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SqlServerSource(CopySource): + """A copy activity SQL server source. 
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param sql_reader_query: SQL reader query. Type: string (or Expression
+ with resultType string).
+ :type sql_reader_query: object
+ :param sql_reader_stored_procedure_name: Name of the stored procedure for
+ a SQL Server source. This cannot be used at the same time as
+ SqlReaderQuery. Type: string (or Expression with resultType string).
+ :type sql_reader_stored_procedure_name: object
+ :param stored_procedure_parameters: Value and type setting for stored
+ procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}".
+ :type stored_procedure_parameters: dict[str,
+ ~azure.mgmt.datafactory.models.StoredProcedureParameter]
+ :param produce_additional_types: Which additional types to produce.
+ :type produce_additional_types: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
+ 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'},
+ 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
+ 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'},
+ }
+
+ def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None:
+ super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.sql_reader_query = sql_reader_query
+ self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name
+ self.stored_procedure_parameters = stored_procedure_parameters
+ self.produce_additional_types = produce_additional_types
+ self.type = 'SqlServerSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity.py index bcf259485df2..6f31002f32d1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity.py 
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity.py @@ -15,10 +15,12 @@ class SqlServerStoredProcedureActivity(ExecutionActivity): """SQL stored procedure activity type. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Activity name. + :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str @@ -26,15 +28,15 @@ class SqlServerStoredProcedureActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param stored_procedure_name: Stored procedure name. Type: string (or - Expression with resultType string). + :param stored_procedure_name: Required. Stored procedure name. Type: + string (or Expression with resultType string). :type stored_procedure_name: object :param stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". @@ -61,8 +63,8 @@ class SqlServerStoredProcedureActivity(ExecutionActivity): 'stored_procedure_parameters': {'key': 'typeProperties.storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, } - def __init__(self, name, stored_procedure_name, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, stored_procedure_parameters=None): - super(SqlServerStoredProcedureActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) - self.stored_procedure_name = stored_procedure_name - self.stored_procedure_parameters = stored_procedure_parameters + def __init__(self, **kwargs): + super(SqlServerStoredProcedureActivity, self).__init__(**kwargs) + self.stored_procedure_name = kwargs.get('stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) self.type = 'SqlServerStoredProcedure' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity_py3.py new file mode 100644 index 000000000000..477f0c6c775c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class SqlServerStoredProcedureActivity(ExecutionActivity): + """SQL stored procedure activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param stored_procedure_name: Required. Stored procedure name. Type: + string (or Expression with resultType string). + :type stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'stored_procedure_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'stored_procedure_name': {'key': 'typeProperties.storedProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'typeProperties.storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + } + + def __init__(self, *, name: str, stored_procedure_name, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, stored_procedure_parameters=None, **kwargs) -> None: + super(SqlServerStoredProcedureActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.stored_procedure_name = stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.type = 'SqlServerStoredProcedure' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py index f8cb18e56be1..3998671ee8ae 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py @@ -15,6 +15,8 @@ class SqlServerTableDataset(Dataset): """The on-premises SQL Server dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class SqlServerTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class SqlServerTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. @@ -72,9 +74,9 @@ class SqlServerTableDataset(Dataset): 'table': {'key': 'typeProperties.table', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, sql_server_table_dataset_schema=None, table=None): - super(SqlServerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name - self.sql_server_table_dataset_schema = sql_server_table_dataset_schema - self.table = table + def __init__(self, **kwargs): + super(SqlServerTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.sql_server_table_dataset_schema = kwargs.get('sql_server_table_dataset_schema', None) + self.table = kwargs.get('table', None) self.type = 'SqlServerTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py new file mode 100644 index 000000000000..989780c9bfda --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SqlServerTableDataset(Dataset): + """The on-premises SQL Server dataset. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param sql_server_table_dataset_schema: The schema name of the SQL Server + dataset. Type: string (or Expression with resultType string). + :type sql_server_table_dataset_schema: object + :param table: The table name of the SQL Server dataset. Type: string (or + Expression with resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'sql_server_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, sql_server_table_dataset_schema=None, table=None, **kwargs) -> None: + super(SqlServerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.sql_server_table_dataset_schema = sql_server_table_dataset_schema + self.table = table + self.type = 'SqlServerTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py index e35d38558224..3a81c5f7ea2f 100644 --- 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py @@ -15,6 +15,8 @@ class SqlSink(CopySink): """A copy activity SQL sink. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -36,7 +38,7 @@ class SqlSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). @@ -80,12 +82,12 @@ class SqlSink(CopySink): 'table_option': {'key': 'tableOption', 'type': 'object'}, } - def __init__(self, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None): - super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name - self.sql_writer_table_type = sql_writer_table_type - self.pre_copy_script = pre_copy_script - self.stored_procedure_parameters = stored_procedure_parameters - self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name - self.table_option = table_option + def __init__(self, **kwargs): + super(SqlSink, self).__init__(**kwargs) + self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) + self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.table_option = kwargs.get('table_option', None) self.type = 'SqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py new file mode 100644 index 000000000000..d33810d9abef --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class SqlSink(CopySink): + """A copy activity SQL sink. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). 
+ :type table_option: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None, **kwargs) -> None: + super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name + self.sql_writer_table_type = sql_writer_table_type + self.pre_copy_script = pre_copy_script + self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.table_option = table_option + self.type = 'SqlSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source.py index 3c723a28cb39..bb31474b1f7c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source.py @@ -15,6 +15,8 @@ class SqlSource(CopySource): """A copy activity SQL source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class SqlSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). 
@@ -59,9 +61,9 @@ class SqlSource(CopySource): 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None): - super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.sql_reader_query = sql_reader_query - self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name - self.stored_procedure_parameters = stored_procedure_parameters + def __init__(self, **kwargs): + super(SqlSource, self).__init__(**kwargs) + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) self.type = 'SqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source_py3.py new file mode 100644 index 000000000000..dcad458fd4a6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SqlSource(CopySource): + """A copy activity SQL source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Database source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". 
+ :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: + super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.type = 'SqlSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service.py index cbfc41bf357d..4edfc8b211f7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service.py @@ -15,6 +15,8 @@ class SquareLinkedService(LinkedService): """Square Service linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,17 +31,19 @@ class SquareLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param host: The URL of the Square instance. (i.e. mystore.mysquare.com) + :param host: Required. The URL of the Square instance. (i.e. + mystore.mysquare.com) :type host: object - :param client_id: The client ID associated with your Square application. + :param client_id: Required. The client ID associated with your Square + application. :type client_id: object :param client_secret: The client secret associated with your Square application. :type client_secret: ~azure.mgmt.datafactory.models.SecretBase - :param redirect_uri: The redirect URL assigned in the Square application - dashboard. (i.e. http://localhost:2500) + :param redirect_uri: Required. The redirect URL assigned in the Square + application dashboard. (i.e. http://localhost:2500) :type redirect_uri: object :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. 
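Similarly, a minimal sketch of the keyword-only SqlSource above. The query text is a placeholder; note that sql_reader_query and sql_reader_stored_procedure_name cannot be set at the same time, per the docstring:

    from azure.mgmt.datafactory.models import SqlSource

    # Either a reader query or a stored procedure name, not both.
    source = SqlSource(sql_reader_query="SELECT * FROM dbo.MyTable")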
@@ -81,14 +85,14 @@ class SquareLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, host, client_id, redirect_uri, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None): - super(SquareLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.host = host - self.client_id = client_id - self.client_secret = client_secret - self.redirect_uri = redirect_uri - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(SquareLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.redirect_uri = kwargs.get('redirect_uri', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'Square' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service_py3.py new file mode 100644 index 000000000000..40719f600a18 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service_py3.py @@ -0,0 +1,98 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SquareLinkedService(LinkedService): + """Square Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The URL of the Square instance. (i.e. + mystore.mysquare.com) + :type host: object + :param client_id: Required. The client ID associated with your Square + application. 
+ :type client_id: object + :param client_secret: The client secret associated with your Square + application. + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param redirect_uri: Required. The redirect URL assigned in the Square + application dashboard. (i.e. http://localhost:2500) + :type redirect_uri: object + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'client_id': {'required': True}, + 'redirect_uri': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'redirect_uri': {'key': 'typeProperties.redirectUri', 'type': 'object'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, client_id, redirect_uri, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(SquareLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.client_id = client_id + self.client_secret = client_secret + self.redirect_uri = redirect_uri + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Square' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset.py index 8292782f5fdf..3903382d2e3a 100644 --- 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset.py @@ -15,6 +15,8 @@ class SquareObjectDataset(Dataset): """Square Service dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class SquareObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class SquareObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -64,7 +66,7 @@ class SquareObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): - super(SquareObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name + def __init__(self, **kwargs): + super(SquareObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) self.type = 'SquareObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset_py3.py new file mode 100644 index 000000000000..6d624dc6feef --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SquareObjectDataset(Dataset): + """Square Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. 
+ :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(SquareObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'SquareObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source.py index 7ef5e5912ff0..f083df43f13a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source.py @@ -15,6 +15,8 @@ class SquareSource(CopySource): """A copy activity Square Service source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class SquareSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
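As a further sketch, the new keyword-only Square models wired together; every host, credential, and name below is a placeholder:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        SecureString,
        SquareLinkedService,
        SquareObjectDataset,
    )

    # host, client_id and redirect_uri are now required keyword-only
    # arguments; msrest validates them client-side before sending to Azure.
    square_ls = SquareLinkedService(
        host="mystore.mysquare.com",
        client_id="<client-id>",
        client_secret=SecureString(value="<client-secret>"),
        redirect_uri="http://localhost:2500",
    )
    dataset = SquareObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name="SquareLS"),
        table_name="Transactions",  # placeholder table
    )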
@@ -49,7 +51,7 @@ class SquareSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(SquareSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'SquareSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source_py3.py new file mode 100644 index 000000000000..ec8a741d564c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SquareSource(CopySource): + """A copy activity Square Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'SquareSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential.py index 072c579366c4..63512fdec4d8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential.py @@ -15,11 +15,13 @@ class SSISAccessCredential(Model): """SSIS access credential. - :param domain: Domain for windows authentication. + All required parameters must be populated in order to send to Azure. + + :param domain: Required. Domain for Windows authentication. :type domain: object - :param user_name: UseName for windows authentication. + :param user_name: Required. UserName for Windows authentication. :type user_name: object - :param password: Password for windows authentication. + :param password: Required. Password for Windows authentication. :type password: ~azure.mgmt.datafactory.models.SecureString """ @@ -35,8 +37,8 @@ class SSISAccessCredential(Model): 'password': {'key': 'password', 'type': 'SecureString'}, } - def __init__(self, domain, user_name, password): - super(SSISAccessCredential, self).__init__() - self.domain = domain - self.user_name = user_name - self.password = password + def __init__(self, **kwargs): + super(SSISAccessCredential, self).__init__(**kwargs) + self.domain = kwargs.get('domain', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential_py3.py new file mode 100644 index 000000000000..5df0fc8941da --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential_py3.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SSISAccessCredential(Model): + """SSIS access credential. 
+ + All required parameters must be populated in order to send to Azure. + + :param domain: Required. Domain for Windows authentication. + :type domain: object + :param user_name: Required. UserName for Windows authentication. + :type user_name: object + :param password: Required. Password for Windows authentication. + :type password: ~azure.mgmt.datafactory.models.SecureString + """ + + _validation = { + 'domain': {'required': True}, + 'user_name': {'required': True}, + 'password': {'required': True}, + } + + _attribute_map = { + 'domain': {'key': 'domain', 'type': 'object'}, + 'user_name': {'key': 'userName', 'type': 'object'}, + 'password': {'key': 'password', 'type': 'SecureString'}, + } + + def __init__(self, *, domain, user_name, password, **kwargs) -> None: + super(SSISAccessCredential, self).__init__(**kwargs) + self.domain = domain + self.user_name = user_name + self.password = password diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment.py index 95d2e838ec9b..5dff9764e2a2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment.py @@ -15,13 +15,15 @@ class SsisEnvironment(SsisObjectMetadata): """Ssis environment. + All required parameters must be populated in order to send to Azure. + :param id: Metadata id. :type id: long :param name: Metadata name. :type name: str :param description: Metadata description. :type description: str - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param folder_id: Folder id which contains environment. :type folder_id: long @@ -42,8 +44,8 @@ class SsisEnvironment(SsisObjectMetadata): 'variables': {'key': 'variables', 'type': '[SsisVariable]'}, } - def __init__(self, id=None, name=None, description=None, folder_id=None, variables=None): - super(SsisEnvironment, self).__init__(id=id, name=name, description=description) - self.folder_id = folder_id - self.variables = variables + def __init__(self, **kwargs): + super(SsisEnvironment, self).__init__(**kwargs) + self.folder_id = kwargs.get('folder_id', None) + self.variables = kwargs.get('variables', None) self.type = 'Environment' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_py3.py new file mode 100644 index 000000000000..43697ba62146 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_py3.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata_py3 import SsisObjectMetadata + + +class SsisEnvironment(SsisObjectMetadata): + """Ssis environment. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. 
+ :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains environment. + :type folder_id: long + :param variables: Variables in the environment. + :type variables: list[~azure.mgmt.datafactory.models.SsisVariable] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'variables': {'key': 'variables', 'type': '[SsisVariable]'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, folder_id: int=None, variables=None, **kwargs) -> None: + super(SsisEnvironment, self).__init__(id=id, name=name, description=description, **kwargs) + self.folder_id = folder_id + self.variables = variables + self.type = 'Environment' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference.py index 9a035fa35f35..e7d31d369392 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference.py @@ -32,9 +32,9 @@ class SsisEnvironmentReference(Model): 'reference_type': {'key': 'referenceType', 'type': 'str'}, } - def __init__(self, id=None, environment_folder_name=None, environment_name=None, reference_type=None): - super(SsisEnvironmentReference, self).__init__() - self.id = id - self.environment_folder_name = environment_folder_name - self.environment_name = environment_name - self.reference_type = reference_type + def __init__(self, **kwargs): + super(SsisEnvironmentReference, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.environment_folder_name = kwargs.get('environment_folder_name', None) + self.environment_name = kwargs.get('environment_name', None) + self.reference_type = kwargs.get('reference_type', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference_py3.py new file mode 100644 index 000000000000..14cbfca99d4f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference_py3.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisEnvironmentReference(Model): + """Ssis environment reference. + + :param id: Environment reference id. + :type id: long + :param environment_folder_name: Environment folder name. + :type environment_folder_name: str + :param environment_name: Environment name. 
+ :type environment_name: str + :param reference_type: Reference type. + :type reference_type: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'environment_folder_name': {'key': 'environmentFolderName', 'type': 'str'}, + 'environment_name': {'key': 'environmentName', 'type': 'str'}, + 'reference_type': {'key': 'referenceType', 'type': 'str'}, + } + + def __init__(self, *, id: int=None, environment_folder_name: str=None, environment_name: str=None, reference_type: str=None, **kwargs) -> None: + super(SsisEnvironmentReference, self).__init__(**kwargs) + self.id = id + self.environment_folder_name = environment_folder_name + self.environment_name = environment_name + self.reference_type = reference_type diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential.py index 3c56344ca7aa..c090694416a9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential.py @@ -15,11 +15,13 @@ class SSISExecutionCredential(Model): """SSIS package execution credential. - :param domain: Domain for windows authentication. + All required parameters must be populated in order to send to Azure. + + :param domain: Required. Domain for Windows authentication. :type domain: object - :param user_name: UseName for windows authentication. + :param user_name: Required. UserName for Windows authentication. :type user_name: object - :param password: Password for windows authentication. + :param password: Required. Password for Windows authentication. :type password: ~azure.mgmt.datafactory.models.SecureString """ @@ -35,8 +37,8 @@ class SSISExecutionCredential(Model): 'password': {'key': 'password', 'type': 'SecureString'}, } - def __init__(self, domain, user_name, password): - super(SSISExecutionCredential, self).__init__() - self.domain = domain - self.user_name = user_name - self.password = password + def __init__(self, **kwargs): + super(SSISExecutionCredential, self).__init__(**kwargs) + self.domain = kwargs.get('domain', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential_py3.py new file mode 100644 index 000000000000..051eaffa2bf2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential_py3.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SSISExecutionCredential(Model): + """SSIS package execution credential. + + All required parameters must be populated in order to send to Azure. + + :param domain: Required. Domain for Windows authentication. 
+ :type domain: object + :param user_name: Required. UserName for Windows authentication. + :type user_name: object + :param password: Required. Password for Windows authentication. + :type password: ~azure.mgmt.datafactory.models.SecureString + """ + + _validation = { + 'domain': {'required': True}, + 'user_name': {'required': True}, + 'password': {'required': True}, + } + + _attribute_map = { + 'domain': {'key': 'domain', 'type': 'object'}, + 'user_name': {'key': 'userName', 'type': 'object'}, + 'password': {'key': 'password', 'type': 'SecureString'}, + } + + def __init__(self, *, domain, user_name, password, **kwargs) -> None: + super(SSISExecutionCredential, self).__init__(**kwargs) + self.domain = domain + self.user_name = user_name + self.password = password diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter.py index e57afb5b5798..36f295c5a4aa 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter.py @@ -15,8 +15,10 @@ class SSISExecutionParameter(Model): """SSIS execution parameter. - :param value: SSIS package execution parameter value. Type: string (or - Expression with resultType string). + All required parameters must be populated in order to send to Azure. + + :param value: Required. SSIS package execution parameter value. Type: + string (or Expression with resultType string). :type value: object """ @@ -28,6 +30,6 @@ class SSISExecutionParameter(Model): 'value': {'key': 'value', 'type': 'object'}, } - def __init__(self, value): - super(SSISExecutionParameter, self).__init__() - self.value = value + def __init__(self, **kwargs): + super(SSISExecutionParameter, self).__init__(**kwargs) + self.value = kwargs.get('value', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter_py3.py new file mode 100644 index 000000000000..cd10dd457a42 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter_py3.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SSISExecutionParameter(Model): + """SSIS execution parameter. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. SSIS package execution parameter value. Type: + string (or Expression with resultType string). 
+ :type value: object + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': 'object'}, + } + + def __init__(self, *, value, **kwargs) -> None: + super(SSISExecutionParameter, self).__init__(**kwargs) + self.value = value diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder.py index c2fdf4ce689f..350b0d92852b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder.py @@ -15,13 +15,15 @@ class SsisFolder(SsisObjectMetadata): """Ssis folder. + All required parameters must be populated in order to send to Azure. + :param id: Metadata id. :type id: long :param name: Metadata name. :type name: str :param description: Metadata description. :type description: str - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str """ @@ -29,6 +31,13 @@ class SsisFolder(SsisObjectMetadata): 'type': {'required': True}, } - def __init__(self, id=None, name=None, description=None): - super(SsisFolder, self).__init__(id=id, name=name, description=description) + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SsisFolder, self).__init__(**kwargs) self.type = 'Folder' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder_py3.py new file mode 100644 index 000000000000..d6483fda2c08 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder_py3.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata_py3 import SsisObjectMetadata + + +class SsisFolder(SsisObjectMetadata): + """Ssis folder. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, **kwargs) -> None: + super(SsisFolder, self).__init__(id=id, name=name, description=description, **kwargs) + self.type = 'Folder' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location.py index 96e3bdc053c4..cfdebe717541 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location.py @@ -18,10 +18,13 @@ class SSISLogLocation(Model): Variables are only populated by the server, and will be ignored when sending a request. - :param log_path: The SSIS package execution log path. Type: string (or - Expression with resultType string). + All required parameters must be populated in order to send to Azure. + + :param log_path: Required. The SSIS package execution log path. Type: + string (or Expression with resultType string). :type log_path: object - :ivar type: The type of SSIS log location. Default value: "File" . + :ivar type: Required. The type of SSIS log location. Default value: + "File". :vartype type: str :param access_credential: The package execution log access credential. :type access_credential: @@ -47,8 +50,8 @@ class SSISLogLocation(Model): type = "File" - def __init__(self, log_path, access_credential=None, log_refresh_interval=None): - super(SSISLogLocation, self).__init__() - self.log_path = log_path - self.access_credential = access_credential - self.log_refresh_interval = log_refresh_interval + def __init__(self, **kwargs): + super(SSISLogLocation, self).__init__(**kwargs) + self.log_path = kwargs.get('log_path', None) + self.access_credential = kwargs.get('access_credential', None) + self.log_refresh_interval = kwargs.get('log_refresh_interval', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location_py3.py new file mode 100644 index 000000000000..de4fbe35dcb3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SSISLogLocation(Model): + """SSIS package execution log location. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param log_path: Required. The SSIS package execution log path. Type: + string (or Expression with resultType string). + :type log_path: object + :ivar type: Required. The type of SSIS log location. Default value: + "File". + :vartype type: str + :param access_credential: The package execution log access credential. + :type access_credential: + ~azure.mgmt.datafactory.models.SSISAccessCredential + :param log_refresh_interval: Specifies the interval to refresh the log. + The default interval is 5 minutes. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type log_refresh_interval: object + """ + + _validation = { + 'log_path': {'required': True}, + 'type': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'log_path': {'key': 'logPath', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, + 'log_refresh_interval': {'key': 'typeProperties.logRefreshInterval', 'type': 'object'}, + } + + type = "File" + + def __init__(self, *, log_path, access_credential=None, log_refresh_interval=None, **kwargs) -> None: + super(SSISLogLocation, self).__init__(**kwargs) + self.log_path = log_path + self.access_credential = access_credential + self.log_refresh_interval = log_refresh_interval diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata.py index b14732944663..811075137f41 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata.py @@ -18,13 +18,15 @@ class SsisObjectMetadata(Model): You probably want to use the sub-classes and not this class directly. Known sub-classes are: SsisEnvironment, SsisPackage, SsisProject, SsisFolder + All required parameters must be populated in order to send to Azure. + :param id: Metadata id. :type id: long :param name: Metadata name. :type name: str :param description: Metadata description. :type description: str - :param type: Constant filled by server. + :param type: Required. Constant filled by server. 
:type type: str """ @@ -43,9 +45,9 @@ class SsisObjectMetadata(Model): 'type': {'Environment': 'SsisEnvironment', 'Package': 'SsisPackage', 'Project': 'SsisProject', 'Folder': 'SsisFolder'} } - def __init__(self, id=None, name=None, description=None): - super(SsisObjectMetadata, self).__init__() - self.id = id - self.name = name - self.description = description + def __init__(self, **kwargs): + super(SsisObjectMetadata, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.name = kwargs.get('name', None) + self.description = kwargs.get('description', None) self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response.py index 2d07080418d8..a029c9f7ebc4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response.py @@ -27,7 +27,7 @@ class SsisObjectMetadataListResponse(Model): 'next_link': {'key': 'nextLink', 'type': 'str'}, } - def __init__(self, value=None, next_link=None): - super(SsisObjectMetadataListResponse, self).__init__() - self.value = value - self.next_link = next_link + def __init__(self, **kwargs): + super(SsisObjectMetadataListResponse, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.next_link = kwargs.get('next_link', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response_py3.py new file mode 100644 index 000000000000..79931e1ceaf7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response_py3.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisObjectMetadataListResponse(Model): + """A list of SSIS object metadata. + + :param value: List of SSIS object metadata. + :type value: list[~azure.mgmt.datafactory.models.SsisObjectMetadata] + :param next_link: The link to the next page of results, if any remaining + results exist. 
+ :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[SsisObjectMetadata]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__(self, *, value=None, next_link: str=None, **kwargs) -> None: + super(SsisObjectMetadataListResponse, self).__init__(**kwargs) + self.value = value + self.next_link = next_link diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_py3.py new file mode 100644 index 000000000000..45f7e15af4fa --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_py3.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisObjectMetadata(Model): + """SSIS object metadata. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SsisEnvironment, SsisPackage, SsisProject, SsisFolder + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Environment': 'SsisEnvironment', 'Package': 'SsisPackage', 'Project': 'SsisProject', 'Folder': 'SsisFolder'} + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, **kwargs) -> None: + super(SsisObjectMetadata, self).__init__(**kwargs) + self.id = id + self.name = name + self.description = description + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response.py index 51424c6aefcc..9b782613ee08 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response.py @@ -32,9 +32,9 @@ class SsisObjectMetadataStatusResponse(Model): 'error': {'key': 'error', 'type': 'str'}, } - def __init__(self, status=None, name=None, properties=None, error=None): - super(SsisObjectMetadataStatusResponse, self).__init__() - self.status = status - self.name = name - self.properties = properties - self.error = error + def __init__(self, **kwargs): + super(SsisObjectMetadataStatusResponse, self).__init__(**kwargs) + self.status = kwargs.get('status', None) + self.name = kwargs.get('name', None) + self.properties = kwargs.get('properties', None) + self.error = kwargs.get('error', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response_py3.py new file mode 100644 index 000000000000..a4b82b8f6bcd --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response_py3.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisObjectMetadataStatusResponse(Model): + """The status of the operation. + + :param status: The status of the operation. + :type status: str + :param name: The operation name. + :type name: str + :param properties: The operation properties. + :type properties: str + :param error: The operation error message. 
+ :type error: str + """ + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'str'}, + 'error': {'key': 'error', 'type': 'str'}, + } + + def __init__(self, *, status: str=None, name: str=None, properties: str=None, error: str=None, **kwargs) -> None: + super(SsisObjectMetadataStatusResponse, self).__init__(**kwargs) + self.status = status + self.name = name + self.properties = properties + self.error = error diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package.py index 9b1d18ce1ee6..b04fc1138797 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package.py @@ -15,13 +15,15 @@ class SsisPackage(SsisObjectMetadata): """Ssis Package. + All required parameters must be populated in order to send to Azure. + :param id: Metadata id. :type id: long :param name: Metadata name. :type name: str :param description: Metadata description. :type description: str - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param folder_id: Folder id which contains package. :type folder_id: long @@ -48,10 +50,10 @@ class SsisPackage(SsisObjectMetadata): 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, } - def __init__(self, id=None, name=None, description=None, folder_id=None, project_version=None, project_id=None, parameters=None): - super(SsisPackage, self).__init__(id=id, name=name, description=description) - self.folder_id = folder_id - self.project_version = project_version - self.project_id = project_id - self.parameters = parameters + def __init__(self, **kwargs): + super(SsisPackage, self).__init__(**kwargs) + self.folder_id = kwargs.get('folder_id', None) + self.project_version = kwargs.get('project_version', None) + self.project_id = kwargs.get('project_id', None) + self.parameters = kwargs.get('parameters', None) self.type = 'Package' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location.py index 3e44834cdfb0..248d0aa9b8ae 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location.py @@ -15,8 +15,10 @@ class SSISPackageLocation(Model): """SSIS package location. - :param package_path: The SSIS package path. Type: string (or Expression - with resultType string). + All required parameters must be populated in order to send to Azure. + + :param package_path: Required. The SSIS package path. Type: string (or + Expression with resultType string). :type package_path: object :param type: The type of SSIS package location. 
Possible values include: 'SSISDB', 'File' @@ -43,10 +45,10 @@ class SSISPackageLocation(Model): 'configuration_path': {'key': 'typeProperties.configurationPath', 'type': 'object'}, } - def __init__(self, package_path, type=None, package_password=None, access_credential=None, configuration_path=None): - super(SSISPackageLocation, self).__init__() - self.package_path = package_path - self.type = type - self.package_password = package_password - self.access_credential = access_credential - self.configuration_path = configuration_path + def __init__(self, **kwargs): + super(SSISPackageLocation, self).__init__(**kwargs) + self.package_path = kwargs.get('package_path', None) + self.type = kwargs.get('type', None) + self.package_password = kwargs.get('package_password', None) + self.access_credential = kwargs.get('access_credential', None) + self.configuration_path = kwargs.get('configuration_path', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location_py3.py new file mode 100644 index 000000000000..cc442d8d35b8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location_py3.py @@ -0,0 +1,54 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SSISPackageLocation(Model): + """SSIS package location. + + All required parameters must be populated in order to send to Azure. + + :param package_path: Required. The SSIS package path. Type: string (or + Expression with resultType string). + :type package_path: object + :param type: The type of SSIS package location. Possible values include: + 'SSISDB', 'File' + :type type: str or ~azure.mgmt.datafactory.models.SsisPackageLocationType + :param package_password: Password of the package. + :type package_password: ~azure.mgmt.datafactory.models.SecureString + :param access_credential: The package access credential. + :type access_credential: + ~azure.mgmt.datafactory.models.SSISAccessCredential + :param configuration_path: The configuration file of the package + execution. Type: string (or Expression with resultType string). 
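
A short usage sketch for the location model whose py3 constructor appears just below: pointing at a package stored in SSISDB. The catalog path and password are placeholders; `SecureString` is the models' inline secret type referenced in the docstring above.

```python
from azure.mgmt.datafactory.models import SecureString, SSISPackageLocation

# package_path is the only required parameter; 'File' is the other
# supported location type per the docstring.
location = SSISPackageLocation(
    package_path="Finance/NightlyLoads/LoadFacts.dtsx",
    type="SSISDB",
    package_password=SecureString(value="<package password>"),
)
```
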
+ :type configuration_path: object + """ + + _validation = { + 'package_path': {'required': True}, + } + + _attribute_map = { + 'package_path': {'key': 'packagePath', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'package_password': {'key': 'typeProperties.packagePassword', 'type': 'SecureString'}, + 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, + 'configuration_path': {'key': 'typeProperties.configurationPath', 'type': 'object'}, + } + + def __init__(self, *, package_path, type=None, package_password=None, access_credential=None, configuration_path=None, **kwargs) -> None: + super(SSISPackageLocation, self).__init__(**kwargs) + self.package_path = package_path + self.type = type + self.package_password = package_password + self.access_credential = access_credential + self.configuration_path = configuration_path diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_py3.py new file mode 100644 index 000000000000..e1e932e97ae6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_py3.py @@ -0,0 +1,59 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata_py3 import SsisObjectMetadata + + +class SsisPackage(SsisObjectMetadata): + """Ssis Package. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains package. + :type folder_id: long + :param project_version: Project version which contains package. + :type project_version: long + :param project_id: Project id which contains package. 
+ :type project_id: long + :param parameters: Parameters in package + :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'project_version': {'key': 'projectVersion', 'type': 'long'}, + 'project_id': {'key': 'projectId', 'type': 'long'}, + 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, folder_id: int=None, project_version: int=None, project_id: int=None, parameters=None, **kwargs) -> None: + super(SsisPackage, self).__init__(id=id, name=name, description=description, **kwargs) + self.folder_id = folder_id + self.project_version = project_version + self.project_id = project_id + self.parameters = parameters + self.type = 'Package' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter.py index c75b2cf9a1f0..c456af0bab48 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter.py @@ -56,17 +56,17 @@ class SsisParameter(Model): 'variable': {'key': 'variable', 'type': 'str'}, } - def __init__(self, id=None, name=None, description=None, data_type=None, required=None, sensitive=None, design_default_value=None, default_value=None, sensitive_default_value=None, value_type=None, value_set=None, variable=None): - super(SsisParameter, self).__init__() - self.id = id - self.name = name - self.description = description - self.data_type = data_type - self.required = required - self.sensitive = sensitive - self.design_default_value = design_default_value - self.default_value = default_value - self.sensitive_default_value = sensitive_default_value - self.value_type = value_type - self.value_set = value_set - self.variable = variable + def __init__(self, **kwargs): + super(SsisParameter, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.name = kwargs.get('name', None) + self.description = kwargs.get('description', None) + self.data_type = kwargs.get('data_type', None) + self.required = kwargs.get('required', None) + self.sensitive = kwargs.get('sensitive', None) + self.design_default_value = kwargs.get('design_default_value', None) + self.default_value = kwargs.get('default_value', None) + self.sensitive_default_value = kwargs.get('sensitive_default_value', None) + self.value_type = kwargs.get('value_type', None) + self.value_set = kwargs.get('value_set', None) + self.variable = kwargs.get('variable', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter_py3.py new file mode 100644 index 000000000000..6a4ff73768f0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisParameter(Model): + """Ssis parameter. + + :param id: Parameter id. + :type id: long + :param name: Parameter name. + :type name: str + :param description: Parameter description. + :type description: str + :param data_type: Parameter type. + :type data_type: str + :param required: Whether parameter is required. + :type required: bool + :param sensitive: Whether parameter is sensitive. + :type sensitive: bool + :param design_default_value: Design default value of parameter. + :type design_default_value: str + :param default_value: Default value of parameter. + :type default_value: str + :param sensitive_default_value: Default sensitive value of parameter. + :type sensitive_default_value: str + :param value_type: Parameter value type. + :type value_type: str + :param value_set: Parameter value set. + :type value_set: bool + :param variable: Parameter reference variable. + :type variable: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'required': {'key': 'required', 'type': 'bool'}, + 'sensitive': {'key': 'sensitive', 'type': 'bool'}, + 'design_default_value': {'key': 'designDefaultValue', 'type': 'str'}, + 'default_value': {'key': 'defaultValue', 'type': 'str'}, + 'sensitive_default_value': {'key': 'sensitiveDefaultValue', 'type': 'str'}, + 'value_type': {'key': 'valueType', 'type': 'str'}, + 'value_set': {'key': 'valueSet', 'type': 'bool'}, + 'variable': {'key': 'variable', 'type': 'str'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, data_type: str=None, required: bool=None, sensitive: bool=None, design_default_value: str=None, default_value: str=None, sensitive_default_value: str=None, value_type: str=None, value_set: bool=None, variable: str=None, **kwargs) -> None: + super(SsisParameter, self).__init__(**kwargs) + self.id = id + self.name = name + self.description = description + self.data_type = data_type + self.required = required + self.sensitive = sensitive + self.design_default_value = design_default_value + self.default_value = default_value + self.sensitive_default_value = sensitive_default_value + self.value_type = value_type + self.value_set = value_set + self.variable = variable diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project.py index 979a07bb1e80..c29a36fb628e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project.py @@ -15,13 +15,15 @@ class SsisProject(SsisObjectMetadata): """Ssis project. + All required parameters must be populated in order to send to Azure. + :param id: Metadata id. :type id: long :param name: Metadata name. :type name: str :param description: Metadata description. :type description: str - :param type: Constant filled by server. + :param type: Required. Constant filled by server. 
:type type: str :param folder_id: Folder id which contains project. :type folder_id: long @@ -49,10 +51,10 @@ class SsisProject(SsisObjectMetadata): 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, } - def __init__(self, id=None, name=None, description=None, folder_id=None, version=None, environment_refs=None, parameters=None): - super(SsisProject, self).__init__(id=id, name=name, description=description) - self.folder_id = folder_id - self.version = version - self.environment_refs = environment_refs - self.parameters = parameters + def __init__(self, **kwargs): + super(SsisProject, self).__init__(**kwargs) + self.folder_id = kwargs.get('folder_id', None) + self.version = kwargs.get('version', None) + self.environment_refs = kwargs.get('environment_refs', None) + self.parameters = kwargs.get('parameters', None) self.type = 'Project' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project_py3.py new file mode 100644 index 000000000000..11b95a644e2f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project_py3.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata_py3 import SsisObjectMetadata + + +class SsisProject(SsisObjectMetadata): + """Ssis project. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains project. + :type folder_id: long + :param version: Project version. 
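
A brief sketch of the parameter metadata model defined above. `SsisParameter` instances are normally returned by the service; one is constructed by hand here only to show the fields client code typically inspects, with invented values.

```python
from azure.mgmt.datafactory.models import SsisParameter

param = SsisParameter(
    name="BatchDate",
    data_type="DateTime",
    required=True,
    sensitive=False,
    design_default_value="2019-01-01",
)

# A required parameter with no runtime default must be supplied by the caller.
if param.required and param.default_value is None:
    print("caller must supply", param.name, "of type", param.data_type)
```
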
+ :type version: long + :param environment_refs: Environment reference in project + :type environment_refs: + list[~azure.mgmt.datafactory.models.SsisEnvironmentReference] + :param parameters: Parameters in project + :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'version': {'key': 'version', 'type': 'long'}, + 'environment_refs': {'key': 'environmentRefs', 'type': '[SsisEnvironmentReference]'}, + 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, folder_id: int=None, version: int=None, environment_refs=None, parameters=None, **kwargs) -> None: + super(SsisProject, self).__init__(id=id, name=name, description=description, **kwargs) + self.folder_id = folder_id + self.version = version + self.environment_refs = environment_refs + self.parameters = parameters + self.type = 'Project' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override.py index 85e6d4645b9a..30b78594e6ab 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override.py @@ -15,8 +15,10 @@ class SSISPropertyOverride(Model): """SSIS property override. - :param value: SSIS package property override value. Type: string (or - Expression with resultType string). + All required parameters must be populated in order to send to Azure. + + :param value: Required. SSIS package property override value. Type: string + (or Expression with resultType string). :type value: object :param is_sensitive: Whether SSIS package property override value is sensitive data. Value will be encrypted in SSISDB if it is true @@ -32,7 +34,7 @@ class SSISPropertyOverride(Model): 'is_sensitive': {'key': 'isSensitive', 'type': 'bool'}, } - def __init__(self, value, is_sensitive=None): - super(SSISPropertyOverride, self).__init__() - self.value = value - self.is_sensitive = is_sensitive + def __init__(self, **kwargs): + super(SSISPropertyOverride, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.is_sensitive = kwargs.get('is_sensitive', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override_py3.py new file mode 100644 index 000000000000..b425a19adc7e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override_py3.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SSISPropertyOverride(Model): + """SSIS property override. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. SSIS package property override value. Type: string + (or Expression with resultType string). + :type value: object + :param is_sensitive: Whether SSIS package property override value is + sensitive data. Value will be encrypted in SSISDB if it is true + :type is_sensitive: bool + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': 'object'}, + 'is_sensitive': {'key': 'isSensitive', 'type': 'bool'}, + } + + def __init__(self, *, value, is_sensitive: bool=None, **kwargs) -> None: + super(SSISPropertyOverride, self).__init__(**kwargs) + self.value = value + self.is_sensitive = is_sensitive diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable.py index 8e12fda20149..73fda3b27967 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable.py @@ -41,12 +41,12 @@ class SsisVariable(Model): 'sensitive_value': {'key': 'sensitiveValue', 'type': 'str'}, } - def __init__(self, id=None, name=None, description=None, data_type=None, sensitive=None, value=None, sensitive_value=None): - super(SsisVariable, self).__init__() - self.id = id - self.name = name - self.description = description - self.data_type = data_type - self.sensitive = sensitive - self.value = value - self.sensitive_value = sensitive_value + def __init__(self, **kwargs): + super(SsisVariable, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.name = kwargs.get('name', None) + self.description = kwargs.get('description', None) + self.data_type = kwargs.get('data_type', None) + self.sensitive = kwargs.get('sensitive', None) + self.value = kwargs.get('value', None) + self.sensitive_value = kwargs.get('sensitive_value', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable_py3.py new file mode 100644 index 000000000000..e709842ff465 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable_py3.py @@ -0,0 +1,52 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisVariable(Model): + """Ssis variable. + + :param id: Variable id. + :type id: long + :param name: Variable name. + :type name: str + :param description: Variable description. + :type description: str + :param data_type: Variable type. + :type data_type: str + :param sensitive: Whether variable is sensitive. + :type sensitive: bool + :param value: Variable value. 
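
A sketch of how the override model above is typically used: overrides are passed as a dict keyed by an SSIS property path, and `is_sensitive` controls whether SSISDB encrypts the value. The property path below is a representative example, not taken from this patch.

```python
from azure.mgmt.datafactory.models import SSISPropertyOverride

# Keys are SSIS property paths; value is required on the override object.
overrides = {
    "\\Package.Variables[User::MaxRows].Value": SSISPropertyOverride(
        value="10000", is_sensitive=False
    ),
}
```
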
+ :type value: str + :param sensitive_value: Variable sensitive value. + :type sensitive_value: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'sensitive': {'key': 'sensitive', 'type': 'bool'}, + 'value': {'key': 'value', 'type': 'str'}, + 'sensitive_value': {'key': 'sensitiveValue', 'type': 'str'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, data_type: str=None, sensitive: bool=None, value: str=None, sensitive_value: str=None, **kwargs) -> None: + super(SsisVariable, self).__init__(**kwargs) + self.id = id + self.name = name + self.description = description + self.data_type = data_type + self.sensitive = sensitive + self.value = value + self.sensitive_value = sensitive_value diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings.py index 61efe881513e..05ca8dff2c52 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings.py @@ -15,10 +15,12 @@ class StagingSettings(Model): """Staging settings. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param linked_service_name: Staging linked service reference. + :param linked_service_name: Required. Staging linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param path: The path to storage for storing the interim data. Type: @@ -41,9 +43,9 @@ class StagingSettings(Model): 'enable_compression': {'key': 'enableCompression', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, path=None, enable_compression=None): - super(StagingSettings, self).__init__() - self.additional_properties = additional_properties - self.linked_service_name = linked_service_name - self.path = path - self.enable_compression = enable_compression + def __init__(self, **kwargs): + super(StagingSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.linked_service_name = kwargs.get('linked_service_name', None) + self.path = kwargs.get('path', None) + self.enable_compression = kwargs.get('enable_compression', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings_py3.py new file mode 100644 index 000000000000..13b4353963a3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings_py3.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class StagingSettings(Model): + """Staging settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param linked_service_name: Required. Staging linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param path: The path to storage for storing the interim data. Type: + string (or Expression with resultType string). + :type path: object + :param enable_compression: Specifies whether to use compression when + copying data via an interim staging. Default value is false. Type: boolean + (or Expression with resultType boolean). + :type enable_compression: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'path': {'key': 'path', 'type': 'object'}, + 'enable_compression': {'key': 'enableCompression', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, path=None, enable_compression=None, **kwargs) -> None: + super(StagingSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.linked_service_name = linked_service_name + self.path = path + self.enable_compression = enable_compression diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings.py index 0f70fa4c1339..c12c0ce8860d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings.py @@ -15,10 +15,12 @@ class StoreReadSettings(Model): """Connector read setting. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: The read setting type. + :param type: Required. The read setting type. :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. 
Type: integer (or Expression with resultType @@ -36,8 +38,8 @@ class StoreReadSettings(Model): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, } - def __init__(self, type, additional_properties=None, max_concurrent_connections=None): - super(StoreReadSettings, self).__init__() - self.additional_properties = additional_properties - self.type = type - self.max_concurrent_connections = max_concurrent_connections + def __init__(self, **kwargs): + super(StoreReadSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = kwargs.get('type', None) + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings_py3.py new file mode 100644 index 000000000000..e2026fd52b93 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_read_settings_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class StoreReadSettings(Model): + """Connector read setting. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, **kwargs) -> None: + super(StoreReadSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type + self.max_concurrent_connections = max_concurrent_connections diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings.py index 74810b8e51b6..728b8cdd8c89 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings.py @@ -19,6 +19,8 @@ class StoreWriteSettings(Model): sub-classes are: FileServerWriteSettings, AzureDataLakeStoreWriteSettings, AzureBlobFSWriteSettings, AzureBlobStorageWriteSettings + All required parameters must be populated in order to send to Azure. 
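
Tying the staging model above together, a minimal sketch of enabling a staged copy. The linked service name is hypothetical, and `LinkedServiceReference` is assumed to take a required `reference_name`, consistent with the rest of this models namespace.

```python
from azure.mgmt.datafactory.models import LinkedServiceReference, StagingSettings

# linked_service_name is the one required parameter; enable_compression is
# typed as object so an Expression can be passed instead of a literal.
staging = StagingSettings(
    linked_service_name=LinkedServiceReference(reference_name="StagingBlobStorage"),
    path="staging/interim",
    enable_compression=True,
)
```
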
+ :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -28,7 +30,7 @@ class StoreWriteSettings(Model): :type max_concurrent_connections: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str """ @@ -47,9 +49,9 @@ class StoreWriteSettings(Model): 'type': {'FileServerWriteSettings': 'FileServerWriteSettings', 'AzureDataLakeStoreWriteSettings': 'AzureDataLakeStoreWriteSettings', 'AzureBlobFSWriteSettings': 'AzureBlobFSWriteSettings', 'AzureBlobStorageWriteSettings': 'AzureBlobStorageWriteSettings'} } - def __init__(self, additional_properties=None, max_concurrent_connections=None, copy_behavior=None): - super(StoreWriteSettings, self).__init__() - self.additional_properties = additional_properties - self.max_concurrent_connections = max_concurrent_connections - self.copy_behavior = copy_behavior + def __init__(self, **kwargs): + super(StoreWriteSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + self.copy_behavior = kwargs.get('copy_behavior', None) self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings_py3.py new file mode 100644 index 000000000000..7cce5d205541 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/store_write_settings_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class StoreWriteSettings(Model): + """Connector write settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: FileServerWriteSettings, AzureDataLakeStoreWriteSettings, + AzureBlobFSWriteSettings, AzureBlobStorageWriteSettings + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'FileServerWriteSettings': 'FileServerWriteSettings', 'AzureDataLakeStoreWriteSettings': 'AzureDataLakeStoreWriteSettings', 'AzureBlobFSWriteSettings': 'AzureBlobFSWriteSettings', 'AzureBlobStorageWriteSettings': 'AzureBlobStorageWriteSettings'} + } + + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(StoreWriteSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.max_concurrent_connections = max_concurrent_connections + self.copy_behavior = copy_behavior + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py index d480dc7f0931..ff16595aa8c7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py @@ -29,7 +29,7 @@ class StoredProcedureParameter(Model): 'type': {'key': 'type', 'type': 'str'}, } - def __init__(self, value=None, type=None): - super(StoredProcedureParameter, self).__init__() - self.value = value - self.type = type + def __init__(self, **kwargs): + super(StoredProcedureParameter, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.type = kwargs.get('type', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py new file mode 100644 index 000000000000..2842ef9ae35c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class StoredProcedureParameter(Model): + """SQL stored procedure parameter. + + :param value: Stored procedure parameter value. Type: string (or + Expression with resultType string). + :type value: object + :param type: Stored procedure parameter type. 
Possible values include: + 'String', 'Int', 'Int64', 'Decimal', 'Guid', 'Boolean', 'Date' + :type type: str or + ~azure.mgmt.datafactory.models.StoredProcedureParameterType + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, value=None, type=None, **kwargs) -> None: + super(StoredProcedureParameter, self).__init__(**kwargs) + self.value = value + self.type = type diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource.py index 89a27ab2a0c3..c80b531db7d1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource.py @@ -42,8 +42,8 @@ class SubResource(Model): 'etag': {'key': 'etag', 'type': 'str'}, } - def __init__(self): - super(SubResource, self).__init__() + def __init__(self, **kwargs): + super(SubResource, self).__init__(**kwargs) self.id = None self.name = None self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource_py3.py new file mode 100644 index 000000000000..3b2d9ec62366 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource_py3.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SubResource(Model): + """Azure Data Factory nested resource, which belongs to a factory. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(SubResource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + self.etag = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service.py index 6fef081a9dbc..83de0e6f61f2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service.py @@ -15,6 +15,8 @@ class SybaseLinkedService(LinkedService): """Linked service for Sybase data source. 
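
A sketch of the stored procedure parameter model in context. It is assumed here that `SqlSource` (defined elsewhere in this SDK, outside this patch) accepts a `stored_procedure_parameters` dict keyed by parameter name; the procedure and values are invented.

```python
from azure.mgmt.datafactory.models import SqlSource, StoredProcedureParameter

source = SqlSource(
    sql_reader_stored_procedure_name="usp_GetChangedRows",
    stored_procedure_parameters={
        # 'Date' is one of the documented StoredProcedureParameterType values.
        "since": StoredProcedureParameter(value="2019-06-01", type="Date"),
    },
)
```
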
+ All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,13 +31,13 @@ class SybaseLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param server: Server name for connection. Type: string (or Expression - with resultType string). + :param server: Required. Server name for connection. Type: string (or + Expression with resultType string). :type server: object - :param database: Database name for connection. Type: string (or Expression - with resultType string). + :param database: Required. Database name for connection. Type: string (or + Expression with resultType string). :type database: object :param schema: Schema name for connection. Type: string (or Expression with resultType string). @@ -77,13 +79,13 @@ class SybaseLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, server, database, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, schema=None, authentication_type=None, username=None, password=None, encrypted_credential=None): - super(SybaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.server = server - self.database = database - self.schema = schema - self.authentication_type = authentication_type - self.username = username - self.password = password - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(SybaseLinkedService, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.database = kwargs.get('database', None) + self.schema = kwargs.get('schema', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'Sybase' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service_py3.py new file mode 100644 index 000000000000..5b6cc0ce6ded --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service_py3.py @@ -0,0 +1,91 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SybaseLinkedService(LinkedService): + """Linked service for Sybase data source. + + All required parameters must be populated in order to send to Azure. 
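
A minimal construction sketch matching the py3 signature below: `server` and `database` are the two required type-properties, and everything else is optional. Host, database, and credentials are placeholders.

```python
from azure.mgmt.datafactory.models import SecureString, SybaseLinkedService

sybase_ls = SybaseLinkedService(
    server="sybase01.contoso.com",
    database="Sales",
    authentication_type="Basic",  # or 'Windows'
    username="etl_reader",
    password=SecureString(value="<password>"),
)
```
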
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. Server name for connection. Type: string (or + Expression with resultType string). + :type server: object + :param database: Required. Database name for connection. Type: string (or + Expression with resultType string). + :type database: object + :param schema: Schema name for connection. Type: string (or Expression + with resultType string). + :type schema: object + :param authentication_type: AuthenticationType to be used for connection. + Possible values include: 'Basic', 'Windows' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.SybaseAuthenticationType + :param username: Username for authentication. Type: string (or Expression + with resultType string). + :type username: object + :param password: Password for authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, server, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, schema=None, authentication_type=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(SybaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.server = server + self.database = database + self.schema = schema + self.authentication_type = authentication_type + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'Sybase' diff 
--git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source.py index 9fd5a579917c..02f89a8fca25 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source.py @@ -15,6 +15,8 @@ class SybaseSource(CopySource): """A copy activity source for Sybase databases. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class SybaseSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType string). @@ -49,7 +51,7 @@ class SybaseSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(SybaseSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'SybaseSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source_py3.py new file mode 100644 index 000000000000..c11e96174349 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SybaseSource(CopySource): + """A copy activity source for Sybase databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
+ :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'SybaseSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset.py index a1aab1b6cdb2..ff2dfd5471fb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset.py @@ -15,6 +15,8 @@ class SybaseTableDataset(Dataset): """The Sybase table dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class SybaseTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class SybaseTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: The Sybase table name. Type: string (or Expression with resultType string). 
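
A usage sketch for the source class above: it only adds an optional `query` on top of the inherited `CopySource` retry and concurrency settings. The query text is an example; the retry wait matches the documented `hh:mm:ss` pattern.

```python
from azure.mgmt.datafactory.models import SybaseSource

source = SybaseSource(
    query="select id, amount from Sales..Orders where status = 'open'",
    source_retry_count=3,
    source_retry_wait="00:00:30",
)
```
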
@@ -64,7 +66,7 @@ class SybaseTableDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): - super(SybaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name + def __init__(self, **kwargs): + super(SybaseTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) self.type = 'SybaseTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset_py3.py new file mode 100644 index 000000000000..88e9d3c287fe --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SybaseTableDataset(Dataset): + """The Sybase table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Sybase table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(SybaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'SybaseTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py index a1da6b219d03..6e02b0d389ab 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py @@ -15,6 +15,8 @@ class TeradataLinkedService(LinkedService): """Linked service for Teradata data source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class TeradataLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param connection_string: Teradata ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. 
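
As a usage sketch only — this construction is not part of the patch, and the
linked-service and table names are invented — the keyword-only py3 model above
is meant to be used like this:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, SybaseTableDataset)

    # linked_service_name is the only caller-supplied required argument; the
    # 'type' discriminator is filled in by the model itself.
    dataset = SybaseTableDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='MySybaseLinkedService'),
        table_name='dbo.Orders',
        description='Sample Sybase table dataset')
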
@@ -71,12 +73,12 @@ class TeradataLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, connection_string=None, server=None, authentication_type=None, username=None, password=None, encrypted_credential=None): - super(TeradataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.connection_string = connection_string - self.server = server - self.authentication_type = authentication_type - self.username = username - self.password = password - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(TeradataLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.server = kwargs.get('server', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'Teradata' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py new file mode 100644 index 000000000000..aac40efe69e0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py @@ -0,0 +1,84 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class TeradataLinkedService(LinkedService): + """Linked service for Teradata data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Teradata ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param server: Server name for connection. Type: string (or Expression + with resultType string). + :type server: object + :param authentication_type: AuthenticationType to be used for connection. 
+ Possible values include: 'Basic', 'Windows' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.TeradataAuthenticationType + :param username: Username for authentication. Type: string (or Expression + with resultType string). + :type username: object + :param password: Password for authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, server=None, authentication_type=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(TeradataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.server = server + self.authentication_type = authentication_type + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'Teradata' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings.py index b093db33f980..0f9c023f9553 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings.py @@ -35,8 +35,8 @@ class TeradataPartitionSettings(Model): 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, } - def __init__(self, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None): - super(TeradataPartitionSettings, self).__init__() - self.partition_column_name = partition_column_name - self.partition_upper_bound = partition_upper_bound - self.partition_lower_bound = partition_lower_bound + def __init__(self, **kwargs): + super(TeradataPartitionSettings, self).__init__(**kwargs) + self.partition_column_name = kwargs.get('partition_column_name', None) + self.partition_upper_bound = kwargs.get('partition_upper_bound', None) + self.partition_lower_bound = kwargs.get('partition_lower_bound', None) diff --git 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings_py3.py new file mode 100644 index 000000000000..04824e614ff2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings_py3.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TeradataPartitionSettings(Model): + """The settings that will be leveraged for teradata source partitioning. + + :param partition_column_name: The name of the column that will be used for + proceeding range or hash partitioning. Type: string (or Expression with + resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__(self, *, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None: + super(TeradataPartitionSettings, self).__init__(**kwargs) + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source.py index 9255ab18e746..81d1c8e202c1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source.py @@ -15,6 +15,8 @@ class TeradataSource(CopySource): """A copy activity Teradata source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class TeradataSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: Teradata query. Type: string (or Expression with resultType string). 
@@ -60,9 +62,9 @@ class TeradataSource(CopySource): 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, partition_option=None, partition_settings=None): - super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query - self.partition_option = partition_option - self.partition_settings = partition_settings + def __init__(self, **kwargs): + super(TeradataSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) self.type = 'TeradataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source_py3.py new file mode 100644 index 000000000000..79d8ccb01f14 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class TeradataSource(CopySource): + """A copy activity Teradata source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Teradata query. Type: string (or Expression with resultType + string). + :type query: object + :param partition_option: The partition mechanism that will be used for + teradata read in parallel. Possible values include: 'None', 'Hash', + 'DynamicRange' + :type partition_option: str or + ~azure.mgmt.datafactory.models.TeradataPartitionOption + :param partition_settings: The settings that will be leveraged for + teradata source partitioning. 
+ :type partition_settings: + ~azure.mgmt.datafactory.models.TeradataPartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, partition_option=None, partition_settings=None, **kwargs) -> None: + super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.partition_option = partition_option + self.partition_settings = partition_settings + self.type = 'TeradataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset.py index 8aceb5aeeaeb..e396bfd6fb15 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset.py @@ -15,6 +15,8 @@ class TeradataTableDataset(Dataset): """The Teradata database dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class TeradataTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class TeradataTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param database: The database name of Teradata. Type: string (or Expression with resultType string). 
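
A hedged sketch of how the new Teradata partitioning pieces compose — a
TeradataPartitionSettings feeding a TeradataSource. The query, column name,
and bounds are placeholders, not part of this patch:

    from azure.mgmt.datafactory.models import (
        TeradataPartitionSettings, TeradataSource)

    # Dynamic-range parallel read: the copy activity splits the source data
    # on order_id between the two bounds.
    settings = TeradataPartitionSettings(
        partition_column_name='order_id',
        partition_lower_bound='1',
        partition_upper_bound='1000000')

    source = TeradataSource(
        query='SELECT * FROM Orders',
        partition_option='DynamicRange',
        partition_settings=settings)
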
@@ -68,8 +70,8 @@ class TeradataTableDataset(Dataset): 'table': {'key': 'typeProperties.table', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, database=None, table=None): - super(TeradataTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.database = database - self.table = table + def __init__(self, **kwargs): + super(TeradataTableDataset, self).__init__(**kwargs) + self.database = kwargs.get('database', None) + self.table = kwargs.get('table', None) self.type = 'TeradataTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset_py3.py new file mode 100644 index 000000000000..892707b7f133 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class TeradataTableDataset(Dataset): + """The Teradata database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param database: The database name of Teradata. Type: string (or + Expression with resultType string). + :type database: object + :param table: The table name of Teradata. Type: string (or Expression with + resultType string). 
+ :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, database=None, table=None, **kwargs) -> None: + super(TeradataTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.database = database + self.table = table + self.type = 'TeradataTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format.py index 60d963930e56..48f32bf10133 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format.py @@ -15,6 +15,8 @@ class TextFormat(DatasetStorageFormat): """The data stored in text format. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -24,7 +26,7 @@ class TextFormat(DatasetStorageFormat): :param deserializer: Deserializer. Type: string (or Expression with resultType string). :type deserializer: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param column_delimiter: The column delimiter. Type: string (or Expression with resultType string). 
@@ -83,15 +85,15 @@ class TextFormat(DatasetStorageFormat): 'first_row_as_header': {'key': 'firstRowAsHeader', 'type': 'object'}, } - def __init__(self, additional_properties=None, serializer=None, deserializer=None, column_delimiter=None, row_delimiter=None, escape_char=None, quote_char=None, null_value=None, encoding_name=None, treat_empty_as_null=None, skip_line_count=None, first_row_as_header=None): - super(TextFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer) - self.column_delimiter = column_delimiter - self.row_delimiter = row_delimiter - self.escape_char = escape_char - self.quote_char = quote_char - self.null_value = null_value - self.encoding_name = encoding_name - self.treat_empty_as_null = treat_empty_as_null - self.skip_line_count = skip_line_count - self.first_row_as_header = first_row_as_header + def __init__(self, **kwargs): + super(TextFormat, self).__init__(**kwargs) + self.column_delimiter = kwargs.get('column_delimiter', None) + self.row_delimiter = kwargs.get('row_delimiter', None) + self.escape_char = kwargs.get('escape_char', None) + self.quote_char = kwargs.get('quote_char', None) + self.null_value = kwargs.get('null_value', None) + self.encoding_name = kwargs.get('encoding_name', None) + self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None) + self.skip_line_count = kwargs.get('skip_line_count', None) + self.first_row_as_header = kwargs.get('first_row_as_header', None) self.type = 'TextFormat' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format_py3.py new file mode 100644 index 000000000000..0d876f62b112 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format_py3.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_storage_format_py3 import DatasetStorageFormat + + +class TextFormat(DatasetStorageFormat): + """The data stored in text format. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param serializer: Serializer. Type: string (or Expression with resultType + string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with + resultType string). + :type deserializer: object + :param type: Required. Constant filled by server. + :type type: str + :param column_delimiter: The column delimiter. Type: string (or Expression + with resultType string). + :type column_delimiter: object + :param row_delimiter: The row delimiter. Type: string (or Expression with + resultType string). + :type row_delimiter: object + :param escape_char: The escape character. Type: string (or Expression with + resultType string). + :type escape_char: object + :param quote_char: The quote character. Type: string (or Expression with + resultType string). 
+ :type quote_char: object
+ :param null_value: The null value string. Type: string (or Expression with
+ resultType string).
+ :type null_value: object
+ :param encoding_name: The code page name of the preferred encoding. If
+ not specified, the default value is "utf-8", unless BOM denotes another
+ Unicode encoding. Refer to the "Name" column of the table in the following
+ link to set supported values:
+ https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string
+ (or Expression with resultType string).
+ :type encoding_name: object
+ :param treat_empty_as_null: Treat empty column values in the text file as
+ null. The default value is true. Type: boolean (or Expression with
+ resultType boolean).
+ :type treat_empty_as_null: object
+ :param skip_line_count: The number of lines/rows to be skipped when
+ parsing text files. The default value is 0. Type: integer (or Expression
+ with resultType integer).
+ :type skip_line_count: object
+ :param first_row_as_header: When used as input, treat the first row of
+ data as headers. When used as output, write the headers into the output as
+ the first row of data. The default value is false. Type: boolean (or
+ Expression with resultType boolean).
+ :type first_row_as_header: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'serializer': {'key': 'serializer', 'type': 'object'},
+ 'deserializer': {'key': 'deserializer', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'column_delimiter': {'key': 'columnDelimiter', 'type': 'object'},
+ 'row_delimiter': {'key': 'rowDelimiter', 'type': 'object'},
+ 'escape_char': {'key': 'escapeChar', 'type': 'object'},
+ 'quote_char': {'key': 'quoteChar', 'type': 'object'},
+ 'null_value': {'key': 'nullValue', 'type': 'object'},
+ 'encoding_name': {'key': 'encodingName', 'type': 'object'},
+ 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'},
+ 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'},
+ 'first_row_as_header': {'key': 'firstRowAsHeader', 'type': 'object'},
+ }
+
+ def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, column_delimiter=None, row_delimiter=None, escape_char=None, quote_char=None, null_value=None, encoding_name=None, treat_empty_as_null=None, skip_line_count=None, first_row_as_header=None, **kwargs) -> None:
+ super(TextFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs)
+ self.column_delimiter = column_delimiter
+ self.row_delimiter = row_delimiter
+ self.escape_char = escape_char
+ self.quote_char = quote_char
+ self.null_value = null_value
+ self.encoding_name = encoding_name
+ self.treat_empty_as_null = treat_empty_as_null
+ self.skip_line_count = skip_line_count
+ self.first_row_as_header = first_row_as_header
+ self.type = 'TextFormat'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py
index 3350057fbeae..728ffc32bcb5 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py
@@ -23,6 +23,8 @@ class Trigger(Model):
 Variables are only populated by the server, and will be ignored when
 sending a request.
+ All required parameters must be populated in order to send to Azure.
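
To make the TextFormat knobs above concrete, a minimal illustrative
configuration (values invented; unset properties fall back to the defaults
described in the docstring):

    from azure.mgmt.datafactory.models import TextFormat

    # A comma-delimited text format with a header row and UTF-8 encoding.
    fmt = TextFormat(
        column_delimiter=',',
        row_delimiter='\n',
        quote_char='"',
        null_value='NULL',
        encoding_name='utf-8',
        first_row_as_header=True,
        skip_line_count=0)
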
+ :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -36,7 +38,7 @@ class Trigger(Model): :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str """ @@ -57,10 +59,10 @@ class Trigger(Model): 'type': {'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} } - def __init__(self, additional_properties=None, description=None, annotations=None): - super(Trigger, self).__init__() - self.additional_properties = additional_properties - self.description = description + def __init__(self, **kwargs): + super(Trigger, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.description = kwargs.get('description', None) self.runtime_state = None - self.annotations = annotations + self.annotations = kwargs.get('annotations', None) self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference.py index fb87ceffcc45..089aa9a3e5fc 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference.py @@ -18,9 +18,11 @@ class TriggerDependencyReference(DependencyReference): You probably want to use the sub-classes and not this class directly. Known sub-classes are: TumblingWindowTriggerDependencyReference - :param type: Constant filled by server. + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. :type type: str - :param reference_trigger: Referenced trigger. + :param reference_trigger: Required. Referenced trigger. :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference """ @@ -38,7 +40,7 @@ class TriggerDependencyReference(DependencyReference): 'type': {'TumblingWindowTriggerDependencyReference': 'TumblingWindowTriggerDependencyReference'} } - def __init__(self, reference_trigger): - super(TriggerDependencyReference, self).__init__() - self.reference_trigger = reference_trigger + def __init__(self, **kwargs): + super(TriggerDependencyReference, self).__init__(**kwargs) + self.reference_trigger = kwargs.get('reference_trigger', None) self.type = 'TriggerDependencyReference' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference_py3.py new file mode 100644 index 000000000000..716a0d926f8b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .dependency_reference_py3 import DependencyReference + + +class TriggerDependencyReference(DependencyReference): + """Trigger referenced dependency. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: TumblingWindowTriggerDependencyReference + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param reference_trigger: Required. Referenced trigger. + :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference + """ + + _validation = { + 'type': {'required': True}, + 'reference_trigger': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'}, + } + + _subtype_map = { + 'type': {'TumblingWindowTriggerDependencyReference': 'TumblingWindowTriggerDependencyReference'} + } + + def __init__(self, *, reference_trigger, **kwargs) -> None: + super(TriggerDependencyReference, self).__init__(**kwargs) + self.reference_trigger = reference_trigger + self.type = 'TriggerDependencyReference' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference.py index 52d0dc2609bf..70c9f2904347 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference.py @@ -26,7 +26,7 @@ class TriggerPipelineReference(Model): 'parameters': {'key': 'parameters', 'type': '{object}'}, } - def __init__(self, pipeline_reference=None, parameters=None): - super(TriggerPipelineReference, self).__init__() - self.pipeline_reference = pipeline_reference - self.parameters = parameters + def __init__(self, **kwargs): + super(TriggerPipelineReference, self).__init__(**kwargs) + self.pipeline_reference = kwargs.get('pipeline_reference', None) + self.parameters = kwargs.get('parameters', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference_py3.py new file mode 100644 index 000000000000..e32af8006326 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference_py3.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TriggerPipelineReference(Model): + """Pipeline that needs to be triggered with the given parameters. + + :param pipeline_reference: Pipeline reference. + :type pipeline_reference: ~azure.mgmt.datafactory.models.PipelineReference + :param parameters: Pipeline parameters. 
+ :type parameters: dict[str, object] + """ + + _attribute_map = { + 'pipeline_reference': {'key': 'pipelineReference', 'type': 'PipelineReference'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + def __init__(self, *, pipeline_reference=None, parameters=None, **kwargs) -> None: + super(TriggerPipelineReference, self).__init__(**kwargs) + self.pipeline_reference = pipeline_reference + self.parameters = parameters diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py new file mode 100644 index 000000000000..862973544ab4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class Trigger(Model): + """Azure data factory nested object which contains information about creating + pipeline run. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: RerunTumblingWindowTrigger, TumblingWindowTrigger, + MultiplePipelineTrigger + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. 
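
As an illustration of the TriggerPipelineReference model defined above (the
pipeline name is a placeholder; the window-start expression is the usual
tumbling-window idiom for passing slice boundaries to the pipeline):

    from azure.mgmt.datafactory.models import (
        PipelineReference, TriggerPipelineReference)

    # Hands the trigger's window boundary to a pipeline parameter.
    pipeline_ref = TriggerPipelineReference(
        pipeline_reference=PipelineReference(reference_name='CopyPipeline'),
        parameters={'windowStart': '@trigger().outputs.windowStartTime'})
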
+ :type type: str + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} + } + + def __init__(self, *, additional_properties=None, description: str=None, annotations=None, **kwargs) -> None: + super(Trigger, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.description = description + self.runtime_state = None + self.annotations = annotations + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference.py index bc87f79c1f24..a4f952dac85f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference.py @@ -18,9 +18,12 @@ class TriggerReference(Model): Variables are only populated by the server, and will be ignored when sending a request. - :ivar type: Trigger reference type. Default value: "TriggerReference" . + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Trigger reference type. Default value: + "TriggerReference" . :vartype type: str - :param reference_name: Reference trigger name. + :param reference_name: Required. Reference trigger name. :type reference_name: str """ @@ -36,6 +39,6 @@ class TriggerReference(Model): type = "TriggerReference" - def __init__(self, reference_name): - super(TriggerReference, self).__init__() - self.reference_name = reference_name + def __init__(self, **kwargs): + super(TriggerReference, self).__init__(**kwargs) + self.reference_name = kwargs.get('reference_name', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference_py3.py new file mode 100644 index 000000000000..805e407e80a7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference_py3.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TriggerReference(Model): + """Trigger reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Trigger reference type. Default value: + "TriggerReference" . + :vartype type: str + :param reference_name: Required. Reference trigger name. 
+ :type reference_name: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + type = "TriggerReference" + + def __init__(self, *, reference_name: str, **kwargs) -> None: + super(TriggerReference, self).__init__(**kwargs) + self.reference_name = reference_name diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource.py index ea35206bbed7..539ac4775350 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource.py @@ -18,6 +18,8 @@ class TriggerResource(SubResource): Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. + :ivar id: The resource identifier. :vartype id: str :ivar name: The resource name. @@ -26,7 +28,7 @@ class TriggerResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param properties: Properties of the trigger. + :param properties: Required. Properties of the trigger. :type properties: ~azure.mgmt.datafactory.models.Trigger """ @@ -46,6 +48,6 @@ class TriggerResource(SubResource): 'properties': {'key': 'properties', 'type': 'Trigger'}, } - def __init__(self, properties): - super(TriggerResource, self).__init__() - self.properties = properties + def __init__(self, **kwargs): + super(TriggerResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource_py3.py new file mode 100644 index 000000000000..ae6a04ac3128 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource_py3.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .sub_resource_py3 import SubResource + + +class TriggerResource(SubResource): + """Trigger resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Properties of the trigger. 
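
The _subtype_map on Trigger is what routes deserialization to the concrete
subclass. A sketch of that mechanism, assuming msrest's Model.from_dict
helper and an invented payload:

    from azure.mgmt.datafactory import models

    # The 'type' discriminator selects TumblingWindowTrigger via
    # Trigger._subtype_map; attribute keys like 'typeProperties.frequency'
    # are unflattened by the deserializer.
    raw = {'type': 'TumblingWindowTrigger',
           'typeProperties': {'frequency': 'Hour', 'interval': 24}}
    trigger = models.Trigger.from_dict(raw)
    print(type(trigger).__name__)  # TumblingWindowTrigger
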
+ :type properties: ~azure.mgmt.datafactory.models.Trigger + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'Trigger'}, + } + + def __init__(self, *, properties, **kwargs) -> None: + super(TriggerResource, self).__init__(**kwargs) + self.properties = properties diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run.py index 2fb74b9dcbab..9fad7bbfd9fa 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run.py @@ -65,9 +65,9 @@ class TriggerRun(Model): 'triggered_pipelines': {'key': 'triggeredPipelines', 'type': '{str}'}, } - def __init__(self, additional_properties=None): - super(TriggerRun, self).__init__() - self.additional_properties = additional_properties + def __init__(self, **kwargs): + super(TriggerRun, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) self.trigger_run_id = None self.trigger_name = None self.trigger_type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run_py3.py new file mode 100644 index 000000000000..5a9fe50f6894 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run_py3.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TriggerRun(Model): + """Trigger runs. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar trigger_run_id: Trigger run id. + :vartype trigger_run_id: str + :ivar trigger_name: Trigger name. + :vartype trigger_name: str + :ivar trigger_type: Trigger type. + :vartype trigger_type: str + :ivar trigger_run_timestamp: Trigger run start time. + :vartype trigger_run_timestamp: datetime + :ivar status: Trigger run status. Possible values include: 'Succeeded', + 'Failed', 'Inprogress' + :vartype status: str or ~azure.mgmt.datafactory.models.TriggerRunStatus + :ivar message: Trigger error message. + :vartype message: str + :ivar properties: List of property name and value related to trigger run. + Name, value pair depends on type of trigger. + :vartype properties: dict[str, str] + :ivar triggered_pipelines: List of pipeline name and run Id triggered by + the trigger run. 
+ :vartype triggered_pipelines: dict[str, str] + """ + + _validation = { + 'trigger_run_id': {'readonly': True}, + 'trigger_name': {'readonly': True}, + 'trigger_type': {'readonly': True}, + 'trigger_run_timestamp': {'readonly': True}, + 'status': {'readonly': True}, + 'message': {'readonly': True}, + 'properties': {'readonly': True}, + 'triggered_pipelines': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'trigger_run_id': {'key': 'triggerRunId', 'type': 'str'}, + 'trigger_name': {'key': 'triggerName', 'type': 'str'}, + 'trigger_type': {'key': 'triggerType', 'type': 'str'}, + 'trigger_run_timestamp': {'key': 'triggerRunTimestamp', 'type': 'iso-8601'}, + 'status': {'key': 'status', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'triggered_pipelines': {'key': 'triggeredPipelines', 'type': '{str}'}, + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(TriggerRun, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.trigger_run_id = None + self.trigger_name = None + self.trigger_type = None + self.trigger_run_timestamp = None + self.status = None + self.message = None + self.properties = None + self.triggered_pipelines = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response.py index a6dbecd8a524..7684fe7eb7dc 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response.py @@ -15,7 +15,9 @@ class TriggerRunsQueryResponse(Model): """A list of trigger runs. - :param value: List of trigger runs. + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of trigger runs. :type value: list[~azure.mgmt.datafactory.models.TriggerRun] :param continuation_token: The continuation token for getting the next page of results, if any remaining results exist, null otherwise. @@ -31,7 +33,7 @@ class TriggerRunsQueryResponse(Model): 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, } - def __init__(self, value, continuation_token=None): - super(TriggerRunsQueryResponse, self).__init__() - self.value = value - self.continuation_token = continuation_token + def __init__(self, **kwargs): + super(TriggerRunsQueryResponse, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.continuation_token = kwargs.get('continuation_token', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response_py3.py new file mode 100644 index 000000000000..391a2441b3d1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response_py3.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TriggerRunsQueryResponse(Model): + """A list of trigger runs. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of trigger runs. + :type value: list[~azure.mgmt.datafactory.models.TriggerRun] + :param continuation_token: The continuation token for getting the next + page of results, if any remaining results exist, null otherwise. + :type continuation_token: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[TriggerRun]'}, + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + } + + def __init__(self, *, value, continuation_token: str=None, **kwargs) -> None: + super(TriggerRunsQueryResponse, self).__init__(**kwargs) + self.value = value + self.continuation_token = continuation_token diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_subscription_operation_status.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_subscription_operation_status.py index 58125f8327c3..6a581e757840 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_subscription_operation_status.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_subscription_operation_status.py @@ -36,7 +36,7 @@ class TriggerSubscriptionOperationStatus(Model): 'status': {'key': 'status', 'type': 'str'}, } - def __init__(self): - super(TriggerSubscriptionOperationStatus, self).__init__() + def __init__(self, **kwargs): + super(TriggerSubscriptionOperationStatus, self).__init__(**kwargs) self.trigger_name = None self.status = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_subscription_operation_status_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_subscription_operation_status_py3.py new file mode 100644 index 000000000000..40ead4c50fe4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_subscription_operation_status_py3.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TriggerSubscriptionOperationStatus(Model): + """Defines the response of a trigger subscription operation. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar trigger_name: Trigger name. + :vartype trigger_name: str + :ivar status: Event Subscription Status. 
Possible values include: + 'Enabled', 'Provisioning', 'Deprovisioning', 'Disabled', 'Unknown' + :vartype status: str or + ~azure.mgmt.datafactory.models.EventSubscriptionStatus + """ + + _validation = { + 'trigger_name': {'readonly': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'trigger_name': {'key': 'triggerName', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(TriggerSubscriptionOperationStatus, self).__init__(**kwargs) + self.trigger_name = None + self.status = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger.py index f69967dcc186..939624ae5042 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger.py @@ -20,6 +20,8 @@ class TumblingWindowTrigger(Trigger): Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -33,21 +35,21 @@ class TumblingWindowTrigger(Trigger): :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param pipeline: Pipeline for which runs are created when an event is - fired for trigger window that is ready. + :param pipeline: Required. Pipeline for which runs are created when an + event is fired for trigger window that is ready. :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference - :param frequency: The frequency of the time windows. Possible values - include: 'Minute', 'Hour' + :param frequency: Required. The frequency of the time windows. Possible + values include: 'Minute', 'Hour' :type frequency: str or ~azure.mgmt.datafactory.models.TumblingWindowFrequency - :param interval: The interval of the time windows. The minimum interval - allowed is 15 Minutes. + :param interval: Required. The interval of the time windows. The minimum + interval allowed is 15 Minutes. :type interval: int - :param start_time: The start time for the time period for the trigger - during which events are fired for windows that are ready. Only UTC time is - currently supported. + :param start_time: Required. The start time for the time period for the + trigger during which events are fired for windows that are ready. Only UTC + time is currently supported. :type start_time: datetime :param end_time: The end time for the time period for the trigger during which events are fired for windows that are ready. Only UTC time is @@ -58,8 +60,8 @@ class TumblingWindowTrigger(Trigger): default is 0. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type delay: object - :param max_concurrency: The max number of parallel time windows (ready for - execution) for which a new run is triggered. + :param max_concurrency: Required. The max number of parallel time windows + (ready for execution) for which a new run is triggered. 
:type max_concurrency: int :param retry_policy: Retry policy that will be applied for failed pipeline runs. @@ -96,15 +98,15 @@ class TumblingWindowTrigger(Trigger): 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[DependencyReference]'}, } - def __init__(self, pipeline, frequency, interval, start_time, max_concurrency, additional_properties=None, description=None, annotations=None, end_time=None, delay=None, retry_policy=None, depends_on=None): - super(TumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations) - self.pipeline = pipeline - self.frequency = frequency - self.interval = interval - self.start_time = start_time - self.end_time = end_time - self.delay = delay - self.max_concurrency = max_concurrency - self.retry_policy = retry_policy - self.depends_on = depends_on + def __init__(self, **kwargs): + super(TumblingWindowTrigger, self).__init__(**kwargs) + self.pipeline = kwargs.get('pipeline', None) + self.frequency = kwargs.get('frequency', None) + self.interval = kwargs.get('interval', None) + self.start_time = kwargs.get('start_time', None) + self.end_time = kwargs.get('end_time', None) + self.delay = kwargs.get('delay', None) + self.max_concurrency = kwargs.get('max_concurrency', None) + self.retry_policy = kwargs.get('retry_policy', None) + self.depends_on = kwargs.get('depends_on', None) self.type = 'TumblingWindowTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference.py index 67f5592f3cdb..89dcefbc8c09 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference.py @@ -15,9 +15,11 @@ class TumblingWindowTriggerDependencyReference(TriggerDependencyReference): """Referenced tumbling window trigger dependency. - :param type: Constant filled by server. + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. :type type: str - :param reference_trigger: Referenced trigger. + :param reference_trigger: Required. Referenced trigger. :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference :param offset: Timespan applied to the start time of a tumbling window when evaluating dependency. 
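The tumbling window trigger refactored above is a good example of what this patch changes throughout: pipeline, frequency, interval, start_time and max_concurrency are now documented as Required, and the new _py3 model makes them keyword-only constructor arguments. A minimal usage sketch, assuming an existing DataFactoryManagementClient named adf_client; the resource group, factory, pipeline and trigger names are placeholders:

    from datetime import datetime

    from azure.mgmt.datafactory.models import (
        PipelineReference,
        TriggerPipelineReference,
        TriggerResource,
        TumblingWindowTrigger,
    )

    # Reference the pipeline the trigger fires; the window boundaries are
    # handed to each run as parameters.
    pipeline_ref = TriggerPipelineReference(
        pipeline_reference=PipelineReference(reference_name='examplePipeline'),
        parameters={'windowStart': '@trigger().outputs.windowStartTime'},
    )

    # The required properties are keyword-only in the _py3 model, so leaving
    # one out raises TypeError at construction rather than at serialization.
    trigger = TumblingWindowTrigger(
        pipeline=pipeline_ref,
        frequency='Minute',
        interval=15,                      # 15 minutes is the documented minimum
        start_time=datetime(2019, 6, 1),  # only UTC is currently supported
        max_concurrency=10,               # validated against the 1..50 range
    )

    adf_client.triggers.create_or_update(
        'exampleResourceGroup', 'exampleFactory', 'exampleTumblingTrigger',
        TriggerResource(properties=trigger),
    )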
@@ -41,8 +43,8 @@ class TumblingWindowTriggerDependencyReference(TriggerDependencyReference): 'size': {'key': 'size', 'type': 'str'}, } - def __init__(self, reference_trigger, offset=None, size=None): - super(TumblingWindowTriggerDependencyReference, self).__init__(reference_trigger=reference_trigger) - self.offset = offset - self.size = size + def __init__(self, **kwargs): + super(TumblingWindowTriggerDependencyReference, self).__init__(**kwargs) + self.offset = kwargs.get('offset', None) + self.size = kwargs.get('size', None) self.type = 'TumblingWindowTriggerDependencyReference' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference_py3.py new file mode 100644 index 000000000000..648f25e59937 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference_py3.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .trigger_dependency_reference_py3 import TriggerDependencyReference + + +class TumblingWindowTriggerDependencyReference(TriggerDependencyReference): + """Referenced tumbling window trigger dependency. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param reference_trigger: Required. Referenced trigger. + :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference + :param offset: Timespan applied to the start time of a tumbling window + when evaluating dependency. + :type offset: str + :param size: The size of the window when evaluating the dependency. If + undefined the frequency of the tumbling window will be used. 
+ :type size: str + """ + + _validation = { + 'type': {'required': True}, + 'reference_trigger': {'required': True}, + 'offset': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'}, + 'offset': {'key': 'offset', 'type': 'str'}, + 'size': {'key': 'size', 'type': 'str'}, + } + + def __init__(self, *, reference_trigger, offset: str=None, size: str=None, **kwargs) -> None: + super(TumblingWindowTriggerDependencyReference, self).__init__(reference_trigger=reference_trigger, **kwargs) + self.offset = offset + self.size = size + self.type = 'TumblingWindowTriggerDependencyReference' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_py3.py new file mode 100644 index 000000000000..6856629c8b91 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_py3.py @@ -0,0 +1,112 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .trigger_py3 import Trigger + + +class TumblingWindowTrigger(Trigger): + """Trigger that schedules pipeline runs for all fixed time interval windows + from a start time without gaps and also supports backfill scenarios (when + start time is in the past). + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param pipeline: Required. Pipeline for which runs are created when an + event is fired for trigger window that is ready. + :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference + :param frequency: Required. The frequency of the time windows. Possible + values include: 'Minute', 'Hour' + :type frequency: str or + ~azure.mgmt.datafactory.models.TumblingWindowFrequency + :param interval: Required. The interval of the time windows. The minimum + interval allowed is 15 Minutes. + :type interval: int + :param start_time: Required. 
The start time for the time period for the + trigger during which events are fired for windows that are ready. Only UTC + time is currently supported. + :type start_time: datetime + :param end_time: The end time for the time period for the trigger during + which events are fired for windows that are ready. Only UTC time is + currently supported. + :type end_time: datetime + :param delay: Specifies how long the trigger waits past due time before + triggering new run. It doesn't alter window start and end time. The + default is 0. Type: string (or Expression with resultType string), + pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type delay: object + :param max_concurrency: Required. The max number of parallel time windows + (ready for execution) for which a new run is triggered. + :type max_concurrency: int + :param retry_policy: Retry policy that will be applied for failed pipeline + runs. + :type retry_policy: ~azure.mgmt.datafactory.models.RetryPolicy + :param depends_on: Triggers that this trigger depends on. Only tumbling + window triggers are supported. + :type depends_on: list[~azure.mgmt.datafactory.models.DependencyReference] + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'pipeline': {'required': True}, + 'frequency': {'required': True}, + 'interval': {'required': True}, + 'start_time': {'required': True}, + 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, + 'frequency': {'key': 'typeProperties.frequency', 'type': 'str'}, + 'interval': {'key': 'typeProperties.interval', 'type': 'int'}, + 'start_time': {'key': 'typeProperties.startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'typeProperties.endTime', 'type': 'iso-8601'}, + 'delay': {'key': 'typeProperties.delay', 'type': 'object'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + 'retry_policy': {'key': 'typeProperties.retryPolicy', 'type': 'RetryPolicy'}, + 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[DependencyReference]'}, + } + + def __init__(self, *, pipeline, frequency, interval: int, start_time, max_concurrency: int, additional_properties=None, description: str=None, annotations=None, end_time=None, delay=None, retry_policy=None, depends_on=None, **kwargs) -> None: + super(TumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) + self.pipeline = pipeline + self.frequency = frequency + self.interval = interval + self.start_time = start_time + self.end_time = end_time + self.delay = delay + self.max_concurrency = max_concurrency + self.retry_policy = retry_policy + self.depends_on = depends_on + self.type = 'TumblingWindowTrigger' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity.py index 402d3f737131..eede36501d6c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity.py +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity.py @@ -16,10 +16,12 @@ class UntilActivity(ControlActivity): """This activity executes inner activities until the specified boolean expression results to true or timeout is reached, whichever is earlier. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Activity name. + :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str @@ -27,10 +29,10 @@ class UntilActivity(ControlActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param expression: An expression that would evaluate to Boolean. The loop - will continue until this expression evaluates to true + :param expression: Required. An expression that would evaluate to Boolean. + The loop will continue until this expression evaluates to true :type expression: ~azure.mgmt.datafactory.models.Expression :param timeout: Specifies the timeout for the activity to run. If there is no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 @@ -39,7 +41,7 @@ class UntilActivity(ControlActivity): string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type timeout: object - :param activities: List of activities to execute. + :param activities: Required. List of activities to execute. :type activities: list[~azure.mgmt.datafactory.models.Activity] """ @@ -62,9 +64,9 @@ class UntilActivity(ControlActivity): 'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'}, } - def __init__(self, name, expression, activities, additional_properties=None, description=None, depends_on=None, user_properties=None, timeout=None): - super(UntilActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties) - self.expression = expression - self.timeout = timeout - self.activities = activities + def __init__(self, **kwargs): + super(UntilActivity, self).__init__(**kwargs) + self.expression = kwargs.get('expression', None) + self.timeout = kwargs.get('timeout', None) + self.activities = kwargs.get('activities', None) self.type = 'Until' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity_py3.py new file mode 100644 index 000000000000..40c03ce18591 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + + from .control_activity_py3 import ControlActivity + + + class UntilActivity(ControlActivity): + """This activity executes inner activities until the specified boolean + expression results to true or timeout is reached, whichever is earlier. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param expression: Required. An expression that would evaluate to Boolean. + The loop will continue until this expression evaluates to true + :type expression: ~azure.mgmt.datafactory.models.Expression + :param timeout: Specifies the timeout for the activity to run. If there is + no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 + week as default. Type: string (or Expression with resultType string), + pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type timeout: object + :param activities: Required. List of activities to execute. + :type activities: list[~azure.mgmt.datafactory.models.Activity] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'expression': {'required': True}, + 'activities': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'expression': {'key': 'typeProperties.expression', 'type': 'Expression'}, + 'timeout': {'key': 'typeProperties.timeout', 'type': 'object'}, + 'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'}, + } + + def __init__(self, *, name: str, expression, activities, additional_properties=None, description: str=None, depends_on=None, user_properties=None, timeout=None, **kwargs) -> None: + super(UntilActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.expression = expression + self.timeout = timeout + self.activities = activities + self.type = 'Until' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request.py index 6bb70551971d..c6460310225a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request.py @@ -29,6 +29,6 @@ class
UpdateIntegrationRuntimeNodeRequest(Model): 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, } - def __init__(self, concurrent_jobs_limit=None): - super(UpdateIntegrationRuntimeNodeRequest, self).__init__() - self.concurrent_jobs_limit = concurrent_jobs_limit + def __init__(self, **kwargs): + super(UpdateIntegrationRuntimeNodeRequest, self).__init__(**kwargs) + self.concurrent_jobs_limit = kwargs.get('concurrent_jobs_limit', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request_py3.py new file mode 100644 index 000000000000..de1605885139 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request_py3.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class UpdateIntegrationRuntimeNodeRequest(Model): + """Update integration runtime node request. + + :param concurrent_jobs_limit: The number of concurrent jobs permitted to + run on the integration runtime node. Values between 1 and + maxConcurrentJobs(inclusive) are allowed. + :type concurrent_jobs_limit: int + """ + + _validation = { + 'concurrent_jobs_limit': {'minimum': 1}, + } + + _attribute_map = { + 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, + } + + def __init__(self, *, concurrent_jobs_limit: int=None, **kwargs) -> None: + super(UpdateIntegrationRuntimeNodeRequest, self).__init__(**kwargs) + self.concurrent_jobs_limit = concurrent_jobs_limit diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request.py index f4e02900146a..bd5e332b50f5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request.py @@ -32,7 +32,7 @@ class UpdateIntegrationRuntimeRequest(Model): 'update_delay_offset': {'key': 'updateDelayOffset', 'type': 'str'}, } - def __init__(self, auto_update=None, update_delay_offset=None): - super(UpdateIntegrationRuntimeRequest, self).__init__() - self.auto_update = auto_update - self.update_delay_offset = update_delay_offset + def __init__(self, **kwargs): + super(UpdateIntegrationRuntimeRequest, self).__init__(**kwargs) + self.auto_update = kwargs.get('auto_update', None) + self.update_delay_offset = kwargs.get('update_delay_offset', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request_py3.py new file mode 100644 index 000000000000..731cb942b472 --- /dev/null +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request_py3.py @@ -0,0 +1,38 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class UpdateIntegrationRuntimeRequest(Model): + """Update integration runtime request. + + :param auto_update: Enables or disables the auto-update feature of the + self-hosted integration runtime. See + https://go.microsoft.com/fwlink/?linkid=854189. Possible values include: + 'On', 'Off' + :type auto_update: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate + :param update_delay_offset: The time offset (in hours) in the day, e.g., + PT03H is 3 hours. The integration runtime auto update will happen on that + time. + :type update_delay_offset: str + """ + + _attribute_map = { + 'auto_update': {'key': 'autoUpdate', 'type': 'str'}, + 'update_delay_offset': {'key': 'updateDelayOffset', 'type': 'str'}, + } + + def __init__(self, *, auto_update=None, update_delay_offset: str=None, **kwargs) -> None: + super(UpdateIntegrationRuntimeRequest, self).__init__(**kwargs) + self.auto_update = auto_update + self.update_delay_offset = update_delay_offset diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy.py index d640439f57e4..b51e313b6f0c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy.py @@ -42,10 +42,10 @@ class UserAccessPolicy(Model): 'expire_time': {'key': 'expireTime', 'type': 'str'}, } - def __init__(self, permissions=None, access_resource_path=None, profile_name=None, start_time=None, expire_time=None): - super(UserAccessPolicy, self).__init__() - self.permissions = permissions - self.access_resource_path = access_resource_path - self.profile_name = profile_name - self.start_time = start_time - self.expire_time = expire_time + def __init__(self, **kwargs): + super(UserAccessPolicy, self).__init__(**kwargs) + self.permissions = kwargs.get('permissions', None) + self.access_resource_path = kwargs.get('access_resource_path', None) + self.profile_name = kwargs.get('profile_name', None) + self.start_time = kwargs.get('start_time', None) + self.expire_time = kwargs.get('expire_time', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy_py3.py new file mode 100644 index 000000000000..26e2a7639a09 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy_py3.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class UserAccessPolicy(Model): + """Get Data Plane read only token request definition. + + :param permissions: The string with permissions for Data Plane access. + Currently only 'r' is supported which grants read only access. + :type permissions: str + :param access_resource_path: The resource path to get access relative to + factory. Currently only empty string is supported which corresponds to the + factory resource. + :type access_resource_path: str + :param profile_name: The name of the profile. Currently only the default + is supported. The default value is DefaultProfile. + :type profile_name: str + :param start_time: Start time for the token. If not specified the current + time will be used. + :type start_time: str + :param expire_time: Expiration time for the token. Maximum duration for + the token is eight hours and by default the token will expire in eight + hours. + :type expire_time: str + """ + + _attribute_map = { + 'permissions': {'key': 'permissions', 'type': 'str'}, + 'access_resource_path': {'key': 'accessResourcePath', 'type': 'str'}, + 'profile_name': {'key': 'profileName', 'type': 'str'}, + 'start_time': {'key': 'startTime', 'type': 'str'}, + 'expire_time': {'key': 'expireTime', 'type': 'str'}, + } + + def __init__(self, *, permissions: str=None, access_resource_path: str=None, profile_name: str=None, start_time: str=None, expire_time: str=None, **kwargs) -> None: + super(UserAccessPolicy, self).__init__(**kwargs) + self.permissions = permissions + self.access_resource_path = access_resource_path + self.profile_name = profile_name + self.start_time = start_time + self.expire_time = expire_time diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property.py index 4356304f3f08..30692d2960ec 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property.py @@ -15,10 +15,12 @@ class UserProperty(Model): """User property. - :param name: User property name. + All required parameters must be populated in order to send to Azure. + + :param name: Required. User property name. :type name: str - :param value: User property value. Type: string (or Expression with - resultType string). + :param value: Required. User property value. Type: string (or Expression + with resultType string). 
:type value: object """ @@ -32,7 +34,7 @@ class UserProperty(Model): 'value': {'key': 'value', 'type': 'object'}, } - def __init__(self, name, value): - super(UserProperty, self).__init__() - self.name = name - self.value = value + def __init__(self, **kwargs): + super(UserProperty, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.value = kwargs.get('value', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property_py3.py new file mode 100644 index 000000000000..7b4f3beb0195 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property_py3.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class UserProperty(Model): + """User property. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. User property name. + :type name: str + :param value: Required. User property value. Type: string (or Expression + with resultType string). + :type value: object + """ + + _validation = { + 'name': {'required': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'object'}, + } + + def __init__(self, *, name: str, value, **kwargs) -> None: + super(UserProperty, self).__init__(**kwargs) + self.name = name + self.value = value diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity.py index 6642382c3fd8..0d92d32c12b0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity.py @@ -15,10 +15,12 @@ class ValidationActivity(ControlActivity): """This activity verifies that an external resource exists. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Activity name. + :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str @@ -26,7 +28,7 @@ class ValidationActivity(ControlActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param timeout: Specifies the timeout for the activity to run. If there is no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 @@ -45,7 +47,7 @@ class ValidationActivity(ControlActivity): true, the folder must have at least one file. 
If set to false, the folder must be empty. Type: boolean (or Expression with resultType boolean). :type child_items: object - :param dataset: Validation activity dataset reference. + :param dataset: Required. Validation activity dataset reference. :type dataset: ~azure.mgmt.datafactory.models.DatasetReference """ @@ -69,11 +71,11 @@ class ValidationActivity(ControlActivity): 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, } - def __init__(self, name, dataset, additional_properties=None, description=None, depends_on=None, user_properties=None, timeout=None, sleep=None, minimum_size=None, child_items=None): - super(ValidationActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties) - self.timeout = timeout - self.sleep = sleep - self.minimum_size = minimum_size - self.child_items = child_items - self.dataset = dataset + def __init__(self, **kwargs): + super(ValidationActivity, self).__init__(**kwargs) + self.timeout = kwargs.get('timeout', None) + self.sleep = kwargs.get('sleep', None) + self.minimum_size = kwargs.get('minimum_size', None) + self.child_items = kwargs.get('child_items', None) + self.dataset = kwargs.get('dataset', None) self.type = 'Validation' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity_py3.py new file mode 100644 index 000000000000..f4680400b447 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity_py3.py @@ -0,0 +1,81 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity_py3 import ControlActivity + + +class ValidationActivity(ControlActivity): + """This activity verifies that an external resource exists. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param timeout: Specifies the timeout for the activity to run. If there is + no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 + week as default. Type: string (or Expression with resultType string), + pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type timeout: object + :param sleep: A delay in seconds between validation attempts. If no value + is specified, 10 seconds will be used as the default. Type: integer (or + Expression with resultType integer). 
+ :type sleep: object + :param minimum_size: Can be used if dataset points to a file. The file + must be greater than or equal in size to the value specified. Type: + integer (or Expression with resultType integer). + :type minimum_size: object + :param child_items: Can be used if dataset points to a folder. If set to + true, the folder must have at least one file. If set to false, the folder + must be empty. Type: boolean (or Expression with resultType boolean). + :type child_items: object + :param dataset: Required. Validation activity dataset reference. + :type dataset: ~azure.mgmt.datafactory.models.DatasetReference + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'dataset': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'timeout': {'key': 'typeProperties.timeout', 'type': 'object'}, + 'sleep': {'key': 'typeProperties.sleep', 'type': 'object'}, + 'minimum_size': {'key': 'typeProperties.minimumSize', 'type': 'object'}, + 'child_items': {'key': 'typeProperties.childItems', 'type': 'object'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + } + + def __init__(self, *, name: str, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, timeout=None, sleep=None, minimum_size=None, child_items=None, **kwargs) -> None: + super(ValidationActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.timeout = timeout + self.sleep = sleep + self.minimum_size = minimum_size + self.child_items = child_items + self.dataset = dataset + self.type = 'Validation' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification.py index aac68001fe4c..6d7fd808fa44 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification.py @@ -15,8 +15,10 @@ class VariableSpecification(Model): """Definition of a single variable for a Pipeline. - :param type: Variable type. Possible values include: 'String', 'Bool', - 'Array' + All required parameters must be populated in order to send to Azure. + + :param type: Required. Variable type. Possible values include: 'String', + 'Bool', 'Array' :type type: str or ~azure.mgmt.datafactory.models.VariableType :param default_value: Default value of variable. 
:type default_value: object @@ -31,7 +33,7 @@ class VariableSpecification(Model): 'default_value': {'key': 'defaultValue', 'type': 'object'}, } - def __init__(self, type, default_value=None): - super(VariableSpecification, self).__init__() - self.type = type - self.default_value = default_value + def __init__(self, **kwargs): + super(VariableSpecification, self).__init__(**kwargs) + self.type = kwargs.get('type', None) + self.default_value = kwargs.get('default_value', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification_py3.py new file mode 100644 index 000000000000..d60b3b4b1591 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification_py3.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class VariableSpecification(Model): + """Definition of a single variable for a Pipeline. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Variable type. Possible values include: 'String', + 'Bool', 'Array' + :type type: str or ~azure.mgmt.datafactory.models.VariableType + :param default_value: Default value of variable. + :type default_value: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'default_value': {'key': 'defaultValue', 'type': 'object'}, + } + + def __init__(self, *, type, default_value=None, **kwargs) -> None: + super(VariableSpecification, self).__init__(**kwargs) + self.type = type + self.default_value = default_value diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service.py index c7898ef92a77..6b5e8d0103f5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service.py @@ -15,6 +15,8 @@ class VerticaLinkedService(LinkedService): """Vertica linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class VerticaLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. 
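VariableSpecification is the smallest illustration of the same pattern: its type property becomes Required while default_value stays optional. A short sketch of the resulting behavior on Python 3, where the models package imports the keyword-only _py3 variant; no service connection is needed:

    from azure.mgmt.datafactory.models import VariableSpecification

    # Pipeline variables: 'type' is required, 'default_value' is optional.
    variables = {
        'retryCount': VariableSpecification(type='String', default_value='0'),
        'regions': VariableSpecification(type='Array'),
    }

    # Omitting the required argument now fails immediately with a TypeError
    # rather than later, during request serialization.
    try:
        VariableSpecification()
    except TypeError as err:
        print(err)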
@@ -59,9 +61,9 @@ class VerticaLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None): - super(VerticaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.connection_string = connection_string - self.pwd = pwd - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(VerticaLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.pwd = kwargs.get('pwd', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'Vertica' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service_py3.py new file mode 100644 index 000000000000..3aee3a5ae0f6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class VerticaLinkedService(LinkedService): + """Vertica linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: + super(VerticaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.pwd = pwd + self.encrypted_credential = encrypted_credential + self.type = 'Vertica' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source.py index 8a9d03f58a3d..d0b642f15d38 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source.py @@ -15,6 +15,8 @@ class VerticaSource(CopySource): """A copy activity Vertica source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class VerticaSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). @@ -49,7 +51,7 @@ class VerticaSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(VerticaSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'VerticaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source_py3.py new file mode 100644 index 000000000000..a1c4d755f2b4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class VerticaSource(CopySource): + """A copy activity Vertica source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'VerticaSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset.py index 6463481f2e9d..151a0d000e3f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset.py @@ -15,6 +15,8 @@ class VerticaTableDataset(Dataset): """Vertica dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class VerticaTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. 
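The Vertica models compose in the usual way: the linked service above carries the connection, the dataset that follows names the schema and table, and VerticaSource supplies the query inside a copy activity. A rough sketch under the same assumptions as before (adf_client is an existing DataFactoryManagementClient; the connection string and resource names are placeholders):

    from azure.mgmt.datafactory.models import (
        LinkedServiceResource,
        SecureString,
        VerticaLinkedService,
        VerticaSource,
    )

    # connection_string accepts a SecureString or an
    # AzureKeyVaultSecretReference; SecureString marks the value as a secret
    # so the service does not echo it back on reads.
    vertica_ls = VerticaLinkedService(
        connection_string=SecureString(
            value='Server=vertica.example.com;Port=5433;Database=sales;UID=loader',
        ),
    )

    adf_client.linked_services.create_or_update(
        'exampleResourceGroup', 'exampleFactory', 'exampleVerticaLS',
        LinkedServiceResource(properties=vertica_ls),
    )

    # The copy source adds only 'query' on top of the common CopySource knobs.
    source = VerticaSource(query='SELECT * FROM public.orders WHERE region = 1')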
@@ -39,7 +41,7 @@ class VerticaTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. @@ -72,9 +74,9 @@ class VerticaTableDataset(Dataset): 'vertica_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, vertica_table_dataset_schema=None): - super(VerticaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name - self.table = table - self.vertica_table_dataset_schema = vertica_table_dataset_schema + def __init__(self, **kwargs): + super(VerticaTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.vertica_table_dataset_schema = kwargs.get('vertica_table_dataset_schema', None) self.type = 'VerticaTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset_py3.py new file mode 100644 index 000000000000..4c2fc8da32ad --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class VerticaTableDataset(Dataset): + """Vertica dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. 
+ :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param table: The table name of the Vertica. Type: string (or Expression + with resultType string). + :type table: object + :param vertica_table_dataset_schema: The schema name of the Vertica. Type: + string (or Expression with resultType string). + :type vertica_table_dataset_schema: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'vertica_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, vertica_table_dataset_schema=None, **kwargs) -> None: + super(VerticaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.table = table + self.vertica_table_dataset_schema = vertica_table_dataset_schema + self.type = 'VerticaTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity.py index 88b4ec622a20..91f3decc7473 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity.py @@ -15,10 +15,12 @@ class WaitActivity(ControlActivity): """This activity suspends pipeline execution for the specified interval. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Activity name. + :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str @@ -26,9 +28,9 @@ class WaitActivity(ControlActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. 
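Constructing the Vertica dataset completed above might look like the following sketch (illustrative only; it assumes the usual LinkedServiceReference(reference_name=...) shape from this models package, and all names are placeholders):

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        VerticaTableDataset,
    )

    dataset = VerticaTableDataset(
        # Assumed helper: LinkedServiceReference takes the linked service name.
        linked_service_name=LinkedServiceReference(reference_name="VerticaLS"),
        table="sales",                         # serialized as typeProperties.table
        vertica_table_dataset_schema="public"  # serialized as typeProperties.schema
    )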
:type type: str - :param wait_time_in_seconds: Duration in seconds. + :param wait_time_in_seconds: Required. Duration in seconds. :type wait_time_in_seconds: int """ @@ -48,7 +50,7 @@ class WaitActivity(ControlActivity): 'wait_time_in_seconds': {'key': 'typeProperties.waitTimeInSeconds', 'type': 'int'}, } - def __init__(self, name, wait_time_in_seconds, additional_properties=None, description=None, depends_on=None, user_properties=None): - super(WaitActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties) - self.wait_time_in_seconds = wait_time_in_seconds + def __init__(self, **kwargs): + super(WaitActivity, self).__init__(**kwargs) + self.wait_time_in_seconds = kwargs.get('wait_time_in_seconds', None) self.type = 'Wait' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity_py3.py new file mode 100644 index 000000000000..ff85c9d16733 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity_py3.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity_py3 import ControlActivity + + +class WaitActivity(ControlActivity): + """This activity suspends pipeline execution for the specified interval. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param wait_time_in_seconds: Required. Duration in seconds. 
+ :type wait_time_in_seconds: int + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'wait_time_in_seconds': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'wait_time_in_seconds': {'key': 'typeProperties.waitTimeInSeconds', 'type': 'int'}, + } + + def __init__(self, *, name: str, wait_time_in_seconds: int, additional_properties=None, description: str=None, depends_on=None, user_properties=None, **kwargs) -> None: + super(WaitActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.wait_time_in_seconds = wait_time_in_seconds + self.type = 'Wait' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity.py index cf7b9d40df3d..70264719d52e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity.py @@ -15,10 +15,12 @@ class WebActivity(ExecutionActivity): """Web activity. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Activity name. + :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str @@ -26,18 +28,18 @@ class WebActivity(ExecutionActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param method: Rest API method for target endpoint. Possible values - include: 'GET', 'POST', 'PUT', 'DELETE' + :param method: Required. Rest API method for target endpoint. Possible + values include: 'GET', 'POST', 'PUT', 'DELETE' :type method: str or ~azure.mgmt.datafactory.models.WebActivityMethod - :param url: Web activity target endpoint and path. Type: string (or - Expression with resultType string). + :param url: Required. Web activity target endpoint and path. Type: string + (or Expression with resultType string). :type url: object :param headers: Represents the headers that will be sent to the request. 
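The WaitActivity above needs only its two required parameters; a minimal sketch (activity name is a placeholder):

    from azure.mgmt.datafactory.models import WaitActivity

    # Suspend the pipeline for five minutes.
    wait = WaitActivity(name="WaitFiveMinutes", wait_time_in_seconds=300)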
For example, to set the language and type on a request: "headers" : { @@ -84,13 +86,13 @@ class WebActivity(ExecutionActivity): 'linked_services': {'key': 'typeProperties.linkedServices', 'type': '[LinkedServiceReference]'}, } - def __init__(self, name, method, url, additional_properties=None, description=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, headers=None, body=None, authentication=None, datasets=None, linked_services=None): - super(WebActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy) - self.method = method - self.url = url - self.headers = headers - self.body = body - self.authentication = authentication - self.datasets = datasets - self.linked_services = linked_services + def __init__(self, **kwargs): + super(WebActivity, self).__init__(**kwargs) + self.method = kwargs.get('method', None) + self.url = kwargs.get('url', None) + self.headers = kwargs.get('headers', None) + self.body = kwargs.get('body', None) + self.authentication = kwargs.get('authentication', None) + self.datasets = kwargs.get('datasets', None) + self.linked_services = kwargs.get('linked_services', None) self.type = 'WebActivity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication.py index 3f2d35b9b62e..6ebb193ae5e9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication.py @@ -15,7 +15,10 @@ class WebActivityAuthentication(Model): """Web activity authentication properties. - :param type: Web activity authentication (Basic/ClientCertificate/MSI) + All required parameters must be populated in order to send to Azure. + + :param type: Required. Web activity authentication + (Basic/ClientCertificate/MSI) :type type: str :param pfx: Base64-encoded contents of a PFX file. :type pfx: ~azure.mgmt.datafactory.models.SecureString @@ -41,10 +44,10 @@ class WebActivityAuthentication(Model): 'resource': {'key': 'resource', 'type': 'str'}, } - def __init__(self, type, pfx=None, username=None, password=None, resource=None): - super(WebActivityAuthentication, self).__init__() - self.type = type - self.pfx = pfx - self.username = username - self.password = password - self.resource = resource + def __init__(self, **kwargs): + super(WebActivityAuthentication, self).__init__(**kwargs) + self.type = kwargs.get('type', None) + self.pfx = kwargs.get('pfx', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.resource = kwargs.get('resource', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication_py3.py new file mode 100644 index 000000000000..4c2b68ba7161 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication_py3.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class WebActivityAuthentication(Model): + """Web activity authentication properties. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Web activity authentication + (Basic/ClientCertificate/MSI) + :type type: str + :param pfx: Base64-encoded contents of a PFX file. + :type pfx: ~azure.mgmt.datafactory.models.SecureString + :param username: Web activity authentication user name for basic + authentication. + :type username: str + :param password: Password for the PFX file or basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecureString + :param resource: Resource for which Azure Auth token will be requested + when using MSI Authentication. + :type resource: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'pfx': {'key': 'pfx', 'type': 'SecureString'}, + 'username': {'key': 'username', 'type': 'str'}, + 'password': {'key': 'password', 'type': 'SecureString'}, + 'resource': {'key': 'resource', 'type': 'str'}, + } + + def __init__(self, *, type: str, pfx=None, username: str=None, password=None, resource: str=None, **kwargs) -> None: + super(WebActivityAuthentication, self).__init__(**kwargs) + self.type = type + self.pfx = pfx + self.username = username + self.password = password + self.resource = resource diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_py3.py new file mode 100644 index 000000000000..9a64114a00c6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_py3.py @@ -0,0 +1,98 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class WebActivity(ExecutionActivity): + """Web activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. 
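The authentication model just completed composes with WebActivity; a hedged sketch of a POST call secured with managed identity (the endpoint URL, resource URI, and body are placeholders; WebActivity's keyword-only signature appears in the py3 file below):

    from azure.mgmt.datafactory.models import (
        WebActivity,
        WebActivityAuthentication,
    )

    # MSI authentication: only 'type' and 'resource' are needed.
    auth = WebActivityAuthentication(
        type="MSI",
        resource="https://management.azure.com/",  # placeholder resource URI
    )

    activity = WebActivity(
        name="CallEndpoint",
        method="POST",                          # one of GET/POST/PUT/DELETE
        url="https://example.com/api/refresh",  # placeholder endpoint
        body='{"trigger": "adf"}',              # required for POST/PUT methods
        authentication=auth,
    )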
+ :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param method: Required. Rest API method for target endpoint. Possible + values include: 'GET', 'POST', 'PUT', 'DELETE' + :type method: str or ~azure.mgmt.datafactory.models.WebActivityMethod + :param url: Required. Web activity target endpoint and path. Type: string + (or Expression with resultType string). + :type url: object + :param headers: Represents the headers that will be sent to the request. + For example, to set the language and type on a request: "headers" : { + "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: + string (or Expression with resultType string). + :type headers: object + :param body: Represents the payload that will be sent to the endpoint. + Required for POST/PUT method, not allowed for GET method Type: string (or + Expression with resultType string). + :type body: object + :param authentication: Authentication method used for calling the + endpoint. + :type authentication: + ~azure.mgmt.datafactory.models.WebActivityAuthentication + :param datasets: List of datasets passed to web endpoint. + :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference] + :param linked_services: List of linked services passed to web endpoint. + :type linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'method': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'method': {'key': 'typeProperties.method', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, + 'body': {'key': 'typeProperties.body', 'type': 'object'}, + 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'}, + 'datasets': {'key': 'typeProperties.datasets', 'type': '[DatasetReference]'}, + 'linked_services': {'key': 'typeProperties.linkedServices', 'type': '[LinkedServiceReference]'}, + } + + def __init__(self, *, name: str, method, url, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, headers=None, body=None, authentication=None, datasets=None, linked_services=None, **kwargs) -> None: + super(WebActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.method = method + self.url = url + self.headers = headers + self.body = body + self.authentication = authentication + self.datasets = datasets + self.linked_services = linked_services + self.type = 'WebActivity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication.py index 4b95d5b488ff..d3bd2f2594ab 100644 --- 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication.py @@ -16,11 +16,13 @@ class WebAnonymousAuthentication(WebLinkedServiceTypeProperties): """A WebLinkedService that uses anonymous authentication to communicate with an HTTP endpoint. - :param url: The URL of the web service endpoint, e.g. + All required parameters must be populated in order to send to Azure. + + :param url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: string (or Expression with resultType string). :type url: object - :param authentication_type: Constant filled by server. + :param authentication_type: Required. Constant filled by server. :type authentication_type: str """ @@ -29,6 +31,11 @@ class WebAnonymousAuthentication(WebLinkedServiceTypeProperties): 'authentication_type': {'required': True}, } - def __init__(self, url): - super(WebAnonymousAuthentication, self).__init__(url=url) + _attribute_map = { + 'url': {'key': 'url', 'type': 'object'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(WebAnonymousAuthentication, self).__init__(**kwargs) self.authentication_type = 'Anonymous' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication_py3.py new file mode 100644 index 000000000000..ee7a4e780a1f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication_py3.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .web_linked_service_type_properties_py3 import WebLinkedServiceTypeProperties + + +class WebAnonymousAuthentication(WebLinkedServiceTypeProperties): + """A WebLinkedService that uses anonymous authentication to communicate with + an HTTP endpoint. + + All required parameters must be populated in order to send to Azure. + + :param url: Required. The URL of the web service endpoint, e.g. + http://www.microsoft.com . Type: string (or Expression with resultType + string). + :type url: object + :param authentication_type: Required. Constant filled by server. 
+ :type authentication_type: str + """ + + _validation = { + 'url': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'url': {'key': 'url', 'type': 'object'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + } + + def __init__(self, *, url, **kwargs) -> None: + super(WebAnonymousAuthentication, self).__init__(url=url, **kwargs) + self.authentication_type = 'Anonymous' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication.py index f32f30499a14..90050f7dae28 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication.py @@ -16,16 +16,18 @@ class WebBasicAuthentication(WebLinkedServiceTypeProperties): """A WebLinkedService that uses basic authentication to communicate with an HTTP endpoint. - :param url: The URL of the web service endpoint, e.g. + All required parameters must be populated in order to send to Azure. + + :param url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: string (or Expression with resultType string). :type url: object - :param authentication_type: Constant filled by server. + :param authentication_type: Required. Constant filled by server. :type authentication_type: str - :param username: User name for Basic authentication. Type: string (or - Expression with resultType string). + :param username: Required. User name for Basic authentication. Type: + string (or Expression with resultType string). :type username: object - :param password: The password for Basic authentication. + :param password: Required. The password for Basic authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase """ @@ -43,8 +45,8 @@ class WebBasicAuthentication(WebLinkedServiceTypeProperties): 'password': {'key': 'password', 'type': 'SecretBase'}, } - def __init__(self, url, username, password): - super(WebBasicAuthentication, self).__init__(url=url) - self.username = username - self.password = password + def __init__(self, **kwargs): + super(WebBasicAuthentication, self).__init__(**kwargs) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) self.authentication_type = 'Basic' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication_py3.py new file mode 100644 index 000000000000..71577ec86565 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication_py3.py @@ -0,0 +1,52 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .web_linked_service_type_properties_py3 import WebLinkedServiceTypeProperties + + +class WebBasicAuthentication(WebLinkedServiceTypeProperties): + """A WebLinkedService that uses basic authentication to communicate with an + HTTP endpoint. + + All required parameters must be populated in order to send to Azure. + + :param url: Required. The URL of the web service endpoint, e.g. + http://www.microsoft.com . Type: string (or Expression with resultType + string). + :type url: object + :param authentication_type: Required. Constant filled by server. + :type authentication_type: str + :param username: Required. User name for Basic authentication. Type: + string (or Expression with resultType string). + :type username: object + :param password: Required. The password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + """ + + _validation = { + 'url': {'required': True}, + 'authentication_type': {'required': True}, + 'username': {'required': True}, + 'password': {'required': True}, + } + + _attribute_map = { + 'url': {'key': 'url', 'type': 'object'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'username': {'key': 'username', 'type': 'object'}, + 'password': {'key': 'password', 'type': 'SecretBase'}, + } + + def __init__(self, *, url, username, password, **kwargs) -> None: + super(WebBasicAuthentication, self).__init__(url=url, **kwargs) + self.username = username + self.password = password + self.authentication_type = 'Basic' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication.py index 2f4103a772ca..671808ca85d1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication.py @@ -18,15 +18,17 @@ class WebClientCertificateAuthentication(WebLinkedServiceTypeProperties): authentication; the server must also provide valid credentials to the client. - :param url: The URL of the web service endpoint, e.g. + All required parameters must be populated in order to send to Azure. + + :param url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: string (or Expression with resultType string). :type url: object - :param authentication_type: Constant filled by server. + :param authentication_type: Required. Constant filled by server. :type authentication_type: str - :param pfx: Base64-encoded contents of a PFX file. + :param pfx: Required. Base64-encoded contents of a PFX file. :type pfx: ~azure.mgmt.datafactory.models.SecretBase - :param password: Password for the PFX file. + :param password: Required. Password for the PFX file. 
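A sketch of the basic-authentication variant completed above (credentials are placeholders; SecureString(value=...) is assumed to be the in-box SecretBase implementation from this models package):

    from azure.mgmt.datafactory.models import (
        SecureString,
        WebBasicAuthentication,
    )

    props = WebBasicAuthentication(
        url="https://example.com/service",        # placeholder endpoint
        username="svc-user",                      # placeholder user name
        password=SecureString(value="<secret>"),  # assumed SecretBase subclass
    )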
:type password: ~azure.mgmt.datafactory.models.SecretBase """ @@ -44,8 +46,8 @@ class WebClientCertificateAuthentication(WebLinkedServiceTypeProperties): 'password': {'key': 'password', 'type': 'SecretBase'}, } - def __init__(self, url, pfx, password): - super(WebClientCertificateAuthentication, self).__init__(url=url) - self.pfx = pfx - self.password = password + def __init__(self, **kwargs): + super(WebClientCertificateAuthentication, self).__init__(**kwargs) + self.pfx = kwargs.get('pfx', None) + self.password = kwargs.get('password', None) self.authentication_type = 'ClientCertificate' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication_py3.py new file mode 100644 index 000000000000..7ac859b677a8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication_py3.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .web_linked_service_type_properties_py3 import WebLinkedServiceTypeProperties + + +class WebClientCertificateAuthentication(WebLinkedServiceTypeProperties): + """A WebLinkedService that uses client certificate based authentication to + communicate with an HTTP endpoint. This scheme follows mutual + authentication; the server must also provide valid credentials to the + client. + + All required parameters must be populated in order to send to Azure. + + :param url: Required. The URL of the web service endpoint, e.g. + http://www.microsoft.com . Type: string (or Expression with resultType + string). + :type url: object + :param authentication_type: Required. Constant filled by server. + :type authentication_type: str + :param pfx: Required. Base64-encoded contents of a PFX file. + :type pfx: ~azure.mgmt.datafactory.models.SecretBase + :param password: Required. Password for the PFX file. 
+ :type password: ~azure.mgmt.datafactory.models.SecretBase + """ + + _validation = { + 'url': {'required': True}, + 'authentication_type': {'required': True}, + 'pfx': {'required': True}, + 'password': {'required': True}, + } + + _attribute_map = { + 'url': {'key': 'url', 'type': 'object'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'pfx': {'key': 'pfx', 'type': 'SecretBase'}, + 'password': {'key': 'password', 'type': 'SecretBase'}, + } + + def __init__(self, *, url, pfx, password, **kwargs) -> None: + super(WebClientCertificateAuthentication, self).__init__(url=url, **kwargs) + self.pfx = pfx + self.password = password + self.authentication_type = 'ClientCertificate' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity.py index 738430128169..1c648c42c3e2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity.py @@ -18,10 +18,12 @@ class WebHookActivity(ControlActivity): Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Activity name. + :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str @@ -29,12 +31,13 @@ class WebHookActivity(ControlActivity): :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :ivar method: Rest API method for target endpoint. Default value: "POST" . + :ivar method: Required. Rest API method for target endpoint. Default + value: "POST" . :vartype method: str - :param url: WebHook activity target endpoint and path. Type: string (or - Expression with resultType string). + :param url: Required. WebHook activity target endpoint and path. Type: + string (or Expression with resultType string). :type url: object :param timeout: The timeout within which the webhook should be called back. If there is no value specified, it defaults to 10 minutes. 
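The client-certificate variant finished above takes the PFX contents and password as SecretBase values; a hedged sketch using the same assumed SecureString helper (all values are placeholders):

    from azure.mgmt.datafactory.models import (
        SecureString,
        WebClientCertificateAuthentication,
    )

    props = WebClientCertificateAuthentication(
        url="https://example.com/service",        # placeholder endpoint
        pfx=SecureString(value="<base64-pfx>"),   # base64 contents of the PFX file
        password=SecureString(value="<secret>"),  # PFX password
    )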
Type: @@ -79,11 +82,11 @@ class WebHookActivity(ControlActivity): method = "POST" - def __init__(self, name, url, additional_properties=None, description=None, depends_on=None, user_properties=None, timeout=None, headers=None, body=None, authentication=None): - super(WebHookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties) - self.url = url - self.timeout = timeout - self.headers = headers - self.body = body - self.authentication = authentication + def __init__(self, **kwargs): + super(WebHookActivity, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.timeout = kwargs.get('timeout', None) + self.headers = kwargs.get('headers', None) + self.body = kwargs.get('body', None) + self.authentication = kwargs.get('authentication', None) self.type = 'WebHook' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity_py3.py new file mode 100644 index 000000000000..40cdc6f732da --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity_py3.py @@ -0,0 +1,92 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity_py3 import ControlActivity + + +class WebHookActivity(ControlActivity): + """WebHook activity. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :ivar method: Required. Rest API method for target endpoint. Default + value: "POST" . + :vartype method: str + :param url: Required. WebHook activity target endpoint and path. Type: + string (or Expression with resultType string). + :type url: object + :param timeout: The timeout within which the webhook should be called + back. If there is no value specified, it defaults to 10 minutes. Type: + string. Pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type timeout: str + :param headers: Represents the headers that will be sent to the request. + For example, to set the language and type on a request: "headers" : { + "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: + string (or Expression with resultType string). + :type headers: object + :param body: Represents the payload that will be sent to the endpoint. 
+ Required for POST/PUT method, not allowed for GET method Type: string (or + Expression with resultType string). + :type body: object + :param authentication: Authentication method used for calling the + endpoint. + :type authentication: + ~azure.mgmt.datafactory.models.WebActivityAuthentication + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'method': {'required': True, 'constant': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'method': {'key': 'typeProperties.method', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'timeout': {'key': 'typeProperties.timeout', 'type': 'str'}, + 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, + 'body': {'key': 'typeProperties.body', 'type': 'object'}, + 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'}, + } + + method = "POST" + + def __init__(self, *, name: str, url, additional_properties=None, description: str=None, depends_on=None, user_properties=None, timeout: str=None, headers=None, body=None, authentication=None, **kwargs) -> None: + super(WebHookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.url = url + self.timeout = timeout + self.headers = headers + self.body = body + self.authentication = authentication + self.type = 'WebHook' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service.py index 990c613e5b0a..18fadba3f3ee 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service.py @@ -15,6 +15,8 @@ class WebLinkedService(LinkedService): """Web linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,9 +31,9 @@ class WebLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param type_properties: Web linked service properties. + :param type_properties: Required. Web linked service properties. 
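A sketch of the WebHook activity completed above (URL and body are placeholders; 'method' is a class constant fixed to POST, so it is not passed to the constructor):

    from azure.mgmt.datafactory.models import WebHookActivity

    hook = WebHookActivity(
        name="NotifyCallback",
        url="https://example.com/hook",  # placeholder callback endpoint
        timeout="00:10:00",              # matches the documented d.hh:mm:ss pattern
        body='{"status": "started"}',
    )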
:type type_properties: ~azure.mgmt.datafactory.models.WebLinkedServiceTypeProperties """ @@ -51,7 +53,7 @@ class WebLinkedService(LinkedService): 'type_properties': {'key': 'typeProperties', 'type': 'WebLinkedServiceTypeProperties'}, } - def __init__(self, type_properties, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None): - super(WebLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.type_properties = type_properties + def __init__(self, **kwargs): + super(WebLinkedService, self).__init__(**kwargs) + self.type_properties = kwargs.get('type_properties', None) self.type = 'Web' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_py3.py new file mode 100644 index 000000000000..3e491b0fac4d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_py3.py @@ -0,0 +1,59 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class WebLinkedService(LinkedService): + """Web linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param type_properties: Required. Web linked service properties. 
+ :type type_properties: + ~azure.mgmt.datafactory.models.WebLinkedServiceTypeProperties + """ + + _validation = { + 'type': {'required': True}, + 'type_properties': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'type_properties': {'key': 'typeProperties', 'type': 'WebLinkedServiceTypeProperties'}, + } + + def __init__(self, *, type_properties, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: + super(WebLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type_properties = type_properties + self.type = 'Web' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties.py index 684401273413..22290e80b19f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties.py @@ -20,11 +20,13 @@ class WebLinkedServiceTypeProperties(Model): sub-classes are: WebClientCertificateAuthentication, WebBasicAuthentication, WebAnonymousAuthentication - :param url: The URL of the web service endpoint, e.g. + All required parameters must be populated in order to send to Azure. + + :param url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: string (or Expression with resultType string). :type url: object - :param authentication_type: Constant filled by server. + :param authentication_type: Required. Constant filled by server. :type authentication_type: str """ @@ -42,7 +44,7 @@ class WebLinkedServiceTypeProperties(Model): 'authentication_type': {'ClientCertificate': 'WebClientCertificateAuthentication', 'Basic': 'WebBasicAuthentication', 'Anonymous': 'WebAnonymousAuthentication'} } - def __init__(self, url): - super(WebLinkedServiceTypeProperties, self).__init__() - self.url = url + def __init__(self, **kwargs): + super(WebLinkedServiceTypeProperties, self).__init__(**kwargs) + self.url = kwargs.get('url', None) self.authentication_type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties_py3.py new file mode 100644 index 000000000000..1c162c2f1004 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties_py3.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
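Putting the polymorphic typeProperties together with the linked service wrapper defined above, a brief sketch (the endpoint is a placeholder; WebAnonymousAuthentication was completed earlier in this patch):

    from azure.mgmt.datafactory.models import (
        WebAnonymousAuthentication,
        WebLinkedService,
    )

    linked_service = WebLinkedService(
        type_properties=WebAnonymousAuthentication(
            url="https://example.com/data",  # placeholder endpoint
        )
    )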
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class WebLinkedServiceTypeProperties(Model): + """Base definition of WebLinkedServiceTypeProperties, this typeProperties is + polymorphic based on authenticationType, so not flattened in SDK models. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: WebClientCertificateAuthentication, + WebBasicAuthentication, WebAnonymousAuthentication + + All required parameters must be populated in order to send to Azure. + + :param url: Required. The URL of the web service endpoint, e.g. + http://www.microsoft.com . Type: string (or Expression with resultType + string). + :type url: object + :param authentication_type: Required. Constant filled by server. + :type authentication_type: str + """ + + _validation = { + 'url': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'url': {'key': 'url', 'type': 'object'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + } + + _subtype_map = { + 'authentication_type': {'ClientCertificate': 'WebClientCertificateAuthentication', 'Basic': 'WebBasicAuthentication', 'Anonymous': 'WebAnonymousAuthentication'} + } + + def __init__(self, *, url, **kwargs) -> None: + super(WebLinkedServiceTypeProperties, self).__init__(**kwargs) + self.url = url + self.authentication_type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source.py index 9e8a62e52127..c5d3a2a8f00a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source.py @@ -15,6 +15,8 @@ class WebSource(CopySource): """A copy activity source for web page table. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class WebSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. 
:type type: str """ @@ -37,6 +39,14 @@ class WebSource(CopySource): 'type': {'required': True}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None): - super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(WebSource, self).__init__(**kwargs) self.type = 'WebSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source_py3.py new file mode 100644 index 000000000000..684e1d4233cc --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source_py3.py @@ -0,0 +1,52 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class WebSource(CopySource): + """A copy activity source for web page table. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: + super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'WebSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset.py index 4933b4aa17c8..3980fe3d885a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset.py @@ -15,6 +15,8 @@ class WebTableDataset(Dataset): """The dataset points to a HTML table in the web page. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class WebTableDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,10 +41,10 @@ class WebTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param index: The zero-based index of the table in the web page. Type: - integer (or Expression with resultType integer), minimum: 0. + :param index: Required. The zero-based index of the table in the web page. + Type: integer (or Expression with resultType integer), minimum: 0. :type index: object :param path: The relative URL to the web page from the linked service URL. Type: string (or Expression with resultType string). 
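The WebSource completed above exposes only the shared CopySource knobs; a minimal sketch:

    from azure.mgmt.datafactory.models import WebSource

    source = WebSource(
        source_retry_count=3,         # retry up to three times
        source_retry_wait="00:00:30"  # matches the documented timespan pattern
    )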
@@ -69,8 +71,8 @@ class WebTableDataset(Dataset): 'path': {'key': 'typeProperties.path', 'type': 'object'}, } - def __init__(self, linked_service_name, index, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, path=None): - super(WebTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.index = index - self.path = path + def __init__(self, **kwargs): + super(WebTableDataset, self).__init__(**kwargs) + self.index = kwargs.get('index', None) + self.path = kwargs.get('path', None) self.type = 'WebTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset_py3.py new file mode 100644 index 000000000000..edb2344c35d2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset_py3.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class WebTableDataset(Dataset): + """The dataset points to a HTML table in the web page. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param index: Required. The zero-based index of the table in the web page. + Type: integer (or Expression with resultType integer), minimum: 0. + :type index: object + :param path: The relative URL to the web page from the linked service URL. + Type: string (or Expression with resultType string). 
+ :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'index': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'index': {'key': 'typeProperties.index', 'type': 'object'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, index, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, path=None, **kwargs) -> None: + super(WebTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.index = index + self.path = path + self.type = 'WebTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service.py index afdac985e31f..24973f577133 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service.py @@ -15,6 +15,8 @@ class XeroLinkedService(LinkedService): """Xero Service linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,9 +31,10 @@ class XeroLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param host: The endpoint of the Xero server. (i.e. api.xero.com) + :param host: Required. The endpoint of the Xero server. (i.e. + api.xero.com) :type host: object :param consumer_key: The consumer key associated with the Xero application. 
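In the hunk above, host becomes required on XeroLinkedService while the credentials stay optional SecretBase values. A sketch of constructing the Python 3 model, assuming SecureString(value=...) as the concrete SecretBase; every credential below is a dummy placeholder:

    from azure.mgmt.datafactory.models import SecureString, XeroLinkedService

    xero_ls = XeroLinkedService(
        host='api.xero.com',  # required: the endpoint of the Xero server
        consumer_key=SecureString(value='<consumer-key>'),
        # The .pem text must keep its Unix line endings (\n).
        private_key=SecureString(value='<private-key-pem-text>'),
        use_encrypted_endpoints=True,
    )

Omitting host should surface as a validation error when the linked service is serialized, since 'host': {'required': True} is enforced by the _validation map.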
@@ -78,13 +81,13 @@ class XeroLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, host, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, consumer_key=None, private_key=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None): - super(XeroLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.host = host - self.consumer_key = consumer_key - self.private_key = private_key - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(XeroLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.consumer_key = kwargs.get('consumer_key', None) + self.private_key = kwargs.get('private_key', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'Xero' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service_py3.py new file mode 100644 index 000000000000..433c65ade739 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service_py3.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class XeroLinkedService(LinkedService): + """Xero Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The endpoint of the Xero server. (i.e. + api.xero.com) + :type host: object + :param consumer_key: The consumer key associated with the Xero + application. 
+ :type consumer_key: ~azure.mgmt.datafactory.models.SecretBase
+ :param private_key: The private key from the .pem file that was generated
+ for your Xero private application. You must include all the text from the
+ .pem file, including the Unix line endings(\n).
+ :type private_key: ~azure.mgmt.datafactory.models.SecretBase
+ :param use_encrypted_endpoints: Specifies whether the data source
+ endpoints are encrypted using HTTPS. The default value is true.
+ :type use_encrypted_endpoints: object
+ :param use_host_verification: Specifies whether to require the host name
+ in the server's certificate to match the host name of the server when
+ connecting over SSL. The default value is true.
+ :type use_host_verification: object
+ :param use_peer_verification: Specifies whether to verify the identity of
+ the server when connecting over SSL. The default value is true.
+ :type use_peer_verification: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'host': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'host': {'key': 'typeProperties.host', 'type': 'object'},
+ 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'SecretBase'},
+ 'private_key': {'key': 'typeProperties.privateKey', 'type': 'SecretBase'},
+ 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
+ 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
+ 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, consumer_key=None, private_key=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None:
+ super(XeroLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+ self.host = host
+ self.consumer_key = consumer_key
+ self.private_key = private_key
+ self.use_encrypted_endpoints = use_encrypted_endpoints
+ self.use_host_verification = use_host_verification
+ self.use_peer_verification = use_peer_verification
+ self.encrypted_credential = encrypted_credential
+ self.type = 'Xero'
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset.py
index d303efa94b46..53c5edd44cec 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset.py
@@ -15,6 +15,8 @@ class
XeroObjectDataset(Dataset): """Xero Service dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class XeroObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class XeroObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -64,7 +66,7 @@ class XeroObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): - super(XeroObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name + def __init__(self, **kwargs): + super(XeroObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) self.type = 'XeroObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset_py3.py new file mode 100644 index 000000000000..673d41e1771e --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class XeroObjectDataset(Dataset): + """Xero Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. 
Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(XeroObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'XeroObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source.py index 2fa6679fda05..a37852a5b419 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source.py @@ -15,6 +15,8 @@ class XeroSource(CopySource): """A copy activity Xero Service source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class XeroSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
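XeroSource keeps query optional while type becomes a required server-filled constant. A short sketch of the Python 3 source model; the query text is illustrative, and source_retry_wait follows the documented hh:mm:ss pattern:

    from azure.mgmt.datafactory.models import XeroSource

    source = XeroSource(
        query='SELECT * FROM Contacts',  # optional; an ADF expression object also works
        source_retry_count=3,
        source_retry_wait='00:00:30',
    )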
@@ -49,7 +51,7 @@ class XeroSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(XeroSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'XeroSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source_py3.py new file mode 100644 index 000000000000..bbee6c6fa1f0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class XeroSource(CopySource): + """A copy activity Xero Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'XeroSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service.py index e4497be6cb19..fe34dff77ea9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service.py @@ -15,6 +15,8 @@ class ZohoLinkedService(LinkedService): """Zoho server linked service. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,9 +31,9 @@ class ZohoLinkedService(LinkedService): :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str - :param endpoint: The endpoint of the Zoho server. (i.e. + :param endpoint: Required. The endpoint of the Zoho server. (i.e. crm.zoho.com/crm/private) :type endpoint: object :param access_token: The access token for Zoho authentication. 
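ZohoLinkedService now requires endpoint, with access_token as an optional SecretBase. A construction sketch under the same assumptions as the Xero example above (SecureString as the concrete secret type, dummy values):

    from azure.mgmt.datafactory.models import SecureString, ZohoLinkedService

    zoho_ls = ZohoLinkedService(
        endpoint='crm.zoho.com/crm/private',  # required Zoho endpoint
        access_token=SecureString(value='<zoho-access-token>'),
        use_encrypted_endpoints=True,
    )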
@@ -72,12 +74,12 @@ class ZohoLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, endpoint, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, access_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None): - super(ZohoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations) - self.endpoint = endpoint - self.access_token = access_token - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential + def __init__(self, **kwargs): + super(ZohoLinkedService, self).__init__(**kwargs) + self.endpoint = kwargs.get('endpoint', None) + self.access_token = kwargs.get('access_token', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'Zoho' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service_py3.py new file mode 100644 index 000000000000..f82f6221592b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service_py3.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class ZohoLinkedService(LinkedService): + """Zoho server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of the Zoho server. (i.e. + crm.zoho.com/crm/private) + :type endpoint: object + :param access_token: The access token for Zoho authentication. + :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. 
+ :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, endpoint, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(ZohoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.endpoint = endpoint + self.access_token = access_token + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Zoho' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset.py index 531272808f3c..062d508860a6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset.py @@ -15,6 +15,8 @@ class ZohoObjectDataset(Dataset): """Zoho server dataset. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -27,7 +29,7 @@ class ZohoObjectDataset(Dataset): dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object - :param linked_service_name: Linked service reference. + :param linked_service_name: Required. Linked service reference. 
:type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. @@ -39,7 +41,7 @@ class ZohoObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). @@ -64,7 +66,7 @@ class ZohoObjectDataset(Dataset): 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } - def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None): - super(ZohoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder) - self.table_name = table_name + def __init__(self, **kwargs): + super(ZohoObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) self.type = 'ZohoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset_py3.py new file mode 100644 index 000000000000..ef5a67d4fe35 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class ZohoObjectDataset(Dataset): + """Zoho server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. 
Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(ZohoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'ZohoObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source.py index 763b95296d1a..274c6fc09f19 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source.py @@ -15,6 +15,8 @@ class ZohoSource(CopySource): """A copy activity Zoho server source. + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] @@ -29,7 +31,7 @@ class ZohoSource(CopySource): for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param type: Constant filled by server. + :param type: Required. Constant filled by server. :type type: str :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
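Stepping back to the zoho_object_dataset hunks above: the Python 2 module now reads its parameters from **kwargs while the Python 3 module keeps keyword-only arguments, so the same call shape works on both. A sketch for ZohoObjectDataset; the linked service and table names are placeholders:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, ZohoObjectDataset)

    zoho_ref = LinkedServiceReference(reference_name='ExampleZohoLinkedService')

    # Works against both modules: py3 enforces keyword-only arguments,
    # py2 pulls the same names out of **kwargs.
    dataset = ZohoObjectDataset(linked_service_name=zoho_ref, table_name='Leads')

One behavioral note on the kwargs form: a missing linked_service_name no longer fails at construction time; it should instead surface as a validation error when the dataset is serialized, via 'linked_service_name': {'required': True}.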
@@ -49,7 +51,7 @@ class ZohoSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None): - super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections) - self.query = query + def __init__(self, **kwargs): + super(ZohoSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) self.type = 'ZohoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source_py3.py new file mode 100644 index 000000000000..6d7dc29bdf8a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class ZohoSource(CopySource): + """A copy activity Zoho server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'ZohoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/activity_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/activity_runs_operations.py index ec8288ad562a..f338a1a9c835 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/activity_runs_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/activity_runs_operations.py @@ -76,6 +76,7 @@ def query_by_pipeline_run( # Construct headers header_parameters = {} + header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) @@ -88,9 +89,8 @@ def query_by_pipeline_run( body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') # Construct and send request - request = self._client.post(url, query_parameters) - response = self._client.send( - request, header_parameters, body_content, stream=False, **operation_config) + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/datasets_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/datasets_operations.py index d2768372d196..278815d03479 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/datasets_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/datasets_operations.py @@ -77,7 +77,7 @@ def internal_paging(next_link=None, raw=False): # Construct headers header_parameters = {} - header_parameters['Content-Type'] = 'application/json; charset=utf-8' + header_parameters['Accept'] = 'application/json' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: @@ -86,9 +86,8 @@ def internal_paging(next_link=None, raw=False): header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request - request = self._client.get(url, query_parameters) - response = self._client.send( - request, header_parameters, stream=False, **operation_config) + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, 
**operation_config) if response.status_code not in [200]: exp = CloudError(response) @@ -152,6 +151,7 @@ def create_or_update( # Construct headers header_parameters = {} + header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) @@ -166,9 +166,8 @@ def create_or_update( body_content = self._serialize.body(dataset, 'DatasetResource') # Construct and send request - request = self._client.put(url, query_parameters) - response = self._client.send( - request, header_parameters, body_content, stream=False, **operation_config) + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) @@ -227,7 +226,7 @@ def get( # Construct headers header_parameters = {} - header_parameters['Content-Type'] = 'application/json; charset=utf-8' + header_parameters['Accept'] = 'application/json' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: @@ -238,8 +237,8 @@ def get( header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request - request = self._client.get(url, query_parameters) - response = self._client.send(request, header_parameters, stream=False, **operation_config) + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200, 304]: exp = CloudError(response) @@ -293,7 +292,6 @@ def delete( # Construct headers header_parameters = {} - header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: @@ -302,8 +300,8 @@ def delete( header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request - request = self._client.delete(url, query_parameters) - response = self._client.send(request, header_parameters, stream=False, **operation_config) + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200, 204]: exp = CloudError(response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/exposure_control_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/exposure_control_operations.py index bacdfbf2d591..4a648d96586c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/exposure_control_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/exposure_control_operations.py @@ -73,6 +73,7 @@ def get_feature_value( # Construct headers header_parameters = {} + header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) @@ -85,9 +86,8 @@ def get_feature_value( body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest') # Construct 
and send request - request = self._client.post(url, query_parameters) - response = self._client.send( - request, header_parameters, body_content, stream=False, **operation_config) + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) @@ -145,6 +145,7 @@ def get_feature_value_by_factory( # Construct headers header_parameters = {} + header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) @@ -157,9 +158,8 @@ def get_feature_value_by_factory( body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest') # Construct and send request - request = self._client.post(url, query_parameters) - response = self._client.send( - request, header_parameters, body_content, stream=False, **operation_config) + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/factories_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/factories_operations.py index 938fded17c2e..b06c12f3e8c5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/factories_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/factories_operations.py @@ -71,7 +71,7 @@ def internal_paging(next_link=None, raw=False): # Construct headers header_parameters = {} - header_parameters['Content-Type'] = 'application/json; charset=utf-8' + header_parameters['Accept'] = 'application/json' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: @@ -80,9 +80,8 @@ def internal_paging(next_link=None, raw=False): header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request - request = self._client.get(url, query_parameters) - response = self._client.send( - request, header_parameters, stream=False, **operation_config) + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) @@ -139,6 +138,7 @@ def configure_factory_repo( # Construct headers header_parameters = {} + header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) @@ -151,9 +151,8 @@ def configure_factory_repo( body_content = self._serialize.body(factory_repo_update, 'FactoryRepoUpdate') # Construct and send request - request = self._client.post(url, query_parameters) - response = self._client.send( - request, header_parameters, body_content, stream=False, **operation_config) + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) @@ 
-209,7 +208,7 @@ def internal_paging(next_link=None, raw=False): # Construct headers header_parameters = {} - header_parameters['Content-Type'] = 'application/json; charset=utf-8' + header_parameters['Accept'] = 'application/json' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: @@ -218,9 +217,8 @@ def internal_paging(next_link=None, raw=False): header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request - request = self._client.get(url, query_parameters) - response = self._client.send( - request, header_parameters, stream=False, **operation_config) + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) @@ -279,6 +277,7 @@ def create_or_update( # Construct headers header_parameters = {} + header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) @@ -293,9 +292,8 @@ def create_or_update( body_content = self._serialize.body(factory, 'Factory') # Construct and send request - request = self._client.put(url, query_parameters) - response = self._client.send( - request, header_parameters, body_content, stream=False, **operation_config) + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) @@ -353,6 +351,7 @@ def update( # Construct headers header_parameters = {} + header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) @@ -365,9 +364,8 @@ def update( body_content = self._serialize.body(factory_update_parameters, 'FactoryUpdateParameters') # Construct and send request - request = self._client.patch(url, query_parameters) - response = self._client.send( - request, header_parameters, body_content, stream=False, **operation_config) + request = self._client.patch(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) @@ -423,7 +421,7 @@ def get( # Construct headers header_parameters = {} - header_parameters['Content-Type'] = 'application/json; charset=utf-8' + header_parameters['Accept'] = 'application/json' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: @@ -434,8 +432,8 @@ def get( header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request - request = self._client.get(url, query_parameters) - response = self._client.send(request, header_parameters, stream=False, **operation_config) + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200, 304]: exp = CloudError(response) @@ -486,7 +484,6 @@ def delete( # Construct headers header_parameters = {} 
- header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: @@ -495,8 +492,8 @@ def delete( header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request - request = self._client.delete(url, query_parameters) - response = self._client.send(request, header_parameters, stream=False, **operation_config) + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200, 204]: exp = CloudError(response) @@ -545,6 +542,7 @@ def get_git_hub_access_token( # Construct headers header_parameters = {} + header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) @@ -557,9 +555,8 @@ def get_git_hub_access_token( body_content = self._serialize.body(git_hub_access_token_request, 'GitHubAccessTokenRequest') # Construct and send request - request = self._client.post(url, query_parameters) - response = self._client.send( - request, header_parameters, body_content, stream=False, **operation_config) + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) @@ -613,6 +610,7 @@ def get_data_plane_access( # Construct headers header_parameters = {} + header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) @@ -625,9 +623,8 @@ def get_data_plane_access( body_content = self._serialize.body(policy, 'UserAccessPolicy') # Construct and send request - request = self._client.post(url, query_parameters) - response = self._client.send( - request, header_parameters, body_content, stream=False, **operation_config) + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_nodes_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_nodes_operations.py index c5609f8e3f57..81467b9e3385 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_nodes_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_nodes_operations.py @@ -78,7 +78,7 @@ def get( # Construct headers header_parameters = {} - header_parameters['Content-Type'] = 'application/json; charset=utf-8' + header_parameters['Accept'] = 'application/json' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: @@ -87,8 +87,8 @@ def get( header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request - request = self._client.get(url, 
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_nodes_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_nodes_operations.py
index c5609f8e3f57..81467b9e3385 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_nodes_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_nodes_operations.py
@@ -78,7 +78,7 @@ def get(
         # Construct headers
         header_parameters = {}
-        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
+        header_parameters['Accept'] = 'application/json'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -87,8 +87,8 @@ def get(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

         # Construct and send request
-        request = self._client.get(url, query_parameters)
-        response = self._client.send(request, header_parameters, stream=False, **operation_config)
+        request = self._client.get(url, query_parameters, header_parameters)
+        response = self._client.send(request, stream=False, **operation_config)

         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -145,7 +145,6 @@ def delete(
         # Construct headers
         header_parameters = {}
-        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -154,8 +153,8 @@ def delete(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

         # Construct and send request
-        request = self._client.delete(url, query_parameters)
-        response = self._client.send(request, header_parameters, stream=False, **operation_config)
+        request = self._client.delete(url, query_parameters, header_parameters)
+        response = self._client.send(request, stream=False, **operation_config)

         if response.status_code not in [200, 204]:
             exp = CloudError(response)
@@ -214,6 +213,7 @@ def update(
         # Construct headers
         header_parameters = {}
+        header_parameters['Accept'] = 'application/json'
         header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
@@ -226,9 +226,8 @@ def update(
         body_content = self._serialize.body(update_integration_runtime_node_request, 'UpdateIntegrationRuntimeNodeRequest')

         # Construct and send request
-        request = self._client.patch(url, query_parameters)
-        response = self._client.send(
-            request, header_parameters, body_content, stream=False, **operation_config)
+        request = self._client.patch(url, query_parameters, header_parameters, body_content)
+        response = self._client.send(request, stream=False, **operation_config)

         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -287,7 +286,7 @@ def get_ip_address(
         # Construct headers
         header_parameters = {}
-        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
+        header_parameters['Accept'] = 'application/json'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -296,8 +295,8 @@ def get_ip_address(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

         # Construct and send request
-        request = self._client.post(url, query_parameters)
-        response = self._client.send(request, header_parameters, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters, header_parameters)
+        response = self._client.send(request, stream=False, **operation_config)

         if response.status_code not in [200]:
             exp = CloudError(response)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_object_metadata_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_object_metadata_operations.py
index b997792f6800..230f12d023c3 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_object_metadata_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_object_metadata_operations.py
@@ -12,8 +12,8 @@
 import uuid
 from msrest.pipeline import ClientRawResponse
 from msrestazure.azure_exceptions import CloudError
-from msrest.exceptions import DeserializationError
-from msrestazure.azure_operation import AzureOperationPoller
+from msrest.polling import LROPoller, NoPolling
+from msrestazure.polling.arm_polling import ARMPolling

 from .. import models
@@ -58,7 +58,7 @@ def _refresh_initial(
         # Construct headers
         header_parameters = {}
-        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
+        header_parameters['Accept'] = 'application/json'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -67,8 +67,8 @@ def _refresh_initial(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

         # Construct and send request
-        request = self._client.post(url, query_parameters)
-        response = self._client.send(request, header_parameters, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters, header_parameters)
+        response = self._client.send(request, stream=False, **operation_config)

         if response.status_code not in [200, 202]:
             exp = CloudError(response)
@@ -87,7 +87,7 @@ def _refresh_initial(
         return deserialized

     def refresh(
-            self, resource_group_name, factory_name, integration_runtime_name, custom_headers=None, raw=False, **operation_config):
+            self, resource_group_name, factory_name, integration_runtime_name, custom_headers=None, raw=False, polling=True, **operation_config):
         """Refresh a SSIS integration runtime object metadata.

         :param resource_group_name: The resource group name.
@@ -97,13 +97,17 @@ def refresh(
         :param integration_runtime_name: The integration runtime name.
         :type integration_runtime_name: str
         :param dict custom_headers: headers that will be added to the request
-        :param bool raw: returns the direct response alongside the
-         deserialized response
-        :return: An instance of AzureOperationPoller that returns
-         SsisObjectMetadataStatusResponse or ClientRawResponse if raw=true
+        :param bool raw: The poller return type is ClientRawResponse, the
+         direct response alongside the deserialized response
+        :param polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :return: An instance of LROPoller that returns
+         SsisObjectMetadataStatusResponse or
+         ClientRawResponse if raw==True
         :rtype:
         ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.datafactory.models.SsisObjectMetadataStatusResponse]
-         or ~msrest.pipeline.ClientRawResponse
+         or
+         ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.datafactory.models.SsisObjectMetadataStatusResponse]]
         :raises: :class:`CloudError`
         """
         raw_result = self._refresh_initial(
@@ -114,30 +118,8 @@ def refresh(
             raw=True,
             **operation_config
         )
-        if raw:
-            return raw_result
-
-        # Construct and send request
-        def long_running_send():
-            return raw_result.response
-
-        def get_long_running_status(status_link, headers=None):
-
-            request = self._client.get(status_link)
-            if headers:
-                request.headers.update(headers)
-            header_parameters = {}
-            header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id']
-            return self._client.send(
-                request, header_parameters, stream=False, **operation_config)

         def get_long_running_output(response):
-
-            if response.status_code not in [200, 202]:
-                exp = CloudError(response)
-                exp.request_id = response.headers.get('x-ms-request-id')
-                raise exp
-
             deserialized = self._deserialize('SsisObjectMetadataStatusResponse', response)

             if raw:
@@ -146,12 +128,13 @@ def get_long_running_output(response):

             return deserialized

-        long_running_operation_timeout = operation_config.get(
+        lro_delay = operation_config.get(
             'long_running_operation_timeout',
             self.config.long_running_operation_timeout)
-        return AzureOperationPoller(
-            long_running_send, get_long_running_output,
-            get_long_running_status, long_running_operation_timeout)
+        if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
     refresh.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/refreshObjectMetadata'}

     def get(
@@ -198,6 +181,7 @@ def get(
         # Construct headers
         header_parameters = {}
+        header_parameters['Accept'] = 'application/json'
         header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
@@ -213,9 +197,8 @@ def get(
             body_content = None

         # Construct and send request
-        request = self._client.post(url, query_parameters)
-        response = self._client.send(
-            request, header_parameters, body_content, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters, header_parameters, body_content)
+        response = self._client.send(request, stream=False, **operation_config)

         if response.status_code not in [200]:
             exp = CloudError(response)
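With the migration above, `refresh` now returns an `msrest.polling.LROPoller` instead of an `AzureOperationPoller`. A usage sketch; the client construction and all resource names are placeholders, not part of the patch:

    from azure.common.credentials import ServicePrincipalCredentials
    from azure.mgmt.datafactory import DataFactoryManagementClient

    credentials = ServicePrincipalCredentials(
        client_id='<client-id>', secret='<secret>', tenant='<tenant-id>')
    client = DataFactoryManagementClient(credentials, '<subscription-id>')

    # refresh() returns an LROPoller; result() blocks until the LRO completes.
    poller = client.integration_runtime_object_metadata.refresh(
        '<resource-group>', '<factory-name>', '<integration-runtime-name>')
    status = poller.result()  # SsisObjectMetadataStatusResponse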
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtimes_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtimes_operations.py
index 1a2ab3eb168b..0a64be3b1441 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtimes_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtimes_operations.py
@@ -12,8 +12,8 @@
 import uuid
 from msrest.pipeline import ClientRawResponse
 from msrestazure.azure_exceptions import CloudError
-from msrest.exceptions import DeserializationError
-from msrestazure.azure_operation import AzureOperationPoller
+from msrest.polling import LROPoller, NoPolling
+from msrestazure.polling.arm_polling import ARMPolling

 from .. import models
@@ -79,7 +79,7 @@ def internal_paging(next_link=None, raw=False):
             # Construct headers
             header_parameters = {}
-            header_parameters['Content-Type'] = 'application/json; charset=utf-8'
+            header_parameters['Accept'] = 'application/json'
             if self.config.generate_client_request_id:
                 header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
             if custom_headers:
@@ -88,9 +88,8 @@ def internal_paging(next_link=None, raw=False):
                 header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

             # Construct and send request
-            request = self._client.get(url, query_parameters)
-            response = self._client.send(
-                request, header_parameters, stream=False, **operation_config)
+            request = self._client.get(url, query_parameters, header_parameters)
+            response = self._client.send(request, stream=False, **operation_config)

             if response.status_code not in [200]:
                 exp = CloudError(response)
@@ -154,6 +153,7 @@ def create_or_update(
         # Construct headers
         header_parameters = {}
+        header_parameters['Accept'] = 'application/json'
         header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
@@ -168,9 +168,8 @@ def create_or_update(
         body_content = self._serialize.body(integration_runtime, 'IntegrationRuntimeResource')

         # Construct and send request
-        request = self._client.put(url, query_parameters)
-        response = self._client.send(
-            request, header_parameters, body_content, stream=False, **operation_config)
+        request = self._client.put(url, query_parameters, header_parameters, body_content)
+        response = self._client.send(request, stream=False, **operation_config)

         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -229,7 +228,7 @@ def get(
         # Construct headers
         header_parameters = {}
-        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
+        header_parameters['Accept'] = 'application/json'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -240,8 +239,8 @@ def get(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

         # Construct and send request
-        request = self._client.get(url, query_parameters)
-        response = self._client.send(request, header_parameters, stream=False, **operation_config)
+        request = self._client.get(url, query_parameters, header_parameters)
+        response = self._client.send(request, stream=False, **operation_config)

         if response.status_code not in [200, 304]:
             exp = CloudError(response)
@@ -308,6 +307,7 @@ def update(
         # Construct headers
         header_parameters = {}
+        header_parameters['Accept'] = 'application/json'
         header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
@@ -320,9 +320,8 @@ def update(
         body_content = self._serialize.body(update_integration_runtime_request, 'UpdateIntegrationRuntimeRequest')

         # Construct and send request
-        request = self._client.patch(url, query_parameters)
-        response = self._client.send(
-            request, header_parameters, body_content, stream=False, **operation_config)
+        request = self._client.patch(url, query_parameters, header_parameters, body_content)
+        response = self._client.send(request, stream=False, **operation_config)

         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -376,7 +375,6 @@ def delete(
         # Construct headers
         header_parameters = {}
-        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -385,8 +383,8 @@ def delete(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

         # Construct and send request
-        request = self._client.delete(url, query_parameters)
-        response = self._client.send(request, header_parameters, stream=False, **operation_config)
+        request = self._client.delete(url, query_parameters, header_parameters)
+        response = self._client.send(request, stream=False, **operation_config)

         if response.status_code not in [200, 204]:
             exp = CloudError(response)
@@ -436,7 +434,7 @@ def get_status(
         # Construct headers
         header_parameters = {}
-        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
+        header_parameters['Accept'] = 'application/json'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -445,8 +443,8 @@ def get_status(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

         # Construct and send request
-        request = self._client.post(url, query_parameters)
-        response = self._client.send(request, header_parameters, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters, header_parameters)
+        response = self._client.send(request, stream=False, **operation_config)

         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -504,7 +502,7 @@ def get_connection_info(
         # Construct headers
         header_parameters = {}
-        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
+        header_parameters['Accept'] = 'application/json'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -513,8 +511,8 @@ def get_connection_info(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

         # Construct and send request
-        request = self._client.post(url, query_parameters)
-        response = self._client.send(request, header_parameters, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters, header_parameters)
+        response = self._client.send(request, stream=False, **operation_config)

         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -575,6 +573,7 @@ def regenerate_auth_key(
         # Construct headers
         header_parameters = {}
+        header_parameters['Accept'] = 'application/json'
         header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
@@ -587,9 +586,8 @@ def regenerate_auth_key(
         body_content = self._serialize.body(regenerate_key_parameters, 'IntegrationRuntimeRegenerateKeyParameters')

         # Construct and send request
-        request = self._client.post(url, query_parameters)
-        response = self._client.send(
-            request, header_parameters, body_content, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters, header_parameters, body_content)
+        response = self._client.send(request, stream=False, **operation_config)

         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -644,7 +642,7 @@ def list_auth_keys(
         # Construct headers
         header_parameters = {}
-        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
+        header_parameters['Accept'] = 'application/json'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -653,8 +651,8 @@ def list_auth_keys(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

         # Construct and send request
-        request = self._client.post(url, query_parameters)
-        response = self._client.send(request, header_parameters, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters, header_parameters)
+        response = self._client.send(request, stream=False, **operation_config)

         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -692,7 +690,7 @@ def _start_initial(
         # Construct headers
         header_parameters = {}
-        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
+        header_parameters['Accept'] = 'application/json'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -701,8 +699,8 @@ def _start_initial(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

         # Construct and send request
-        request = self._client.post(url, query_parameters)
-        response = self._client.send(request, header_parameters, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters, header_parameters)
+        response = self._client.send(request, stream=False, **operation_config)

         if response.status_code not in [200, 202]:
             exp = CloudError(response)
@@ -721,7 +719,7 @@ def _start_initial(
         return deserialized

     def start(
-            self, resource_group_name, factory_name, integration_runtime_name, custom_headers=None, raw=False, **operation_config):
+            self, resource_group_name, factory_name, integration_runtime_name, custom_headers=None, raw=False, polling=True, **operation_config):
         """Starts a ManagedReserved type integration runtime.

         :param resource_group_name: The resource group name.
@@ -731,13 +729,17 @@ def start(
         :param integration_runtime_name: The integration runtime name.
         :type integration_runtime_name: str
         :param dict custom_headers: headers that will be added to the request
-        :param bool raw: returns the direct response alongside the
-         deserialized response
-        :return: An instance of AzureOperationPoller that returns
-         IntegrationRuntimeStatusResponse or ClientRawResponse if raw=true
+        :param bool raw: The poller return type is ClientRawResponse, the
+         direct response alongside the deserialized response
+        :param polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :return: An instance of LROPoller that returns
+         IntegrationRuntimeStatusResponse or
+         ClientRawResponse if raw==True
         :rtype:
         ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse]
-         or ~msrest.pipeline.ClientRawResponse
+         or
+         ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse]]
         :raises: :class:`CloudError`
         """
         raw_result = self._start_initial(
@@ -748,30 +750,8 @@ def start(
             raw=True,
             **operation_config
         )
-        if raw:
-            return raw_result
-
-        # Construct and send request
-        def long_running_send():
-            return raw_result.response
-
-        def get_long_running_status(status_link, headers=None):
-
-            request = self._client.get(status_link)
-            if headers:
-                request.headers.update(headers)
-            header_parameters = {}
-            header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id']
-            return self._client.send(
-                request, header_parameters, stream=False, **operation_config)

         def get_long_running_output(response):
-
-            if response.status_code not in [200, 202]:
-                exp = CloudError(response)
-                exp.request_id = response.headers.get('x-ms-request-id')
-                raise exp
-
             deserialized = self._deserialize('IntegrationRuntimeStatusResponse', response)

             if raw:
@@ -780,12 +760,13 @@ def get_long_running_output(response):

             return deserialized

-        long_running_operation_timeout = operation_config.get(
+        lro_delay = operation_config.get(
             'long_running_operation_timeout',
             self.config.long_running_operation_timeout)
-        return AzureOperationPoller(
-            long_running_send, get_long_running_output,
-            get_long_running_status, long_running_operation_timeout)
+        if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
     start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'}
@@ -807,7 +788,6 @@ def _stop_initial(
         # Construct headers
         header_parameters = {}
-        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -816,8 +796,8 @@ def _stop_initial(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

         # Construct and send request
-        request = self._client.post(url, query_parameters)
-        response = self._client.send(request, header_parameters, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters, header_parameters)
+        response = self._client.send(request, stream=False, **operation_config)

         if response.status_code not in [200, 202]:
             exp = CloudError(response)
@@ -829,7 +809,7 @@ def _stop_initial(
         return client_raw_response

     def stop(
-            self, resource_group_name, factory_name, integration_runtime_name, custom_headers=None, raw=False, **operation_config):
+            self, resource_group_name, factory_name, integration_runtime_name, custom_headers=None, raw=False, polling=True, **operation_config):
         """Stops a ManagedReserved type integration runtime.

         :param resource_group_name: The resource group name.
@@ -839,12 +819,14 @@ def stop(
         :param integration_runtime_name: The integration runtime name.
         :type integration_runtime_name: str
         :param dict custom_headers: headers that will be added to the request
-        :param bool raw: returns the direct response alongside the
-         deserialized response
-        :return: An instance of AzureOperationPoller that returns None or
-         ClientRawResponse if raw=true
+        :param bool raw: The poller return type is ClientRawResponse, the
+         direct response alongside the deserialized response
+        :param polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :return: An instance of LROPoller that returns None or
+         ClientRawResponse if raw==True
         :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
-         ~msrest.pipeline.ClientRawResponse
+         ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
         :raises: :class:`CloudError`
         """
         raw_result = self._stop_initial(
@@ -855,40 +837,19 @@ def stop(
             raw=True,
             **operation_config
         )
-        if raw:
-            return raw_result
-
-        # Construct and send request
-        def long_running_send():
-            return raw_result.response
-
-        def get_long_running_status(status_link, headers=None):
-
-            request = self._client.get(status_link)
-            if headers:
-                request.headers.update(headers)
-            header_parameters = {}
-            header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id']
-            return self._client.send(
-                request, header_parameters, stream=False, **operation_config)

         def get_long_running_output(response):
-
-            if response.status_code not in [200, 202]:
-                exp = CloudError(response)
-                exp.request_id = response.headers.get('x-ms-request-id')
-                raise exp
-
             if raw:
                 client_raw_response = ClientRawResponse(None, response)
                 return client_raw_response

-        long_running_operation_timeout = operation_config.get(
+        lro_delay = operation_config.get(
             'long_running_operation_timeout',
             self.config.long_running_operation_timeout)
-        return AzureOperationPoller(
-            long_running_send, get_long_running_output,
-            get_long_running_status, long_running_operation_timeout)
+        if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
     stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'}

     def sync_credentials(
@@ -931,7 +892,6 @@ def sync_credentials(
         # Construct headers
         header_parameters = {}
-        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -940,8 +900,8 @@ def sync_credentials(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

         # Construct and send request
-        request = self._client.post(url, query_parameters)
-        response = self._client.send(request, header_parameters, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters, header_parameters)
+        response = self._client.send(request, stream=False, **operation_config)

         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -992,7 +952,7 @@ def get_monitoring_data(
         # Construct headers
         header_parameters = {}
-        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
+        header_parameters['Accept'] = 'application/json'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -1001,8 +961,8 @@ def get_monitoring_data(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

         # Construct and send request
-        request = self._client.post(url, query_parameters)
-        response = self._client.send(request, header_parameters, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters, header_parameters)
+        response = self._client.send(request, stream=False, **operation_config)

         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -1057,7 +1017,6 @@ def upgrade(
         # Construct headers
         header_parameters = {}
-        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -1066,8 +1025,8 @@ def upgrade(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

         # Construct and send request
-        request = self._client.post(url, query_parameters)
-        response = self._client.send(request, header_parameters, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters, header_parameters)
+        response = self._client.send(request, stream=False, **operation_config)

         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -1132,9 +1091,8 @@ def remove_links(
         body_content = self._serialize.body(linked_integration_runtime_request, 'LinkedIntegrationRuntimeRequest')

         # Construct and send request
-        request = self._client.post(url, query_parameters)
-        response = self._client.send(
-            request, header_parameters, body_content, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters, header_parameters, body_content)
+        response = self._client.send(request, stream=False, **operation_config)

         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -1189,6 +1147,7 @@ def create_linked_integration_runtime(
         # Construct headers
         header_parameters = {}
+        header_parameters['Accept'] = 'application/json'
         header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
@@ -1201,9 +1160,8 @@ def create_linked_integration_runtime(
         body_content = self._serialize.body(create_linked_integration_runtime_request, 'CreateLinkedIntegrationRuntimeRequest')

         # Construct and send request
-        request = self._client.post(url, query_parameters)
-        response = self._client.send(
-            request, header_parameters, body_content, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters, header_parameters, body_content)
+        response = self._client.send(request, stream=False, **operation_config)

         if response.status_code not in [200]:
             exp = CloudError(response)
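The new `polling` keyword accepted by `start`/`stop` above selects the polling strategy: `True` uses ARMPolling with the configured delay, `False` skips polling (the poller resolves with the initial response), and any polling-method object is used as-is. A sketch, reusing the `client` from the earlier example; resource names are placeholders, and the `timeout` argument to ARMPolling (the polling interval in seconds) is assumed from the msrestazure API of this era:

    from msrestazure.polling.arm_polling import ARMPolling

    # Default ARM polling:
    status = client.integration_runtimes.start(
        '<resource-group>', '<factory-name>', '<ir-name>').result()

    # Custom polling interval of 5 seconds:
    poller = client.integration_runtimes.stop(
        '<resource-group>', '<factory-name>', '<ir-name>',
        polling=ARMPolling(timeout=5))
    poller.wait()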
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/linked_services_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/linked_services_operations.py
index 98ef184787ba..e6878336df91 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/linked_services_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/linked_services_operations.py
@@ -77,7 +77,7 @@ def internal_paging(next_link=None, raw=False):
             # Construct headers
             header_parameters = {}
-            header_parameters['Content-Type'] = 'application/json; charset=utf-8'
+            header_parameters['Accept'] = 'application/json'
             if self.config.generate_client_request_id:
                 header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
             if custom_headers:
@@ -86,9 +86,8 @@ def internal_paging(next_link=None, raw=False):
                 header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

             # Construct and send request
-            request = self._client.get(url, query_parameters)
-            response = self._client.send(
-                request, header_parameters, stream=False, **operation_config)
+            request = self._client.get(url, query_parameters, header_parameters)
+            response = self._client.send(request, stream=False, **operation_config)

             if response.status_code not in [200]:
                 exp = CloudError(response)
@@ -152,6 +151,7 @@ def create_or_update(
         # Construct headers
         header_parameters = {}
+        header_parameters['Accept'] = 'application/json'
         header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
@@ -166,9 +166,8 @@ def create_or_update(
         body_content = self._serialize.body(linked_service, 'LinkedServiceResource')

         # Construct and send request
-        request = self._client.put(url, query_parameters)
-        response = self._client.send(
-            request, header_parameters, body_content, stream=False, **operation_config)
+        request = self._client.put(url, query_parameters, header_parameters, body_content)
+        response = self._client.send(request, stream=False, **operation_config)

         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -227,7 +226,7 @@ def get(
         # Construct headers
         header_parameters = {}
-        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
+        header_parameters['Accept'] = 'application/json'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -238,8 +237,8 @@ def get(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

         # Construct and send request
-        request = self._client.get(url, query_parameters)
-        response = self._client.send(request, header_parameters, stream=False, **operation_config)
+        request = self._client.get(url, query_parameters, header_parameters)
+        response = self._client.send(request, stream=False, **operation_config)

         if response.status_code not in [200, 304]:
             exp = CloudError(response)
@@ -293,7 +292,6 @@ def delete(
         # Construct headers
         header_parameters = {}
-        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -302,8 +300,8 @@ def delete(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

         # Construct and send request
-        request = self._client.delete(url, query_parameters)
-        response = self._client.send(request, header_parameters, stream=False, **operation_config)
+        request = self._client.delete(url, query_parameters, header_parameters)
+        response = self._client.send(request, stream=False, **operation_config)

         if response.status_code not in [200, 204]:
             exp = CloudError(response)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/operations.py
index 8af242714177..2273e12d5ada 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/operations.py
@@ -67,7 +67,7 @@ def internal_paging(next_link=None, raw=False):
             # Construct headers
             header_parameters = {}
-            header_parameters['Content-Type'] = 'application/json; charset=utf-8'
+            header_parameters['Accept'] = 'application/json'
             if self.config.generate_client_request_id:
                 header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
             if custom_headers:
@@ -76,9 +76,8 @@ def internal_paging(next_link=None, raw=False):
                 header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

             # Construct and send request
-            request = self._client.get(url, query_parameters)
-            response = self._client.send(
-                request, header_parameters, stream=False, **operation_config)
+            request = self._client.get(url, query_parameters, header_parameters)
+            response = self._client.send(request, stream=False, **operation_config)

             if response.status_code not in [200]:
                 exp = CloudError(response)
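Paged operations such as `Operations.list()` above keep their public shape; only the internal request construction changed. Consumption is unchanged (a sketch, assuming the `client` from the earlier example):

    # internal_paging issues one request per page; the Paged iterator
    # follows next_link transparently for the caller.
    for operation in client.operations.list():
        print(operation.name)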
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipeline_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipeline_runs_operations.py
index 3ce268e54664..de8744612d20 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipeline_runs_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipeline_runs_operations.py
@@ -73,6 +73,7 @@ def query_by_factory(
         # Construct headers
         header_parameters = {}
+        header_parameters['Accept'] = 'application/json'
         header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
@@ -85,9 +86,8 @@ def query_by_factory(
         body_content = self._serialize.body(filter_parameters, 'RunFilterParameters')

         # Construct and send request
-        request = self._client.post(url, query_parameters)
-        response = self._client.send(
-            request, header_parameters, body_content, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters, header_parameters, body_content)
+        response = self._client.send(request, stream=False, **operation_config)

         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -142,7 +142,7 @@ def get(
         # Construct headers
         header_parameters = {}
-        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
+        header_parameters['Accept'] = 'application/json'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -151,8 +151,8 @@ def get(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

         # Construct and send request
-        request = self._client.get(url, query_parameters)
-        response = self._client.send(request, header_parameters, stream=False, **operation_config)
+        request = self._client.get(url, query_parameters, header_parameters)
+        response = self._client.send(request, stream=False, **operation_config)

         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -211,7 +211,6 @@ def cancel(
         # Construct headers
         header_parameters = {}
-        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -220,8 +219,8 @@ def cancel(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

         # Construct and send request
-        request = self._client.post(url, query_parameters)
-        response = self._client.send(request, header_parameters, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters, header_parameters)
+        response = self._client.send(request, stream=False, **operation_config)

         if response.status_code not in [200]:
             exp = CloudError(response)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipelines_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipelines_operations.py
index b0f0cb0ec42f..343396e705ac 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipelines_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipelines_operations.py
@@ -77,7 +77,7 @@ def internal_paging(next_link=None, raw=False):
             # Construct headers
             header_parameters = {}
-            header_parameters['Content-Type'] = 'application/json; charset=utf-8'
+            header_parameters['Accept'] = 'application/json'
             if self.config.generate_client_request_id:
                 header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
             if custom_headers:
@@ -86,9 +86,8 @@ def internal_paging(next_link=None, raw=False):
                 header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

             # Construct and send request
-            request = self._client.get(url, query_parameters)
-            response = self._client.send(
-                request, header_parameters, stream=False, **operation_config)
+            request = self._client.get(url, query_parameters, header_parameters)
+            response = self._client.send(request, stream=False, **operation_config)

             if response.status_code not in [200]:
                 exp = CloudError(response)
@@ -150,6 +149,7 @@ def create_or_update(
         # Construct headers
         header_parameters = {}
+        header_parameters['Accept'] = 'application/json'
         header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
@@ -164,9 +164,8 @@ def create_or_update(
         body_content = self._serialize.body(pipeline, 'PipelineResource')

         # Construct and send request
-        request = self._client.put(url, query_parameters)
-        response = self._client.send(
-            request, header_parameters, body_content, stream=False, **operation_config)
+        request = self._client.put(url, query_parameters, header_parameters, body_content)
+        response = self._client.send(request, stream=False, **operation_config)

         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -225,7 +224,7 @@ def get(
         # Construct headers
         header_parameters = {}
-        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
+        header_parameters['Accept'] = 'application/json'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -236,8 +235,8 @@ def get(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

         # Construct and send request
-        request = self._client.get(url, query_parameters)
-        response = self._client.send(request, header_parameters, stream=False, **operation_config)
+        request = self._client.get(url, query_parameters, header_parameters)
+        response = self._client.send(request, stream=False, **operation_config)

         if response.status_code not in [200, 304]:
             exp = CloudError(response)
@@ -291,7 +290,6 @@ def delete(
         # Construct headers
         header_parameters = {}
-        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -300,8 +298,8 @@ def delete(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

         # Construct and send request
-        request = self._client.delete(url, query_parameters)
-        response = self._client.send(request, header_parameters, stream=False, **operation_config)
+        request = self._client.delete(url, query_parameters, header_parameters)
+        response = self._client.send(request, stream=False, **operation_config)

         if response.status_code not in [200, 204]:
             exp = CloudError(response)
@@ -369,6 +367,7 @@ def create_run(
         # Construct headers
         header_parameters = {}
+        header_parameters['Accept'] = 'application/json'
         header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
@@ -384,9 +383,8 @@ def create_run(
             body_content = None

         # Construct and send request
-        request = self._client.post(url, query_parameters)
-        response = self._client.send(
-            request, header_parameters, body_content, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters, header_parameters, body_content)
+        response = self._client.send(request, stream=False, **operation_config)

         if response.status_code not in [200]:
             exp = CloudError(response)
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/rerun_triggers_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/rerun_triggers_operations.py
index d907454f4d93..58e0066a60dd 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/rerun_triggers_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/rerun_triggers_operations.py
@@ -12,8 +12,8 @@
 import uuid
 from msrest.pipeline import ClientRawResponse
 from msrestazure.azure_exceptions import CloudError
-from msrest.exceptions import DeserializationError
-from msrestazure.azure_operation import AzureOperationPoller
+from msrest.polling import LROPoller, NoPolling
+from msrestazure.polling.arm_polling import ARMPolling

 from .. import models
@@ -82,6 +82,7 @@ def create(
         # Construct headers
         header_parameters = {}
+        header_parameters['Accept'] = 'application/json'
         header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
@@ -94,9 +95,8 @@ def create(
         body_content = self._serialize.body(rerun_tumbling_window_trigger_action_parameters, 'RerunTumblingWindowTriggerActionParameters')

         # Construct and send request
-        request = self._client.put(url, query_parameters)
-        response = self._client.send(
-            request, header_parameters, body_content, stream=False, **operation_config)
+        request = self._client.put(url, query_parameters, header_parameters, body_content)
+        response = self._client.send(request, stream=False, **operation_config)

         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -135,7 +135,6 @@ def _start_initial(
         # Construct headers
         header_parameters = {}
-        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -144,8 +143,8 @@ def _start_initial(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

         # Construct and send request
-        request = self._client.post(url, query_parameters)
-        response = self._client.send(request, header_parameters, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters, header_parameters)
+        response = self._client.send(request, stream=False, **operation_config)

         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -157,7 +156,7 @@ def _start_initial(
         return client_raw_response

     def start(
-            self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, **operation_config):
+            self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, polling=True, **operation_config):
         """Starts a trigger.

         :param resource_group_name: The resource group name.
@@ -169,12 +168,14 @@ def start(
         :param rerun_trigger_name: The rerun trigger name.
         :type rerun_trigger_name: str
         :param dict custom_headers: headers that will be added to the request
-        :param bool raw: returns the direct response alongside the
-         deserialized response
-        :return: An instance of AzureOperationPoller that returns None or
-         ClientRawResponse if raw=true
+        :param bool raw: The poller return type is ClientRawResponse, the
+         direct response alongside the deserialized response
+        :param polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :return: An instance of LROPoller that returns None or
+         ClientRawResponse if raw==True
         :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
-         ~msrest.pipeline.ClientRawResponse
+         ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
         :raises: :class:`CloudError`
         """
         raw_result = self._start_initial(
@@ -186,40 +187,19 @@ def start(
             raw=True,
             **operation_config
         )
-        if raw:
-            return raw_result
-
-        # Construct and send request
-        def long_running_send():
-            return raw_result.response
-
-        def get_long_running_status(status_link, headers=None):
-
-            request = self._client.get(status_link)
-            if headers:
-                request.headers.update(headers)
-            header_parameters = {}
-            header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id']
-            return self._client.send(
-                request, header_parameters, stream=False, **operation_config)

         def get_long_running_output(response):
-
-            if response.status_code not in [200]:
-                exp = CloudError(response)
-                exp.request_id = response.headers.get('x-ms-request-id')
-                raise exp
-
             if raw:
                 client_raw_response = ClientRawResponse(None, response)
                 return client_raw_response

-        long_running_operation_timeout = operation_config.get(
+        lro_delay = operation_config.get(
             'long_running_operation_timeout',
             self.config.long_running_operation_timeout)
-        return AzureOperationPoller(
-            long_running_send, get_long_running_output,
-            get_long_running_status, long_running_operation_timeout)
+        if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
     start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/rerunTriggers/{rerunTriggerName}/start'}
@@ -242,7 +222,6 @@ def _stop_initial(
         # Construct headers
         header_parameters = {}
-        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -251,8 +230,8 @@ def _stop_initial(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

         # Construct and send request
-        request = self._client.post(url, query_parameters)
-        response = self._client.send(request, header_parameters, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters, header_parameters)
+        response = self._client.send(request, stream=False, **operation_config)

         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -264,7 +243,7 @@ def _stop_initial(
         return client_raw_response

     def stop(
-            self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, **operation_config):
+            self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, polling=True, **operation_config):
         """Stops a trigger.

         :param resource_group_name: The resource group name.
@@ -276,12 +255,14 @@ def stop(
         :param rerun_trigger_name: The rerun trigger name.
         :type rerun_trigger_name: str
         :param dict custom_headers: headers that will be added to the request
-        :param bool raw: returns the direct response alongside the
-         deserialized response
-        :return: An instance of AzureOperationPoller that returns None or
-         ClientRawResponse if raw=true
+        :param bool raw: The poller return type is ClientRawResponse, the
+         direct response alongside the deserialized response
+        :param polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :return: An instance of LROPoller that returns None or
+         ClientRawResponse if raw==True
        :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
-         ~msrest.pipeline.ClientRawResponse
+         ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
         :raises: :class:`CloudError`
         """
         raw_result = self._stop_initial(
@@ -293,40 +274,19 @@ def stop(
             raw=True,
             **operation_config
         )
-        if raw:
-            return raw_result
-
-        # Construct and send request
-        def long_running_send():
-            return raw_result.response
-
-        def get_long_running_status(status_link, headers=None):
-
-            request = self._client.get(status_link)
-            if headers:
-                request.headers.update(headers)
-            header_parameters = {}
-            header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id']
-            return self._client.send(
-                request, header_parameters, stream=False, **operation_config)

         def get_long_running_output(response):
-
-            if response.status_code not in [200]:
-                exp = CloudError(response)
-                exp.request_id = response.headers.get('x-ms-request-id')
-                raise exp
-
             if raw:
                 client_raw_response = ClientRawResponse(None, response)
                 return client_raw_response

-        long_running_operation_timeout = operation_config.get(
+        lro_delay = operation_config.get(
             'long_running_operation_timeout',
             self.config.long_running_operation_timeout)
-        return AzureOperationPoller(
-            long_running_send, get_long_running_output,
-            get_long_running_status, long_running_operation_timeout)
+        if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
     stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/rerunTriggers/{rerunTriggerName}/stop'}
@@ -349,7 +309,6 @@ def _cancel_initial(
         # Construct headers
         header_parameters = {}
-        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
         if self.config.generate_client_request_id:
             header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
         if custom_headers:
@@ -358,8 +317,8 @@ def _cancel_initial(
             header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

         # Construct and send request
-        request = self._client.post(url, query_parameters)
-        response = self._client.send(request, header_parameters, stream=False, **operation_config)
+        request = self._client.post(url, query_parameters, header_parameters)
+        response = self._client.send(request, stream=False, **operation_config)

         if response.status_code not in [200]:
             exp = CloudError(response)
@@ -371,7 +330,7 @@ def _cancel_initial(
         return client_raw_response

     def cancel(
-            self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, **operation_config):
+            self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, polling=True, **operation_config):
         """Cancels a trigger.

         :param resource_group_name: The resource group name.
@@ -383,12 +342,14 @@ def cancel(
         :param rerun_trigger_name: The rerun trigger name.
         :type rerun_trigger_name: str
         :param dict custom_headers: headers that will be added to the request
-        :param bool raw: returns the direct response alongside the
-         deserialized response
-        :return: An instance of AzureOperationPoller that returns None or
-         ClientRawResponse if raw=true
+        :param bool raw: The poller return type is ClientRawResponse, the
+         direct response alongside the deserialized response
+        :param polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :return: An instance of LROPoller that returns None or
+         ClientRawResponse if raw==True
         :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
-         ~msrest.pipeline.ClientRawResponse
+         ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
         :raises: :class:`CloudError`
         """
         raw_result = self._cancel_initial(
@@ -400,40 +361,19 @@ def cancel(
             raw=True,
             **operation_config
         )
-        if raw:
-            return raw_result
-
-        # Construct and send request
-        def long_running_send():
-            return raw_result.response
-
-        def get_long_running_status(status_link, headers=None):
-
-            request = self._client.get(status_link)
-            if headers:
-                request.headers.update(headers)
-            header_parameters = {}
-            header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id']
-            return self._client.send(
-                request, header_parameters, stream=False, **operation_config)

         def get_long_running_output(response):
-
-            if response.status_code not in [200]:
-                exp = CloudError(response)
-                exp.request_id = response.headers.get('x-ms-request-id')
-                raise exp
-
             if raw:
                 client_raw_response = ClientRawResponse(None, response)
                 return client_raw_response

-        long_running_operation_timeout = operation_config.get(
+        lro_delay = operation_config.get(
             'long_running_operation_timeout',
             self.config.long_running_operation_timeout)
-        return AzureOperationPoller(
-            long_running_send, get_long_running_output,
-            get_long_running_status, long_running_operation_timeout)
+        if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
     cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/rerunTriggers/{rerunTriggerName}/cancel'}

     def list_by_trigger(
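Note the `raw=True` semantics after this migration: the initial request is always issued with `raw=True` internally, and when the caller passes `raw=True` the poller's result is the `ClientRawResponse` itself. A sketch, reusing the `client` from the earlier example; resource names are placeholders:

    poller = client.rerun_triggers.cancel(
        '<resource-group>', '<factory-name>', '<trigger-name>',
        '<rerun-trigger-name>', raw=True)
    raw = poller.result()            # msrest.pipeline.ClientRawResponse
    print(raw.response.status_code)  # 200 on success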
self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request - request = self._client.get(url, query_parameters) - response = self._client.send( - request, header_parameters, stream=False, **operation_config) + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/trigger_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/trigger_runs_operations.py index c0e9560f0a6e..e4e4774ae3bc 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/trigger_runs_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/trigger_runs_operations.py @@ -75,7 +75,6 @@ def rerun( # Construct headers header_parameters = {} - header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: @@ -84,8 +83,8 @@ def rerun( header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request - request = self._client.post(url, query_parameters) - response = self._client.send(request, header_parameters, stream=False, **operation_config) + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) @@ -133,6 +132,7 @@ def query_by_factory( # Construct headers header_parameters = {} + header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) @@ -145,9 +145,8 @@ def query_by_factory( body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') # Construct and send request - request = self._client.post(url, query_parameters) - response = self._client.send( - request, header_parameters, body_content, stream=False, **operation_config) + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/triggers_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/triggers_operations.py index dc6791d33b40..d6a2d51cf85a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/triggers_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/triggers_operations.py @@ -12,8 +12,8 @@ import uuid from msrest.pipeline import ClientRawResponse from msrestazure.azure_exceptions import CloudError -from msrest.exceptions import DeserializationError -from msrestazure.azure_operation import AzureOperationPoller +from msrest.polling import LROPoller, NoPolling +from msrestazure.polling.arm_polling import ARMPolling from .. 
import models @@ -79,7 +79,7 @@ def internal_paging(next_link=None, raw=False): # Construct headers header_parameters = {} - header_parameters['Content-Type'] = 'application/json; charset=utf-8' + header_parameters['Accept'] = 'application/json' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: @@ -88,9 +88,8 @@ def internal_paging(next_link=None, raw=False): header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request - request = self._client.get(url, query_parameters) - response = self._client.send( - request, header_parameters, stream=False, **operation_config) + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) @@ -154,6 +153,7 @@ def create_or_update( # Construct headers header_parameters = {} + header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) @@ -168,9 +168,8 @@ def create_or_update( body_content = self._serialize.body(trigger, 'TriggerResource') # Construct and send request - request = self._client.put(url, query_parameters) - response = self._client.send( - request, header_parameters, body_content, stream=False, **operation_config) + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) @@ -229,7 +228,7 @@ def get( # Construct headers header_parameters = {} - header_parameters['Content-Type'] = 'application/json; charset=utf-8' + header_parameters['Accept'] = 'application/json' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: @@ -240,8 +239,8 @@ def get( header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request - request = self._client.get(url, query_parameters) - response = self._client.send(request, header_parameters, stream=False, **operation_config) + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200, 304]: exp = CloudError(response) @@ -295,7 +294,6 @@ def delete( # Construct headers header_parameters = {} - header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: @@ -304,8 +302,8 @@ def delete( header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request - request = self._client.delete(url, query_parameters) - response = self._client.send(request, header_parameters, stream=False, **operation_config) + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200, 204]: exp = CloudError(response) @@ -336,7 +334,7 @@ def _subscribe_to_events_initial( # Construct 
headers header_parameters = {} - header_parameters['Content-Type'] = 'application/json; charset=utf-8' + header_parameters['Accept'] = 'application/json' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: @@ -345,8 +343,8 @@ def _subscribe_to_events_initial( header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request - request = self._client.post(url, query_parameters) - response = self._client.send(request, header_parameters, stream=False, **operation_config) + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200, 202]: exp = CloudError(response) @@ -365,7 +363,7 @@ def _subscribe_to_events_initial( return deserialized def subscribe_to_events( - self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, **operation_config): + self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, polling=True, **operation_config): """Subscribe event trigger to events. :param resource_group_name: The resource group name. @@ -375,13 +373,17 @@ def subscribe_to_events( :param trigger_name: The trigger name. :type trigger_name: str :param dict custom_headers: headers that will be added to the request - :param bool raw: returns the direct response alongside the - deserialized response - :return: An instance of AzureOperationPoller that returns - TriggerSubscriptionOperationStatus or ClientRawResponse if raw=true + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns + TriggerSubscriptionOperationStatus or + ClientRawResponse if raw==True :rtype: ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus] - or ~msrest.pipeline.ClientRawResponse + or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus]] :raises: :class:`CloudError` """ raw_result = self._subscribe_to_events_initial( @@ -392,30 +394,8 @@ def subscribe_to_events( raw=True, **operation_config ) - if raw: - return raw_result - - # Construct and send request - def long_running_send(): - return raw_result.response - - def get_long_running_status(status_link, headers=None): - - request = self._client.get(status_link) - if headers: - request.headers.update(headers) - header_parameters = {} - header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id'] - return self._client.send( - request, header_parameters, stream=False, **operation_config) def get_long_running_output(response): - - if response.status_code not in [200, 202]: - exp = CloudError(response) - exp.request_id = response.headers.get('x-ms-request-id') - raise exp - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', response) if raw: @@ -424,12 +404,13 @@ def get_long_running_output(response): return deserialized - long_running_operation_timeout = operation_config.get( + lro_delay = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) - return 
AzureOperationPoller( - long_running_send, get_long_running_output, - get_long_running_status, long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) subscribe_to_events.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} def get_event_subscription_status( @@ -470,7 +451,7 @@ def get_event_subscription_status( # Construct headers header_parameters = {} - header_parameters['Content-Type'] = 'application/json; charset=utf-8' + header_parameters['Accept'] = 'application/json' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: @@ -479,8 +460,8 @@ def get_event_subscription_status( header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request - request = self._client.post(url, query_parameters) - response = self._client.send(request, header_parameters, stream=False, **operation_config) + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) @@ -518,7 +499,7 @@ def _unsubscribe_from_events_initial( # Construct headers header_parameters = {} - header_parameters['Content-Type'] = 'application/json; charset=utf-8' + header_parameters['Accept'] = 'application/json' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: @@ -527,8 +508,8 @@ def _unsubscribe_from_events_initial( header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request - request = self._client.post(url, query_parameters) - response = self._client.send(request, header_parameters, stream=False, **operation_config) + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200, 202]: exp = CloudError(response) @@ -547,7 +528,7 @@ def _unsubscribe_from_events_initial( return deserialized def unsubscribe_from_events( - self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, **operation_config): + self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, polling=True, **operation_config): """Unsubscribe event trigger from events. :param resource_group_name: The resource group name. @@ -557,13 +538,17 @@ def unsubscribe_from_events( :param trigger_name: The trigger name. 
:type trigger_name: str :param dict custom_headers: headers that will be added to the request - :param bool raw: returns the direct response alongside the - deserialized response - :return: An instance of AzureOperationPoller that returns - TriggerSubscriptionOperationStatus or ClientRawResponse if raw=true + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns + TriggerSubscriptionOperationStatus or + ClientRawResponse if raw==True :rtype: ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus] - or ~msrest.pipeline.ClientRawResponse + or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus]] :raises: :class:`CloudError` """ raw_result = self._unsubscribe_from_events_initial( @@ -574,30 +559,8 @@ def unsubscribe_from_events( raw=True, **operation_config ) - if raw: - return raw_result - - # Construct and send request - def long_running_send(): - return raw_result.response - - def get_long_running_status(status_link, headers=None): - - request = self._client.get(status_link) - if headers: - request.headers.update(headers) - header_parameters = {} - header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id'] - return self._client.send( - request, header_parameters, stream=False, **operation_config) def get_long_running_output(response): - - if response.status_code not in [200, 202]: - exp = CloudError(response) - exp.request_id = response.headers.get('x-ms-request-id') - raise exp - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', response) if raw: @@ -606,12 +569,13 @@ def get_long_running_output(response): return deserialized - long_running_operation_timeout = operation_config.get( + lro_delay = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) - return AzureOperationPoller( - long_running_send, get_long_running_output, - get_long_running_status, long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) unsubscribe_from_events.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} @@ -633,7 +597,6 @@ def _start_initial( # Construct headers header_parameters = {} - header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: @@ -642,8 +605,8 @@ def _start_initial( header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request - request = self._client.post(url, query_parameters) - response = self._client.send(request, header_parameters, stream=False, **operation_config) + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, 
**operation_config) if response.status_code not in [200]: exp = CloudError(response) @@ -655,7 +618,7 @@ def _start_initial( return client_raw_response def start( - self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, **operation_config): + self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, polling=True, **operation_config): """Starts a trigger. :param resource_group_name: The resource group name. @@ -665,12 +628,14 @@ def start( :param trigger_name: The trigger name. :type trigger_name: str :param dict custom_headers: headers that will be added to the request - :param bool raw: returns the direct response alongside the - deserialized response - :return: An instance of AzureOperationPoller that returns None or - ClientRawResponse if raw=true + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns None or + ClientRawResponse if raw==True :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or - ~msrest.pipeline.ClientRawResponse + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] :raises: :class:`CloudError` """ raw_result = self._start_initial( @@ -681,40 +646,19 @@ def start( raw=True, **operation_config ) - if raw: - return raw_result - - # Construct and send request - def long_running_send(): - return raw_result.response - - def get_long_running_status(status_link, headers=None): - - request = self._client.get(status_link) - if headers: - request.headers.update(headers) - header_parameters = {} - header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id'] - return self._client.send( - request, header_parameters, stream=False, **operation_config) def get_long_running_output(response): - - if response.status_code not in [200]: - exp = CloudError(response) - exp.request_id = response.headers.get('x-ms-request-id') - raise exp - if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response - long_running_operation_timeout = operation_config.get( + lro_delay = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) - return AzureOperationPoller( - long_running_send, get_long_running_output, - get_long_running_status, long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} @@ -736,7 +680,6 @@ def _stop_initial( # Construct headers header_parameters = {} - header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: @@ -745,8 +688,8 @@ def _stop_initial( header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request - request = self._client.post(url, query_parameters) - response = 
self._client.send(request, header_parameters, stream=False, **operation_config) + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) @@ -758,7 +701,7 @@ def _stop_initial( return client_raw_response def stop( - self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, **operation_config): + self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, polling=True, **operation_config): """Stops a trigger. :param resource_group_name: The resource group name. @@ -768,12 +711,14 @@ def stop( :param trigger_name: The trigger name. :type trigger_name: str :param dict custom_headers: headers that will be added to the request - :param bool raw: returns the direct response alongside the - deserialized response - :return: An instance of AzureOperationPoller that returns None or - ClientRawResponse if raw=true + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns None or + ClientRawResponse if raw==True :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or - ~msrest.pipeline.ClientRawResponse + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] :raises: :class:`CloudError` """ raw_result = self._stop_initial( @@ -784,38 +729,17 @@ def stop( raw=True, **operation_config ) - if raw: - return raw_result - - # Construct and send request - def long_running_send(): - return raw_result.response - - def get_long_running_status(status_link, headers=None): - - request = self._client.get(status_link) - if headers: - request.headers.update(headers) - header_parameters = {} - header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id'] - return self._client.send( - request, header_parameters, stream=False, **operation_config) def get_long_running_output(response): - - if response.status_code not in [200]: - exp = CloudError(response) - exp.request_id = response.headers.get('x-ms-request-id') - raise exp - if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response - long_running_operation_timeout = operation_config.get( + lro_delay = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) - return AzureOperationPoller( - long_running_send, get_long_running_output, - get_long_running_status, long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} diff --git a/swagger_to_sdk_config.json b/swagger_to_sdk_config.json index d43f1d077fa2..03773dcdd582 100644 --- a/swagger_to_sdk_config.json +++ b/swagger_to_sdk_config.json @@ -3,7 +3,7 @@ "meta": { "autorest_options": { "version": "preview", - "use": "@microsoft.azure/autorest.python@~2.1.40", + "use": "@microsoft.azure/autorest.python@~3.0.52", "python": "", 
"python-mode": "update", "sdkrel:python-sdks-folder": "./sdk/.", From eac9467162cdf1dd4eb53e0543637cfb13b78991 Mon Sep 17 00:00:00 2001 From: Zim Kalinowski Date: Fri, 30 Aug 2019 12:43:17 +0000 Subject: [PATCH 28/30] v3.0.52 --- swagger_to_sdk_config.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/swagger_to_sdk_config.json b/swagger_to_sdk_config.json index 03773dcdd582..bae8c63cefa0 100644 --- a/swagger_to_sdk_config.json +++ b/swagger_to_sdk_config.json @@ -3,7 +3,7 @@ "meta": { "autorest_options": { "version": "preview", - "use": "@microsoft.azure/autorest.python@~3.0.52", + "use": "@microsoft.azure/autorest.python@~4.0.71", "python": "", "python-mode": "update", "sdkrel:python-sdks-folder": "./sdk/.", From 5a7d812e36cd425d0e5804c3863d8ab0257fcf16 Mon Sep 17 00:00:00 2001 From: zikalino Date: Fri, 30 Aug 2019 21:25:36 +0800 Subject: [PATCH 29/30] manually updated history and readded tabular translator and copy translator --- .../azure-mgmt-datafactory/HISTORY.rst | 112 ++++++++++++++++++ .../azure/mgmt/datafactory/models/__init__.py | 6 + .../datafactory/models/copy_translator.py | 46 +++++++ .../datafactory/models/copy_translator_py3.py | 46 +++++++ .../datafactory/models/tabular_translator.py | 51 ++++++++ .../models/tabular_translator_py3.py | 51 ++++++++ .../azure/mgmt/datafactory/version.py | 2 +- 7 files changed, 313 insertions(+), 1 deletion(-) create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_translator.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_translator_py3.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator_py3.py diff --git a/sdk/datafactory/azure-mgmt-datafactory/HISTORY.rst b/sdk/datafactory/azure-mgmt-datafactory/HISTORY.rst index 3db402353eb7..2cd706997ea4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/HISTORY.rst +++ b/sdk/datafactory/azure-mgmt-datafactory/HISTORY.rst @@ -147,6 +147,118 @@ Release History - Model AzureDatabricksLinkedService has a new parameter new_cluster_enable_elastic_disk - Added operation TriggerRunsOperations.rerun - Added operation ExposureControlOperations.get_feature_value_by_factory +- Added model Office365Dataset +- Added model AzureBlobFSDataset +- Added model CommonDataServiceForAppsEntityDataset +- Added model DynamicsCrmEntityDataset +- Added model AzureSqlMITableDataset +- Added model HdfsLocation +- Added model HttpServerLocation +- Added model SftpLocation +- Added model FtpServerLocation +- Added model FileServerLocation +- Added model AmazonS3Location +- Added model AzureDataLakeStoreLocation +- Added model AzureBlobFSLocation +- Added model AzureBlobStorageLocation +- Added model DatasetLocation +- Added model BinaryDataset +- Added model JsonDataset +- Added model DelimitedTextDataset +- Added model ParquetDataset +- Added model AvroDataset +- Added model GoogleAdWordsSource +- Added model OracleServiceCloudSource +- Added model DynamicsAXSource +- Added model NetezzaPartitionSettings +- Added model AzureMariaDBSource +- Added model AzureBlobFSSource +- Added model Office365Source +- Added model MongoDbCursorMethodsProperties +- Added model CosmosDbMongoDbApiSource +- Added model MongoDbV2Source +- Added model TeradataPartitionSettings +- Added model TeradataSource +- Added model OraclePartitionSettings +- Added model AzureDataExplorerSource 
+- Added model SqlMISource +- Added model AzureSqlSource +- Added model SqlServerSource +- Added model RestSource +- Added model SapTablePartitionSettings +- Added model SapTableSource +- Added model SapOpenHubSource +- Added model SapHanaSource +- Added model SalesforceServiceCloudSource +- Added model ODataSource +- Added model SapBwSource +- Added model SybaseSource +- Added model PostgreSqlSource +- Added model MySqlSource +- Added model OdbcSource +- Added model Db2Source +- Added model MicrosoftAccessSource +- Added model InformixSource +- Added model CommonDataServiceForAppsSource +- Added model DynamicsCrmSource +- Added model HdfsReadSettings +- Added model HttpReadSettings +- Added model SftpReadSettings +- Added model FtpReadSettings +- Added model FileServerReadSettings +- Added model AmazonS3ReadSettings +- Added model AzureDataLakeStoreReadSettings +- Added model AzureBlobFSReadSettings +- Added model AzureBlobStorageReadSettings +- Added model StoreReadSettings +- Added model BinarySource +- Added model JsonSource +- Added model FormatReadSettings +- Added model DelimitedTextReadSettings +- Added model DelimitedTextSource +- Added model ParquetSource +- Added model AvroSource +- Added model AzureDataExplorerCommandActivity +- Added model SSISAccessCredential +- Added model SSISLogLocation +- Added model CosmosDbMongoDbApiSink +- Added model SalesforceServiceCloudSink +- Added model AzureDataExplorerSink +- Added model CommonDataServiceForAppsSink +- Added model DynamicsCrmSink +- Added model MicrosoftAccessSink +- Added model InformixSink +- Added model AzureBlobFSSink +- Added model SqlMISink +- Added model AzureSqlSink +- Added model SqlServerSink +- Added model FileServerWriteSettings +- Added model AzureDataLakeStoreWriteSettings +- Added model AzureBlobFSWriteSettings +- Added model AzureBlobStorageWriteSettings +- Added model StoreWriteSettings +- Added model BinarySink +- Added model ParquetSink +- Added model JsonWriteSettings +- Added model DelimitedTextWriteSettings +- Added model FormatWriteSettings +- Added model AvroWriteSettings +- Added model AvroSink +- Added model AzureMySqlSink +- Added model AzurePostgreSqlSink +- Added model JsonSink +- Added model DelimitedTextSink +- Added model WebHookActivity +- Added model ValidationActivity +- Added model EntityReference +- Added model IntegrationRuntimeDataProxyProperties +- Added model SsisVariable +- Added model SsisEnvironment +- Added model SsisParameter +- Added model SsisPackage +- Added model SsisEnvironmentReference +- Added model SsisProject +- Added model SsisFolder **Breaking changes** diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index 395f7908afbd..a04232d5b860 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -435,6 +435,8 @@ from .hd_insight_hive_activity_py3 import HDInsightHiveActivity from .redirect_incompatible_row_settings_py3 import RedirectIncompatibleRowSettings from .staging_settings_py3 import StagingSettings + from .tabular_translator_py3 import TabularTranslator + from .copy_translator_py3 import CopyTranslator from .cosmos_db_mongo_db_api_sink_py3 import CosmosDbMongoDbApiSink from .salesforce_service_cloud_sink_py3 import SalesforceServiceCloudSink from .salesforce_sink_py3 import SalesforceSink @@ -951,6 +953,8 @@ from 
.hd_insight_hive_activity import HDInsightHiveActivity from .redirect_incompatible_row_settings import RedirectIncompatibleRowSettings from .staging_settings import StagingSettings + from .tabular_translator import TabularTranslator + from .copy_translator import CopyTranslator from .cosmos_db_mongo_db_api_sink import CosmosDbMongoDbApiSink from .salesforce_service_cloud_sink import SalesforceServiceCloudSink from .salesforce_sink import SalesforceSink @@ -1550,6 +1554,8 @@ 'HDInsightHiveActivity', 'RedirectIncompatibleRowSettings', 'StagingSettings', + 'TabularTranslator', + 'CopyTranslator', 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink', 'SalesforceSink', diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_translator.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_translator.py new file mode 100644 index 000000000000..2b0242ef997c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_translator.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class CopyTranslator(Model): + """A copy activity translator. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: TabularTranslator + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'TabularTranslator': 'TabularTranslator'} + } + + def __init__(self, **kwargs): + super(CopyTranslator, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_translator_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_translator_py3.py new file mode 100644 index 000000000000..3fef58394fd0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_translator_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class CopyTranslator(Model): + """A copy activity translator. + + You probably want to use the sub-classes and not this class directly. 
Known + sub-classes are: TabularTranslator + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'TabularTranslator': 'TabularTranslator'} + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(CopyTranslator, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = None diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator.py new file mode 100644 index 000000000000..fdd098ae9659 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_translator import CopyTranslator + + +class TabularTranslator(CopyTranslator): + """A copy activity tabular translator. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param column_mappings: Column mappings. Example: "UserId: MyUserId, + Group: MyGroup, Name: MyName" Type: string (or Expression with resultType + string). + :type column_mappings: object + :param schema_mapping: The schema mapping to map between tabular data and + hierarchical data. Example: {"Column1": "$.Column1", "Column2": + "$.Column2.Property1", "Column3": "$.Column2.Property2"}. Type: object (or + Expression with resultType object). 
+ :type schema_mapping: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'column_mappings': {'key': 'columnMappings', 'type': 'object'}, + 'schema_mapping': {'key': 'schemaMapping', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(TabularTranslator, self).__init__(**kwargs) + self.column_mappings = kwargs.get('column_mappings', None) + self.schema_mapping = kwargs.get('schema_mapping', None) + self.type = 'TabularTranslator' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator_py3.py new file mode 100644 index 000000000000..0bd2ce51a0f0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator_py3.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_translator_py3 import CopyTranslator + + +class TabularTranslator(CopyTranslator): + """A copy activity tabular translator. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param column_mappings: Column mappings. Example: "UserId: MyUserId, + Group: MyGroup, Name: MyName" Type: string (or Expression with resultType + string). + :type column_mappings: object + :param schema_mapping: The schema mapping to map between tabular data and + hierarchical data. Example: {"Column1": "$.Column1", "Column2": + "$.Column2.Property1", "Column3": "$.Column2.Property2"}. Type: object (or + Expression with resultType object). + :type schema_mapping: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'column_mappings': {'key': 'columnMappings', 'type': 'object'}, + 'schema_mapping': {'key': 'schemaMapping', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, column_mappings=None, schema_mapping=None, **kwargs) -> None: + super(TabularTranslator, self).__init__(additional_properties=additional_properties, **kwargs) + self.column_mappings = column_mappings + self.schema_mapping = schema_mapping + self.type = 'TabularTranslator' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py index a39916c162ce..e4f3d5055303 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py @@ -9,5 +9,5 @@ # regenerated. 
 # --------------------------------------------------------------------------
 
-VERSION = "1.0.0"
+VERSION = "0.8.0"
 

From 2f6a699891063b9c7715c9056d3be26008c3d0ef Mon Sep 17 00:00:00 2001
From: zikalino
Date: Fri, 30 Aug 2019 21:27:56 +0800
Subject: [PATCH 30/30] changed date to 08-30

---
 sdk/datafactory/azure-mgmt-datafactory/HISTORY.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sdk/datafactory/azure-mgmt-datafactory/HISTORY.rst b/sdk/datafactory/azure-mgmt-datafactory/HISTORY.rst
index 2cd706997ea4..d6473844c5c8 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/HISTORY.rst
+++ b/sdk/datafactory/azure-mgmt-datafactory/HISTORY.rst
@@ -3,7 +3,7 @@
 Release History
 ===============
 
-0.8.0 (2019-08-07)
+0.8.0 (2019-08-30)
 ++++++++++++++++++
 
 **Features**
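
With this series applied, the trigger operations return ``msrest.polling.LROPoller`` instead of ``AzureOperationPoller`` and accept a ``polling`` keyword. A minimal usage sketch; the credential values and resource names are placeholders, not values from this patch::

    from azure.common.credentials import ServicePrincipalCredentials
    from azure.mgmt.datafactory import DataFactoryManagementClient

    # Placeholder credentials; substitute real AAD application values.
    credentials = ServicePrincipalCredentials(
        client_id='<client-id>', secret='<secret>', tenant='<tenant-id>')
    client = DataFactoryManagementClient(credentials, '<subscription-id>')

    # polling=True (the default) drives the operation with ARMPolling.
    poller = client.triggers.start('my-rg', 'my-factory', 'my-trigger')
    poller.result()  # blocks until done; start/stop deserialize to None

    # polling=False uses NoPolling: .result() returns the deserialized
    # initial response without querying the operation status again.
    status = client.triggers.subscribe_to_events(
        'my-rg', 'my-factory', 'my-trigger', polling=False).result()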
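
Passing a polling object instead of a boolean customizes the strategy, for example an ``ARMPolling`` with a shorter interval; this assumes the msrestazure version paired with this generator::

    from msrestazure.polling.arm_polling import ARMPolling

    # Check the operation status every 5 seconds instead of the default 30.
    poller = client.triggers.stop(
        'my-rg', 'my-factory', 'my-trigger', polling=ARMPolling(timeout=5))
    poller.wait()
    assert poller.done()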
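
``TriggerRunsOperations.query_by_factory`` serializes a ``RunFilterParameters`` body and now sends an explicit ``Accept`` header; the call signature is unchanged. A sketch reusing ``client`` from above, with an illustrative one-day window::

    from datetime import datetime, timedelta

    from azure.mgmt.datafactory.models import RunFilterParameters

    # Both window bounds are required by RunFilterParameters.
    now = datetime.utcnow()
    filters = RunFilterParameters(
        last_updated_after=now - timedelta(days=1),
        last_updated_before=now)
    response = client.trigger_runs.query_by_factory(
        'my-rg', 'my-factory', filters)
    for run in response.value:
        print(run.trigger_name, run.status)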
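
The re-added ``TabularTranslator`` plugs into ``CopyActivity.translator``. A sketch using the mapping shapes documented in the model docstrings; the dataset reference names are hypothetical::

    from azure.mgmt.datafactory.models import (
        BlobSource, CopyActivity, DatasetReference, SqlSink,
        TabularTranslator)

    translator = TabularTranslator(
        # String form maps flat columns as "source: sink" pairs.
        column_mappings='UserId: MyUserId, Group: MyGroup, Name: MyName',
        # Object form maps hierarchical (JSON-path) data onto columns.
        schema_mapping={'Column1': '$.Column1',
                        'Column2': '$.Column2.Property1'})

    copy = CopyActivity(
        name='CopyWithTabularTranslator',
        inputs=[DatasetReference(reference_name='SourceBlobDataset')],
        outputs=[DatasetReference(reference_name='SinkSqlDataset')],
        source=BlobSource(),
        sink=SqlSink(),
        translator=translator)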
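
Because ``CopyTranslator._subtype_map`` keys on the ``type`` discriminator, msrest dispatches payloads to the matching subclass during deserialization. A sketch of that round trip; constructing the ``Deserializer`` this way is an assumption about typical msrest usage, not code from this patch::

    from msrest import Deserializer

    from azure.mgmt.datafactory import models

    client_models = {k: v for k, v in models.__dict__.items()
                     if isinstance(v, type)}
    deserialize = Deserializer(client_models)

    payload = {'type': 'TabularTranslator',
               'columnMappings': 'UserId: MyUserId'}
    translator = deserialize('CopyTranslator', payload)
    assert isinstance(translator, models.TabularTranslator)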